text stringlengths 8 4.13M |
|---|
use std::error::Error;
use http::Request;
use lambda_runtime::{Context, error::HandlerError, lambda};
use lambda_utils::apigateway::ApiGatewayRequest;
use lambda_utils::apigateway::ApiGatewayResponse;
use http::header::HeaderValue;
use tokio::runtime::Runtime;
/// Entry point: builds a Tokio runtime and hands `lambda_handler` to the
/// `lambda!` macro, which drives the AWS Lambda runtime event loop.
fn main() -> Result<(), Box<dyn Error>> {
    // Propagate runtime-construction failure via `?` instead of panicking:
    // `main` already returns `Result`, so an `unwrap` here was needless.
    let runtime = Runtime::new()?;
    lambda!(lambda_handler, runtime);
    Ok(())
}
/// respond to an API Gateway Proxy Request with some static content
/// respond to an API Gateway Proxy Request with some static content
fn lambda_handler(req: ApiGatewayRequest, _c: Context) -> Result<ApiGatewayResponse, HandlerError> {
    // Log the incoming request (converted to an `http::Request`) for debugging.
    let request = Request::from(req);
    println!("{:?}", request);
    // Static 200 response with a JSON content type.
    let response = http::Response::builder()
        .status(200)
        .header(http::header::CONTENT_TYPE, HeaderValue::from_static("application/json"))
        .body("Hello".to_string())
        .unwrap();
    Ok(ApiGatewayResponse::from(response))
}
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use alloc::string::String;
use alloc::vec::Vec;
use alloc::sync::Arc;
use spin::Mutex;
use core::any::Any;
use socket::unix::transport::unix::BoundEndpoint;
use super::super::super::qlib::common::*;
use super::super::super::qlib::linux_def::*;
use super::super::super::qlib::auth::*;
use super::super::super::qlib::device::*;
use super::super::super::kernel::time::*;
use super::super::super::kernel::pipe::pipe::*;
use super::super::super::kernel::pipe::node::*;
use super::super::super::task::*;
use super::super::attr::*;
use super::super::mount::*;
use super::super::flags::*;
use super::super::file::*;
use super::super::inode::*;
use super::super::dirent::*;
use super::super::host::hostinodeop::*;
use super::tmpfs_dir::*;
/// Creates a tmpfs FIFO inode backed by an in-kernel pipe.
pub fn NewTmpfsFifoInode(task: &Task, perms: &FilePermissions, msrc: &Arc<Mutex<MountSource>>) -> Result<Inode> {
    // First create a pipe, then wrap its inode ops in the tmpfs FIFO type.
    let pipe = Pipe::New(task, true, DEFAULT_PIPE_SIZE, MemoryDef::PAGE_SIZE as usize);
    let fifo = TmpfsFifoInodeOp(NewPipeInodeOps(task, perms, pipe));
    // Allocate device/inode identity from the shared tmpfs device.
    let deviceId = TMPFS_DEVICE.lock().DeviceID();
    let inodeId = TMPFS_DEVICE.lock().NextIno();
    let attr = StableAttr {
        Type: InodeType::Pipe,
        DeviceId: deviceId,
        InodeId: inodeId,
        BlockSize: MemoryDef::PAGE_SIZE as i64,
        DeviceFileMajor: 0,
        DeviceFileMinor: 0,
    };
    Ok(Inode::New(&Arc::new(fifo), msrc, &attr))
}
pub struct TmpfsFifoInodeOp(PipeIops);
// InodeOperations for a tmpfs FIFO: a FIFO is not a directory, so all
// directory-style operations fail with ENOTDIR; data-path operations
// delegate to the wrapped PipeIops.
impl InodeOperations for TmpfsFifoInodeOp {
    fn as_any(&self) -> &Any {
        return self
    }
    fn IopsType(&self) -> IopsType {
        return IopsType::TmpfsFifoInodeOp;
    }
    fn InodeType(&self) -> InodeType {
        return self.0.InodeType();
    }
    fn InodeFileType(&self) -> InodeFileType{
        return InodeFileType::TmpfsFifo;
    }
    fn WouldBlock(&self) -> bool {
        return self.0.WouldBlock();
    }
    // Directory operations: a FIFO has no children, so everything below
    // returns ENOTDIR.
    fn Lookup(&self, _task: &Task, _dir: &Inode, _name: &str) -> Result<Dirent> {
        return Err(Error::SysError(SysErr::ENOTDIR))
    }
    fn Create(&self, _task: &Task, _dir: &mut Inode, _name: &str, _flags: &FileFlags, _perm: &FilePermissions) -> Result<File> {
        return Err(Error::SysError(SysErr::ENOTDIR))
    }
    fn CreateDirectory(&self, _task: &Task, _dir: &mut Inode, _name: &str, _perm: &FilePermissions) -> Result<()> {
        return Err(Error::SysError(SysErr::ENOTDIR))
    }
    fn CreateLink(&self, _task: &Task, _dir: &mut Inode, _oldname: &str, _newname: &str) -> Result<()> {
        return Err(Error::SysError(SysErr::ENOTDIR))
    }
    fn CreateHardLink(&self, _task: &Task, _dir: &mut Inode, _target: &Inode, _name: &str) -> Result<()> {
        return Err(Error::SysError(SysErr::ENOTDIR))
    }
    fn CreateFifo(&self, _task: &Task, _dir: &mut Inode, _name: &str, _perm: &FilePermissions) -> Result<()> {
        return Err(Error::SysError(SysErr::ENOTDIR))
    }
    fn Remove(&self, _task: &Task, _dir: &mut Inode, _name: &str) -> Result<()> {
        return Err(Error::SysError(SysErr::ENOTDIR))
    }
    fn RemoveDirectory(&self, _task: &Task, _dir: &mut Inode, _name: &str) -> Result<()>{
        return Err(Error::SysError(SysErr::ENOTDIR))
    }
    // Renaming the FIFO itself is legal; delegate to the shared tmpfs rename.
    fn Rename(&self, task: &Task, _dir: &mut Inode, oldParent: &Inode, oldname: &str, newParent: &Inode, newname: &str, replacement: bool) -> Result<()> {
        return TmpfsRename(task, oldParent, oldname, newParent, newname, replacement)
    }
    fn Bind(&self, _task: &Task, _dir: &Inode, _name: &str, _data: &BoundEndpoint, _perms: &FilePermissions) -> Result<Dirent> {
        return Err(Error::SysError(SysErr::ENOTDIR))
    }
    // A FIFO is not a Unix-domain socket.
    fn BoundEndpoint(&self, _task: &Task, _inode: &Inode, _path: &str) -> Option<BoundEndpoint> {
        return None;
    }
    // Data path: open/attr/xattr calls go straight to the pipe ops.
    fn GetFile(&self, task: &Task, dir: &Inode, dirent: &Dirent, flags: FileFlags) -> Result<File> {
        return self.0.GetFile(task, dir, dirent, flags)
    }
    fn UnstableAttr(&self, task: &Task, dir: &Inode) -> Result<UnstableAttr> {
        return self.0.UnstableAttr(task, dir)
    }
    fn Getxattr(&self, dir: &Inode, name: &str) -> Result<String> {
        return self.0.Getxattr(dir, name)
    }
    fn Setxattr(&self, dir: &mut Inode, name: &str, value: &str) -> Result<()> {
        return self.0.Setxattr(dir, name, value)
    }
    fn Listxattr(&self, dir: &Inode) -> Result<Vec<String>> {
        return self.0.Listxattr(dir)
    }
    fn Check(&self, task: &Task, inode: &Inode, reqPerms: &PermMask) -> Result<bool> {
        return ContextCanAccessFile(task, inode, reqPerms)
    }
    fn SetPermissions(&self, task: &Task, dir: &mut Inode, f: FilePermissions) -> bool {
        self.0.SetPermissions(task, dir, f);
        return true;
    }
    fn SetOwner(&self, task: &Task, dir: &mut Inode, owner: &FileOwner) -> Result<()> {
        return self.0.SetOwner(task, dir, owner);
    }
    fn SetTimestamps(&self, task: &Task, dir: &mut Inode, ts: &InterTimeSpec) -> Result<()> {
        return self.0.SetTimestamps(task, dir, ts)
    }
    // Truncate is a silent no-op for pipes (no backing size to change).
    fn Truncate(&self, _task: &Task, _dir: &mut Inode, _size: i64) -> Result<()> {
        return Ok(())
    }
    fn Allocate(&self, _task: &Task, _dir: &mut Inode, _offset: i64, _length: i64) -> Result<()> {
        return Err(Error::SysError(SysErr::EPIPE))
    }
    // Not a symlink.
    fn ReadLink(&self, _task: &Task, _dir: &Inode) -> Result<String> {
        return Err(Error::SysError(SysErr::ENOLINK))
    }
    fn GetLink(&self, _task: &Task, _dir: &Inode) -> Result<Dirent> {
        return Err(Error::SysError(SysErr::ENOLINK))
    }
    fn AddLink(&self, task: &Task) {
        self.0.AddLink(task)
    }
    fn DropLink(&self, task: &Task) {
        self.0.DropLink(task)
    }
    fn IsVirtual(&self) -> bool {
        return false;
    }
    // Nothing to flush; pipe data lives only in memory.
    fn Sync(&self) -> Result<()> {
        return Ok(())
    }
    fn StatFS(&self, _task: &Task) -> Result<FsInfo> {
        return Ok(TMPFS_FSINFO)
    }
    // A FIFO cannot be memory-mapped.
    fn Mappable(&self) -> Result<HostInodeOp> {
        return Err(Error::SysError(SysErr::ENODEV))
    }
}
pub(crate) mod actor_key;
pub(crate) mod actor_packet_writer;
pub(crate) mod actor_record;
pub(crate) mod mut_handler;
pub(crate) mod server_actor_manager;
pub(crate) mod server_actor_message;
pub(crate) mod server_actor_mutator;
|
use crate::util::color::Color;
use crate::util::rng::get_rng;
use rand::Rng;
use serde::{Deserialize, Serialize};
use std::f64;
use std::ops::{Add, AddAssign, Div, Mul, Sub};
// Tolerance used by `Vector::iszero` for near-zero comparisons.
const EPSILON: f64 = 0.00001;
/// Clamping of a value into the unit interval [0, 1].
trait Clamp01 {
    fn clamp01(self) -> Self;
}
impl Clamp01 for f64 {
    fn clamp01(self) -> Self {
        // min-then-max order matters for NaN: f64::min/max return the
        // non-NaN operand, so NaN clamps to 1.0 here.
        self.min(1.).max(0.)
    }
}
/// A 3-component `f64` vector used for points, directions and color values.
#[derive(PartialEq, Debug, Clone, Copy, Default, Serialize, Deserialize)]
pub struct Vector {
    pub x: f64,
    pub y: f64,
    pub z: f64,
}
impl Vector {
    /// Creates a vector from its three components.
    pub fn new(x: f64, y: f64, z: f64) -> Self {
        Self { x, y, z }
    }
    /// Makes a vector from one value, making the x, y and z component the same
    pub fn repeated(a: f64) -> Self {
        Self { x: a, y: a, z: a }
    }
    /// Builds a vector from an `[f32; 3]` array, widening to `f64`.
    pub fn from_arr([a, b, c]: [f32; 3]) -> Self {
        Self::new(a as f64, b as f64, c as f64)
    }
    /// True when every component is within `EPSILON` of zero.
    pub fn iszero(&self) -> bool {
        self.x.abs() < EPSILON && self.y.abs() < EPSILON && self.z.abs() < EPSILON
    }
    /// Dot (scalar) product.
    pub fn dot(&self, other: Self) -> f64 {
        self.x * other.x + self.y * other.y + self.z * other.z
    }
    /// Cross product.
    pub fn cross(&self, other: Self) -> Self {
        Vector::new(
            self.y * other.z - self.z * other.y,
            self.z * other.x - self.x * other.z,
            self.x * other.y - self.y * other.x,
        )
    }
    /// Squared Euclidean length (cheaper than `length`; no sqrt).
    pub fn length2(&self) -> f64 {
        self.x.powi(2) + self.y.powi(2) + self.z.powi(2)
    }
    /// Euclidean length.
    pub fn length(&self) -> f64 {
        self.length2().sqrt()
    }
    /// Scales `self` in place to unit length; the zero vector is left unchanged.
    pub fn normalize(&mut self) {
        let length = self.length();
        if length > 0f64 {
            self.x /= length;
            self.y /= length;
            self.z /= length;
        }
    }
    /// Returns a unit-length copy.
    /// NOTE(review): unlike `normalize`, this has no zero-length guard, so a
    /// zero vector yields NaN components — confirm callers never pass one.
    pub fn unit(&self) -> Vector {
        let length = self.length();
        Vector::new(self.x / length, self.y / length, self.z / length)
    }
    /// Component-wise `powf(exp)`.
    pub fn powf(&self, exp: f64) -> Vector {
        Vector::new(self.x.powf(exp), self.y.powf(exp), self.z.powf(exp))
    }
    /// Largest of the three components.
    pub fn max_item(&self) -> f64 {
        if self.x > self.y {
            if self.x > self.z {
                self.x
            } else {
                self.z
            }
        } else if self.y > self.z {
            self.y
        } else {
            self.z
        }
    }
    /// Component-wise minimum with `other`.
    pub fn min(&self, other: &Self) -> Self {
        Self {
            x: self.x.min(other.x),
            y: self.y.min(other.y),
            z: self.z.min(other.z),
        }
    }
    /// Component-wise maximum with `other`.
    pub fn max(&self, other: &Self) -> Self {
        Self {
            x: self.x.max(other.x),
            y: self.y.max(other.y),
            z: self.z.max(other.z),
        }
    }
    /// Applies `powf(exp)` then scales by `(exp + 1) / 2`.
    /// NOTE(review): unusual gamma formula — presumably an intentional
    /// brightness compensation; verify against the renderer's expectations.
    pub fn gamma(&self, exp: f64) -> Vector {
        self.powf(exp) * (exp + 1f64) / 2f64
    }
    /// Transforms `self` using an orthonormal basis built around `rotation`
    /// (which multiplies the y component) — presumably mapping a local frame
    /// whose "up" axis is `rotation` into world space; confirm with callers.
    pub fn rotated(&self, rotation: Vector) -> Vector {
        // Pick the more numerically stable tangent depending on which of
        // rotation.x / rotation.y dominates.
        let nt = if rotation.x.abs() > rotation.y.abs() {
            Vector::new(rotation.z, 0f64, -rotation.x)
                / (rotation.x.powi(2) + rotation.z.powi(2)).sqrt()
        } else {
            Vector::new(0f64, -rotation.z, rotation.y)
                / (rotation.y.powi(2) + rotation.z.powi(2)).sqrt()
        };
        let nb = rotation.cross(nt);
        let x = self.x * nb.x + self.y * rotation.x + self.z * nt.x;
        let y = self.x * nb.y + self.y * rotation.y + self.z * nt.y;
        let z = self.x * nb.z + self.y * rotation.z + self.z * nt.z;
        Vector::new(x, y, z)
    }
    /// Random direction on the upper (y >= 0) hemisphere, via uniform
    /// spherical sampling with the y component mirrored by `abs()`.
    pub fn point_on_hemisphere() -> Vector {
        let theta = get_rng(|mut r| r.gen::<f64>()) * 2f64 * f64::consts::PI;
        let phi = (1f64 - 2f64 * get_rng(|mut r| r.gen::<f64>())).acos();
        Vector::new(
            phi.sin() * theta.cos(),
            (phi.sin() * theta.sin()).abs(),
            phi.cos(),
        )
    }
    /// Uniformly random direction on the unit sphere.
    pub fn point_on_sphere() -> Vector {
        let theta = get_rng(|mut r| r.gen::<f64>()) * 2f64 * f64::consts::PI;
        let phi = (1f64 - 2f64 * get_rng(|mut r| r.gen::<f64>())).acos();
        Vector::new(
            phi.sin() * theta.cos(),
            phi.sin() * theta.sin(),
            phi.cos(),
        )
    }
    /// Random direction on the hemisphere (y up) for diffuse scattering —
    /// the `sqrt(1 - u)` y term matches cosine-weighted sampling.
    pub fn point_on_diffuse_hemisphere() -> Vector {
        let u = get_rng(|mut r| r.gen::<f64>());
        let v = 2. * f64::consts::PI * get_rng(|mut r| r.gen::<f64>());
        Vector::new(v.cos() * u.sqrt(), (1. - u).sqrt(), v.sin() * u.sqrt())
    }
}
/// Conversion to 8-bit RGB: each component is clamped to [0, 1] and scaled
/// to 0-255.
///
/// Implemented as `From` (rather than a hand-written `Into`) per Rust
/// convention; the blanket impl still provides `Vector: Into<Color>` for
/// existing `.into()` call sites.
impl From<Vector> for Color {
    fn from(v: Vector) -> Self {
        Color {
            r: (v.x.clamp01() * 255.) as u8,
            g: (v.y.clamp01() * 255.) as u8,
            b: (v.z.clamp01() * 255.) as u8,
        }
    }
}
impl Mul<Vector> for f64 {
type Output = Vector;
fn mul(self, rhs: Vector) -> Self::Output {
Vector {
x: rhs.x * self,
y: rhs.y * self,
z: rhs.z * self,
}
}
}
impl Add for Vector {
type Output = Vector;
fn add(self, rhs: Self) -> Self::Output {
Self {
x: self.x + rhs.x,
y: self.y + rhs.y,
z: self.z + rhs.z,
}
}
}
/// Component-wise `+=`.
impl AddAssign for Vector {
    fn add_assign(&mut self, rhs: Self) {
        self.x += rhs.x;
        self.y += rhs.y;
        self.z += rhs.z;
    }
}
impl Sub for Vector {
type Output = Vector;
fn sub(self, rhs: Self) -> Self::Output {
Self {
x: self.x - rhs.x,
y: self.y - rhs.y,
z: self.z - rhs.z,
}
}
}
impl Div<f64> for Vector {
type Output = Vector;
fn div(self, rhs: f64) -> Self::Output {
Self {
x: self.x / rhs,
y: self.y / rhs,
z: self.z / rhs,
}
}
}
impl Mul<f64> for Vector {
type Output = Vector;
fn mul(self, rhs: f64) -> Self::Output {
Self {
x: self.x * rhs,
y: self.y * rhs,
z: self.z * rhs,
}
}
}
impl Mul<Vector> for Vector {
type Output = Vector;
fn mul(self, rhs: Vector) -> Self::Output {
Self {
x: self.x * rhs.x,
y: self.y * rhs.y,
z: self.z * rhs.z,
}
}
}
#[cfg(test)]
mod tests {
    use crate::util::color::Color;
    use crate::util::vector::Vector;
    #[test]
    fn test_add() {
        let a = Vector::new(1f64, 2f64, 3f64);
        let b = Vector::new(5f64, 3f64, 2f64);
        let c = a + b;
        assert_eq!(c, Vector::new(6f64, 5f64, 5f64));
    }
    #[test]
    fn test_to_color_1() {
        // Components are clamped to [0, 1] before scaling:
        // 5.0 -> 255, -5.0 -> 0, and 0.5 * 255 = 127.5 truncates to 127.
        let a: Vector = Vector::new(5., -5., 0.5);
        let c: Color = a.into();
        assert_eq!(
            c,
            Color {
                r: 255,
                g: 0,
                b: 127
            }
        );
    }
}
|
use proc_macro::TokenStream;
use quote::quote;
use crate::{
args,
utils::{get_crate_name, get_rustdoc, GeneratorResult},
};
/// Expands the `Description` derive: emits an impl of the crate's
/// `Description` trait whose `description()` returns the type's rustdoc text.
///
/// Errors if the rustdoc attributes cannot be parsed (`get_rustdoc`); a type
/// without docs gets an empty string via `unwrap_or_default`.
pub fn generate(desc_args: &args::Description) -> GeneratorResult<TokenStream> {
    let crate_name = get_crate_name(desc_args.internal);
    let ident = &desc_args.ident;
    // Carry the target type's generics onto the generated impl.
    let (impl_generics, ty_generics, where_clause) = desc_args.generics.split_for_impl();
    let doc = get_rustdoc(&desc_args.attrs)?.unwrap_or_default();
    let expanded = quote! {
        impl #impl_generics #crate_name::Description for #ident #ty_generics #where_clause {
            fn description() -> &'static str {
                #doc
            }
        }
    };
    Ok(expanded.into())
}
|
use crate::sim::*;
// it just finds where the cdf crosses 0.5 or whatever
pub struct CdfBisect {
    // CDF threshold to bisect at (e.g. 0.5 for a median-style bisect).
    bisect_point: f64,
    // Lowest commit index at which the bug has been seen to reproduce;
    // starts past the end (last index) meaning "not seen yet".
    earliest_bug_seen: usize,
}
impl CdfBisect {
pub fn new(s: &SimulationState, bisect_point: f64) -> Self {
Self {
bisect_point,
// never test the last commit
earliest_bug_seen: s.pdf.len() - 1,
}
}
}
impl BisectStrategy for CdfBisect {
    fn name(&self) -> String {
        format!("cdf_{}", self.bisect_point)
    }
    /// Picks the first commit whose CDF value reaches the bisect point,
    /// stepping one commit earlier when that commit is already known buggy.
    fn select_commit(&mut self, s: &SimulationState) -> usize {
        let candidate = s.first_cdf_index_eq_or_greater(self.bisect_point);
        if candidate != self.earliest_bug_seen {
            return candidate;
        }
        assert!(candidate > 0, "saw bug on commit 0 -> should be 100% already");
        candidate - 1
    }
    /// Records a repro, tightening the known-buggy lower bound.
    fn notify_result(&mut self, result: BisectAttempt) {
        if !result.bug_repros {
            return;
        }
        if result.commit < self.earliest_bug_seen {
            self.earliest_bug_seen = result.commit;
        } else if result.commit == self.earliest_bug_seen {
            panic!("tested known buggy commit twice");
        }
    }
}
|
use wasm_bindgen::prelude::*;
// localStorage key under which the textarea contents are persisted.
const KEY: &str = "draco.examples.local_storage";
/// Application state: the current textarea contents.
#[derive(Debug)]
pub struct LocalStorage {
    value: String,
}
impl LocalStorage {
    /// Builds the app state, restoring any previously saved value from
    /// `window.localStorage` (empty string when nothing was stored).
    fn new() -> Self {
        LocalStorage {
            // `unwrap_or_default()` replaces `unwrap_or("".into())`:
            // same empty-string result, no eagerly-built argument.
            value: Self::storage()
                .get_item(KEY)
                .expect("get_item")
                .unwrap_or_default(),
        }
    }
    /// Returns the window's `localStorage` handle.
    /// `local_storage()` yields `Result<Option<Storage>, _>`, hence the two
    /// `expect`s: one for the JS error case, one for the `None` case.
    fn storage() -> web_sys::Storage {
        web_sys::window()
            .expect("window")
            .local_storage()
            .expect("window.local_storage")
            .expect("window.local_storage")
    }
}
/// Messages produced by the view.
pub enum Message {
    // The textarea content changed (or the Clear button was pressed).
    Update(String),
}
impl draco::Application for LocalStorage {
    type Message = Message;
    /// Persists the new value to localStorage, then mirrors it into state.
    fn update(&mut self, message: Self::Message, _: &draco::Mailbox<Self::Message>) {
        match message {
            Message::Update(value) => {
                Self::storage().set_item(KEY, &value).expect("set_item");
                self.value = value;
            }
        }
    }
    /// Renders the instructions, a textarea bound to `value`, a clear
    /// button, and a debug dump of the whole state.
    fn view(&self) -> draco::VNode<Self::Message> {
        use draco::html as h;
        h::div()
            .with((
                h::p().with("Type anything below."),
                h::p().with(
                    "The value is automatically stored in LocalStorage and restored on page load.",
                ),
                h::textarea()
                    .value(self.value.clone())
                    .on_input(Message::Update),
                h::button()
                    .with("Clear")
                    // Clearing is just an update with the empty string.
                    .on("click", |_| Message::Update("".into())),
                h::pre().with(format!("{:?}", self)),
            ))
            .into()
    }
}
/// WASM entry point: mounts the application onto the page's `<main>` element.
#[wasm_bindgen(start)]
pub fn start() {
    draco::start(
        LocalStorage::new(),
        draco::select("main").expect("<main>").into(),
    );
}
|
use jni::objects::GlobalRef;
use wasi_common::WasiCtx;
/// Host-side data carried alongside a store — presumably a Wasmtime store
/// given the `wasi_common` types; confirm against the embedding code.
pub(crate) struct StoreData {
    // WASI context, present only when WASI is enabled for the instance.
    pub wasi: Option<WasiCtx>,
    // JNI global reference to the Java-side host object, if any.
    pub java_data: Option<GlobalRef>,
}
|
#![deny(clippy::all)]
pub mod cbc;
pub mod ctr;
pub mod ecb;
pub mod padding;
/// Block cipher modes of operation supported by this crate.
#[derive(Debug)]
pub enum Mode {
    ECB,
    CBC,
    CTR,
}
/// Represents a cipher
pub trait Cipher {
    /// Encrypts `msg` under `key`, returning the ciphertext.
    fn encrypt(&self, key: &[u8], msg: &[u8]) -> Vec<u8>;
    /// Decrypts `ct` under `key`, returning the plaintext.
    fn decrypt(&self, key: &[u8], ct: &[u8]) -> Vec<u8>;
}
/// Instantiate a new cipher provided a specific mode and default initialization vector/nonce
pub fn new(mode: Mode) -> Box<dyn Cipher> {
    match mode {
        Mode::ECB => Box::new(ecb::AES_128_ECB::new()),
        Mode::CBC => Box::new(cbc::AES_128_CBC::new()),
        Mode::CTR => Box::new(ctr::AES_128_CTR::new()),
    }
}
/// Transforms a slice of bytes to a 2D vector (blocks) given the block size.
///
/// The final block is shorter than `size` when `s.len()` is not a multiple
/// of `size`. Panics if `size` is 0 (same as the original loop's `chunks`).
pub fn into_blocks(s: &[u8], size: usize) -> Vec<Vec<u8>> {
    // Iterator form of the original push loop: one owned Vec per chunk.
    s.chunks(size).map(|chunk| chunk.to_vec()).collect()
}
/// Inverse transformation of `into_blocks`: concatenates all blocks.
///
/// Iterates by reference and copies bytes directly; the original's
/// `blocks.to_vec()` cloned every block up front just to flatten them.
pub fn from_blocks(blocks: &[Vec<u8>]) -> Vec<u8> {
    blocks.iter().flatten().copied().collect()
}
/// Fills `arr` in place with random bytes, one `rand::random` draw per slot.
pub fn random_bytes_array(arr: &mut [u8]) {
    arr.iter_mut().for_each(|byte| *byte = rand::random::<u8>());
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_into_blocks() {
        // 10 bytes / block size 3 -> three full blocks plus a 1-byte tail.
        assert_eq!(
            into_blocks(b"helloworld", 3),
            vec![
                b"hel".to_vec(),
                b"low".to_vec(),
                b"orl".to_vec(),
                b"d".to_vec()
            ]
        );
    }
    #[test]
    fn test_from_blocks() {
        // Round-trip of the case above.
        assert_eq!(
            from_blocks(&[
                b"hel".to_vec(),
                b"low".to_vec(),
                b"orl".to_vec(),
                b"d".to_vec(),
            ]),
            b"helloworld"
        );
    }
}
|
pub mod article;
pub mod file_stores;
pub mod item_name;
pub mod price;
pub mod shop;
|
// error-pattern: can't refer to a module as a first-class value
// Nested empty modules; `main` below tries to use `m1.a` as a value,
// which must trigger the error named in the error-pattern line above.
mod m1 {
    mod a {
    }
}
// Pre-1.0 Rust syntax (`vec[str]` parameter, `log` statement). This file is
// a compile-fail test: it is EXPECTED not to build, so the code is left as-is.
fn main(vec[str] args) {
    log m1.a;
}
|
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use alloc::sync::Arc;
use spin::Mutex;
use alloc::vec::Vec;
use core::slice;
use super::super::super::super::qlib::common::*;
use super::super::super::super::qlib::linux_def::*;
use super::super::super::super::qlib::auth::*;
use super::super::super::fsutil::file::readonly_file::*;
use super::super::super::fsutil::inode::simple_file_inode::*;
use super::super::super::super::task::*;
use super::super::super::attr::*;
use super::super::super::file::*;
use super::super::super::flags::*;
use super::super::super::dirent::*;
use super::super::super::mount::*;
use super::super::super::inode::*;
use super::super::super::super::threadmgr::thread::*;
use super::super::inode::*;
/// Creates the /proc auxv inode for `thread`, readable only by the owner
/// (mode 0400) and tagged with the proc filesystem magic.
pub fn NewAUXVec(task: &Task, thread: &Thread, msrc: &Arc<Mutex<MountSource>>) -> Inode {
    let v = NewAUXVecSimpleFileInode(task, thread, &ROOT_OWNER, &FilePermissions::FromMode(FileMode(0o400)), FSMagic::PROC_SUPER_MAGIC);
    return NewProcInode(&Arc::new(v), msrc, InodeType::SpecialFile, Some(thread.clone()))
}
/// Wraps the auxv file trait for `thread` in a `SimpleFileInode` with the
/// given owner, permissions and filesystem magic (`typ`).
pub fn NewAUXVecSimpleFileInode(task: &Task,
                                thread: &Thread,
                                owner: &FileOwner,
                                perms: &FilePermissions,
                                typ: u64)
                                -> SimpleFileInode<AUXVecSimpleFileTrait> {
    return SimpleFileInode::New(task, owner, perms, typ, false, AUXVecSimpleFileTrait {
        thread: thread.clone(),
    })
}
/// `SimpleFileTrait` implementation exposing a thread's auxiliary vector.
pub struct AUXVecSimpleFileTrait {
    pub thread: Thread,
}
impl SimpleFileTrait for AUXVecSimpleFileTrait {
    /// Opens the auxv file: builds read-only file operations for the thread.
    fn GetFile(&self, _task: &Task, _dir: &Inode, dirent: &Dirent, flags: FileFlags) -> Result<File> {
        let fops = NewAUXVecReadonlyFileOperations(&self.thread);
        let file = File::New(dirent, &flags, fops);
        return Ok(file);
    }
}
/// Builds read-only file operations backed by `thread`'s auxv node.
pub fn NewAUXVecReadonlyFileOperations(thread: &Thread) -> ReadonlyFileOperations<AUXVecReadonlyFileNode> {
    return ReadonlyFileOperations {
        node: AUXVecReadonlyFileNode {
            thread: thread.clone(),
        }
    }
}
/// Read-only node that serves the auxv bytes of `thread`.
pub struct AUXVecReadonlyFileNode {
    pub thread: Thread,
}
impl ReadonlyFileNode for AUXVecReadonlyFileNode {
    /// Reads the thread's auxiliary vector as little-endian (key, value)
    /// u64 pairs terminated by an AT_NULL (0, 0) entry.
    fn ReadAt(&self, task: &Task, _f: &File, dsts: &mut [IoVec], offset: i64, _blocking: bool) -> Result<i64> {
        if offset < 0 {
            return Err(Error::SysError(SysErr::EINVAL))
        }
        let mm = self.thread.lock().memoryMgr.clone();
        let metadata = mm.metadata.lock();
        let auxvlen = metadata.auxv.len();
        // Total readable size: 16 bytes per emitted entry plus the 16-byte
        // AT_NULL terminator; entry 0 is skipped below, hence the `- 16`.
        let size = (auxvlen + 1) * 16 - 16;
        if offset >= size as i64 {
            return Ok(0)
        }
        let mut buf : Vec<u64> = Vec::with_capacity(auxvlen + 1);
        // NOTE(review): starts at index 1, so auxv[0] is never emitted —
        // confirm this matches how the loader populates metadata.auxv.
        for i in 1..auxvlen {
            let e = &metadata.auxv[i];
            buf.push(e.Key as u64);
            buf.push(e.Val);
        }
        // AT_NULL terminator.
        buf.push(0);
        buf.push(0);
        let ptr = &buf[0] as * const _ as u64 as * const u8;
        assert!(buf.len() * 8 >= size);
        // SAFETY: `buf` holds at least `size` bytes (asserted above) and
        // stays alive for the duration of the copy.
        let slice = unsafe { slice::from_raw_parts(ptr, size) };
        // Fix: honor the read offset. Previously the copy always started at
        // byte 0, so sequential reads returned duplicated data.
        let n = task.CopyDataOutToIovs(&slice[offset as usize..], dsts)?;
        return Ok(n as i64)
    }
}
|
#[doc = "Reader of register APB1SECSR1"]
pub type R = crate::R<u32, super::APB1SECSR1>;
#[doc = "Reader of field `LPTIM1SECF`"]
pub type LPTIM1SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `OPAMPSECF`"]
pub type OPAMPSECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `DACSECF`"]
pub type DACSECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `PWRSECF`"]
pub type PWRSECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `CRSSECF`"]
pub type CRSSECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `I2C3SECF`"]
pub type I2C3SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `I2C2SECF`"]
pub type I2C2SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `I2C1SECF`"]
pub type I2C1SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `UART5SECF`"]
pub type UART5SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `UART4SECF`"]
pub type UART4SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `UART3SECF`"]
pub type UART3SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `UART2SECF`"]
pub type UART2SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `SPI3SECF`"]
pub type SPI3SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `SPI2SECF`"]
pub type SPI2SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `WWDGSECF`"]
pub type WWDGSECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `RTCAPBSECF`"]
pub type RTCAPBSECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TIM7SECF`"]
pub type TIM7SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TIM6SECF`"]
pub type TIM6SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TIM5SECF`"]
pub type TIM5SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TIM4SECF`"]
pub type TIM4SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TIM3SECF`"]
pub type TIM3SECF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TIM2SECF`"]
pub type TIM2SECF_R = crate::R<bool, bool>;
// Auto-generated svd2rust-style accessors: each method extracts one
// security-flag bit of the APB1SECSR1 register value.
impl R {
    #[doc = "Bit 31 - LPTIM1SECF"]
    #[inline(always)]
    pub fn lptim1secf(&self) -> LPTIM1SECF_R {
        LPTIM1SECF_R::new(((self.bits >> 31) & 0x01) != 0)
    }
    #[doc = "Bit 30 - OPAMPSECF"]
    #[inline(always)]
    pub fn opampsecf(&self) -> OPAMPSECF_R {
        OPAMPSECF_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 29 - DACSECF"]
    #[inline(always)]
    pub fn dacsecf(&self) -> DACSECF_R {
        DACSECF_R::new(((self.bits >> 29) & 0x01) != 0)
    }
    #[doc = "Bit 28 - PWRSECF"]
    #[inline(always)]
    pub fn pwrsecf(&self) -> PWRSECF_R {
        PWRSECF_R::new(((self.bits >> 28) & 0x01) != 0)
    }
    #[doc = "Bit 24 - CRSSECF"]
    #[inline(always)]
    pub fn crssecf(&self) -> CRSSECF_R {
        CRSSECF_R::new(((self.bits >> 24) & 0x01) != 0)
    }
    #[doc = "Bit 23 - I2C3SECF"]
    #[inline(always)]
    pub fn i2c3secf(&self) -> I2C3SECF_R {
        I2C3SECF_R::new(((self.bits >> 23) & 0x01) != 0)
    }
    #[doc = "Bit 22 - I2C2SECF"]
    #[inline(always)]
    pub fn i2c2secf(&self) -> I2C2SECF_R {
        I2C2SECF_R::new(((self.bits >> 22) & 0x01) != 0)
    }
    #[doc = "Bit 21 - I2C1SECF"]
    #[inline(always)]
    pub fn i2c1secf(&self) -> I2C1SECF_R {
        I2C1SECF_R::new(((self.bits >> 21) & 0x01) != 0)
    }
    #[doc = "Bit 20 - UART5SECF"]
    #[inline(always)]
    pub fn uart5secf(&self) -> UART5SECF_R {
        UART5SECF_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 19 - UART4SECF"]
    #[inline(always)]
    pub fn uart4secf(&self) -> UART4SECF_R {
        UART4SECF_R::new(((self.bits >> 19) & 0x01) != 0)
    }
    #[doc = "Bit 18 - UART3SECF"]
    #[inline(always)]
    pub fn uart3secf(&self) -> UART3SECF_R {
        UART3SECF_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 17 - UART2SECF"]
    #[inline(always)]
    pub fn uart2secf(&self) -> UART2SECF_R {
        UART2SECF_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 15 - SPI3SECF"]
    #[inline(always)]
    pub fn spi3secf(&self) -> SPI3SECF_R {
        SPI3SECF_R::new(((self.bits >> 15) & 0x01) != 0)
    }
    #[doc = "Bit 14 - SPI2SECF"]
    #[inline(always)]
    pub fn spi2secf(&self) -> SPI2SECF_R {
        SPI2SECF_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 11 - WWDGSECF"]
    #[inline(always)]
    pub fn wwdgsecf(&self) -> WWDGSECF_R {
        WWDGSECF_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 10 - RTCAPBSECF"]
    #[inline(always)]
    pub fn rtcapbsecf(&self) -> RTCAPBSECF_R {
        RTCAPBSECF_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 5 - TIM7SECF"]
    #[inline(always)]
    pub fn tim7secf(&self) -> TIM7SECF_R {
        TIM7SECF_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 4 - TIM6SECF"]
    #[inline(always)]
    pub fn tim6secf(&self) -> TIM6SECF_R {
        TIM6SECF_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 3 - TIM5SECF"]
    #[inline(always)]
    pub fn tim5secf(&self) -> TIM5SECF_R {
        TIM5SECF_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 2 - TIM4SECF"]
    #[inline(always)]
    pub fn tim4secf(&self) -> TIM4SECF_R {
        TIM4SECF_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 1 - TIM3SECF"]
    #[inline(always)]
    pub fn tim3secf(&self) -> TIM3SECF_R {
        TIM3SECF_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 0 - TIM2SECF"]
    #[inline(always)]
    pub fn tim2secf(&self) -> TIM2SECF_R {
        TIM2SECF_R::new((self.bits & 0x01) != 0)
    }
}
|
use crate::{
charactermeta::CharacterDirection, damage::Destroyer, delayedremove::DelayedRemove,
};
use specs_physics::{PhysicsBodyBuilder, PhysicsBody,
nphysics::object::BodyStatus,
nalgebra::{Vector3},
PhysicsColliderBuilder,
PhysicsCollider,
colliders::Shape,
};
use amethyst::{core::Transform, ecs::world::World, prelude::*, renderer::SpriteRender};
/// Spawns a short-lived, sensor-only damage entity one tile (32 px) away
/// from `transform` in the direction the character is facing.
pub fn sword_attack(
    world: &mut World,
    strength: f32,
    transform: Transform,
    direction: CharacterDirection, // renamed from the typo `direciton`; Rust args are positional, so callers are unaffected
    sprite: SpriteRender,
) {
    let translation = transform.translation();
    // Offset the hitbox by one tile in the facing direction.
    let (x, y) = match direction {
        CharacterDirection::Up => (translation.x, translation.y + 32.0),
        CharacterDirection::Down => (translation.x, translation.y - 32.0),
        CharacterDirection::Left => (translation.x - 32.0, translation.y),
        CharacterDirection::Right => (translation.x + 32.0, translation.y),
    };
    let mut damage_transform = Transform::default();
    damage_transform.set_translation_xyz(x, y, -y);
    let physics_body: PhysicsBody<f32> = PhysicsBodyBuilder::from(BodyStatus::Dynamic)
        .build();
    // Sensor collider: reports contacts for damage without pushing bodies.
    let physics_collider: PhysicsCollider<f32> =
        PhysicsColliderBuilder::from(Shape::Cuboid {
            half_extents: Vector3::new(16.0, 16.0, 300.0)
        })
        .sensor(true)
        .build();
    world
        .create_entity()
        .with(damage_transform)
        // Auto-despawn after 0.1s, and deal `strength` damage on contact.
        .with(DelayedRemove::new(0.1))
        .with(Destroyer { damage: strength })
        .with(physics_body)
        .with(physics_collider)
        .with(sprite)
        .build();
}
|
#[cfg(all(not(target_arch = "wasm32"), test))]
mod test;
use liblumen_alloc::erts::exception;
use liblumen_alloc::erts::process::Process;
use liblumen_alloc::erts::term::prelude::*;
/// `erlang:tuple_size/1`: returns the number of elements of `tuple` as a
/// process-allocated integer term; non-tuple arguments error out through
/// the `term_try_into_tuple!` macro's `?`.
#[native_implemented::function(erlang:tuple_size/1)]
pub fn result(process: &Process, tuple: Term) -> exception::Result<Term> {
    let tuple = term_try_into_tuple!(tuple)?;
    let size = process.integer(tuple.len());
    Ok(size)
}
|
use std::{
collections::{HashMap, VecDeque},
io, mem,
path::{Path, PathBuf},
sync::Arc,
};
use abi_stable::{
external_types::crossbeam_channel::{self, RReceiver, RSender},
library::{lib_header_from_path, LibraryError, LibrarySuffix, RawLibrary},
sabi_trait::prelude::TD_Opaque,
std_types::{RErr, ROk, RResult, RSome, RStr, RString, RVec},
};
#[allow(unused_imports)]
use core_extensions::{SelfOps, SliceExt, StringExt};
use example_1_interface::{
Application, Application_TO, AsyncCommand, Error as AppError, PluginId, PluginMod_Ref,
PluginResponse, PluginType, WhichPlugin,
};
use serde::Deserialize;
use serde_json::value::RawValue;
use smallvec::SmallVec;
mod app;
mod vec_from_map;
use crate::{
app::{ApplicationState, TheApplication},
vec_from_map::VecFromMap,
};
/// Returns the path the plugin will be loaded from.
///
/// Prefers whichever of `target/debug` / `target/release` actually holds
/// the library; when both exist, picks the more recently modified one, and
/// when neither exists, falls back to the debug path.
fn compute_plugin_path(base_name: &str) -> io::Result<PathBuf> {
    let debug_dir = "../../../target/debug/"
        .as_ref_::<Path>()
        .into_::<PathBuf>();
    let release_dir = "../../../target/release/"
        .as_ref_::<Path>()
        .into_::<PathBuf>();
    let debug_path = RawLibrary::path_in_directory(&debug_dir, base_name, LibrarySuffix::NoSuffix);
    let release_path =
        RawLibrary::path_in_directory(&release_dir, base_name, LibrarySuffix::NoSuffix);
    let chosen = if !release_path.exists() {
        // Covers both "only debug exists" and "neither exists".
        debug_path
    } else if !debug_path.exists() {
        release_path
    } else if debug_path.metadata()?.modified()? < release_path.metadata()?.modified()? {
        release_path
    } else {
        debug_path
    };
    Ok(chosen)
}
/// A description of what plugin to load.
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged)]
pub enum PluginToLoad {
    // Just a library name; loaded once under its own name.
    Named(String),
    // A library name plus an instance count and an optional key to
    // register it under instead of its own name.
    WithInstances {
        #[serde(alias = "name")]
        named: String,
        // Number of instances to create; defaults to 1 via `one_u64`.
        #[serde(default = "one_u64")]
        instances: u64,
        #[serde(alias = "renamed")]
        rename: Option<String>,
    },
}
/// Serde default for `PluginToLoad::WithInstances::instances`.
fn one_u64() -> u64 {
    1
}
/// The type that the configuration file is deserialized into.
#[derive(Debug, Clone, Deserialize)]
pub struct Config {
    // Libraries to load, each with optional instance count / rename.
    pub plugins: RVec<PluginToLoad>,
    // Commands to send to plugins, kept in declaration order.
    pub commands: VecFromMap<WhichPlugin, Box<RawValue>>,
}
/// A description of plugin instances that were instantiated and left to instantiate,
/// as well as the root module of the plugin's dynamic library to instantiate the plugins.
pub struct PluginModAndIndices {
    // Root module of the plugin's dynamic library.
    root_module: PluginMod_Ref,
    // How many more instances remain to be constructed.
    to_be_instantiated: u64,
    // Indices of already-created instances in the application's plugin list.
    indices: Vec<usize>,
}
pub type PluginIndices = SmallVec<[usize; 1]>;
/// Commands sent to plugins after calling `Application::send_command_to_plugin`.
#[derive(Debug)]
pub struct DelayedCommand {
    /// Which plugin sent the command.
    from: PluginId,
    /// The index in plugins to which the command is sent.
    plugin_index: usize,
    /// The command for the `plugin_index` plugin.
    command: Arc<RString>,
}
/// Used to handle the responses to the delayed commands sent to plugins after calling
/// `Application::send_command_to_plugin`.
#[derive(Debug)]
pub struct DelayedResponse {
    /// The plugin that sends the response.
    from: PluginId,
    /// The plugin that sent the command for which this is the response.
    to: usize,
    /// The response from the `from` plugin to the `to` plugin.
    response: RString,
}
/// Application entry point.
///
/// Loads the JSON configuration (path taken from the first CLI argument,
/// falling back to `./data/app_config.json`), loads every configured plugin
/// library, instantiates the requested number of plugin instances, and then
/// drives the application loop, feeding it the commands listed in the
/// configuration.
///
/// Exits the process with status 1 when the configuration cannot be read or
/// parsed, when any library fails to load, or when any plugin fails to
/// instantiate.
fn main() -> io::Result<()> {
    let config_path = match std::env::args_os().nth(1) {
        Some(os) => PathBuf::from(os),
        None => {
            println!("Help: You can pass a configuration's path as a command-line argument.");
            PathBuf::from("./data/app_config.json")
        }
    };
    let file_contents = match std::fs::read_to_string(&*config_path) {
        Ok(x) => x,
        Err(e) => {
            eprintln!(
                "Could not load the configuration file at:\n\
                 \t{}\n\
                 Because of this error:\n{}\n",
                config_path.display(),
                e
            );
            std::process::exit(1);
        }
    };
    let config: Config = match serde_json::from_str(&file_contents) {
        Ok(x) => x,
        Err(e) => {
            eprintln!(
                "Could not parse the configuration file at:\n\
                 \t{}\n\
                 Because of this error:\n\
                 {}\n",
                config_path.display(),
                e
            );
            std::process::exit(1);
        }
    };
    // Errors are accumulated per phase and reported in bulk after each phase.
    let mut nonexistent_files = Vec::<(String, io::Error)>::new();
    let mut library_errs = Vec::<(String, LibraryError)>::new();
    let mut loaded_libraries = Vec::<String>::new();
    let mut plugins = Vec::new();
    let mut state = ApplicationState::new();
    for plug in &config.plugins {
        let (named, instances, rename) = match plug {
            PluginToLoad::Named(named) => ((*named).clone(), 1, None),
            PluginToLoad::WithInstances {
                named,
                instances,
                rename,
            } => ((*named).clone(), *instances, rename.clone()),
        };
        // Plugins are keyed by their (optionally renamed) name; a repeated
        // entry only bumps the number of instances to create later.
        let name_key = rename.unwrap_or_else(|| named.clone());
        if let Some(mod_i) = state.id_map.get_mut(&*name_key) {
            mod_i.to_be_instantiated += instances;
            continue;
        }
        let library_path: PathBuf = match compute_plugin_path(named.as_ref()) {
            Ok(x) => x,
            Err(e) => {
                nonexistent_files.push((named.clone(), e));
                continue;
            }
        };
        // Load the dynamic library and grab its abi_stable root module.
        let res = (|| {
            let header = lib_header_from_path(&library_path)?;
            header.init_root_module::<PluginMod_Ref>()
        })();
        let root_module = match res {
            Ok(x) => x,
            Err(e) => {
                library_errs.push((named.clone(), e));
                continue;
            }
        };
        loaded_libraries.push(name_key.clone());
        state.id_map.insert(
            name_key.into_::<RString>(),
            PluginModAndIndices {
                root_module,
                to_be_instantiated: instances,
                indices: Vec::with_capacity(instances as usize),
            },
        );
    }
    if !nonexistent_files.is_empty() {
        for (name, e) in nonexistent_files {
            eprintln!(
                "Could not load library:\n\
                 \t{}\n\
                 because of this error:\n\
                 {}\n\
                 ",
                name, e
            )
        }
        std::process::exit(1);
    }
    if !library_errs.is_empty() {
        for (name, e) in library_errs {
            eprintln!(
                "Could not load library:\n\
                 \t{}\n\
                 because of this error:\n\
                 {}\n\
                 ",
                name, e
            )
        }
        std::process::exit(1);
    }
    let mut plugin_new_errs = Vec::<(String, AppError)>::new();
    for name in loaded_libraries {
        let mod_i = state.id_map.get_mut(&*name).unwrap();
        // `mem::replace` zeroes the pending counter so each requested
        // instance is created exactly once.
        for _ in 0..mem::replace(&mut mod_i.to_be_instantiated, 0) {
            let plugin_constructor = mod_i.root_module.new();
            let new_id = PluginId {
                named: name.clone().into(),
                instance: mod_i.indices.len() as u64,
            };
            let plugin = match plugin_constructor(state.sender.clone(), new_id.clone()) {
                ROk(x) => x,
                RErr(e) => {
                    plugin_new_errs.push((name.clone(), e));
                    continue;
                }
            };
            let new_index = plugins.len();
            plugins.push(plugin);
            mod_i.indices.push(new_index);
            state.plugin_ids.push(new_id);
        }
    }
    if !plugin_new_errs.is_empty() {
        for (name, e) in plugin_new_errs {
            eprintln!(
                "Could not instantiate plugin:\n\
                 \t{}\n\
                 because of this error:\n\
                 {}\n\
                 ",
                name, e
            )
        }
        std::process::exit(1);
    }
    let mut config_commands = config.commands.vec.into_iter();
    let mut app = TheApplication { plugins, state };
    // Main loop: send one configured command per tick (if any remain), then
    // let the application process events.
    while !app.is_finished() {
        if let Some((which_plugin, command)) = config_commands.next() {
            let command = command.get();
            if let Err(e) = app.run_command(which_plugin.clone(), command.into()) {
                eprintln!(
                    "Error while running command on:\n{:?}\nError:{}\nCommand:\n{:?}\n",
                    which_plugin, e, command
                );
            }
        }
        if let Err(e) = app.tick() {
            eprintln!("Error in application loop:\n{}\n", e);
        }
    }
    // The loop above only exits once `is_finished()` reports true, so the
    // previous `if app.is_finished()` re-check was always true and has been
    // removed.
    println!("timeout waiting for events");
    Ok(())
}
|
mod with_trap_exit_flag;
use super::*;
/// Property test: for any atom that is not a supported flag name, `result`
/// must return a `badarg` error whose message lists the supported flags.
#[test]
fn without_supported_flag_errors_badarg() {
    run!(
        |arc_process| {
            // Strategy: (process, unsupported flag atom, arbitrary value term).
            (
                Just(arc_process.clone()),
                unsupported_flag_atom(),
                strategy::term(arc_process.clone()),
            )
        },
        |(arc_process, flag, value)| {
            prop_assert_badarg!(result(&arc_process, flag, value), "supported flags are error_handler, max_heap_size, message_queue_data, min_bin_vheap_size, min_heap_size, priority, save_calls, sensitive, and trap_exit");
            Ok(())
        },
    );
}
/// Strategy producing an arbitrary atom term that is not a supported flag
/// name (only `trap_exit` is filtered out here, matching the sibling module
/// under test).
fn unsupported_flag_atom() -> BoxedStrategy<Term> {
    strategy::term::atom()
        .prop_filter("Cannot be a supported flag name", |atom| {
            let atom_atom: Atom = (*atom).try_into().unwrap();
            atom_atom.name() != "trap_exit"
        })
        .boxed()
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
fidl::endpoints::ServerEnd,
fidl_fuchsia_io::{DirectoryProxy, NodeMarker, DIRENT_TYPE_SERVICE, INO_UNKNOWN},
fuchsia_vfs_pseudo_fs as fvfs,
fuchsia_vfs_pseudo_fs::directory::entry::DirectoryEntry,
futures::{future::FusedFuture, task::Context, Future, Poll},
std::pin::Pin,
void::Void,
};
/// Routing closure invoked for every forwarded open request; the parameters
/// are `(flags, mode, relative_path, server_end)`.
pub type RoutingFn = Box<dyn FnMut(u32, u32, String, ServerEnd<NodeMarker>) + Send>;
// TODO(ZX-3606): move this into the pseudo dir fs crate.
/// DirectoryBroker exists to hold a slot in a fuchsia_vfs_pseudo_fs directory and proxy open
/// requests. A DirectoryBroker holds a closure provided at creation time, and whenever an open
/// request for this directory entry is received the given ServerEnd is passed into the closure,
/// which will presumably make an open request somewhere else and forward on the ServerEnd.
pub struct DirectoryBroker {
    /// Closure that proxies open requests. The parameters are as follows:
    ///   flags: u32
    ///   mode: u32
    ///   relative_path: String
    ///   server_end: ServerEnd<NodeMarker>
    route_open: RoutingFn,
    /// Inode number and entry type reported for this directory entry.
    entry_info: fvfs::directory::entry::EntryInfo,
}
impl DirectoryBroker {
    /// Creates a `DirectoryBroker` that forwards directory open requests to
    /// the provided routing closure, reporting itself as a service entry
    /// with an unknown inode.
    pub fn new(route_open: RoutingFn) -> Self {
        DirectoryBroker {
            route_open,
            entry_info: fvfs::directory::entry::EntryInfo::new(INO_UNKNOWN, DIRENT_TYPE_SERVICE),
        }
    }
    /// Wraps a `DirectoryProxy` in a broker whose routing closure forwards
    /// open requests to the proxied directory.
    pub fn from_directory_proxy(dir: DirectoryProxy) -> DirectoryBroker {
        let route: RoutingFn = Box::new(
            move |flags: u32,
                  mode: u32,
                  relative_path: String,
                  server_end: ServerEnd<NodeMarker>| {
                if relative_path.is_empty() {
                    // Opening 'dir' itself: clone the proxy's channel.
                    let _ = dir.clone(flags, server_end);
                } else {
                    // Otherwise pass the remaining 'relative_path' along to
                    // the component hosting the out directory to resolve.
                    // TODO(fsamuel): DirectoryEntry::open does not return a
                    // Result, so this error cannot be propagated up. We
                    // probably want to change that.
                    let _ = dir.open(flags, mode, &relative_path, server_end);
                }
            },
        );
        Self::new(route)
    }
}
impl DirectoryEntry for DirectoryBroker {
    /// Re-joins the remaining path segments with `/` and hands the request
    /// to the routing closure supplied at construction time.
    fn open(
        &mut self,
        flags: u32,
        mode: u32,
        path: &mut dyn Iterator<Item = &str>,
        server_end: ServerEnd<NodeMarker>,
    ) {
        let segments: Vec<&str> = path.collect();
        let relative_path = segments.join("/");
        (self.route_open)(flags, mode, relative_path, server_end);
    }
    /// Reports the inode and entry type this broker was created with.
    fn entry_info(&self) -> fvfs::directory::entry::EntryInfo {
        fvfs::directory::entry::EntryInfo::new(self.entry_info.inode(), self.entry_info.type_())
    }
}
impl FusedFuture for DirectoryBroker {
    fn is_terminated(&self) -> bool {
        // Always report terminated; the `Future` impl for this type never
        // resolves (it always returns `Poll::Pending`).
        // TODO: ibobyr says:
        // As this kind of service is special, it can forward `is_terminated` to the contained
        // proxy, via the EventStreams, but for now "true" should work as well.
        true
    }
}
impl Future for DirectoryBroker {
    /// `Void` is uninhabited, so this future can never produce a value.
    type Output = Void;
    fn poll(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Self::Output> {
        // Never completes; the broker lives until it is dropped.
        Poll::Pending
    }
}
|
use crate::{RegisterBits, Register};
use core::marker;
/// A 16-bit timer.
pub trait Timer16 : Sized {
    /// The first compare register.
    /// For example, OCR0A.
    type CompareA: Register<T=u16>;
    /// The second compare register.
    /// For example, OCR0B.
    type CompareB: Register<T=u16>;
    /// The counter register.
    ///
    /// For example, TCNT0.
    type Counter: Register<T=u16>;
    /// The first control register.
    ///
    /// For example, TCCR0A.
    type ControlA: Register<T=u8>;
    /// The second control register.
    ///
    /// For example, TCCR0B.
    type ControlB: Register<T=u8>;
    /// The third control register.
    ///
    /// For example, TCCR0C.
    type ControlC: Register<T=u8>;
    /// The interrupt mask register.
    ///
    /// For example, TIMSK0.
    type InterruptMask: Register<T=u8>;
    /// The interrupt flag register.
    ///
    /// For example, TIFR0.
    type InterruptFlag: Register<T=u8>;
    /// Clock-select bit 0 (in control register B).
    const CS0: RegisterBits<Self::ControlB>;
    /// Clock-select bit 1 (in control register B).
    const CS1: RegisterBits<Self::ControlB>;
    /// Clock-select bit 2 (in control register B).
    const CS2: RegisterBits<Self::ControlB>;
    /// Waveform-generation-mode bit 0 (in control register A).
    const WGM0: RegisterBits<Self::ControlA>;
    /// Waveform-generation-mode bit 1 (in control register A).
    const WGM1: RegisterBits<Self::ControlA>;
    /// Waveform-generation-mode bit 2 (in control register B).
    const WGM2: RegisterBits<Self::ControlB>;
    /// Waveform-generation-mode bit 3 (in control register B).
    const WGM3: RegisterBits<Self::ControlB>;
    /// Output-compare-A interrupt-enable bit (in the interrupt mask register).
    const OCIEA: RegisterBits<Self::InterruptMask>;
    /// Starts a builder for configuring this timer.
    fn setup() -> Timer16Setup<Self> { Timer16Setup::new() }
}
/// Clock source / prescaler selection, corresponding to the CS2..CS0 bits.
pub enum ClockSource {
    /// No clock source (timer stopped).
    None,
    /// System clock, no prescaling.
    Prescale1,
    /// System clock divided by 8.
    Prescale8,
    /// System clock divided by 64.
    Prescale64,
    /// System clock divided by 256.
    Prescale256,
    /// System clock divided by 1024.
    Prescale1024,
    /// External clock, falling edge.
    ExternalFalling,
    /// External clock, rising edge.
    ExternalRising,
}
impl ClockSource {
    /// Returns the CS2/CS1/CS0 bit pattern for this clock source.
    ///
    /// Each bit position is written out explicitly (`zero()` for a cleared
    /// bit) so the rows read like the clock-select table in the datasheet.
    fn bits<T: Timer16>(&self) -> RegisterBits<T::ControlB> {
        use self::ClockSource::*;
        match *self {
            None            => RegisterBits::zero() | RegisterBits::zero() | RegisterBits::zero(),
            Prescale1       => RegisterBits::zero() | RegisterBits::zero() | T::CS0,
            Prescale8       => RegisterBits::zero() | T::CS1                | RegisterBits::zero(),
            Prescale64      => RegisterBits::zero() | T::CS1                | T::CS0,
            Prescale256     => T::CS2                | RegisterBits::zero() | RegisterBits::zero(),
            Prescale1024    => T::CS2                | RegisterBits::zero() | T::CS0,
            ExternalFalling => T::CS2                | T::CS1                | RegisterBits::zero(),
            ExternalRising  => T::CS2                | T::CS1                | T::CS0,
        }
    }
    /// Mask that clears all clock-select bits while leaving the other
    /// ControlB bits untouched.
    #[inline]
    fn mask<T: Timer16>() -> RegisterBits<T::ControlB> {
        !(T::CS2 | T::CS1 | T::CS0)
    }
}
/// Waveform generation mode, corresponding to the WGM3..WGM0 bits.
pub enum WaveformGenerationMode {
    Normal,
    PwmPhaseCorrect8Bit,
    PwmPhaseCorrect9Bit,
    PwmPhaseCorrect10Bit,
    ClearOnTimerMatchOutputCompare,
    FastPwm8Bit,
    FastPwm9Bit,
    FastPwm10Bit,
    PwmPhaseAndFrequencyCorrectInputCapture,
    PwmPhaseAndFrequencyCorrectOutputCompare,
    PwmPhaseCorrectInputCapture,
    PwmPhaseCorrectOutputCompare,
    ClearOnTimerMatchInputCapture,
    FastPwmInputCapture,
    FastPwmOutputCompare,
}
impl WaveformGenerationMode {
    /// Returns bits for TCCR1A, TCCR1B
    #[inline]
    fn bits<T: Timer16>(&self) -> (RegisterBits<T::ControlA>, RegisterBits<T::ControlB>) {
        use self::WaveformGenerationMode::*;
        use RegisterBits as B;
        // It makes more sense to return bytes (A,B), but the manual
        // lists the table as (B,A). We match the manual here for
        // inspection purposes and flip the values for sanity
        // purposes.
        let (b, a) = match *self {
            Normal                                   => (B::zero() | B::zero(), B::zero() | B::zero()),
            PwmPhaseCorrect8Bit                      => (B::zero() | B::zero(), B::zero() | T::WGM0),
            PwmPhaseCorrect9Bit                      => (B::zero() | B::zero(), T::WGM1  | B::zero()),
            PwmPhaseCorrect10Bit                     => (B::zero() | B::zero(), T::WGM1  | T::WGM0),
            ClearOnTimerMatchOutputCompare           => (B::zero() | T::WGM2,  B::zero() | B::zero()),
            FastPwm8Bit                              => (B::zero() | T::WGM2,  B::zero() | T::WGM0),
            FastPwm9Bit                              => (B::zero() | T::WGM2,  T::WGM1  | B::zero()),
            FastPwm10Bit                             => (B::zero() | T::WGM2,  T::WGM1  | T::WGM0),
            PwmPhaseAndFrequencyCorrectInputCapture  => (T::WGM3  | B::zero(), B::zero() | B::zero()),
            PwmPhaseAndFrequencyCorrectOutputCompare => (T::WGM3  | B::zero(), B::zero() | T::WGM0),
            PwmPhaseCorrectInputCapture              => (T::WGM3  | B::zero(), T::WGM1  | B::zero()),
            PwmPhaseCorrectOutputCompare             => (T::WGM3  | B::zero(), T::WGM1  | T::WGM0),
            ClearOnTimerMatchInputCapture            => (T::WGM3  | T::WGM2,  B::zero() | B::zero()),
            // Reserved                              => (T::WGM3  | T::WGM2,  B::zero() | T::WGM0),
            FastPwmInputCapture                      => (T::WGM3  | T::WGM2,  T::WGM1  | B::zero()),
            FastPwmOutputCompare                     => (T::WGM3  | T::WGM2,  T::WGM1  | T::WGM0),
        };
        (a, b)
    }
    /// Mask that clears all WGM bits in control registers A and B.
    #[inline]
    fn mask<T: Timer16>() -> (RegisterBits<T::ControlA>, RegisterBits<T::ControlB>) {
        (!(T::WGM0 | T::WGM1), !(T::WGM2 | T::WGM3))
    }
}
/// Builder that accumulates control-register values for a 16-bit timer and
/// writes them to the hardware in `configure`.
pub struct Timer16Setup<T: Timer16> {
    /// Pending value for control register A.
    a: RegisterBits<T::ControlA>,
    /// Pending value for control register B.
    b: RegisterBits<T::ControlB>,
    /// Pending value for control register C.
    c: RegisterBits<T::ControlC>,
    /// If set, written to CompareA and the compare-A interrupt is enabled.
    output_compare_1: Option<u16>,
    /// Ties the builder to a specific timer type without storing one.
    _phantom: marker::PhantomData<T>,
}
impl<T: Timer16> Timer16Setup<T> {
    /// Starts a fresh setup: all pending register values cleared and no
    /// output-compare value selected.
    #[inline]
    pub fn new() -> Self {
        Self {
            a: RegisterBits::zero(),
            b: RegisterBits::zero(),
            c: RegisterBits::zero(),
            output_compare_1: None,
            _phantom: marker::PhantomData,
        }
    }
    /// Selects the clock source: clears the clock-select bits in the pending
    /// ControlB value, then sets the bits for the requested source.
    #[inline]
    pub fn clock_source(mut self, source: ClockSource) -> Self {
        let keep = ClockSource::mask::<T>();
        let set = source.bits::<T>();
        self.b &= keep;
        self.b |= set;
        self
    }
    /// Selects the waveform generation mode: clears the WGM bits in the
    /// pending ControlA/ControlB values, then sets the mode's bits.
    #[inline]
    pub fn waveform_generation_mode(mut self, mode: WaveformGenerationMode) -> Self {
        let (keep_a, keep_b) = WaveformGenerationMode::mask::<T>();
        self.a &= keep_a;
        self.b &= keep_b;
        let (set_a, set_b) = mode.bits::<T>();
        self.a |= set_a;
        self.b |= set_b;
        self
    }
    /// Sets (or clears, with `None`) the output-compare value written in
    /// `configure`.
    #[inline]
    pub fn output_compare_1(mut self, value: Option<u16>) -> Self {
        self.output_compare_1 = value;
        self
    }
    /// Writes the accumulated values to the hardware registers, resets the
    /// counter, and — if an output-compare value was set — programs CompareA
    /// and enables the compare interrupt.
    #[inline]
    pub fn configure(self) {
        T::ControlA::write(self.a);
        T::ControlB::write(self.b);
        T::ControlC::write(self.c);
        // Reset counter to zero
        T::Counter::write(0u16);
        if let Some(v) = self.output_compare_1 {
            // Set the match
            T::CompareA::write(v);
            // Enable compare interrupt
            T::InterruptMask::set(T::OCIEA);
        }
    }
}
|
#[cfg(feature = "bindings")]
mod error {
    use cbindgen::Error as BindgenError;
    #[cfg(feature = "ctests")]
    use cc::Error as CcError;
    use std::{env::VarError, error, fmt, io};
    /// Aggregate error type for the binding-generation build script.
    #[allow(unused)]
    #[derive(Debug)]
    pub enum Error {
        /// cbindgen failed while generating the header.
        Bindgen(BindgenError),
        /// The C compiler failed (only present with the `ctests` feature).
        #[cfg(feature = "ctests")]
        Cc(CcError),
        /// An underlying I/O error.
        Io(io::Error),
        /// A required environment variable was missing or invalid.
        Env(VarError),
        /// Any other failure without a specific cause.
        Unspecified,
    }
    impl error::Error for Error {
        #[inline]
        fn source(&self) -> Option<&(dyn error::Error + 'static)> {
            // `Cc` falls through to `None` (see the commented-out arm).
            match *self {
                Error::Bindgen(ref e) => Some(e),
                // Error::Cc(ref e) => Some(e),
                Error::Io(ref e) => Some(e),
                Error::Env(ref e) => Some(e),
                _ => None,
            }
        }
        // NOTE(review): `description` is deprecated in favor of `Display`,
        // but the `Display` impl below delegates to it for the `Cc` and
        // `Unspecified` variants, so it is kept as an override here.
        #[inline]
        fn description(&self) -> &str {
            match *self {
                Error::Bindgen(ref e) => e.description(),
                // Error::Cc(ref e) => e.description(),
                Error::Io(ref e) => e.description(),
                Error::Env(ref e) => e.description(),
                #[cfg(feature = "ctests")]
                Error::Cc(_) => "A c compiler error occurred",
                Error::Unspecified => "An unspecified error occurred",
            }
        }
    }
    impl fmt::Display for Error {
        #[inline]
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match *self {
                Error::Bindgen(ref e) => fmt::Display::fmt(e, f),
                // `Cc` and `Unspecified` have no useful `Display` of their
                // own, so their static `description` strings are used.
                #[cfg(feature = "ctests")]
                Error::Cc(_) => f.write_str(error::Error::description(self)),
                Error::Unspecified => f.write_str(error::Error::description(self)),
                Error::Io(ref e) => fmt::Display::fmt(e, f),
                Error::Env(ref e) => fmt::Display::fmt(e, f),
            }
        }
    }
    #[cfg(feature = "ctests")]
    impl From<CcError> for Error {
        #[inline]
        fn from(e: CcError) -> Self {
            Error::Cc(e)
        }
    }
    impl From<io::Error> for Error {
        #[inline]
        fn from(e: io::Error) -> Self {
            Error::Io(e)
        }
    }
    impl From<BindgenError> for Error {
        #[inline]
        fn from(e: BindgenError) -> Self {
            Error::Bindgen(e)
        }
    }
    impl From<VarError> for Error {
        #[inline]
        fn from(e: VarError) -> Self {
            Error::Env(e)
        }
    }
}
/// Build entry point (with the `bindings` feature): generates the C header
/// for this crate with cbindgen and, when the `ctests` feature is also
/// enabled, compiles the C/C++ test sources against it.
#[cfg(feature = "bindings")]
fn main() -> Result<(), error::Error> {
    use cbindgen::{self, Config, Language};
    use std::{
        env::{self, VarError},
        path::PathBuf,
    };
    let crate_dir = env::var("CARGO_MANIFEST_DIR")?;
    let mut bindings = cbindgen::generate(crate_dir)?;
    push_stdio(&mut bindings.config);
    let mut header_path = target_dir()?;
    header_path.push("include/bvh_anim/bvh_anim.h");
    bindings.write_to_file(header_path);
    build_and_run_ctests()?;
    // Resolves the output directory: `CARGO_TARGET_DIR` if set, otherwise
    // `<manifest dir>/target`.
    #[inline]
    fn target_dir() -> Result<PathBuf, VarError> {
        env::var("CARGO_TARGET_DIR")
            .map(PathBuf::from)
            .or_else(|_| {
                env::var("CARGO_MANIFEST_DIR")
                    .map(PathBuf::from)
                    .map(|p| p.join("target"))
            })
    }
    // Adds the stdio system include matching the configured output language.
    #[inline]
    fn push_stdio(config: &mut Config) {
        match config.language {
            Language::Cxx => config.sys_includes.push("cstdio".to_string()),
            Language::C => config.sys_includes.push("stdio.h".to_string()),
        }
    }
    // Compiles every `.c`/`.cpp` file under `ctests/` against the generated
    // header.
    #[cfg(feature = "ctests")]
    fn build_and_run_ctests() -> Result<(), error::Error> {
        use cc::Build;
        use std::fs;
        let crate_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?);
        let ctests_dir = crate_dir.join("ctests");
        if !ctests_dir.exists() {
            println!("Skipping ctests");
            return Err(error::Error::Unspecified);
        }
        let entries = fs::read_dir(ctests_dir)?;
        let mut include_dir = target_dir()?;
        include_dir.push("include");
        let mut out_dir = PathBuf::from(env::var("OUT_DIR")?);
        out_dir.push("ctests");
        // A leftover debug `panic!("{:?}", out_dir)` used to abort here
        // before any test source was compiled; it has been removed.
        for entry in entries {
            let entry = entry?;
            let entry_path = entry.path();
            // Only C and C++ sources are test inputs; skip everything else.
            match entry_path.extension() {
                Some(ext) if ext == "c" || ext == "cpp" => (),
                _ => continue,
            }
            let file_name = entry_path.file_name().unwrap();
            Build::new()
                .file(&entry_path)
                .include(&include_dir)
                .out_dir(&out_dir)
                .try_compile(file_name.to_str().unwrap())?;
        }
        Ok(())
    }
    // Without the `ctests` feature there is nothing to compile.
    #[cfg(not(feature = "ctests"))]
    fn build_and_run_ctests() -> Result<(), error::Error> {
        Ok(())
    }
    Ok(())
}
// Without the `bindings` feature this build script has nothing to generate.
#[cfg(not(feature = "bindings"))]
fn main() {}
|
if k == 0 {
return head
}
// 3 components:
// - head: remaining list
// - k_group: current accumulating group, targeting k elements
// - prev_tail: tail of already processed part of list
//
// Basic idea:
// Take nodes from head, prepend the node to k_group, so after doing it k times,
// k_group will be the reversed list of size k taken from head.
// Then we append k_group to prev_tail, and search for the new prev_tail.
// If there is not enough nodes to form a k_group of size k, that means the current k_group is the last group
// and its size is smaller than k. In this case, we reverse the k_group again to revert the change, and append
// it to prev_tail, then return.
let mut p_head: Option<Box<ListNode>> = None;
let mut prev_tail = &mut p_head;
let mut k_group: Option<Box<ListNode>> = None;
loop {
for k_group_len in 0..k {
if let Some(mut node) = head {
head = node.next.take();
node.next = k_group;
k_group = Some(node);
} else {
let mut reverted_k_group: Option<Box<ListNode>> = None;
for _ in 0..k_group_len {
let mut node = k_group.unwrap();
k_group = node.next.take();
node.next = reverted_k_group;
reverted_k_group = Some(node);
}
*prev_tail = reverted_k_group;
return p_head
}
}
*prev_tail = k_group;
for _ in 0..k {
prev_tail = &mut prev_tail.as_mut().unwrap().next;
}
k_group = None;
} |
use super::{Gesture, MouseButtonCode, VirtualKeyCode};
use num_traits::FromPrimitive;
use serde::{ser::SerializeStruct, Deserialize, Deserializer, Serialize};
use smart_default::SmartDefault;
use std::{convert::TryFrom, fmt};
/// Describes the current event type. Users can make most events freely, though
/// special care should be taken that `Alarm`'s .0 field is less than
/// `ALARM_MAX`, and the same for the `OtherEvent`'s usize wrappers. To make
/// sure some event has been validly created, `is_valid` has been provided.
///
/// **Note: only serde_json serialization and deserialization is supported for
/// EventType.** Yaml, and other text / WYSIWYG data formats should be fine, but
/// Bincode and other binary sequences are unlikely to successfully serialize
/// this. This is due to our use of serde's `flatten`, which runs afoul of this issue: https://github.com/servo/bincode/issues/245
#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, SmartDefault, Copy, Clone)]
pub enum EventType {
    /// Object creation (the default event type; filename base "Create").
    #[default]
    Create,
    Destroy,
    Cleanup,
    /// A step event at the given stage (filename base "Step").
    Step(Stage),
    /// An alarm event; the payload must be `<= ALARM_MAX`.
    Alarm(usize),
    /// A draw event (draw, draw-GUI, pre/post-draw, resize).
    Draw(DrawEvent),
    Collision,
    /// A mouse event (filename base "Mouse").
    Mouse(MouseEvent),
    /// Keyboard event for the given key (filename base "Keyboard").
    KeyDown(VirtualKeyCode),
    /// Key-press event for the given key (filename base "KeyPress").
    KeyPress(VirtualKeyCode),
    /// Key-release event for the given key (filename base "KeyRelease").
    KeyRelease(VirtualKeyCode),
    /// A gesture event (filename base "Gesture").
    Gesture(GestureEvent),
    /// Miscellaneous "Other" events (rooms, views, user events, ...).
    Other(OtherEvent),
    /// Asynchronous events; these share the "Other" filename base.
    Async(AsyncEvent),
}
impl EventType {
    /// The maximum number of alarms which are available in the Gms2 IDE.
    pub const ALARM_MAX: usize = 11;
    /// Gets the filename for a given event with its requisite base. We return
    /// in this format to reduce allocating a string per call, as this
    /// filename will likely become allocated on some path in the future.
    ///
    /// ```rs
    /// let (base_name, numeric_id) = EventType::Create.filename();
    /// assert_eq!(base_name, "Create");
    /// assert_eq!(numeric_id, 0);
    /// ```
    pub fn filename(&self) -> (&'static str, usize) {
        let name = match self {
            EventType::Create => "Create",
            EventType::Destroy => "Destroy",
            EventType::Cleanup => "CleanUp",
            EventType::Step(_) => "Step",
            EventType::Alarm(_) => "Alarm",
            EventType::Draw(_) => "Draw",
            EventType::Collision => "Collision",
            EventType::Mouse(_) => "Mouse",
            EventType::KeyDown(_) => "Keyboard",
            EventType::KeyPress(_) => "KeyPress",
            EventType::KeyRelease(_) => "KeyRelease",
            EventType::Gesture(_) => "Gesture",
            EventType::Other(_) => "Other",
            // Async events share the "Other" filename base.
            EventType::Async(_) => "Other",
        };
        let number = EventIntermediary::from(*self).event_num;
        (name, number)
    }
    /// Returns the filename like it will appear in a file, e.g. `Create_0`.
    pub fn filename_simple(&self) -> String {
        let (word, number) = self.filename();
        format!("{}_{}", word, number)
    }
    /// Parses a given filename and number into an `EventType`, if valid.
    ///
    /// # Errors
    /// Returns `CannotFindEventType` when `value` is not a known base name,
    /// and propagates the `EventIntermediary` conversion error when
    /// `event_num` is invalid for that event type.
    pub fn parse_filename(
        value: &str,
        event_num: usize,
    ) -> Result<EventType, EventTypeConvertErrors> {
        let event_type = match value {
            "Create" => 0,
            "Destroy" => 1,
            "CleanUp" => 12,
            "Step" => 3,
            "Alarm" => 2,
            "Draw" => 8,
            "Collision" => 4,
            "Mouse" => 6,
            "Keyboard" => 5,
            "KeyPress" => 9,
            "KeyRelease" => 10,
            "Gesture" => 13,
            "Other" => 7,
            _ => return Err(EventTypeConvertErrors::CannotFindEventType),
        };
        EventType::try_from(EventIntermediary {
            event_type,
            event_num,
        })
    }
    /// A simple way to parse a value. It does a split on the string, which
    /// basically means it needs to follow the pattern `Create_0` and
    /// similar.
    ///
    /// # Errors
    /// Returns `BadString` when the input does not contain exactly one `_`
    /// separator. (This previously panicked on inputs like `"Create"` with
    /// no underscore; it now reports the error instead.)
    pub fn parse_filename_simple(value: &str) -> Result<EventType, EventTypeConvertErrors> {
        let mut iter = value.split('_');
        let name = iter.next().ok_or(EventTypeConvertErrors::BadString)?;
        // An unparsable number still defaults to 0, preserving the previous
        // tolerant behavior for malformed numeric suffixes.
        let number: usize = iter
            .next()
            .ok_or(EventTypeConvertErrors::BadString)?
            .parse()
            .unwrap_or_default();
        if iter.next().is_some() {
            return Err(EventTypeConvertErrors::BadString);
        }
        EventType::parse_filename(name, number)
    }
    /// Returns whether `value` round-trips through `EventIntermediary`.
    pub fn is_valid(value: EventType) -> bool {
        EventType::try_from(EventIntermediary::from(value)).is_ok()
    }
}
/// Which stage of a multi-stage event (step or draw) this is.
#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, SmartDefault, Copy, Clone)]
pub enum Stage {
    /// The main (unqualified) stage.
    #[default]
    Main,
    /// The "begin" stage.
    Begin,
    /// The "end" stage.
    End,
}
/// The kind of draw event.
#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, SmartDefault, Copy, Clone)]
pub enum DrawEvent {
    /// A normal draw event at the given stage.
    #[default]
    Draw(Stage),
    /// A GUI-layer draw event at the given stage.
    DrawGui(Stage),
    PreDraw,
    PostDraw,
    WindowResize,
}
/// The kind of mouse event.
#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, SmartDefault, Copy, Clone)]
pub enum MouseEvent {
    /// A button is held down.
    #[default]
    Down(MouseButton),
    /// A button was pressed.
    Pressed(MouseButton),
    /// A button was released.
    Released(MouseButton),
    NoInput,
    MouseEnter,
    MouseExit,
    MouseWheelUp,
    MouseWheelDown,
}
impl MouseEvent {
    /// `event_num` offset for `Down` events.
    pub const DOWN_OFFSET: usize = 0;
    /// `event_num` offset for `Pressed` events.
    pub const PRESSED_OFFSET: usize = 4;
    /// `event_num` offset for `Released` events.
    pub const RELEASED_OFFSET: usize = 7;
    /// Tries to convert an `event_num` to a MouseEvent. This is for internal
    /// usage, but is made public to attempt to be a 100% pub facing crate.
    pub fn convert_to_input(mut value: usize) -> Option<MouseEvent> {
        // Values at or above `MouseButton::GLOBAL_OFFSET` encode "global"
        // inputs; shift them down into the local range and remember which
        // kind this was.
        let mut local_input = true;
        if value >= MouseButton::GLOBAL_OFFSET {
            local_input = false;
            value -= MouseButton::GLOBAL_OFFSET;
        }
        let output = match value {
            // 0..=2: Down events, one per button code.
            0..=2 => MouseEvent::Down(MouseButton {
                mb_code: num_traits::FromPrimitive::from_usize(value - Self::DOWN_OFFSET).unwrap(),
                local: local_input,
            }),
            // 3: NoInput only exists as a local event.
            3 => {
                if local_input {
                    MouseEvent::NoInput
                } else {
                    return None;
                }
            }
            // 4..=6: Pressed events.
            4..=6 => MouseEvent::Pressed(MouseButton {
                mb_code: num_traits::FromPrimitive::from_usize(value - Self::PRESSED_OFFSET)
                    .unwrap(),
                local: local_input,
            }),
            // 7..: Released events; values past the button-code range yield
            // None (the caller handles enter/exit/wheel separately).
            _ => {
                if let Some(mouse_button_code) =
                    FromPrimitive::from_usize(value - Self::RELEASED_OFFSET)
                {
                    MouseEvent::Released(MouseButton {
                        mb_code: mouse_button_code,
                        local: local_input,
                    })
                } else {
                    return None;
                }
            }
        };
        Some(output)
    }
}
#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, SmartDefault, Copy, Clone)]
pub struct MouseButton {
    /// The mouse button code used for this input.
    pub mb_code: MouseButtonCode,
    /// Whether the input is a "global" input, or a "local" input. In the Gms2
    /// IDE, these are separated into different categories. "Local" events
    /// only fire when the object itself is clicked on, while "global" ones
    /// can fire whenever the input is held at all.
    pub local: bool,
}
impl MouseButton {
    /// The offset for the `event_num` if this mouse button is a global. We
    /// use this number internally for serialization/deserialization.
    pub const GLOBAL_OFFSET: usize = 50;
    /// Calculates the `event_num` offset for this `MouseButton`, largely for
    /// internal use in serialization and deserialization. We make this
    /// public as this library attempts to be 100% public.
    pub fn event_offset(&self) -> usize {
        let base = self.mb_code as usize;
        if self.local {
            base
        } else {
            base + Self::GLOBAL_OFFSET
        }
    }
}
#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, SmartDefault, Copy, Clone)]
pub struct GestureEvent {
    /// The type of gesture used.
    pub gesture: Gesture,
    /// Whether the input is a "global" input, or a "local" input. In the Gms2
    /// IDE, these are separated into different categories. "Local" events
    /// only fire when the object itself is clicked on, while "global" ones
    /// can fire whenever the input is held at all.
    pub local: bool,
}
impl GestureEvent {
    /// The offset for the `event_num` if this gesture is a global. We use this
    /// number internally for serialization/deserialization.
    pub const GLOBAL_OFFSET: usize = 64;
    /// Converts an `event_num`, if possible, into a Gesture.
    pub fn convert_to_input(value: usize) -> Option<GestureEvent> {
        // GLOBAL_OFFSET is a single bit (64): its presence marks a global
        // gesture, and clearing it leaves the raw gesture discriminant.
        let local = value & Self::GLOBAL_OFFSET == 0;
        let raw = value & !Self::GLOBAL_OFFSET;
        FromPrimitive::from_usize(raw).map(|gesture| GestureEvent { gesture, local })
    }
}
/// The miscellaneous "Other" category of events.
#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, SmartDefault, Copy, Clone)]
pub enum OtherEvent {
    #[default]
    OutsideRoom,
    IntersectBoundary,
    /// Outside the given view; the index must be `<= OUTSIDE_VIEW_MAX`.
    OutsideView(usize),
    /// Intersecting the given view's boundary; same index limit.
    IntersectView(usize),
    GameStart,
    GameEnd,
    RoomStart,
    RoomEnd,
    AnimationEnd,
    AnimationUpdate,
    AnimationEvent,
    PathEnded,
    /// A user-defined event; the index must be `<= USER_EVENT_MAX`.
    UserEvent(usize),
    BroadcastMessage,
}
/// Asynchronous callback events; these serialize under the same raw
/// `event_type` (7) as `OtherEvent`.
#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, SmartDefault, Copy, Clone)]
pub enum AsyncEvent {
    #[default]
    AudioPlayback,
    AudioRecording,
    Cloud, // like FF7
    Dialog,
    Http,
    InAppPurchase,
    ImageLoaded,
    Networking,
    PushNotification,
    SaveLoad,
    Social,
    Steam,
    System,
}
impl OtherEvent {
    /// Base `event_num` for `OutsideView(n)` events.
    pub const OUTSIDE_VIEW_BASE: usize = 40;
    /// Maximum view index for outside/intersect view events.
    pub const OUTSIDE_VIEW_MAX: usize = 7;
    /// Base `event_num` for `IntersectView(n)` events.
    pub const INTERSECT_VIEW_BASE: usize = 50;
    /// Base `event_num` for `UserEvent(n)` events.
    pub const USER_EVENT_BASE: usize = 10;
    /// Maximum user-event index.
    pub const USER_EVENT_MAX: usize = 15;
}
/// A simpler, less idiomatic and less understandable, but more direct,
/// representation of Gms2 event types and numbers. We use this internally in
/// the serde of the higher level `EventType` enum, which is also given.
///
/// This struct is made public largely so non-Rust applications downstream can
/// have an easier interface to work with. Rust applications are encouraged to
/// stick with the more idiomatic and user-friendly `EventType`, which is far
/// more type safe while being equally performant.
#[derive(
    Debug, PartialEq, Eq, Ord, PartialOrd, Hash, SmartDefault, Serialize, Deserialize, Copy, Clone,
)]
pub struct EventIntermediary {
    /// Raw Gms2 event-type discriminant (e.g. 0 = Create, 3 = Step).
    event_type: usize,
    /// Raw event number within the event type.
    event_num: usize,
}
/// Errors produced when converting between `EventIntermediary` and
/// `EventType`, or when parsing event filenames.
#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, Copy, Clone, Serialize, Deserialize)]
pub enum EventTypeConvertErrors {
    /// The `event_num` was invalid for the given `event_type` (the payload).
    CannotFindEventNumber(usize),
    /// The `event_type` discriminant itself was not recognized.
    CannotFindEventType,
    /// The input string did not follow the `Name_number` pattern.
    BadString,
}
// `Display` is implemented below, so the blanket `Error` impl suffices.
impl std::error::Error for EventTypeConvertErrors {}
impl fmt::Display for EventTypeConvertErrors {
    /// Writes a short human-readable description of the conversion failure.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::CannotFindEventNumber(event_type) => {
                write!(f, "invalid event_number for event type {}", event_type)
            }
            Self::CannotFindEventType => write!(f, "invalid event_type"),
            Self::BadString => write!(f, "string didn't follow pattern x_y"),
        }
    }
}
/// Maps each `EventType` to its raw Gms2 `(event_type, event_num)` pair.
/// The numeric values mirror the Gms2 file format and must stay in sync with
/// the `TryFrom<EventIntermediary>` implementation below.
impl From<EventType> for EventIntermediary {
    fn from(o: EventType) -> Self {
        match o {
            // Lifetime events: Create (0), Destroy (1), Cleanup (12).
            EventType::Create => EventIntermediary {
                event_num: 0,
                event_type: 0,
            },
            EventType::Destroy => EventIntermediary {
                event_type: 1,
                event_num: 0,
            },
            EventType::Cleanup => EventIntermediary {
                event_type: 12,
                event_num: 0,
            },
            // Step events are type 3; stage selects the number.
            EventType::Step(stage) => match stage {
                Stage::Main => EventIntermediary {
                    event_type: 3,
                    event_num: 0,
                },
                Stage::Begin => EventIntermediary {
                    event_type: 3,
                    event_num: 1,
                },
                Stage::End => EventIntermediary {
                    event_type: 3,
                    event_num: 2,
                },
            },
            // Alarms are type 2; the alarm index is the number directly.
            EventType::Alarm(alarm_number) => EventIntermediary {
                event_type: 2,
                event_num: alarm_number,
            },
            // Draw events are type 8.
            EventType::Draw(draw_event) => match draw_event {
                DrawEvent::Draw(stage) => match stage {
                    Stage::Main => EventIntermediary {
                        event_type: 8,
                        event_num: 0,
                    },
                    Stage::Begin => EventIntermediary {
                        event_type: 8,
                        event_num: 72,
                    },
                    Stage::End => EventIntermediary {
                        event_type: 8,
                        event_num: 73,
                    },
                },
                DrawEvent::DrawGui(stage) => match stage {
                    Stage::Main => EventIntermediary {
                        event_type: 8,
                        event_num: 64,
                    },
                    Stage::Begin => EventIntermediary {
                        event_type: 8,
                        event_num: 74,
                    },
                    Stage::End => EventIntermediary {
                        event_type: 8,
                        event_num: 75,
                    },
                },
                DrawEvent::PreDraw => EventIntermediary {
                    event_type: 8,
                    event_num: 76,
                },
                DrawEvent::PostDraw => EventIntermediary {
                    event_type: 8,
                    event_num: 77,
                },
                DrawEvent::WindowResize => EventIntermediary {
                    event_type: 8,
                    event_num: 65,
                },
            },
            EventType::Collision => EventIntermediary {
                event_type: 4,
                event_num: 0,
            },
            // Mouse events are type 6; the number combines the per-kind
            // offset with the button's local/global offset.
            EventType::Mouse(mouse_event) => EventIntermediary {
                event_type: 6,
                event_num: match mouse_event {
                    MouseEvent::Down(mb) => MouseEvent::DOWN_OFFSET + mb.event_offset(),
                    MouseEvent::Pressed(mb) => MouseEvent::PRESSED_OFFSET + mb.event_offset(),
                    MouseEvent::Released(mb) => MouseEvent::RELEASED_OFFSET + mb.event_offset(),
                    MouseEvent::NoInput => 3,
                    MouseEvent::MouseEnter => 10,
                    MouseEvent::MouseExit => 11,
                    MouseEvent::MouseWheelUp => 60,
                    MouseEvent::MouseWheelDown => 61,
                },
            },
            // Keyboard events: the key code is the number directly.
            EventType::KeyDown(vk) => EventIntermediary {
                event_type: 5,
                event_num: vk as usize,
            },
            EventType::KeyPress(vk) => EventIntermediary {
                event_type: 9,
                event_num: vk as usize,
            },
            EventType::KeyRelease(vk) => EventIntermediary {
                event_type: 10,
                event_num: vk as usize,
            },
            // Gestures are type 13; global gestures add GLOBAL_OFFSET.
            EventType::Gesture(gv) => EventIntermediary {
                event_type: 13,
                event_num: gv.gesture as usize
                    + if gv.local {
                        0
                    } else {
                        GestureEvent::GLOBAL_OFFSET
                    },
            },
            // "Other" and "Async" events share type 7.
            EventType::Other(other_event) => EventIntermediary {
                event_type: 7,
                event_num: match other_event {
                    OtherEvent::OutsideRoom => 0,
                    OtherEvent::IntersectBoundary => 1,
                    OtherEvent::OutsideView(val) => OtherEvent::OUTSIDE_VIEW_BASE + val,
                    OtherEvent::IntersectView(val) => OtherEvent::INTERSECT_VIEW_BASE + val,
                    OtherEvent::GameStart => 2,
                    OtherEvent::GameEnd => 3,
                    OtherEvent::RoomStart => 4,
                    OtherEvent::RoomEnd => 5,
                    OtherEvent::AnimationEnd => 7,
                    OtherEvent::AnimationUpdate => 58,
                    OtherEvent::AnimationEvent => 59,
                    OtherEvent::PathEnded => 8,
                    OtherEvent::UserEvent(ev_num) => OtherEvent::USER_EVENT_BASE + ev_num,
                    OtherEvent::BroadcastMessage => 76,
                },
            },
            EventType::Async(async_event) => EventIntermediary {
                event_type: 7,
                event_num: match async_event {
                    AsyncEvent::AudioPlayback => 74,
                    AsyncEvent::AudioRecording => 73,
                    AsyncEvent::Cloud => 67,
                    AsyncEvent::Dialog => 63,
                    AsyncEvent::Http => 62,
                    AsyncEvent::InAppPurchase => 66,
                    AsyncEvent::ImageLoaded => 60,
                    AsyncEvent::Networking => 68,
                    AsyncEvent::PushNotification => 71,
                    AsyncEvent::SaveLoad => 72,
                    AsyncEvent::Social => 70,
                    AsyncEvent::Steam => 69, // nice
                    AsyncEvent::System => 75,
                },
            },
        }
    }
}
/// Reconstructs a typed `EventType` from the raw `(event_type, event_num)`
/// pair, rejecting combinations that do not appear in the `From` mapping
/// above.
impl TryFrom<EventIntermediary> for EventType {
    type Error = EventTypeConvertErrors;
    fn try_from(o: EventIntermediary) -> Result<Self, Self::Error> {
        // Range upper bounds are precomputed because match range patterns
        // require constants.
        const USER_EVENT_MAX_ABS: usize = OtherEvent::USER_EVENT_BASE + OtherEvent::USER_EVENT_MAX;
        const OUTSIDE_VIEW_MAX: usize =
            OtherEvent::OUTSIDE_VIEW_BASE + OtherEvent::OUTSIDE_VIEW_MAX;
        const OUTSIDE_INTERSECT_MAX: usize =
            OtherEvent::INTERSECT_VIEW_BASE + OtherEvent::OUTSIDE_VIEW_MAX;
        let output = match o.event_type {
            // lifetime events
            0 => {
                if o.event_num == 0 {
                    EventType::Create
                } else {
                    return Err(EventTypeConvertErrors::CannotFindEventNumber(0));
                }
            }
            1 => {
                if o.event_num == 0 {
                    EventType::Destroy
                } else {
                    return Err(EventTypeConvertErrors::CannotFindEventNumber(1));
                }
            }
            12 => {
                if o.event_num == 0 {
                    EventType::Cleanup
                } else {
                    return Err(EventTypeConvertErrors::CannotFindEventNumber(12));
                }
            }
            // step
            3 => match o.event_num {
                0 => EventType::Step(Stage::Main),
                1 => EventType::Step(Stage::Begin),
                2 => EventType::Step(Stage::End),
                _ => return Err(EventTypeConvertErrors::CannotFindEventNumber(3)),
            },
            // alarms are only valid up to ALARM_MAX
            2 => {
                if o.event_num <= EventType::ALARM_MAX {
                    EventType::Alarm(o.event_num)
                } else {
                    return Err(EventTypeConvertErrors::CannotFindEventNumber(2));
                }
            }
            // draw events
            8 => match o.event_num {
                0 => EventType::Draw(DrawEvent::Draw(Stage::Main)),
                72 => EventType::Draw(DrawEvent::Draw(Stage::Begin)),
                73 => EventType::Draw(DrawEvent::Draw(Stage::End)),
                64 => EventType::Draw(DrawEvent::DrawGui(Stage::Main)),
                74 => EventType::Draw(DrawEvent::DrawGui(Stage::Begin)),
                75 => EventType::Draw(DrawEvent::DrawGui(Stage::End)),
                76 => EventType::Draw(DrawEvent::PreDraw),
                77 => EventType::Draw(DrawEvent::PostDraw),
                65 => EventType::Draw(DrawEvent::WindowResize),
                _ => return Err(EventTypeConvertErrors::CannotFindEventNumber(8)),
            },
            4 => {
                if o.event_num == 0 {
                    EventType::Collision
                } else {
                    return Err(EventTypeConvertErrors::CannotFindEventNumber(4));
                }
            }
            // mouse: button events are decoded by `convert_to_input`;
            // enter/exit/wheel have fixed numbers handled here.
            6 => {
                if let Some(mouse_event) = MouseEvent::convert_to_input(o.event_num) {
                    EventType::Mouse(mouse_event)
                } else {
                    match o.event_num {
                        10 => EventType::Mouse(MouseEvent::MouseEnter),
                        11 => EventType::Mouse(MouseEvent::MouseExit),
                        60 => EventType::Mouse(MouseEvent::MouseWheelUp),
                        61 => EventType::Mouse(MouseEvent::MouseWheelDown),
                        _ => return Err(EventTypeConvertErrors::CannotFindEventNumber(6)),
                    }
                }
            }
            // keyboard: the number is the virtual key code.
            5 => {
                if let Some(vk) = FromPrimitive::from_usize(o.event_num) {
                    EventType::KeyDown(vk)
                } else {
                    return Err(EventTypeConvertErrors::CannotFindEventNumber(5));
                }
            }
            9 => {
                if let Some(vk) = FromPrimitive::from_usize(o.event_num) {
                    EventType::KeyPress(vk)
                } else {
                    return Err(EventTypeConvertErrors::CannotFindEventNumber(9));
                }
            }
            10 => {
                if let Some(vk) = FromPrimitive::from_usize(o.event_num) {
                    EventType::KeyRelease(vk)
                } else {
                    return Err(EventTypeConvertErrors::CannotFindEventNumber(10));
                }
            }
            13 => {
                if let Some(event) = GestureEvent::convert_to_input(o.event_num) {
                    EventType::Gesture(event)
                } else {
                    return Err(EventTypeConvertErrors::CannotFindEventNumber(13));
                }
            }
            // type 7 carries both Other and Async events.
            7 => match o.event_num {
                // OTHER EVENTS
                0 => EventType::Other(OtherEvent::OutsideRoom),
                1 => EventType::Other(OtherEvent::IntersectBoundary),
                val @ OtherEvent::OUTSIDE_VIEW_BASE..=OUTSIDE_VIEW_MAX => {
                    EventType::Other(OtherEvent::OutsideView(val - OtherEvent::OUTSIDE_VIEW_BASE))
                }
                val @ OtherEvent::INTERSECT_VIEW_BASE..=OUTSIDE_INTERSECT_MAX => EventType::Other(
                    OtherEvent::IntersectView(val - OtherEvent::INTERSECT_VIEW_BASE),
                ),
                2 => EventType::Other(OtherEvent::GameStart),
                3 => EventType::Other(OtherEvent::GameEnd),
                4 => EventType::Other(OtherEvent::RoomStart),
                5 => EventType::Other(OtherEvent::RoomEnd),
                7 => EventType::Other(OtherEvent::AnimationEnd),
                58 => EventType::Other(OtherEvent::AnimationUpdate),
                59 => EventType::Other(OtherEvent::AnimationEvent),
                8 => EventType::Other(OtherEvent::PathEnded),
                val @ OtherEvent::USER_EVENT_BASE..=USER_EVENT_MAX_ABS => {
                    EventType::Other(OtherEvent::UserEvent(val - OtherEvent::USER_EVENT_BASE))
                }
                76 => EventType::Other(OtherEvent::BroadcastMessage),
                // ASYNC EVENTS
                74 => EventType::Async(AsyncEvent::AudioPlayback),
                73 => EventType::Async(AsyncEvent::AudioRecording),
                67 => EventType::Async(AsyncEvent::Cloud),
                63 => EventType::Async(AsyncEvent::Dialog),
                62 => EventType::Async(AsyncEvent::Http),
                66 => EventType::Async(AsyncEvent::InAppPurchase),
                60 => EventType::Async(AsyncEvent::ImageLoaded),
                68 => EventType::Async(AsyncEvent::Networking),
                71 => EventType::Async(AsyncEvent::PushNotification),
                72 => EventType::Async(AsyncEvent::SaveLoad),
                70 => EventType::Async(AsyncEvent::Social),
                69 => EventType::Async(AsyncEvent::Steam),
                75 => EventType::Async(AsyncEvent::System),
                _ => return Err(EventTypeConvertErrors::CannotFindEventNumber(7)),
            },
            _ => return Err(EventTypeConvertErrors::CannotFindEventType),
        };
        Ok(output)
    }
}
/// Map keys recognized while deserializing an `EventType`.
/// The wire names are `"eventNum"` and `"eventType"` (see `FIELD_NAMES`).
#[derive(Debug, Serialize, Deserialize)]
enum Field {
    #[serde(rename = "eventNum")]
    Number,
    #[serde(rename = "eventType")]
    Type,
}
use serde::de::{Error, MapAccess, Visitor};
struct DeserializerVisitor;
impl<'de> Visitor<'de> for DeserializerVisitor {
    type Value = EventType;
    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str(r#"a value of "eventNum""#)
    }
    // Collects both fields (rejecting duplicates), then converts the pair via
    // `EventType::try_from`, surfacing conversion failures as deserialization errors.
    fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
    where
        A: MapAccess<'de>,
    {
        let mut event_number = None;
        let mut event_type = None;
        while let Some(key) = map.next_key()? {
            match key {
                Field::Number => {
                    // Reject a second "eventNum" key rather than silently overwriting.
                    if event_number.is_some() {
                        return Err(Error::duplicate_field("eventNum"));
                    }
                    event_number = Some(map.next_value()?);
                }
                Field::Type => {
                    if event_type.is_some() {
                        return Err(Error::duplicate_field("eventType"));
                    }
                    event_type = Some(map.next_value()?);
                }
            }
        }
        // Both fields are mandatory; a missing one is a hard error.
        let event_intermediary = EventIntermediary {
            event_type: event_type.ok_or_else(|| Error::missing_field("eventType"))?,
            event_num: event_number.ok_or_else(|| Error::missing_field("eventNum"))?,
        };
        EventType::try_from(event_intermediary).map_err(Error::custom)
    }
}
impl Serialize for EventType {
    /// Serializes by first converting to the two-integer `EventIntermediary`
    /// form, emitted as a struct with fields `eventNum` and `eventType`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let val: EventIntermediary = (*self).into();
        // `serialize_field` comes from `SerializeStruct` — presumably imported
        // earlier in this file; confirm if refactoring imports.
        let mut inter = serializer.serialize_struct("EventIntermediary", 2)?;
        inter.serialize_field("eventNum", &val.event_num)?;
        inter.serialize_field("eventType", &val.event_type)?;
        inter.end()
    }
}
impl<'de> Deserialize<'de> for EventType {
    /// Deserializes the `{eventNum, eventType}` struct form produced by
    /// `Serialize`, delegating field handling to `DeserializerVisitor`.
    fn deserialize<D>(deserializer: D) -> Result<EventType, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_struct("EventType", &FIELD_NAMES, DeserializerVisitor)
    }
}
const FIELD_NAMES: [&str; 2] = ["eventNum", "eventType"];
impl fmt::Display for EventType {
    /// Writes the human-readable label for this event; compound variants
    /// delegate to their own `Display` implementations.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            EventType::Create => f.write_str("Create"),
            EventType::Destroy => f.write_str("Destroy"),
            EventType::Cleanup => f.write_str("CleanUp"),
            EventType::Collision => f.write_str("Collision"),
            EventType::Step(stage) => f.write_str(&stage.display_with_before("Step")),
            EventType::Alarm(number) => write!(f, "Alarm {}", number),
            EventType::KeyDown(key) => write!(f, "Key Down - {}", key.as_ref()),
            EventType::KeyPress(key) => write!(f, "Key Press - {}", key.as_ref()),
            EventType::KeyRelease(key) => write!(f, "Key Up - {}", key.as_ref()),
            EventType::Draw(draw_stage) => fmt::Display::fmt(draw_stage, f),
            EventType::Mouse(mouse) => fmt::Display::fmt(mouse, f),
            EventType::Gesture(gesture) => fmt::Display::fmt(gesture, f),
            EventType::Other(other) => fmt::Display::fmt(other, f),
            EventType::Async(async_ev) => fmt::Display::fmt(async_ev, f),
        }
    }
}
impl Stage {
    /// Renders `other` prefixed by this stage's name ("Begin X" / "End X");
    /// `Stage::Main` yields `other` unchanged.
    pub fn display_with_before(&self, other: &str) -> String {
        let prefix = match self {
            Stage::Main => return other.to_string(),
            Stage::Begin => "Begin",
            Stage::End => "End",
        };
        format!("{} {}", prefix, other)
    }
    /// Renders `other` suffixed by this stage's name ("X Begin" / "X End");
    /// `Stage::Main` yields `other` unchanged.
    pub fn display_with_after(&self, other: &str) -> String {
        let suffix = match self {
            Stage::Main => return other.to_string(),
            Stage::Begin => "Begin",
            Stage::End => "End",
        };
        format!("{} {}", other, suffix)
    }
}
impl fmt::Display for DrawEvent {
    /// Writes the draw event's label; staged variants are formatted through
    /// `Stage::display_with_after`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let label = match self {
            DrawEvent::Draw(stage) => stage.display_with_after("Draw"),
            DrawEvent::DrawGui(stage) => stage.display_with_after("Draw GUI"),
            DrawEvent::PreDraw => "Pre-Draw".to_string(),
            DrawEvent::PostDraw => "Post-Draw".to_string(),
            DrawEvent::WindowResize => "Window Resize".to_string(),
        };
        f.write_str(&label)
    }
}
impl fmt::Display for MouseEvent {
    /// Writes the mouse event's label; button variants append the action
    /// after the button's own `Display` output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MouseEvent::NoInput => f.write_str("No Mouse Input"),
            MouseEvent::MouseEnter => f.write_str("Mouse Enter"),
            MouseEvent::MouseExit => f.write_str("Mouse Leave"),
            MouseEvent::MouseWheelUp => f.write_str("Mouse Wheel Up"),
            MouseEvent::MouseWheelDown => f.write_str("Mouse Wheel Down"),
            MouseEvent::Down(button) => write!(f, "{} Down", button),
            MouseEvent::Pressed(button) => write!(f, "{} Pressed", button),
            MouseEvent::Released(button) => write!(f, "{} Released", button),
        }
    }
}
impl fmt::Display for MouseButton {
    /// Writes the button name, prefixed with "Global " when the button is not
    /// a local (instance-scoped) binding.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if !self.local {
            f.write_str("Global ")?;
        }
        f.write_str(match self.mb_code {
            MouseButtonCode::Left => "Left",
            MouseButtonCode::Right => "Right",
            MouseButtonCode::Middle => "Middle",
        })
    }
}
impl fmt::Display for GestureEvent {
    /// Writes the gesture name, prefixed with "Global " when the gesture is
    /// not a local (instance-scoped) binding.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if !self.local {
            f.write_str("Global ")?;
        }
        f.write_str(match self.gesture {
            Gesture::Tap => "Tap",
            Gesture::DoubleTap => "Double Tap",
            Gesture::DragStart => "Drag Start",
            Gesture::Dragging => "Dragging",
            Gesture::DragEnd => "Drag End",
            Gesture::Flick => "Flick",
            Gesture::PinchStart => "Pinch Start",
            Gesture::PinchIn => "Pinch In",
            Gesture::PinchOut => "Pinch Out",
            Gesture::PinchEnd => "Pinch End",
            Gesture::RotateStart => "Rotate Start",
            Gesture::Rotating => "Rotating",
            Gesture::RotateEnd => "Rotate End",
        })
    }
}
impl fmt::Display for OtherEvent {
    /// Writes the event label; the indexed variants (views, user events)
    /// interpolate their numeric payload.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            OtherEvent::OutsideView(view) => write!(f, "Outside View {}", view),
            OtherEvent::IntersectView(view) => write!(f, "Intersect View {} Boundary", view),
            OtherEvent::UserEvent(event) => write!(f, "User Event {}", event),
            OtherEvent::OutsideRoom => f.write_str("Outside Room"),
            OtherEvent::IntersectBoundary => f.write_str("Intersect Boundary"),
            OtherEvent::GameStart => f.write_str("Game Start"),
            OtherEvent::GameEnd => f.write_str("Game End"),
            OtherEvent::RoomStart => f.write_str("Room Start"),
            OtherEvent::RoomEnd => f.write_str("Room End"),
            OtherEvent::AnimationEnd => f.write_str("Animation End"),
            OtherEvent::AnimationUpdate => f.write_str("Animation Update"),
            OtherEvent::AnimationEvent => f.write_str("Animation Event"),
            OtherEvent::PathEnded => f.write_str("Path Ended"),
            OtherEvent::BroadcastMessage => f.write_str("Broadcast Message"),
        }
    }
}
impl fmt::Display for AsyncEvent {
    /// Writes the event label, always prefixed with "Async - ".
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("Async - ")?;
        f.write_str(match self {
            AsyncEvent::AudioPlayback => "Audio Playback",
            AsyncEvent::AudioRecording => "Audio Recording",
            AsyncEvent::Cloud => "Cloud",
            AsyncEvent::Dialog => "Dialog",
            AsyncEvent::Http => "Http",
            AsyncEvent::InAppPurchase => "In-App Purchase",
            AsyncEvent::ImageLoaded => "Image Loaded",
            AsyncEvent::Networking => "Networking",
            AsyncEvent::PushNotification => "Push Notification",
            AsyncEvent::SaveLoad => "Save/Load",
            AsyncEvent::Social => "Social",
            AsyncEvent::Steam => "Steam",
            AsyncEvent::System => "System",
        })
    }
}
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
    /// Test helper: flattens an `EventType` between two padding fields to
    /// verify that `#[serde(flatten)]` interoperates with the custom
    /// (de)serializers above.
    #[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
    struct Wrapper {
        pub rev_padding: usize,
        #[serde(flatten)]
        pub event_type: EventType,
        pub padding: usize,
    }
#[test]
fn basic_serialize() {
let value = Wrapper {
rev_padding: 10,
event_type: EventType::Create,
padding: 10,
};
let as_serde: String = serde_json::to_string(&value).unwrap();
assert_eq!(
as_serde,
r#"{"rev_padding":10,"eventNum":0,"eventType":0,"padding":10}"#
)
}
#[test]
fn basic_deserialize() {
let wrapper_str =
r#"{"rev_padding":10,"eventNum":0,"eventType":0,"ioop": "oop","padding":10}"#;
let wrapper: Wrapper = serde_json::from_str(wrapper_str).unwrap();
assert_eq!(
wrapper,
Wrapper {
rev_padding: 10,
event_type: EventType::Create,
padding: 10
}
);
let event_type_str = r#"{"eventNum":0,"eventType":0}"#;
let event_type: EventType = serde_json::from_str(event_type_str).unwrap();
assert_eq!(event_type, EventType::Create);
let event_type_str = r#"{"eventNum":100,"eventType":1000}"#;
let event_type: Result<EventType, _> = serde_json::from_str(event_type_str);
assert!(event_type.is_err());
}
#[test]
fn symmetry() {
harness(EventType::Create);
harness(EventType::Destroy);
harness(EventType::Cleanup);
harness(EventType::Step(Stage::Main));
harness(EventType::Step(Stage::Begin));
harness(EventType::Step(Stage::End));
for i in 0..=11 {
harness(EventType::Alarm(i));
}
harness(EventType::Draw(DrawEvent::Draw(Stage::Main)));
harness(EventType::Draw(DrawEvent::Draw(Stage::Begin)));
harness(EventType::Draw(DrawEvent::Draw(Stage::End)));
harness(EventType::Draw(DrawEvent::DrawGui(Stage::Main)));
harness(EventType::Draw(DrawEvent::DrawGui(Stage::Begin)));
harness(EventType::Draw(DrawEvent::DrawGui(Stage::End)));
harness(EventType::Draw(DrawEvent::PreDraw));
harness(EventType::Draw(DrawEvent::PostDraw));
harness(EventType::Draw(DrawEvent::WindowResize));
harness(EventType::Collision);
for i in 0..MouseButtonCode::COUNT {
let mouse_button_code = FromPrimitive::from_usize(i).unwrap();
harness(EventType::Mouse(MouseEvent::Down(MouseButton {
mb_code: mouse_button_code,
local: true,
})));
harness(EventType::Mouse(MouseEvent::Pressed(MouseButton {
mb_code: mouse_button_code,
local: true,
})));
harness(EventType::Mouse(MouseEvent::Released(MouseButton {
mb_code: mouse_button_code,
local: true,
})));
let mouse_button_code = FromPrimitive::from_usize(i).unwrap();
harness(EventType::Mouse(MouseEvent::Down(MouseButton {
mb_code: mouse_button_code,
local: false,
})));
harness(EventType::Mouse(MouseEvent::Pressed(MouseButton {
mb_code: mouse_button_code,
local: false,
})));
harness(EventType::Mouse(MouseEvent::Released(MouseButton {
mb_code: mouse_button_code,
local: false,
})));
}
harness(EventType::Mouse(MouseEvent::NoInput));
harness(EventType::Mouse(MouseEvent::MouseEnter));
harness(EventType::Mouse(MouseEvent::MouseExit));
for i in 0..200 {
if let Some(vk) = num_traits::FromPrimitive::from_usize(i) {
harness(EventType::KeyDown(vk));
harness(EventType::KeyPress(vk));
harness(EventType::KeyRelease(vk));
}
}
for i in 0..Gesture::COUNT {
let gesture = FromPrimitive::from_usize(i).unwrap();
harness(EventType::Gesture(GestureEvent {
gesture,
local: true,
}));
harness(EventType::Gesture(GestureEvent {
gesture,
local: false,
}));
}
harness(EventType::Other(OtherEvent::OutsideRoom));
harness(EventType::Other(OtherEvent::IntersectBoundary));
for i in 0..=OtherEvent::OUTSIDE_VIEW_MAX {
harness(EventType::Other(OtherEvent::OutsideView(i)));
harness(EventType::Other(OtherEvent::IntersectView(i)));
}
harness(EventType::Other(OtherEvent::GameStart));
harness(EventType::Other(OtherEvent::GameEnd));
harness(EventType::Other(OtherEvent::RoomStart));
harness(EventType::Other(OtherEvent::RoomEnd));
harness(EventType::Other(OtherEvent::AnimationEnd));
harness(EventType::Other(OtherEvent::AnimationUpdate));
harness(EventType::Other(OtherEvent::AnimationEvent));
harness(EventType::Other(OtherEvent::PathEnded));
for i in 0..=OtherEvent::USER_EVENT_MAX {
harness(EventType::Other(OtherEvent::UserEvent(i)));
}
harness(EventType::Other(OtherEvent::BroadcastMessage));
harness(EventType::Async(AsyncEvent::AudioRecording));
harness(EventType::Async(AsyncEvent::AudioRecording));
harness(EventType::Async(AsyncEvent::Cloud));
harness(EventType::Async(AsyncEvent::Dialog));
harness(EventType::Async(AsyncEvent::Http));
harness(EventType::Async(AsyncEvent::InAppPurchase));
harness(EventType::Async(AsyncEvent::ImageLoaded));
harness(EventType::Async(AsyncEvent::Networking));
harness(EventType::Async(AsyncEvent::PushNotification));
harness(EventType::Async(AsyncEvent::SaveLoad));
harness(EventType::Async(AsyncEvent::Social));
harness(EventType::Async(AsyncEvent::Steam));
harness(EventType::Async(AsyncEvent::System));
fn harness(val: EventType) {
let output = EventType::try_from(EventIntermediary::from(val)).unwrap();
assert_eq!(val, output);
}
}
#[test]
fn symmetry_from_event_intermediary() {
for event_type in 0..100 {
for event_num in 0..100 {
let ei = EventIntermediary {
event_type,
event_num,
};
if let Ok(et) = EventType::try_from(ei) {
assert_eq!(
EventIntermediary::from(et),
ei,
"input: {{event_type:{}, event_num:{}}}",
event_type,
event_num
);
}
}
}
}
#[test]
fn filepath_symmetry() {
let event_names = [
"Create",
"Destroy",
"CleanUp",
"Step",
"Alarm",
"Draw",
"Collision",
"Other",
"Other",
"Keyboard",
"KeyPress",
"KeyRelease",
"Gesture",
];
for name in event_names.iter() {
for i in 0..200 {
match EventType::parse_filename(name, i) {
Ok(event) => {
let (output_fname, event_number) = event.filename();
assert_eq!(output_fname, *name);
assert_eq!(event_number, i);
}
Err(e) => {
assert!(
matches!(e, EventTypeConvertErrors::CannotFindEventNumber(_)),
"input: {}, {} || got {}",
name,
i,
e
);
}
}
}
}
}
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Serves ClientStateUpdate listeners.
//!
use {
fidl_fuchsia_wlan_policy as fidl_policy,
futures::{channel::mpsc, prelude::*, select, stream::FuturesUnordered},
parking_lot::Mutex,
std::sync::Arc,
};
/// Convenience wrapper for cloning a `ClientStateSummary`.
/// Needed because the FIDL-generated type does not derive `Clone` itself.
struct ClientStateSummaryCloner(fidl_policy::ClientStateSummary);
impl ClientStateSummaryCloner {
    /// Produces a deep copy of the wrapped summary by cloning each field and
    /// every entry of the optional network list.
    fn clone(&self) -> fidl_policy::ClientStateSummary {
        let networks = self.0.networks.as_ref().map(|list| {
            list.iter()
                .map(|network| fidl_policy::NetworkState {
                    id: network.id.clone(),
                    state: network.state.clone(),
                    status: network.status.clone(),
                })
                .collect()
        });
        fidl_policy::ClientStateSummary {
            state: self.0.state.clone(),
            networks,
        }
    }
}
// Allows constructing the cloner directly from a summary via `.into()`.
impl From<fidl_policy::ClientStateSummary> for ClientStateSummaryCloner {
    fn from(summary: fidl_policy::ClientStateSummary) -> Self {
        Self(summary)
    }
}
/// Messages sent by either of the two served services to interact with a
/// shared pool of listeners.
#[derive(Debug)]
pub enum Message {
    /// Sent if a new listener wants to register itself for future updates.
    NewListener(fidl_policy::ClientStateUpdatesProxy),
    /// Sent if an entity wants to notify all listeners about a state change.
    #[allow(unused)]
    NotifyListeners(fidl_policy::ClientStateSummary),
}
/// Sender half used by service handlers to reach the listener pool.
pub type MessageSender = mpsc::UnboundedSender<Message>;
/// Receiver half consumed by `serve`.
pub type MessageStream = mpsc::UnboundedReceiver<Message>;
/// Serves and manages a list of ClientListener.
/// Use `Message` to interact with registered listeners.
///
/// Listeners move between two pools: `acked_listeners` (ready for the next
/// update) and `unacked_listeners` (futures awaiting acknowledgement of the
/// last update sent to them).
pub async fn serve(mut messages: MessageStream) {
    // A list of listeners which are ready to receive updates.
    let acked_listeners = Arc::new(Mutex::new(vec![]));
    // A list of pending listeners which have not acknowledged their last update yet.
    let mut unacked_listeners = FuturesUnordered::new();
    loop {
        select! {
            // Enqueue every listener back into the listener pool once they ack'ed the reception
            // of a previously sent update.
            // Listeners which already closed their channel will be dropped
            // (notify_listener returns None for them).
            // TODO(38128): Clients must be sent the latest update if they weren't updated due to
            // their inactivity.
            listener = unacked_listeners.select_next_some() => if let Some(listener) = listener {
                acked_listeners.lock().push(listener);
            },
            // Message for listeners
            msg = messages.select_next_some() => match msg {
                // Register new listener; it is immediately sent the current state
                // and parked in the unacked pool until it responds.
                Message::NewListener(listener) => {
                    unacked_listeners.push(notify_listener(listener, current_state()));
                },
                // Notify all listeners
                Message::NotifyListeners(update) => {
                    let update = ClientStateSummaryCloner(update);
                    let mut listeners = acked_listeners.lock();
                    // Listeners are dequeued and their pending acknowledgement is enqueued.
                    while !listeners.is_empty() {
                        let listener = listeners.remove(0);
                        unacked_listeners.push(notify_listener(listener, update.clone()));
                    }
                },
            },
        }
    }
}
/// Notifies a listener about the given update.
/// Returns `Some(listener)` when the listener acknowledged the update, or
/// `None` when the FIDL call failed (e.g. the listener closed its channel).
async fn notify_listener(
    listener: fidl_policy::ClientStateUpdatesProxy,
    update: fidl_policy::ClientStateSummary,
) -> Option<fidl_policy::ClientStateUpdatesProxy> {
    match listener.on_client_state_update(update).await {
        Ok(()) => Some(listener),
        Err(_) => None,
    }
}
/// Returns the current state of the active Client.
/// Right now, only a dummy state update (all fields `None`) is returned.
fn current_state() -> fidl_policy::ClientStateSummary {
    // TODO(hahnr): Don't just send a dummy state update but the correct current state of the
    // interface.
    fidl_policy::ClientStateSummary { state: None, networks: None }
}
#[cfg(test)]
mod tests {
use {
super::*, fidl::endpoints::create_proxy, fuchsia_async as fasync, futures::task::Poll,
pin_utils::pin_mut, wlan_common::assert_variant,
};
#[test]
fn initial_update() {
let mut exec = fasync::Executor::new().expect("failed to create an executor");
let (mut update_sender, listener_updates) = mpsc::unbounded();
let serve_listeners = serve(listener_updates);
pin_mut!(serve_listeners);
assert_variant!(exec.run_until_stalled(&mut serve_listeners), Poll::Pending);
// Register listener.
let mut l1_stream = register_listener(&mut exec, &mut update_sender, &mut serve_listeners);
// Verify first listener received an update.
let summary = ack_next_status_update(&mut exec, &mut l1_stream, &mut serve_listeners);
assert_eq!(summary, fidl_policy::ClientStateSummary { state: None, networks: None });
// Verify exactly one update was sent.
assert_variant!(exec.run_until_stalled(&mut l1_stream.next()), Poll::Pending);
}
#[test]
fn multiple_listeners_broadcast() {
let mut exec = fasync::Executor::new().expect("failed to create an executor");
let (mut update_sender, listener_updates) = mpsc::unbounded();
let serve_listeners = serve(listener_updates);
pin_mut!(serve_listeners);
assert_variant!(exec.run_until_stalled(&mut serve_listeners), Poll::Pending);
// Register #1 listener & ack initial update.
let mut l1_stream = register_listener(&mut exec, &mut update_sender, &mut serve_listeners);
ack_next_status_update(&mut exec, &mut l1_stream, &mut serve_listeners);
// Register #2 listener & ack initial update.
let mut l2_stream = register_listener(&mut exec, &mut update_sender, &mut serve_listeners);
ack_next_status_update(&mut exec, &mut l2_stream, &mut serve_listeners);
// Send an update to both listeners.
let update = ClientStateSummaryCloner(fidl_policy::ClientStateSummary {
state: None,
networks: Some(vec![]),
});
broadcast_update(&mut exec, &mut update_sender, update.clone(), &mut serve_listeners);
// Verify #1 listener received the update.
let summary = ack_next_status_update(&mut exec, &mut l1_stream, &mut serve_listeners);
assert_eq!(summary, update.clone());
assert_variant!(exec.run_until_stalled(&mut l1_stream.next()), Poll::Pending);
// Verify #2 listeners received the update.
let summary = ack_next_status_update(&mut exec, &mut l2_stream, &mut serve_listeners);
assert_eq!(summary, update.clone());
assert_variant!(exec.run_until_stalled(&mut l2_stream.next()), Poll::Pending);
}
#[test]
fn multiple_listeners_unacked() {
let mut exec = fasync::Executor::new().expect("failed to create an executor");
let (mut update_sender, listener_updates) = mpsc::unbounded();
let serve_listeners = serve(listener_updates);
pin_mut!(serve_listeners);
assert_variant!(exec.run_until_stalled(&mut serve_listeners), Poll::Pending);
// Register #1 listener & ack initial update.
let mut l1_stream = register_listener(&mut exec, &mut update_sender, &mut serve_listeners);
ack_next_status_update(&mut exec, &mut l1_stream, &mut serve_listeners);
// Register #2 listener & ack initial update.
let mut l2_stream = register_listener(&mut exec, &mut update_sender, &mut serve_listeners);
ack_next_status_update(&mut exec, &mut l2_stream, &mut serve_listeners);
// Send an update to both listeners.
let update = ClientStateSummaryCloner(fidl_policy::ClientStateSummary {
state: None,
networks: Some(vec![]),
});
broadcast_update(&mut exec, &mut update_sender, update.clone(), &mut serve_listeners);
// #1 listener acknowledges update.
ack_next_status_update(&mut exec, &mut l1_stream, &mut serve_listeners);
// #2 listener does not yet acknowledge update.
let (_, l2_responder) =
try_next_status_update(&mut exec, &mut l2_stream).expect("expected status update");
// Send another update.
let update = ClientStateSummaryCloner(fidl_policy::ClientStateSummary {
state: None,
networks: None,
});
broadcast_update(&mut exec, &mut update_sender, update.clone(), &mut serve_listeners);
// #1 listener verifies and acknowledges update.
let summary = ack_next_status_update(&mut exec, &mut l1_stream, &mut serve_listeners);
assert_eq!(summary, update.clone());
// #2 listener should not have been sent an update.
assert_variant!(exec.run_until_stalled(&mut l2_stream.next()), Poll::Pending);
// #2 listener will send ack previous update.
ack_update(&mut exec, l2_responder, &mut serve_listeners);
// Send another update.
let update = ClientStateSummaryCloner(fidl_policy::ClientStateSummary {
state: None,
networks: None,
});
broadcast_update(&mut exec, &mut update_sender, update.clone(), &mut serve_listeners);
// Verify #1 & #2 listeners received the update.
ack_next_status_update(&mut exec, &mut l1_stream, &mut serve_listeners);
ack_next_status_update(&mut exec, &mut l2_stream, &mut serve_listeners);
}
    /// Acknowledges a previously received update, then steps the service
    /// future so it can re-enqueue the listener into the acked pool.
    fn ack_update<F>(
        exec: &mut fasync::Executor,
        pending_ack: fidl_policy::ClientStateUpdatesOnClientStateUpdateResponder,
        serve_listeners: &mut F,
    ) where
        F: Future<Output = ()> + Unpin,
    {
        pending_ack.send().expect("error acking update");
        assert_variant!(exec.run_until_stalled(serve_listeners), Poll::Pending);
    }
    /// Broadcasts an update to all registered listeners, then steps the
    /// service future so it dispatches the notification.
    fn broadcast_update<F>(
        exec: &mut fasync::Executor,
        sender: &mut MessageSender,
        update: fidl_policy::ClientStateSummary,
        serve_listeners: &mut F,
    ) where
        F: Future<Output = ()> + Unpin,
    {
        // Deep-copy so the caller keeps its own summary for later assertions.
        let clone = ClientStateSummaryCloner(update).clone();
        sender.unbounded_send(Message::NotifyListeners(clone)).expect("error sending update");
        assert_variant!(exec.run_until_stalled(serve_listeners), Poll::Pending);
    }
    /// Reads and expects a status update to be available. Once the update was read it'll also be
    /// acknowledged. Panics if no update is pending.
    fn ack_next_status_update<F>(
        exec: &mut fasync::Executor,
        stream: &mut fidl_policy::ClientStateUpdatesRequestStream,
        serve_listeners: &mut F,
    ) -> fidl_policy::ClientStateSummary
    where
        F: Future<Output = ()> + Unpin,
    {
        let (summary, responder) =
            try_next_status_update(exec, stream).expect("expected status update");
        ack_update(exec, responder, serve_listeners);
        summary
    }
    /// Registers a new listener with the service and returns the request
    /// stream on which the listener receives its updates.
    fn register_listener<F>(
        exec: &mut fasync::Executor,
        sender: &mut MessageSender,
        serve_listeners: &mut F,
    ) -> fidl_policy::ClientStateUpdatesRequestStream
    where
        F: Future<Output = ()> + Unpin,
    {
        // Register #1 listener.
        let (proxy, events) = create_proxy::<fidl_policy::ClientStateUpdatesMarker>()
            .expect("failed to create ClientStateUpdates proxy");
        let stream = events.into_stream().expect("failed to create stream");
        sender.unbounded_send(Message::NewListener(proxy)).expect("error sending update");
        assert_variant!(exec.run_until_stalled(serve_listeners), Poll::Pending);
        stream
    }
    /// Tries to read a status update. Returns None if no update was received.
    /// On success, returns the summary together with the responder the caller
    /// must use to acknowledge the update.
    fn try_next_status_update(
        exec: &mut fasync::Executor,
        stream: &mut fidl_policy::ClientStateUpdatesRequestStream,
    ) -> Option<(
        fidl_policy::ClientStateSummary,
        fidl_policy::ClientStateUpdatesOnClientStateUpdateResponder,
    )> {
        let next = exec.run_until_stalled(&mut stream.next());
        if let Poll::Ready(Some(Ok(
            fidl_policy::ClientStateUpdatesRequest::OnClientStateUpdate { summary, responder },
        ))) = next
        {
            Some((summary, responder))
        } else {
            None
        }
    }
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// Machine-generated windows-rs binding: transparent wrapper over the runtime
// class's default interface pointer (Windows.UI.Composition.Desktop.DesktopWindowTarget).
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct DesktopWindowTarget(pub ::windows::core::IInspectable);
// Machine-generated method projections. Each call dispatches through a raw
// vtable slot: slots 0-5 are IUnknown/IInspectable, so the first interface
// method is slot 6. Methods not on the default interface first QI-cast
// (`Interface::cast`) to the interface that declares them. Bodies must stay
// exactly as generated — the slot indices and transmutes encode the ABI.
impl DesktopWindowTarget {
    pub fn IsTopmost(&self) -> ::windows::core::Result<bool> {
        let this = self;
        unsafe {
            let mut result__: bool = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<bool>(result__)
        }
    }
    #[cfg(feature = "Foundation")]
    pub fn Close(&self) -> ::windows::core::Result<()> {
        let this = &::windows::core::Interface::cast::<super::super::super::Foundation::IClosable>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this)).ok() }
    }
    // ICompositionObject members (slots 6-10 of that interface).
    pub fn Compositor(&self) -> ::windows::core::Result<super::Compositor> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject>(self)?;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::Compositor>(result__)
        }
    }
    #[cfg(feature = "UI_Core")]
    pub fn Dispatcher(&self) -> ::windows::core::Result<super::super::Core::CoreDispatcher> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject>(self)?;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::Core::CoreDispatcher>(result__)
        }
    }
    pub fn Properties(&self) -> ::windows::core::Result<super::CompositionPropertySet> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject>(self)?;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::CompositionPropertySet>(result__)
        }
    }
    pub fn StartAnimation<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>, Param1: ::windows::core::IntoParam<'a, super::CompositionAnimation>>(&self, propertyname: Param0, animation: Param1) -> ::windows::core::Result<()> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).9)(::core::mem::transmute_copy(this), propertyname.into_param().abi(), animation.into_param().abi()).ok() }
    }
    pub fn StopAnimation<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, propertyname: Param0) -> ::windows::core::Result<()> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).10)(::core::mem::transmute_copy(this), propertyname.into_param().abi()).ok() }
    }
    // ICompositionTarget members.
    pub fn Root(&self) -> ::windows::core::Result<super::Visual> {
        let this = &::windows::core::Interface::cast::<super::ICompositionTarget>(self)?;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::Visual>(result__)
        }
    }
    pub fn SetRoot<'a, Param0: ::windows::core::IntoParam<'a, super::Visual>>(&self, value: Param0) -> ::windows::core::Result<()> {
        let this = &::windows::core::Interface::cast::<super::ICompositionTarget>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), value.into_param().abi()).ok() }
    }
    // ICompositionObject2 members.
    pub fn Comment(&self) -> ::windows::core::Result<::windows::core::HSTRING> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject2>(self)?;
        unsafe {
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        }
    }
    pub fn SetComment<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, value: Param0) -> ::windows::core::Result<()> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject2>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), value.into_param().abi()).ok() }
    }
    pub fn ImplicitAnimations(&self) -> ::windows::core::Result<super::ImplicitAnimationCollection> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject2>(self)?;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::ImplicitAnimationCollection>(result__)
        }
    }
    pub fn SetImplicitAnimations<'a, Param0: ::windows::core::IntoParam<'a, super::ImplicitAnimationCollection>>(&self, value: Param0) -> ::windows::core::Result<()> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject2>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).9)(::core::mem::transmute_copy(this), value.into_param().abi()).ok() }
    }
    pub fn StartAnimationGroup<'a, Param0: ::windows::core::IntoParam<'a, super::ICompositionAnimationBase>>(&self, value: Param0) -> ::windows::core::Result<()> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject2>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).10)(::core::mem::transmute_copy(this), value.into_param().abi()).ok() }
    }
    pub fn StopAnimationGroup<'a, Param0: ::windows::core::IntoParam<'a, super::ICompositionAnimationBase>>(&self, value: Param0) -> ::windows::core::Result<()> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject2>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).11)(::core::mem::transmute_copy(this), value.into_param().abi()).ok() }
    }
    // ICompositionObject3 / ICompositionObject4 / IAnimationObject members.
    #[cfg(feature = "System")]
    pub fn DispatcherQueue(&self) -> ::windows::core::Result<super::super::super::System::DispatcherQueue> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject3>(self)?;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::super::System::DispatcherQueue>(result__)
        }
    }
    pub fn TryGetAnimationController<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, propertyname: Param0) -> ::windows::core::Result<super::AnimationController> {
        let this = &::windows::core::Interface::cast::<super::ICompositionObject4>(self)?;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), propertyname.into_param().abi(), &mut result__).from_abi::<super::AnimationController>(result__)
        }
    }
    pub fn PopulatePropertyInfo<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>, Param1: ::windows::core::IntoParam<'a, super::AnimationPropertyInfo>>(&self, propertyname: Param0, propertyinfo: Param1) -> ::windows::core::Result<()> {
        let this = &::windows::core::Interface::cast::<super::IAnimationObject>(self)?;
        unsafe { (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), propertyname.into_param().abi(), propertyinfo.into_param().abi()).ok() }
    }
}
// Generated WinRT metadata for `DesktopWindowTarget`: runtime-class type
// signature, default-interface IID, and the fully qualified class name.
unsafe impl ::windows::core::RuntimeType for DesktopWindowTarget {
const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.UI.Composition.Desktop.DesktopWindowTarget;{6329d6ca-3366-490e-9db3-25312929ac51})");
}
unsafe impl ::windows::core::Interface for DesktopWindowTarget {
type Vtable = IDesktopWindowTarget_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x6329d6ca_3366_490e_9db3_25312929ac51);
}
impl ::windows::core::RuntimeName for DesktopWindowTarget {
const NAME: &'static str = "Windows.UI.Composition.Desktop.DesktopWindowTarget";
}
// Generated infallible conversions to the base COM/WinRT interfaces.
// The wrapper layout is `DesktopWindowTarget(IInspectable(IUnknown))`, so
// `.0` reaches the IInspectable and `.0 .0` the IUnknown; by-reference
// conversions clone (AddRef) the pointer, by-value ones move it.
impl ::core::convert::From<DesktopWindowTarget> for ::windows::core::IUnknown {
fn from(value: DesktopWindowTarget) -> Self {
value.0 .0
}
}
impl ::core::convert::From<&DesktopWindowTarget> for ::windows::core::IUnknown {
fn from(value: &DesktopWindowTarget) -> Self {
value.0 .0.clone()
}
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
::windows::core::Param::Owned(self.0 .0)
}
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
::windows::core::Param::Borrowed(&self.0 .0)
}
}
impl ::core::convert::From<DesktopWindowTarget> for ::windows::core::IInspectable {
fn from(value: DesktopWindowTarget) -> Self {
value.0
}
}
impl ::core::convert::From<&DesktopWindowTarget> for ::windows::core::IInspectable {
fn from(value: &DesktopWindowTarget) -> Self {
value.0.clone()
}
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
::windows::core::Param::Owned(self.0)
}
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
::windows::core::Param::Borrowed(&self.0)
}
}
// Generated fallible conversions to interfaces the class may implement
// (IClosable, IAnimationObject). `TryFrom` performs a QueryInterface-style
// cast; the `IntoParam` impls map a failed cast to `Param::None`.
#[cfg(feature = "Foundation")]
impl ::core::convert::TryFrom<DesktopWindowTarget> for super::super::super::Foundation::IClosable {
type Error = ::windows::core::Error;
fn try_from(value: DesktopWindowTarget) -> ::windows::core::Result<Self> {
::core::convert::TryFrom::try_from(&value)
}
}
#[cfg(feature = "Foundation")]
impl ::core::convert::TryFrom<&DesktopWindowTarget> for super::super::super::Foundation::IClosable {
type Error = ::windows::core::Error;
fn try_from(value: &DesktopWindowTarget) -> ::windows::core::Result<Self> {
::windows::core::Interface::cast(value)
}
}
#[cfg(feature = "Foundation")]
impl<'a> ::windows::core::IntoParam<'a, super::super::super::Foundation::IClosable> for DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, super::super::super::Foundation::IClosable> {
::windows::core::IntoParam::into_param(&self)
}
}
#[cfg(feature = "Foundation")]
impl<'a> ::windows::core::IntoParam<'a, super::super::super::Foundation::IClosable> for &DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, super::super::super::Foundation::IClosable> {
::core::convert::TryInto::<super::super::super::Foundation::IClosable>::try_into(self).map(::windows::core::Param::Owned).unwrap_or(::windows::core::Param::None)
}
}
impl ::core::convert::TryFrom<DesktopWindowTarget> for super::IAnimationObject {
type Error = ::windows::core::Error;
fn try_from(value: DesktopWindowTarget) -> ::windows::core::Result<Self> {
::core::convert::TryFrom::try_from(&value)
}
}
impl ::core::convert::TryFrom<&DesktopWindowTarget> for super::IAnimationObject {
type Error = ::windows::core::Error;
fn try_from(value: &DesktopWindowTarget) -> ::windows::core::Result<Self> {
::windows::core::Interface::cast(value)
}
}
impl<'a> ::windows::core::IntoParam<'a, super::IAnimationObject> for DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, super::IAnimationObject> {
::windows::core::IntoParam::into_param(&self)
}
}
impl<'a> ::windows::core::IntoParam<'a, super::IAnimationObject> for &DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, super::IAnimationObject> {
::core::convert::TryInto::<super::IAnimationObject>::try_into(self).map(::windows::core::Param::Owned).unwrap_or(::windows::core::Param::None)
}
}
// Generated conversions to base-class wrappers (CompositionTarget,
// CompositionObject). These use `Interface::cast(..).unwrap()` — the
// generated code treats the cast as infallible, presumably because the
// runtime class is declared to derive from these classes.
impl ::core::convert::From<DesktopWindowTarget> for super::CompositionTarget {
fn from(value: DesktopWindowTarget) -> Self {
::core::convert::Into::<super::CompositionTarget>::into(&value)
}
}
impl ::core::convert::From<&DesktopWindowTarget> for super::CompositionTarget {
fn from(value: &DesktopWindowTarget) -> Self {
::windows::core::Interface::cast(value).unwrap()
}
}
impl<'a> ::windows::core::IntoParam<'a, super::CompositionTarget> for DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, super::CompositionTarget> {
::windows::core::Param::Owned(::core::convert::Into::<super::CompositionTarget>::into(self))
}
}
impl<'a> ::windows::core::IntoParam<'a, super::CompositionTarget> for &DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, super::CompositionTarget> {
::windows::core::Param::Owned(::core::convert::Into::<super::CompositionTarget>::into(::core::clone::Clone::clone(self)))
}
}
impl ::core::convert::From<DesktopWindowTarget> for super::CompositionObject {
fn from(value: DesktopWindowTarget) -> Self {
::core::convert::Into::<super::CompositionObject>::into(&value)
}
}
impl ::core::convert::From<&DesktopWindowTarget> for super::CompositionObject {
fn from(value: &DesktopWindowTarget) -> Self {
::windows::core::Interface::cast(value).unwrap()
}
}
impl<'a> ::windows::core::IntoParam<'a, super::CompositionObject> for DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, super::CompositionObject> {
::windows::core::Param::Owned(::core::convert::Into::<super::CompositionObject>::into(self))
}
}
impl<'a> ::windows::core::IntoParam<'a, super::CompositionObject> for &DesktopWindowTarget {
fn into_param(self) -> ::windows::core::Param<'a, super::CompositionObject> {
::windows::core::Param::Owned(::core::convert::Into::<super::CompositionObject>::into(::core::clone::Clone::clone(self)))
}
}
// Generated marker impls declaring the wrapper safe to move to and share
// between threads.
unsafe impl ::core::marker::Send for DesktopWindowTarget {}
unsafe impl ::core::marker::Sync for DesktopWindowTarget {}
// Interface wrapper for IDesktopWindowTarget: a transparent newtype over
// IInspectable, paired with its vtable layout and IID.
#[repr(transparent)]
#[doc(hidden)]
pub struct IDesktopWindowTarget(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IDesktopWindowTarget {
type Vtable = IDesktopWindowTarget_abi;
const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x6329d6ca_3366_490e_9db3_25312929ac51);
}
// Raw C vtable layout for IDesktopWindowTarget.
// Slots 0-2: IUnknown (QueryInterface, AddRef, Release).
// Slots 3-5: IInspectable (GetIids, GetRuntimeClassName, GetTrustLevel).
// Slot 6: the interface's own method (returns a bool through an out-param).
#[repr(C)]
#[doc(hidden)]
pub struct IDesktopWindowTarget_abi(
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut bool) -> ::windows::core::HRESULT,
);
|
//! Display vector graphics in your application.
use crate::{layout, Element, Hasher, Layout, Length, Point, Size, Widget};
use std::{
hash::Hash,
path::{Path, PathBuf},
};
/// A vector graphics image.
///
/// An [`Svg`] image resizes smoothly without losing any quality.
///
/// [`Svg`] images can have a considerable rendering cost when resized,
/// specially when they are complex.
///
/// [`Svg`]: struct.Svg.html
#[derive(Debug, Clone)]
pub struct Svg {
// Identifies the SVG data to render (see [`Handle`]).
handle: Handle,
// Layout constraints used when the widget computes its size.
width: Length,
height: Length,
}
impl Svg {
    /// Creates a new [`Svg`] from the given [`Handle`].
    ///
    /// Both dimensions default to [`Length::Fill`].
    ///
    /// [`Svg`]: struct.Svg.html
    /// [`Handle`]: struct.Handle.html
    pub fn new(handle: impl Into<Handle>) -> Self {
        Self {
            handle: handle.into(),
            width: Length::Fill,
            height: Length::Fill,
        }
    }

    /// Sets the width of the [`Svg`].
    ///
    /// [`Svg`]: struct.Svg.html
    pub fn width(self, width: Length) -> Self {
        Self { width, ..self }
    }

    /// Sets the height of the [`Svg`].
    ///
    /// [`Svg`]: struct.Svg.html
    pub fn height(self, height: Length) -> Self {
        Self { height, ..self }
    }
}
impl<Message, Renderer> Widget<Message, Renderer> for Svg
where
    Renderer: self::Renderer,
{
    fn width(&self) -> Length {
        self.width
    }

    fn height(&self) -> Length {
        self.height
    }

    fn layout(
        &self,
        renderer: &Renderer,
        limits: &layout::Limits,
    ) -> layout::Node {
        // Intrinsic dimensions reported by the renderer.
        let (width, height) = renderer.dimensions(&self.handle);
        let (width, height) = (width as f32, height as f32);
        let image_aspect_ratio = width / height;

        let mut size = limits
            .width(self.width)
            .height(self.height)
            .resolve(Size::new(width, height));

        // Shrink one dimension so the image's aspect ratio is preserved
        // inside the resolved viewport.
        if size.width / size.height > image_aspect_ratio {
            size.width = width * size.height / height;
        } else {
            size.height = height * size.width / width;
        }

        layout::Node::new(size)
    }

    fn draw(
        &self,
        renderer: &mut Renderer,
        layout: Layout<'_>,
        _cursor_position: Point,
    ) -> Renderer::Output {
        renderer.draw(self.handle.clone(), layout)
    }

    fn hash_layout(&self, state: &mut Hasher) {
        // Only the layout-relevant fields participate in the hash.
        self.width.hash(state);
        self.height.hash(state);
    }
}
/// An [`Svg`] handle.
///
/// [`Svg`]: struct.Svg.html
#[derive(Debug, Clone)]
pub struct Handle {
// Identifier derived from hashing `path` (see `from_path`).
id: u64,
// Filesystem location of the vector image.
path: PathBuf,
}
impl Handle {
    /// Creates an SVG [`Handle`] pointing to the vector image of the given
    /// path.
    ///
    /// The identifier is derived from the path itself, so handles created
    /// from equal paths share the same id.
    ///
    /// [`Handle`]: struct.Handle.html
    pub fn from_path<T: Into<PathBuf>>(path: T) -> Handle {
        use std::hash::Hasher as _;

        let path = path.into();

        let mut hasher = Hasher::default();
        path.hash(&mut hasher);
        let id = hasher.finish();

        Handle { id, path }
    }

    /// Returns the unique identifier of the [`Handle`].
    ///
    /// [`Handle`]: struct.Handle.html
    pub fn id(&self) -> u64 {
        self.id
    }

    /// Returns a reference to the path of the [`Handle`].
    ///
    /// [`Handle`]: struct.Handle.html
    pub fn path(&self) -> &Path {
        self.path.as_path()
    }
}
// Convenience conversions so a plain path string can be used wherever an
// `impl Into<Handle>` is accepted (e.g. `Svg::new`).
impl From<String> for Handle {
fn from(path: String) -> Handle {
Handle::from_path(path)
}
}
impl From<&str> for Handle {
fn from(path: &str) -> Handle {
Handle::from_path(path)
}
}
/// The renderer of an [`Svg`].
///
/// Your [renderer] will need to implement this trait before being able to use
/// an [`Svg`] in your user interface.
///
/// [`Svg`]: struct.Svg.html
/// [renderer]: ../../renderer/index.html
pub trait Renderer: crate::Renderer {
/// Returns the default dimensions of an [`Svg`] located on the given path.
///
/// The `(width, height)` pair is used as the intrinsic size during layout.
///
/// [`Svg`]: struct.Svg.html
fn dimensions(&self, handle: &Handle) -> (u32, u32);
/// Draws an [`Svg`].
///
/// [`Svg`]: struct.Svg.html
fn draw(&mut self, handle: Handle, layout: Layout<'_>) -> Self::Output;
}
// Allows an `Svg` to be placed directly into a widget tree by wrapping it
// in an `Element`.
impl<'a, Message, Renderer> From<Svg> for Element<'a, Message, Renderer>
where
    Renderer: self::Renderer,
{
fn from(icon: Svg) -> Element<'a, Message, Renderer> {
Element::new(icon)
}
}
|
use data_types::{NamespaceId, PartitionKey, SequenceNumber, TableId};
use generated_types::influxdata::iox::wal::v1::sequenced_wal_op::Op;
use metric::U64Counter;
use mutable_batch_pb::decode::decode_database_batch;
use observability_deps::tracing::*;
use std::time::Instant;
use thiserror::Error;
use wal::{SequencedWalOp, Wal};
use crate::{
dml_payload::write::{PartitionedData, TableData, WriteOperation},
dml_payload::IngestOp,
dml_sink::{DmlError, DmlSink},
partition_iter::PartitionIter,
persist::{drain_buffer::persist_partitions, queue::PersistQueue},
};
/// Errors returned when replaying the write-ahead log.
///
/// The `#[from]` variants allow `?` to convert decode and sink errors
/// directly into this type.
#[derive(Debug, Error)]
pub enum WalReplayError {
/// An error initialising a segment file reader.
#[error("failed to open wal segment for replay: {0}")]
OpenSegment(wal::Error),
/// An error when attempting to read an entry from the WAL.
#[error("failed to read wal entry: {0}")]
ReadEntry(wal::Error),
/// An error converting the WAL entry into a [`IngestOp`].
#[error("failed converting wal entry to ingest operation: {0}")]
MapToDml(#[from] mutable_batch_pb::decode::Error),
/// A failure to apply a [`IngestOp`] from the WAL to the in-memory
/// [`BufferTree`].
///
/// [`BufferTree`]: crate::buffer_tree::BufferTree
#[error("failed to apply op: {0}")]
Apply(#[from] DmlError),
}
// TODO: tolerate WAL replay errors
//
// https://github.com/influxdata/influxdb_iox/issues/6283
/// Replay all the entries in `wal` to `sink`, returning the maximum observed
/// [`SequenceNumber`].
///
/// Empty segment files are deleted without persisting; after each non-empty
/// segment is replayed, its buffered data is persisted and the segment file
/// deleted.
///
/// # Errors
///
/// Returns an error if a segment cannot be opened, an entry cannot be read
/// or decoded, or `sink` rejects an operation.
///
/// # Panics
///
/// Panics if a replayed (non-empty) segment file cannot be deleted after its
/// data has been persisted.
pub async fn replay<T, P>(
wal: &Wal,
sink: &T,
persist: P,
metrics: &metric::Registry,
) -> Result<Option<SequenceNumber>, WalReplayError>
where
T: DmlSink + PartitionIter,
P: PersistQueue + Clone,
{
// Read the set of files to replay.
//
// The WAL yields files ordered from oldest to newest, ensuring the ordering
// of this replay is correct.
let files = wal.closed_segments();
if files.is_empty() {
info!("no wal replay files found");
return Ok(None);
}
// Initialise metrics to track the progress of the WAL replay.
//
// The file count tracks the number of WAL files that have started
// replaying, as opposed to finished replaying - this gives us the ability
// to monitor WAL replays that hang or otherwise go wrong.
let file_count_metric = metrics
.register_metric::<U64Counter>(
"ingester_wal_replay_files_started",
"Number of WAL files that have started to be replayed",
)
.recorder(&[]);
let op_count_metric = metrics.register_metric::<U64Counter>(
"ingester_wal_replay_ops",
"Number of operations replayed from the WAL",
);
let ok_op_count_metric = op_count_metric.recorder(&[("outcome", "success")]);
let empty_op_count_metric = op_count_metric.recorder(&[("outcome", "skipped_empty")]);
let n_files = files.len();
info!(n_files, "found wal files for replay");
// Replay each file, keeping track of the last observed sequence number.
//
// Applying writes to the buffer can only happen monotonically and this is
// enforced within the buffer.
let mut max_sequence = None;
for (index, file) in files.into_iter().enumerate() {
// Map 0-based iter index to 1 based file count
let file_number = index + 1;
file_count_metric.inc(1);
// Read the segment
let reader = wal
.reader_for_segment(file.id())
.map_err(WalReplayError::OpenSegment)?;
// Emit a log entry so progress can be tracked (and a problematic file
// be identified should an explosion happen during replay).
info!(
file_number,
n_files,
file_id = %file.id(),
size = file.size(),
"replaying wal file"
);
// Replay this segment file
match replay_file(reader, sink, &ok_op_count_metric, &empty_op_count_metric).await? {
v @ Some(_) => max_sequence = max_sequence.max(v),
None => {
// This file was empty and should be deleted.
warn!(
file_number,
n_files,
file_id = %file.id(),
size = file.size(),
"dropping empty wal segment",
);
// TODO(test): empty WAL replay
// A failure to delete an empty file should not prevent WAL
// replay from continuing.
if let Err(error) = wal.delete(file.id()).await {
error!(
file_number,
n_files,
file_id = %file.id(),
size = file.size(),
%error,
"error dropping empty wal segment",
);
}
continue;
}
};
info!(
file_number,
n_files,
file_id = %file.id(),
size = file.size(),
"persisting wal segment data"
);
// Persist all the data that was replayed from the WAL segment.
persist_partitions(sink.partition_iter(), &persist).await;
// Drop the newly persisted data - it should not be replayed.
wal.delete(file.id())
.await
.expect("failed to drop wal segment");
info!(
file_number,
n_files,
file_id = %file.id(),
size = file.size(),
"dropped persisted wal segment"
);
}
info!(
max_sequence_number = ?max_sequence,
"wal replay complete"
);
Ok(max_sequence)
}
/// Replay the entries in `file`, applying them to `buffer`. Returns the highest
/// sequence number observed in the file, or [`None`] if the file was empty.
///
/// # Errors
///
/// Returns an error if an entry cannot be read or decoded, or if `sink`
/// rejects an operation.
///
/// # Panics
///
/// Panics if a WAL op contains a table without a recorded sequence number.
async fn replay_file<T>(
    file: wal::ClosedSegmentFileReader,
    sink: &T,
    ok_op_count_metric: &U64Counter,
    empty_op_count_metric: &U64Counter,
) -> Result<Option<SequenceNumber>, WalReplayError>
where
    T: DmlSink,
{
    let mut max_sequence = None;
    let start = Instant::now();
    for batch in file {
        let ops = batch.map_err(WalReplayError::ReadEntry)?;
        for op in ops {
            let SequencedWalOp {
                table_write_sequence_numbers,
                op,
            } = op;
            // Only write ops are ever written to the WAL by this process.
            let op = match op {
                Op::Write(w) => w,
                Op::Delete(_) => unreachable!(),
                Op::Persist(_) => unreachable!(),
            };
            // Min/max sequence numbers observed within this single op, for
            // the debug log below.
            let mut op_min_sequence_number = None;
            let mut op_max_sequence_number = None;
            // Reconstruct the ingest operation
            let batches = decode_database_batch(&op)?;
            let namespace_id = NamespaceId::new(op.database_id);
            let partition_key = PartitionKey::from(op.partition_key);
            if batches.is_empty() {
                warn!(%namespace_id, "encountered wal op containing no table data, skipping replay");
                empty_op_count_metric.inc(1);
                continue;
            }
            let op = WriteOperation::new(
                namespace_id,
                batches
                    .into_iter()
                    .map(|(k, v)| {
                        let table_id = TableId::new(k);
                        let sequence_number = SequenceNumber::new(
                            *table_write_sequence_numbers
                                .get(&table_id)
                                .expect("attempt to apply unsequenced wal op"),
                        );
                        max_sequence = max_sequence.max(Some(sequence_number));
                        // BUG FIX: `None.min(Some(_))` evaluates to `None`
                        // (`None` orders before any `Some`), so the previous
                        // `op_min_sequence_number.min(Some(..))` never
                        // recorded a value and the `.expect()` in the debug
                        // log below would panic. Seed the minimum on first
                        // sight instead.
                        op_min_sequence_number = Some(
                            op_min_sequence_number
                                .unwrap_or(sequence_number)
                                .min(sequence_number),
                        );
                        // BUG FIX: fold the maximum into the previous
                        // maximum, not into the minimum tracker.
                        op_max_sequence_number =
                            op_max_sequence_number.max(Some(sequence_number));
                        (
                            table_id,
                            TableData::new(table_id, PartitionedData::new(sequence_number, v)),
                        )
                    })
                    .collect(),
                partition_key,
                // TODO: A tracing context should be added for WAL replay.
                None,
            );
            debug!(
                ?op,
                op_min_sequence_number = op_min_sequence_number
                    .expect("attempt to apply unsequenced wal op")
                    .get(),
                op_max_sequence_number = op_max_sequence_number
                    .expect("attempt to apply unsequenced wal op")
                    .get(),
                "apply wal op"
            );
            // Apply the operation to the provided DML sink
            sink.apply(IngestOp::Write(op))
                .await
                .map_err(Into::<DmlError>::into)?;
            ok_op_count_metric.inc(1);
        }
    }
    // This file is complete, return the last observed sequence
    // number.
    debug!("wal file replayed in {:?}", start.elapsed());
    Ok(max_sequence)
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use assert_matches::assert_matches;
use async_trait::async_trait;
use metric::{Attributes, Metric};
use parking_lot::Mutex;
use wal::Wal;
use crate::{
buffer_tree::partition::PartitionData,
dml_payload::IngestOp,
dml_sink::mock_sink::MockDmlSink,
persist::queue::mock::MockPersistQueue,
test_util::{
assert_write_ops_eq, make_multi_table_write_op, make_write_op, PartitionDataBuilder,
ARBITRARY_NAMESPACE_ID, ARBITRARY_PARTITION_KEY, ARBITRARY_TABLE_ID,
ARBITRARY_TABLE_NAME, ARBITRARY_TRANSITION_PARTITION_ID,
},
wal::wal_sink::{mock::MockUnbufferedWriteNotifier, WalSink},
};
use super::*;
// Test double combining a MockDmlSink (records applied ops) with a fixed
// set of partitions handed to the persistence step during replay.
#[derive(Debug)]
struct MockIter {
sink: MockDmlSink,
partitions: Vec<Arc<Mutex<PartitionData>>>,
}
impl PartitionIter for MockIter {
fn partition_iter(&self) -> Box<dyn Iterator<Item = Arc<Mutex<PartitionData>>> + Send> {
Box::new(self.partitions.clone().into_iter())
}
}
#[async_trait]
impl DmlSink for MockIter {
type Error = <MockDmlSink as DmlSink>::Error;
async fn apply(&self, op: IngestOp) -> Result<(), Self::Error> {
// Delegate to the inner mock so calls are recorded.
self.sink.apply(op).await
}
}
// Second table name used by the multi-table write op below.
const ALTERNATIVE_TABLE_NAME: &str = "arán";
// End-to-end replay test: write ops through a WalSink, re-open the WAL,
// replay into mocks, then verify the applied ops, persisted partitions,
// segment cleanup and replay metrics.
#[tokio::test]
async fn test_replay() {
let dir = tempfile::tempdir().unwrap();
// Generate the test ops that will be appended and read back
let op1 = make_write_op(
&ARBITRARY_PARTITION_KEY,
ARBITRARY_NAMESPACE_ID,
&ARBITRARY_TABLE_NAME,
ARBITRARY_TABLE_ID,
24,
&format!(
r#"{},region=Madrid temp=35 4242424242"#,
&*ARBITRARY_TABLE_NAME
),
None,
);
let op2 = make_write_op(
&ARBITRARY_PARTITION_KEY,
ARBITRARY_NAMESPACE_ID,
&ARBITRARY_TABLE_NAME,
ARBITRARY_TABLE_ID,
25,
&format!(
r#"{},region=Asturias temp=25 4242424242"#,
&*ARBITRARY_TABLE_NAME
),
None,
);
// Add a write hitting multiple tables for good measure
let op3 = make_multi_table_write_op(
&ARBITRARY_PARTITION_KEY,
ARBITRARY_NAMESPACE_ID,
[
(
ARBITRARY_TABLE_NAME.to_string().as_str(),
ARBITRARY_TABLE_ID,
SequenceNumber::new(42),
),
(
ALTERNATIVE_TABLE_NAME,
TableId::new(ARBITRARY_TABLE_ID.get() + 1),
SequenceNumber::new(43),
),
]
.into_iter(),
// Overwrite op2
&format!(
r#"{},region=Asturias temp=15 4242424242
{},region=Mayo temp=12 4242424242"#,
&*ARBITRARY_TABLE_NAME, ALTERNATIVE_TABLE_NAME,
),
);
// Emulate a mid-write crash by inserting an op with no data
let empty_op = WriteOperation::new_empty_invalid(
ARBITRARY_NAMESPACE_ID,
ARBITRARY_PARTITION_KEY.clone(),
);
// The write portion of this test.
//
// Write two ops, rotate the file, and write a third op.
{
let inner = Arc::new(MockDmlSink::default().with_apply_return(vec![
Ok(()),
Ok(()),
Ok(()),
Ok(()),
]));
let wal = Wal::new(dir.path())
.await
.expect("failed to initialise WAL");
let notifier_handle = Arc::new(MockUnbufferedWriteNotifier::default());
let wal_sink = WalSink::new(
Arc::clone(&inner),
Arc::clone(&wal),
Arc::clone(&notifier_handle),
);
// Apply the first op through the decorator
wal_sink
.apply(IngestOp::Write(op1.clone()))
.await
.expect("wal should not error");
// And the second op
wal_sink
.apply(IngestOp::Write(op2.clone()))
.await
.expect("wal should not error");
// Rotate the log file
wal.rotate().expect("failed to rotate WAL file");
// Write the third op
wal_sink
.apply(IngestOp::Write(op3.clone()))
.await
.expect("wal should not error");
// Write the empty op
wal_sink
.apply(IngestOp::Write(empty_op))
.await
.expect("wal should not error");
// Assert the mock inner sink saw the calls
assert_eq!(inner.get_calls().len(), 4);
}
// Reinitialise the WAL
let wal = Wal::new(dir.path())
.await
.expect("failed to initialise WAL");
assert_eq!(wal.closed_segments().len(), 2);
// Initialise the mock persist system
let persist = Arc::new(MockPersistQueue::default());
// Replay the results into a mock to capture the DmlWrites and returns
// some dummy partitions when iterated over.
let mock_sink = MockDmlSink::default().with_apply_return(vec![Ok(()), Ok(()), Ok(())]);
let mut partition = PartitionDataBuilder::new().build();
// Put at least one write into the buffer so it is a candidate for persistence
partition
.buffer_write(
op1.tables()
.next()
.unwrap()
.1
.partitioned_data()
.data()
.clone(),
SequenceNumber::new(1),
)
.unwrap();
let mock_iter = MockIter {
sink: mock_sink,
partitions: vec![Arc::new(Mutex::new(partition))],
};
let metrics = metric::Registry::default();
let max_sequence_number = replay(&wal, &mock_iter, Arc::clone(&persist), &metrics)
.await
.expect("failed to replay WAL");
assert_eq!(max_sequence_number, Some(SequenceNumber::new(43)));
// Assert the ops were pushed into the DmlSink exactly as generated,
// barring the empty op which is skipped
let ops = mock_iter.sink.get_calls();
assert_matches!(
&*ops,
&[
IngestOp::Write(ref w1),
IngestOp::Write(ref w2),
IngestOp::Write(ref w3),
] => {
assert_write_ops_eq(w1.clone(), op1);
assert_write_ops_eq(w2.clone(), op2);
assert_write_ops_eq(w3.clone(), op3);
}
);
// Ensure all partitions were persisted
let calls = persist.calls();
assert_matches!(&*calls, [p] => {
assert_eq!(p.lock().partition_id(), &*ARBITRARY_TRANSITION_PARTITION_ID);
});
// Ensure there were no partition persist panics.
Arc::try_unwrap(persist)
.expect("should be no more refs")
.join()
.await;
// Ensure the replayed segments were dropped
let wal = Wal::new(dir.path())
.await
.expect("failed to initialise WAL");
assert_eq!(wal.closed_segments().len(), 1);
// Validate the expected metric values were populated.
let files = metrics
.get_instrument::<Metric<U64Counter>>("ingester_wal_replay_files_started")
.expect("file counter not found")
.get_observer(&Attributes::from([]))
.expect("attributes not found")
.fetch();
assert_eq!(files, 2);
let ops = metrics
.get_instrument::<Metric<U64Counter>>("ingester_wal_replay_ops")
.expect("file counter not found")
.get_observer(&Attributes::from(&[("outcome", "success")]))
.expect("attributes not found")
.fetch();
assert_eq!(ops, 3);
let ops = metrics
.get_instrument::<Metric<U64Counter>>("ingester_wal_replay_ops")
.expect("file counter not found")
.get_observer(&Attributes::from(&[("outcome", "skipped_empty")]))
.expect("attributes not found")
.fetch();
assert_eq!(ops, 1);
}
}
|
/// Sums every natural number below `limit` that is divisible by 3 or 5
/// (Project Euler problem 1).
fn sum_of_multiples(limit: u32) -> u32 {
    (1..limit).filter(|i| i % 3 == 0 || i % 5 == 0).sum()
}

fn main() {
    // Upper bound retained from the original implementation (exclusive).
    println!("Sum = {}", sum_of_multiples(1000));
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use clap::Parser;
// Add options when run sqllogictest, such as specific dir or file
// CLI options for the sqllogictest runner. `//` comments are used (not
// `///`) so clap's derive keeps the explicit `help = ...` strings as the
// sole source of user-facing help text.
#[derive(Parser, Debug, Clone)]
pub struct SqlLogicTestArgs {
// Optional filter: only run tests found under this directory.
#[arg(
short = 'd',
long = "run_dir",
help = "Run sqllogictests in specific directory, the arg is optional"
)]
pub dir: Option<String>,
// Optional filter: only run this specific test file.
#[arg(
short = 'f',
long = "run_file",
help = "Run sqllogictests in specific test file, the arg is optional"
)]
pub file: Option<String>,
// Optional filter: skip all tests under this directory.
#[arg(
short = 's',
long = "skip_dir",
help = "Skip sqllogictests in specific directory, the arg is optional"
)]
pub skipped_dir: Option<String>,
// Comma-separated list of protocol handlers to exercise.
#[arg(
short = 'l',
long = "handlers",
use_value_delimiter = true,
value_delimiter = ',',
help = "Choose handlers to run tests, support mysql, http, clickhouse handler, the arg is optional. If use multiple handlers, please use \',\' to split them"
)]
pub handlers: Option<Vec<String>>,
// Root directory the test suites are loaded from.
#[arg(
short = 'u',
long = "suites",
help = "The tests to be run will come from under suits",
default_value = "tests/sqllogictests/suites"
)]
pub suites: String,
// Flag: regenerate expected output instead of comparing against it.
#[arg(
short = 'c',
long = "complete",
default_missing_value = "true",
help = "The arg is used to enable auto complete mode"
)]
pub complete: bool,
// Flag: keep running after the first failure.
#[arg(
long = "no-fail-fast",
default_missing_value = "true",
help = "The arg is used to cancel fast fail"
)]
pub no_fail_fast: bool,
// Number of tests to run concurrently (defaults to serial execution).
#[arg(
short = 'p',
long = "parallel",
default_value_t = 1,
help = "The arg is used to set parallel number"
)]
pub parallel: usize,
// Flag: run tests under a sandbox tenant.
#[arg(
long = "enable_sandbox",
default_missing_value = "true",
help = "The arg is used to enable sandbox_tenant"
)]
pub enable_sandbox: bool,
// Flag: emit extra diagnostic output while running.
#[arg(
long = "debug",
default_missing_value = "true",
help = "The arg is used to enable debug mode which would print some debug messages"
)]
pub debug: bool,
// Flag: run the TPC-H benchmark suite.
#[arg(
long = "tpch",
default_missing_value = "true",
help = "The arg is used to enable tpch benchmark"
)]
pub tpch: bool,
}
|
// Re-export the variants so call sites can use the bare C-style names.
pub use VkCompareOp::*;
/// Comparison operators mirroring the Vulkan `VkCompareOp` C enum; the
/// discriminant values match the C definition and the `#[repr(u32)]` keeps
/// the ABI layout identical.
#[repr(u32)]
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub enum VkCompareOp {
VK_COMPARE_OP_NEVER = 0,
VK_COMPARE_OP_LESS = 1,
VK_COMPARE_OP_EQUAL = 2,
VK_COMPARE_OP_LESS_OR_EQUAL = 3,
VK_COMPARE_OP_GREATER = 4,
VK_COMPARE_OP_NOT_EQUAL = 5,
VK_COMPARE_OP_GREATER_OR_EQUAL = 6,
VK_COMPARE_OP_ALWAYS = 7,
}
|
use glyph_bbox::dataset as GlyphDataSet;
use htmlescape as escape;
use liquid;
use rust_embed::RustEmbed;
use simple_icons;
// Embedded static assets (e.g. `template.svg`) compiled in from
// `assets/badges` via rust-embed.
#[derive(RustEmbed)]
#[folder = "assets/badges"]
struct Asset;
/// Deserialized badge request. All optional fields are filled with defaults
/// by `validate_n_populate` before rendering.
#[derive(Debug, Deserialize)]
pub struct SvgBadgeInput {
// Left-hand label (HTML-escaped during sanitisation).
pub title: String,
// Optional right-hand text (HTML-escaped during sanitisation).
pub text: Option<String>,
// Colours must be `#`-prefixed values; see `validate_colours` for defaults.
pub title_colour: Option<String>,
pub text_colour: Option<String>,
pub title_bg_colour: Option<String>,
pub text_bg_colour: Option<String>,
// Defaults come from the Factory's glyph dataset; see `validate_font`.
pub font_face: Option<GlyphDataSet::FontFace>,
pub font_size: Option<GlyphDataSet::FontSize>,
// Defaults are derived from the font size; see `validate_padding`.
pub padding_horizontal: Option<f64>,
pub padding_vertical: Option<f64>,
// Optional simple-icons icon name plus its colour and scale.
pub icon: Option<String>,
pub icon_colour: Option<String>,
pub icon_scale: Option<String>,
}
impl SvgBadgeInput {
    /// Validates the input, fills defaults for missing optional fields, and
    /// HTML-escapes the user-supplied strings.
    ///
    /// Note: `Result::and` evaluates its argument eagerly, so every step
    /// below runs even when an earlier one fails; the left-most error is
    /// the one returned. `validate_font` must stay first because the
    /// padding defaults are derived from the (now populated) font size.
    pub fn validate_n_populate(&mut self, factory: &Factory) -> Result<(), String> {
        self.validate_font(factory)
            .and(self.validate_colours())
            .and(self.validate_padding())
            .and(self.validate_icon())
            .and(self.sanitize_input())
    }

    /// HTML-escapes `title` and `text` so they can be embedded safely in
    /// the SVG markup.
    pub fn sanitize_input(&mut self) -> Result<(), String> {
        self.title = escape::encode_minimal(&self.title);
        if let Some(text) = &self.text {
            self.text = Some(escape::encode_minimal(text));
        }
        Ok(())
    }

    /// Defaults the icon scale and verifies the icon exists in the
    /// simple-icons set. A missing icon is not an error.
    pub fn validate_icon(&mut self) -> Result<(), String> {
        let icon = match &self.icon {
            Some(icon) => icon.clone(),
            None => return Ok(()),
        };
        if self.icon_scale.is_none() {
            self.icon_scale = Some(String::from("0.9"));
        }
        match simple_icons::get(&icon) {
            Some(_) => Ok(()),
            None => Err(String::from("invalid icon")),
        }
    }

    /// Derives missing paddings from the font size.
    ///
    /// Assumes `validate_font` has already populated `font_size`; panics
    /// otherwise (same contract as before).
    pub fn validate_padding(&mut self) -> Result<(), String> {
        if self.padding_horizontal.is_none() {
            self.padding_horizontal =
                Some(self.font_size.as_ref().unwrap().parse::<f64>().unwrap() / 2.0);
        }
        if self.padding_vertical.is_none() {
            self.padding_vertical =
                Some(self.font_size.as_ref().unwrap().parse::<f64>().unwrap() / 8.0);
        }
        Ok(())
    }

    /// Fills missing font face/size from the factory defaults and checks
    /// the combination is supported by the render dataset.
    pub fn validate_font(&mut self, f: &Factory) -> Result<(), String> {
        if self.font_face.is_none() {
            self.font_face = Some(f.default_font_face());
        }
        if self.font_size.is_none() {
            self.font_size = Some(f.default_font_size());
        }
        if f.supports_font(
            self.font_face.clone().unwrap(),
            self.font_size.clone().unwrap(),
        ) {
            Ok(())
        } else {
            Err("unsupported font".into())
        }
    }

    /// Fills missing colours with defaults (icon colour follows the title
    /// colour) and rejects any colour that is not a `#`-prefixed value.
    pub fn validate_colours(&mut self) -> Result<(), String> {
        if self.title_colour.is_none() {
            self.title_colour = Some(String::from("#fff"));
        }
        if self.title_bg_colour.is_none() {
            self.title_bg_colour = Some(String::from("#000"));
        }
        if self.text_colour.is_none() {
            self.text_colour = Some(String::from("#000"));
        }
        if self.text_bg_colour.is_none() {
            self.text_bg_colour = Some(String::from("#fff"));
        }
        if self.icon_colour.is_none() {
            self.icon_colour = self.title_colour.clone();
        }
        // Every field above is Some at this point; iterate by reference
        // instead of cloning five Strings into a Vec.
        for field in [
            &self.title_colour,
            &self.title_bg_colour,
            &self.text_colour,
            &self.text_bg_colour,
            &self.icon_colour,
        ]
        .iter()
        {
            let colour = field.as_ref().unwrap();
            if !colour.starts_with('#') {
                return Err(format!("invalid colour: {}", colour));
            }
        }
        Ok(())
    }
}
/// Configuration for constructing a [`Factory`].
#[derive(Clone)]
pub struct FactoryOptions {
// Glyph bounding-box dataset used to measure rendered text.
pub render_dataset: GlyphDataSet::DataSet,
// Public host name used to build the badge endpoint URL.
pub host: String,
}
/// Renders SVG badges from the embedded liquid template.
#[derive(Clone)]
pub struct Factory {
opts: FactoryOptions,
// Raw template source loaded from the embedded assets.
svg_template: String,
}
impl Factory {
/// Builds a Factory, loading the embedded `template.svg`.
///
/// The template is parsed once here purely as an early validation step —
/// the parsed value is discarded and `template()` re-parses on demand.
/// Panics if the embedded asset is missing, not UTF-8, or fails to parse.
pub fn new(opts: FactoryOptions) -> Factory {
info!("building factory");
let svg_template: String =
std::str::from_utf8(Asset::get("template.svg").unwrap().as_ref())
.unwrap()
.into();
liquid::ParserBuilder::with_stdlib()
.build()
.unwrap()
.parse(&svg_template.clone())
.unwrap();
Factory { opts, svg_template }
}
/// Returns the public URL of the badge-rendering endpoint for this host.
pub fn render_endpoint(&self) -> String {
format!("{}/v1/badge.svg", self.opts.host)
}
/// Returns all font faces available in the render dataset.
pub fn font_faces(&self) -> GlyphDataSet::FontFaces {
self.opts.render_dataset.config.font.faces.clone()
}
/// Returns all font sizes available in the render dataset.
pub fn font_sizes(&self) -> GlyphDataSet::FontSizes {
self.opts.render_dataset.config.font.sizes.clone()
}
/// Returns the default font face (first entry in the dataset).
pub fn default_font_face(&self) -> GlyphDataSet::FontFace {
self.opts.render_dataset.config.font.faces[0].clone()
}
/// Returns the default font size (first entry in the dataset).
pub fn default_font_size(&self) -> GlyphDataSet::FontSize {
self.opts.render_dataset.config.font.sizes[0].clone()
}
/// Returns true when both the face and the size are present in the
/// render dataset's font configuration.
pub fn supports_font(
&self,
face: GlyphDataSet::FontFace,
size: GlyphDataSet::FontSize,
) -> bool {
self.opts.render_dataset.config.font.faces.contains(&face)
&& self.opts.render_dataset.config.font.sizes.contains(&size)
}
/// Parses and returns the badge template.
///
/// Re-parses the stored source on every call; the unwraps are considered
/// safe because `new()` already validated the same source at startup.
pub fn template(&self) -> liquid::Template {
liquid::ParserBuilder::with_stdlib()
.build()
.unwrap()
.parse(&self.svg_template)
.unwrap()
}
/// Renders a red "error" badge whose text is `s`, using defaults for every
/// other input. Unwraps because error-badge inputs are fixed and expected
/// to always validate.
pub fn render_error_badge(&self, s: String) -> String {
self.render_svg(SvgBadgeInput {
title: "error".to_string(),
text: Option::from(s),
title_colour: None,
text_colour: None,
title_bg_colour: Option::from(String::from("#c0392b")),
text_bg_colour: None,
font_face: None,
font_size: None,
padding_horizontal: None,
padding_vertical: None,
icon: None,
icon_colour: None,
icon_scale: None,
})
.unwrap()
}
pub fn render_svg(&self, mut input: SvgBadgeInput) -> Result<String, String> {
let r = input.validate_n_populate(self);
if r.is_err() {
return Err(r.err().unwrap());
}
let title_bbox = self.opts.render_dataset.bounding_box(
&input.title,
GlyphDataSet::BoundingBoxRenderOptions {
face: input.font_face.clone().unwrap(),
size: input.font_size.clone().unwrap(),
},
);
let text_bbox = if input.text.is_some() {
self.opts.render_dataset.bounding_box(
&input.text.clone().unwrap(),
GlyphDataSet::BoundingBoxRenderOptions {
face: input.font_face.clone().unwrap(),
size: input.font_size.clone().unwrap(),
},
)
} else {
None
};
if title_bbox.is_none() {
return Err("failed to render badge".into());
}
let output = match (input.icon.is_some(), text_bbox.is_some()) {
(true, true) => self.template().render(&liquid::object!({
"title": input.title,
"title_width": title_bbox.clone().unwrap()[0],
"title_height": title_bbox.unwrap()[1],
"text": input.text,
"text_width": text_bbox.clone().unwrap()[0],
"text_height": text_bbox.unwrap()[1],
"font_face": input.font_face,
"font_size": input.font_size,
"title_colour": input.title_colour,
"title_bg_colour": input.title_bg_colour,
"text_colour": input.text_colour,
"text_bg_colour": input.text_bg_colour,
"padding_horizontal": input.padding_horizontal,
"padding_vertical": input.padding_vertical,
"icon": true,
"icon_title": format!("{} icon", input.icon.clone().unwrap()),
"icon_path": simple_icons::get(&input.icon.unwrap()).unwrap().path,
"icon_colour": input.icon_colour,
"icon_scale": input.icon_scale,
"contains_text": true,
})),
(true, false) => self.template().render(&liquid::object!({
"title": input.title,
"title_width": title_bbox.clone().unwrap()[0],
"title_height": title_bbox.unwrap()[1],
"text": "",
"contains_text": false,
"text_width": 0,
"text_height": 0,
"font_face": input.font_face,
"font_size": input.font_size,
"title_colour": input.title_colour,
"title_bg_colour": input.title_bg_colour,
"text_colour": input.text_colour,
"text_bg_colour": input.text_bg_colour,
"padding_horizontal": input.padding_horizontal,
"padding_vertical": input.padding_vertical,
"icon": true,
"icon_title": format!("{} icon", input.icon.clone().unwrap()),
"icon_path": simple_icons::get(&input.icon.unwrap()).unwrap().path,
"icon_colour": input.icon_colour,
"icon_scale": input.icon_scale,
})),
(false, true) => self.template().render(&liquid::object!({
"title": input.title,
"title_width": title_bbox.clone().unwrap()[0],
"title_height": title_bbox.unwrap()[1],
"text": input.text,
"text_width": text_bbox.clone().unwrap()[0],
"text_height": text_bbox.unwrap()[1],
"font_face": input.font_face,
"font_size": input.font_size,
"title_colour": input.title_colour,
"title_bg_colour": input.title_bg_colour,
"text_colour": input.text_colour,
"text_bg_colour": input.text_bg_colour,
"padding_horizontal": input.padding_horizontal,
"padding_vertical": input.padding_vertical,
"contains_text": true,
})),
(false, false) => self.template().render(&liquid::object!({
"title": input.title,
"title_width": title_bbox.clone().unwrap()[0],
"title_height": title_bbox.unwrap()[1],
"text": "",
"contains_text": false,
"text_width": 0,
"text_height": 0,
"font_face": input.font_face,
"font_size": input.font_size,
"title_colour": input.title_colour,
"title_bg_colour": input.title_bg_colour,
"text_colour": input.text_colour,
"text_bg_colour": input.text_bg_colour,
"padding_horizontal": input.padding_horizontal,
"padding_vertical": input.padding_vertical,
})),
};
match output {
Ok(badge) => Ok(badge),
Err(_err) => {
error!("{}", _err);
Err("failed to render badge".into())
}
}
}
}
|
use crate::{linalg::Vct, Flt};
use std::default::Default;
use std::ops::{Mul, Rem};
/*
y
|
|
|
o--------x
/
/
z
*/
// Shorthand for `Default::default()`; used to zero-fill the remaining
// matrix fields in struct-update (`..df!()`) expressions below.
macro_rules! df {
    () => {
        Default::default()
    };
}
/// Row-major 4x4 homogeneous transform matrix; field `mRC` is row R, column C.
#[cfg_attr(rustfmt, rustfmt_skip)]
#[derive(Copy, Clone, Debug, Default)]
pub struct Mat {
    pub m00: Flt, pub m01: Flt, pub m02: Flt, pub m03: Flt,
    pub m10: Flt, pub m11: Flt, pub m12: Flt, pub m13: Flt,
    pub m20: Flt, pub m21: Flt, pub m22: Flt, pub m23: Flt,
    pub m30: Flt, pub m31: Flt, pub m32: Flt, pub m33: Flt,
}
impl Mat {
    /// The 4x4 identity matrix.
    pub fn identity() -> Self {
        Self { m00: 1.0, m11: 1.0, m22: 1.0, m33: 1.0, ..df!() }
    }
    /// Scaling matrix with factors `x`, `y`, `z` on the diagonal.
    pub fn scale(x: Flt, y: Flt, z: Flt) -> Self {
        let mut m = Self::identity();
        m.m00 = x;
        m.m11 = y;
        m.m22 = z;
        m
    }
    /// Translation matrix moving points by `(x, y, z)`.
    pub fn shift(x: Flt, y: Flt, z: Flt) -> Self {
        let mut m = Self::identity();
        m.m03 = x;
        m.m13 = y;
        m.m23 = z;
        m
    }
    /// Rotation by `radian` about the coordinate axis named by `axis`
    /// ("x", "y" or "z"); panics on any other string.
    pub fn rot(axis: &str, radian: Flt) -> Self {
        let s = radian.sin();
        let c = radian.cos();
        let mut m = Self::identity();
        match axis {
            "x" => { m.m11 = c; m.m12 = -s; m.m21 = s; m.m22 = c; }
            "y" => { m.m00 = c; m.m02 = s; m.m20 = -s; m.m22 = c; }
            "z" => { m.m00 = c; m.m01 = -s; m.m10 = s; m.m11 = c; }
            _ => panic!("Invalid axis"),
        }
        m
    }
    /// Like [`Mat::rot`] but taking the angle in degrees.
    pub fn rot_degree(axis: &str, degree: Flt) -> Self {
        Self::rot(axis, degree.to_radians())
    }
    // axis: p + tv
    /// Rotation by `radian` about the line through `p` with direction `v`:
    /// translate the line to the origin, align it via two axis rotations,
    /// rotate about z, then undo the alignment and translation.
    pub fn rot_line(p: Vct, v: Vct, radian: Flt) -> Self {
        let ax = (v.dot(Vct::new(1.0, 0.0, 0.0)) / v.len()).acos();
        let ay = (v.dot(Vct::new(0.0, 1.0, 0.0)) / v.len()).acos();
        let to_origin = Self::shift(-p.x, -p.y, -p.z);
        let and_back = Self::shift(p.x, p.y, p.z);
        and_back
            * Self::rot("x", -ax)
            * Self::rot("y", -ay)
            * Self::rot("z", radian)
            * Self::rot("y", ay)
            * Self::rot("x", ax)
            * to_origin
    }
    /// Like [`Mat::rot_line`] but taking the angle in degrees.
    pub fn rot_line_degree(p: Vct, v: Vct, degree: Flt) -> Self {
        Self::rot_line(p, v, degree.to_radians())
    }
}
impl Mul<Mat> for Mat {
    type Output = Self;
    /// Standard row-by-column 4x4 matrix product, `self * rhs`.
    fn mul(self, rhs: Self) -> Self {
        Mat {
            m00: self.m00 * rhs.m00 + self.m01 * rhs.m10 + self.m02 * rhs.m20 + self.m03 * rhs.m30,
            m01: self.m00 * rhs.m01 + self.m01 * rhs.m11 + self.m02 * rhs.m21 + self.m03 * rhs.m31,
            m02: self.m00 * rhs.m02 + self.m01 * rhs.m12 + self.m02 * rhs.m22 + self.m03 * rhs.m32,
            m03: self.m00 * rhs.m03 + self.m01 * rhs.m13 + self.m02 * rhs.m23 + self.m03 * rhs.m33,
            m10: self.m10 * rhs.m00 + self.m11 * rhs.m10 + self.m12 * rhs.m20 + self.m13 * rhs.m30,
            m11: self.m10 * rhs.m01 + self.m11 * rhs.m11 + self.m12 * rhs.m21 + self.m13 * rhs.m31,
            m12: self.m10 * rhs.m02 + self.m11 * rhs.m12 + self.m12 * rhs.m22 + self.m13 * rhs.m32,
            m13: self.m10 * rhs.m03 + self.m11 * rhs.m13 + self.m12 * rhs.m23 + self.m13 * rhs.m33,
            m20: self.m20 * rhs.m00 + self.m21 * rhs.m10 + self.m22 * rhs.m20 + self.m23 * rhs.m30,
            m21: self.m20 * rhs.m01 + self.m21 * rhs.m11 + self.m22 * rhs.m21 + self.m23 * rhs.m31,
            m22: self.m20 * rhs.m02 + self.m21 * rhs.m12 + self.m22 * rhs.m22 + self.m23 * rhs.m32,
            m23: self.m20 * rhs.m03 + self.m21 * rhs.m13 + self.m22 * rhs.m23 + self.m23 * rhs.m33,
            m30: self.m30 * rhs.m00 + self.m31 * rhs.m10 + self.m32 * rhs.m20 + self.m33 * rhs.m30,
            m31: self.m30 * rhs.m01 + self.m31 * rhs.m11 + self.m32 * rhs.m21 + self.m33 * rhs.m31,
            m32: self.m30 * rhs.m02 + self.m31 * rhs.m12 + self.m32 * rhs.m22 + self.m33 * rhs.m32,
            m33: self.m30 * rhs.m03 + self.m31 * rhs.m13 + self.m32 * rhs.m23 + self.m33 * rhs.m33,
        }
    }
}
// (x, y, z, 1)
impl Mul<Vct> for Mat {
    type Output = Vct;
    /// Transforms `rhs` as a *point*: the implicit homogeneous coordinate is
    /// w = 1, so the translation column (m03, m13, m23) is applied.
    fn mul(self, rhs: Vct) -> Vct {
        Vct {
            x: self.m00 * rhs.x + self.m01 * rhs.y + self.m02 * rhs.z + self.m03,
            y: self.m10 * rhs.x + self.m11 * rhs.y + self.m12 * rhs.z + self.m13,
            z: self.m20 * rhs.x + self.m21 * rhs.y + self.m22 * rhs.z + self.m23,
        }
    }
}
// (x, y, z, 0)
impl Rem<Vct> for Mat {
    type Output = Vct;
    /// Transforms `rhs` as a *direction*: the implicit homogeneous coordinate
    /// is w = 0, so translation is ignored (rotation/scale only). `%` is
    /// repurposed here as the direction-transform operator.
    fn rem(self, rhs: Vct) -> Vct {
        Vct {
            x: self.m00 * rhs.x + self.m01 * rhs.y + self.m02 * rhs.z,
            y: self.m10 * rhs.x + self.m11 * rhs.y + self.m12 * rhs.z,
            z: self.m20 * rhs.x + self.m21 * rhs.y + self.m22 * rhs.z,
        }
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#[cfg(test)]
mod tests {
    use rand::Rng;
    use std::env::current_exe;
    use std::io::{Read, Write};
    use std::process::{Child, Command, Stdio};
    use timebomb::timeout_ms;
    /// Hard cap for a single client invocation; tests fail instead of hanging.
    const TEST_TIMEOUT_MS: u32 = 60_000;
    /// Builds a `Command` for helper binary `name`, which lives in the
    /// `overnet_host_examples_test_commands` directory next to this test
    /// executable, with backtraces enabled.
    fn cmd(name: &str) -> Command {
        let mut path = current_exe().unwrap();
        path.pop();
        path.push("overnet_host_examples_test_commands");
        path.push(name);
        let mut cmd = Command::new(path);
        cmd.env("RUST_BACKTRACE", "1");
        cmd
    }
    /// A spawned helper process that is killed when dropped.
    struct Daemon {
        child: Child,
        name: String,
    }
    impl Daemon {
        /// Wraps an already-spawned child under the given display name.
        fn new_from_child(name: String, child: Child) -> Daemon {
            Daemon { name, child }
        }
        /// Spawns `command` and tracks it; panics if the spawn fails.
        fn new(mut command: Command) -> Daemon {
            let name = format!("{:?}", command);
            let child = command.spawn().unwrap();
            Daemon { name, child }
        }
    }
    impl Drop for Daemon {
        fn drop(&mut self) {
            // `unwrap_or_else` builds the panic message only on the error
            // path; the previous `expect(&format!(..))` allocated it on every
            // drop, including the common success case.
            self.child
                .kill()
                .unwrap_or_else(|_| panic!("'{}' wasn't running", self.name));
        }
    }
    /// An `ascendd` overnet daemon plus every process attached to it; all of
    /// them are torn down when this struct is dropped.
    struct Ascendd {
        daemons: Vec<Daemon>,
        socket: String,
    }
    impl Ascendd {
        /// Starts a fresh `ascendd` on a uniquely named socket path so
        /// concurrent tests cannot collide.
        fn new() -> Ascendd {
            let socket = format!("/tmp/ascendd.{}.sock", rand::thread_rng().gen::<u128>());
            let mut cmd = cmd("ascendd");
            cmd.arg("--sockpath").arg(&socket);
            Ascendd { daemons: vec![Daemon::new(cmd)], socket }
        }
        /// `cmd` pointed at this daemon's socket via the `ASCENDD` env var.
        fn cmd(&self, name: &str) -> Command {
            let mut c = cmd(name);
            c.env("ASCENDD", &self.socket);
            c
        }
        /// Same as `cmd`, additionally tagging the overnet connection label.
        fn labelled_cmd(&self, name: &str, label: &str) -> Command {
            let mut c = self.cmd(name);
            c.env("OVERNET_CONNECTION_LABEL", label);
            c
        }
        /// Echo-example command; `kind` is "client" or "server".
        fn echo_cmd(&self, kind: &str) -> Command {
            let mut c = self.labelled_cmd("overnet_echo", kind);
            c.arg(kind);
            c
        }
        /// Echo client sending the automated-test payload.
        fn echo_client(&self) -> Command {
            let mut c = self.echo_cmd("client");
            c.arg("AUTOMATED_TEST");
            c
        }
        /// Launches an echo server tied to this instance's lifetime.
        fn add_echo_server(&mut self) {
            self.daemons.push(Daemon::new(self.echo_cmd("server")))
        }
        /// Interface-passing example command; `kind` is "client" or "server".
        fn interface_passing_cmd(&self, kind: &str) -> Command {
            let mut c = self.labelled_cmd("overnet_interface_passing", kind);
            c.arg(kind);
            c
        }
        /// Interface-passing client sending the automated-test payload.
        fn interface_passing_client(&self) -> Command {
            let mut c = self.interface_passing_cmd("client");
            c.arg("AUTOMATED_TEST");
            c
        }
        /// Launches an interface-passing server tied to this instance.
        fn add_interface_passing_server(&mut self) {
            self.daemons.push(Daemon::new(self.interface_passing_cmd("server")))
        }
        /// Spawns `onet host-pipe` and returns its (stdout, stdin) so two
        /// daemons can be bridged by shuttling bytes between the pipes.
        fn add_onet_host_pipe(
            &mut self,
            label: &str,
        ) -> (Box<dyn Read + Send>, Box<dyn Write + Send>) {
            let mut cmd = self.labelled_cmd("onet", label);
            cmd.arg("host-pipe").stdin(Stdio::piped()).stdout(Stdio::piped());
            let name = format!("{:?}", cmd);
            let mut child = cmd.spawn().unwrap();
            let input = Box::new(child.stdout.take().unwrap());
            let output = Box::new(child.stdin.take().unwrap());
            self.daemons.push(Daemon::new_from_child(name, child));
            (input, output)
        }
        /// `onet` subcommand invocation that excludes the client's own node
        /// from its report.
        fn onet_client(&self, cmd: &str) -> Command {
            let mut c = self.labelled_cmd("onet", cmd);
            c.arg(cmd);
            c.arg("--exclude_self");
            c
        }
    }
    /// Cross-wires two ascendd instances: each host-pipe's output is copied
    /// into the other's input on a background thread.
    fn bridge(a: &mut Ascendd, b: &mut Ascendd) {
        let (mut i1, mut o1) = a.add_onet_host_pipe("onet1");
        let (mut i2, mut o2) = b.add_onet_host_pipe("onet2");
        std::thread::spawn(move || std::io::copy(&mut i1, &mut o2));
        std::thread::spawn(move || std::io::copy(&mut i2, &mut o1));
    }
    /// Runs a client command to completion under the test timeout and asserts
    /// it exited successfully.
    fn run_client(mut cmd: Command) {
        timeout_ms(
            move || assert!(cmd.spawn().unwrap().wait().expect("client should succeed").success()),
            TEST_TIMEOUT_MS,
        );
    }
    #[test]
    fn echo_test() {
        let mut ascendd = Ascendd::new();
        ascendd.add_echo_server();
        run_client(ascendd.echo_client());
        run_client(ascendd.onet_client("full-map"));
    }
    #[test]
    fn multiple_ascendd_echo_test() {
        let mut ascendd1 = Ascendd::new();
        let mut ascendd2 = Ascendd::new();
        bridge(&mut ascendd1, &mut ascendd2);
        ascendd1.add_echo_server();
        run_client(ascendd1.echo_client());
        run_client(ascendd1.onet_client("full-map"));
    }
    #[test]
    fn interface_passing_test() {
        let mut ascendd = Ascendd::new();
        ascendd.add_interface_passing_server();
        run_client(ascendd.interface_passing_client());
        run_client(ascendd.onet_client("full-map"));
    }
}
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[doc(keyword = "as")]
//
/// The keyword for casting a value to a type.
///
/// `as` is most commonly used to turn primitive types into other primitive types, but it has other
/// uses that include turning pointers into addresses, addresses into pointers, and pointers into
/// other pointers.
///
/// ```rust
/// let thing1: u8 = 89.0 as u8;
/// assert_eq!('B' as u32, 66);
/// assert_eq!(thing1 as char, 'Y');
/// let thing2: f32 = thing1 as f32 + 10.5;
/// assert_eq!(true as u8 + thing2 as u8, 100);
/// ```
///
/// In general, any cast that can be performed via ascribing the type can also be done using `as`,
/// so instead of writing `let x: u32 = 123`, you can write `let x = 123 as u32` (Note: `let x: u32
/// = 123` would be best in that situation). The same is not true in the other direction, however,
/// explicitly using `as` allows a few more coercions that aren't allowed implicitly, such as
/// changing the type of a raw pointer or turning closures into raw pointers.
///
/// Other places `as` is used include as extra syntax for [`crate`] and `use`, to change the name
/// something is imported as.
///
/// For more information on what `as` is capable of, see the [Reference]
///
/// [Reference]:
/// https://doc.rust-lang.org/reference/expressions/operator-expr.html#type-cast-expressions
/// [`crate`]: keyword.crate.html
mod as_keyword { }
#[doc(keyword = "const")]
//
/// The keyword for defining constants.
///
/// Sometimes a certain value is used many times throughout a program, and it can become
/// inconvenient to copy it over and over. What's more, it's not always possible or desirable to
/// make it a variable that gets carried around to each function that needs it. In these cases, the
/// `const` keyword provides a convenient alternative to code duplication.
///
/// ```rust
/// const THING: u32 = 0xABAD1DEA;
///
/// let foo = 123 + THING;
/// ```
///
/// Constants must be explicitly typed, unlike with `let` you can't ignore its type and let the
/// compiler figure it out. Any constant value can be defined in a const, which in practice happens
/// to be most things that would be reasonable to have a constant (barring `const fn`s, coming
/// soon). For example, you can't have a File as a `const`.
///
/// The only lifetime allowed in a constant is `'static`, which is the lifetime that encompasses
/// all others in a Rust program. For example, if you wanted to define a constant string, it would
/// look like this:
///
/// ```rust
/// const WORDS: &str = "hello rust!";
/// ```
///
/// Thanks to static lifetime elision, you usually don't have to explicitly use `'static`:
///
/// ```rust
/// const WORDS: &str = "hello convenience!";
/// ```
///
/// `const` items look remarkably similar to `static` items, which introduces some confusion as
/// to which one should be used at which times. To put it simply, constants are inlined wherever
/// they're used, making using them identical to simply replacing the name of the const with its
/// value. Static variables on the other hand point to a single location in memory, which all
/// accesses share. This means that, unlike with constants, they can't have destructors, and act as
/// a single value across the entire codebase.
///
/// Constants, as with statics, should always be in SCREAMING_SNAKE_CASE.
///
/// The `const` keyword is also used in raw pointers in combination with `mut`, as seen in `*const
/// T` and `*mut T`. More about that can be read at the [pointer] primitive part of the Rust docs.
///
/// For more detail on `const`, see the [Rust Book] or the [Reference]
///
/// [pointer]: primitive.pointer.html
/// [Rust Book]:
/// https://doc.rust-lang.org/stable/book/2018-edition/ch03-01-variables-and-mutability.html#differences-between-variables-and-constants
/// [Reference]: https://doc.rust-lang.org/reference/items/constant-items.html
mod const_keyword { }
#[doc(keyword = "crate")]
//
/// The `crate` keyword.
///
/// The primary use of the `crate` keyword is as a part of `extern crate` declarations, which are
/// used to specify a dependency on a crate external to the one it's declared in. Crates are the
/// fundamental compilation unit of Rust code, and can be seen as libraries or projects. More can
/// be read about crates in the [Reference].
///
/// ```rust ignore
/// extern crate rand;
/// extern crate my_crate as thing;
/// extern crate std; // implicitly added to the root of every Rust project
/// ```
///
/// The `as` keyword can be used to change what the crate is referred to as in your project. If a
/// crate name includes a dash, it is implicitly imported with the dashes replaced by underscores.
///
/// `crate` is also used in conjunction with `pub` to signify that the item it's attached to
/// is public only to other members of the same crate it's in.
///
/// ```rust
/// # #[allow(unused_imports)]
/// pub(crate) use std::io::Error as IoError;
/// pub(crate) enum CoolMarkerType { }
/// pub struct PublicThing {
/// pub(crate) semi_secret_thing: bool,
/// }
/// ```
///
/// [Reference]: https://doc.rust-lang.org/reference/items/extern-crates.html
mod crate_keyword { }
#[doc(keyword = "enum")]
//
/// For defining enumerations.
///
/// Enums in Rust are similar to those of other compiled languages like C, but have important
/// differences that make them considerably more powerful. What Rust calls enums are more commonly
/// known as [Algebraic Data Types] if you're coming from a functional programming background. The
/// important detail is that each enum variant can have data to go along with it.
///
/// ```rust
/// # struct Coord;
/// enum SimpleEnum {
/// FirstVariant,
/// SecondVariant,
/// ThirdVariant,
/// }
///
/// enum Location {
/// Unknown,
/// Anonymous,
/// Known(Coord),
/// }
///
/// enum ComplexEnum {
/// Nothing,
/// Something(u32),
/// LotsOfThings {
/// usual_struct_stuff: bool,
/// blah: String,
/// }
/// }
///
/// enum EmptyEnum { }
/// ```
///
/// The first enum shown is the usual kind of enum you'd find in a C-style language. The second
/// shows off a hypothetical example of something storing location data, with `Coord` being any
/// other type that's needed, for example a struct. The third example demonstrates the kind of
/// data a variant can store, ranging from nothing, to a tuple, to an anonymous struct.
///
/// Instantiating enum variants involves explicitly using the enum's name as its namespace,
/// followed by one of its variants. `SimpleEnum::SecondVariant` would be an example from above.
/// When data follows along with a variant, such as with rust's built-in [`Option`] type, the data
/// is added as the type describes, for example `Option::Some(123)`. The same follows with
/// struct-like variants, with things looking like `ComplexEnum::LotsOfThings { usual_struct_stuff:
/// true, blah: "hello!".to_string(), }`. Empty Enums are similar to () in that they cannot be
/// instantiated at all, and are used mainly to mess with the type system in interesting ways.
///
/// For more information, take a look at the [Rust Book] or the [Reference]
///
/// [Algebraic Data Types]: https://en.wikipedia.org/wiki/Algebraic_data_type
/// [`Option`]: option/enum.Option.html
/// [Rust Book]: https://doc.rust-lang.org/book/second-edition/ch06-01-defining-an-enum.html
/// [Reference]: https://doc.rust-lang.org/reference/items/enumerations.html
mod enum_keyword { }
#[doc(keyword = "extern")]
//
/// For external connections in Rust code.
///
/// The `extern` keyword is used in two places in Rust. One is in conjunction with the [`crate`]
/// keyword to make your Rust code aware of other Rust crates in your project, i.e., `extern crate
/// lazy_static;`. The other use is in foreign function interfaces (FFI).
///
/// `extern` is used in two different contexts within FFI. The first is in the form of external
/// blocks, for declaring function interfaces that Rust code can call foreign code by.
///
/// ```rust ignore
/// #[link(name = "my_c_library")]
/// extern "C" {
/// fn my_c_function(x: i32) -> bool;
/// }
/// ```
///
/// This code would attempt to link with `libmy_c_library.so` on unix-like systems and
/// `my_c_library.dll` on Windows at runtime, and panic if it can't find something to link to. Rust
/// code could then use `my_c_function` as if it were any other unsafe Rust function. Working with
/// non-Rust languages and FFI is inherently unsafe, so wrappers are usually built around C APIs.
///
/// The mirror use case of FFI is also done via the `extern` keyword:
///
/// ```rust
/// #[no_mangle]
/// pub extern fn callable_from_c(x: i32) -> bool {
/// x % 3 == 0
/// }
/// ```
///
/// If compiled as a dylib, the resulting .so could then be linked to from a C library, and the
/// function could be used as if it was from any other library.
///
/// For more information on FFI, check the [Rust book] or the [Reference].
///
/// [Rust book]:
/// https://doc.rust-lang.org/book/second-edition/ch19-01-unsafe-rust.html#using-extern-functions-to-call-external-code
/// [Reference]: https://doc.rust-lang.org/reference/items/external-blocks.html
mod extern_keyword { }
#[doc(keyword = "fn")]
//
/// The `fn` keyword.
///
/// The `fn` keyword is used to declare a function.
///
/// Example:
///
/// ```rust
/// fn some_function() {
/// // code goes in here
/// }
/// ```
///
/// For more information about functions, take a look at the [Rust Book][book].
///
/// [book]: https://doc.rust-lang.org/book/second-edition/ch03-03-how-functions-work.html
mod fn_keyword { }
#[doc(keyword = "let")]
//
/// The `let` keyword.
///
/// The `let` keyword is used to declare a variable.
///
/// Example:
///
/// ```rust
/// # #![allow(unused_assignments)]
/// let x = 3; // We create a variable named `x` with the value `3`.
/// ```
///
/// By default, all variables are **not** mutable. If you want a mutable variable,
/// you'll have to use the `mut` keyword.
///
/// Example:
///
/// ```rust
/// # #![allow(unused_assignments)]
/// let mut x = 3; // We create a mutable variable named `x` with the value `3`.
///
/// x += 4; // `x` is now equal to `7`.
/// ```
///
/// For more information about the `let` keyword, take a look at the [Rust Book][book].
///
/// [book]: https://doc.rust-lang.org/book/second-edition/ch03-01-variables-and-mutability.html
mod let_keyword { }
#[doc(keyword = "struct")]
//
/// The `struct` keyword.
///
/// The `struct` keyword is used to define a struct type.
///
/// Example:
///
/// ```
/// struct Foo {
/// field1: u32,
/// field2: String,
/// }
/// ```
///
/// There are different kinds of structs. For more information, take a look at the
/// [Rust Book][book].
///
/// [book]: https://doc.rust-lang.org/book/second-edition/ch05-01-defining-structs.html
mod struct_keyword { }
|
#[doc = "Reader of register TBPR"]
pub type R = crate::R<u32, super::TBPR>;
#[doc = "Writer for register TBPR"]
pub type W = crate::W<u32, super::TBPR>;
#[doc = "Register TBPR `reset()`'s with value 0"]
impl crate::ResetValue for super::TBPR {
    type Type = u32;
    /// Hardware reset value of the TBPR register: all bits clear.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `TBPSR`"]
pub type TBPSR_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TBPSR`"]
pub struct TBPSR_W<'a> {
    // Mutably-borrowed register writer whose `bits` this proxy updates.
    w: &'a mut W,
}
impl<'a> TBPSR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // TBPSR occupies bits 0..=7: clear the field, then OR in the value.
        const MASK: u32 = 0xff;
        let field = u32::from(value) & MASK;
        self.w.bits = (self.w.bits & !MASK) | field;
        self.w
    }
}
#[doc = "Reader of field `TBPSRH`"]
pub type TBPSRH_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TBPSRH`"]
pub struct TBPSRH_W<'a> {
    // Mutably-borrowed register writer whose `bits` this proxy updates.
    w: &'a mut W,
}
impl<'a> TBPSRH_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // TBPSRH occupies bits 8..=15: clear the field, then OR in the
        // shifted value.
        const MASK: u32 = 0xff;
        const OFFSET: u32 = 8;
        let field = (u32::from(value) & MASK) << OFFSET;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | field;
        self.w
    }
}
impl R {
    #[doc = "Bits 0:7 - GPTM Timer B Prescale"]
    #[inline(always)]
    pub fn tbpsr(&self) -> TBPSR_R {
        // Extract bits 0..=7.
        TBPSR_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - GPTM Timer B Prescale High Byte"]
    #[inline(always)]
    pub fn tbpsrh(&self) -> TBPSRH_R {
        // Extract bits 8..=15.
        TBPSRH_R::new(((self.bits >> 8) & 0xff) as u8)
    }
}
impl W {
    #[doc = "Bits 0:7 - GPTM Timer B Prescale"]
    #[inline(always)]
    pub fn tbpsr(&mut self) -> TBPSR_W {
        // Write proxy for bits 0..=7.
        TBPSR_W { w: self }
    }
    #[doc = "Bits 8:15 - GPTM Timer B Prescale High Byte"]
    #[inline(always)]
    pub fn tbpsrh(&mut self) -> TBPSRH_W {
        // Write proxy for bits 8..=15.
        TBPSRH_W { w: self }
    }
}
|
use anyhow::Result;
use itertools::Itertools;
use std::collections::HashSet;
/// Sums, over every blank-line-separated group, the number of distinct
/// characters that appear anywhere in that group.
fn part1(input: &[String]) -> usize {
    let mut total = 0;
    for group in input.split(|line| line.is_empty()) {
        let mut answered: HashSet<char> = HashSet::new();
        for line in group {
            answered.extend(line.chars());
        }
        total += answered.len();
    }
    total
}
/// Sums, over every blank-line-separated group, the number of distinct
/// characters common to *all* lines of the group.
///
/// Uses per-line `HashSet` intersection rather than the previous
/// sort-and-count-occurrences approach, which miscounted whenever a line
/// repeated a character (a char appearing twice in one line of a two-line
/// group reached `count == len` without being common to both lines). This
/// also drops the `itertools::group_by` dependency.
fn part2(input: &[String]) -> usize {
    input
        .split(|line| line.is_empty())
        .map(|group| {
            let mut sets = group
                .iter()
                .map(|line| line.chars().collect::<HashSet<char>>());
            // Fold the remaining lines into the first line's set; an empty
            // group (e.g. a trailing blank line) contributes 0.
            match sets.next() {
                Some(first) => sets.fold(first, |acc, set| &acc & &set).len(),
                None => 0,
            }
        })
        .sum()
}
/// Reads the puzzle input (one `String` per line; blank lines delimit
/// groups) from the shared input helper and prints both part answers.
fn main() -> Result<()> {
    let input: Vec<String> = common::std_input_vec()?;
    println!("Part1: {}", part1(&input));
    println!("Part2: {}", part2(&input));
    Ok(())
}
|
#[doc = r"Register block"]
// Memory-mapped PRS peripheral layout; field order and the `_reservedN`
// padding arrays keep every register at the offset given in its doc string.
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Software Pulse Register"]
    pub swpulse: SWPULSE,
    #[doc = "0x04 - Software Level Register"]
    pub swlevel: SWLEVEL,
    #[doc = "0x08 - I/O Routing Pin Enable Register"]
    pub routepen: ROUTEPEN,
    // Padding for reserved offset 0x0c.
    _reserved3: [u8; 4usize],
    #[doc = "0x10 - I/O Routing Location Register"]
    pub routeloc0: ROUTELOC0,
    #[doc = "0x14 - I/O Routing Location Register"]
    pub routeloc1: ROUTELOC1,
    #[doc = "0x18 - I/O Routing Location Register"]
    pub routeloc2: ROUTELOC2,
    // Padding for reserved offsets 0x1c-0x2f.
    _reserved6: [u8; 20usize],
    #[doc = "0x30 - Control Register"]
    pub ctrl: CTRL,
    #[doc = "0x34 - DMA Request 0 Register"]
    pub dmareq0: DMAREQ0,
    #[doc = "0x38 - DMA Request 1 Register"]
    pub dmareq1: DMAREQ1,
    // Padding for reserved offset 0x3c.
    _reserved9: [u8; 4usize],
    #[doc = "0x40 - PRS Channel Values"]
    pub peek: PEEK,
    // Padding for reserved offsets 0x44-0x4f.
    _reserved10: [u8; 12usize],
    #[doc = "0x50 - Channel Control Register"]
    pub ch0_ctrl: CH0_CTRL,
    #[doc = "0x54 - Channel Control Register"]
    pub ch1_ctrl: CH1_CTRL,
    #[doc = "0x58 - Channel Control Register"]
    pub ch2_ctrl: CH2_CTRL,
    #[doc = "0x5c - Channel Control Register"]
    pub ch3_ctrl: CH3_CTRL,
    #[doc = "0x60 - Channel Control Register"]
    pub ch4_ctrl: CH4_CTRL,
    #[doc = "0x64 - Channel Control Register"]
    pub ch5_ctrl: CH5_CTRL,
    #[doc = "0x68 - Channel Control Register"]
    pub ch6_ctrl: CH6_CTRL,
    #[doc = "0x6c - Channel Control Register"]
    pub ch7_ctrl: CH7_CTRL,
    #[doc = "0x70 - Channel Control Register"]
    pub ch8_ctrl: CH8_CTRL,
    #[doc = "0x74 - Channel Control Register"]
    pub ch9_ctrl: CH9_CTRL,
    #[doc = "0x78 - Channel Control Register"]
    pub ch10_ctrl: CH10_CTRL,
    #[doc = "0x7c - Channel Control Register"]
    pub ch11_ctrl: CH11_CTRL,
}
// One zero-sized marker type per register; the `Readable`/`Writable` trait
// impls gate which of the generic `read()`/`write()` operations each
// register supports (e.g. SWPULSE is write-only: no `Readable` impl).
#[doc = "Software Pulse Register\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [swpulse](swpulse) module"]
pub type SWPULSE = crate::Reg<u32, _SWPULSE>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SWPULSE;
#[doc = "`write(|w| ..)` method takes [swpulse::W](swpulse::W) writer structure"]
impl crate::Writable for SWPULSE {}
#[doc = "Software Pulse Register"]
pub mod swpulse;
#[doc = "Software Level Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [swlevel](swlevel) module"]
pub type SWLEVEL = crate::Reg<u32, _SWLEVEL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SWLEVEL;
#[doc = "`read()` method returns [swlevel::R](swlevel::R) reader structure"]
impl crate::Readable for SWLEVEL {}
#[doc = "`write(|w| ..)` method takes [swlevel::W](swlevel::W) writer structure"]
impl crate::Writable for SWLEVEL {}
#[doc = "Software Level Register"]
pub mod swlevel;
#[doc = "I/O Routing Pin Enable Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [routepen](routepen) module"]
pub type ROUTEPEN = crate::Reg<u32, _ROUTEPEN>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _ROUTEPEN;
#[doc = "`read()` method returns [routepen::R](routepen::R) reader structure"]
impl crate::Readable for ROUTEPEN {}
#[doc = "`write(|w| ..)` method takes [routepen::W](routepen::W) writer structure"]
impl crate::Writable for ROUTEPEN {}
#[doc = "I/O Routing Pin Enable Register"]
pub mod routepen;
#[doc = "I/O Routing Location Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [routeloc0](routeloc0) module"]
pub type ROUTELOC0 = crate::Reg<u32, _ROUTELOC0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _ROUTELOC0;
#[doc = "`read()` method returns [routeloc0::R](routeloc0::R) reader structure"]
impl crate::Readable for ROUTELOC0 {}
#[doc = "`write(|w| ..)` method takes [routeloc0::W](routeloc0::W) writer structure"]
impl crate::Writable for ROUTELOC0 {}
#[doc = "I/O Routing Location Register"]
pub mod routeloc0;
#[doc = "I/O Routing Location Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [routeloc1](routeloc1) module"]
pub type ROUTELOC1 = crate::Reg<u32, _ROUTELOC1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _ROUTELOC1;
#[doc = "`read()` method returns [routeloc1::R](routeloc1::R) reader structure"]
impl crate::Readable for ROUTELOC1 {}
#[doc = "`write(|w| ..)` method takes [routeloc1::W](routeloc1::W) writer structure"]
impl crate::Writable for ROUTELOC1 {}
#[doc = "I/O Routing Location Register"]
pub mod routeloc1;
#[doc = "I/O Routing Location Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [routeloc2](routeloc2) module"]
pub type ROUTELOC2 = crate::Reg<u32, _ROUTELOC2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _ROUTELOC2;
#[doc = "`read()` method returns [routeloc2::R](routeloc2::R) reader structure"]
impl crate::Readable for ROUTELOC2 {}
#[doc = "`write(|w| ..)` method takes [routeloc2::W](routeloc2::W) writer structure"]
impl crate::Writable for ROUTELOC2 {}
#[doc = "I/O Routing Location Register"]
pub mod routeloc2;
#[doc = "Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ctrl](ctrl) module"]
pub type CTRL = crate::Reg<u32, _CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CTRL;
#[doc = "`read()` method returns [ctrl::R](ctrl::R) reader structure"]
impl crate::Readable for CTRL {}
#[doc = "`write(|w| ..)` method takes [ctrl::W](ctrl::W) writer structure"]
impl crate::Writable for CTRL {}
#[doc = "Control Register"]
pub mod ctrl;
#[doc = "DMA Request 0 Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dmareq0](dmareq0) module"]
pub type DMAREQ0 = crate::Reg<u32, _DMAREQ0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DMAREQ0;
#[doc = "`read()` method returns [dmareq0::R](dmareq0::R) reader structure"]
impl crate::Readable for DMAREQ0 {}
#[doc = "`write(|w| ..)` method takes [dmareq0::W](dmareq0::W) writer structure"]
impl crate::Writable for DMAREQ0 {}
#[doc = "DMA Request 0 Register"]
pub mod dmareq0;
#[doc = "DMA Request 1 Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dmareq1](dmareq1) module"]
pub type DMAREQ1 = crate::Reg<u32, _DMAREQ1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DMAREQ1;
#[doc = "`read()` method returns [dmareq1::R](dmareq1::R) reader structure"]
impl crate::Readable for DMAREQ1 {}
#[doc = "`write(|w| ..)` method takes [dmareq1::W](dmareq1::W) writer structure"]
impl crate::Writable for DMAREQ1 {}
#[doc = "DMA Request 1 Register"]
pub mod dmareq1;
#[doc = "PRS Channel Values\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [peek](peek) module"]
pub type PEEK = crate::Reg<u32, _PEEK>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PEEK;
#[doc = "`read()` method returns [peek::R](peek::R) reader structure"]
impl crate::Readable for PEEK {}
#[doc = "PRS Channel Values"]
pub mod peek;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch0_ctrl](ch0_ctrl) module"]
pub type CH0_CTRL = crate::Reg<u32, _CH0_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH0_CTRL;
#[doc = "`read()` method returns [ch0_ctrl::R](ch0_ctrl::R) reader structure"]
impl crate::Readable for CH0_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch0_ctrl::W](ch0_ctrl::W) writer structure"]
impl crate::Writable for CH0_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch0_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch1_ctrl](ch1_ctrl) module"]
pub type CH1_CTRL = crate::Reg<u32, _CH1_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH1_CTRL;
#[doc = "`read()` method returns [ch1_ctrl::R](ch1_ctrl::R) reader structure"]
impl crate::Readable for CH1_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch1_ctrl::W](ch1_ctrl::W) writer structure"]
impl crate::Writable for CH1_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch1_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch2_ctrl](ch2_ctrl) module"]
pub type CH2_CTRL = crate::Reg<u32, _CH2_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH2_CTRL;
#[doc = "`read()` method returns [ch2_ctrl::R](ch2_ctrl::R) reader structure"]
impl crate::Readable for CH2_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch2_ctrl::W](ch2_ctrl::W) writer structure"]
impl crate::Writable for CH2_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch2_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch3_ctrl](ch3_ctrl) module"]
pub type CH3_CTRL = crate::Reg<u32, _CH3_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH3_CTRL;
#[doc = "`read()` method returns [ch3_ctrl::R](ch3_ctrl::R) reader structure"]
impl crate::Readable for CH3_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch3_ctrl::W](ch3_ctrl::W) writer structure"]
impl crate::Writable for CH3_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch3_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch4_ctrl](ch4_ctrl) module"]
pub type CH4_CTRL = crate::Reg<u32, _CH4_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH4_CTRL;
#[doc = "`read()` method returns [ch4_ctrl::R](ch4_ctrl::R) reader structure"]
impl crate::Readable for CH4_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch4_ctrl::W](ch4_ctrl::W) writer structure"]
impl crate::Writable for CH4_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch4_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch5_ctrl](ch5_ctrl) module"]
pub type CH5_CTRL = crate::Reg<u32, _CH5_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH5_CTRL;
#[doc = "`read()` method returns [ch5_ctrl::R](ch5_ctrl::R) reader structure"]
impl crate::Readable for CH5_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch5_ctrl::W](ch5_ctrl::W) writer structure"]
impl crate::Writable for CH5_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch5_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch6_ctrl](ch6_ctrl) module"]
pub type CH6_CTRL = crate::Reg<u32, _CH6_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH6_CTRL;
#[doc = "`read()` method returns [ch6_ctrl::R](ch6_ctrl::R) reader structure"]
impl crate::Readable for CH6_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch6_ctrl::W](ch6_ctrl::W) writer structure"]
impl crate::Writable for CH6_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch6_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch7_ctrl](ch7_ctrl) module"]
pub type CH7_CTRL = crate::Reg<u32, _CH7_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH7_CTRL;
#[doc = "`read()` method returns [ch7_ctrl::R](ch7_ctrl::R) reader structure"]
impl crate::Readable for CH7_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch7_ctrl::W](ch7_ctrl::W) writer structure"]
impl crate::Writable for CH7_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch7_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch8_ctrl](ch8_ctrl) module"]
pub type CH8_CTRL = crate::Reg<u32, _CH8_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH8_CTRL;
#[doc = "`read()` method returns [ch8_ctrl::R](ch8_ctrl::R) reader structure"]
impl crate::Readable for CH8_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch8_ctrl::W](ch8_ctrl::W) writer structure"]
impl crate::Writable for CH8_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch8_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch9_ctrl](ch9_ctrl) module"]
pub type CH9_CTRL = crate::Reg<u32, _CH9_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH9_CTRL;
#[doc = "`read()` method returns [ch9_ctrl::R](ch9_ctrl::R) reader structure"]
impl crate::Readable for CH9_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch9_ctrl::W](ch9_ctrl::W) writer structure"]
impl crate::Writable for CH9_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch9_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch10_ctrl](ch10_ctrl) module"]
pub type CH10_CTRL = crate::Reg<u32, _CH10_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH10_CTRL;
#[doc = "`read()` method returns [ch10_ctrl::R](ch10_ctrl::R) reader structure"]
impl crate::Readable for CH10_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch10_ctrl::W](ch10_ctrl::W) writer structure"]
impl crate::Writable for CH10_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch10_ctrl;
#[doc = "Channel Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ch11_ctrl](ch11_ctrl) module"]
pub type CH11_CTRL = crate::Reg<u32, _CH11_CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CH11_CTRL;
#[doc = "`read()` method returns [ch11_ctrl::R](ch11_ctrl::R) reader structure"]
impl crate::Readable for CH11_CTRL {}
#[doc = "`write(|w| ..)` method takes [ch11_ctrl::W](ch11_ctrl::W) writer structure"]
impl crate::Writable for CH11_CTRL {}
#[doc = "Channel Control Register"]
pub mod ch11_ctrl;
|
// Generated reader/writer aliases for the SACKCTL register: `R` wraps a read
// snapshot of the 32-bit value, `W` accumulates bits for a write.
#[doc = "Reader of register SACKCTL"]
pub type R = crate::R<u32, super::SACKCTL>;
#[doc = "Writer for register SACKCTL"]
pub type W = crate::W<u32, super::SACKCTL>;
#[doc = "Register SACKCTL `reset()`'s with value 0"]
impl crate::ResetValue for super::SACKCTL {
    type Type = u32;
    // All fields cleared on reset.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `ACKOEN`"]
pub type ACKOEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ACKOEN`"]
pub struct ACKOEN_W<'a> {
    w: &'a mut W,
}
impl<'a> ACKOEN_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // ACKOEN lives in bit 0: clear the slot, then OR in the new value.
        const MASK: u32 = 0x01;
        self.w.bits = (self.w.bits & !MASK) | (u32::from(value) & MASK);
        self.w
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
#[doc = "Reader of field `ACKOVAL`"]
pub type ACKOVAL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ACKOVAL`"]
pub struct ACKOVAL_W<'a> {
    w: &'a mut W,
}
impl<'a> ACKOVAL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // ACKOVAL lives in bit 1: clear the slot, then OR in the shifted value.
        const MASK: u32 = 0x01 << 1;
        self.w.bits = (self.w.bits & !MASK) | ((u32::from(value) << 1) & MASK);
        self.w
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
// Field readers: extract each flag from the captured register snapshot.
impl R {
    #[doc = "Bit 0 - I2C Slave ACK Override Enable"]
    #[inline(always)]
    pub fn ackoen(&self) -> ACKOEN_R {
        let bit0 = self.bits & 0x01;
        ACKOEN_R::new(bit0 != 0)
    }
    #[doc = "Bit 1 - I2C Slave ACK Override Value"]
    #[inline(always)]
    pub fn ackoval(&self) -> ACKOVAL_R {
        let bit1 = (self.bits >> 1) & 0x01;
        ACKOVAL_R::new(bit1 != 0)
    }
}
// Field writers: each method hands back a proxy that mutates `self.bits`
// in place and returns `&mut W` for chaining.
impl W {
    #[doc = "Bit 0 - I2C Slave ACK Override Enable"]
    #[inline(always)]
    pub fn ackoen(&mut self) -> ACKOEN_W {
        ACKOEN_W { w: self }
    }
    #[doc = "Bit 1 - I2C Slave ACK Override Value"]
    #[inline(always)]
    pub fn ackoval(&mut self) -> ACKOVAL_W {
        ACKOVAL_W { w: self }
    }
}
|
//! Internal implementation details of usbd-hid.
extern crate proc_macro;
extern crate usbd_hid_descriptors;
use proc_macro::TokenStream;
use proc_macro2::Span;
use quote::quote;
use syn::{parse, parse_macro_input, ItemStruct, Field, Fields, Type, Expr, Path};
use syn::{Result, Token, ExprAssign, ExprPath, Pat, PatSlice, Ident};
use syn::{ExprTuple, ExprLit, Lit, ExprBlock, Block, Stmt};
use syn::parse::{Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn::token::Bracket;
use std::string::String;
use std::collections::HashMap;
use usbd_hid_descriptors::*;
use byteorder::{ByteOrder, LittleEndian};
/// Attribute to generate a HID descriptor
///
/// You are expected to provide two inputs to this generator:
///
/// - A struct of named fields (which follows the `gen_hid_descriptor` attribute)
/// - A specially-formatted section describing the properties of the descriptor (this
/// section must be provided as arguments to the `gen_hid_descriptor()` attribute)
///
/// The generated HID descriptor will be available as a `&[u8]` by calling
/// `YourStructType::desc()`. `YourStructType` also now implements `SerializedDescriptor`.
///
/// As long as a descriptor describes only input or output types, and a report ID is
/// not used, the wire format for transmitting and receiving the data described by the
/// descriptor is simply the packed representation of the struct itself.
/// Where report ID's are used anywhere in the descriptor, you must prepend the relevant
/// report ID to the packed representation of the struct prior to transmission.
///
/// If inputs and outputs are mixed within the same HID descriptor, then only the struct
/// fields used in that direction can be present in a payload being transmitted in that
/// direction.
///
/// # Examples
///
/// - Custom 32-octet array, sent from device to host
///
/// ``` no_run
/// #[gen_hid_descriptor(
/// (collection = APPLICATION, usage_page = VENDOR_DEFINED_START, usage = 0x01) = {
/// buff=input;
/// }
/// )]
/// struct CustomInputReport {
/// buff: [u8; 32],
/// }
/// ```
///
/// - Custom input / output, sent in either direction
///
/// ``` no_run
/// #[gen_hid_descriptor(
/// (collection = APPLICATION, usage_page = VENDOR_DEFINED_START, usage = 0x01) = {
/// input_buffer=input;
/// output_buffer=output;
/// }
/// )]
/// struct CustomBidirectionalReport {
/// input_buffer: [u8; 32],
/// output_buffer: [u8; 32],
/// }
/// ```
///
/// Because both inputs and outputs are used, the data format when sending / receiving is the
/// 32 bytes in the relevant direction, **NOT** the full 64 bytes contained within the struct.
///
/// - Packed bitfields
///
/// ``` no_run
/// #[gen_hid_descriptor(
/// (report_id = 0x01,) = {
/// #[packed_bits 3] f1=input;
/// #[packed_bits 9] f2=input;
/// }
/// )]
/// struct CustomPackedBits {
/// f1: u8,
/// f2: u16,
/// }
/// ```
///
/// Because the `#[packed_bits]` sub-attribute was used, the two input fields specified are
/// interpreted as packed bits. As such, `f1` describes 3 boolean inputs, and `f2` describes
/// 9 boolean inputs. Padding constants are automatically generated.
///
/// The `#[packed_bits <num bits>]` feature is intended to be used for describing button presses.
///
/// - Customizing the settings on a report item
///
/// ``` no_run
/// #[gen_hid_descriptor(
/// (collection = APPLICATION, usage_page = VENDOR_DEFINED_START, usage = 0x01) = {
/// (usage_min = X, usage_max = Y) = {
/// #[item_settings data,variable,relative] x=input;
/// #[item_settings data,variable,relative] y=input;
/// };
/// }
/// )]
/// struct CustomCoords {
/// x: i8,
/// y: i8,
/// }
/// ```
///
/// The above example describes a report which sends X & Y co-ordinates. As indicated in
/// the `#[item_settings]` sub-attribute, the individual inputs are described as:
///
/// - Datapoints (`data`) - as opposed to constant
/// - Variable (`variable`) - as opposed to an array
/// - Relative (`relative`) - as opposed to absolute
///
/// # Supported struct types
///
/// The struct following the attribute must consist entirely of named fields, using
/// only types enumerated below, or fixed-size arrays of the types enumerated below.
///
/// - u8 / i8
/// - u16 / i16
/// - u32 / i32
///
/// `LOGICAL_MINIMUM` & `LOGICAL_MAXIMUM` are automatically set in the descriptor, based
/// on the type & whether `#[packed_bits]` was set on the field or not.
///
/// # Descriptor format
///
/// The parameters of the HID descriptor should be provided as arguments to the attribute.
/// The arguments should follow the basic form:
///
/// ```
/// #[gen_hid_descriptor(
/// <collection-spec> OR <item-spec>;
/// <collection-spec> OR <item-spec>;
/// ...
/// <collection-spec> OR <item-spec>
/// )]
/// ```
///
/// ## `collection-spec`:
///
/// ```
/// (parameter = <constant or 0xxxx>, ...) = {
/// <collection-spec> OR <item-spec>;
/// ...
/// }
/// ```
///
/// Note: All collection specs must end in a semicolon, except the top-level one.
///
/// Note: Parameters are a tuple, so make sure you have a trailing comma if you only have one
/// parameter.
///
/// The valid parameters are `collection`, `usage_page`, `usage`, `usage_min`, `usage_max`, and
/// `report_id`. These simply configure parameters that apply to contained items in the report.
/// Use of the `collection` parameter automatically creates a collection feature for all items
/// which are contained within it, and other parameters specified in the same collection-spec
/// apply to the collection, not directly to the elements of the collection (ie: defining a
/// collection + a usage generates a descriptor where the usage is set on the collection, not the
/// items contained within the collection).
///
/// ## `item-spec`:
///
/// ```
/// #[packed_bits <num_items>] #[item_settings <setting>,...] <fieldname>=input OR output;
/// ```
///
/// The two sub-attributes are both optional.
///
/// - `fieldname` refers to the name of a field within the struct. All fields must be specified.
/// - `input` fields are sent in reports from device to host. `output` fields are sent in reports
/// from host to device. This matches the terminology used in the USB & HID specifications.
/// - `packed_bits` configures the field as a set of `num_items` booleans rather than a number.
/// Any left over bits are automatically set as constants within the report. This is typically
/// used to implement buttons.
/// - `item_settings` describes settings on the input/output item, as enumerated in section
/// 6.2.2.5 of the [HID specification, version 1.11](https://www.usb.org/sites/default/files/documents/hid1_11.pdf).
/// By default, all items are configured as `(Data,Var,Abs,No Wrap,Linear,Preferred State,No Null Position)`.
#[proc_macro_attribute]
pub fn gen_hid_descriptor(args: TokenStream, input: TokenStream) -> TokenStream {
let decl = parse_macro_input!(input as ItemStruct);
let spec = parse_macro_input!(args as GroupSpec);
let ident = decl.ident.clone();
// Error if the struct doesn't name its fields.
match decl.clone().fields {
Fields::Named(_) => (),
_ => return parse::Error::new(ident.span(),"`#[gen_hid_descriptor]` type must name fields")
.to_compile_error()
.into(),
};
let descriptor = match compile(spec, &decl.fields){
Ok(d) => d,
Err(e) => return e.to_compile_error().into(),
};
// let descriptor_len = Index::from(size);
let out = quote! {
#[derive(Debug, Clone, Copy)]
#[repr(C, packed)]
#decl
impl SerializedDescriptor for #ident {
fn desc() -> &'static[u8] {
&#descriptor
}
}
};
TokenStream::from(out)
}
/// Spec describes an item within a HID report.
#[derive(Debug, Clone)]
enum Spec {
    // A single named report field together with its item settings.
    MainItem(ItemSpec),
    // A nested collection-spec containing further items.
    Collection(GroupSpec),
}
/// ItemSpec describes settings that apply to a single field.
#[derive(Debug, Clone, Default)]
struct ItemSpec {
    // Direction of the field (parsed from `name=input` / `name=output`).
    kind: MainItemKind,
    // Explicit `#[item_settings ...]` flags, if the field carried any.
    settings: Option<MainItemSetting>,
    // `#[packed_bits N]` count, if the field is a packed boolean bitfield.
    want_bits: Option<u16>,
}
/// GroupSpec keeps track of consecutive fields with shared global
/// parameters. Fields are configured based on the attributes
/// used in the procedural macro's invocation.
#[derive(Debug, Clone, Default)]
struct GroupSpec {
    // Field name -> spec. Nested groups are stored under synthetic "_", "__",
    // ... keys (see `add_nested_group`).
    fields: HashMap<String, Spec>,
    // Declaration order of keys in `fields` (HashMap iteration order is not
    // deterministic, but descriptor emission order must be).
    field_order: Vec<String>,
    // Global items
    report_id: Option<u32>,
    usage_page: Option<u32>,
    collection: Option<u32>,
    // Local items
    usage: Option<u32>,
    usage_min: Option<u32>,
    usage_max: Option<u32>,
}
impl GroupSpec {
fn set_item(&mut self, name: String, item_kind: MainItemKind, settings: Option<MainItemSetting>, bits: Option<u16>) {
if let Some(field) = self.fields.get_mut(&name) {
if let Spec::MainItem(field) = field {
field.kind = item_kind;
field.settings = settings;
field.want_bits = bits;
}
} else {
self.fields.insert(name.clone(), Spec::MainItem(ItemSpec{ kind: item_kind, settings: settings, want_bits: bits, ..Default::default() }));
self.field_order.push(name);
}
}
fn add_nested_group(&mut self, ng: GroupSpec) {
let name = (0..self.fields.len()+1).map(|_| "_").collect::<String>();
self.fields.insert(name.clone(), Spec::Collection(ng));
self.field_order.push(name);
}
fn get(&self, name: String) -> Option<&Spec> {
self.fields.get(&name)
}
fn try_set_attr(&mut self, input: ParseStream, name: String, val: u32) -> Result<()> {
match name.as_str() {
"report_id" => {
self.report_id = Some(val);
Ok(())
},
"usage_page" => {
self.usage_page = Some(val);
Ok(())
},
"collection" => {
self.collection = Some(val);
Ok(())
},
// Local items.
"usage" => {
self.usage = Some(val);
Ok(())
},
"usage_min" => {
self.usage_min = Some(val);
Ok(())
},
"usage_max" => {
self.usage_max = Some(val);
Ok(())
},
_ => Err(parse::Error::new(input.span(), format!("`#[gen_hid_descriptor]` unknown group spec key: {}", name.clone()))),
}
}
}
impl IntoIterator for GroupSpec {
    type Item = String;
    type IntoIter = std::vec::IntoIter<Self::Item>;
    // Iterates field names in declaration order (via `field_order`), not in
    // HashMap order, so descriptor emission is deterministic.
    fn into_iter(self) -> Self::IntoIter {
        self.field_order.into_iter()
    }
}
/// Resolves a named constant (e.g. `APPLICATION`, `MOUSE`) used as the value
/// of the group-spec key `key_name` into its numeric HID value.
///
/// Returns `None` when the (key, constant) pair is not recognized, which the
/// caller reports as a compile error.
fn try_resolve_constant(key_name: String, path: String) -> Option<u32> {
    match (key_name.as_str(), path.as_str()) {
        // Collection types, HID 1.11 section 6.2.2.6.
        ("collection", "PHYSICAL") => Some(0x0),
        ("collection", "APPLICATION") => Some(0x1),
        // BUG FIX: Logical is 0x02 in the HID spec; it was previously mapped
        // to 0x3, colliding with REPORT.
        ("collection", "LOGICAL") => Some(0x2),
        ("collection", "REPORT") => Some(0x3),
        ("collection", "NAMED_ARRAY") => Some(0x4),
        ("collection", "USAGE_SWITCH") => Some(0x5),
        ("collection", "USAGE_MODIFIER") => Some(0x06),
        ("usage_page", "UNDEFINED") => Some(0x00),
        ("usage_page", "GENERIC_DESKTOP") => Some(0x01),
        ("usage_page", "SIMULATION_CONTROLS") => Some(0x02),
        ("usage_page", "VR_CONTROLS") => Some(0x03),
        ("usage_page", "SPORT_CONTROLS") => Some(0x04),
        ("usage_page", "GAME_CONTROLS") => Some(0x05),
        ("usage_page", "GENERIC_DEVICE_CONTROLS") => Some(0x06),
        ("usage_page", "KEYBOARD") => Some(0x07),
        ("usage_page", "LEDS") => Some(0x08),
        ("usage_page", "BUTTON") => Some(0x09),
        ("usage_page", "ORDINAL") => Some(0x0A),
        ("usage_page", "TELEPHONY") => Some(0x0B),
        ("usage_page", "CONSUMER") => Some(0x0C),
        ("usage_page", "DIGITIZER") => Some(0x0D),
        ("usage_page", "ALPHANUMERIC_DISPLAY") => Some(0x14),
        ("usage_page", "BARCODE_SCANNER") => Some(0x8C),
        ("usage_page", "VENDOR_DEFINED_START") => Some(0xFF00),
        ("usage_page", "VENDOR_DEFINED_END") => Some(0xFFFF),
        // Desktop usage_page usage ID's.
        ("usage", "POINTER") => Some(0x01),
        ("usage", "MOUSE") => Some(0x02),
        ("usage", "JOYSTICK") => Some(0x04),
        ("usage", "GAMEPAD") => Some(0x05),
        ("usage", "KEYBOARD") => Some(0x06),
        ("usage", "KEYPAD") => Some(0x07),
        ("usage", "MULTI_AXIS_CONTROLLER") => Some(0x08),
        ("usage", "X") | ("usage_min", "X") | ("usage_max", "X") => Some(0x30),
        ("usage", "Y") | ("usage_min", "Y") | ("usage_max", "Y") => Some(0x31),
        ("usage", "Z") | ("usage_min", "Z") | ("usage_max", "Z") => Some(0x32),
        // LED usage_page usage ID's.
        ("usage", "NUM_LOCK") => Some(0x01),
        ("usage", "CAPS_LOCK") => Some(0x02),
        ("usage", "SCROLL_LOCK") => Some(0x03),
        ("usage", "POWER") => Some(0x06),
        ("usage", "SHIFT") => Some(0x07),
        ("usage", "MUTE") => Some(0x09),
        ("usage", "RING") => Some(0x18),
        // Button usage_page usage ID's.
        ("usage", "BUTTON_NONE") => Some(0x00),
        ("usage", "BUTTON_1") | ("usage_min", "BUTTON_1") => Some(0x01),
        ("usage", "BUTTON_2") => Some(0x02),
        ("usage", "BUTTON_3") | ("usage_max", "BUTTON_3") => Some(0x03),
        ("usage", "BUTTON_4") | ("usage_max", "BUTTON_4") => Some(0x04),
        ("usage", "BUTTON_5") => Some(0x05),
        ("usage", "BUTTON_6") => Some(0x06),
        ("usage", "BUTTON_7") => Some(0x07),
        ("usage", "BUTTON_8") | ("usage_max", "BUTTON_8") => Some(0x08),
        // Alpha-numeric display usage_page usage ID's.
        ("usage", "CLEAR_DISPLAY") => Some(0x25),
        ("usage", "DISPLAY_ENABLE") => Some(0x26),
        ("usage", "CHARACTER_REPORT") => Some(0x2B),
        ("usage", "CHARACTER_DATA") => Some(0x2C),
        (_, _) => None,
    }
}
fn parse_group_spec(input: ParseStream, field: Expr) -> Result<GroupSpec> {
let mut collection_attrs: Vec<(String, u32)> = vec![];
if let Expr::Assign(ExprAssign {left, .. }) = field.clone() {
if let Expr::Tuple(ExprTuple{elems, ..}) = *left {
for elem in elems {
let group_attr = maybe_parse_kv_lhs(elem.clone());
if group_attr.is_none() || group_attr.clone().unwrap().len() != 1 {
return Err(parse::Error::new(input.span(), "`#[gen_hid_descriptor]` group spec key can only have a single element"));
}
let group_attr = group_attr.unwrap()[0].clone();
let mut val: Option<u32> = None;
if let Expr::Assign(ExprAssign{right, .. }) = elem {
if let Expr::Lit(ExprLit{lit, ..}) = *right {
if let Lit::Int(lit) = lit {
if let Ok(num) = lit.base10_parse::<u32>() {
val = Some(num);
}
}
} else if let Expr::Path(ExprPath{path: Path{segments, ..}, ..}) = *right {
val = try_resolve_constant(group_attr.clone(), quote! { #segments }.to_string());
if val.is_none() {
return Err(parse::Error::new(input.span(), format!("`#[gen_hid_descriptor]` unrecognized constant: {}", quote! { #segments }.to_string())));
}
}
}
if val.is_none() {
return Err(parse::Error::new(input.span(), "`#[gen_hid_descriptor]` group spec attribute value must be a numeric literal or recognized constant"));
}
collection_attrs.push((group_attr, val.unwrap()));
}
}
}
if collection_attrs.len() == 0 {
return Err(parse::Error::new(input.span(), "`#[gen_hid_descriptor]` group spec lhs must contain value pairs"));
}
let mut out = GroupSpec{ ..Default::default() };
for (key, val) in collection_attrs {
if let Err(e) = out.try_set_attr(input, key, val) {
return Err(e);
}
}
// Match out the item kind on the right of the equals.
if let Expr::Assign(ExprAssign {right, .. }) = field {
if let Expr::Block(ExprBlock{block: Block{stmts, ..}, ..}) = *right {
for stmt in stmts {
if let Stmt::Expr(e) = stmt {
if let Err(e) = out.from_field(input, e) {
return Err(e);
}
} else if let Stmt::Semi(e, _) = stmt {
if let Err(e) = out.from_field(input, e) {
return Err(e);
}
} else {
return Err(parse::Error::new(input.span(), "`#[gen_hid_descriptor]` group spec body can only contain semicolon-separated fields"));
}
}
};
};
Ok(out)
}
/// maybe_parse_kv_lhs returns a vector of :: separated idents taken from the
/// path on the left-hand side of an assignment expression, or `None` when the
/// expression is not shaped like `path = ...`.
fn maybe_parse_kv_lhs(field: Expr) -> Option<Vec<String>> {
    if let Expr::Assign(ExprAssign { left, .. }) = field {
        if let Expr::Path(ExprPath { path: Path { segments, .. }, .. }) = *left {
            let idents: Vec<String> = segments.iter().map(|s| s.ident.to_string()).collect();
            return Some(idents);
        }
    }
    None
}
/// Decodes the sub-attributes attached to one item-spec field:
/// `#[packed_bits N]` and/or `#[item_settings a,b,...]`.
///
/// Returns `(settings, packed_bits)`. `settings` is `Some` only if an
/// `item_settings` attribute was present (even an empty one); unknown
/// attributes or settings only produce a build-time warning on stdout.
fn parse_item_attrs(attrs: Vec<syn::Attribute>) -> (Option<MainItemSetting>, Option<u16>) {
    let mut out: MainItemSetting = MainItemSetting{ 0: 0 };
    let mut had_settings: bool = false;
    let mut packed_bits: Option<u16> = None;
    for attr in attrs {
        match attr.path.segments[0].ident.to_string().as_str() {
            "packed_bits" => {
                // Take the first literal token that parses as u16 as the bit
                // count; anything else in the token stream is ignored.
                for tok in attr.tokens {
                    if let proc_macro2::TokenTree::Literal(lit) = tok {
                        if let Ok(num) = lit.to_string().parse::<u16>() {
                            packed_bits = Some(num);
                            break;
                        }
                    }
                }
                if packed_bits.is_none() {
                    println!("WARNING!: bitfield attribute specified but failed to read number of bits from token!");
                }
            },
            "item_settings" => {
                had_settings = true;
                // Each ident toggles one flag pair; commas arrive as
                // punctuation tokens and simply fall through the `if let`.
                for setting in attr.tokens {
                    if let proc_macro2::TokenTree::Ident(id) = setting {
                        match id.to_string().as_str() {
                            "constant" => out.set_constant(true),
                            "data" => out.set_constant(false),
                            "variable" => out.set_variable(true),
                            "array" => out.set_variable(false),
                            "relative" => out.set_relative(true),
                            "absolute" => out.set_relative(false),
                            "wrap" => out.set_wrap(true),
                            "no_wrap" => out.set_wrap(false),
                            "non_linear" => out.set_non_linear(true),
                            "linear" => out.set_non_linear(false),
                            "no_preferred" => out.set_no_preferred_state(true),
                            "preferred" => out.set_no_preferred_state(false),
                            "null" => out.set_has_null_state(true),
                            "not_null" => out.set_has_null_state(false),
                            "volatile" => out.set_volatile(true),
                            "not_volatile" => out.set_volatile(false),
                            p => println!("WARNING: Unknown item_settings parameter: {}", p),
                        }
                    }
                }
            },
            p => {
                println!("WARNING: Unknown item attribute: {}", p);
            },
        }
    }
    if had_settings {
        return (Some(out), packed_bits);
    }
    (None, packed_bits)
}
// maybe_parse_kv tries to parse an expression like 'blah=blah', returning the
// field name, the item kind on the right, and any decoded sub-attributes.
fn maybe_parse_kv(field: Expr) -> Option<(String, String, Option<MainItemSetting>, Option<u16>)> {
    // The left-hand side must be exactly one path segment (the field name).
    let lhs = maybe_parse_kv_lhs(field.clone())?;
    if lhs.len() != 1 {
        return None;
    }
    let name = lhs[0].clone();
    // Decode item settings attached to the assignment expression.
    let item_settings = match field.clone() {
        Expr::Assign(ExprAssign { attrs, .. }) => parse_item_attrs(attrs),
        _ => (None, None),
    };
    // The item kind sits on the right of the equals as a bare path
    // (e.g. `input` / `output`).
    let mut kind: Option<String> = None;
    if let Expr::Assign(ExprAssign { right, .. }) = field {
        if let Expr::Path(ExprPath { path: Path { segments, .. }, .. }) = *right {
            kind = Some(segments[0].ident.to_string());
        }
    }
    kind.map(|k| (name, k, item_settings.0, item_settings.1))
}
impl Parse for GroupSpec {
    /// Parses the comma-separated macro arguments into the top-level
    /// `GroupSpec`; an empty argument list is a compile error.
    fn parse(input: ParseStream) -> Result<Self> {
        let mut out = GroupSpec { ..Default::default() };
        let fields: Punctuated<Expr, Token![,]> = input.parse_terminated(Expr::parse)?;
        if fields.is_empty() {
            return Err(parse::Error::new(input.span(), "`#[gen_hid_descriptor]` expected information about the HID report"));
        }
        for field in fields {
            // Propagate the first parse failure directly.
            out.from_field(input, field)?;
        }
        Ok(out)
    }
}
impl GroupSpec {
    /// Incorporates a single parsed expression into this group.
    ///
    /// The expression is either a `name=kind` item (handled by
    /// `maybe_parse_kv`) or, failing that, a nested collection parsed by
    /// `parse_group_spec`.
    fn from_field(&mut self, input: ParseStream, field: Expr) -> Result<()> {
        // Try the `name=kind` form first.
        if let Some((name, item_kind, settings, bits)) = maybe_parse_kv(field.clone()) {
            self.set_item(name, item_kind.into(), settings, bits);
            return Ok(());
        }
        // Otherwise it must be a nested group. `?` replaces the manual
        // match-and-rethrow, and `field` can be moved here (it is no longer
        // used afterwards, so the former clone was redundant).
        let g = parse_group_spec(input, field)?;
        self.add_nested_group(g);
        Ok(())
    }
}
/// Wraps a raw descriptor byte in a `syn` byte-literal pattern so it can be
/// spliced into the generated slice pattern. (Dead commented-out debug
/// printing removed.)
fn byte_literal(lit: u8) -> Pat {
    Pat::Lit(
        syn::PatLit{
            attrs: vec![],
            expr: Box::new(
                Expr::Lit(
                    syn::ExprLit{
                        attrs: vec![],
                        lit: syn::Lit::Byte(syn::LitByte::new(lit, Span::call_site())),
                    }
                )
            ),
        }
    )
}
/// State carried while compiling a descriptor: caches the most recently
/// emitted Global items so redundant Global entries can be skipped
/// (HID Global items are stateful — they stay in effect until changed).
#[derive(Default)]
struct DescCompilation {
    // usage_page: u8,
    // usage: u8,
    // collection: Option<u8>,
    logical_minimum: Option<isize>, // last emitted Logical Minimum, if any
    logical_maximum: Option<isize>, // last emitted Logical Maximum, if any
    report_size: Option<u16>,       // last emitted Report Size, if any
    report_count: Option<u16>,      // last emitted Report Count, if any
}
impl DescCompilation {
    /// Emits one short item: the prefix byte followed by the value from
    /// `buf` (little-endian), using the narrowest width the value fits in.
    ///
    /// Note the last branch: the short-item size field stores 3 yet four
    /// data bytes are pushed — in the HID short-item encoding a bSize of 3
    /// denotes a 4-byte payload (there is no 3-byte form).
    fn emit(&self, elems: &mut Punctuated<Pat, syn::token::Comma>, prefix: &mut ItemPrefix, buf: [u8; 4], signed: bool) {
        // A value shrinks to N bytes only if the dropped high bytes are all
        // zero AND, for signed items, the top retained byte is not 0xFF —
        // e.g. 255 stored as one signed byte would decode as -1, so it must
        // be widened.
        if buf[1..4] == [0,0,0] && !(signed && buf[0] == 255) {
            prefix.set_byte_count(1);
            elems.push(byte_literal(prefix.0));
            elems.push(byte_literal(buf[0]));
        }
        else if buf[2..4] == [0,0] && !(signed && buf[1] == 255) {
            prefix.set_byte_count(2);
            elems.push(byte_literal(prefix.0));
            elems.push(byte_literal(buf[0]));
            elems.push(byte_literal(buf[1]));
        }
        else {
            prefix.set_byte_count(3);
            elems.push(byte_literal(prefix.0));
            elems.push(byte_literal(buf[0]));
            elems.push(byte_literal(buf[1]));
            elems.push(byte_literal(buf[2]));
            elems.push(byte_literal(buf[3]));
        }
    }
    /// Emits an item of the given type/tag carrying `num` as its payload.
    ///
    /// `num` is truncated to 32 bits before encoding (descriptor data
    /// payloads are at most four bytes for short items).
    fn emit_item(&self, elems: &mut Punctuated<Pat, syn::token::Comma>, typ: u8, kind: u8, num: isize, signed: bool) {
        let mut prefix = ItemPrefix(0);
        prefix.set_tag(kind);
        prefix.set_type(typ);
        // TODO: Support long tags.
        // Section 6.2.2.4: An Input item could have a data size of zero (0)
        // bytes. In this case the value of each data bit for the item can be
        // assumed to be zero. This is functionally identical to using a item
        // tag that specifies a 4-byte data item followed by four zero bytes.
        let allow_short = typ == ItemType::Main.into() && kind == MainItemKind::Input.into();
        if allow_short && num == 0 {
            prefix.set_byte_count(0);
            elems.push(byte_literal(prefix.0));
            return;
        }
        let mut buf = [0; 4];
        LittleEndian::write_i32(&mut buf, num as i32);
        self.emit(elems, &mut prefix, buf, signed);
    }
    /// Emits Global items (logical min/max, report size/count) for `item`,
    /// but only those whose value differs from the cached last-emitted
    /// value — Global items persist in the HID parser state, so repeats
    /// would only bloat the descriptor.
    fn handle_globals(&mut self, elems: &mut Punctuated<Pat, syn::token::Comma>, item: MainItem) {
        if self.logical_minimum.is_none() || self.logical_minimum.clone().unwrap() != item.logical_minimum {
            self.emit_item(elems, ItemType::Global.into(), GlobalItemKind::LogicalMin.into(), item.logical_minimum as isize, true);
            self.logical_minimum = Some(item.logical_minimum);
        }
        if self.logical_maximum.is_none() || self.logical_maximum.clone().unwrap() != item.logical_maximum {
            self.emit_item(elems, ItemType::Global.into(), GlobalItemKind::LogicalMax.into(), item.logical_maximum as isize, true);
            self.logical_maximum = Some(item.logical_maximum);
        }
        if self.report_size.is_none() || self.report_size.clone().unwrap() != item.report_size {
            self.emit_item(elems, ItemType::Global.into(), GlobalItemKind::ReportSize.into(), item.report_size as isize, true);
            self.report_size = Some(item.report_size);
        }
        if self.report_count.is_none() || self.report_count.clone().unwrap() != item.report_count {
            self.emit_item(elems, ItemType::Global.into(), GlobalItemKind::ReportCount.into(), item.report_count as isize, true);
            self.report_count = Some(item.report_count);
        }
    }
    /// Emits the Main item for one struct field: any changed Global items
    /// first, then the Main item itself, then (optionally) a constant item
    /// covering unused padding bits.
    fn emit_field(&mut self, elems: &mut Punctuated<Pat, syn::token::Comma>, i: &ItemSpec, item: MainItem) {
        self.handle_globals(elems, item.clone());
        // The Main item's payload is its settings bitfield (first byte of
        // MainItemSetting).
        let item_data = match &i.settings {
            Some(s) => s.0 as isize,
            None => 0x02, // 0x02 = Data,Var,Abs
        };
        self.emit_item(elems, ItemType::Main.into(), item.kind.into(), item_data, true);
        if let Some(padding) = item.padding_bits {
            // Make another item of type constant to carry the remaining bits.
            let padding = MainItem{ report_size: 1, report_count: padding, ..item };
            self.handle_globals(elems, padding.clone());
            let mut const_settings = MainItemSetting{ 0: 0};
            const_settings.set_constant(true);
            const_settings.set_variable(true);
            self.emit_item(elems, ItemType::Main.into(), item.kind.into(), const_settings.0 as isize, true);
        }
    }
    /// Recursively emits a group: its usage page / usage / usage range /
    /// report id (each only if present), an optional Collection open, all
    /// contained items and nested groups in declaration order, and finally
    /// the End Collection byte (0xc0) if a collection was opened.
    fn emit_group(&mut self, elems: &mut Punctuated<Pat, syn::token::Comma>, spec: &GroupSpec, fields: &Fields) -> Result<()> {
        if let Some(usage_page) = spec.usage_page {
            self.emit_item(elems, ItemType::Global.into(), GlobalItemKind::UsagePage.into(), usage_page as isize, false);
        }
        if let Some(usage) = spec.usage {
            self.emit_item(elems, ItemType::Local.into(), LocalItemKind::Usage.into(), usage as isize, false);
        }
        if let Some(usage_min) = spec.usage_min {
            self.emit_item(elems, ItemType::Local.into(), LocalItemKind::UsageMin.into(), usage_min as isize, false);
        }
        if let Some(usage_max) = spec.usage_max {
            self.emit_item(elems, ItemType::Local.into(), LocalItemKind::UsageMax.into(), usage_max as isize, false);
        }
        if let Some(report_id) = spec.report_id {
            self.emit_item(elems, ItemType::Global.into(), GlobalItemKind::ReportID.into(), report_id as isize, false);
        }
        if let Some(collection) = spec.collection {
            self.emit_item(elems, ItemType::Main.into(), MainItemKind::Collection.into(), collection as isize, false);
        }
        // Iterating a clone of the spec yields the contained names —
        // presumably GroupSpec's IntoIterator preserves declaration order;
        // confirm against the GroupSpec definition.
        for name in spec.clone() {
            let f = spec.get(name.clone()).unwrap();
            match f {
                Spec::MainItem(i) => {
                    let d = field_decl(fields, name);
                    match analyze_field(d.clone(), d.ty, i) {
                        Ok(item) => self.emit_field(elems, i, item.descriptor_item),
                        Err(e) => return Err(e),
                    }
                },
                Spec::Collection(g) => if let Err(e) = self.emit_group(elems, g, fields) {
                    return Err(e);
                },
            }
        }
        if let Some(_) = spec.collection { // Close collection.
            elems.push(byte_literal(0xc0));
        }
        Ok(())
    }
}
/// Looks up the struct field with the given name.
///
/// # Panics
///
/// Panics on an internal invariant violation: the spec should only reference
/// fields that exist on the annotated struct.
fn field_decl(fields: &Fields, name: String) -> Field {
    for field in fields {
        let ident = field.ident.clone().unwrap().to_string();
        if ident == name {
            return field.clone();
        }
    }
    // `panic!` takes format arguments directly; `panic!(format!(...))` is a
    // non-literal panic payload (deprecated in 2018, a hard error in the
    // 2021 edition).
    panic!("internal error: could not find field {} which should exist", name)
}
fn compile(spec: GroupSpec, fields: &Fields) -> Result<PatSlice> {
let mut compiler = DescCompilation{ ..Default::default() };
let mut elems = Punctuated::new();
if let Err(e) = compiler.emit_group(&mut elems, &spec, fields) {
return Err(e);
};
Ok(PatSlice{
attrs: vec![],
elems: elems,
bracket_token: Bracket{span: Span::call_site()},
})
}
// MainItem describes all the mandatory data points of a Main item.
#[derive(Debug, Default, Clone)]
struct MainItem {
    kind: MainItemKind,        // which kind of Main item this is (e.g. Input, Collection)
    logical_minimum: isize,    // smallest value the field may report
    logical_maximum: isize,    // largest value the field may report
    report_count: u16,         // number of fields of this shape in the report
    report_size: u16,          // width of each field, in bits
    padding_bits: Option<u16>, // trailing constant bits needed to fill the native type, if any
}
// Pairs a struct field's identifier with the descriptor parameters computed
// from its type by `analyze_field`.
#[derive(Debug)]
struct ReportUnaryField {
    bit_width: usize,          // native width of the field's type, in bits
    descriptor_item: MainItem, // descriptor parameters derived from the type
    ident: Ident,              // the struct field's identifier
}
/// Computes the descriptor parameters for a single struct field from its
/// type.
///
/// Supported types are the fixed-width integers `u8`–`u32` / `i8`–`i32`
/// and fixed-length arrays of those (an array multiplies the report count
/// by its length). When the spec asks for `want_bits`, the field is
/// re-interpreted as that many one-bit entries, with the remainder of the
/// native width recorded as constant padding.
///
/// # Errors
///
/// Returns a parse error for unsupported types, multi-segment type paths,
/// and arrays without a positive literal length.
fn analyze_field(field: Field, ft: Type, item: &ItemSpec) -> Result<ReportUnaryField> {
    if let Type::Path(p) = ft {
        if p.path.segments.len() != 1 {
            return Err(
                parse::Error::new(field.ident.unwrap().span(),"`#[gen_hid_descriptor]` internal error when unwrapping type")
            );
        }
        let type_ident = p.path.segments[0].ident.clone();
        let mut output = match type_ident.to_string().as_str() {
            "u8" => unsigned_unary_item(field.ident.clone().unwrap(), item.kind, 8),
            "u16" => unsigned_unary_item(field.ident.clone().unwrap(), item.kind, 16),
            "u32" => unsigned_unary_item(field.ident.clone().unwrap(), item.kind, 32),
            "i8" => signed_unary_item(field.ident.clone().unwrap(), item.kind, 8),
            "i16" => signed_unary_item(field.ident.clone().unwrap(), item.kind, 16),
            "i32" => signed_unary_item(field.ident.clone().unwrap(), item.kind, 32),
            _ => return Err(
                parse::Error::new(type_ident.span(),"`#[gen_hid_descriptor]` type not supported")
            ),
        };
        if let Some(want_bits) = item.want_bits {
            // Re-shape the field as `want_bits` one-bit entries.
            output.descriptor_item.logical_minimum = 0;
            output.descriptor_item.logical_maximum = 1;
            output.descriptor_item.report_count = want_bits;
            output.descriptor_item.report_size = 1;
            // Pad out the remainder of the native type's width.
            let remaining_bits = output.bit_width as u16 - want_bits;
            if remaining_bits > 0 {
                output.descriptor_item.padding_bits = Some(remaining_bits);
            }
        };
        Ok(output)
    } else if let Type::Array(a) = ft {
        // The array length must be a positive integer literal.
        let mut size: usize = 0;
        if let Expr::Lit(ExprLit { lit: Lit::Int(lit), .. }) = a.len {
            if let Ok(num) = lit.base10_parse::<usize>() {
                size = num;
            }
        }
        if size == 0 {
            return Err(
                parse::Error::new(field.ident.unwrap().span(),"`#[gen_hid_descriptor]` array has invalid length")
            );
        }
        // Recurse for the element type, then scale the report count.
        // `?` replaces the former `match { Err(e) => Err(e), Ok(..) => .. }`.
        let mut f = analyze_field(field, *a.elem, item)?;
        f.descriptor_item.report_count *= size as u16;
        Ok(f)
    } else {
        Err(
            parse::Error::new(field.ident.unwrap().span(),"`#[gen_hid_descriptor]` cannot handle field type")
        )
    }
}
/// Builds the descriptor entry for a signed `bit_width`-bit scalar field.
///
/// The logical range is symmetric: ±(2^(bit_width−1) − 1).
fn signed_unary_item(id: Ident, kind: MainItemKind, bit_width: usize) -> ReportUnaryField {
    let bound = 2u32.pow((bit_width - 1) as u32) as isize - 1;
    ReportUnaryField {
        ident: id,
        bit_width,
        descriptor_item: MainItem {
            kind,
            logical_minimum: -bound,
            logical_maximum: bound,
            report_count: 1,
            report_size: bit_width as u16,
            padding_bits: None,
        },
    }
}
/// Builds the descriptor entry for an unsigned `bit_width`-bit scalar field.
///
/// The logical range is 0..=2^bit_width − 1.
fn unsigned_unary_item(id: Ident, kind: MainItemKind, bit_width: usize) -> ReportUnaryField {
    // Compute 2^bit_width - 1 in 64-bit arithmetic: the previous
    // `2u32.pow(bit_width as u32)` overflows for bit_width == 32 (the u32
    // case at the call site) — panicking in debug builds and wrapping to 0
    // in release, which made logical_maximum -1.
    let max = ((1u64 << bit_width) - 1) as isize;
    ReportUnaryField {
        ident: id,
        bit_width,
        descriptor_item: MainItem {
            kind,
            logical_minimum: 0,
            logical_maximum: max,
            report_count: 1,
            report_size: bit_width as u16,
            padding_bits: None,
        },
    }
}
|
use core::{cmp, convert, marker, ops};
/// A value that a register can store.
///
/// All registers are either `u8` or `u16` (see the two impls further down).
/// The bound list collects every bit-manipulation and comparison operator
/// the `Register` helpers need, plus `From<u8>` so small literal masks can
/// be lifted into `Self` regardless of the register's width.
pub trait RegisterValue : Copy + Clone +
    ops::BitAnd<Output=Self> +
    ops::BitAndAssign +
    ops::BitOr<Output=Self> +
    ops::BitOrAssign +
    ops::BitXor<Output=Self> +
    ops::BitXorAssign +
    ops::Not<Output=Self> +
    cmp::PartialEq + cmp::Eq +
    cmp::PartialOrd + cmp::Ord +
    convert::From<u8> {
}
/// A memory-mapped hardware register.
///
/// All accesses go through volatile reads/writes of `ADDRESS`, so the
/// compiler cannot elide or reorder them. Note that the read-modify-write
/// helpers (`set`, `unset`, `toggle`) are NOT atomic: an interrupt between
/// the read and the write can be lost. NOTE(review): callers presumably
/// guard with interrupt masking where that matters — confirm.
pub trait Register : Sized {
    /// The type that can represent the value of the register.
    type T: RegisterValue;
    /// The type representing a set of bits that may be manipulated
    /// within the register.
    // NOTE(review): associated type defaults are an unstable feature, so
    // this presumably builds on a nightly toolchain — confirm.
    type RegisterBits = RegisterBits<Self>;
    /// The address of the register.
    const ADDRESS: *mut Self::T;
    /// Writes a value to the register.
    #[inline(always)]
    fn write<V>(value: V) where V: Into<Self::T> {
        unsafe {
            // SAFETY: relies on ADDRESS being a valid, aligned register
            // address for this type — an implementor-upheld contract.
            core::ptr::write_volatile(Self::ADDRESS, value.into());
        }
    }
    /// Reads the value of the register.
    #[inline(always)]
    fn read() -> Self::T {
        // SAFETY: same ADDRESS validity contract as `write`.
        unsafe { core::ptr::read_volatile(Self::ADDRESS) }
    }
    /// Sets a set of bits to `1` in the register.
    fn set(bits: RegisterBits<Self>) {
        Self::set_mask_raw(bits.mask);
    }
    /// Sets a bitmask in a register.
    ///
    /// This is equivalent to `r |= mask`.
    #[inline(always)]
    fn set_mask_raw(mask: Self::T) {
        unsafe {
            // SAFETY: volatile read-modify-write of ADDRESS; not atomic.
            core::ptr::write_volatile(Self::ADDRESS, core::ptr::read_volatile(Self::ADDRESS) | mask);
        }
    }
    /// Unsets a set of bits in the register.
    ///
    /// All of the bits will be set to `0`.
    fn unset(bits: RegisterBits<Self>) {
        Self::unset_mask_raw(bits.mask);
    }
    /// Clears a bitmask from a register.
    ///
    /// This is equivalent to `r &= !mask`.
    #[inline(always)]
    fn unset_mask_raw(mask: Self::T) {
        unsafe {
            // SAFETY: volatile read-modify-write of ADDRESS; not atomic.
            core::ptr::write_volatile(Self::ADDRESS, core::ptr::read_volatile(Self::ADDRESS) & !mask)
        }
    }
    /// Toggles a set of bits within the register.
    ///
    /// All specified bits which were previously `0` will become
    /// `1`, and all specified bits that were previous `1` will
    /// become `0`.
    fn toggle(mask: RegisterBits<Self>) {
        Self::toggle_raw(mask.mask);
    }
    /// Toggles a mask in the register.
    ///
    /// This is equivalent to `r ^= mask`.
    #[inline(always)]
    fn toggle_raw(mask: Self::T) {
        unsafe {
            // SAFETY: volatile read-modify-write of ADDRESS; not atomic.
            core::ptr::write_volatile(Self::ADDRESS, core::ptr::read_volatile(Self::ADDRESS) ^ mask)
        }
    }
    /// Checks if a set of bits are enabled.
    ///
    /// All specifed bits must be set for this function
    /// to return `true`.
    fn is_set(bits: RegisterBits<Self>) -> bool {
        Self::is_mask_set_raw(bits.mask)
    }
    /// Checks if a mask is set in the register.
    ///
    /// This is equivalent to `(r & mask) == mask`.
    #[inline(always)]
    fn is_mask_set_raw(mask: Self::T) -> bool {
        unsafe {
            // SAFETY: volatile read of ADDRESS.
            (core::ptr::read_volatile(Self::ADDRESS) & mask) == mask
        }
    }
    /// Checks if a set of bits are not set.
    ///
    /// All specified bits must be `0` for this
    /// function to return `true`.
    fn is_clear(mask: RegisterBits<Self>) -> bool {
        Self::is_clear_raw(mask.mask)
    }
    /// Checks if a mask is clear in the register.
    ///
    /// This is equivalent to `(r & mask) == 0`.
    #[inline(always)]
    fn is_clear_raw(mask: Self::T) -> bool {
        unsafe {
            // SAFETY: volatile read of ADDRESS.
            (core::ptr::read_volatile(Self::ADDRESS) & mask) == Self::T::from(0)
        }
    }
    /// Waits until a set of bits are set in the register.
    ///
    /// This function will block until all bits that are set in
    /// the mask are also set in the register.
    fn wait_until_set(bits: RegisterBits<Self>) {
        Self::wait_until_mask_set_raw(bits.mask);
    }
    /// Waits until a bit mask is set in the register.
    ///
    /// This function will block until all bits that are set in
    /// the mask are also set in the register.
    /// (Busy-wait: spins on volatile reads without sleeping.)
    #[inline(always)]
    fn wait_until_mask_set_raw(mask: Self::T) {
        wait_until(|| Self::is_mask_set_raw(mask))
    }
}
/// Represents a set of bits within a specific register.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RegisterBits<R: Register> {
    /// The raw bitmask.
    mask: R::T,
    // Ties the bit set to its register type without storing an `R`.
    _phantom: marker::PhantomData<R>,
}
impl<R: Register> RegisterBits<R> {
    /// Wraps a raw bitmask as a typed set of register bits.
    pub const fn new(mask: R::T) -> Self {
        Self { mask, _phantom: marker::PhantomData }
    }

    /// An empty bit set (no bits selected).
    pub fn zero() -> Self {
        Self::new(R::T::from(0u8))
    }
}
/// Union of two bit sets.
impl<R: Register> ops::BitOr for RegisterBits<R> {
    type Output = Self;

    fn bitor(self, other: Self) -> Self {
        Self::new(self.mask | other.mask)
    }
}
/// In-place union of two bit sets.
impl<R: Register> ops::BitOrAssign for RegisterBits<R> {
    fn bitor_assign(&mut self, other: Self) {
        self.mask |= other.mask;
    }
}
/// Intersection of two bit sets.
impl<R: Register> ops::BitAnd for RegisterBits<R> {
    type Output = Self;

    fn bitand(self, other: Self) -> Self {
        Self::new(self.mask & other.mask)
    }
}
/// In-place intersection of two bit sets.
impl<R: Register> ops::BitAndAssign for RegisterBits<R> {
    fn bitand_assign(&mut self, other: Self) {
        self.mask &= other.mask;
    }
}
/// Complement of a bit set (every bit flipped).
impl<R: Register> ops::Not for RegisterBits<R> {
    type Output = Self;

    fn not(self) -> Self {
        Self::new(!self.mask)
    }
}
/// Extracts the raw mask from a bit set over an 8-bit register.
impl<R: Register<T = u8>> From<RegisterBits<R>> for u8 {
    fn from(bits: RegisterBits<R>) -> u8 {
        bits.mask
    }
}
/// Extracts the raw mask from a bit set over a 16-bit register.
impl<R: Register<T = u16>> From<RegisterBits<R>> for u16 {
    fn from(bits: RegisterBits<R>) -> u16 {
        bits.mask
    }
}
// Marker impls: registers on this platform are either 8- or 16-bit wide.
impl RegisterValue for u8 { }
impl RegisterValue for u16 { }
/// Busy-waits (spins) until `f` returns `true`.
///
/// `f` is re-evaluated on every iteration; there is no sleeping or
/// yielding, so this should only be used for conditions expected to turn
/// true quickly (e.g. a hardware flag).
#[inline(always)]
fn wait_until<F>(mut f: F)
    where F: FnMut() -> bool {
    while !f() {}
}
|
//! CBOR serialisation tooling
use std::io::Write;
use error::Error;
use len::{Len, LenSz, StringLenSz, Sz};
use result::Result;
use types::{Special, Type};
/// Objects that can append their CBOR encoding to a [`Serializer`].
///
/// Implementations write themselves into `serializer` and hand the
/// serializer back, so calls can be chained.
pub trait Serialize {
    fn serialize<'a, W: Write + Sized>(
        &self,
        serializer: &'a mut Serializer<W>,
    ) -> Result<&'a mut Serializer<W>>;
}
/// Serializing a reference delegates to the referenced value.
impl<'a, T: Serialize> Serialize for &'a T {
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        se.serialize(*self)
    }
}
/// `u64` maps directly to a CBOR unsigned integer.
impl Serialize for u64 {
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        se.write_unsigned_integer(*self)
    }
}
/// `u32` widens losslessly to `u64` before encoding.
impl Serialize for u32 {
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        se.write_unsigned_integer(u64::from(*self))
    }
}
/// `u16` widens losslessly to `u64` before encoding.
impl Serialize for u16 {
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        se.write_unsigned_integer(u64::from(*self))
    }
}
/// `u8` widens losslessly to `u64` before encoding.
impl Serialize for u8 {
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        se.write_unsigned_integer(u64::from(*self))
    }
}
/// `bool` encodes as the CBOR `true`/`false` special value.
impl Serialize for bool {
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        se.write_special(Special::Bool(*self))
    }
}
/// `f32` widens losslessly to `f64` and encodes as a CBOR float.
impl Serialize for f32 {
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        se.write_special(Special::Float(f64::from(*self)))
    }
}
/// `f64` encodes directly as a CBOR float.
impl Serialize for f64 {
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        se.write_special(Special::Float(*self))
    }
}
/// `String` encodes as a CBOR text string.
impl Serialize for String {
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        se.write_text(self)
    }
}
/// A byte slice encodes as a CBOR byte string.
impl<'a> Serialize for &'a [u8] {
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        se.write_bytes(self)
    }
}
/// A pair encodes as a fixed-length CBOR array of two elements.
impl<'a, A, B> Serialize for (&'a A, &'a B)
where
    A: Serialize,
    B: Serialize,
{
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        let (first, second) = *self;
        let se = se.write_array(Len::Len(2))?;
        let se = se.serialize(first)?;
        se.serialize(second)
    }
}
/// A triple encodes as a fixed-length CBOR array of three elements.
impl<'a, A, B, C> Serialize for (&'a A, &'a B, &'a C)
where
    A: Serialize,
    B: Serialize,
    C: Serialize,
{
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        let (first, second, third) = *self;
        let se = se.write_array(Len::Len(3))?;
        let se = se.serialize(first)?;
        let se = se.serialize(second)?;
        se.serialize(third)
    }
}
/// `Option<T>` encodes as an array of zero (`None`) or one (`Some`)
/// elements.
impl<T> Serialize for Option<T>
where
    T: Serialize,
{
    fn serialize<'se, W: Write + Sized>(
        &self,
        se: &'se mut Serializer<W>,
    ) -> Result<&'se mut Serializer<W>> {
        match self {
            Some(value) => se.write_array(Len::Len(1))?.serialize(value),
            None => se.write_array(Len::Len(0)),
        }
    }
}
/// helper function to serialise a map of fixed size.
///
/// i.e. the number of key/value pairs must be known ahead of time
/// (the iterator must be `ExactSizeIterator`).
pub fn serialize_fixed_map<'a, C, K, V, W>(
    data: C,
    serializer: &mut Serializer<W>,
) -> Result<&mut Serializer<W>>
where
    K: 'a + Serialize,
    V: 'a + Serialize,
    C: Iterator<Item = (&'a K, &'a V)> + ExactSizeIterator,
    W: Write + Sized,
{
    serializer.write_map(Len::Len(data.len() as u64))?;
    for (key, value) in data {
        Serialize::serialize(key, serializer)?;
        Serialize::serialize(value, serializer)?;
    }
    Ok(serializer)
}
/// helper function to serialise a collection of T as a fixed number of
/// elements.
///
/// i.e. the element count must be known ahead of time (the iterator must
/// be `ExactSizeIterator`).
pub fn serialize_fixed_array<'a, C, T, W>(
    data: C,
    serializer: &mut Serializer<W>,
) -> Result<&mut Serializer<W>>
where
    T: 'a + Serialize,
    C: Iterator<Item = &'a T> + ExactSizeIterator,
    W: Write + Sized,
{
    serializer.write_array(Len::Len(data.len() as u64))?;
    for item in data {
        Serialize::serialize(item, serializer)?;
    }
    Ok(serializer)
}
/// helper function to serialise a map with an indefinite number of
/// key/value pairs; the stream is terminated with a CBOR `Break`.
pub fn serialize_indefinite_map<'a, C, K, V, W>(
    data: C,
    serializer: &mut Serializer<W>,
) -> Result<&mut Serializer<W>>
where
    K: 'a + Serialize,
    V: 'a + Serialize,
    C: Iterator<Item = (&'a K, &'a V)>,
    W: Write + Sized,
{
    serializer.write_map(Len::Indefinite)?;
    for (key, value) in data {
        Serialize::serialize(key, serializer)?;
        Serialize::serialize(value, serializer)?;
    }
    serializer.write_special(Special::Break)
}
/// helper function to serialise a collection of T as an indefinite number
/// of elements; the stream is terminated with a CBOR `Break`.
pub fn serialize_indefinite_array<'a, C, T, W>(
    data: C,
    serializer: &mut Serializer<W>,
) -> Result<&mut Serializer<W>>
where
    T: 'a + Serialize,
    C: Iterator<Item = &'a T>,
    W: Write + Sized,
{
    serializer.write_array(Len::Indefinite)?;
    for item in data {
        Serialize::serialize(item, serializer)?;
    }
    serializer.write_special(Special::Break)
}
/// helper function to serialise cbor in cbor
///
/// The existence of this function is questionable as it does not make sense, from the
/// CBOR protocol point of view, to encode cbor inside cbor. However it is the way
/// the haskell base code is serialising some objects so we need to comply here too
///
/// This function is a more efficient version of:
///
/// ```
/// # use cbor_event::se::{Serializer, Serialize};
/// let mut serializer = Serializer::new_vec();
/// let mut se = Serializer::new_vec();
/// 0u32.serialize(&mut se).unwrap();
/// serializer.write_bytes(&se.finalize()).unwrap();
/// ```
///
pub fn serialize_cbor_in_cbor<T, W>(
    data: T,
    serializer: &mut Serializer<W>,
) -> Result<&mut Serializer<W>>
where
    T: Serialize,
    W: Write + Sized,
{
    // Encode into a scratch buffer first, then emit that buffer as a
    // single CBOR byte string.
    let mut inner = Serializer::new_vec();
    data.serialize(&mut inner)?;
    serializer.write_bytes(&inner.finalize())
}
// Default initial buffer capacity for `Serializer::new_vec`.
//
// `Vec` grows by doubling; starting from an empty buffer would trigger a
// run of small reallocations at the beginning of every serialisation, so a
// modest chunk is reserved up front instead.
const DEFAULT_CAPACITY: usize = 512;
/// simple CBOR serializer into any
/// [`std::io::Write`](https://doc.rust-lang.org/std/io/trait.Write.html).
///
/// The wrapped writer is the single tuple field; all `write_*` helpers
/// append encoded bytes to it, and `finalize` returns it.
#[derive(Debug)]
pub struct Serializer<W: Write + Sized>(W);
impl Serializer<Vec<u8>> {
    /// Creates a serializer backed by a freshly allocated byte vector
    /// (pre-sized to `DEFAULT_CAPACITY` to avoid early reallocations).
    ///
    /// ```
    /// use cbor_event::se::{Serializer};
    ///
    /// let serializer = Serializer::new_vec();
    /// ```
    #[inline]
    pub fn new_vec() -> Self {
        Self::new(Vec::with_capacity(DEFAULT_CAPACITY))
    }
}
impl<W: Write + Sized> Serializer<W> {
/// extend the serializer with the given bytes
///
/// This performs no CBOR encoding of its own — it is a way to splice in
/// data that is already CBOR encoded, or any raw bytes your protocol
/// requires.
pub fn write_raw_bytes(&mut self, bytes: &[u8]) -> Result<&mut Self> {
    self.0.write_all(bytes)?;
    Ok(self)
}
#[inline]
pub fn new(w: W) -> Self {
Serializer(w)
}
/// finalize the serializer, returning the underlying writer (and thus the
/// serialized bytes when backed by a `Vec<u8>`)
///
/// ```
/// use cbor_event::se::{Serializer};
///
/// let serializer = Serializer::new_vec();
///
/// let bytes = serializer.finalize();
///
/// # assert!(bytes.is_empty());
/// ```
#[inline]
pub fn finalize(self) -> W {
    self.0
}
/// Writes a single raw byte to the underlying writer.
#[inline]
fn write_u8(&mut self, value: u8) -> Result<&mut Self> {
    self.0.write_all(&[value])?;
    Ok(self)
}
/// Writes a `u16` in big-endian (network) byte order, as CBOR requires.
#[inline]
fn write_u16(&mut self, value: u16) -> Result<&mut Self> {
    // `to_be_bytes` replaces the manual shift-and-mask encoding; it is
    // byte-for-byte identical and consistent with `write_f64`, which
    // already uses it.
    self.0.write_all(&value.to_be_bytes())?;
    Ok(self)
}
/// Writes a `u32` in big-endian (network) byte order, as CBOR requires.
#[inline]
fn write_u32(&mut self, value: u32) -> Result<&mut Self> {
    // `to_be_bytes` replaces the manual shift-and-mask encoding; it is
    // byte-for-byte identical and consistent with `write_f64`, which
    // already uses it.
    self.0.write_all(&value.to_be_bytes())?;
    Ok(self)
}
/// Writes a `u64` in big-endian (network) byte order, as CBOR requires.
#[inline]
fn write_u64(&mut self, value: u64) -> Result<&mut Self> {
    // `to_be_bytes` replaces the manual shift-and-mask encoding; it is
    // byte-for-byte identical and consistent with `write_f64`, which
    // already uses it.
    self.0.write_all(&value.to_be_bytes())?;
    Ok(self)
}
/// Writes an `f64` as its eight big-endian IEEE-754 bytes.
#[inline]
fn write_f64(&mut self, value: f64) -> Result<&mut Self> {
    let bytes = value.to_be_bytes();
    self.0.write_all(&bytes)?;
    Ok(self)
}
/// Writes a CBOR type with the extra `len` information
///
/// if `sz` is passed in, it will use that length/data encoding
/// otherwise the minimum size (e.g. canonical) encoding will be used
///
/// The initial byte combines the major type with an "additional info"
/// value: the length inline (small values), or a marker saying the length
/// follows as a 1/2/4/8-byte big-endian integer.
#[inline]
fn write_type_definite(
    &mut self,
    cbor_type: Type,
    len: u64,
    sz: Option<Sz>,
) -> Result<&mut Self> {
    let extra_sz = match sz {
        // No requested size: pick the smallest (canonical) encoding.
        None => Sz::canonical(len),
        // A requested size must actually be able to hold `len`.
        // (Inline tops out at super::MAX_INLINE_ENCODING — presumably 23
        // per the CBOR spec; confirm against that constant's definition.)
        Some(sz) => {
            let fits = match sz {
                Sz::Inline => len <= super::MAX_INLINE_ENCODING,
                Sz::One => len < 0x1_00,
                Sz::Two => len < 0x1_00_00,
                Sz::Four => len < 0x1_00_00_00_00,
                Sz::Eight => true,
            };
            if !fits {
                return Err(Error::InvalidLenPassed(sz));
            }
            sz
        }
    };
    // Emit the initial byte, then the length payload for non-inline sizes.
    match extra_sz {
        Sz::Inline => self.write_u8(cbor_type.to_byte(len as u8)),
        Sz::One => self
            .write_u8(cbor_type.to_byte(super::CBOR_PAYLOAD_LENGTH_U8))
            .and_then(|s| s.write_u8(len as u8)),
        Sz::Two => self
            .write_u8(cbor_type.to_byte(super::CBOR_PAYLOAD_LENGTH_U16))
            .and_then(|s| s.write_u16(len as u16)),
        Sz::Four => self
            .write_u8(cbor_type.to_byte(super::CBOR_PAYLOAD_LENGTH_U32))
            .and_then(|s| s.write_u32(len as u32)),
        Sz::Eight => self
            .write_u8(cbor_type.to_byte(super::CBOR_PAYLOAD_LENGTH_U64))
            .and_then(|s| s.write_u64(len)),
    }
}
/// serialise the given unsigned integer
///
/// # Example
///
/// ```
/// use cbor_event::se::{Serializer};
///
/// let mut serializer = Serializer::new_vec();
/// serializer.write_unsigned_integer(0x12)
///     .expect("write an unsigned integer");
///
/// # let bytes = serializer.finalize();
/// # assert_eq!(bytes, [0x12].as_ref());
/// ```
pub fn write_unsigned_integer(&mut self, value: u64) -> Result<&mut Self> {
    self.write_type_definite(Type::UnsignedInteger, value, None)
}
/// serialise the given unsigned integer using a specific encoding
///
/// see `write_unsigned_integer` and `Sz`
///
/// Fails with `Error::InvalidLenPassed` if `value` does not fit the
/// requested encoding width.
pub fn write_unsigned_integer_sz(&mut self, value: u64, sz: Sz) -> Result<&mut Self> {
    self.write_type_definite(Type::UnsignedInteger, value, Some(sz))
}
/// write a negative integer
///
/// The value is encoded as a CBOR negative integer (major type 1), whose
/// payload is `-value - 1`.
///
/// Note: the sign is NOT checked — passing a non-negative value silently
/// produces an incorrect (huge) encoding, it does not fail. Callers must
/// pass a strictly negative value.
///
/// ```
/// use cbor_event::se::{Serializer};
///
/// let mut serializer = Serializer::new_vec();
/// serializer.write_negative_integer(-12)
///     .expect("write a negative integer");
///
/// # let bytes = serializer.finalize();
/// # assert_eq!(bytes, [0x2b].as_ref());
/// ```
pub fn write_negative_integer(&mut self, value: i64) -> Result<&mut Self> {
    // `!value as u64` equals `(-value - 1) as u64` for every input in
    // two's complement, but cannot overflow — the previous form panicked
    // on `-value` in debug builds when `value == i64::MIN`.
    self.write_type_definite(Type::NegativeInteger, !value as u64, None)
}
/// write a negative integer using a specific encoding
///
/// see `write_negative_integer` and `Sz`
///
/// `value` must be within -1 and -u64::MAX -1 to fit into CBOR nint
pub fn write_negative_integer_sz(&mut self, value: i128, sz: Sz) -> Result<&mut Self> {
use std::convert::TryInto;
let value_u64 = (-value - 1)
.try_into()
.map_err(|_| Error::InvalidNint(value))?;
self.write_type_definite(Type::NegativeInteger, value_u64, Some(sz))
}
/// write the given object as a definite-length CBOR byte string
///
/// ```
/// use cbor_event::se::{Serializer};
///
/// let mut serializer = Serializer::new_vec();
/// serializer.write_bytes(vec![0,1,2,3])
///     .expect("write bytes");
///
/// # let bytes = serializer.finalize();
/// # assert_eq!(bytes, [0x44, 0,1,2,3].as_ref());
/// ```
pub fn write_bytes<B: AsRef<[u8]>>(&mut self, bytes: B) -> Result<&mut Self> {
    let bytes = bytes.as_ref();
    // Header (major type + length) first, then the payload.
    let s = self.write_type_definite(Type::Bytes, bytes.len() as u64, None)?;
    s.0.write_all(bytes)?;
    Ok(s)
}
/// write the given object as bytes using a specific bytestring encoding
///
/// see `write_bytes` and `StringLenSz`
///
/// For `StringLenSz::Indefinite`, the bytes are emitted as an
/// indefinite-length byte string: a start byte (additional info 0x1f),
/// one definite-length chunk per `(len, sz)` entry, then a Break.
/// The chunk lengths must sum to exactly `bytes.len()`.
pub fn write_bytes_sz<B: AsRef<[u8]>>(
    &mut self,
    bytes: B,
    sz: StringLenSz,
) -> Result<&mut Self> {
    let bytes = bytes.as_ref();
    match sz {
        // Definite-length: header then payload, with the requested width.
        StringLenSz::Len(sz) => self
            .write_type_definite(Type::Bytes, bytes.len() as u64, Some(sz))
            .and_then(|s| {
                s.0.write_all(bytes)?;
                Ok(s)
            }),
        StringLenSz::Indefinite(lens) => {
            // Validate up front so the slicing below cannot go out of
            // bounds: chunk lengths must cover the input exactly.
            let sz_sum = lens.iter().fold(0, |sum, len| sum + len.0);
            if sz_sum != bytes.len() as u64 {
                return Err(Error::InvalidIndefiniteString);
            }
            // 0x1f in the additional-info bits marks indefinite length.
            self.write_u8(Type::Bytes.to_byte(0x1f))?;
            let mut start = 0;
            for (len, sz) in lens {
                let end = start + len as usize;
                let chunk = &bytes[start..end];
                // Each chunk is itself a definite-length byte string.
                self.write_bytes_sz(chunk, StringLenSz::Len(sz))?;
                start = end;
            }
            // Terminate the indefinite string with a Break (0xff).
            self.write_u8(Type::Special.to_byte(0x1f))?;
            Ok(self)
        }
    }
}
/// write the given object as a definite-length CBOR text string
///
/// ```
/// use cbor_event::se::{Serializer};
///
/// let mut serializer = Serializer::new_vec();
/// serializer.write_text(r"hello world")
///     .expect("write text");
///
/// # let bytes = serializer.finalize();
/// # assert_eq!(bytes, [0x6b, 0x68, 0x65, 0x6C, 0x6C, 0x6F, 0x20, 0x77, 0x6F, 0x72, 0x6C, 0x64].as_ref());
/// ```
pub fn write_text<S: AsRef<str>>(&mut self, text: S) -> Result<&mut Self> {
    let bytes = text.as_ref().as_bytes();
    // Header (major type + byte length) first, then the UTF-8 payload.
    let s = self.write_type_definite(Type::Text, bytes.len() as u64, None)?;
    s.0.write_all(bytes)?;
    Ok(s)
}
/// write the given object as text using a specific string encoding
///
/// see `write_text` and `StringLenSz`
///
/// For `StringLenSz::Indefinite`, the text is emitted as an
/// indefinite-length text string: a start byte, one definite-length chunk
/// per `(len, sz)` entry (lengths are in BYTES and must sum to the UTF-8
/// length), then a Break. Each chunk boundary must fall on a UTF-8
/// character boundary, or the `from_utf8` below fails.
pub fn write_text_sz<S: AsRef<str>>(&mut self, text: S, sz: StringLenSz) -> Result<&mut Self> {
    let bytes = text.as_ref().as_bytes();
    match sz {
        // Definite-length: header then payload, with the requested width.
        StringLenSz::Len(sz) => self
            .write_type_definite(Type::Text, bytes.len() as u64, Some(sz))
            .and_then(|s| {
                s.0.write_all(bytes)?;
                Ok(s)
            }),
        StringLenSz::Indefinite(lens) => {
            // Validate up front so the slicing below cannot go out of
            // bounds: chunk lengths must cover the input exactly.
            let sz_sum = lens.iter().fold(0, |sum, len| sum + len.0);
            if sz_sum != bytes.len() as u64 {
                return Err(Error::InvalidIndefiniteString);
            }
            // 0x1f in the additional-info bits marks indefinite length.
            self.write_u8(Type::Text.to_byte(0x1f))?;
            let mut start = 0;
            for (len, sz) in lens {
                let end = start + len as usize;
                let chunk = &bytes[start..end];
                // NOTE(review): a chunk split mid-character fails UTF-8
                // validation, and the error is (oddly) reported as
                // InvalidLenPassed — a dedicated variant might fit better.
                let chunk_str = String::from_utf8(chunk.to_vec())
                    .map_err(|_| Error::InvalidLenPassed(sz))?;
                self.write_text_sz(chunk_str, StringLenSz::Len(sz))?;
                start = end;
            }
            // Terminate the indefinite string with a Break (0xff).
            self.write_u8(Type::Special.to_byte(0x1f))?;
            Ok(self)
        }
    }
}
/// start to write an array
///
/// Either you know the length of your array and you can pass it to the
/// function or use an indefinite length.
///
/// - with a fixed length, you are responsible for writing exactly that
///   many elements afterwards.
/// - with an indefinite length, you are responsible for writing the
///   `Special::Break` when your stream ends.
///
/// # Example
///
/// ```
/// use cbor_event::{se::{Serializer}, Len};
///
/// let mut serializer = Serializer::new_vec();
/// serializer
///     .write_array(Len::Len(2)).expect("write an array")
///     .write_text(r"hello").expect("write text")
///     .write_text(r"world").expect("write text");
///
/// # let bytes = serializer.finalize();
/// # assert_eq!(bytes, [0x82, 0x65, 0x68, 0x65, 0x6C, 0x6C, 0x6F, 0x65, 0x77, 0x6F, 0x72, 0x6C, 0x64].as_ref());
/// ```
///
/// ```
/// use cbor_event::{se::{Serializer}, Len, Special};
///
/// let mut serializer = Serializer::new_vec();
/// serializer
///     .write_array(Len::Indefinite).expect("write an array")
///     .write_text(r"hello").expect("write text")
///     .write_text(r"world").expect("write text")
///     .write_special(Special::Break).expect("write break");
///
/// # let bytes = serializer.finalize();
/// # assert_eq!(bytes, [0x9f, 0x65, 0x68, 0x65, 0x6C, 0x6C, 0x6F, 0x65, 0x77, 0x6F, 0x72, 0x6C, 0x64, 0xff].as_ref());
/// ```
///
pub fn write_array(&mut self, len: Len) -> Result<&mut Self> {
    match len {
        Len::Len(n) => self.write_type_definite(Type::Array, n, None),
        Len::Indefinite => self.write_u8(Type::Array.to_byte(0x1f)),
    }
}
/// start to write an array using a specific length encoding
///
/// see `write_array` and `LenSz`
pub fn write_array_sz(&mut self, len: LenSz) -> Result<&mut Self> {
    match len {
        LenSz::Len(n, sz) => self.write_type_definite(Type::Array, n, Some(sz)),
        LenSz::Indefinite => self.write_u8(Type::Array.to_byte(0x1f)),
    }
}
/// start to write a map
///
/// Either you know the length of your map and you can pass it to the
/// function or use an indefinite length.
///
/// - with a fixed length, you are responsible for writing exactly that
///   many pairs afterwards.
/// - with an indefinite length, you are responsible for writing the
///   `Special::Break` when your stream ends.
///
/// A map is like an array but works by pair of elements, so the length is
/// the number of pairs, not the number of elements written.
///
/// # Example
///
/// ```
/// use cbor_event::{se::{Serializer}, Len};
///
/// let mut serializer = Serializer::new_vec();
/// serializer
///     .write_map(Len::Len(2)).expect("write a map")
///     .write_unsigned_integer(1).expect("write unsigned integer")
///     .write_text(r"hello").expect("write text")
///     .write_unsigned_integer(2).expect("write unsigned integer")
///     .write_text(r"world").expect("write text");
///
/// # let bytes = serializer.finalize();
/// # assert_eq!(bytes, [0xA2, 01, 0x65, 0x68, 0x65, 0x6C, 0x6C, 0x6F, 0x02, 0x65, 0x77, 0x6F, 0x72, 0x6C, 0x64].as_ref());
/// ```
///
/// ```
/// use cbor_event::{se::{Serializer}, Len, Special};
///
/// let mut serializer = Serializer::new_vec();
/// serializer
///     .write_map(Len::Indefinite).expect("write a map")
///     .write_unsigned_integer(1).expect("write unsigned integer")
///     .write_text(r"hello").expect("write text")
///     .write_unsigned_integer(2).expect("write unsigned integer")
///     .write_text(r"world").expect("write text")
///     .write_special(Special::Break).expect("write the break");
///
/// # let bytes = serializer.finalize();
/// # assert_eq!(bytes, [0xbf, 01, 0x65, 0x68, 0x65, 0x6C, 0x6C, 0x6F, 0x02, 0x65, 0x77, 0x6F, 0x72, 0x6C, 0x64, 0xff].as_ref());
/// ```
///
pub fn write_map(&mut self, len: Len) -> Result<&mut Self> {
    match len {
        Len::Len(n) => self.write_type_definite(Type::Map, n, None),
        Len::Indefinite => self.write_u8(Type::Map.to_byte(0x1f)),
    }
}
/// start to write a map using a specific length encoding
///
/// see `write_map` and `LenSz`
pub fn write_map_sz(&mut self, len: LenSz) -> Result<&mut Self> {
match len {
LenSz::Indefinite => self.write_u8(Type::Map.to_byte(0x1f)),
LenSz::Len(len, sz) => self.write_type_definite(Type::Map, len, Some(sz)),
}
}
/// write a tag
///
/// in cbor a tag should be followed by a tagged object. You are responsible
/// to making sure you are writing the tagged object just after this
///
/// # Example
///
/// ```
/// use cbor_event::{se::{Serializer}, Len};
///
/// let mut serializer = Serializer::new_vec();
/// serializer
/// .write_tag(24).expect("write a tag")
/// .write_text(r"hello").expect("write text");
///
/// # let bytes = serializer.finalize();
/// # assert_eq!(bytes, [0xd8, 0x18, 0x65, 0x68, 0x65, 0x6C, 0x6C, 0x6F].as_ref());
/// ```
///
    pub fn write_tag(&mut self, tag: u64) -> Result<&mut Self> {
        // Tags are always definite: the tag number itself is the header value.
        self.write_type_definite(Type::Tag, tag, None)
    }
    /// write a tag using a specific encoding
    ///
    /// Identical to `write_tag`, but forces the tag number to be encoded
    /// with the width given by `sz`. See `write_tag` and `Sz`.
    pub fn write_tag_sz(&mut self, tag: u64, sz: Sz) -> Result<&mut Self> {
        self.write_type_definite(Type::Tag, tag, Some(sz))
    }
/// Write a tag that indicates that the following list is a finite
/// set. See https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml.
pub fn write_set_tag(&mut self) -> Result<&mut Self> {
self.write_type_definite(Type::Tag, 258, None)
}
/// write a special value in cbor
///
/// # Example
///
/// ```
/// use cbor_event::{se::{Serializer}, Len, Special};
///
/// let mut serializer = Serializer::new_vec();
/// serializer
/// .write_array(Len::Indefinite).expect("write an array")
/// .write_special(Special::Bool(false)).expect("write false")
/// .write_special(Special::Bool(true)).expect("write true")
/// .write_special(Special::Null).expect("write null")
/// .write_special(Special::Undefined).expect("write undefined")
/// .write_special(Special::Break).expect("write the break");
///
/// # let bytes = serializer.finalize();
/// # assert_eq!(bytes, [0x9f, 0xf4, 0xf5, 0xf6, 0xf7, 0xff].as_ref());
/// ```
    pub fn write_special(&mut self, special: Special) -> Result<&mut Self> {
        // NOTE: arm order matters. `Unassigned(0..=0x13)` must be tried before
        // the catch-all `Unassigned(v)`: small unassigned values are packed
        // straight into the header byte, while larger ones are written as
        // additional-info 0x18 followed by the raw value byte.
        match special {
            Special::Unassigned(v @ 0..=0x13) => self.write_u8(Type::Special.to_byte(v)),
            Special::Bool(false) => self.write_u8(Type::Special.to_byte(0x14)),
            Special::Bool(true) => self.write_u8(Type::Special.to_byte(0x15)),
            Special::Null => self.write_u8(Type::Special.to_byte(0x16)),
            Special::Undefined => self.write_u8(Type::Special.to_byte(0x17)),
            Special::Unassigned(v) => self
                .write_u8(Type::Special.to_byte(0x18))
                .and_then(|s| s.write_u8(v)),
            // Floats are always emitted in the 8-byte (f64) encoding (0x1b).
            Special::Float(f) => self
                .write_u8(Type::Special.to_byte(0x1b))
                .and_then(|s| s.write_f64(f)),
            Special::Break => self.write_u8(Type::Special.to_byte(0x1f)),
        }
    }
    /// Convenient member function to chain serialisation
    ///
    /// Delegates to the value's `Serialize` implementation so that user
    /// types can be written fluently in a serializer method chain.
    pub fn serialize<T: Serialize>(&mut self, t: &T) -> Result<&mut Self> {
        Serialize::serialize(t, self)
    }
}
// macro derivation for rust array of bytes
//
// Stamps out a `Serialize` impl for each listed fixed-size array length,
// all of them delegating to `serialize_fixed_array`.
macro_rules! serialize_array {
    ( $( $x:expr ),* ) => {
        $(
            impl<T: Serialize> Serialize for [T; $x] {
                fn serialize<'b, W: Write + Sized>(
                    &self,
                    serializer: &'b mut Serializer<W>,
                ) -> Result<&'b mut Serializer<W>> {
                    serialize_fixed_array(self.iter(), serializer)
                }
            }
        )*
    }
}
// Arrays of length 1 through 64 are supported.
serialize_array!(
    1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
    27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
    51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64
);
#[cfg(test)]
mod test {
    use super::*;
    // Unsigned integers: one test per CBOR uint encoding width
    // (inline, then 1/2/4/8 trailing bytes).
    #[test]
    fn unsigned_integer_0() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_unsigned_integer(0x12)
            .expect("write unsigned integer");
        let bytes = serializer.finalize();
        assert_eq!(bytes, [0x12].as_ref());
    }
    #[test]
    fn unsigned_integer_1() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_unsigned_integer(0x20)
            .expect("write unsigned integer");
        let bytes = serializer.finalize();
        assert_eq!(bytes, [0x18, 0x20].as_ref());
    }
    #[test]
    fn unsigned_integer_2() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_unsigned_integer(0x2021)
            .expect("write unsigned integer");
        let bytes = serializer.finalize();
        assert_eq!(bytes, [0x19, 0x20, 0x21].as_ref());
    }
    #[test]
    fn unsigned_integer_3() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_unsigned_integer(0x20212223)
            .expect("write unsigned integer");
        let bytes = serializer.finalize();
        assert_eq!(bytes, [0x1a, 0x20, 0x21, 0x22, 0x23].as_ref());
    }
    #[test]
    fn unsigned_integer_4() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_unsigned_integer(0x2021222324252627)
            .expect("write unsigned integer");
        let bytes = serializer.finalize();
        assert_eq!(
            bytes,
            [0x1b, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27].as_ref()
        );
    }
    // Negative integers across the same widths.
    #[test]
    fn negative_integer_0() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_negative_integer(-12)
            .expect("write negative integer");
        let bytes = serializer.finalize();
        assert_eq!(bytes, [0x2b].as_ref());
    }
    #[test]
    fn negative_integer_1() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_negative_integer(-200)
            .expect("write negative integer");
        let bytes = serializer.finalize();
        assert_eq!(bytes, [0x38, 0xc7].as_ref());
    }
    #[test]
    fn negative_integer_2() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_negative_integer(-13201)
            .expect("write negative integer");
        let bytes = serializer.finalize();
        assert_eq!(bytes, [0x39, 0x33, 0x90].as_ref());
    }
    #[test]
    fn negative_integer_3() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_negative_integer(-13201782)
            .expect("write negative integer");
        let bytes = serializer.finalize();
        assert_eq!(bytes, [0x3a, 0x00, 0xc9, 0x71, 0x75].as_ref());
    }
    #[test]
    fn negative_integer_4() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_negative_integer(-9902201782)
            .expect("write negative integer");
        let bytes = serializer.finalize();
        assert_eq!(
            bytes,
            [0x3b, 0x00, 0x00, 0x00, 0x02, 0x4E, 0x37, 0x9B, 0xB5].as_ref()
        );
    }
    // Byte strings: empty and single-byte cases.
    #[test]
    fn bytes_0() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_bytes(&vec![])
            .expect("write bytes");
        let bytes = serializer.finalize();
        assert_eq!(bytes, [0x40].as_ref());
    }
    #[test]
    fn bytes_1() {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_bytes(&vec![0b101010])
            .expect("write bytes");
        let bytes = serializer.finalize();
        assert_eq!(bytes, [0x41, 0b101010].as_ref());
    }
    // Helper: serialize one `Special` value and compare against the expected bytes.
    fn test_special(cbor_type: Special, result: &[u8]) -> bool {
        let mut serializer = Serializer::new_vec();
        serializer
            .write_special(cbor_type)
            .expect("serialize a special");
        let bytes = serializer.finalize();
        println!("serializing: {:?}", cbor_type);
        println!(" - expected: {:?}", result);
        println!(" - got:      {:?}", bytes);
        bytes == result
    }
    #[test]
    fn special_false() {
        assert!(test_special(Special::Bool(false), [0xf4].as_ref()))
    }
    #[test]
    fn special_true() {
        assert!(test_special(Special::Bool(true), [0xf5].as_ref()))
    }
    #[test]
    fn special_null() {
        assert!(test_special(Special::Null, [0xf6].as_ref()))
    }
    #[test]
    fn special_undefined() {
        assert!(test_special(Special::Undefined, [0xf7].as_ref()))
    }
    #[test]
    fn special_break() {
        assert!(test_special(Special::Break, [0xff].as_ref()))
    }
    #[test]
    fn special_unassigned() {
        assert!(test_special(Special::Unassigned(0), [0xe0].as_ref()));
        assert!(test_special(Special::Unassigned(1), [0xe1].as_ref()));
        assert!(test_special(Special::Unassigned(10), [0xea].as_ref()));
        assert!(test_special(Special::Unassigned(19), [0xf3].as_ref()));
        assert!(test_special(Special::Unassigned(24), [0xf8, 0x18].as_ref()));
    }
    #[test]
    fn special_float() {
        assert!(test_special(
            Special::Float(1.1),
            [0xfb, 0x3f, 0xf1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9a].as_ref()
        ));
        assert!(test_special(
            Special::Float(-4.1),
            [0xfb, 0xc0, 0x10, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66].as_ref()
        ));
        assert!(test_special(
            Special::Float(f64::INFINITY),
            [0xfb, 0x7f, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00].as_ref()
        ));
        assert!(test_special(
            Special::Float(f64::NAN),
            [0xfb, 0x7f, 0xf8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00].as_ref()
        ));
        assert!(test_special(
            Special::Float(f64::NEG_INFINITY),
            [0xfb, 0xff, 0xf0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00].as_ref()
        ));
    }
    // The *_sz tests pin the explicit length-encoding (`Sz`) variants.
    #[test]
    fn uint_sz() {
        let expected_bytes = vec![
            0x09, 0x18, 0x09, 0x19, 0x00, 0x09, 0x1a, 0x00, 0x00, 0x00, 0x09, 0x1b, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
        ];
        let mut serializer = Serializer::new_vec();
        serializer
            .write_unsigned_integer_sz(9, Sz::Inline)
            .unwrap()
            .write_unsigned_integer_sz(9, Sz::One)
            .unwrap()
            .write_unsigned_integer_sz(9, Sz::Two)
            .unwrap()
            .write_unsigned_integer_sz(9, Sz::Four)
            .unwrap()
            .write_unsigned_integer_sz(9, Sz::Eight)
            .unwrap();
        let bytes = serializer.finalize();
        assert_eq!(bytes, expected_bytes);
    }
    #[test]
    fn nint_sz() {
        let expected_bytes = vec![
            0x28, 0x38, 0x08, 0x39, 0x00, 0x08, 0x3a, 0x00, 0x00, 0x00, 0x08, 0x3b, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x08,
        ];
        let mut serializer = Serializer::new_vec();
        serializer
            .write_negative_integer_sz(-9, Sz::Inline)
            .unwrap()
            .write_negative_integer_sz(-9, Sz::One)
            .unwrap()
            .write_negative_integer_sz(-9, Sz::Two)
            .unwrap()
            .write_negative_integer_sz(-9, Sz::Four)
            .unwrap()
            .write_negative_integer_sz(-9, Sz::Eight)
            .unwrap();
        // just outside of cbor NINT range
        let big_nint = -(u64::MAX as i128) - 2;
        assert!(serializer
            .write_negative_integer_sz(big_nint, Sz::Eight)
            .is_err());
        let bytes = serializer.finalize();
        assert_eq!(bytes, expected_bytes);
    }
    #[test]
    fn bytes_sz() {
        let def_parts: Vec<Vec<u8>> = vec![
            vec![0x44, 0xBA, 0xAD, 0xF0, 0x0D],
            vec![0x58, 0x04, 0xCA, 0xFE, 0xD0, 0x0D],
            vec![0x59, 0x00, 0x04, 0xDE, 0xAD, 0xBE, 0xEF],
            vec![0x5a, 0x00, 0x00, 0x00, 0x02, 0xCA, 0xFE],
            vec![
                0x5b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xBE, 0xEF,
            ],
        ];
        let mut expected_bytes: Vec<u8> = def_parts.iter().flatten().cloned().collect();
        // also make an indefinite encoded one out all the definite-encoded parts
        expected_bytes.push(0x5F);
        for slice in def_parts.iter() {
            expected_bytes.extend_from_slice(&slice[..]);
        }
        expected_bytes.push(0xFF);
        let indef_bytes = vec![
            0xBA, 0xAD, 0xF0, 0x0D, 0xCA, 0xFE, 0xD0, 0x0D, 0xDE, 0xAD, 0xBE, 0xEF, 0xCA, 0xFE,
            0xBE, 0xEF,
        ];
        let indef_lens = vec![
            (4, Sz::Inline),
            (4, Sz::One),
            (4, Sz::Two),
            (2, Sz::Four),
            (2, Sz::Eight),
        ];
        let mut serializer = Serializer::new_vec();
        serializer
            .write_bytes_sz(vec![0xBA, 0xAD, 0xF0, 0x0D], StringLenSz::Len(Sz::Inline))
            .unwrap()
            .write_bytes_sz(vec![0xCA, 0xFE, 0xD0, 0x0D], StringLenSz::Len(Sz::One))
            .unwrap()
            .write_bytes_sz(vec![0xDE, 0xAD, 0xBE, 0xEF], StringLenSz::Len(Sz::Two))
            .unwrap()
            .write_bytes_sz(vec![0xCA, 0xFE], StringLenSz::Len(Sz::Four))
            .unwrap()
            .write_bytes_sz(vec![0xBE, 0xEF], StringLenSz::Len(Sz::Eight))
            .unwrap()
            .write_bytes_sz(indef_bytes, StringLenSz::Indefinite(indef_lens))
            .unwrap();
        let bytes = serializer.finalize();
        assert_eq!(bytes, expected_bytes);
    }
    #[test]
    fn text_sz() {
        let def_parts: Vec<Vec<u8>> = vec![
            vec![0x65, 0x48, 0x65, 0x6c, 0x6c, 0x6f],
            vec![0x78, 0x05, 0x57, 0x6f, 0x72, 0x6c, 0x64],
            vec![
                0x79, 0x00, 0x09, 0xE6, 0x97, 0xA5, 0xE6, 0x9C, 0xAC, 0xE8, 0xAA, 0x9E,
            ],
            vec![0x7a, 0x00, 0x00, 0x00, 0x01, 0x39],
            vec![
                0x7b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x41, 0x42, 0x43,
            ],
        ];
        let mut expected_bytes: Vec<u8> = def_parts.iter().flatten().cloned().collect();
        // also make an indefinite encoded one out all the definite-encoded parts
        expected_bytes.push(0x7F);
        for slice in def_parts.iter() {
            expected_bytes.extend_from_slice(&slice[..]);
        }
        expected_bytes.push(0xFF);
        let indef_lens = vec![
            (5, Sz::Inline),
            (5, Sz::One),
            (9, Sz::Two),
            (1, Sz::Four),
            (3, Sz::Eight),
        ];
        let mut serializer = Serializer::new_vec();
        serializer
            .write_text_sz("Hello", StringLenSz::Len(Sz::Inline))
            .unwrap()
            .write_text_sz("World", StringLenSz::Len(Sz::One))
            .unwrap()
            .write_text_sz("日本語", StringLenSz::Len(Sz::Two))
            .unwrap()
            .write_text_sz("9", StringLenSz::Len(Sz::Four))
            .unwrap()
            .write_text_sz("ABC", StringLenSz::Len(Sz::Eight))
            .unwrap()
            .write_text_sz("HelloWorld日本語9ABC", StringLenSz::Indefinite(indef_lens))
            .unwrap();
        let bytes = serializer.finalize();
        assert_eq!(bytes, expected_bytes);
    }
    #[test]
    fn array_sz() {
        let expected_bytes = vec![
            0x80, 0x98, 0x01, 0x99, 0x00, 0x02, 0x9a, 0x00, 0x00, 0x00, 0x03, 0x9b, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x9f,
        ];
        let mut serializer = Serializer::new_vec();
        serializer
            .write_array_sz(LenSz::Len(0, Sz::Inline))
            .unwrap()
            .write_array_sz(LenSz::Len(1, Sz::One))
            .unwrap()
            .write_array_sz(LenSz::Len(2, Sz::Two))
            .unwrap()
            .write_array_sz(LenSz::Len(3, Sz::Four))
            .unwrap()
            .write_array_sz(LenSz::Len(4, Sz::Eight))
            .unwrap()
            .write_array_sz(LenSz::Indefinite)
            .unwrap();
        let bytes = serializer.finalize();
        assert_eq!(bytes, expected_bytes);
    }
    #[test]
    fn map_sz() {
        let expected_bytes = vec![
            0xa0, 0xb8, 0x01, 0xb9, 0x00, 0x02, 0xba, 0x00, 0x00, 0x00, 0x03, 0xbb, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0xbf,
        ];
        let mut serializer = Serializer::new_vec();
        serializer
            .write_map_sz(LenSz::Len(0, Sz::Inline))
            .unwrap()
            .write_map_sz(LenSz::Len(1, Sz::One))
            .unwrap()
            .write_map_sz(LenSz::Len(2, Sz::Two))
            .unwrap()
            .write_map_sz(LenSz::Len(3, Sz::Four))
            .unwrap()
            .write_map_sz(LenSz::Len(4, Sz::Eight))
            .unwrap()
            .write_map_sz(LenSz::Indefinite)
            .unwrap();
        let bytes = serializer.finalize();
        assert_eq!(bytes, expected_bytes);
    }
    #[test]
    fn tag_sz() {
        // FIX: this test previously duplicated `uint_sz` and never exercised
        // `write_tag_sz`. Tags are major type 6, so the encodings of tag(9)
        // are: 0xc9 inline, then 0xd8/0xd9/0xda/0xdb for 1/2/4/8-byte values.
        let expected_bytes = vec![
            0xc9, 0xd8, 0x09, 0xd9, 0x00, 0x09, 0xda, 0x00, 0x00, 0x00, 0x09, 0xdb, 0x00, 0x00,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x09,
        ];
        let mut serializer = Serializer::new_vec();
        serializer
            .write_tag_sz(9, Sz::Inline)
            .unwrap()
            .write_tag_sz(9, Sz::One)
            .unwrap()
            .write_tag_sz(9, Sz::Two)
            .unwrap()
            .write_tag_sz(9, Sz::Four)
            .unwrap()
            .write_tag_sz(9, Sz::Eight)
            .unwrap();
        let bytes = serializer.finalize();
        assert_eq!(bytes, expected_bytes);
    }
    #[test]
    fn write_type_doesnt_fit() {
        let mut serializer = Serializer::new_vec();
        assert!(serializer
            .write_type_definite(Type::UnsignedInteger, 23, Some(Sz::Inline))
            .is_ok());
        assert!(serializer
            .write_type_definite(Type::UnsignedInteger, 24, Some(Sz::Inline))
            .is_err());
        assert!(serializer
            .write_type_definite(Type::UnsignedInteger, u8::MAX as u64, Some(Sz::One))
            .is_ok());
        assert!(serializer
            .write_type_definite(Type::UnsignedInteger, u8::MAX as u64 + 1, Some(Sz::One))
            .is_err());
        assert!(serializer
            .write_type_definite(Type::UnsignedInteger, u16::MAX as u64, Some(Sz::Two))
            .is_ok());
        assert!(serializer
            .write_type_definite(Type::UnsignedInteger, u16::MAX as u64 + 1, Some(Sz::Two))
            .is_err());
        assert!(serializer
            .write_type_definite(Type::UnsignedInteger, u32::MAX as u64, Some(Sz::Four))
            .is_ok());
        assert!(serializer
            .write_type_definite(Type::UnsignedInteger, u32::MAX as u64 + 1, Some(Sz::Four))
            .is_err());
        assert!(serializer
            .write_type_definite(Type::UnsignedInteger, u64::MAX as u64, Some(Sz::Eight))
            .is_ok());
    }
}
|
use crate::cpu::Cpu;
use crate::opcode::addressing_mode::{AddRegister, AddressMode};
use crate::opcode::*;
/// JMP instruction: unconditionally transfers control to a target address.
pub struct Jmp {
    opcode: u8,
    // Absolute or Indirect.
    mode: AddressMode,
}
impl Jmp {
    /// Decode a JMP at the current program counter.
    ///
    /// Returns `None` unless `opcode` is one of the two JMP encodings
    /// (0x4C absolute, 0x6C indirect). The two bytes following the opcode
    /// form the operand address.
    pub fn new(opcode: u8, cpu: &Cpu) -> Option<Self> {
        let pc = cpu.program_counter as usize;
        let operand = bytes_to_addr(cpu.memory[pc + 1], cpu.memory[pc + 2]);
        let mode = match opcode {
            // Absolute: jump straight to the operand address.
            0x4C => AddressMode::Absolute {
                register: AddRegister::None,
                address: operand,
            },
            // Indirect: the operand holds the location of the real target.
            0x6C => AddressMode::Indirect {
                register: AddRegister::None,
                address_to_read_indirect: operand,
            },
            _ => return None,
        };
        Some(Jmp { opcode, mode })
    }
    /// Cycle cost for this JMP: 3 absolute, 5 indirect.
    fn get_cycles(&self, _cpu: &Cpu) -> u64 {
        match &self.mode {
            AddressMode::Absolute { .. } => 3,
            AddressMode::Indirect { .. } => 5,
            _ => panic!("unexpected!"),
        }
    }
}
impl Operation for Jmp {
    /// JMP simply moves to the address.
    fn execute(&self, cpu: &mut Cpu) {
        let target = self.mode.to_addr(cpu).unwrap();
        cpu.program_counter = target;
        cpu.cycles += self.get_cycles(cpu);
    }
    fn dump(&self, cpu: &Cpu) -> String {
        let operand = self.mode.value_to_string();
        let resolved = self.mode.to_string(cpu);
        format!("{:02X} {} JMP {}", self.opcode, operand, resolved)
    }
}
/// JSR instruction: jump to subroutine, pushing a return address first.
pub struct Jsr {
    // Absolute.
    mode: AddressMode,
}
impl Jsr {
    const OPCODE: u8 = 0x20;
    const BYTES: u16 = 3;
    const CYCLES: u64 = 6;
    /// Decode a JSR; returns `None` for any opcode other than 0x20.
    /// The two bytes after the opcode form the subroutine address.
    pub fn new(opcode: u8, cpu: &Cpu) -> Option<Self> {
        match opcode {
            Self::OPCODE => {
                let pc = cpu.program_counter as usize;
                let target = bytes_to_addr(cpu.memory[pc + 1], cpu.memory[pc + 2]);
                Some(Jsr {
                    mode: AddressMode::Absolute {
                        register: AddRegister::None,
                        address: target,
                    },
                })
            }
            _ => None,
        }
    }
}
impl Operation for Jsr {
    fn execute(&self, cpu: &mut Cpu) {
        // JSR is always 3 bytes; the address pushed is the return address
        // minus one (RTS compensates by incrementing after popping).
        let return_address = cpu.program_counter + Jsr::BYTES - 1;
        cpu.stack.push_addr(&mut cpu.memory, return_address);
        cpu.program_counter = self.mode.to_addr(cpu).unwrap();
        // Always 6 cycles for a JSR
        cpu.cycles += Jsr::CYCLES;
    }
    fn dump(&self, cpu: &Cpu) -> String {
        let operand = self.mode.value_to_string();
        let resolved = self.mode.to_string(cpu);
        format!("{:02X} {} JSR {}", Self::OPCODE, operand, resolved)
    }
}
/// RTS instruction: return from subroutine.
pub struct Rts {}
impl Rts {
    const OPCODE: u8 = 0x60;
    const BYTES: u16 = 1;
    const CYCLES: u64 = 6;
    /// Decode an RTS; returns `None` for any opcode other than 0x60.
    pub fn new(opcode: u8) -> Option<Self> {
        match opcode {
            Self::OPCODE => Some(Rts {}),
            _ => None,
        }
    }
}
impl Operation for Rts {
    fn execute(&self, cpu: &mut Cpu) {
        // Pop the address JSR pushed (return target minus one) and step
        // past it to resume at the instruction after the JSR.
        let (pcl, pch) = cpu.stack.pop_addr(&mut cpu.memory);
        cpu.program_counter = bytes_to_addr(pcl, pch) + Rts::BYTES;
        cpu.cycles += Rts::CYCLES;
    }
    fn dump(&self, _cpu: &Cpu) -> String {
        format!("{:02X} RTS ", Self::OPCODE)
    }
}
|
/// A card record with its identifying URIs, core attributes,
/// per-face information and format legalities.
#[derive(Clone)]
pub struct Card {
    pub id: String,
    pub uri: String,
    pub scryfall_uri: String,
    pub rulings_uri: String,
    pub name: String,
    // Converted mana cost.
    pub cmc: f64,
    pub color_identity: Vec<String>,
    pub type_line: String,
    pub layout: String,
    pub set_name: String,
    pub rarity: String,
    // Face data; variant depends on the card's layout.
    pub info: CardInfo,
    pub legalities: CardLegalities,
}
/// Face information grouped by layout: a normal card has a single face,
/// while split/flip/multifaced layouts carry one entry per face.
#[derive(Clone)]
pub enum CardInfo {
    Normal (CardFaceInfo),
    Split (Vec<CardFaceInfo>),
    Flip (Vec<CardFaceInfo>),
    Multifaced (Vec<CardFaceInfo>),
}
/// The printed details of a single card face.
#[derive(Clone)]
pub struct CardFaceInfo {
    pub card_type: CardType,
    pub colors: Vec<String>,
    pub image_uris: CardImagery,
    pub mana_cost: String,
    pub name: String,
    pub oracle_text: String,
    pub type_line: String,
}
/// Card category, carrying the stats specific to that category
/// (loyalty for planeswalkers, power/toughness for creatures).
#[derive(Clone, Debug)]
pub enum CardType {
    Planeswalker {loyalty: String},
    Creature {power: String, toughness: String},
    NonCreature,
}
/// Legality status of the card in each play format, one string per format.
#[derive(Clone, Debug)]
pub struct CardLegalities {
    pub standard: String,
    pub future: String,
    pub frontier: String,
    pub modern: String,
    pub legacy: String,
    pub pauper: String,
    pub vintage: String,
    pub penny: String,
    pub commander: String,
    pub duel: String,
}
/// URIs to the card's image renditions at various sizes and crops.
#[derive(Clone)]
pub struct CardImagery {
    pub png: String,
    pub large: String,
    pub normal: String,
    pub small: String,
    pub border_crop: String,
    pub art_crop: String,
}
/// Print a human-readable dump of `card` to stdout: core attributes,
/// then each face's details, then the format legalities.
pub fn print_card(card: &Card) {
    println!("id\t{}", card.id);
    println!("name\t{}", card.name);
    println!("layout\t{}", card.layout);
    println!("rarity\t{}", card.rarity);
    println!("cmc\t{}", card.cmc);
    println!("typestr\t{}", card.type_line);
    match &card.info {
        CardInfo::Normal (info) => {
            println!("type\t{:?}", info.card_type);
            println!("text\t{:?}", info.oracle_text);
        }
        // Split, Flip and Multifaced all hold a list of faces and are printed
        // identically; the previous version triplicated this loop verbatim.
        CardInfo::Split (faces) | CardInfo::Flip (faces) | CardInfo::Multifaced (faces) => {
            for face in faces {
                println!("------- name\t{}", face.name);
                println!("\tmc\t{}", face.mana_cost);
                println!("\ttypestr\t{}", face.type_line);
                println!("\ttype\t{:?}", face.card_type);
                println!("\ttext\t{:?}", face.oracle_text);
            }
        }
    }
    println!("{:?}", card.legalities);
}
|
//! Utilities used for serializing/deserializing sequencer REST API related data.
use num_bigint::BigUint;
use pathfinder_common::{
BlockNumber, CallParam, ConstructorParam, EthereumAddress, GasPrice, L1ToL2MessagePayloadElem,
L2ToL1MessagePayloadElem, TransactionSignatureElem, TransactionVersion,
};
use primitive_types::{H160, H256, U256};
use serde::de::Visitor;
use serde_with::{serde_conv, DeserializeAs, SerializeAs};
use stark_hash::{Felt, HexParseError, OverflowError};
use std::borrow::Cow;
use std::str::FromStr;
// The sequencer REST API represents felt-typed values as *decimal* strings;
// each adapter below converts between that wire format and the corresponding
// newtype wrapper around `Felt`.
serde_conv!(
    pub CallParamAsDecimalStr,
    CallParam,
    |serialize_me: &CallParam| starkhash_to_dec_str(&serialize_me.0),
    |s: &str| starkhash_from_dec_str(s).map(CallParam)
);
serde_conv!(
    pub ConstructorParamAsDecimalStr,
    ConstructorParam,
    |serialize_me: &ConstructorParam| starkhash_to_dec_str(&serialize_me.0),
    |s: &str| starkhash_from_dec_str(s).map(ConstructorParam)
);
serde_conv!(
    pub L1ToL2MessagePayloadElemAsDecimalStr,
    L1ToL2MessagePayloadElem,
    |serialize_me: &L1ToL2MessagePayloadElem| starkhash_to_dec_str(&serialize_me.0),
    |s: &str| starkhash_from_dec_str(s).map(L1ToL2MessagePayloadElem)
);
serde_conv!(
    pub L2ToL1MessagePayloadElemAsDecimalStr,
    L2ToL1MessagePayloadElem,
    |serialize_me: &L2ToL1MessagePayloadElem| starkhash_to_dec_str(&serialize_me.0),
    |s: &str| starkhash_from_dec_str(s).map(L2ToL1MessagePayloadElem)
);
serde_conv!(
    pub TransactionSignatureElemAsDecimalStr,
    TransactionSignatureElem,
    |serialize_me: &TransactionSignatureElem| starkhash_to_dec_str(&serialize_me.0),
    |s: &str| starkhash_from_dec_str(s).map(TransactionSignatureElem)
);
/// Serde adapter mapping an `EthereumAddress` to/from a "0x"-prefixed hex string.
pub struct EthereumAddressAsHexStr;
impl SerializeAs<EthereumAddress> for EthereumAddressAsHexStr {
    fn serialize_as<S>(source: &EthereumAddress, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // EthereumAddress is "0x" + 40 digits at most
        let mut buf = [0u8; 2 + 40];
        let s = bytes_as_hex_str(source.0.as_bytes(), &mut buf);
        serializer.serialize_str(s)
    }
}
impl<'de> DeserializeAs<'de, EthereumAddress> for EthereumAddressAsHexStr {
    fn deserialize_as<D>(deserializer: D) -> Result<EthereumAddress, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct EthereumAddressVisitor;
        impl<'de> Visitor<'de> for EthereumAddressVisitor {
            type Value = EthereumAddress;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("a hex string of up to 40 digits with an optional '0x' prefix")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // H160 is 20 bytes, i.e. exactly 40 hex digits.
                bytes_from_hex_str::<{ H160::len_bytes() }>(v)
                    .map_err(serde::de::Error::custom)
                    .map(|b| EthereumAddress(H160::from(b)))
            }
        }
        deserializer.deserialize_str(EthereumAddressVisitor)
    }
}
/// Serde adapter mapping an `H256` to/from a "0x"-prefixed hex string
/// with leading zeros stripped on output.
pub struct H256AsNoLeadingZerosHexStr;
impl SerializeAs<H256> for H256AsNoLeadingZerosHexStr {
    fn serialize_as<S>(source: &H256, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // H256 is "0x" + 64 digits at most
        let mut buf = [0u8; 2 + 64];
        let s = bytes_as_hex_str(source.as_bytes(), &mut buf);
        serializer.serialize_str(s)
    }
}
impl<'de> DeserializeAs<'de, H256> for H256AsNoLeadingZerosHexStr {
    fn deserialize_as<D>(deserializer: D) -> Result<H256, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct H256Visitor;
        impl<'de> Visitor<'de> for H256Visitor {
            type Value = H256;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("a hex string of up to 64 digits with an optional '0x' prefix")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // H256 is 32 bytes, i.e. exactly 64 hex digits.
                bytes_from_hex_str::<{ H256::len_bytes() }>(v)
                    .map_err(serde::de::Error::custom)
                    .map(H256::from)
            }
        }
        deserializer.deserialize_str(H256Visitor)
    }
}
/// Serde adapter mapping a `GasPrice` to/from a "0x"-prefixed hex string.
pub struct GasPriceAsHexStr;
impl SerializeAs<GasPrice> for GasPriceAsHexStr {
    fn serialize_as<S>(source: &GasPrice, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // GasPrice is "0x" + 32 digits at most
        let mut buf = [0u8; 2 + 32];
        let bytes = source.0.to_be_bytes();
        let s = bytes_as_hex_str(&bytes, &mut buf);
        serializer.serialize_str(s)
    }
}
impl<'de> DeserializeAs<'de, GasPrice> for GasPriceAsHexStr {
    fn deserialize_as<D>(deserializer: D) -> Result<GasPrice, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct GasPriceVisitor;
        impl<'de> Visitor<'de> for GasPriceVisitor {
            type Value = GasPrice;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("a hex string of up to 32 digits with an optional '0x' prefix")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // 16 bytes == 32 hex digits, matching the serializer above.
                bytes_from_hex_str::<16>(v)
                    .map_err(serde::de::Error::custom)
                    .map(GasPrice::from_be_bytes)
            }
        }
        deserializer.deserialize_str(GasPriceVisitor)
    }
}
/// Serde adapter mapping a `BlockNumber` to/from a "0x"-prefixed hex string.
pub struct StarknetBlockNumberAsHexStr;
impl SerializeAs<BlockNumber> for StarknetBlockNumberAsHexStr {
    fn serialize_as<S>(source: &BlockNumber, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let bytes = source.get().to_be_bytes();
        // BlockNumber is "0x" + 16 digits at most
        let mut buf = [0u8; 2 + 16];
        let s = bytes_as_hex_str(&bytes, &mut buf);
        serializer.serialize_str(s)
    }
}
impl<'de> DeserializeAs<'de, BlockNumber> for StarknetBlockNumberAsHexStr {
    fn deserialize_as<D>(deserializer: D) -> Result<BlockNumber, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct StarknetBlockNumberVisitor;
        impl<'de> Visitor<'de> for StarknetBlockNumberVisitor {
            type Value = BlockNumber;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("a hex string of up to 16 digits with an optional '0x' prefix")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // Parse as u64, then let BlockNumber validate the raw value.
                let stripped = v.strip_prefix("0x").unwrap_or(v);
                let raw = u64::from_str_radix(stripped, 16).map_err(serde::de::Error::custom)?;
                BlockNumber::deserialize_value::<E>(raw)
            }
        }
        deserializer.deserialize_str(StarknetBlockNumberVisitor)
    }
}
// Hex-string adapters for 256-bit values (transaction version and U256).
serde_with::serde_conv!(
    pub TransactionVersionAsHexStr,
    TransactionVersion,
    |serialize_me: &TransactionVersion| bytes_to_hex_str(serialize_me.0.as_bytes()),
    |s: &str| bytes_from_hex_str::<{ H256::len_bytes() }>(s).map(|b| TransactionVersion(H256::from(b)))
);
serde_with::serde_conv!(
    pub U256AsHexStr,
    primitive_types::U256,
    // U256 has no byte view of its own, so serialize via a 32-byte buffer.
    |u: &U256| { let mut b = [0u8; 32]; u.to_big_endian(&mut b); bytes_to_hex_str(&b) },
    |s: &str| bytes_from_hex_str::<32>(s).map(U256::from)
);
/// A `u64` newtype that serializes to / deserializes from a "0x"-prefixed
/// big-endian hex string.
pub struct U64AsHexStr(pub u64);
impl serde::Serialize for U64AsHexStr {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&bytes_to_hex_str(&self.0.to_be_bytes()))
    }
}
impl<'de> serde::Deserialize<'de> for U64AsHexStr {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct Visitor;
        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = U64AsHexStr;
            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("A u64 encoded as a hex string")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // 8 bytes == 16 hex digits; shorter inputs are zero-extended.
                let value = bytes_from_hex_str::<8>(v)
                    .map(u64::from_be_bytes)
                    .map_err(E::custom)?;
                Ok(U64AsHexStr(value))
            }
        }
        deserializer.deserialize_str(Visitor)
    }
}
/// A helper conversion function. Only use with __sequencer API related types__.
///
/// Fails with `OverflowError` when the big-endian value does not fit in a `Felt`.
fn starkhash_from_biguint(b: BigUint) -> Result<Felt, OverflowError> {
    Felt::from_be_slice(&b.to_bytes_be())
}
/// A helper conversion function. Only use with __sequencer API related types__.
///
/// Renders the field element as a base-10 string.
pub fn starkhash_to_dec_str(h: &Felt) -> String {
    BigUint::from_bytes_be(&h.to_be_bytes()).to_str_radix(10)
}
/// A helper conversion function. Only use with __sequencer API related types__.
///
/// Parses a decimal string into a `Felt`, falling back to hex parsing when
/// the input is not a valid decimal number.
fn starkhash_from_dec_str(s: &str) -> Result<Felt, anyhow::Error> {
    match BigUint::from_str(s) {
        Ok(b) => Ok(starkhash_from_biguint(b)?),
        Err(_) => Ok(Felt::from_hex_str(s)?),
    }
}
/// A convenience function which parses a hex string into a byte array.
///
/// Supports both upper and lower case hex strings, as well as an
/// optional "0x" prefix.
///
/// The result is big-endian and right-aligned: inputs shorter than
/// `N * 2` digits leave the leading bytes of the output zeroed.
fn bytes_from_hex_str<const N: usize>(hex_str: &str) -> Result<[u8; N], HexParseError> {
    // Maps one ASCII hex digit to its 4-bit value.
    fn parse_hex_digit(digit: u8) -> Result<u8, HexParseError> {
        match digit {
            b'0'..=b'9' => Ok(digit - b'0'),
            b'A'..=b'F' => Ok(digit - b'A' + 10),
            b'a'..=b'f' => Ok(digit - b'a' + 10),
            other => Err(HexParseError::InvalidNibble(other)),
        }
    }
    let hex_str = hex_str.strip_prefix("0x").unwrap_or(hex_str);
    if hex_str.len() > N * 2 {
        return Err(HexParseError::InvalidLength {
            max: N * 2,
            actual: hex_str.len(),
        });
    }
    let mut buf = [0u8; N];
    // We want the result in big-endian so reverse iterate over each pair of nibbles.
    let chunks = hex_str.as_bytes().rchunks_exact(2);
    // Handle a possible remaining lone (most significant) nibble.
    let odd_nibble = chunks.remainder();
    if !odd_nibble.is_empty() {
        let full_bytes = hex_str.len() / 2;
        // The lone digit lands in the byte just above the highest full pair.
        buf[N - 1 - full_bytes] = parse_hex_digit(odd_nibble[0])?;
    }
    for (i, c) in chunks.enumerate() {
        // Indexing c[0] and c[1] are safe since chunk-size is 2.
        buf[N - 1 - i] = parse_hex_digit(c[0])? << 4 | parse_hex_digit(c[1])?;
    }
    Ok(buf)
}
/// The first stage of conversion - skip leading zeros
fn skip_zeros(bytes: &[u8]) -> (impl Iterator<Item = &u8>, usize, usize) {
// Skip all leading zero bytes
let it = bytes.iter().skip_while(|&&b| b == 0);
let num_bytes = it.clone().count();
let skipped = bytes.len() - num_bytes;
// The first high nibble can be 0
let start = if bytes[skipped] < 0x10 { 1 } else { 2 };
// Number of characters to display
let len = start + num_bytes * 2;
(it, start, len)
}
/// The second stage of conversion - map bytes to hex str
fn it_to_hex_str<'a>(
it: impl Iterator<Item = &'a u8>,
start: usize,
len: usize,
buf: &'a mut [u8],
) -> &'a [u8] {
const LUT: [u8; 16] = *b"0123456789abcdef";
buf[0] = b'0';
// Same small lookup table is ~25% faster than hex::encode_from_slice 🤷
it.enumerate().for_each(|(i, &b)| {
let idx = b as usize;
let pos = start + i * 2;
let x = [LUT[(idx & 0xf0) >> 4], LUT[idx & 0x0f]];
buf[pos..pos + 2].copy_from_slice(&x);
});
buf[1] = b'x';
&buf[..len]
}
/// Writes `bytes` into `buf` as a "0x"-prefixed lowercase hex str slice and
/// returns it.
///
/// Leading zero bytes are suppressed; an all-zero (or empty) input yields the
/// static string `"0x0"` without touching `buf`.
///
/// # Panics
/// Panics if `buf.len() < bytes.len() * 2 + 2`.
pub fn bytes_as_hex_str<'a>(bytes: &'a [u8], buf: &'a mut [u8]) -> &'a str {
    let required = bytes.len() * 2 + 2;
    assert!(
        buf.len() >= required,
        "buffer size is {}, expected at least {}",
        buf.len(),
        required
    );
    if bytes.iter().all(|&b| b == 0) {
        return "0x0";
    }
    let (it, start, len) = skip_zeros(bytes);
    let hex = it_to_hex_str(it, start, len, buf);
    // Unwrap is safe: only ASCII was written into `buf`.
    std::str::from_utf8(hex).unwrap()
}
/// Produces an owned "0x"-prefixed lowercase hex string from big-endian
/// `bytes`.
///
/// Leading zero bytes are suppressed; an all-zero (or empty) input yields
/// `"0x0"` without allocating.
pub fn bytes_to_hex_str(bytes: &[u8]) -> Cow<'static, str> {
    if bytes.iter().all(|&b| b == 0) {
        return Cow::Borrowed("0x0");
    }
    let (it, start, len) = skip_zeros(bytes);
    let mut buf = vec![0u8; len];
    it_to_hex_str(it, start, len, &mut buf);
    // Unwrap is safe: only ASCII hex digits and the prefix were written.
    Cow::Owned(String::from_utf8(buf).unwrap())
}
/// Extracts the JSON representation of the program and the entry points by
/// type from a raw contract definition dump.
///
/// # Errors
/// Fails if `contract_definition_dump` is not valid JSON or lacks either the
/// `program` or `entry_points_by_type` field.
pub fn extract_program_and_entry_points_by_type(
    contract_definition_dump: &[u8],
) -> anyhow::Result<(serde_json::Value, serde_json::Value)> {
    use anyhow::Context;
    // Minimal projection of the contract definition: only the two fields we
    // need; everything else in the dump is ignored by serde.
    #[derive(serde::Deserialize)]
    struct ContractDefinition {
        pub program: serde_json::Value,
        pub entry_points_by_type: serde_json::Value,
    }
    let ContractDefinition {
        program,
        entry_points_by_type,
    } = serde_json::from_slice(contract_definition_dump)
        .context("Failed to parse contract_definition")?;
    Ok((program, entry_points_by_type))
}
// Unit tests for the Felt / hex / decimal conversion helpers above.
#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;
    // Round-trips the all-zero value through every conversion helper; the
    // canonical textual form is "0x0" / "0".
    #[test]
    fn zero() {
        const ZERO_HEX_STR: &str = "0x0";
        const ZERO_DEC_STR: &str = "0";
        const ZERO_BYTES: [u8; 1] = [0];
        let a = starkhash_from_biguint(BigUint::from_bytes_be(&ZERO_BYTES)).unwrap();
        let b = starkhash_from_dec_str(ZERO_DEC_STR).unwrap();
        let expected = Felt::ZERO;
        assert_eq!(expected, a);
        assert_eq!(expected, b);
        assert_eq!(starkhash_to_dec_str(&expected), ZERO_DEC_STR);
        let c: [u8; 1] = bytes_from_hex_str(ZERO_HEX_STR).unwrap();
        assert!(c.iter().all(|x| *x == 0));
        assert_eq!(bytes_to_hex_str(&c[..]), ZERO_HEX_STR);
        let mut buf = [0u8; 2 + 2];
        assert_eq!(bytes_as_hex_str(&c[..], &mut buf), ZERO_HEX_STR);
    }
    // Odd number of hex digits: the leading single nibble must survive the
    // round trip (no padded leading zero in the output).
    #[test]
    fn odd() {
        const ODD_HEX_STR: &str = "0x1234567890abcde";
        const ODD_DEC_STR: &str = "81985529205931230";
        const ODD_BYTES: [u8; 8] = [1, 0x23, 0x45, 0x67, 0x89, 0x0a, 0xbc, 0xde];
        let a = starkhash_from_biguint(BigUint::from_bytes_be(&ODD_BYTES)).unwrap();
        let b = starkhash_from_dec_str(ODD_DEC_STR).unwrap();
        let expected = Felt::from_hex_str(ODD_HEX_STR).unwrap();
        assert_eq!(expected, a);
        assert_eq!(expected, b);
        assert_eq!(starkhash_to_dec_str(&expected), ODD_DEC_STR);
        let c: [u8; 8] = bytes_from_hex_str(ODD_HEX_STR).unwrap();
        assert_eq!(c, ODD_BYTES);
        assert_eq!(bytes_to_hex_str(&c[..]), ODD_HEX_STR);
        let mut buf = [0u8; 2 + 16];
        assert_eq!(bytes_as_hex_str(&c[..], &mut buf), ODD_HEX_STR);
    }
    // Even number of hex digits: the straightforward case.
    #[test]
    fn even() {
        const EVEN_HEX_STR: &str = "0x1234567890abcdef";
        const EVEN_DEC_STR: &str = "1311768467294899695";
        const EVEN_BYTES: [u8; 8] = [0x12, 0x34, 0x56, 0x78, 0x90, 0xab, 0xcd, 0xef];
        let a = starkhash_from_biguint(BigUint::from_bytes_be(&EVEN_BYTES)).unwrap();
        let b = starkhash_from_dec_str(EVEN_DEC_STR).unwrap();
        let expected = Felt::from_hex_str(EVEN_HEX_STR).unwrap();
        assert_eq!(expected, a);
        assert_eq!(expected, b);
        assert_eq!(starkhash_to_dec_str(&expected), EVEN_DEC_STR);
        let c: [u8; 8] = bytes_from_hex_str(EVEN_HEX_STR).unwrap();
        assert_eq!(c, EVEN_BYTES);
        assert_eq!(bytes_to_hex_str(&c[..]), EVEN_HEX_STR);
        let mut buf = [0u8; 2 + 16];
        assert_eq!(bytes_as_hex_str(&c[..], &mut buf), EVEN_HEX_STR);
    }
    // Largest value the conversions accept (the MAX_* constants below); one
    // past this value must overflow — see the `overflow` test.
    #[test]
    fn max() {
        const MAX_HEX_STR: &str =
            "0x800000000000011000000000000000000000000000000000000000000000000";
        const MAX_DEC_STR: &str =
            "3618502788666131213697322783095070105623107215331596699973092056135872020480";
        const MAX_BYTES: [u8; 32] = [
            8, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0,
        ];
        let a = starkhash_from_biguint(BigUint::from_bytes_be(&MAX_BYTES)).unwrap();
        let b = starkhash_from_dec_str(MAX_DEC_STR).unwrap();
        let expected = Felt::from_hex_str(MAX_HEX_STR).unwrap();
        assert_eq!(expected, a);
        assert_eq!(expected, b);
        assert_eq!(starkhash_to_dec_str(&expected), MAX_DEC_STR);
        let c: [u8; 32] = bytes_from_hex_str(MAX_HEX_STR).unwrap();
        assert_eq!(c, MAX_BYTES);
        assert_eq!(bytes_to_hex_str(&c[..]), MAX_HEX_STR);
        let mut buf = [0u8; 2 + 64];
        assert_eq!(bytes_as_hex_str(&c[..], &mut buf), MAX_HEX_STR);
    }
    // `bytes_as_hex_str` asserts the buffer is large enough; a one-byte input
    // needs 2 + 2 bytes, so 2 + 1 must panic.
    #[test]
    #[should_panic]
    fn buffer_too_small() {
        let mut buf = [0u8; 2 + 1];
        bytes_as_hex_str(&[0u8], &mut buf);
    }
    // One past the maximum accepted value: both entry points must report
    // OverflowError.
    #[test]
    fn overflow() {
        const OVERFLOW_DEC_STR: &str =
            "3618502788666131213697322783095070105623107215331596699973092056135872020481";
        const OVERFLOW_BYTES: [u8; 32] = [
            8, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 1,
        ];
        assert_eq!(
            starkhash_from_biguint(BigUint::from_bytes_be(&OVERFLOW_BYTES)),
            Err(OverflowError)
        );
        assert_eq!(
            starkhash_from_dec_str(OVERFLOW_DEC_STR)
                .unwrap_err()
                .downcast::<OverflowError>()
                .unwrap(),
            OverflowError,
        );
    }
    // Inputs wider than the 32-byte target: overflow for the numeric parsers,
    // InvalidLength for the raw hex parser.
    #[test]
    fn too_long() {
        const TOO_LONG_HEX_STR: &str =
            "0x80000000000001100000000000000000000000000000000000000000000000100";
        const TOO_LONG_DEC_STR: &str =
            "926336713898529590706514632472337947039515447124888755193111566370783237243136";
        const TOO_LONG_BYTES: [u8; 33] = [
            8, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 1, 0,
        ];
        use stark_hash::HexParseError;
        assert_eq!(
            starkhash_from_biguint(BigUint::from_bytes_be(&TOO_LONG_BYTES)),
            Err(OverflowError)
        );
        assert_eq!(
            starkhash_from_dec_str(TOO_LONG_DEC_STR)
                .unwrap_err()
                .downcast::<OverflowError>()
                .unwrap(),
            OverflowError
        );
        assert_eq!(
            bytes_from_hex_str::<32>(TOO_LONG_HEX_STR),
            Err(HexParseError::InvalidLength {
                max: 64,
                actual: 65
            })
        );
        // Regression: previously max in the error message was hard-coded at 64,
        // so try another buf size to make sure it is not anymore
        assert_eq!(
            &format!("{}", bytes_from_hex_str::<1>("abc").unwrap_err()),
            "More than 2 digits found: 3"
        );
    }
    // Non-decimal digits in decimal input. NOTE(review): "123a" is expected to
    // parse successfully (`unwrap`, not `unwrap_err`) while "123z" fails with
    // InvalidNibble — apparently the decimal parser validates digits with the
    // hex nibble parser, so 'a'..'f' slip through. Confirm this is intended
    // behavior and not a missing `unwrap_err()`.
    #[test]
    fn invalid_digit() {
        starkhash_from_dec_str("123a").unwrap();
        assert_eq!(
            starkhash_from_dec_str("123z")
                .unwrap_err()
                .downcast::<HexParseError>()
                .unwrap(),
            HexParseError::InvalidNibble(b'z')
        );
        assert_eq!(
            bytes_from_hex_str::<32>("0x123z"),
            Err(HexParseError::InvalidNibble(b'z'))
        );
    }
    // Serde round-trip coverage for the StarknetBlockNumberAsHexStr adapter.
    mod block_number_as_hex_str {
        #[serde_with::serde_as]
        #[derive(Debug, Copy, Clone, PartialEq, serde::Deserialize, serde::Serialize)]
        struct BlockNum(
            #[serde_as(as = "super::StarknetBlockNumberAsHexStr")] pathfinder_common::BlockNumber,
        );
        impl BlockNum {
            pub const fn new_or_panic(v: u64) -> Self {
                Self(pathfinder_common::BlockNumber::new_or_panic(v))
            }
        }
        #[test]
        fn deserialize() {
            // u64::from_str_radix does not accept the `0x` prefix, so also make sure it is stripped
            ["", "0x"].into_iter().for_each(|prefix| {
                assert_eq!(
                    serde_json::from_str::<BlockNum>(&format!("\"{prefix}0\"")).unwrap(),
                    BlockNum::new_or_panic(0)
                );
                assert_eq!(
                    serde_json::from_str::<BlockNum>(&format!("\"{prefix}123\"")).unwrap(),
                    BlockNum::new_or_panic(0x123)
                );
                assert_eq!(
                    serde_json::from_str::<BlockNum>(&format!("\"{prefix}1234\"")).unwrap(),
                    BlockNum::new_or_panic(0x1234)
                );
                // u64::MAX in hex exceeds the valid block number range and must
                // be rejected as a data error (not a syntax error).
                let e = serde_json::from_str::<BlockNum>(&format!("\"{prefix}ffffffffffffffff\""))
                    .unwrap_err();
                assert!(e.is_data(), "{e:?}");
            });
        }
    }
}
|
pub mod logging;
pub mod frame_clock;
|
use log::debug;
use std::cell::RefCell;
use crate::query::Connection;
use crate::{Odbc, OdbcError};
thread_local! {
    // Per-thread cached database connection. `None` until a connection is
    // first established by `initialized_connection_with`, or after the user
    // closure dropped the previous connection (or connecting failed).
    static DB: RefCell<Option<Connection>> = RefCell::new(None);
}
/// Runs `f` with this thread's cached database `Connection`.
///
/// The closure receives the `Connection` (or the error produced while trying
/// to establish one) and decides its fate: return it as `Some` to cache it
/// for the next call on this thread, or return `None` to drop it and force a
/// fresh connection on the next call.
///
/// If connecting failed, the error is handed to the closure; the next call
/// will attempt to connect again and may produce a new error.
///
/// `connection_string` is only consulted when a new `Connection` has to be
/// made: on first use, after an error, or after the old one was dropped.
pub fn connection_with<O, F>(
    connection_string: &str,
    f: F
) -> O where F: Fn(Result<Connection, OdbcError>) -> (Option<Connection>, O) {
    // Delegate with a no-op initializer.
    initialized_connection_with(connection_string, |_connection| Ok(()), f)
}
/// Like [`connection_with`], but runs `init` exactly once on every newly
/// established connection before handing it to `f` — useful for session
/// configuration queries.
///
/// If `init` returns an error, the fresh connection is discarded, the error
/// is passed to `f`, and the next call will attempt to connect again.
pub fn initialized_connection_with<O, E, I, F>(
    connection_string: &str,
    init: I,
    f: F
) -> O where E: From<OdbcError>, I: Fn(&mut Connection) -> Result<(), E>, F: Fn(Result<Connection, E>) -> (Option<Connection>, O) {
    DB.with(|db| {
        // Take the cached connection (if any) out of the thread-local slot so
        // nested calls on the same thread see an empty slot and connect anew.
        let cached = db.borrow_mut().take();
        let connection = match cached {
            Some(conn) => conn,
            None => {
                let id = std::thread::current().id();
                debug!("[{:?}] Connecting to database: {}", id, &connection_string);
                // Connect, then run the initializer; either step may fail.
                let attempt = Odbc::connect(&connection_string)
                    .map_err(Into::into)
                    .and_then(|mut conn| init(&mut conn).map(|_| conn));
                match attempt {
                    Ok(conn) => conn,
                    // Report the failure to the closure and leave the slot
                    // empty so the next call retries the connection.
                    Err(err) => return f(Err(err)).1,
                }
            }
        };
        // Hand the live connection to the closure; whatever it returns
        // (Some(conn) or None) becomes the new cached value.
        let (connection, output) = f(Ok(connection));
        *db.borrow_mut() = connection;
        output
    })
}
// Integration tests for the thread-local connection cache. All of them need a
// live MonetDB instance and only compile/run with the `test-monetdb` feature.
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;
    #[allow(unused_imports)]
    use crate::*;
    #[allow(unused_imports)]
    use assert_matches::assert_matches;
    // Basic happy path: connect, run a trivial query, return the connection
    // to the cache via `Some`.
    #[cfg(feature = "test-monetdb")]
    #[test]
    fn test_connection_with() {
        connection_with(
            crate::tests::monetdb_connection_string().as_str(),
            |result| {
                let mut monetdb = result.expect("connect to MonetDB");
                let data = monetdb
                    .handle()
                    .query::<ValueRow>("SELECT 'foo'")
                    .expect("failed to run query")
                    .collect::<Result<Vec<_>, _>>()
                    .expect("fetch data");
                assert_matches!(data[0][0], Some(Value::String(ref string)) => assert_eq!(string, "foo"));
                (Some(monetdb), ())
            },
        )
    }
    // Returning `None` drops the connection; the second call must therefore
    // establish a brand-new one and still succeed.
    #[cfg(feature = "test-monetdb")]
    #[test]
    fn test_connection_with_reconnect() {
        connection_with(
            crate::tests::monetdb_connection_string().as_str(),
            |result| {
                let mut monetdb = result.expect("connect to MonetDB");
                let data = monetdb
                    .handle()
                    .query::<ValueRow>("SELECT 'foo'")
                    .expect("failed to run query")
                    .collect::<Result<Vec<_>, _>>()
                    .expect("fetch data");
                assert_matches!(data[0][0], Some(Value::String(ref string)) => assert_eq!(string, "foo"));
                (None, ())
            },
        );
        connection_with(
            crate::tests::monetdb_connection_string().as_str(),
            |result| {
                let mut monetdb = result.expect("connect to MonetDB");
                let data = monetdb
                    .handle()
                    .query::<ValueRow>("SELECT 'foo'")
                    .expect("failed to run query")
                    .collect::<Result<Vec<_>, _>>()
                    .expect("fetch data");
                assert_matches!(data[0][0], Some(Value::String(ref string)) => assert_eq!(string, "foo"));
                (None, ())
            },
        )
    }
    // Nested use on one thread: the outer call has already `take`n the
    // thread-local connection, so the inner call sees an empty slot and
    // establishes a second, independent connection.
    #[cfg(feature = "test-monetdb")]
    #[test]
    fn test_connection_with_nested() {
        connection_with(
            crate::tests::monetdb_connection_string().as_str(),
            |result| {
                let mut monetdb = result.expect("connect to MonetDB");
                let data = monetdb
                    .handle()
                    .query::<ValueRow>("SELECT 'foo'")
                    .expect("failed to run query")
                    .collect::<Result<Vec<_>, _>>()
                    .expect("fetch data");
                assert_matches!(data[0][0], Some(Value::String(ref string)) => assert_eq!(string, "foo"));
                connection_with(
                    crate::tests::monetdb_connection_string().as_str(),
                    |result| {
                        let mut monetdb = result.expect("connect to MonetDB");
                        let data = monetdb
                            .handle()
                            .query::<ValueRow>("SELECT 'foo'")
                            .expect("failed to run query")
                            .collect::<Result<Vec<_>, _>>()
                            .expect("fetch data");
                        assert_matches!(data[0][0], Some(Value::String(ref string)) => assert_eq!(string, "foo"));
                        (Some(monetdb), ())
                    },
                );
                (Some(monetdb), ())
            },
        )
    }
}
|
//! Alacritty - The GPU Enhanced Terminal.
#![warn(rust_2018_idioms, future_incompatible)]
#![deny(clippy::all, clippy::if_not_else, clippy::enum_glob_use)]
#![cfg_attr(feature = "cargo-clippy", deny(warnings))]
pub mod ansi;
pub mod config;
pub mod event;
pub mod event_loop;
pub mod grid;
pub mod index;
pub mod selection;
pub mod sync;
pub mod term;
pub mod thread;
pub mod tty;
pub mod vi_mode;
pub use crate::grid::Grid;
pub use crate::term::Term;
|
#[doc = r"Value read from the register"]
pub struct R {
    // Raw snapshot of the 32-bit register contents at read time.
    bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
    // Accumulated bit pattern that will be committed to the register.
    bits: u32,
}
impl super::SCGCGPIO {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: seed the writer with the current contents so
        // fields the closure does not touch keep their values.
        let current = self.register.get();
        let r = R { bits: current };
        let mut w = W { bits: current };
        self.register.set(f(&r, &mut w).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Start from the reset value: fields the closure does not set end up
        // at their reset state, not their previous state.
        let mut w = W {
            bits: Self::reset_value(),
        };
        self.register.set(f(&mut w).bits);
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S0R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S0R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S0W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S0W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 0);
self.w.bits |= ((value as u32) & 1) << 0;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S1R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S1R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S1W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S1W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 1);
self.w.bits |= ((value as u32) & 1) << 1;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S2R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S2R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S2W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S2W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 2);
self.w.bits |= ((value as u32) & 1) << 2;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S3R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S3R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S3W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S3W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 3);
self.w.bits |= ((value as u32) & 1) << 3;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S4R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S4R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S4W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S4W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 4);
self.w.bits |= ((value as u32) & 1) << 4;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S5R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S5R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S5W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S5W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 5);
self.w.bits |= ((value as u32) & 1) << 5;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S6R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S6R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S6W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S6W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 6);
self.w.bits |= ((value as u32) & 1) << 6;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S7R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S7R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S7W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S7W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 7);
self.w.bits |= ((value as u32) & 1) << 7;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S8R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S8R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S8W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S8W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 8);
self.w.bits |= ((value as u32) & 1) << 8;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S9R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S9R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S9W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S9W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 9);
self.w.bits |= ((value as u32) & 1) << 9;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S10R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S10R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S10W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S10W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 10);
self.w.bits |= ((value as u32) & 1) << 10;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S11R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S11R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S11W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S11W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 11);
self.w.bits |= ((value as u32) & 1) << 11;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S12R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S12R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S12W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S12W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 12);
self.w.bits |= ((value as u32) & 1) << 12;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S13R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S13R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S13W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S13W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 13);
self.w.bits |= ((value as u32) & 1) << 13;
self.w
}
}
#[doc = r"Value of the field"]
pub struct SYSCTL_SCGCGPIO_S14R {
bits: bool,
}
impl SYSCTL_SCGCGPIO_S14R {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _SYSCTL_SCGCGPIO_S14W<'a> {
w: &'a mut W,
}
impl<'a> _SYSCTL_SCGCGPIO_S14W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits &= !(1 << 14);
self.w.bits |= ((value as u32) & 1) << 14;
self.w
}
}
// Generates one read accessor per field: extracts the bit at `$offset` from
// the register snapshot and wraps it in its typed reader. The doc attribute
// is passed through untouched.
macro_rules! scgcgpio_read_field {
    ($(#[$attr:meta])* $fn_name:ident, $reader:ident, $offset:expr) => {
        $(#[$attr])*
        #[inline(always)]
        pub fn $fn_name(&self) -> $reader {
            let bits = ((self.bits >> $offset) & 1) != 0;
            $reader { bits }
        }
    };
}
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    scgcgpio_read_field!(#[doc = "Bit 0 - GPIO Port A Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s0, SYSCTL_SCGCGPIO_S0R, 0);
    scgcgpio_read_field!(#[doc = "Bit 1 - GPIO Port B Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s1, SYSCTL_SCGCGPIO_S1R, 1);
    scgcgpio_read_field!(#[doc = "Bit 2 - GPIO Port C Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s2, SYSCTL_SCGCGPIO_S2R, 2);
    scgcgpio_read_field!(#[doc = "Bit 3 - GPIO Port D Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s3, SYSCTL_SCGCGPIO_S3R, 3);
    scgcgpio_read_field!(#[doc = "Bit 4 - GPIO Port E Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s4, SYSCTL_SCGCGPIO_S4R, 4);
    scgcgpio_read_field!(#[doc = "Bit 5 - GPIO Port F Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s5, SYSCTL_SCGCGPIO_S5R, 5);
    scgcgpio_read_field!(#[doc = "Bit 6 - GPIO Port G Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s6, SYSCTL_SCGCGPIO_S6R, 6);
    scgcgpio_read_field!(#[doc = "Bit 7 - GPIO Port H Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s7, SYSCTL_SCGCGPIO_S7R, 7);
    scgcgpio_read_field!(#[doc = "Bit 8 - GPIO Port J Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s8, SYSCTL_SCGCGPIO_S8R, 8);
    scgcgpio_read_field!(#[doc = "Bit 9 - GPIO Port K Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s9, SYSCTL_SCGCGPIO_S9R, 9);
    scgcgpio_read_field!(#[doc = "Bit 10 - GPIO Port L Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s10, SYSCTL_SCGCGPIO_S10R, 10);
    scgcgpio_read_field!(#[doc = "Bit 11 - GPIO Port M Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s11, SYSCTL_SCGCGPIO_S11R, 11);
    scgcgpio_read_field!(#[doc = "Bit 12 - GPIO Port N Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s12, SYSCTL_SCGCGPIO_S12R, 12);
    scgcgpio_read_field!(#[doc = "Bit 13 - GPIO Port P Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s13, SYSCTL_SCGCGPIO_S13R, 13);
    scgcgpio_read_field!(#[doc = "Bit 14 - GPIO Port Q Sleep Mode Clock Gating Control"] sysctl_scgcgpio_s14, SYSCTL_SCGCGPIO_S14R, 14);
}
impl W {
    // Write-side accessors (svd2rust-generated): each `sysctl_scgcgpio_s*`
    // method returns a one-bit write proxy that updates the corresponding
    // field in the value staged by this writer.
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Unsafe because arbitrary raw values may violate field invariants.
        self.bits = bits;
        self
    }
    #[doc = "Bit 0 - GPIO Port A Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s0(&mut self) -> _SYSCTL_SCGCGPIO_S0W {
        _SYSCTL_SCGCGPIO_S0W { w: self }
    }
    #[doc = "Bit 1 - GPIO Port B Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s1(&mut self) -> _SYSCTL_SCGCGPIO_S1W {
        _SYSCTL_SCGCGPIO_S1W { w: self }
    }
    #[doc = "Bit 2 - GPIO Port C Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s2(&mut self) -> _SYSCTL_SCGCGPIO_S2W {
        _SYSCTL_SCGCGPIO_S2W { w: self }
    }
    #[doc = "Bit 3 - GPIO Port D Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s3(&mut self) -> _SYSCTL_SCGCGPIO_S3W {
        _SYSCTL_SCGCGPIO_S3W { w: self }
    }
    #[doc = "Bit 4 - GPIO Port E Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s4(&mut self) -> _SYSCTL_SCGCGPIO_S4W {
        _SYSCTL_SCGCGPIO_S4W { w: self }
    }
    #[doc = "Bit 5 - GPIO Port F Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s5(&mut self) -> _SYSCTL_SCGCGPIO_S5W {
        _SYSCTL_SCGCGPIO_S5W { w: self }
    }
    #[doc = "Bit 6 - GPIO Port G Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s6(&mut self) -> _SYSCTL_SCGCGPIO_S6W {
        _SYSCTL_SCGCGPIO_S6W { w: self }
    }
    #[doc = "Bit 7 - GPIO Port H Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s7(&mut self) -> _SYSCTL_SCGCGPIO_S7W {
        _SYSCTL_SCGCGPIO_S7W { w: self }
    }
    #[doc = "Bit 8 - GPIO Port J Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s8(&mut self) -> _SYSCTL_SCGCGPIO_S8W {
        _SYSCTL_SCGCGPIO_S8W { w: self }
    }
    #[doc = "Bit 9 - GPIO Port K Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s9(&mut self) -> _SYSCTL_SCGCGPIO_S9W {
        _SYSCTL_SCGCGPIO_S9W { w: self }
    }
    #[doc = "Bit 10 - GPIO Port L Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s10(&mut self) -> _SYSCTL_SCGCGPIO_S10W {
        _SYSCTL_SCGCGPIO_S10W { w: self }
    }
    #[doc = "Bit 11 - GPIO Port M Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s11(&mut self) -> _SYSCTL_SCGCGPIO_S11W {
        _SYSCTL_SCGCGPIO_S11W { w: self }
    }
    #[doc = "Bit 12 - GPIO Port N Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s12(&mut self) -> _SYSCTL_SCGCGPIO_S12W {
        _SYSCTL_SCGCGPIO_S12W { w: self }
    }
    #[doc = "Bit 13 - GPIO Port P Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s13(&mut self) -> _SYSCTL_SCGCGPIO_S13W {
        _SYSCTL_SCGCGPIO_S13W { w: self }
    }
    #[doc = "Bit 14 - GPIO Port Q Sleep Mode Clock Gating Control"]
    #[inline(always)]
    pub fn sysctl_scgcgpio_s14(&mut self) -> _SYSCTL_SCGCGPIO_S14W {
        _SYSCTL_SCGCGPIO_S14W { w: self }
    }
}
|
use std::borrow::Cow;
use std::collections::HashMap;
use std::convert::TryInto;
use std::sync::Arc;
use futures_core::future::BoxFuture;
use futures_util::TryFutureExt;
use crate::connection::{Connect, Connection};
use crate::executor::Executor;
use crate::postgres::protocol::{
Authentication, AuthenticationMd5, AuthenticationSasl, BackendKeyData, Message,
PasswordMessage, StartupMessage, StatementId, Terminate,
};
use crate::postgres::row::Statement;
use crate::postgres::stream::PgStream;
use crate::postgres::type_info::SharedStr;
use crate::postgres::{sasl, tls};
use crate::url::Url;
/// An asynchronous connection to a [Postgres](struct.Postgres.html) database.
///
/// The connection string expected by [Connect::connect] should be a PostgreSQL connection
/// string, as documented at
/// <https://www.postgresql.org/docs/12/libpq-connect.html#LIBPQ-CONNSTRING>
///
/// ### TLS Support (requires `tls` feature)
/// This connection type supports the same `sslmode` query parameter that `libpq` does in
/// connection strings: <https://www.postgresql.org/docs/12/libpq-ssl.html>
///
/// ```text
/// postgresql://<user>[:<password>]@<host>[:<port>]/<database>[?sslmode=<ssl-mode>[&sslcrootcert=<path>]]
/// ```
/// where
/// ```text
/// ssl-mode = disable | allow | prefer | require | verify-ca | verify-full
/// path = percent (URL) encoded path on the local machine
/// ```
///
/// If the `tls` feature is not enabled, `disable`, `allow` and `prefer` are no-ops and `require`,
/// `verify-ca` and `verify-full` are forbidden (attempting to connect with these will return
/// an error).
///
/// If the `tls` feature is enabled, an upgrade to TLS is attempted on every connection by default
/// (equivalent to `sslmode=prefer`). If the server does not support TLS (because it was not
/// started with a valid certificate and key, see <https://www.postgresql.org/docs/12/ssl-tcp.html>)
/// then it falls back to an unsecured connection and logs a warning.
///
/// Add `sslmode=require` to your connection string to emit an error if the TLS upgrade fails.
///
/// If you're running Postgres locally, your connection string might look like this:
/// ```text
/// postgresql://root:password@localhost/my_database?sslmode=require
/// ```
///
/// However, like with `libpq` the server certificate is **not** checked for validity by default.
///
/// Specifying `sslmode=verify-ca` will cause the TLS upgrade to verify the server's SSL
/// certificate against a local CA root certificate; this is not the system root certificate
/// but is instead expected to be specified in one of a few ways:
///
/// * The path to the certificate can be specified by adding the `sslrootcert` query parameter
/// to the connection string. (Remember to percent-encode it!)
///
/// * The path may also be specified via the `PGSSLROOTCERT` environment variable (which
/// should *not* be percent-encoded.)
///
/// * Otherwise, the library will look for the Postgres global root CA certificate in the default
/// location:
///
/// * `$HOME/.postgresql/root.crt` on POSIX systems
/// * `%APPDATA%\postgresql\root.crt` on Windows
///
/// These locations are documented here: <https://www.postgresql.org/docs/12/libpq-ssl.html#LIBQ-SSL-CERTIFICATES>
/// If the root certificate cannot be found by any of these means then the TLS upgrade will fail.
///
/// If `sslmode=verify-full` is specified, in addition to checking the certificate as with
/// `sslmode=verify-ca`, the hostname in the connection string will be verified
/// against the hostname in the server certificate, so they must be the same for the TLS
/// upgrade to succeed.
pub struct PgConnection {
    // Buffered stream speaking the Postgres wire protocol (plain TCP or TLS).
    pub(super) stream: PgStream,
    // Monotonic counter used to mint unique prepared-statement IDs.
    pub(super) next_statement_id: u32,
    // Presumably true while the connection is idle / ready for a new query
    // (set after the startup handshake) — confirm against the executor.
    pub(super) is_ready: bool,
    // cache query -> statement ID
    pub(super) cache_statement_id: HashMap<Box<str>, StatementId>,
    // cache statement ID -> statement description
    pub(super) cache_statement: HashMap<StatementId, Arc<Statement>>,
    // cache type name -> type OID
    pub(super) cache_type_oid: HashMap<SharedStr, u32>,
    // cache type OID -> type name
    pub(super) cache_type_name: HashMap<u32, SharedStr>,
    // Work buffer for the value ranges of the current row
    // This is used as the backing memory for each Row's value indexes
    pub(super) current_row_values: Vec<Option<(u32, u32)>>,
    // TODO: Find a use for these values. Perhaps in a debug impl of PgConnection?
    #[allow(dead_code)]
    process_id: u32,
    #[allow(dead_code)]
    secret_key: u32,
}
// https://www.postgresql.org/docs/12/protocol-flow.html#id-1.10.5.7.3
/// Performs the Postgres startup handshake on a fresh stream: sends the
/// `StartupMessage`, answers the server's authentication challenge
/// (cleartext, MD5 or SASL/SCRAM-SHA-256), and loops until the server sends
/// `ReadyForQuery`, collecting `BackendKeyData` along the way.
///
/// Returns the backend key data (zeroed if the server never sent it).
/// Fails if the server requests an unsupported authentication scheme or
/// sends an unexpected message during the handshake.
async fn startup(stream: &mut PgStream, url: &Url) -> crate::Result<BackendKeyData> {
    // Defaults to postgres@.../postgres
    let username = url.username().unwrap_or(Cow::Borrowed("postgres"));
    let database = url.database().unwrap_or("postgres");
    // See this doc for more runtime parameters
    // https://www.postgresql.org/docs/12/runtime-config-client.html
    let params = &[
        ("user", username.as_ref()),
        ("database", database),
        // Sets the display format for date and time values,
        // as well as the rules for interpreting ambiguous date input values.
        ("DateStyle", "ISO, MDY"),
        // Sets the time zone for displaying and interpreting time stamps.
        ("TimeZone", "UTC"),
        // Adjust postgres to return precise values for floats
        // NOTE: This is default in postgres 12+
        ("extra_float_digits", "3"),
        // Sets the client-side encoding (character set).
        ("client_encoding", "UTF-8"),
    ];
    stream.write(StartupMessage { params });
    stream.flush().await?;
    // Zeroed placeholder in case the server never sends BackendKeyData.
    let mut key_data = BackendKeyData {
        process_id: 0,
        secret_key: 0,
    };
    loop {
        match stream.receive().await? {
            Message::Authentication => match Authentication::read(stream.buffer())? {
                Authentication::Ok => {
                    // do nothing. no password is needed to continue.
                }
                Authentication::CleartextPassword => {
                    stream.write(PasswordMessage::ClearText(
                        &url.password().unwrap_or_default(),
                    ));
                    stream.flush().await?;
                }
                Authentication::Md5Password => {
                    // TODO: Just reference the salt instead of returning a stack array
                    // TODO: Better way to make sure we skip the first 4 bytes here
                    let data = AuthenticationMd5::read(&stream.buffer()[4..])?;
                    stream.write(PasswordMessage::Md5 {
                        password: &url.password().unwrap_or_default(),
                        user: username.as_ref(),
                        salt: data.salt,
                    });
                    stream.flush().await?;
                }
                Authentication::Sasl => {
                    // TODO: Make this iterative for traversing the mechanisms to remove the allocation
                    // TODO: Better way to make sure we skip the first 4 bytes here
                    let data = AuthenticationSasl::read(&stream.buffer()[4..])?;
                    // Scan the advertised mechanisms for the two SCRAM variants we know.
                    let mut has_sasl: bool = false;
                    let mut has_sasl_plus: bool = false;
                    for mechanism in &*data.mechanisms {
                        match &**mechanism {
                            "SCRAM-SHA-256" => {
                                has_sasl = true;
                            }
                            "SCRAM-SHA-256-PLUS" => {
                                has_sasl_plus = true;
                            }
                            _ => {
                                log::info!("unsupported auth mechanism: {}", mechanism);
                            }
                        }
                    }
                    if has_sasl || has_sasl_plus {
                        // TODO: Handle -PLUS differently if we're in a TLS stream
                        sasl::authenticate(
                            stream,
                            username.as_ref(),
                            &url.password().unwrap_or_default(),
                        )
                        .await?;
                    } else {
                        return Err(protocol_err!(
                            "unsupported SASL auth mechanisms: {:?}",
                            data.mechanisms
                        )
                        .into());
                    }
                }
                auth => {
                    return Err(
                        protocol_err!("requested unsupported authentication: {:?}", auth).into(),
                    );
                }
            },
            Message::BackendKeyData => {
                // do nothing. we do not care about the server values here.
                key_data = BackendKeyData::read(stream.buffer())?;
            }
            Message::ParameterStatus => {
                // do nothing. we do not care about the server values here.
            }
            Message::ReadyForQuery => {
                // done. connection is now fully established and can accept
                // queries for execution.
                break;
            }
            type_ => {
                return Err(protocol_err!("unexpected message: {:?}", type_).into());
            }
        }
    }
    Ok(key_data)
}
// https://www.postgresql.org/docs/12/protocol-flow.html#id-1.10.5.7.10
/// Gracefully disconnects: sends the `Terminate` message and shuts the
/// socket down, consuming the stream. Used by `Connection::close`.
async fn terminate(mut stream: PgStream) -> crate::Result<()> {
    stream.write(Terminate);
    stream.flush().await?;
    stream.shutdown()?;
    Ok(())
}
impl PgConnection {
pub(super) async fn new(url: std::result::Result<Url, url::ParseError>) -> crate::Result<Self> {
let url = url?;
let mut stream = PgStream::new(&url).await?;
tls::request_if_needed(&mut stream, &url).await?;
let key_data = startup(&mut stream, &url).await?;
Ok(Self {
stream,
current_row_values: Vec::with_capacity(10),
next_statement_id: 1,
is_ready: true,
cache_type_oid: HashMap::new(),
cache_type_name: HashMap::new(),
cache_statement_id: HashMap::with_capacity(10),
cache_statement: HashMap::with_capacity(10),
process_id: key_data.process_id,
secret_key: key_data.secret_key,
})
}
}
impl Connect for PgConnection {
    /// Parses `url` into a [`Url`] and connects; parse errors are reported
    /// through the returned future rather than eagerly.
    fn connect<T>(url: T) -> BoxFuture<'static, crate::Result<PgConnection>>
    where
        T: TryInto<Url, Error = url::ParseError>,
        Self: Sized,
    {
        Box::pin(PgConnection::new(url.try_into()))
    }
}
impl Connection for PgConnection {
    /// Gracefully closes the connection by sending `Terminate` and shutting
    /// down the socket.
    fn close(self) -> BoxFuture<'static, crate::Result<()>> {
        Box::pin(terminate(self.stream))
    }
    /// Health check: round-trips a trivial `SELECT 1` and discards the result.
    fn ping(&mut self) -> BoxFuture<crate::Result<()>> {
        Box::pin(Executor::execute(self, "SELECT 1").map_ok(|_| ()))
    }
}
|
/// Untyped Vulkan function pointer (the `PFN_vkVoidFunction` type from the
/// Vulkan C headers), to be cast to a concrete function type by the caller.
pub type PFN_vkVoidFunction = extern "system" fn() -> ();
// create dummy
/// No-op function usable as a placeholder `PFN_vkVoidFunction` value.
pub extern "system" fn vkVoidFunction() -> () {}
|
// NOTE(review): `#![feature(link_args)]` / `#[link_args]` were unstable and
// have since been removed from Rust; on a modern toolchain these Emscripten
// linker flags must move to a build script or `.cargo/config.toml`
// (`target.*.rustflags = ["-C", "link-args=..."]`).
#![feature(link_args)]
#![no_main]
#[link_args = "-s MODULARIZE=1 -s EXPORT_NAME='LiveSplitCore' -Oz -s TOTAL_MEMORY=33554432 -s ALLOW_MEMORY_GROWTH=1 -s BINARYEN_METHOD='native-wasm'"]
extern "C" {}
extern crate livesplit_core_capi;
// Re-export the entire C API surface so the cdylib exposes it to Emscripten.
pub use livesplit_core_capi::*;
|
use crate::event::Button;
impl Button {
/// Converts this button to its Linux evdev key code (`BTN_*` from
/// linux/input-event-codes.h).
///
/// Several variants intentionally share a code because the Linux headers
/// define them as aliases (e.g. `BTN_SOUTH == BTN_A == BTN_GAMEPAD == 0x130`).
pub(crate) fn to_raw(&self) -> u16 {
    use Button::*;
    match *self {
        A => 0x0130,
        B => 0x0131,
        Back => 0x0116,
        Base => 0x0126,
        Base2 => 0x0127,
        Base3 => 0x0128,
        Base4 => 0x0129,
        Base5 => 0x012A,
        Base6 => 0x012B,
        C => 0x0132,
        Dead => 0x012F,
        Digi => 0x0140,
        DpadDown => 0x0221,
        DpadLeft => 0x0222,
        DpadRight => 0x0223,
        DpadUp => 0x0220,
        East => 0x0131,
        Extra => 0x0114,
        Forward => 0x0115,
        Gamepad => 0x0130,
        GearDown => 0x0150,
        GearUp => 0x0151,
        Joystick => 0x0120,
        Left => 0x0110,
        Middle => 0x0112,
        Misc => 0x0100,
        Mode => 0x013C,
        Mouse => 0x0110,
        N0 => 0x0100,
        N1 => 0x0101,
        N2 => 0x0102,
        N3 => 0x0103,
        N4 => 0x0104,
        N5 => 0x0105,
        N6 => 0x0106,
        N7 => 0x0107,
        N8 => 0x0108,
        N9 => 0x0109,
        North => 0x0133,
        Pinkie => 0x0125,
        Right => 0x0111,
        Select => 0x013A,
        Side => 0x0113,
        South => 0x0130,
        Start => 0x013B,
        Stylus => 0x014B,
        Stylus2 => 0x014C,
        Stylus3 => 0x0149,
        Task => 0x0117,
        Thumb => 0x0121,
        Thumb2 => 0x0122,
        Thumbl => 0x013D,
        Thumbr => 0x013E,
        Tl => 0x0136,
        Tl2 => 0x0138,
        ToolAirbrush => 0x0144,
        ToolBrush => 0x0142,
        ToolDoubletap => 0x014D,
        ToolFinger => 0x0145,
        ToolLens => 0x0147,
        ToolMouse => 0x0146,
        ToolPen => 0x0140,
        ToolPencil => 0x0143,
        ToolQuadtap => 0x014F,
        ToolQuinttap => 0x0148,
        ToolRubber => 0x0141,
        ToolTripletap => 0x014E,
        Top => 0x0123,
        Top2 => 0x0124,
        Touch => 0x014A,
        Tr => 0x0137,
        Tr2 => 0x0139,
        Trigger => 0x0120,
        TriggerHappy => 0x02C0,
        TriggerHappy1 => 0x02C0,
        TriggerHappy10 => 0x02C9,
        TriggerHappy11 => 0x02CA,
        TriggerHappy12 => 0x02CB,
        TriggerHappy13 => 0x02CC,
        TriggerHappy14 => 0x02CD,
        TriggerHappy15 => 0x02CE,
        TriggerHappy16 => 0x02CF,
        TriggerHappy17 => 0x02D0,
        TriggerHappy18 => 0x02D1,
        TriggerHappy19 => 0x02D2,
        TriggerHappy2 => 0x02C1,
        TriggerHappy20 => 0x02D3,
        TriggerHappy21 => 0x02D4,
        TriggerHappy22 => 0x02D5,
        TriggerHappy23 => 0x02D6,
        TriggerHappy24 => 0x02D7,
        TriggerHappy25 => 0x02D8,
        TriggerHappy26 => 0x02D9,
        TriggerHappy27 => 0x02DA,
        TriggerHappy28 => 0x02DB,
        TriggerHappy29 => 0x02DC,
        TriggerHappy3 => 0x02C2,
        TriggerHappy30 => 0x02DD,
        TriggerHappy31 => 0x02DE,
        TriggerHappy32 => 0x02DF,
        TriggerHappy33 => 0x02E0,
        TriggerHappy34 => 0x02E1,
        TriggerHappy35 => 0x02E2,
        TriggerHappy36 => 0x02E3,
        TriggerHappy37 => 0x02E4,
        TriggerHappy38 => 0x02E5,
        TriggerHappy39 => 0x02E6,
        TriggerHappy4 => 0x02C3,
        TriggerHappy40 => 0x02E7,
        TriggerHappy5 => 0x02C4,
        TriggerHappy6 => 0x02C5,
        TriggerHappy7 => 0x02C6,
        TriggerHappy8 => 0x02C7,
        TriggerHappy9 => 0x02C8,
        West => 0x0134,
        Wheel => 0x0150,
        // BUG FIX: X/Y previously mapped to 0x0130/0x0150, colliding with
        // A/South and GearDown/Wheel. Per linux/input-event-codes.h,
        // BTN_X = BTN_NORTH = 0x133 and BTN_Y = BTN_WEST = 0x134 — matching
        // the North/West arms above, as every other alias pair here does.
        X => 0x0133,
        Y => 0x0134,
        Z => 0x0135,
    }
}
/// Converts a Linux evdev key code into a `Button`, if known.
///
/// Earlier arms win, so for aliased codes (e.g. `0x0130`, `0x0131`) the
/// duplicate arms further down are unreachable and serve only as
/// documentation — hence the `unreachable_patterns` allowance. In particular
/// `X`/`Y` are never produced here because their codes alias `North`/`West`.
pub(crate) fn from_raw(code: u16) -> Option<Self> {
    use Button::*;
    // This is generated from linux headers, some patterns are unreachable, and we don't care.
    #[allow(unreachable_patterns, clippy::match_overlapping_arm)]
    let button = match code {
        0x0130 => A,
        0x0131 => B,
        0x0116 => Back,
        0x0126 => Base,
        0x0127 => Base2,
        0x0128 => Base3,
        0x0129 => Base4,
        0x012A => Base5,
        0x012B => Base6,
        0x0132 => C,
        0x012F => Dead,
        0x0140 => Digi,
        0x0221 => DpadDown,
        0x0222 => DpadLeft,
        0x0223 => DpadRight,
        0x0220 => DpadUp,
        0x0131 => East,
        0x0114 => Extra,
        0x0115 => Forward,
        0x0130 => Gamepad,
        0x0150 => GearDown,
        0x0151 => GearUp,
        0x0120 => Joystick,
        0x0110 => Left,
        0x0112 => Middle,
        0x0100 => Misc,
        0x013C => Mode,
        0x0110 => Mouse,
        0x0100 => N0,
        0x0101 => N1,
        0x0102 => N2,
        0x0103 => N3,
        0x0104 => N4,
        0x0105 => N5,
        0x0106 => N6,
        0x0107 => N7,
        0x0108 => N8,
        0x0109 => N9,
        0x0133 => North,
        0x0125 => Pinkie,
        0x0111 => Right,
        0x013A => Select,
        0x0113 => Side,
        0x0130 => South,
        0x013B => Start,
        0x014B => Stylus,
        0x014C => Stylus2,
        0x0149 => Stylus3,
        0x0117 => Task,
        0x0121 => Thumb,
        0x0122 => Thumb2,
        0x013D => Thumbl,
        0x013E => Thumbr,
        0x0136 => Tl,
        0x0138 => Tl2,
        0x0144 => ToolAirbrush,
        0x0142 => ToolBrush,
        0x014D => ToolDoubletap,
        0x0145 => ToolFinger,
        0x0147 => ToolLens,
        0x0146 => ToolMouse,
        0x0140 => ToolPen,
        0x0143 => ToolPencil,
        0x014F => ToolQuadtap,
        0x0148 => ToolQuinttap,
        0x0141 => ToolRubber,
        0x014E => ToolTripletap,
        0x0123 => Top,
        0x0124 => Top2,
        0x014A => Touch,
        0x0137 => Tr,
        0x0139 => Tr2,
        0x0120 => Trigger,
        0x02C0 => TriggerHappy,
        0x02C0 => TriggerHappy1,
        0x02C9 => TriggerHappy10,
        0x02CA => TriggerHappy11,
        0x02CB => TriggerHappy12,
        0x02CC => TriggerHappy13,
        0x02CD => TriggerHappy14,
        0x02CE => TriggerHappy15,
        0x02CF => TriggerHappy16,
        0x02D0 => TriggerHappy17,
        0x02D1 => TriggerHappy18,
        0x02D2 => TriggerHappy19,
        0x02C1 => TriggerHappy2,
        0x02D3 => TriggerHappy20,
        0x02D4 => TriggerHappy21,
        0x02D5 => TriggerHappy22,
        0x02D6 => TriggerHappy23,
        0x02D7 => TriggerHappy24,
        0x02D8 => TriggerHappy25,
        0x02D9 => TriggerHappy26,
        0x02DA => TriggerHappy27,
        0x02DB => TriggerHappy28,
        0x02DC => TriggerHappy29,
        0x02C2 => TriggerHappy3,
        0x02DD => TriggerHappy30,
        0x02DE => TriggerHappy31,
        0x02DF => TriggerHappy32,
        0x02E0 => TriggerHappy33,
        0x02E1 => TriggerHappy34,
        0x02E2 => TriggerHappy35,
        0x02E3 => TriggerHappy36,
        0x02E4 => TriggerHappy37,
        0x02E5 => TriggerHappy38,
        0x02E6 => TriggerHappy39,
        0x02C3 => TriggerHappy4,
        0x02E7 => TriggerHappy40,
        0x02C4 => TriggerHappy5,
        0x02C5 => TriggerHappy6,
        0x02C6 => TriggerHappy7,
        0x02C7 => TriggerHappy8,
        0x02C8 => TriggerHappy9,
        0x0134 => West,
        0x0150 => Wheel,
        0x0135 => Z,
        _ => return None,
    };
    Some(button)
}
}
|
#[doc = "Reader of register OUT_CTRL"]
pub type R = crate::R<u8, super::OUT_CTRL>;
#[doc = "Writer for register OUT_CTRL"]
pub type W = crate::W<u8, super::OUT_CTRL>;
#[doc = "Register OUT_CTRL `reset()`'s with value 0"]
impl crate::ResetValue for super::OUT_CTRL {
    type Type = u8;
    /// Reset value of the register: all fields cleared.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `epno`"]
pub type EPNO_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `epno`"]
pub struct EPNO_W<'a> {
    // Mutable borrow of the register writer whose staged bits are updated.
    w: &'a mut W,
}
impl<'a> EPNO_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0..=3: clear them, then splice in the new value.
        const MASK: u8 = 0x0f;
        self.w.bits = (self.w.bits & !MASK) | (value & MASK);
        self.w
    }
}
#[doc = "Reader of field `enable`"]
pub type ENABLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `enable`"]
pub struct ENABLE_W<'a> {
    // Mutable borrow of the register writer whose staged bits are updated.
    w: &'a mut W,
}
impl<'a> ENABLE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at position 4: clear it, then OR in the new value.
        self.w.bits = (self.w.bits & !(1 << 4)) | (u8::from(value) << 4);
        self.w
    }
}
#[doc = "Reader of field `reset`"]
pub type RESET_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `reset`"]
pub struct RESET_W<'a> {
    // Mutable borrow of the register writer whose staged bits are updated.
    w: &'a mut W,
}
impl<'a> RESET_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at position 5: clear it, then OR in the new value.
        self.w.bits = (self.w.bits & !(1 << 5)) | (u8::from(value) << 5);
        self.w
    }
}
#[doc = "Reader of field `stall`"]
pub type STALL_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `stall`"]
pub struct STALL_W<'a> {
    // Mutable borrow of the register writer whose staged bits are updated.
    w: &'a mut W,
}
impl<'a> STALL_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at position 6: clear it, then OR in the new value.
        self.w.bits = (self.w.bits & !(1 << 6)) | (u8::from(value) << 6);
        self.w
    }
}
impl R {
    // Read-side accessors for the OUT_CTRL register fields.
    #[doc = "Bits 0:3 - The endpoint number to update the ``enable`` and ``status`` bits for."]
    #[inline(always)]
    pub fn epno(&self) -> EPNO_R {
        // Low nibble holds the endpoint number.
        EPNO_R::new(self.bits & 0x0f)
    }
    #[doc = "Bit 4 - Write a ``1`` here to enable receiving data"]
    #[inline(always)]
    pub fn enable(&self) -> ENABLE_R {
        ENABLE_R::new((self.bits & (1 << 4)) != 0)
    }
    #[doc = "Bit 5 - Write a ``1`` here to reset the ``OUT`` handler"]
    #[inline(always)]
    pub fn reset(&self) -> RESET_R {
        RESET_R::new((self.bits & (1 << 5)) != 0)
    }
    #[doc = "Bit 6 - Write a ``1`` here to stall an endpoint"]
    #[inline(always)]
    pub fn stall(&self) -> STALL_R {
        STALL_R::new((self.bits & (1 << 6)) != 0)
    }
}
impl W {
    // Write-side accessors: each method returns a proxy that stages a field
    // update into this writer's value.
    #[doc = "Bits 0:3 - The endpoint number to update the ``enable`` and ``status`` bits for."]
    #[inline(always)]
    pub fn epno(&mut self) -> EPNO_W {
        EPNO_W { w: self }
    }
    #[doc = "Bit 4 - Write a ``1`` here to enable receiving data"]
    #[inline(always)]
    pub fn enable(&mut self) -> ENABLE_W {
        ENABLE_W { w: self }
    }
    #[doc = "Bit 5 - Write a ``1`` here to reset the ``OUT`` handler"]
    #[inline(always)]
    pub fn reset(&mut self) -> RESET_W {
        RESET_W { w: self }
    }
    #[doc = "Bit 6 - Write a ``1`` here to stall an endpoint"]
    #[inline(always)]
    pub fn stall(&mut self) -> STALL_W {
        STALL_W { w: self }
    }
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use common_catalog::table::Table;
use common_catalog::table_context::TableContext;
use common_exception::Result;
use common_expression::types::StringType;
use common_expression::utils::FromData;
use common_expression::DataBlock;
use common_expression::TableDataType;
use common_expression::TableField;
use common_expression::TableSchemaRefExt;
use common_meta_app::schema::TableIdent;
use common_meta_app::schema::TableInfo;
use common_meta_app::schema::TableMeta;
use crate::SyncOneBlockSystemTable;
use crate::SyncSystemTable;
/// `system.table_functions`: a one-block system table listing the names of
/// the table functions registered in the catalog.
pub struct TableFunctionsTable {
    // Identity/metadata for this system table (schema, engine, ids).
    table_info: TableInfo,
}
impl SyncSystemTable for TableFunctionsTable {
    const NAME: &'static str = "system.table_functions";
    fn get_table_info(&self) -> &TableInfo {
        &self.table_info
    }
    /// Builds the single result block: one string column `name` holding the
    /// table-function names reported by the catalog.
    fn get_full_data(&self, ctx: Arc<dyn TableContext>) -> Result<DataBlock> {
        // NOTE(review): catalog name "default" is hard-coded — confirm this
        // is intended when multiple catalogs are configured.
        let func_names = ctx.get_catalog("default")?.list_table_functions();
        let names = func_names.iter().map(|s| s.as_str()).collect::<Vec<_>>();
        Ok(DataBlock::new_from_columns(vec![StringType::from_data(
            names,
        )]))
    }
}
impl TableFunctionsTable {
    /// Creates the table with the given `table_id`: a single-column
    /// (`name: String`) schema wrapped in a one-block sync system table.
    pub fn create(table_id: u64) -> Arc<dyn Table> {
        let schema =
            TableSchemaRefExt::create(vec![TableField::new("name", TableDataType::String)]);
        let table_info = TableInfo {
            desc: "'system'.'table_functions'".to_string(),
            name: "table_functions".to_string(),
            // seq 0: system tables are not versioned through the meta service.
            ident: TableIdent::new(table_id, 0),
            meta: TableMeta {
                schema,
                engine: "SystemTableFunctions".to_string(),
                ..Default::default()
            },
            ..Default::default()
        };
        SyncOneBlockSystemTable::create(TableFunctionsTable { table_info })
    }
}
|
pub mod base;
pub mod computer;
mod util;
|
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use fibers::sync::mpsc;
use futures::{Async, Future, Poll, Stream};
use {Error, ErrorKind, Result, WatchMask, Watcher, WatcherEvent};
use internal_inotify::{Inotify, WatchDecriptor};
use watcher::WatcherId;
/// [Inotify] service.
///
/// This is a [`Future`] that never terminate except error cases.
/// Internally it manages zero or more file descriptors of [inotify] as needed.
///
/// [inotify]: https://en.wikipedia.org/wiki/Inotify
/// [`Future`]: https://docs.rs/futures/0.1/futures/future/trait.Future.html
#[derive(Debug)]
pub struct InotifyService {
inotifies: Vec<InotifyState>,
command_tx: mpsc::Sender<Command>,
command_rx: mpsc::Receiver<Command>,
watcher_id: Arc<AtomicUsize>,
watchers: HashMap<WatcherId, WatcherState>,
}
impl InotifyService {
/// Makes a new `InotifyService` instance.
pub fn new() -> Self {
    // One command channel serves every handle cloned from this service.
    let (tx, rx) = mpsc::channel();
    InotifyService {
        inotifies: Vec::new(),
        command_tx: tx,
        command_rx: rx,
        watcher_id: Arc::new(AtomicUsize::new(0)),
        watchers: HashMap::new(),
    }
}
/// Returns the handle of this service.
pub fn handle(&self) -> InotifyServiceHandle {
    // Handles share the command channel and the ID counter, so watcher IDs
    // stay unique across all clones.
    InotifyServiceHandle {
        command_tx: self.command_tx.clone(),
        watcher_id: Arc::clone(&self.watcher_id),
    }
}
// Dispatches one command received from a handle.
fn handle_command(&mut self, command: Command) -> Result<()> {
    match command {
        Command::RegisterWatcher {
            watcher_id,
            path,
            mask,
            event_tx,
        } => {
            let watcher = WatcherState {
                id: watcher_id,
                // Always try the first inotify instance first.
                inotify_index: 0,
                wd: WatchDecriptor(-1), // dummy (updated in `register_watcher()`)
                path,
                mask,
                event_tx,
            };
            track!(self.register_watcher(watcher))?;
        }
        Command::DeregisterWatcher { watcher_id } => {
            track!(self.deregister_watcher(watcher_id))?;
        }
    }
    Ok(())
}
// Registers a new watcher. It is kept in the bookkeeping map only when the
// kernel accepted the watch; on kernel rejection the error is forwarded to
// the watcher's event channel instead (see `add_watch`).
fn register_watcher(&mut self, mut watcher: WatcherState) -> Result<()> {
    track_assert!(!self.watchers.contains_key(&watcher.id), ErrorKind::Other);
    let is_succeeded = track!(self.add_watch(&mut watcher))?;
    if is_succeeded {
        self.watchers.insert(watcher.id, watcher);
    }
    Ok(())
}
/// Removes a watcher, detaching its kernel watch and trimming now-empty
/// inotify instances from the tail of `self.inotifies`.
fn deregister_watcher(&mut self, watcher_id: WatcherId) -> Result<()> {
    if let Some(watcher) = self.watchers.remove(&watcher_id) {
        let mut i = watcher.inotify_index;
        track!(self.inotifies[i].inotify.remove_watch(watcher.wd))?;
        track_assert_some!(self.inotifies[i].wds.remove(&watcher.wd), ErrorKind::Other);
        // BUG FIX: the previous loop decremented `i` unconditionally, so
        // removing the last watcher of the instance at index 0 underflowed
        // `usize` (panic in debug builds, a wild index in release builds).
        // Stop before decrementing past zero.
        while i + 1 == self.inotifies.len() && self.inotifies[i].wds.is_empty() {
            self.inotifies.pop();
            if i == 0 {
                break;
            }
            i -= 1;
        }
    }
    Ok(())
}
// Adds `watcher`'s path to the inotify instance at `watcher.inotify_index`,
// creating a fresh instance when the index is one past the end.
//
// inotify coalesces watches on the same inode: if the kernel returns a
// watch descriptor that is already tracked, the previously registered
// watcher is evicted to the *next* inotify instance (recursively) and is
// told via `WatcherEvent::RestartWatching`.
//
// Returns `Ok(false)` when the kernel rejected the watch; that error is
// forwarded to the watcher's event channel rather than propagated.
fn add_watch(&mut self, watcher: &mut WatcherState) -> Result<bool> {
    let i = watcher.inotify_index;
    if i == self.inotifies.len() {
        self.inotifies.push(track!(InotifyState::new())?);
    }
    // MASK_ADD would merge with an existing watch; we manage collisions
    // ourselves, so strip it.
    let mut mask = watcher.mask;
    mask.remove(WatchMask::MASK_ADD);
    let result = track!(self.inotifies[i].inotify.add_watch(&watcher.path, mask));
    let wd = match result {
        Err(e) => {
            let _ = watcher.event_tx.send(Err(e));
            return Ok(false);
        }
        Ok(wd) => wd,
    };
    if let Some(overwritten_id) = self.inotifies[i].wds.insert(wd, watcher.id) {
        // Same inode already watched here: move the old watcher one
        // instance further down and re-add it there.
        let mut overwritten_watcher =
            track_assert_some!(self.watchers.remove(&overwritten_id), ErrorKind::Other);
        overwritten_watcher.inotify_index = i + 1;
        track!(self.add_watch(&mut overwritten_watcher))?;
        self.watchers
            .insert(overwritten_watcher.id, overwritten_watcher);
    }
    watcher.wd = wd;
    let event = if i == 0 {
        WatcherEvent::StartWatching
    } else {
        WatcherEvent::RestartWatching
    };
    let _ = watcher.event_tx.send(Ok(event));
    Ok(true)
}
}
impl Future for InotifyService {
    type Item = ();
    type Error = Error;
    /// Never resolves successfully: drains pending commands from handles,
    /// then forwards kernel events to the owning watchers. Returns `Err`
    /// only on internal failures.
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        while let Async::Ready(Some(command)) = self.command_rx.poll().expect("Never fails") {
            track!(self.handle_command(command))?;
        }
        for inotify in &mut self.inotifies {
            while let Async::Ready(Some(event)) = track!(inotify.inotify.poll())? {
                // `wds` only maps to live watchers, so the index below holds.
                if let Some(watcher_id) = inotify.wds.get(&event.wd) {
                    // Send failures mean the watcher was dropped; ignore.
                    let _ = self.watchers[watcher_id]
                        .event_tx
                        .send(Ok(WatcherEvent::Notified(event)));
                }
            }
        }
        Ok(Async::NotReady)
    }
}
impl Default for InotifyService {
    /// Equivalent to [`InotifyService::new`].
    fn default() -> Self {
        Self::new()
    }
}
/// Handle of `InotifyService`.
#[derive(Debug, Clone)]
pub struct InotifyServiceHandle {
    // Channel used to send register/deregister commands to the service.
    command_tx: mpsc::Sender<Command>,
    // Shared counter minting unique watcher IDs across all handle clones.
    watcher_id: Arc<AtomicUsize>,
}
impl InotifyServiceHandle {
    /// Makes a new `Watcher` that watches `path` with the given mask.
    ///
    /// If the inode indicated by the path has already been watched by other watchers,
    /// the one of existing watcher will be got kicked out and the new one will be added instead.
    /// After that the service will create new inotify instance (i.e., file descriptor) and
    /// re-add the victim watcher to it.
    /// In that case the re-added watcher will receive the event `WatcherEvent::RestartWatching`.
    pub fn watch<P: AsRef<Path>>(&self, path: P, mask: WatchMask) -> Watcher {
        let watcher_id = self.watcher_id.fetch_add(1, Ordering::SeqCst);
        let (event_tx, event_rx) = mpsc::channel();
        // A send failure means the service future is gone; the watcher will
        // simply never receive events, so the result is ignored.
        let _ = self.command_tx.send(Command::RegisterWatcher {
            watcher_id,
            path: path.as_ref().to_path_buf(),
            mask,
            event_tx,
        });
        Watcher::new(watcher_id, self.clone(), event_rx)
    }
    /// Asks the service to drop the given watcher (best effort).
    pub(crate) fn deregister_watcher(&self, watcher_id: WatcherId) {
        let _ = self
            .command_tx
            .send(Command::DeregisterWatcher { watcher_id });
    }
}
// Messages sent from `InotifyServiceHandle`s to the `InotifyService` future.
#[derive(Debug)]
enum Command {
    // Start watching `path` with `mask`; events flow back over `event_tx`.
    RegisterWatcher {
        watcher_id: WatcherId,
        path: PathBuf,
        mask: WatchMask,
        event_tx: mpsc::Sender<Result<WatcherEvent>>,
    },
    // Stop watching and release the watcher's kernel resources.
    DeregisterWatcher {
        watcher_id: WatcherId,
    },
}
// Bookkeeping for a single registered watcher.
#[derive(Debug)]
struct WatcherState {
    id: WatcherId,
    // Index into `InotifyService::inotifies` of the instance holding `wd`.
    inotify_index: usize,
    // Kernel watch descriptor for this watcher's path.
    wd: WatchDecriptor,
    path: PathBuf,
    mask: WatchMask,
    // Channel over which events and errors are delivered to the `Watcher`.
    event_tx: mpsc::Sender<Result<WatcherEvent>>,
}
// One inotify file descriptor plus the watchers registered on it.
#[derive(Debug)]
struct InotifyState {
    inotify: Inotify,
    // Maps each kernel watch descriptor to its owning watcher.
    wds: HashMap<WatchDecriptor, WatcherId>,
}
impl InotifyState {
    /// Creates a fresh inotify file descriptor with no registered watches.
    fn new() -> Result<Self> {
        let inotify = track!(Inotify::new())?;
        Ok(InotifyState {
            inotify,
            wds: HashMap::new(),
        })
    }
}
|
fn main() {
    // Bare enum variants carry no payload.
    let _four = IpAddrKind::V4;
    let _six = IpAddrKind::V6;
    route(IpAddrKind::V4);
    route(IpAddrKind::V6);
    // A struct pairing a kind tag with the address text.
    let _loopback = IpAddrS {
        address: "::1".to_string(),
        kind: IpAddrKind::V6,
    };
    // Enum variants with payloads replace the separate struct.
    let _home = IpAddr::V4("127.0.0.1".to_string());
    let _home_e = IpAddrEnhanced::V4(127, 0, 0, 1);
    let m = Message::Write("hello".to_string());
    m.call();
    // Option<T> instead of null values.
    let _y: Option<String> = None;
    let _z = Some(5);
}
// Tag-only enum: which IP version an address uses, with no payload.
enum IpAddrKind {
    V4,
    V6,
}
// Struct pairing an address string with its version tag; `IpAddr` below
// shows the more compact enum-with-payload alternative.
struct IpAddrS {
    kind: IpAddrKind,
    address: String,
}
// Enum storing the address text directly in each variant.
enum IpAddr {
    V4(String),
    V6(String),
}
// Variants may carry different payload shapes: four octets for V4,
// a string for V6.
enum IpAddrEnhanced {
    V4(u8, u8, u8, u8),
    V6(String),
}
// Accepts either IP kind; intentionally empty example stub.
fn route(_ip_type: IpAddrKind) {
}
// Demonstrates the four payload shapes a variant can take: none, named
// fields, a single value, and a tuple.
enum Message {
    Quit,
    Move { x: i32, y: i32},
    Write(String),
    ChangeColor(i32, i32, i32),
}
impl Message {
    // Methods can be defined on enums just like on structs; this one is an
    // intentionally empty example.
    fn call(&self) {
        // do something
    }
}
|
use alloc::alloc::Layout;
use core::iter::{ExactSizeIterator, Iterator};
use core::marker::PhantomData;
use core::mem;
use core::ptr;
use core::slice;
use core::sync::atomic;
use core::usize;
use super::{Arc, ArcInner};
/// Structure to allow Arc-managing some fixed-sized data and a variably-sized
/// slice in a single allocation.
// `repr(C)` fixes the field order (header before slice) so the offset
// arithmetic in `from_header_and_iter` below is sound.
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd)]
#[repr(C)]
pub struct HeaderSlice<H, T: ?Sized> {
    /// The fixed-sized data.
    pub header: H,
    /// The dynamically-sized data.
    pub slice: T,
}
impl<H, T> Arc<HeaderSlice<H, [T]>> {
    /// Creates an Arc for a HeaderSlice using the given header struct and
    /// iterator to generate the slice. The resulting Arc will be fat.
    ///
    /// # Panics
    ///
    /// Panics if `T` is zero-sized, if the allocation size overflows `usize`,
    /// or if `items` violates the `ExactSizeIterator` contract (yields fewer
    /// or more elements than `len()` reported).
    pub fn from_header_and_iter<I>(header: H, mut items: I) -> Self
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        assert_ne!(mem::size_of::<T>(), 0, "Need to think about ZST");
        let num_items = items.len();
        // Offset of the start of the slice in the allocation.
        let inner_to_data_offset = offset_of!(ArcInner<HeaderSlice<H, [T; 0]>>, data);
        let data_to_slice_offset = offset_of!(HeaderSlice<H, [T; 0]>, slice);
        let slice_offset = inner_to_data_offset + data_to_slice_offset;
        // Compute the size of the real payload.
        let slice_size = mem::size_of::<T>()
            .checked_mul(num_items)
            .expect("size overflows");
        let usable_size = slice_offset
            .checked_add(slice_size)
            .expect("size overflows");
        // Round up size to alignment. The wrapping_add can only wrap when
        // usable_size is within (align - 1) of usize::MAX, which the assert
        // below catches.
        let align = mem::align_of::<ArcInner<HeaderSlice<H, [T; 0]>>>();
        let size = usable_size.wrapping_add(align - 1) & !(align - 1);
        assert!(size >= usable_size, "size overflows");
        let layout = Layout::from_size_align(size, align).expect("invalid layout");
        let ptr: *mut ArcInner<HeaderSlice<H, [T]>>;
        unsafe {
            let buffer = alloc::alloc::alloc(layout);
            if buffer.is_null() {
                alloc::alloc::handle_alloc_error(layout);
            }
            // Synthesize the fat pointer. We do this by claiming we have a direct
            // pointer to a [T], and then changing the type of the borrow. The key
            // point here is that the length portion of the fat pointer applies
            // only to the number of elements in the dynamically-sized portion of
            // the type, so the value will be the same whether it points to a [T]
            // or something else with a [T] as its last member.
            let fake_slice: &mut [T] = slice::from_raw_parts_mut(buffer as *mut T, num_items);
            ptr = fake_slice as *mut [T] as *mut ArcInner<HeaderSlice<H, [T]>>;
            let count = atomic::AtomicUsize::new(1);
            // Write the data.
            //
            // Note that any panics here (i.e. from the iterator) are safe, since
            // we'll just leak the uninitialized memory.
            ptr::write(&mut ((*ptr).count), count);
            ptr::write(&mut ((*ptr).data.header), header);
            if num_items != 0 {
                let mut current = (*ptr).data.slice.as_mut_ptr();
                debug_assert_eq!(current as usize - buffer as usize, slice_offset);
                for _ in 0..num_items {
                    ptr::write(
                        current,
                        items
                            .next()
                            .expect("ExactSizeIterator over-reported length"),
                    );
                    current = current.offset(1);
                }
                // We should have consumed the buffer exactly.
                debug_assert_eq!(current as *mut u8, buffer.add(usable_size));
            }
            // Check exhaustion exactly once. The previous version performed
            // this assertion twice (inside and after the `if` above), which
            // called `next()` a second time on an already-exhausted iterator —
            // unspecified behavior for iterators that are not fused.
            assert!(
                items.next().is_none(),
                "ExactSizeIterator under-reported length"
            );
        }
        // Return the fat Arc.
        assert_eq!(
            mem::size_of::<Self>(),
            mem::size_of::<usize>() * 2,
            "The Arc will be fat"
        );
        unsafe {
            Arc {
                p: ptr::NonNull::new_unchecked(ptr),
                phantom: PhantomData,
            }
        }
    }
}
/// Header data with an inline length. Consumers that use HeaderWithLength as the
/// Header type in HeaderSlice can take advantage of ThinArc.
///
/// Keeping the length in the header allows a thin (single-word) pointer to
/// recover the slice length without a fat pointer.
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd)]
#[repr(C)]
pub struct HeaderWithLength<H> {
    /// The fixed-sized data.
    pub header: H,
    /// The slice length.
    pub length: usize,
}
impl<H> HeaderWithLength<H> {
/// Creates a new HeaderWithLength.
#[inline]
pub fn new(header: H, length: usize) -> Self {
HeaderWithLength { header, length }
}
}
/// A HeaderSlice whose header additionally records the slice length inline.
pub(crate) type HeaderSliceWithLength<H, T> = HeaderSlice<HeaderWithLength<H>, T>;
|
use libc::{c_int,c_uchar,c_ulonglong};
use std::rand::Rng;
use serialize::{Encodable,Encoder,Decodable,Decoder};
use std::slice::MutableCloneableVector;
use super::randombytes::rng;
// NaCl crypto_secretbox (xsalsa20poly1305) size constants, mirroring the
// C library's definitions. (Legacy pre-1.0 Rust: `uint` is today's `usize`.)
/// Secret key length in bytes.
pub static KEYBYTES: uint = 32;
/// Nonce length in bytes.
pub static NONCEBYTES: uint = 24;
/// Leading zero padding required on the plaintext buffer by the C API.
pub static ZEROBYTES: uint = 32;
/// Leading zero padding present on the raw ciphertext buffer.
pub static BOXZEROBYTES: uint = 16;
// FFI bindings to the statically-linked TweetNaCl C library.
// Both functions return 0 on success, nonzero on failure.
#[link(name="tweetnacl", kind="static")]
extern {
    // Seal: c = secretbox(m, n, k); m must be ZEROBYTES-prefixed, length d.
    fn crypto_secretbox_xsalsa20poly1305_tweet(c: *mut c_uchar, m: *const c_uchar, d: c_ulonglong, n: *const c_uchar, k: *const c_uchar) -> c_int;
    // Open: m = secretbox_open(c, n, k); c must be BOXZEROBYTES-prefixed; fails on bad MAC.
    fn crypto_secretbox_xsalsa20poly1305_tweet_open(m: *mut c_uchar, c: *const c_uchar, d: c_ulonglong, n: *const c_uchar, k: *const c_uchar) -> c_int;
}
/// Generates a fresh random nonce of NONCEBYTES bytes from the module RNG.
pub fn generate_nonce() -> [u8, ..NONCEBYTES] {
    let mut nonce = [0, ..NONCEBYTES];
    rng().fill_bytes(nonce.as_mut_slice());
    nonce
}
/// A secretbox symmetric key; the raw bytes are zeroized on drop (see `Drop` impl).
pub struct Key {
    // Raw key material, exactly KEYBYTES long.
    key: [u8, ..KEYBYTES]
}
impl Clone for Key {
    /// Clones by copying the raw key bytes into a new Key via `Key::from`.
    fn clone(&self) -> Key {
        Key::from(self.key)
    }
}
impl Key {
    /// Generates a fresh random key using the module RNG.
    pub fn generate() -> Key {
        Key {
            key: {
                let mut key = [0, ..KEYBYTES];
                rng().fill_bytes(key.as_mut_slice());
                key
            }
        }
    }
    /// Builds a Key from an existing byte slice.
    ///
    /// Panics (via assert) if `key` is not exactly KEYBYTES long.
    pub fn from(key: &[u8]) -> Key {
        assert_eq!(key.len(), KEYBYTES);
        Key {
            key: {
                let mut k = [0, ..KEYBYTES];
                k.copy_from(key);
                k
            }
        }
    }
    /// Encrypts and authenticates `msg` under this key and `nonce`
    /// (NaCl secretbox / xsalsa20poly1305).
    ///
    /// Follows the C API convention: the plaintext is prefixed with ZEROBYTES
    /// zeros before sealing, and the first BOXZEROBYTES zero bytes of the raw
    /// ciphertext are stripped from the returned Vec.
    pub fn encrypt(&self, msg: &[u8], nonce: &[u8, ..NONCEBYTES]) -> Vec<u8> {
        let plaintext = {
            let mut vec = Vec::from_elem(ZEROBYTES+msg.len(), 0u8);
            vec.mut_slice_from(ZEROBYTES).copy_from(msg.as_slice());
            vec
        };
        let mut ciphertext = Vec::from_elem(plaintext.len(), 0u8);
        unsafe {
            // The C call only fails on API misuse here, hence the assert.
            assert_eq!(crypto_secretbox_xsalsa20poly1305_tweet(ciphertext.as_mut_ptr(), plaintext.as_ptr(), plaintext.len() as c_ulonglong, nonce.as_ptr(), self.key.as_ptr()), 0);
        }
        ciphertext.slice_from(BOXZEROBYTES).to_owned()
    }
    /// Decrypts and verifies `msg` (a ciphertext from `encrypt`).
    ///
    /// Re-adds the BOXZEROBYTES zero prefix before calling the C API, and
    /// strips the ZEROBYTES zero prefix from the recovered plaintext.
    /// Returns None if authentication fails.
    pub fn decrypt(&self, nonce: &[u8, ..NONCEBYTES], msg: &[u8]) -> Option<Vec<u8>> {
        let ciphertext = {
            let mut vec = Vec::from_elem(BOXZEROBYTES+msg.len(), 0u8);
            vec.mut_slice_from(BOXZEROBYTES).copy_from(msg.as_slice());
            vec
        };
        let mut plaintext = Vec::from_elem(ciphertext.len(), 0u8);
        unsafe {
            if crypto_secretbox_xsalsa20poly1305_tweet_open(plaintext.as_mut_ptr(), ciphertext.as_ptr(), ciphertext.len() as c_ulonglong, nonce.as_ptr(), self.key.as_ptr()) == 0 {
                Some(plaintext.slice_from(ZEROBYTES).to_owned())
            } else {
                None
            }
        }
    }
}
// NOTE(review): encoding serializes the raw secret key bytes; callers must
// ensure the encoded form is stored/transmitted securely.
impl <S: Encoder<E>, E> Encodable<S, E> for Key {
    fn encode(&self, s: &mut S) -> Result<(), E> {
        self.key.encode(s)
    }
}
impl <D: Decoder<E>, E> Decodable<D, E> for Key {
    /// Decodes a Key from a serialized byte vector.
    ///
    /// NOTE(review): a wrong-length input panics via `assert_eq!` rather than
    /// returning Err — decoded data is external input, so a decode error would
    /// be the more robust contract.
    fn decode(d: &mut D) -> Result<Key, E> {
        let key: Vec<u8> = try!(Decodable::decode(d));
        assert_eq!(key.len(), KEYBYTES);
        let mut ret = Key { key: [0, ..KEYBYTES] };
        ret.key.copy_from(key.as_slice());
        return Ok(ret);
    }
}
impl Drop for Key {
    /// Zeroizes the key material on drop using a volatile write so the
    /// compiler cannot optimize the clearing away.
    fn drop(&mut self) {
        unsafe {
            ::std::intrinsics::volatile_set_memory(self.key.as_mut_ptr(), 0, KEYBYTES);
        }
    }
}
|
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// Parses a protocol-level (generic) error from an HTTP response's body and headers.
pub fn parse_http_generic_error(
    response: &http::Response<bytes::Bytes>,
) -> Result<smithy_types::Error, smithy_json::deserialize::Error> {
    crate::json_errors::parse_generic_error(response.body(), response.headers())
}
/// Deserializes the JSON error body of an `InternalServerException` into its
/// builder, reading the `Message` field and skipping unknown keys.
pub fn deser_structure_internal_server_exceptionjson_err(
    input: &[u8],
    mut builder: crate::error::internal_server_exception::Builder,
) -> Result<crate::error::internal_server_exception::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "Message" => {
                        builder = builder.set_message(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    // Reject any trailing tokens after the top-level object closes.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the JSON error body of an `InvalidRequestException` into its
/// builder, reading `ErrorCode` and `Message` and skipping unknown keys.
pub fn deser_structure_invalid_request_exceptionjson_err(
    input: &[u8],
    mut builder: crate::error::invalid_request_exception::Builder,
) -> Result<crate::error::invalid_request_exception::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "ErrorCode" => {
                        builder = builder.set_error_code(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    "Message" => {
                        builder = builder.set_message(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the AddInstanceFleet response JSON into its output builder,
/// reading `ClusterId`, `InstanceFleetId`, and `ClusterArn`.
pub fn deser_operation_add_instance_fleet(
    input: &[u8],
    mut builder: crate::output::add_instance_fleet_output::Builder,
) -> Result<crate::output::add_instance_fleet_output::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "ClusterId" => {
                        builder = builder.set_cluster_id(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    "InstanceFleetId" => {
                        builder = builder.set_instance_fleet_id(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    "ClusterArn" => {
                        builder = builder.set_cluster_arn(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the JSON error body of an `InternalServerError` into its
/// builder, reading the lowercase `message` field.
pub fn deser_structure_internal_server_errorjson_err(
    input: &[u8],
    mut builder: crate::error::internal_server_error::Builder,
) -> Result<crate::error::internal_server_error::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "message" => {
                        builder = builder.set_message(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the AddInstanceGroups response JSON into its output builder,
/// reading `JobFlowId`, `InstanceGroupIds` (a list), and `ClusterArn`.
pub fn deser_operation_add_instance_groups(
    input: &[u8],
    mut builder: crate::output::add_instance_groups_output::Builder,
) -> Result<crate::output::add_instance_groups_output::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "JobFlowId" => {
                        builder = builder.set_job_flow_id(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    "InstanceGroupIds" => {
                        builder = builder.set_instance_group_ids(
                            crate::json_deser::deser_list_instance_group_ids_list(tokens)?,
                        );
                    }
                    "ClusterArn" => {
                        builder = builder.set_cluster_arn(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the AddJobFlowSteps response JSON into its output builder,
/// reading the `StepIds` list.
pub fn deser_operation_add_job_flow_steps(
    input: &[u8],
    mut builder: crate::output::add_job_flow_steps_output::Builder,
) -> Result<crate::output::add_job_flow_steps_output::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "StepIds" => {
                        builder = builder
                            .set_step_ids(crate::json_deser::deser_list_step_ids_list(tokens)?);
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the CancelSteps response JSON into its output builder,
/// reading the `CancelStepsInfoList` list.
pub fn deser_operation_cancel_steps(
    input: &[u8],
    mut builder: crate::output::cancel_steps_output::Builder,
) -> Result<crate::output::cancel_steps_output::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "CancelStepsInfoList" => {
                        builder = builder.set_cancel_steps_info_list(
                            crate::json_deser::deser_list_cancel_steps_info_list(tokens)?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the CreateSecurityConfiguration response JSON into its output
/// builder, reading `Name` and `CreationDateTime` (epoch-seconds timestamp).
pub fn deser_operation_create_security_configuration(
    input: &[u8],
    mut builder: crate::output::create_security_configuration_output::Builder,
) -> Result<
    crate::output::create_security_configuration_output::Builder,
    smithy_json::deserialize::Error,
> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "Name" => {
                        builder = builder.set_name(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    "CreationDateTime" => {
                        builder = builder.set_creation_date_time(
                            smithy_json::deserialize::token::expect_timestamp_or_null(
                                tokens.next(),
                                smithy_types::instant::Format::EpochSeconds,
                            )?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the CreateStudio response JSON into its output builder,
/// reading `StudioId` and `Url`.
pub fn deser_operation_create_studio(
    input: &[u8],
    mut builder: crate::output::create_studio_output::Builder,
) -> Result<crate::output::create_studio_output::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "StudioId" => {
                        builder = builder.set_studio_id(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    "Url" => {
                        builder = builder.set_url(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the DescribeCluster response JSON into its output builder,
/// reading the nested `Cluster` structure.
pub fn deser_operation_describe_cluster(
    input: &[u8],
    mut builder: crate::output::describe_cluster_output::Builder,
) -> Result<crate::output::describe_cluster_output::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "Cluster" => {
                        builder = builder
                            .set_cluster(crate::json_deser::deser_structure_cluster(tokens)?);
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the DescribeJobFlows response JSON into its output builder,
/// reading the `JobFlows` list.
pub fn deser_operation_describe_job_flows(
    input: &[u8],
    mut builder: crate::output::describe_job_flows_output::Builder,
) -> Result<crate::output::describe_job_flows_output::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "JobFlows" => {
                        builder = builder.set_job_flows(
                            crate::json_deser::deser_list_job_flow_detail_list(tokens)?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the DescribeNotebookExecution response JSON into its output
/// builder, reading the nested `NotebookExecution` structure.
pub fn deser_operation_describe_notebook_execution(
    input: &[u8],
    mut builder: crate::output::describe_notebook_execution_output::Builder,
) -> Result<
    crate::output::describe_notebook_execution_output::Builder,
    smithy_json::deserialize::Error,
> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "NotebookExecution" => {
                        builder = builder.set_notebook_execution(
                            crate::json_deser::deser_structure_notebook_execution(tokens)?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the DescribeReleaseLabel response JSON into its output
/// builder, reading `ReleaseLabel`, `Applications` (a list), and `NextToken`.
pub fn deser_operation_describe_release_label(
    input: &[u8],
    mut builder: crate::output::describe_release_label_output::Builder,
) -> Result<crate::output::describe_release_label_output::Builder, smithy_json::deserialize::Error>
{
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "ReleaseLabel" => {
                        builder = builder.set_release_label(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    "Applications" => {
                        builder = builder.set_applications(
                            crate::json_deser::deser_list_simplified_application_list(tokens)?,
                        );
                    }
                    "NextToken" => {
                        builder = builder.set_next_token(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the DescribeSecurityConfiguration response JSON into its
/// output builder, reading `Name`, `SecurityConfiguration`, and
/// `CreationDateTime` (epoch-seconds timestamp).
pub fn deser_operation_describe_security_configuration(
    input: &[u8],
    mut builder: crate::output::describe_security_configuration_output::Builder,
) -> Result<
    crate::output::describe_security_configuration_output::Builder,
    smithy_json::deserialize::Error,
> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "Name" => {
                        builder = builder.set_name(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    "SecurityConfiguration" => {
                        builder = builder.set_security_configuration(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    "CreationDateTime" => {
                        builder = builder.set_creation_date_time(
                            smithy_json::deserialize::token::expect_timestamp_or_null(
                                tokens.next(),
                                smithy_types::instant::Format::EpochSeconds,
                            )?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the DescribeStep response JSON into its output builder,
/// reading the nested `Step` structure.
pub fn deser_operation_describe_step(
    input: &[u8],
    mut builder: crate::output::describe_step_output::Builder,
) -> Result<crate::output::describe_step_output::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "Step" => {
                        builder =
                            builder.set_step(crate::json_deser::deser_structure_step(tokens)?);
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the DescribeStudio response JSON into its output builder,
/// reading the nested `Studio` structure.
pub fn deser_operation_describe_studio(
    input: &[u8],
    mut builder: crate::output::describe_studio_output::Builder,
) -> Result<crate::output::describe_studio_output::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "Studio" => {
                        builder =
                            builder.set_studio(crate::json_deser::deser_structure_studio(tokens)?);
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the GetAutoTerminationPolicy response JSON into its output
/// builder, reading the nested `AutoTerminationPolicy` structure.
pub fn deser_operation_get_auto_termination_policy(
    input: &[u8],
    mut builder: crate::output::get_auto_termination_policy_output::Builder,
) -> Result<
    crate::output::get_auto_termination_policy_output::Builder,
    smithy_json::deserialize::Error,
> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "AutoTerminationPolicy" => {
                        builder = builder.set_auto_termination_policy(
                            crate::json_deser::deser_structure_auto_termination_policy(tokens)?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the GetBlockPublicAccessConfiguration response JSON into its
/// output builder, reading `BlockPublicAccessConfiguration` and its metadata.
pub fn deser_operation_get_block_public_access_configuration(
    input: &[u8],
    mut builder: crate::output::get_block_public_access_configuration_output::Builder,
) -> Result<
    crate::output::get_block_public_access_configuration_output::Builder,
    smithy_json::deserialize::Error,
> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "BlockPublicAccessConfiguration" => {
                        builder = builder.set_block_public_access_configuration(
                            crate::json_deser::deser_structure_block_public_access_configuration(
                                tokens,
                            )?,
                        );
                    }
                    "BlockPublicAccessConfigurationMetadata" => {
                        builder = builder.set_block_public_access_configuration_metadata(
                            crate::json_deser::deser_structure_block_public_access_configuration_metadata(tokens)?
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the GetManagedScalingPolicy response JSON into its output
/// builder, reading the nested `ManagedScalingPolicy` structure.
pub fn deser_operation_get_managed_scaling_policy(
    input: &[u8],
    mut builder: crate::output::get_managed_scaling_policy_output::Builder,
) -> Result<
    crate::output::get_managed_scaling_policy_output::Builder,
    smithy_json::deserialize::Error,
> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "ManagedScalingPolicy" => {
                        builder = builder.set_managed_scaling_policy(
                            crate::json_deser::deser_structure_managed_scaling_policy(tokens)?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the GetStudioSessionMapping response JSON into its output
/// builder, reading the nested `SessionMapping` structure.
pub fn deser_operation_get_studio_session_mapping(
    input: &[u8],
    mut builder: crate::output::get_studio_session_mapping_output::Builder,
) -> Result<
    crate::output::get_studio_session_mapping_output::Builder,
    smithy_json::deserialize::Error,
> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "SessionMapping" => {
                        builder = builder.set_session_mapping(
                            crate::json_deser::deser_structure_session_mapping_detail(tokens)?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the ListBootstrapActions response JSON into its output
/// builder, reading the `BootstrapActions` list and the pagination `Marker`.
pub fn deser_operation_list_bootstrap_actions(
    input: &[u8],
    mut builder: crate::output::list_bootstrap_actions_output::Builder,
) -> Result<crate::output::list_bootstrap_actions_output::Builder, smithy_json::deserialize::Error>
{
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "BootstrapActions" => {
                        builder = builder.set_bootstrap_actions(
                            crate::json_deser::deser_list_command_list(tokens)?,
                        );
                    }
                    "Marker" => {
                        builder = builder.set_marker(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the ListClusters response JSON into its output builder,
/// reading the `Clusters` summary list and the pagination `Marker`.
pub fn deser_operation_list_clusters(
    input: &[u8],
    mut builder: crate::output::list_clusters_output::Builder,
) -> Result<crate::output::list_clusters_output::Builder, smithy_json::deserialize::Error> {
    let mut tokens_owned =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut tokens_owned;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                match key.to_unescaped()?.as_ref() {
                    "Clusters" => {
                        builder = builder.set_clusters(
                            crate::json_deser::deser_list_cluster_summary_list(tokens)?,
                        );
                    }
                    "Marker" => {
                        builder = builder.set_marker(
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?,
                        );
                    }
                    _ => smithy_json::deserialize::token::skip_value(tokens)?,
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `ListInstanceFleets` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_list_instance_fleets(
    input: &[u8],
    mut builder: crate::output::list_instance_fleets_output::Builder,
) -> Result<crate::output::list_instance_fleets_output::Builder, smithy_json::deserialize::Error> {
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "InstanceFleets" {
                    builder = builder.set_instance_fleets(
                        crate::json_deser::deser_list_instance_fleet_list(tokens)?,
                    );
                } else if field.as_ref() == "Marker" {
                    builder = builder.set_marker(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `ListInstanceGroups` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_list_instance_groups(
    input: &[u8],
    mut builder: crate::output::list_instance_groups_output::Builder,
) -> Result<crate::output::list_instance_groups_output::Builder, smithy_json::deserialize::Error> {
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "InstanceGroups" {
                    builder = builder.set_instance_groups(
                        crate::json_deser::deser_list_instance_group_list(tokens)?,
                    );
                } else if field.as_ref() == "Marker" {
                    builder = builder.set_marker(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `ListInstances` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_list_instances(
    input: &[u8],
    mut builder: crate::output::list_instances_output::Builder,
) -> Result<crate::output::list_instances_output::Builder, smithy_json::deserialize::Error> {
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "Instances" {
                    builder = builder
                        .set_instances(crate::json_deser::deser_list_instance_list(tokens)?);
                } else if field.as_ref() == "Marker" {
                    builder = builder.set_marker(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `ListNotebookExecutions` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_list_notebook_executions(
    input: &[u8],
    mut builder: crate::output::list_notebook_executions_output::Builder,
) -> Result<crate::output::list_notebook_executions_output::Builder, smithy_json::deserialize::Error>
{
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "NotebookExecutions" {
                    builder = builder.set_notebook_executions(
                        crate::json_deser::deser_list_notebook_execution_summary_list(tokens)?,
                    );
                } else if field.as_ref() == "Marker" {
                    builder = builder.set_marker(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `ListReleaseLabels` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_list_release_labels(
    input: &[u8],
    mut builder: crate::output::list_release_labels_output::Builder,
) -> Result<crate::output::list_release_labels_output::Builder, smithy_json::deserialize::Error> {
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "ReleaseLabels" {
                    builder = builder
                        .set_release_labels(crate::json_deser::deser_list_string_list(tokens)?);
                } else if field.as_ref() == "NextToken" {
                    builder = builder.set_next_token(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `ListSecurityConfigurations` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_list_security_configurations(
    input: &[u8],
    mut builder: crate::output::list_security_configurations_output::Builder,
) -> Result<
    crate::output::list_security_configurations_output::Builder,
    smithy_json::deserialize::Error,
> {
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "SecurityConfigurations" {
                    builder = builder.set_security_configurations(
                        crate::json_deser::deser_list_security_configuration_list(tokens)?,
                    );
                } else if field.as_ref() == "Marker" {
                    builder = builder.set_marker(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `ListSteps` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_list_steps(
    input: &[u8],
    mut builder: crate::output::list_steps_output::Builder,
) -> Result<crate::output::list_steps_output::Builder, smithy_json::deserialize::Error> {
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "Steps" {
                    builder = builder
                        .set_steps(crate::json_deser::deser_list_step_summary_list(tokens)?);
                } else if field.as_ref() == "Marker" {
                    builder = builder.set_marker(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `ListStudios` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_list_studios(
    input: &[u8],
    mut builder: crate::output::list_studios_output::Builder,
) -> Result<crate::output::list_studios_output::Builder, smithy_json::deserialize::Error> {
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "Studios" {
                    builder = builder
                        .set_studios(crate::json_deser::deser_list_studio_summary_list(tokens)?);
                } else if field.as_ref() == "Marker" {
                    builder = builder.set_marker(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `ListStudioSessionMappings` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_list_studio_session_mappings(
    input: &[u8],
    mut builder: crate::output::list_studio_session_mappings_output::Builder,
) -> Result<
    crate::output::list_studio_session_mappings_output::Builder,
    smithy_json::deserialize::Error,
> {
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "SessionMappings" {
                    builder = builder.set_session_mappings(
                        crate::json_deser::deser_list_session_mapping_summary_list(tokens)?,
                    );
                } else if field.as_ref() == "Marker" {
                    builder = builder.set_marker(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `ModifyCluster` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_modify_cluster(
    input: &[u8],
    mut builder: crate::output::modify_cluster_output::Builder,
) -> Result<crate::output::modify_cluster_output::Builder, smithy_json::deserialize::Error> {
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "StepConcurrencyLevel" {
                    builder = builder.set_step_concurrency_level(
                        smithy_json::deserialize::token::expect_number_or_null(tokens.next())?
                            .map(|num| num.to_i32()),
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `PutAutoScalingPolicy` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_put_auto_scaling_policy(
    input: &[u8],
    mut builder: crate::output::put_auto_scaling_policy_output::Builder,
) -> Result<crate::output::put_auto_scaling_policy_output::Builder, smithy_json::deserialize::Error>
{
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "ClusterId" {
                    builder = builder.set_cluster_id(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else if field.as_ref() == "InstanceGroupId" {
                    builder = builder.set_instance_group_id(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else if field.as_ref() == "AutoScalingPolicy" {
                    builder = builder.set_auto_scaling_policy(
                        crate::json_deser::deser_structure_auto_scaling_policy_description(
                            tokens,
                        )?,
                    );
                } else if field.as_ref() == "ClusterArn" {
                    builder = builder.set_cluster_arn(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `RunJobFlow` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_run_job_flow(
    input: &[u8],
    mut builder: crate::output::run_job_flow_output::Builder,
) -> Result<crate::output::run_job_flow_output::Builder, smithy_json::deserialize::Error> {
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "JobFlowId" {
                    builder = builder.set_job_flow_id(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else if field.as_ref() == "ClusterArn" {
                    builder = builder.set_cluster_arn(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Deserializes the `StartNotebookExecution` response body into its output builder.
///
/// Unrecognized keys are skipped; trailing tokens after the document are an error.
pub fn deser_operation_start_notebook_execution(
    input: &[u8],
    mut builder: crate::output::start_notebook_execution_output::Builder,
) -> Result<crate::output::start_notebook_execution_output::Builder, smithy_json::deserialize::Error>
{
    // An empty body is treated as an empty JSON document.
    let mut owned_iter =
        smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input))
            .peekable();
    let tokens = &mut owned_iter;
    smithy_json::deserialize::token::expect_start_object(tokens.next())?;
    // Walk the top-level object one key at a time.
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field.as_ref() == "NotebookExecutionId" {
                    builder = builder.set_notebook_execution_id(
                        smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                            .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                            .transpose()?,
                    );
                } else {
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ));
            }
        }
    }
    // Any tokens remaining after the closing brace indicate a malformed document.
    if tokens.next().is_some() {
        return Err(smithy_json::deserialize::Error::custom(
            "found more JSON tokens after completing parsing",
        ));
    }
    Ok(builder)
}
/// Returns `data` unchanged, substituting an empty JSON object (`{}`)
/// when the input slice is empty so downstream parsing always sees a document.
pub fn or_empty_doc(data: &[u8]) -> &[u8] {
    match data {
        [] => b"{}",
        other => other,
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
/// Deserializes a JSON array of instance-group id strings; `null` yields `None`.
/// Null array members are dropped rather than stored.
pub fn deser_list_instance_group_ids_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut collected = Vec::new();
            // Consume members until the closing bracket is reached.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                let member =
                    smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                        .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                        .transpose()?;
                if let Some(member) = member {
                    collected.push(member);
                }
            }
            // Discard the EndArray token we just peeked.
            tokens.next().transpose().unwrap();
            Ok(Some(collected))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
/// Deserializes a JSON array of step id strings; `null` yields `None`.
/// Null array members are dropped rather than stored.
pub fn deser_list_step_ids_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut collected = Vec::new();
            // Consume members until the closing bracket is reached.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                let member =
                    smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                        .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                        .transpose()?;
                if let Some(member) = member {
                    collected.push(member);
                }
            }
            // Discard the EndArray token we just peeked.
            tokens.next().transpose().unwrap();
            Ok(Some(collected))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
/// Deserializes a JSON array of `CancelStepsInfo` structures; `null` yields `None`.
/// Null array members are dropped rather than stored.
pub fn deser_list_cancel_steps_info_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::CancelStepsInfo>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut collected = Vec::new();
            // Consume members until the closing bracket is reached.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                if let Some(member) = crate::json_deser::deser_structure_cancel_steps_info(tokens)?
                {
                    collected.push(member);
                }
            }
            // Discard the EndArray token we just peeked.
            tokens.next().transpose().unwrap();
            Ok(Some(collected))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes a JSON `Cluster` object into `crate::model::Cluster`.
///
/// Returns `Ok(None)` when the value is JSON `null`. String-backed enum
/// fields (e.g. `InstanceCollectionType`) are converted via their `From<&str>`
/// impls; unrecognized keys are skipped.
pub fn deser_structure_cluster<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Cluster>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::Cluster::builder();
            // Consume one key/value pair per iteration until EndObject.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Id" => {
                                builder = builder.set_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Status" => {
                                builder = builder.set_status(
                                    crate::json_deser::deser_structure_cluster_status(tokens)?,
                                );
                            }
                            "Ec2InstanceAttributes" => {
                                builder = builder.set_ec2_instance_attributes(
                                    crate::json_deser::deser_structure_ec2_instance_attributes(
                                        tokens,
                                    )?,
                                );
                            }
                            "InstanceCollectionType" => {
                                builder = builder.set_instance_collection_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceCollectionType::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "LogUri" => {
                                builder = builder.set_log_uri(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "LogEncryptionKmsKeyId" => {
                                builder = builder.set_log_encryption_kms_key_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "RequestedAmiVersion" => {
                                builder = builder.set_requested_ami_version(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "RunningAmiVersion" => {
                                builder = builder.set_running_ami_version(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "ReleaseLabel" => {
                                builder = builder.set_release_label(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "AutoTerminate" => {
                                builder = builder.set_auto_terminate(
                                    smithy_json::deserialize::token::expect_bool_or_null(
                                        tokens.next(),
                                    )?,
                                );
                            }
                            "TerminationProtected" => {
                                builder = builder.set_termination_protected(
                                    smithy_json::deserialize::token::expect_bool_or_null(
                                        tokens.next(),
                                    )?,
                                );
                            }
                            "VisibleToAllUsers" => {
                                builder = builder.set_visible_to_all_users(
                                    smithy_json::deserialize::token::expect_bool_or_null(
                                        tokens.next(),
                                    )?,
                                );
                            }
                            "Applications" => {
                                builder = builder.set_applications(
                                    crate::json_deser::deser_list_application_list(tokens)?,
                                );
                            }
                            "Tags" => {
                                builder = builder
                                    .set_tags(crate::json_deser::deser_list_tag_list(tokens)?);
                            }
                            "ServiceRole" => {
                                builder = builder.set_service_role(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "NormalizedInstanceHours" => {
                                builder = builder.set_normalized_instance_hours(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "MasterPublicDnsName" => {
                                builder = builder.set_master_public_dns_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Configurations" => {
                                builder = builder.set_configurations(
                                    crate::json_deser::deser_list_configuration_list(tokens)?,
                                );
                            }
                            "SecurityConfiguration" => {
                                builder = builder.set_security_configuration(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "AutoScalingRole" => {
                                builder = builder.set_auto_scaling_role(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "ScaleDownBehavior" => {
                                builder = builder.set_scale_down_behavior(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::ScaleDownBehavior::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "CustomAmiId" => {
                                builder = builder.set_custom_ami_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "EbsRootVolumeSize" => {
                                builder = builder.set_ebs_root_volume_size(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "RepoUpgradeOnBoot" => {
                                builder = builder.set_repo_upgrade_on_boot(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::RepoUpgradeOnBoot::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "KerberosAttributes" => {
                                builder = builder.set_kerberos_attributes(
                                    crate::json_deser::deser_structure_kerberos_attributes(tokens)?,
                                );
                            }
                            "ClusterArn" => {
                                builder = builder.set_cluster_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "OutpostArn" => {
                                builder = builder.set_outpost_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "StepConcurrencyLevel" => {
                                builder = builder.set_step_concurrency_level(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "PlacementGroups" => {
                                builder = builder.set_placement_groups(
                                    crate::json_deser::deser_list_placement_group_config_list(
                                        tokens,
                                    )?,
                                );
                            }
                            // Forward-compatibility: ignore keys this client version doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
/// Deserializes a JSON array of `JobFlowDetail` structures; `null` yields `None`.
/// Null array members are dropped rather than stored.
pub fn deser_list_job_flow_detail_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::JobFlowDetail>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut collected = Vec::new();
            // Consume members until the closing bracket is reached.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                if let Some(member) = crate::json_deser::deser_structure_job_flow_detail(tokens)? {
                    collected.push(member);
                }
            }
            // Discard the EndArray token we just peeked.
            tokens.next().transpose().unwrap();
            Ok(Some(collected))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes a JSON `NotebookExecution` object into `crate::model::NotebookExecution`.
///
/// Returns `Ok(None)` when the value is JSON `null`. Timestamps are decoded
/// from epoch-seconds; `Status` is converted via its `From<&str>` impl;
/// unrecognized keys are skipped.
pub fn deser_structure_notebook_execution<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::NotebookExecution>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::NotebookExecution::builder();
            // Consume one key/value pair per iteration until EndObject.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "NotebookExecutionId" => {
                                builder = builder.set_notebook_execution_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "EditorId" => {
                                builder = builder.set_editor_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "ExecutionEngine" => {
                                builder = builder.set_execution_engine(
                                    crate::json_deser::deser_structure_execution_engine_config(
                                        tokens,
                                    )?,
                                );
                            }
                            "NotebookExecutionName" => {
                                builder = builder.set_notebook_execution_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "NotebookParams" => {
                                builder = builder.set_notebook_params(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Status" => {
                                builder = builder.set_status(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::NotebookExecutionStatus::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "StartTime" => {
                                builder = builder.set_start_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "EndTime" => {
                                builder = builder.set_end_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "Arn" => {
                                builder = builder.set_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "OutputNotebookURI" => {
                                builder = builder.set_output_notebook_uri(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "LastStateChangeReason" => {
                                builder = builder.set_last_state_change_reason(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "NotebookInstanceSecurityGroupId" => {
                                builder = builder.set_notebook_instance_security_group_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Tags" => {
                                builder = builder
                                    .set_tags(crate::json_deser::deser_list_tag_list(tokens)?);
                            }
                            // Forward-compatibility: ignore keys this client version doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
/// Deserializes a JSON array of `SimplifiedApplication` structures; `null` yields `None`.
/// Null array members are dropped rather than stored.
pub fn deser_list_simplified_application_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<
    Option<std::vec::Vec<crate::model::SimplifiedApplication>>,
    smithy_json::deserialize::Error,
>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut collected = Vec::new();
            // Consume members until the closing bracket is reached.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                if let Some(member) =
                    crate::json_deser::deser_structure_simplified_application(tokens)?
                {
                    collected.push(member);
                }
            }
            // Discard the EndArray token we just peeked.
            tokens.next().transpose().unwrap();
            Ok(Some(collected))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an EMR `Step` structure from a JSON token stream.
///
/// Returns `Ok(None)` for an explicit JSON null, `Ok(Some(step))` for an
/// object, and an error if the next token is neither null nor object start
/// or the stream is malformed. Unrecognized keys are skipped.
pub fn deser_structure_step<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Step>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::Step::builder();
            // Consume key/value pairs until the object is closed.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Id" => {
                                builder = builder.set_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Config" => {
                                builder = builder.set_config(
                                    crate::json_deser::deser_structure_hadoop_step_config(tokens)?,
                                );
                            }
                            // Enum-valued field: the raw string is converted via `From<&str>`.
                            "ActionOnFailure" => {
                                builder = builder.set_action_on_failure(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::ActionOnFailure::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "Status" => {
                                builder = builder.set_status(
                                    crate::json_deser::deser_structure_step_status(tokens)?,
                                );
                            }
                            // Forward-compatibility: unknown keys are skipped wholesale.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an EMR `Studio` structure from a JSON token stream.
///
/// Returns `Ok(None)` for an explicit JSON null, `Ok(Some(studio))` for an
/// object. String fields are unescaped into owned `String`s, `AuthMode` is
/// converted through its `From<&str>` impl, timestamps use epoch-seconds
/// format, and nested lists are delegated to their own deserializers.
/// Unrecognized keys are skipped for forward compatibility.
pub fn deser_structure_studio<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Studio>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::Studio::builder();
            // Consume key/value pairs until the object is closed.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "StudioId" => {
                                builder = builder.set_studio_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "StudioArn" => {
                                builder = builder.set_studio_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Description" => {
                                builder = builder.set_description(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Enum-valued field: raw string mapped via `AuthMode::from`.
                            "AuthMode" => {
                                builder = builder.set_auth_mode(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::AuthMode::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            "VpcId" => {
                                builder = builder.set_vpc_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "SubnetIds" => {
                                builder = builder.set_subnet_ids(
                                    crate::json_deser::deser_list_subnet_id_list(tokens)?,
                                );
                            }
                            "ServiceRole" => {
                                builder = builder.set_service_role(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "UserRole" => {
                                builder = builder.set_user_role(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "WorkspaceSecurityGroupId" => {
                                builder = builder.set_workspace_security_group_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "EngineSecurityGroupId" => {
                                builder = builder.set_engine_security_group_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Url" => {
                                builder = builder.set_url(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Timestamp encoded as seconds since the Unix epoch.
                            "CreationTime" => {
                                builder = builder.set_creation_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "DefaultS3Location" => {
                                builder = builder.set_default_s3_location(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Tags" => {
                                builder = builder
                                    .set_tags(crate::json_deser::deser_list_tag_list(tokens)?);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an `AutoTerminationPolicy` structure from a JSON token stream.
///
/// JSON null yields `Ok(None)`; an object yields `Ok(Some(policy))`; anything
/// else is a deserialization error. Unknown keys are skipped.
pub fn deser_structure_auto_termination_policy<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::AutoTerminationPolicy>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::AutoTerminationPolicy::builder();
            // Walk key/value pairs until the closing brace.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        if key.to_unescaped()?.as_ref() == "IdleTimeout" {
                            let timeout =
                                smithy_json::deserialize::token::expect_number_or_null(
                                    tokens.next(),
                                )?
                                .map(|v| v.to_i64());
                            builder = builder.set_idle_timeout(timeout);
                        } else {
                            // Ignore keys this model version does not know about.
                            smithy_json::deserialize::token::skip_value(tokens)?;
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a `BlockPublicAccessConfiguration` structure from a JSON
/// token stream.
///
/// Returns `Ok(None)` for JSON null, `Ok(Some(config))` for an object.
/// Besides its own fields, this object also carries the
/// `Classification`/`Configurations`/`Properties` keys handled below;
/// unrecognized keys are skipped.
pub fn deser_structure_block_public_access_configuration<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::BlockPublicAccessConfiguration>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::BlockPublicAccessConfiguration::builder();
            // Consume key/value pairs until the object is closed.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "BlockPublicSecurityGroupRules" => {
                                builder = builder.set_block_public_security_group_rules(
                                    smithy_json::deserialize::token::expect_bool_or_null(
                                        tokens.next(),
                                    )?,
                                );
                            }
                            "PermittedPublicSecurityGroupRuleRanges" => {
                                builder = builder.set_permitted_public_security_group_rule_ranges(
                                    crate::json_deser::deser_list_port_ranges(tokens)?,
                                );
                            }
                            "Classification" => {
                                builder = builder.set_classification(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Configurations" => {
                                builder = builder.set_configurations(
                                    crate::json_deser::deser_list_configuration_list(tokens)?,
                                );
                            }
                            "Properties" => {
                                builder = builder.set_properties(
                                    crate::json_deser::deser_map_string_map(tokens)?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a `BlockPublicAccessConfigurationMetadata` structure from a
/// JSON token stream.
///
/// Returns `Ok(None)` for JSON null, `Ok(Some(metadata))` for an object.
/// `CreationDateTime` is decoded as an epoch-seconds timestamp; unknown keys
/// are skipped.
pub fn deser_structure_block_public_access_configuration_metadata<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<
    Option<crate::model::BlockPublicAccessConfigurationMetadata>,
    smithy_json::deserialize::Error,
>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::BlockPublicAccessConfigurationMetadata::builder();
            // Consume key/value pairs until the object is closed.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "CreationDateTime" => {
                                builder = builder.set_creation_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "CreatedByArn" => {
                                builder = builder.set_created_by_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a `ManagedScalingPolicy` structure from a JSON token stream.
///
/// JSON null yields `Ok(None)`; an object yields `Ok(Some(policy))`; anything
/// else is a deserialization error. Unknown keys are skipped.
pub fn deser_structure_managed_scaling_policy<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ManagedScalingPolicy>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ManagedScalingPolicy::builder();
            // Walk key/value pairs until the closing brace.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        if key.to_unescaped()?.as_ref() == "ComputeLimits" {
                            let limits =
                                crate::json_deser::deser_structure_compute_limits(tokens)?;
                            builder = builder.set_compute_limits(limits);
                        } else {
                            // Ignore keys this model version does not know about.
                            smithy_json::deserialize::token::skip_value(tokens)?;
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a `SessionMappingDetail` structure from a JSON token stream.
///
/// Returns `Ok(None)` for JSON null, `Ok(Some(detail))` for an object.
/// `IdentityType` is converted from its raw string via `From<&str>`, and the
/// two time fields use epoch-seconds format. Unknown keys are skipped.
pub fn deser_structure_session_mapping_detail<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::SessionMappingDetail>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::SessionMappingDetail::builder();
            // Consume key/value pairs until the object is closed.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "StudioId" => {
                                builder = builder.set_studio_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "IdentityId" => {
                                builder = builder.set_identity_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "IdentityName" => {
                                builder = builder.set_identity_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Enum-valued field: raw string mapped via `IdentityType::from`.
                            "IdentityType" => {
                                builder = builder.set_identity_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::IdentityType::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            "SessionPolicyArn" => {
                                builder = builder.set_session_policy_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Timestamps encoded as seconds since the Unix epoch.
                            "CreationTime" => {
                                builder = builder.set_creation_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "LastModifiedTime" => {
                                builder = builder.set_last_modified_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_command_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Command>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) = crate::json_deser::deser_structure_command(tokens)? {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_cluster_summary_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::ClusterSummary>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) = crate::json_deser::deser_structure_cluster_summary(tokens)? {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_instance_fleet_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::InstanceFleet>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) = crate::json_deser::deser_structure_instance_fleet(tokens)? {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_instance_group_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::InstanceGroup>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) = crate::json_deser::deser_structure_instance_group(tokens)? {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_instance_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Instance>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) = crate::json_deser::deser_structure_instance(tokens)? {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_notebook_execution_summary_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<
    Option<std::vec::Vec<crate::model::NotebookExecutionSummary>>,
    smithy_json::deserialize::Error,
>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) =
                    crate::json_deser::deser_structure_notebook_execution_summary(tokens)?
                {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_string_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Each member is an (optionally null) string; nulls are dropped.
                if let Some(member) =
                    smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                        .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                        .transpose()?
                {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_security_configuration_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<
    Option<std::vec::Vec<crate::model::SecurityConfigurationSummary>>,
    smithy_json::deserialize::Error,
>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) =
                    crate::json_deser::deser_structure_security_configuration_summary(tokens)?
                {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_step_summary_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::StepSummary>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) = crate::json_deser::deser_structure_step_summary(tokens)? {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_studio_summary_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::StudioSummary>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) = crate::json_deser::deser_structure_studio_summary(tokens)? {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_session_mapping_summary_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<
    Option<std::vec::Vec<crate::model::SessionMappingSummary>>,
    smithy_json::deserialize::Error,
>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) =
                    crate::json_deser::deser_structure_session_mapping_summary(tokens)?
                {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an `AutoScalingPolicyDescription` structure from a JSON
/// token stream.
///
/// Returns `Ok(None)` for JSON null, `Ok(Some(description))` for an object.
/// All three fields delegate to nested deserializers; unknown keys are
/// skipped.
pub fn deser_structure_auto_scaling_policy_description<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::AutoScalingPolicyDescription>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::AutoScalingPolicyDescription::builder();
            // Consume key/value pairs until the object is closed.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Status" => {
                                builder = builder.set_status(
                                    crate::json_deser::deser_structure_auto_scaling_policy_status(
                                        tokens,
                                    )?,
                                );
                            }
                            "Constraints" => {
                                builder = builder.set_constraints(
                                    crate::json_deser::deser_structure_scaling_constraints(tokens)?,
                                );
                            }
                            "Rules" => {
                                builder = builder.set_rules(
                                    crate::json_deser::deser_list_scaling_rule_list(tokens)?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a `CancelStepsInfo` structure from a JSON token stream.
///
/// Returns `Ok(None)` for JSON null, `Ok(Some(info))` for an object. The
/// `Status` field is converted from its raw string via
/// `CancelStepsRequestStatus::from`. Unknown keys are skipped.
pub fn deser_structure_cancel_steps_info<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::CancelStepsInfo>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::CancelStepsInfo::builder();
            // Consume key/value pairs until the object is closed.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "StepId" => {
                                builder = builder.set_step_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Enum-valued field mapped via `From<&str>`.
                            "Status" => {
                                builder = builder.set_status(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::CancelStepsRequestStatus::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "Reason" => {
                                builder = builder.set_reason(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a `ClusterStatus` structure from a JSON token stream.
///
/// Returns `Ok(None)` for JSON null, `Ok(Some(status))` for an object. The
/// `State` field is converted from its raw string via `ClusterState::from`;
/// the nested fields delegate to their own deserializers. Unknown keys are
/// skipped.
pub fn deser_structure_cluster_status<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ClusterStatus>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ClusterStatus::builder();
            // Consume key/value pairs until the object is closed.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "State" => {
                                builder = builder.set_state(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::ClusterState::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            "StateChangeReason" => {
                                builder = builder.set_state_change_reason(
                                    crate::json_deser::deser_structure_cluster_state_change_reason(
                                        tokens,
                                    )?,
                                );
                            }
                            "Timeline" => {
                                builder = builder.set_timeline(
                                    crate::json_deser::deser_structure_cluster_timeline(tokens)?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an `Ec2InstanceAttributes` structure from a JSON token
/// stream.
///
/// Returns `Ok(None)` for JSON null, `Ok(Some(attrs))` for an object. String
/// fields are unescaped into owned `String`s; the subnet/availability-zone
/// and security-group list fields delegate to list deserializers. Unknown
/// keys are skipped for forward compatibility.
pub fn deser_structure_ec2_instance_attributes<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Ec2InstanceAttributes>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::Ec2InstanceAttributes::builder();
            // Consume key/value pairs until the object is closed.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Ec2KeyName" => {
                                builder = builder.set_ec2_key_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Ec2SubnetId" => {
                                builder = builder.set_ec2_subnet_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "RequestedEc2SubnetIds" => {
                                builder = builder.set_requested_ec2_subnet_ids(
                                    crate::json_deser::deser_list_xml_string_max_len256_list(
                                        tokens,
                                    )?,
                                );
                            }
                            "Ec2AvailabilityZone" => {
                                builder = builder.set_ec2_availability_zone(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "RequestedEc2AvailabilityZones" => {
                                builder = builder.set_requested_ec2_availability_zones(
                                    crate::json_deser::deser_list_xml_string_max_len256_list(
                                        tokens,
                                    )?,
                                );
                            }
                            "IamInstanceProfile" => {
                                builder = builder.set_iam_instance_profile(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "EmrManagedMasterSecurityGroup" => {
                                builder = builder.set_emr_managed_master_security_group(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "EmrManagedSlaveSecurityGroup" => {
                                builder = builder.set_emr_managed_slave_security_group(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "ServiceAccessSecurityGroup" => {
                                builder = builder.set_service_access_security_group(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "AdditionalMasterSecurityGroups" => {
                                builder = builder.set_additional_master_security_groups(
                                    crate::json_deser::deser_list_string_list(tokens)?,
                                );
                            }
                            "AdditionalSlaveSecurityGroups" => {
                                builder = builder.set_additional_slave_security_groups(
                                    crate::json_deser::deser_list_string_list(tokens)?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_application_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Application>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // A JSON null maps to `None`; anything other than an array start is an error.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut out = Vec::new();
            // Pull members until the matching end-of-array token is reached.
            loop {
                if let Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) = tokens.peek() {
                    tokens.next().transpose().unwrap();
                    break;
                }
                // Null members are dropped rather than stored.
                if let Some(member) = crate::json_deser::deser_structure_application(tokens)? {
                    out.push(member);
                }
            }
            Ok(Some(out))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes a JSON array of `Tag` objects from a token stream.
///
/// `null` maps to `Ok(None)`; an array maps to `Ok(Some(vec))` with null
/// elements dropped; any other leading token is an error.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_tag_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Tag>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut tags = Vec::new();
            // Keep deserializing entries until the closing `]` token shows up.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                if let Some(tag) = crate::json_deser::deser_structure_tag(tokens)? {
                    tags.push(tag);
                }
            }
            // Discard the EndArray token; the peek above proved it is `Ok`.
            tokens.next().transpose().unwrap();
            Ok(Some(tags))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes a JSON array of `Configuration` objects from a token stream.
///
/// `null` maps to `Ok(None)`; an array maps to `Ok(Some(vec))` with null
/// elements dropped; any other leading token is an error.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_configuration_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::Configuration>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut configs = Vec::new();
            // Read elements until the array terminator is seen.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                if let Some(cfg) = crate::json_deser::deser_structure_configuration(tokens)? {
                    configs.push(cfg);
                }
            }
            // Consume the EndArray token; the peek above proved it is `Ok`.
            tokens.next().transpose().unwrap();
            Ok(Some(configs))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes a `crate::model::KerberosAttributes` from a JSON token stream.
///
/// Consumes tokens through the matching `EndObject`. Yields `Ok(None)` for a
/// JSON `null`, `Ok(Some(..))` for an object, and an error for any other
/// leading token. Unrecognized keys are skipped rather than rejected.
pub fn deser_structure_kerberos_attributes<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::KerberosAttributes>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::KerberosAttributes::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        // Dispatch on the unescaped key; each string field is
                        // read as `Option<String>` (JSON null -> None).
                        match key.to_unescaped()?.as_ref() {
                            "Realm" => {
                                builder = builder.set_realm(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "KdcAdminPassword" => {
                                builder = builder.set_kdc_admin_password(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "CrossRealmTrustPrincipalPassword" => {
                                builder = builder.set_cross_realm_trust_principal_password(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "ADDomainJoinUser" => {
                                builder = builder.set_ad_domain_join_user(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "ADDomainJoinPassword" => {
                                builder = builder.set_ad_domain_join_password(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Unknown key: consume and discard its value.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a JSON array of `PlacementGroupConfig` objects from a token
/// stream.
///
/// `null` maps to `Ok(None)`; an array maps to `Ok(Some(vec))` with null
/// elements dropped; any other leading token is an error.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_placement_group_config_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<
    Option<std::vec::Vec<crate::model::PlacementGroupConfig>>,
    smithy_json::deserialize::Error,
>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut configs = Vec::new();
            // Deserialize entries until the closing `]` token is next.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                if let Some(entry) =
                    crate::json_deser::deser_structure_placement_group_config(tokens)?
                {
                    configs.push(entry);
                }
            }
            // Consume the EndArray token; the peek above proved it is `Ok`.
            tokens.next().transpose().unwrap();
            Ok(Some(configs))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes a `crate::model::JobFlowDetail` from a JSON token stream.
///
/// Consumes tokens through the matching `EndObject`. Yields `Ok(None)` for a
/// JSON `null`, `Ok(Some(..))` for an object, and an error for any other
/// leading token. Unrecognized keys are skipped; nested structures and lists
/// are delegated to their dedicated `deser_*` helpers.
pub fn deser_structure_job_flow_detail<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::JobFlowDetail>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::JobFlowDetail::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "JobFlowId" => {
                                builder = builder.set_job_flow_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "LogUri" => {
                                builder = builder.set_log_uri(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "LogEncryptionKmsKeyId" => {
                                builder = builder.set_log_encryption_kms_key_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "AmiVersion" => {
                                builder = builder.set_ami_version(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Nested object: the helper consumes its own tokens.
                            "ExecutionStatusDetail" => {
                                builder = builder.set_execution_status_detail(
                                    crate::json_deser::deser_structure_job_flow_execution_status_detail(tokens)?
                                );
                            }
                            "Instances" => {
                                builder = builder.set_instances(
                                    crate::json_deser::deser_structure_job_flow_instances_detail(
                                        tokens,
                                    )?,
                                );
                            }
                            "Steps" => {
                                builder = builder.set_steps(
                                    crate::json_deser::deser_list_step_detail_list(tokens)?,
                                );
                            }
                            "BootstrapActions" => {
                                builder = builder.set_bootstrap_actions(
                                    crate::json_deser::deser_list_bootstrap_action_detail_list(
                                        tokens,
                                    )?,
                                );
                            }
                            "SupportedProducts" => {
                                builder = builder.set_supported_products(
                                    crate::json_deser::deser_list_supported_products_list(tokens)?,
                                );
                            }
                            "VisibleToAllUsers" => {
                                builder = builder.set_visible_to_all_users(
                                    smithy_json::deserialize::token::expect_bool_or_null(
                                        tokens.next(),
                                    )?,
                                );
                            }
                            "JobFlowRole" => {
                                builder = builder.set_job_flow_role(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "ServiceRole" => {
                                builder = builder.set_service_role(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "AutoScalingRole" => {
                                builder = builder.set_auto_scaling_role(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Enum field: the raw string is converted via `From<&str>`.
                            "ScaleDownBehavior" => {
                                builder = builder.set_scale_down_behavior(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::ScaleDownBehavior::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            // Unknown key: consume and discard its value.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
pub fn deser_structure_execution_engine_config<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ExecutionEngineConfig>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::ExecutionEngineConfig::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Id" => {
builder = builder.set_id(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Type" => {
builder = builder.set_type(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| {
s.to_unescaped().map(|u| {
crate::model::ExecutionEngineType::from(u.as_ref())
})
})
.transpose()?,
);
}
"MasterInstanceSecurityGroupId" => {
builder = builder.set_master_instance_security_group_id(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
pub fn deser_structure_simplified_application<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::SimplifiedApplication>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::SimplifiedApplication::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Name" => {
builder = builder.set_name(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Version" => {
builder = builder.set_version(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
/// Deserializes a `crate::model::HadoopStepConfig` from a JSON token stream.
///
/// Consumes tokens through the matching `EndObject`. Yields `Ok(None)` for a
/// JSON `null`, `Ok(Some(..))` for an object, and an error for any other
/// leading token. Unrecognized keys are skipped rather than rejected.
pub fn deser_structure_hadoop_step_config<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::HadoopStepConfig>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::HadoopStepConfig::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Jar" => {
                                builder = builder.set_jar(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Nested string map: the helper consumes its own tokens.
                            "Properties" => {
                                builder = builder.set_properties(
                                    crate::json_deser::deser_map_string_map(tokens)?,
                                );
                            }
                            "MainClass" => {
                                builder = builder.set_main_class(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Args" => {
                                builder = builder
                                    .set_args(crate::json_deser::deser_list_string_list(tokens)?);
                            }
                            // Unknown key: consume and discard its value.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a `crate::model::StepStatus` from a JSON token stream.
///
/// Consumes tokens through the matching `EndObject`. Yields `Ok(None)` for a
/// JSON `null`, `Ok(Some(..))` for an object, and an error for any other
/// leading token. Unrecognized keys are skipped rather than rejected.
pub fn deser_structure_step_status<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::StepStatus>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::StepStatus::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            // Enum field: the raw string is converted via `From<&str>`.
                            "State" => {
                                builder = builder.set_state(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::StepState::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            // Nested objects: each helper consumes its own tokens.
                            "StateChangeReason" => {
                                builder = builder.set_state_change_reason(
                                    crate::json_deser::deser_structure_step_state_change_reason(
                                        tokens,
                                    )?,
                                );
                            }
                            "FailureDetails" => {
                                builder = builder.set_failure_details(
                                    crate::json_deser::deser_structure_failure_details(tokens)?,
                                );
                            }
                            "Timeline" => {
                                builder = builder.set_timeline(
                                    crate::json_deser::deser_structure_step_timeline(tokens)?,
                                );
                            }
                            // Unknown key: consume and discard its value.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a JSON array of subnet-id strings from a token stream.
///
/// `null` maps to `Ok(None)`; an array maps to `Ok(Some(vec))` with null
/// elements dropped; any other leading token is an error.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_subnet_id_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut subnet_ids = Vec::new();
            // Read string entries until the closing `]` token appears.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                let entry =
                    smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                        .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                        .transpose()?;
                if let Some(entry) = entry {
                    subnet_ids.push(entry);
                }
            }
            // Consume the EndArray token; the peek above proved it is `Ok`.
            tokens.next().transpose().unwrap();
            Ok(Some(subnet_ids))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes a JSON array of `PortRange` objects from a token stream.
///
/// `null` maps to `Ok(None)`; an array maps to `Ok(Some(vec))` with null
/// elements dropped; any other leading token is an error.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_port_ranges<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::PortRange>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut ranges = Vec::new();
            // Deserialize entries until the array terminator is next.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                if let Some(range) = crate::json_deser::deser_structure_port_range(tokens)? {
                    ranges.push(range);
                }
            }
            // Consume the EndArray token; the peek above proved it is `Ok`.
            tokens.next().transpose().unwrap();
            Ok(Some(ranges))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_map_string_map<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<
Option<std::collections::HashMap<std::string::String, std::string::String>>,
smithy_json::deserialize::Error,
>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
let mut map = std::collections::HashMap::new();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
let key = key.to_unescaped().map(|u| u.into_owned())?;
let value =
smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?;
if let Some(value) = value {
map.insert(key, value);
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(map))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
/// Deserializes a `crate::model::ComputeLimits` from a JSON token stream.
///
/// Consumes tokens through the matching `EndObject`. Yields `Ok(None)` for a
/// JSON `null`, `Ok(Some(..))` for an object, and an error for any other
/// leading token. Unrecognized keys are skipped rather than rejected.
pub fn deser_structure_compute_limits<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ComputeLimits>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ComputeLimits::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            // Enum field: the raw string is converted via `From<&str>`.
                            "UnitType" => {
                                builder = builder.set_unit_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::ComputeLimitsUnitType::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            // Numeric fields are narrowed to i32 via `to_i32`.
                            "MinimumCapacityUnits" => {
                                builder = builder.set_minimum_capacity_units(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "MaximumCapacityUnits" => {
                                builder = builder.set_maximum_capacity_units(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "MaximumOnDemandCapacityUnits" => {
                                builder = builder.set_maximum_on_demand_capacity_units(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "MaximumCoreCapacityUnits" => {
                                builder = builder.set_maximum_core_capacity_units(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            // Unknown key: consume and discard its value.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
pub fn deser_structure_command<'a, I>(
tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Command>, smithy_json::deserialize::Error>
where
I: Iterator<
Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
>,
{
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
Some(smithy_json::deserialize::Token::StartObject { .. }) => {
#[allow(unused_mut)]
let mut builder = crate::model::Command::builder();
loop {
match tokens.next().transpose()? {
Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
match key.to_unescaped()?.as_ref() {
"Name" => {
builder = builder.set_name(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"ScriptPath" => {
builder = builder.set_script_path(
smithy_json::deserialize::token::expect_string_or_null(
tokens.next(),
)?
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
"Args" => {
builder = builder
.set_args(crate::json_deser::deser_list_string_list(tokens)?);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
/// Deserializes a `crate::model::ClusterSummary` from a JSON token stream.
///
/// Consumes tokens through the matching `EndObject`. Yields `Ok(None)` for a
/// JSON `null`, `Ok(Some(..))` for an object, and an error for any other
/// leading token. Unrecognized keys are skipped rather than rejected.
pub fn deser_structure_cluster_summary<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ClusterSummary>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ClusterSummary::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Id" => {
                                builder = builder.set_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Nested object: the helper consumes its own tokens.
                            "Status" => {
                                builder = builder.set_status(
                                    crate::json_deser::deser_structure_cluster_status(tokens)?,
                                );
                            }
                            // Numeric field narrowed to i32 via `to_i32`.
                            "NormalizedInstanceHours" => {
                                builder = builder.set_normalized_instance_hours(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "ClusterArn" => {
                                builder = builder.set_cluster_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "OutpostArn" => {
                                builder = builder.set_outpost_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Unknown key: consume and discard its value.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a `crate::model::InstanceFleet` from a JSON token stream.
///
/// Consumes tokens through the matching `EndObject`. Yields `Ok(None)` for a
/// JSON `null`, `Ok(Some(..))` for an object, and an error for any other
/// leading token. Unrecognized keys are skipped; nested structures and lists
/// are delegated to their dedicated `deser_*` helpers.
pub fn deser_structure_instance_fleet<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceFleet>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceFleet::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Id" => {
                                builder = builder.set_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Status" => {
                                builder = builder.set_status(
                                    crate::json_deser::deser_structure_instance_fleet_status(
                                        tokens,
                                    )?,
                                );
                            }
                            // Enum field: the raw string is converted via `From<&str>`.
                            "InstanceFleetType" => {
                                builder = builder.set_instance_fleet_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceFleetType::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            // Capacity counters are narrowed to i32 via `to_i32`.
                            "TargetOnDemandCapacity" => {
                                builder = builder.set_target_on_demand_capacity(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "TargetSpotCapacity" => {
                                builder = builder.set_target_spot_capacity(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "ProvisionedOnDemandCapacity" => {
                                builder = builder.set_provisioned_on_demand_capacity(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "ProvisionedSpotCapacity" => {
                                builder = builder.set_provisioned_spot_capacity(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "InstanceTypeSpecifications" => {
                                builder = builder.set_instance_type_specifications(
                                    crate::json_deser::deser_list_instance_type_specification_list(
                                        tokens,
                                    )?,
                                );
                            }
                            "LaunchSpecifications" => {
                                builder = builder.set_launch_specifications(
                                    crate::json_deser::deser_structure_instance_fleet_provisioning_specifications(tokens)?
                                );
                            }
                            // Unknown key: consume and discard its value.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes a `crate::model::InstanceGroup` from a JSON token stream.
///
/// Consumes tokens through the matching `EndObject`. Yields `Ok(None)` for a
/// JSON `null`, `Ok(Some(..))` for an object, and an error for any other
/// leading token. Unrecognized keys are skipped; nested structures and lists
/// are delegated to their dedicated `deser_*` helpers.
pub fn deser_structure_instance_group<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceGroup>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceGroup::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Id" => {
                                builder = builder.set_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Enum fields: the raw string is converted via `From<&str>`.
                            "Market" => {
                                builder = builder.set_market(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::MarketType::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            "InstanceGroupType" => {
                                builder = builder.set_instance_group_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceGroupType::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "BidPrice" => {
                                builder = builder.set_bid_price(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "InstanceType" => {
                                builder = builder.set_instance_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Instance counts are narrowed to i32 via `to_i32`.
                            "RequestedInstanceCount" => {
                                builder = builder.set_requested_instance_count(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "RunningInstanceCount" => {
                                builder = builder.set_running_instance_count(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "Status" => {
                                builder = builder.set_status(
                                    crate::json_deser::deser_structure_instance_group_status(
                                        tokens,
                                    )?,
                                );
                            }
                            "Configurations" => {
                                builder = builder.set_configurations(
                                    crate::json_deser::deser_list_configuration_list(tokens)?,
                                );
                            }
                            // Version counters are kept at i64 precision via `to_i64`.
                            "ConfigurationsVersion" => {
                                builder = builder.set_configurations_version(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i64()),
                                );
                            }
                            "LastSuccessfullyAppliedConfigurations" => {
                                builder = builder.set_last_successfully_applied_configurations(
                                    crate::json_deser::deser_list_configuration_list(tokens)?,
                                );
                            }
                            "LastSuccessfullyAppliedConfigurationsVersion" => {
                                builder = builder
                                    .set_last_successfully_applied_configurations_version(
                                        smithy_json::deserialize::token::expect_number_or_null(
                                            tokens.next(),
                                        )?
                                        .map(|v| v.to_i64()),
                                    );
                            }
                            "EbsBlockDevices" => {
                                builder = builder.set_ebs_block_devices(
                                    crate::json_deser::deser_list_ebs_block_device_list(tokens)?,
                                );
                            }
                            "EbsOptimized" => {
                                builder = builder.set_ebs_optimized(
                                    smithy_json::deserialize::token::expect_bool_or_null(
                                        tokens.next(),
                                    )?,
                                );
                            }
                            "ShrinkPolicy" => {
                                builder = builder.set_shrink_policy(
                                    crate::json_deser::deser_structure_shrink_policy(tokens)?,
                                );
                            }
                            "AutoScalingPolicy" => {
                                builder = builder.set_auto_scaling_policy(
                                    crate::json_deser::deser_structure_auto_scaling_policy_description(tokens)?
                                );
                            }
                            "CustomAmiId" => {
                                builder = builder.set_custom_ami_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Unknown key: consume and discard its value.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::Instance`] from a JSON token stream.
///
/// Consumes one leading token: JSON `null` yields `Ok(None)`; `{` starts an
/// object whose recognized members (`Id`, `Ec2InstanceId`, DNS/IP fields,
/// `Status`, group/fleet ids, `Market`, `InstanceType`, `EbsVolumes`) are
/// copied into the builder, and unknown members are skipped.
///
/// # Errors
/// Returns a deserialize error when the stream does not start with `null` or
/// `{`, or when a member value has an unexpected token type.
pub fn deser_structure_instance<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Instance>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::Instance::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Id" => {
                                builder = builder.set_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Ec2InstanceId" => {
                                builder = builder.set_ec2_instance_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "PublicDnsName" => {
                                builder = builder.set_public_dns_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "PublicIpAddress" => {
                                builder = builder.set_public_ip_address(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "PrivateDnsName" => {
                                builder = builder.set_private_dns_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "PrivateIpAddress" => {
                                builder = builder.set_private_ip_address(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Status" => {
                                builder = builder.set_status(
                                    crate::json_deser::deser_structure_instance_status(tokens)?,
                                );
                            }
                            "InstanceGroupId" => {
                                builder = builder.set_instance_group_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "InstanceFleetId" => {
                                builder = builder.set_instance_fleet_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Market" => {
                                builder = builder.set_market(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::MarketType::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            "InstanceType" => {
                                builder = builder.set_instance_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "EbsVolumes" => {
                                builder = builder.set_ebs_volumes(
                                    crate::json_deser::deser_list_ebs_volume_list(tokens)?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::NotebookExecutionSummary`] from a
/// JSON token stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose recognized members
/// populate the builder (timestamps use epoch-seconds format) and unknown
/// members are skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_notebook_execution_summary<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::NotebookExecutionSummary>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::NotebookExecutionSummary::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "NotebookExecutionId" => {
                                builder = builder.set_notebook_execution_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "EditorId" => {
                                builder = builder.set_editor_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "NotebookExecutionName" => {
                                builder = builder.set_notebook_execution_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Status" => {
                                builder = builder.set_status(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::NotebookExecutionStatus::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "StartTime" => {
                                builder = builder.set_start_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "EndTime" => {
                                builder = builder.set_end_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::SecurityConfigurationSummary`]
/// from a JSON token stream.
///
/// A leading `null` token produces `Ok(None)`; a `{` token starts an object
/// whose `Name` and `CreationDateTime` members populate the builder, while
/// any other member is skipped. Any other leading token is an error.
pub fn deser_structure_security_configuration_summary<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::SecurityConfigurationSummary>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::SecurityConfigurationSummary::builder();
            loop {
                let token = tokens.next().transpose()?;
                match token {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => {
                        // Closing brace: the object is complete.
                        return Ok(Some(builder.build()));
                    }
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        let field = key.to_unescaped()?;
                        if field == "Name" {
                            let name = smithy_json::deserialize::token::expect_string_or_null(
                                tokens.next(),
                            )?
                            .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                            .transpose()?;
                            builder = builder.set_name(name);
                        } else if field == "CreationDateTime" {
                            let stamp = smithy_json::deserialize::token::expect_timestamp_or_null(
                                tokens.next(),
                                smithy_types::instant::Format::EpochSeconds,
                            )?;
                            builder = builder.set_creation_date_time(stamp);
                        } else {
                            // Unrecognized member: discard its value.
                            smithy_json::deserialize::token::skip_value(tokens)?;
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::StepSummary`] from a JSON token
/// stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose `Id`, `Name`,
/// `Config`, `ActionOnFailure`, and `Status` members populate the builder,
/// with unknown members skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_step_summary<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::StepSummary>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::StepSummary::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Id" => {
                                builder = builder.set_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Config" => {
                                builder = builder.set_config(
                                    crate::json_deser::deser_structure_hadoop_step_config(tokens)?,
                                );
                            }
                            "ActionOnFailure" => {
                                builder = builder.set_action_on_failure(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::ActionOnFailure::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "Status" => {
                                builder = builder.set_status(
                                    crate::json_deser::deser_structure_step_status(tokens)?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::StudioSummary`] from a JSON token
/// stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose `StudioId`, `Name`,
/// `VpcId`, `Description`, `Url`, and `CreationTime` members populate the
/// builder, with unknown members skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_studio_summary<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::StudioSummary>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::StudioSummary::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "StudioId" => {
                                builder = builder.set_studio_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "VpcId" => {
                                builder = builder.set_vpc_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Description" => {
                                builder = builder.set_description(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Url" => {
                                builder = builder.set_url(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "CreationTime" => {
                                builder = builder.set_creation_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::SessionMappingSummary`] from a
/// JSON token stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose studio/identity
/// members populate the builder (`IdentityType` is converted to its enum),
/// with unknown members skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_session_mapping_summary<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::SessionMappingSummary>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::SessionMappingSummary::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "StudioId" => {
                                builder = builder.set_studio_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "IdentityId" => {
                                builder = builder.set_identity_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "IdentityName" => {
                                builder = builder.set_identity_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "IdentityType" => {
                                builder = builder.set_identity_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::IdentityType::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            "SessionPolicyArn" => {
                                builder = builder.set_session_policy_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "CreationTime" => {
                                builder = builder.set_creation_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::AutoScalingPolicyStatus`] from a
/// JSON token stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose `State` (converted
/// to its enum) and `StateChangeReason` (nested structure) members populate
/// the builder, with unknown members skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_auto_scaling_policy_status<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::AutoScalingPolicyStatus>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::AutoScalingPolicyStatus::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "State" => {
                                builder = builder.set_state(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::AutoScalingPolicyState::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "StateChangeReason" => {
                                builder = builder.set_state_change_reason(
                                    crate::json_deser::deser_structure_auto_scaling_policy_state_change_reason(tokens)?
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::ScalingConstraints`] from a JSON
/// token stream.
///
/// A leading `null` produces `Ok(None)`; a `{` starts an object whose
/// `MinCapacity` and `MaxCapacity` members (read as i32) populate the
/// builder, while any other member is skipped.
pub fn deser_structure_scaling_constraints<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ScalingConstraints>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ScalingConstraints::builder();
            loop {
                let token = tokens.next().transpose()?;
                match token {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => {
                        // Object complete.
                        return Ok(Some(builder.build()));
                    }
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        let field = key.to_unescaped()?;
                        if field == "MinCapacity" || field == "MaxCapacity" {
                            // Both members share the same numeric decoding.
                            let capacity = smithy_json::deserialize::token::expect_number_or_null(
                                tokens.next(),
                            )?
                            .map(|v| v.to_i32());
                            builder = if field == "MinCapacity" {
                                builder.set_min_capacity(capacity)
                            } else {
                                builder.set_max_capacity(capacity)
                            };
                        } else {
                            // Unrecognized member: discard its value.
                            smithy_json::deserialize::token::skip_value(tokens)?;
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional JSON array of [`crate::model::ScalingRule`].
///
/// `null` yields `Ok(None)`; `[` starts an array whose elements are parsed
/// by `deser_structure_scaling_rule`, dropping elements that parse to
/// `None`. Any other leading token is an error.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_scaling_rule_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::ScalingRule>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            // Consume elements until the closing `]` is observed.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                if let Some(rule) = crate::json_deser::deser_structure_scaling_rule(tokens)? {
                    items.push(rule);
                }
            }
            // Swallow the EndArray token we just peeked at.
            tokens.next().transpose().unwrap();
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::ClusterStateChangeReason`] from a
/// JSON token stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose `Code` (converted to
/// its enum) and `Message` members populate the builder, with unknown
/// members skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_cluster_state_change_reason<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ClusterStateChangeReason>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ClusterStateChangeReason::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Code" => {
                                builder = builder.set_code(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::ClusterStateChangeReasonCode::from(
                                                u.as_ref(),
                                            )
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "Message" => {
                                builder = builder.set_message(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::ClusterTimeline`] from a JSON
/// token stream.
///
/// A leading `null` produces `Ok(None)`; a `{` starts an object whose
/// `CreationDateTime`, `ReadyDateTime`, and `EndDateTime` members (all
/// epoch-seconds timestamps) populate the builder; other members are
/// skipped.
pub fn deser_structure_cluster_timeline<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ClusterTimeline>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ClusterTimeline::builder();
            loop {
                let token = tokens.next().transpose()?;
                match token {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => {
                        // Object complete.
                        return Ok(Some(builder.build()));
                    }
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        let field = key.to_unescaped()?;
                        let known = field == "CreationDateTime"
                            || field == "ReadyDateTime"
                            || field == "EndDateTime";
                        if known {
                            // All three members share the same timestamp decoding.
                            let stamp = smithy_json::deserialize::token::expect_timestamp_or_null(
                                tokens.next(),
                                smithy_types::instant::Format::EpochSeconds,
                            )?;
                            builder = match field.as_ref() {
                                "CreationDateTime" => builder.set_creation_date_time(stamp),
                                "ReadyDateTime" => builder.set_ready_date_time(stamp),
                                _ => builder.set_end_date_time(stamp),
                            };
                        } else {
                            // Unrecognized member: discard its value.
                            smithy_json::deserialize::token::skip_value(tokens)?;
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional JSON array of strings.
///
/// `null` yields `Ok(None)`; `[` starts an array whose string elements are
/// unescaped into owned `String`s, dropping any `null` elements. Any other
/// leading token is an error.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_xml_string_max_len256_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            // Consume elements until the closing `]` is observed.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                let element =
                    smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                        .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                        .transpose()?;
                if let Some(element) = element {
                    items.push(element);
                }
            }
            // Swallow the EndArray token we just peeked at.
            tokens.next().transpose().unwrap();
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::Application`] from a JSON token
/// stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose `Name`, `Version`,
/// `Args` (string list), and `AdditionalInfo` (string map) members populate
/// the builder, with unknown members skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_application<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Application>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::Application::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Version" => {
                                builder = builder.set_version(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Args" => {
                                builder = builder
                                    .set_args(crate::json_deser::deser_list_string_list(tokens)?);
                            }
                            "AdditionalInfo" => {
                                builder = builder.set_additional_info(
                                    crate::json_deser::deser_map_string_map(tokens)?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::Tag`] from a JSON token stream.
///
/// A leading `null` produces `Ok(None)`; a `{` starts an object whose `Key`
/// and `Value` members populate the builder, while any other member is
/// skipped. Any other leading token is an error.
pub fn deser_structure_tag<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Tag>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::Tag::builder();
            loop {
                let token = tokens.next().transpose()?;
                match token {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => {
                        // Object complete.
                        return Ok(Some(builder.build()));
                    }
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        let field = key.to_unescaped()?;
                        if field == "Key" || field == "Value" {
                            // Both members are plain optional strings.
                            let text = smithy_json::deserialize::token::expect_string_or_null(
                                tokens.next(),
                            )?
                            .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                            .transpose()?;
                            builder = if field == "Key" {
                                builder.set_key(text)
                            } else {
                                builder.set_value(text)
                            };
                        } else {
                            // Unrecognized member: discard its value.
                            smithy_json::deserialize::token::skip_value(tokens)?;
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::Configuration`] from a JSON token
/// stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose `Classification`,
/// `Configurations` (recursive list), and `Properties` (string map) members
/// populate the builder, with unknown members skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_configuration<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Configuration>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::Configuration::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Classification" => {
                                builder = builder.set_classification(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Configurations" => {
                                builder = builder.set_configurations(
                                    crate::json_deser::deser_list_configuration_list(tokens)?,
                                );
                            }
                            "Properties" => {
                                builder = builder.set_properties(
                                    crate::json_deser::deser_map_string_map(tokens)?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::PlacementGroupConfig`] from a
/// JSON token stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose `InstanceRole` and
/// `PlacementStrategy` members (both converted to their enums) populate the
/// builder, with unknown members skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_placement_group_config<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::PlacementGroupConfig>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::PlacementGroupConfig::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "InstanceRole" => {
                                builder = builder.set_instance_role(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceRoleType::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "PlacementStrategy" => {
                                builder = builder.set_placement_strategy(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::PlacementGroupStrategy::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::JobFlowExecutionStatusDetail`]
/// from a JSON token stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose `State` (enum),
/// four lifecycle timestamps (epoch seconds), and `LastStateChangeReason`
/// members populate the builder, with unknown members skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_job_flow_execution_status_detail<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::JobFlowExecutionStatusDetail>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::JobFlowExecutionStatusDetail::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "State" => {
                                builder = builder.set_state(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::JobFlowExecutionState::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "CreationDateTime" => {
                                builder = builder.set_creation_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "StartDateTime" => {
                                builder = builder.set_start_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "ReadyDateTime" => {
                                builder = builder.set_ready_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "EndDateTime" => {
                                builder = builder.set_end_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "LastStateChangeReason" => {
                                builder = builder.set_last_state_change_reason(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::JobFlowInstancesDetail`] from a
/// JSON token stream.
///
/// `null` yields `Ok(None)`; `{` begins an object whose instance-type,
/// DNS, count, group-list, placement, and flag members populate the builder,
/// with unknown members skipped.
///
/// # Errors
/// Returns a deserialize error for an unexpected leading token or a member
/// value with the wrong token type.
pub fn deser_structure_job_flow_instances_detail<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::JobFlowInstancesDetail>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::JobFlowInstancesDetail::builder();
            // Read members until the matching EndObject token.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "MasterInstanceType" => {
                                builder = builder.set_master_instance_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "MasterPublicDnsName" => {
                                builder = builder.set_master_public_dns_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "MasterInstanceId" => {
                                builder = builder.set_master_instance_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "SlaveInstanceType" => {
                                builder = builder.set_slave_instance_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "InstanceCount" => {
                                builder = builder.set_instance_count(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "InstanceGroups" => {
                                builder = builder.set_instance_groups(
                                    crate::json_deser::deser_list_instance_group_detail_list(
                                        tokens,
                                    )?,
                                );
                            }
                            "NormalizedInstanceHours" => {
                                builder = builder.set_normalized_instance_hours(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "Ec2KeyName" => {
                                builder = builder.set_ec2_key_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Ec2SubnetId" => {
                                builder = builder.set_ec2_subnet_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Placement" => {
                                builder = builder.set_placement(
                                    crate::json_deser::deser_structure_placement_type(tokens)?,
                                );
                            }
                            "KeepJobFlowAliveWhenNoSteps" => {
                                builder = builder.set_keep_job_flow_alive_when_no_steps(
                                    smithy_json::deserialize::token::expect_bool_or_null(
                                        tokens.next(),
                                    )?,
                                );
                            }
                            "TerminationProtected" => {
                                builder = builder.set_termination_protected(
                                    smithy_json::deserialize::token::expect_bool_or_null(
                                        tokens.next(),
                                    )?,
                                );
                            }
                            "HadoopVersion" => {
                                builder = builder.set_hadoop_version(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Unknown member: skip so newer service fields don't break parsing.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional JSON array of [`crate::model::StepDetail`].
///
/// `null` yields `Ok(None)`; `[` starts an array whose elements are parsed
/// by `deser_structure_step_detail`, dropping elements that parse to `None`.
/// Any other leading token is an error.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_step_detail_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::StepDetail>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            // Consume elements until the closing `]` is observed.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                if let Some(detail) = crate::json_deser::deser_structure_step_detail(tokens)? {
                    items.push(detail);
                }
            }
            // Swallow the EndArray token we just peeked at.
            tokens.next().transpose().unwrap();
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional list of [`crate::model::BootstrapActionDetail`]
/// from the JSON token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON array; `null` array
/// members are dropped rather than stored.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_bootstrap_action_detail_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<
    Option<std::vec::Vec<crate::model::BootstrapActionDetail>>,
    smithy_json::deserialize::Error,
>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        // Consume the EndArray token; the peek above guarantees next() is Ok.
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value =
                            crate::json_deser::deser_structure_bootstrap_action_detail(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional list of strings from the JSON token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON array of strings.
/// String members are unescaped into owned `String`s; `null` members are
/// dropped rather than stored.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_supported_products_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut collected: Vec<std::string::String> = Vec::new();
            // Parse members until the closing `]` token shows up under the cursor.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                let member =
                    smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                        .map(|escaped| escaped.to_unescaped().map(|cow| cow.into_owned()))
                        .transpose()?;
                if let Some(member) = member {
                    collected.push(member);
                }
            }
            // Discard the EndArray token itself; the peek above proved it is Ok.
            tokens.next().transpose().unwrap();
            Ok(Some(collected))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::StepStateChangeReason`] from the
/// JSON token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped so responses from newer service versions still parse.
pub fn deser_structure_step_state_change_reason<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::StepStateChangeReason>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            // `unused_mut` is allowed because codegen emits the same shape even
            // for structures with no members.
            #[allow(unused_mut)]
            let mut builder = crate::model::StepStateChangeReason::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Code" => {
                                builder = builder.set_code(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::StepStateChangeReasonCode::from(
                                                u.as_ref(),
                                            )
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "Message" => {
                                builder = builder.set_message(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::FailureDetails`] from the JSON
/// token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped so responses from newer service versions still parse.
pub fn deser_structure_failure_details<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::FailureDetails>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::FailureDetails::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Reason" => {
                                builder = builder.set_reason(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Message" => {
                                builder = builder.set_message(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "LogFile" => {
                                builder = builder.set_log_file(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::StepTimeline`] from the JSON token
/// stream.
///
/// All timestamp members are decoded using the epoch-seconds wire format.
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped.
pub fn deser_structure_step_timeline<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::StepTimeline>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::StepTimeline::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "CreationDateTime" => {
                                builder = builder.set_creation_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "StartDateTime" => {
                                builder = builder.set_start_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "EndDateTime" => {
                                builder = builder.set_end_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::PortRange`] from the JSON token
/// stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON object containing the
/// numeric `MinRange`/`MaxRange` members; unrecognized keys are skipped.
pub fn deser_structure_port_range<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::PortRange>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    // Guard clauses: handle null and malformed input up front, then parse the object.
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => return Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {}
        _ => {
            return Err(smithy_json::deserialize::Error::custom(
                "expected start object or null",
            ))
        }
    }
    #[allow(unused_mut)]
    let mut builder = crate::model::PortRange::builder();
    loop {
        match tokens.next().transpose()? {
            Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
            Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                let field = key.to_unescaped()?;
                if field == "MinRange" {
                    let number =
                        smithy_json::deserialize::token::expect_number_or_null(tokens.next())?;
                    builder = builder.set_min_range(number.map(|n| n.to_i32()));
                } else if field == "MaxRange" {
                    let number =
                        smithy_json::deserialize::token::expect_number_or_null(tokens.next())?;
                    builder = builder.set_max_range(number.map(|n| n.to_i32()));
                } else {
                    // Forward compatibility: skip keys this client doesn't know.
                    smithy_json::deserialize::token::skip_value(tokens)?;
                }
            }
            _ => {
                return Err(smithy_json::deserialize::Error::custom(
                    "expected object key or end object",
                ))
            }
        }
    }
    Ok(Some(builder.build()))
}
/// Deserializes an optional [`crate::model::InstanceFleetStatus`] from the
/// JSON token stream.
///
/// Nested members (`StateChangeReason`, `Timeline`) are delegated to their own
/// deserializers. Expects either `null` (yields `Ok(None)`) or a JSON object;
/// unrecognized keys are skipped.
pub fn deser_structure_instance_fleet_status<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceFleetStatus>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceFleetStatus::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "State" => {
                                builder = builder.set_state(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceFleetState::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "StateChangeReason" => {
                                builder = builder.set_state_change_reason(
                                    crate::json_deser::deser_structure_instance_fleet_state_change_reason(tokens)?
                                );
                            }
                            "Timeline" => {
                                builder = builder.set_timeline(
                                    crate::json_deser::deser_structure_instance_fleet_timeline(
                                        tokens,
                                    )?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional list of [`crate::model::InstanceTypeSpecification`]
/// from the JSON token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON array; `null` array
/// members are dropped rather than stored.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_instance_type_specification_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<
    Option<std::vec::Vec<crate::model::InstanceTypeSpecification>>,
    smithy_json::deserialize::Error,
>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        // Consume the EndArray token; the peek above guarantees next() is Ok.
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value =
                            crate::json_deser::deser_structure_instance_type_specification(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional
/// [`crate::model::InstanceFleetProvisioningSpecifications`] from the JSON
/// token stream.
///
/// Both members are nested structures delegated to their own deserializers.
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped.
pub fn deser_structure_instance_fleet_provisioning_specifications<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<
    Option<crate::model::InstanceFleetProvisioningSpecifications>,
    smithy_json::deserialize::Error,
>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceFleetProvisioningSpecifications::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "SpotSpecification" => {
                                builder = builder.set_spot_specification(
                                    crate::json_deser::deser_structure_spot_provisioning_specification(tokens)?
                                );
                            }
                            "OnDemandSpecification" => {
                                builder = builder.set_on_demand_specification(
                                    crate::json_deser::deser_structure_on_demand_provisioning_specification(tokens)?
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceGroupStatus`] from the
/// JSON token stream.
///
/// Nested members (`StateChangeReason`, `Timeline`) are delegated to their own
/// deserializers. Expects either `null` (yields `Ok(None)`) or a JSON object;
/// unrecognized keys are skipped.
pub fn deser_structure_instance_group_status<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceGroupStatus>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceGroupStatus::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "State" => {
                                builder = builder.set_state(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceGroupState::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "StateChangeReason" => {
                                builder = builder.set_state_change_reason(
                                    crate::json_deser::deser_structure_instance_group_state_change_reason(tokens)?
                                );
                            }
                            "Timeline" => {
                                builder = builder.set_timeline(
                                    crate::json_deser::deser_structure_instance_group_timeline(
                                        tokens,
                                    )?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional list of [`crate::model::EbsBlockDevice`] from the
/// JSON token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON array; `null` array
/// members are dropped rather than stored.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_ebs_block_device_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::EbsBlockDevice>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        // Consume the EndArray token; the peek above guarantees next() is Ok.
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = crate::json_deser::deser_structure_ebs_block_device(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::ShrinkPolicy`] from the JSON token
/// stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped.
pub fn deser_structure_shrink_policy<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ShrinkPolicy>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ShrinkPolicy::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "DecommissionTimeout" => {
                                builder = builder.set_decommission_timeout(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "InstanceResizePolicy" => {
                                builder = builder.set_instance_resize_policy(
                                    crate::json_deser::deser_structure_instance_resize_policy(
                                        tokens,
                                    )?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceStatus`] from the JSON
/// token stream.
///
/// Nested members (`StateChangeReason`, `Timeline`) are delegated to their own
/// deserializers. Expects either `null` (yields `Ok(None)`) or a JSON object;
/// unrecognized keys are skipped.
pub fn deser_structure_instance_status<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceStatus>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceStatus::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "State" => {
                                builder = builder.set_state(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::InstanceState::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            "StateChangeReason" => {
                                builder = builder.set_state_change_reason(
                                    crate::json_deser::deser_structure_instance_state_change_reason(tokens)?
                                );
                            }
                            "Timeline" => {
                                builder = builder.set_timeline(
                                    crate::json_deser::deser_structure_instance_timeline(tokens)?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional list of [`crate::model::EbsVolume`] from the JSON
/// token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON array; `null` array
/// members are dropped rather than stored.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_ebs_volume_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::EbsVolume>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut collected = Vec::new();
            // Parse members until the closing `]` token shows up under the cursor.
            while !matches!(
                tokens.peek(),
                Some(Ok(smithy_json::deserialize::Token::EndArray { .. }))
            ) {
                if let Some(member) = crate::json_deser::deser_structure_ebs_volume(tokens)? {
                    collected.push(member);
                }
            }
            // Discard the EndArray token itself; the peek above proved it is Ok.
            tokens.next().transpose().unwrap();
            Ok(Some(collected))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional
/// [`crate::model::AutoScalingPolicyStateChangeReason`] from the JSON token
/// stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped.
pub fn deser_structure_auto_scaling_policy_state_change_reason<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::AutoScalingPolicyStateChangeReason>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::AutoScalingPolicyStateChangeReason::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Code" => {
                                builder = builder.set_code(
                                    smithy_json::deserialize::token::expect_string_or_null(tokens.next())?.map(|s|
                                        s.to_unescaped().map(|u|
                                            crate::model::AutoScalingPolicyStateChangeReasonCode::from(u.as_ref())
                                        )
                                    ).transpose()?
                                );
                            }
                            "Message" => {
                                builder = builder.set_message(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::ScalingRule`] from the JSON token
/// stream.
///
/// Nested members (`Action`, `Trigger`) are delegated to their own
/// deserializers. Expects either `null` (yields `Ok(None)`) or a JSON object;
/// unrecognized keys are skipped.
pub fn deser_structure_scaling_rule<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ScalingRule>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ScalingRule::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Description" => {
                                builder = builder.set_description(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Action" => {
                                builder = builder.set_action(
                                    crate::json_deser::deser_structure_scaling_action(tokens)?,
                                );
                            }
                            "Trigger" => {
                                builder = builder.set_trigger(
                                    crate::json_deser::deser_structure_scaling_trigger(tokens)?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional list of [`crate::model::InstanceGroupDetail`] from
/// the JSON token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON array; `null` array
/// members are dropped rather than stored.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_instance_group_detail_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::InstanceGroupDetail>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        // Consume the EndArray token; the peek above guarantees next() is Ok.
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value =
                            crate::json_deser::deser_structure_instance_group_detail(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::PlacementType`] from the JSON
/// token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped.
pub fn deser_structure_placement_type<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::PlacementType>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::PlacementType::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "AvailabilityZone" => {
                                builder = builder.set_availability_zone(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "AvailabilityZones" => {
                                builder = builder.set_availability_zones(
                                    crate::json_deser::deser_list_xml_string_max_len256_list(
                                        tokens,
                                    )?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::StepDetail`] from the JSON token
/// stream.
///
/// Both members are nested structures delegated to their own deserializers.
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped.
pub fn deser_structure_step_detail<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::StepDetail>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::StepDetail::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "StepConfig" => {
                                builder = builder.set_step_config(
                                    crate::json_deser::deser_structure_step_config(tokens)?,
                                );
                            }
                            "ExecutionStatusDetail" => {
                                builder = builder.set_execution_status_detail(
                                    crate::json_deser::deser_structure_step_execution_status_detail(tokens)?
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::BootstrapActionDetail`] from the
/// JSON token stream.
///
/// Its single member is a nested structure delegated to its own deserializer.
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped.
pub fn deser_structure_bootstrap_action_detail<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::BootstrapActionDetail>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::BootstrapActionDetail::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "BootstrapActionConfig" => {
                                builder = builder.set_bootstrap_action_config(
                                    crate::json_deser::deser_structure_bootstrap_action_config(
                                        tokens,
                                    )?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceFleetStateChangeReason`]
/// from the JSON token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped.
pub fn deser_structure_instance_fleet_state_change_reason<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceFleetStateChangeReason>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceFleetStateChangeReason::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Code" => {
                                builder = builder.set_code(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceFleetStateChangeReasonCode::from(
                                                u.as_ref(),
                                            )
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "Message" => {
                                builder = builder.set_message(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceFleetTimeline`] from the
/// JSON token stream.
///
/// All timestamp members are decoded using the epoch-seconds wire format.
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped.
pub fn deser_structure_instance_fleet_timeline<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceFleetTimeline>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceFleetTimeline::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "CreationDateTime" => {
                                builder = builder.set_creation_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "ReadyDateTime" => {
                                builder = builder.set_ready_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "EndDateTime" => {
                                builder = builder.set_end_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceTypeSpecification`] from
/// the JSON token stream.
///
/// Scalar members are decoded in place; list members (`Configurations`,
/// `EbsBlockDevices`) are delegated to their list deserializers. Expects
/// either `null` (yields `Ok(None)`) or a JSON object; unrecognized keys are
/// skipped.
pub fn deser_structure_instance_type_specification<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceTypeSpecification>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceTypeSpecification::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "InstanceType" => {
                                builder = builder.set_instance_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "WeightedCapacity" => {
                                builder = builder.set_weighted_capacity(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "BidPrice" => {
                                builder = builder.set_bid_price(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "BidPriceAsPercentageOfOnDemandPrice" => {
                                builder = builder.set_bid_price_as_percentage_of_on_demand_price(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_f64()),
                                );
                            }
                            "Configurations" => {
                                builder = builder.set_configurations(
                                    crate::json_deser::deser_list_configuration_list(tokens)?,
                                );
                            }
                            "EbsBlockDevices" => {
                                builder = builder.set_ebs_block_devices(
                                    crate::json_deser::deser_list_ebs_block_device_list(tokens)?,
                                );
                            }
                            "EbsOptimized" => {
                                builder = builder.set_ebs_optimized(
                                    smithy_json::deserialize::token::expect_bool_or_null(
                                        tokens.next(),
                                    )?,
                                );
                            }
                            "CustomAmiId" => {
                                builder = builder.set_custom_ami_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::SpotProvisioningSpecification`]
/// from the JSON token stream.
///
/// Expects either `null` (yields `Ok(None)`) or a JSON object; unrecognized
/// keys are skipped.
pub fn deser_structure_spot_provisioning_specification<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::SpotProvisioningSpecification>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::SpotProvisioningSpecification::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "TimeoutDurationMinutes" => {
                                builder = builder.set_timeout_duration_minutes(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "TimeoutAction" => {
                                builder = builder.set_timeout_action(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::SpotProvisioningTimeoutAction::from(
                                                u.as_ref(),
                                            )
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "BlockDurationMinutes" => {
                                builder = builder.set_block_duration_minutes(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "AllocationStrategy" => {
                                builder = builder.set_allocation_strategy(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::SpotProvisioningAllocationStrategy::from(
                                                u.as_ref(),
                                            )
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: skip keys this client doesn't know.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::OnDemandProvisioningSpecification`]
/// from a peekable JSON token stream.
///
/// `null` maps to `Ok(None)`; a JSON object is walked key-by-key into the
/// model builder (nested `CapacityReservationOptions` is delegated to its own
/// deserializer); any other leading token is an error. Unknown keys are skipped.
pub fn deser_structure_on_demand_provisioning_specification<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::OnDemandProvisioningSpecification>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::OnDemandProvisioningSpecification::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "AllocationStrategy" => {
                                builder = builder.set_allocation_strategy(
                                    smithy_json::deserialize::token::expect_string_or_null(tokens.next())?.map(|s|
                                        s.to_unescaped().map(|u|
                                            crate::model::OnDemandProvisioningAllocationStrategy::from(u.as_ref())
                                        )
                                    ).transpose()?
                                );
                            }
                            "CapacityReservationOptions" => {
                                builder = builder.set_capacity_reservation_options(
                                    crate::json_deser::deser_structure_on_demand_capacity_reservation_options(tokens)?
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceGroupStateChangeReason`]
/// from a peekable JSON token stream.
///
/// `Code` is converted into the `InstanceGroupStateChangeReasonCode` enum and
/// `Message` into an owned `String`; `null` yields `Ok(None)` and any other
/// leading token is an error. Unknown keys are skipped.
pub fn deser_structure_instance_group_state_change_reason<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceGroupStateChangeReason>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceGroupStateChangeReason::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Code" => {
                                builder = builder.set_code(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceGroupStateChangeReasonCode::from(
                                                u.as_ref(),
                                            )
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "Message" => {
                                builder = builder.set_message(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceGroupTimeline`]
/// from a peekable JSON token stream.
///
/// All three fields (`CreationDateTime`, `ReadyDateTime`, `EndDateTime`) are
/// epoch-seconds timestamps. `null` yields `Ok(None)`; unknown keys are skipped.
pub fn deser_structure_instance_group_timeline<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceGroupTimeline>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceGroupTimeline::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "CreationDateTime" => {
                                builder = builder.set_creation_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "ReadyDateTime" => {
                                builder = builder.set_ready_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "EndDateTime" => {
                                builder = builder.set_end_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::EbsBlockDevice`]
/// from a peekable JSON token stream.
///
/// `VolumeSpecification` is delegated to its nested deserializer; `Device`
/// is an owned string. `null` yields `Ok(None)`; unknown keys are skipped.
pub fn deser_structure_ebs_block_device<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::EbsBlockDevice>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::EbsBlockDevice::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "VolumeSpecification" => {
                                builder = builder.set_volume_specification(
                                    crate::json_deser::deser_structure_volume_specification(
                                        tokens,
                                    )?,
                                );
                            }
                            "Device" => {
                                builder = builder.set_device(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceResizePolicy`]
/// from a peekable JSON token stream.
///
/// The two instance-ID lists are delegated to the shared EC2-instance-ID list
/// deserializer; the timeout is an `i32`. `null` yields `Ok(None)`; unknown
/// keys are skipped.
pub fn deser_structure_instance_resize_policy<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceResizePolicy>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceResizePolicy::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "InstancesToTerminate" => {
                                builder = builder.set_instances_to_terminate(
                                    crate::json_deser::deser_list_ec2_instance_ids_list(tokens)?,
                                );
                            }
                            "InstancesToProtect" => {
                                builder = builder.set_instances_to_protect(
                                    crate::json_deser::deser_list_ec2_instance_ids_list(tokens)?,
                                );
                            }
                            "InstanceTerminationTimeout" => {
                                builder = builder.set_instance_termination_timeout(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceStateChangeReason`]
/// from a peekable JSON token stream.
///
/// `Code` is converted into the `InstanceStateChangeReasonCode` enum and
/// `Message` into an owned `String`. `null` yields `Ok(None)`; unknown keys
/// are skipped.
pub fn deser_structure_instance_state_change_reason<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceStateChangeReason>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceStateChangeReason::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Code" => {
                                builder = builder.set_code(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceStateChangeReasonCode::from(
                                                u.as_ref(),
                                            )
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "Message" => {
                                builder = builder.set_message(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceTimeline`]
/// from a peekable JSON token stream.
///
/// All three fields (`CreationDateTime`, `ReadyDateTime`, `EndDateTime`) are
/// epoch-seconds timestamps. `null` yields `Ok(None)`; unknown keys are skipped.
pub fn deser_structure_instance_timeline<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceTimeline>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceTimeline::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "CreationDateTime" => {
                                builder = builder.set_creation_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "ReadyDateTime" => {
                                builder = builder.set_ready_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "EndDateTime" => {
                                builder = builder.set_end_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::EbsVolume`]
/// from a peekable JSON token stream.
///
/// Both `Device` and `VolumeId` are owned strings. `null` yields `Ok(None)`;
/// unknown keys are skipped.
pub fn deser_structure_ebs_volume<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::EbsVolume>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::EbsVolume::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Device" => {
                                builder = builder.set_device(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "VolumeId" => {
                                builder = builder.set_volume_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::ScalingAction`]
/// from a peekable JSON token stream.
///
/// `Market` is converted into the `MarketType` enum; the simple scaling
/// policy is delegated to its nested deserializer. `null` yields `Ok(None)`;
/// unknown keys are skipped.
pub fn deser_structure_scaling_action<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ScalingAction>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ScalingAction::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Market" => {
                                builder = builder.set_market(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::MarketType::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            "SimpleScalingPolicyConfiguration" => {
                                builder = builder.set_simple_scaling_policy_configuration(
                                    crate::json_deser::deser_structure_simple_scaling_policy_configuration(tokens)?
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::ScalingTrigger`]
/// from a peekable JSON token stream.
///
/// The only known key, `CloudWatchAlarmDefinition`, is delegated to its
/// nested deserializer. `null` yields `Ok(None)`; unknown keys are skipped.
pub fn deser_structure_scaling_trigger<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ScalingTrigger>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ScalingTrigger::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "CloudWatchAlarmDefinition" => {
                                builder = builder.set_cloud_watch_alarm_definition(
                                    crate::json_deser::deser_structure_cloud_watch_alarm_definition(tokens)?
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::InstanceGroupDetail`]
/// from a peekable JSON token stream.
///
/// Maps string fields to owned `String`s, `Market`/`InstanceRole`/`State` to
/// their model enums, counts to `i32`, and the four `*DateTime` fields to
/// epoch-seconds timestamps. `null` yields `Ok(None)`; unknown keys are skipped.
pub fn deser_structure_instance_group_detail<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstanceGroupDetail>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstanceGroupDetail::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "InstanceGroupId" => {
                                builder = builder.set_instance_group_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Market" => {
                                builder = builder.set_market(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::MarketType::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            "InstanceRole" => {
                                builder = builder.set_instance_role(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceRoleType::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "BidPrice" => {
                                builder = builder.set_bid_price(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "InstanceType" => {
                                builder = builder.set_instance_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "InstanceRequestCount" => {
                                builder = builder.set_instance_request_count(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "InstanceRunningCount" => {
                                builder = builder.set_instance_running_count(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "State" => {
                                builder = builder.set_state(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::InstanceGroupState::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "LastStateChangeReason" => {
                                builder = builder.set_last_state_change_reason(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "CreationDateTime" => {
                                builder = builder.set_creation_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "StartDateTime" => {
                                builder = builder.set_start_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "ReadyDateTime" => {
                                builder = builder.set_ready_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "EndDateTime" => {
                                builder = builder.set_end_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "CustomAmiId" => {
                                builder = builder.set_custom_ami_id(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::StepConfig`]
/// from a peekable JSON token stream.
///
/// `ActionOnFailure` is converted into its model enum and `HadoopJarStep`
/// is delegated to its nested deserializer. `null` yields `Ok(None)`;
/// unknown keys are skipped.
pub fn deser_structure_step_config<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::StepConfig>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::StepConfig::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "ActionOnFailure" => {
                                builder = builder.set_action_on_failure(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::ActionOnFailure::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "HadoopJarStep" => {
                                builder = builder.set_hadoop_jar_step(
                                    crate::json_deser::deser_structure_hadoop_jar_step_config(
                                        tokens,
                                    )?,
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::StepExecutionStatusDetail`]
/// from a peekable JSON token stream.
///
/// `State` is converted into the `StepExecutionState` enum, the three
/// `*DateTime` fields are epoch-seconds timestamps, and
/// `LastStateChangeReason` is an owned string. `null` yields `Ok(None)`;
/// unknown keys are skipped.
pub fn deser_structure_step_execution_status_detail<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::StepExecutionStatusDetail>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::StepExecutionStatusDetail::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "State" => {
                                builder = builder.set_state(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::StepExecutionState::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "CreationDateTime" => {
                                builder = builder.set_creation_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "StartDateTime" => {
                                builder = builder.set_start_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "EndDateTime" => {
                                builder = builder.set_end_date_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            "LastStateChangeReason" => {
                                builder = builder.set_last_state_change_reason(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::BootstrapActionConfig`]
/// from a peekable JSON token stream.
///
/// `Name` is an owned string; `ScriptBootstrapAction` is delegated to its
/// nested deserializer. `null` yields `Ok(None)`; unknown keys are skipped.
pub fn deser_structure_bootstrap_action_config<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::BootstrapActionConfig>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::BootstrapActionConfig::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Name" => {
                                builder = builder.set_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "ScriptBootstrapAction" => {
                                builder = builder.set_script_bootstrap_action(
                                    crate::json_deser::deser_structure_script_bootstrap_action_config(tokens)?
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::OnDemandCapacityReservationOptions`]
/// from a peekable JSON token stream.
///
/// `UsageStrategy` and `CapacityReservationPreference` are converted into
/// their model enums; the resource-group ARN is an owned string. `null`
/// yields `Ok(None)`; unknown keys are skipped.
pub fn deser_structure_on_demand_capacity_reservation_options<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::OnDemandCapacityReservationOptions>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::OnDemandCapacityReservationOptions::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "UsageStrategy" => {
                                builder = builder.set_usage_strategy(
                                    smithy_json::deserialize::token::expect_string_or_null(tokens.next())?.map(|s|
                                        s.to_unescaped().map(|u|
                                            crate::model::OnDemandCapacityReservationUsageStrategy::from(u.as_ref())
                                        )
                                    ).transpose()?
                                );
                            }
                            "CapacityReservationPreference" => {
                                builder = builder.set_capacity_reservation_preference(
                                    smithy_json::deserialize::token::expect_string_or_null(tokens.next())?.map(|s|
                                        s.to_unescaped().map(|u|
                                            crate::model::OnDemandCapacityReservationPreference::from(u.as_ref())
                                        )
                                    ).transpose()?
                                );
                            }
                            "CapacityReservationResourceGroupArn" => {
                                builder = builder.set_capacity_reservation_resource_group_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::VolumeSpecification`]
/// from a peekable JSON token stream.
///
/// `VolumeType` is an owned string; `Iops` and `SizeInGB` are `i32`s.
/// `null` yields `Ok(None)`; unknown keys are skipped.
pub fn deser_structure_volume_specification<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::VolumeSpecification>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::VolumeSpecification::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "VolumeType" => {
                                builder = builder.set_volume_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Iops" => {
                                builder = builder.set_iops(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "SizeInGB" => {
                                builder = builder.set_size_in_gb(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional list of EC2 instance IDs (`Vec<String>`)
/// from a peekable JSON token stream.
///
/// `null` yields `Ok(None)`; a JSON array yields `Ok(Some(items))` with any
/// `null` elements dropped. Any other leading token is an error.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_ec2_instance_ids_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        // Peek confirmed the end-of-array token is Ok, so the unwrap cannot panic.
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value =
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?;
                        // Null array elements are silently dropped rather than stored.
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional [`crate::model::SimpleScalingPolicyConfiguration`]
/// from a peekable JSON token stream.
///
/// `AdjustmentType` is converted into its model enum; `ScalingAdjustment`
/// and `CoolDown` are `i32`s. `null` yields `Ok(None)`; unknown keys are
/// skipped.
pub fn deser_structure_simple_scaling_policy_configuration<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::SimpleScalingPolicyConfiguration>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::SimpleScalingPolicyConfiguration::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "AdjustmentType" => {
                                builder = builder.set_adjustment_type(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::AdjustmentType::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            "ScalingAdjustment" => {
                                builder = builder.set_scaling_adjustment(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "CoolDown" => {
                                builder = builder.set_cool_down(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            // Forward compatibility: ignore keys added by newer service versions.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional `CloudWatchAlarmDefinition` object from the JSON
/// token stream.
///
/// Consumes exactly one JSON value: `null` yields `Ok(None)`, an object is
/// read field-by-field into the model builder, and any other leading token is
/// an error. Unknown object keys are skipped so newer service responses still
/// parse (forward compatibility).
pub fn deser_structure_cloud_watch_alarm_definition<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::CloudWatchAlarmDefinition>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::CloudWatchAlarmDefinition::builder();
            // One iteration per object member; `break` on the matching `}`.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            // Enum-valued string member.
                            "ComparisonOperator" => {
                                builder = builder.set_comparison_operator(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::ComparisonOperator::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "EvaluationPeriods" => {
                                builder = builder.set_evaluation_periods(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "MetricName" => {
                                builder = builder.set_metric_name(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Namespace" => {
                                builder = builder.set_namespace(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Period" => {
                                builder = builder.set_period(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_i32()),
                                );
                            }
                            "Statistic" => {
                                builder = builder.set_statistic(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::Statistic::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            // The only floating-point member of this shape.
                            "Threshold" => {
                                builder = builder.set_threshold(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|v| v.to_f64()),
                                );
                            }
                            "Unit" => {
                                builder = builder.set_unit(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::Unit::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            // Nested array delegated to the list deserializer.
                            "Dimensions" => {
                                builder = builder.set_dimensions(
                                    crate::json_deser::deser_list_metric_dimension_list(tokens)?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional `HadoopJarStepConfig` object from the JSON token
/// stream (JAR path, main class, arguments and properties).
///
/// `null` yields `Ok(None)`; an object is folded into the model builder;
/// anything else is an error. Unknown keys are skipped.
pub fn deser_structure_hadoop_jar_step_config<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::HadoopJarStepConfig>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::HadoopJarStepConfig::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            // Array of Key/Value objects.
                            "Properties" => {
                                builder = builder.set_properties(
                                    crate::json_deser::deser_list_key_value_list(tokens)?,
                                );
                            }
                            "Jar" => {
                                builder = builder.set_jar(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "MainClass" => {
                                builder = builder.set_main_class(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Array of plain strings.
                            "Args" => {
                                builder = builder.set_args(
                                    crate::json_deser::deser_list_xml_string_list(tokens)?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional `ScriptBootstrapActionConfig` from the JSON token
/// stream: `null` maps to `Ok(None)`, an object is parsed field-by-field into
/// the model builder, any other leading token is an error. Unknown keys are
/// skipped for forward compatibility.
pub fn deser_structure_script_bootstrap_action_config<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::ScriptBootstrapActionConfig>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    use smithy_json::deserialize::Token;

    match tokens.next().transpose()? {
        Some(Token::ValueNull { .. }) => Ok(None),
        Some(Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::ScriptBootstrapActionConfig::builder();
            loop {
                let token = tokens.next().transpose()?;
                match token {
                    Some(Token::EndObject { .. }) => break,
                    Some(Token::ObjectKey { key, .. }) => {
                        let field = key.to_unescaped()?;
                        match field.as_ref() {
                            "Path" => {
                                // Optional string; unescape into an owned String.
                                let path =
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?;
                                builder = builder.set_path(path);
                            }
                            "Args" => {
                                let args =
                                    crate::json_deser::deser_list_xml_string_list(tokens)?;
                                builder = builder.set_args(args);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional JSON array of `MetricDimension` objects.
/// `null` yields `Ok(None)`; an array yields `Ok(Some(vec))`; any other
/// leading token is an error. `null` array entries are dropped.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_metric_dimension_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::MetricDimension>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    use smithy_json::deserialize::Token;

    match tokens.next().transpose()? {
        Some(Token::ValueNull { .. }) => Ok(None),
        Some(Token::StartArray { .. }) => {
            let mut items = Vec::new();
            // Read elements until the closing `]` token shows up; malformed
            // input surfaces as an error from the element deserializer.
            while !matches!(tokens.peek(), Some(Ok(Token::EndArray { .. }))) {
                if let Some(item) = crate::json_deser::deser_structure_metric_dimension(tokens)? {
                    items.push(item);
                }
            }
            // Discard the EndArray token we just peeked at.
            tokens.next().transpose().unwrap();
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional JSON array of `KeyValue` objects.
/// `null` yields `Ok(None)`; an array yields `Ok(Some(vec))`; any other
/// leading token is an error. `null` array entries are dropped.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_key_value_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::KeyValue>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    use smithy_json::deserialize::Token;

    match tokens.next().transpose()? {
        Some(Token::ValueNull { .. }) => Ok(None),
        Some(Token::StartArray { .. }) => {
            let mut items = Vec::new();
            // Read elements until the closing `]` token shows up; malformed
            // input surfaces as an error from the element deserializer.
            while !matches!(tokens.peek(), Some(Ok(Token::EndArray { .. }))) {
                if let Some(item) = crate::json_deser::deser_structure_key_value(tokens)? {
                    items.push(item);
                }
            }
            // Discard the EndArray token we just peeked at.
            tokens.next().transpose().unwrap();
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional JSON array of strings.
/// `null` yields `Ok(None)`; an array yields `Ok(Some(vec))`; any other
/// leading token is an error. `null` array entries are dropped.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_xml_string_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    use smithy_json::deserialize::Token;

    match tokens.next().transpose()? {
        Some(Token::ValueNull { .. }) => Ok(None),
        Some(Token::StartArray { .. }) => {
            let mut items = Vec::new();
            // Read elements until the closing `]` token shows up.
            while !matches!(tokens.peek(), Some(Ok(Token::EndArray { .. }))) {
                let entry = smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                    .transpose()?;
                if let Some(entry) = entry {
                    items.push(entry);
                }
            }
            // Discard the EndArray token we just peeked at.
            tokens.next().transpose().unwrap();
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes an optional `MetricDimension` object (a "Key"/"Value" string
/// pair) from the JSON token stream.
///
/// `null` yields `Ok(None)`; unknown keys are skipped.
pub fn deser_structure_metric_dimension<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::MetricDimension>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::MetricDimension::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Key" => {
                                builder = builder.set_key(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "Value" => {
                                builder = builder.set_value(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
/// Deserializes an optional `KeyValue` object (a "Key"/"Value" string pair)
/// from the JSON token stream. `null` yields `Ok(None)`; unknown keys are
/// skipped for forward compatibility.
pub fn deser_structure_key_value<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::KeyValue>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    use smithy_json::deserialize::Token;

    match tokens.next().transpose()? {
        Some(Token::ValueNull { .. }) => Ok(None),
        Some(Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::KeyValue::builder();
            loop {
                let token = tokens.next().transpose()?;
                match token {
                    Some(Token::EndObject { .. }) => break,
                    Some(Token::ObjectKey { key, .. }) => {
                        let field = key.to_unescaped()?;
                        match field.as_ref() {
                            "Key" => {
                                let value =
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?;
                                builder = builder.set_key(value);
                            }
                            "Value" => {
                                let value =
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?;
                                builder = builder.set_value(value);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}
|
use std::fmt;
use header;
use header::shared;
/// The `Accept-Encoding` header
///
/// The `Accept-Encoding` header can be used by clients to indicate what
/// response encodings they accept.
///
/// Each entry carries an optional quality weight (`;q=`), represented by
/// the wrapped `QualityItem` values.
#[derive(Clone, PartialEq, Show)]
pub struct AcceptEncoding(pub Vec<shared::QualityItem<shared::Encoding>>);

// Let the newtype be used directly as the underlying Vec.
deref!(AcceptEncoding => Vec<shared::QualityItem<shared::Encoding>>);
impl header::Header for AcceptEncoding {
    fn header_name(_: Option<AcceptEncoding>) -> &'static str {
        // BUG FIX: the wire-format field name is hyphenated
        // ("Accept-Encoding", RFC 7231 §5.3.4). The previous value
        // "AcceptEncoding" could never match a real request header.
        "Accept-Encoding"
    }

    fn parse_header(raw: &[Vec<u8>]) -> Option<AcceptEncoding> {
        // Accept-Encoding is a comma-separated list of quality-weighted codings.
        shared::from_comma_delimited(raw).map(AcceptEncoding)
    }
}
impl header::HeaderFormat for AcceptEncoding {
    // Serializes the list back into its comma-delimited wire form.
    fn fmt_header(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        shared::fmt_comma_delimited(fmt, &self[])
    }
}
// Round-trip check: a raw quality-annotated header line parses into the
// expected QualityItem list (whitespace around `;q=` is tolerated).
#[test]
fn test_parse_header() {
    let a: AcceptEncoding = header::Header::parse_header([b"gzip;q=1.0, identity; q=0.5".to_vec()].as_slice()).unwrap();
    let b = AcceptEncoding(vec![
        shared::QualityItem{item: shared::Gzip, quality: 1f32},
        shared::QualityItem{item: shared::Identity, quality: 0.5f32},
    ]);
    assert_eq!(a, b);
}
|
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
pub enum Error {
Failed,
}
impl From<libk4a_sys::k4a_result_t> for Error {
fn from(result: libk4a_sys::k4a_result_t) -> Self {
match result {
libk4a_sys::k4a_result_t::K4A_RESULT_FAILED => Error::Failed,
_ => unreachable!(),
}
}
}
pub fn k4a_result(result: libk4a_sys::k4a_result_t) -> Result<(), Error> {
match result {
libk4a_sys::k4a_result_t::K4A_RESULT_SUCCEEDED => Ok(()),
_ => Err(result.into()),
}
}
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
pub enum WaitError {
Failed,
Timeout,
}
impl From<libk4a_sys::k4a_wait_result_t> for WaitError {
fn from(result: libk4a_sys::k4a_wait_result_t) -> Self {
match result {
libk4a_sys::k4a_wait_result_t::K4A_WAIT_RESULT_FAILED => WaitError::Failed,
libk4a_sys::k4a_wait_result_t::K4A_WAIT_RESULT_TIMEOUT => WaitError::Timeout,
_ => unreachable!(),
}
}
}
pub fn k4a_wait_result(wait_result: libk4a_sys::k4a_wait_result_t) -> Result<(), WaitError> {
match wait_result {
libk4a_sys::k4a_wait_result_t::K4A_WAIT_RESULT_SUCCEEDED => Ok(()),
_ => Err(wait_result.into()),
}
}
#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
pub enum StreamError {
Failed,
Eof,
}
impl From<libk4a_sys::k4a_stream_result_t> for StreamError {
fn from(result: libk4a_sys::k4a_stream_result_t) -> Self {
match result {
libk4a_sys::k4a_stream_result_t::K4A_STREAM_RESULT_FAILED => StreamError::Failed,
libk4a_sys::k4a_stream_result_t::K4A_STREAM_RESULT_EOF => StreamError::Eof,
_ => unreachable!(),
}
}
}
pub fn k4a_stream_result(wait_result: libk4a_sys::k4a_stream_result_t) -> Result<(), StreamError> {
match wait_result {
libk4a_sys::k4a_stream_result_t::K4A_STREAM_RESULT_SUCCEEDED => Ok(()),
_ => Err(wait_result.into()),
}
}
|
use ::bytes::Bytes;
use ::errors::Error;
use ::futures::future::BoxFuture;
use ::futures::sink::BoxSink;
use ::futures::stream::BoxStream;
/// Boxed future resolving to `T`, failing with the crate-wide `Error`.
pub type BoxMqttFuture<T> = BoxFuture<T, Error>;

/// Boxed sink accepting items of type `T`, failing with the crate-wide `Error`.
pub type BoxMqttSink<T> = BoxSink<T, Error>;

/// Boxed stream yielding items of type `T`, failing with the crate-wide `Error`.
pub type BoxMqttStream<T> = BoxStream<T, Error>;

/// A single delivery from a subscription: `(topic, payload)`.
pub type SubItem = (String, Bytes);

/// Stream of messages received on a subscription.
pub type SubscriptionStream = BoxMqttStream<SubItem>;
|
/*
给你一个链表,每 k 个节点一组进行翻转,请你返回翻转后的链表。
k 是一个正整数,它的值小于或等于链表的长度。
如果节点总数不是 k 的整数倍,那么请将最后剩余的节点保持原有顺序。
示例 :
给定这个链表:1->2->3->4->5
当 k = 2 时,应当返回: 2->1->4->3->5
当 k = 3 时,应当返回: 3->2->1->4->5
说明 :
你的算法只能使用常数的额外空间。
你不能只是单纯的改变节点内部的值,而是需要实际的进行节点交换。
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/reverse-nodes-in-k-group
著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
*/
// Definition for singly-linked list.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListNode {
    pub val: i32,
    pub next: Option<Box<ListNode>>,
}

impl ListNode {
    /// Creates a detached node holding `val`.
    #[inline]
    fn new(val: i32) -> Self {
        ListNode { val, next: None }
    }
}

impl Solution {
    /// Reverses the list `k` nodes at a time (LeetCode 25, "Reverse Nodes
    /// in k-Group").
    ///
    /// If fewer than `k` nodes remain at the tail, that tail keeps its
    /// original order. Nodes are relinked rather than having their values
    /// swapped, and only constant extra space is used per group.
    pub fn reverse_k_group(mut head: Option<Box<ListNode>>, k: i32) -> Option<Box<ListNode>> {
        if k == 1 {
            return head;
        }
        // Walk k links ahead. If the list is shorter than k, this group is
        // returned untouched; otherwise detach everything after the group
        // and reverse that remainder first (recursively).
        let reversed_tail = {
            let mut cursor = &mut head;
            for _ in 0..k {
                if cursor.is_none() {
                    return head;
                }
                cursor = &mut cursor.as_mut().unwrap().next;
            }
            Self::reverse_k_group(cursor.take(), k)
        };
        // Pop nodes off the front of the current group and push them onto
        // the already-reversed remainder, reversing the group in place.
        let mut pending = head;
        let rest = pending.as_mut().unwrap().next.take();
        pending.as_mut().unwrap().next = reversed_tail;
        let mut reversed = pending;
        pending = rest;
        for _ in 1..k {
            let rest = pending.as_mut().unwrap().next.take();
            pending.as_mut().unwrap().next = reversed;
            reversed = pending;
            pending = rest;
        }
        reversed
    }
}

fn main() {
    // Build 1->2->3->4->5->6 back to front, then reverse the whole list as
    // a single group of six.
    let head = (1..=6).rev().fold(None, |tail, val| {
        let mut node = ListNode::new(val);
        node.next = tail;
        Some(Box::new(node))
    });
    let new_head = Solution::reverse_k_group(head, 6);
    dbg!(new_head);
}

struct Solution {}
|
/*!
A tree-view control is a window that displays a hierarchical list of items
*/
use winapi::shared::minwindef::{WPARAM, LPARAM};
use winapi::um::winuser::{WS_VISIBLE, WS_DISABLED, WS_TABSTOP};
use winapi::um::commctrl::{HTREEITEM, TVIS_EXPANDED, TVIS_SELECTED, TVS_SHOWSELALWAYS, TVITEMW};
use crate::win32::window_helper as wh;
use crate::win32::base_helper::{check_hwnd, to_utf16, from_utf16};
use crate::{Font, NwgError};
use super::{ControlBase, ControlHandle};
use std::{mem, ptr};
#[cfg(feature="image-list")]
use winapi::um::commctrl::HIMAGELIST;
#[cfg(feature="image-list")]
use crate::ImageList;
const NOT_BOUND: &'static str = "TreeView is not yet bound to a winapi object";
const BAD_HANDLE: &'static str = "INTERNAL ERROR: TreeView handle is not HWND!";
bitflags! {
    /**
        The tree view flags:

        * VISIBLE:  The tree view is immediately visible after creation
        * DISABLED: The tree view cannot be interacted with by the user. It also has a grayed out look.
        * TAB_STOP: The tree view can be selected using tab navigation
        * ALWAYS_SHOW_SELECTION: The selection stays highlighted even when the control loses focus (win32 `TVS_SHOWSELALWAYS`)
    */
    pub struct TreeViewFlags: u32 {
        const VISIBLE = WS_VISIBLE;
        const DISABLED = WS_DISABLED;
        const TAB_STOP = WS_TABSTOP;
        const ALWAYS_SHOW_SELECTION = TVS_SHOWSELALWAYS;
    }
}
bitflags! {
    /**
        State bits of a single tree item (the previous doc here was a
        copy-paste of the TreeViewFlags description):

        * SELECTED: The item is currently selected (win32 `TVIS_SELECTED`)
        * EXPANDED: The item's list of child items is currently expanded (win32 `TVIS_EXPANDED`)
    */
    pub struct TreeItemState: u32 {
        const SELECTED = TVIS_SELECTED;
        const EXPANDED = TVIS_EXPANDED;
    }
}
/// Select the position of a new item that is about to be inserted in a TreeView
#[derive(Copy, Clone, Debug)]
pub enum TreeInsert {
    /// Inserts the item at the beginning of the list.
    First,

    /// Inserts the item at the end of the list.
    Last,

    /// Add the item as a root item
    Root,

    /// Inserts the item into the list in alphabetical order
    Sort,

    /// Insert the item after the chosen item
    After(HTREEITEM)
}
/// Possible state of a tree item regarding the "expanded/collapsed" state
// NOTE(review): variant names mirror the win32 `TVE_*` expand codes — confirm
// the exact mapping where this enum is converted for `TVM_EXPAND`.
#[derive(Copy, Clone, Debug)]
#[repr(u8)]
pub enum ExpandState {
    /// Collapse the item's child list
    Collapse,
    /// Collapse the child list and remove the child items
    CollapseReset,
    /// Expand the item's child list
    Expand,
    /// Partially expand the item
    ExpandPartial,
    /// Toggle between the expanded and collapsed states
    Toggle
}
/// An action that can be applied to a tree item. Used in events
#[derive(Copy, Clone, Debug)]
pub enum TreeItemAction {
    /// An unexpected value was passed to NWG
    Unknown,

    /// A tree item was expanded or collapsed; carries which expand action occurred
    Expand(ExpandState),

    /// The state of the item was changed; carries the previous and current state flags
    State { old: TreeItemState, new: TreeItemState }
}
/// A reference to an item in a TreeView
///
/// Wraps the raw win32 `HTREEITEM` handle; it does not own the underlying item.
#[derive(Debug)]
pub struct TreeItem {
    pub handle: HTREEITEM
}

impl TreeItem {
    /// Checks if the inner handle is null (i.e. does not refer to any item)
    pub fn is_null(&self) -> bool {
        self.handle.is_null()
    }
}
/**
A tree-view control is a window that displays a hierarchical list of items.

While a treeview can support selecting multiple items programmatically (using `select_item`), this is not fully supported
by the winapi implementation.

Requires the `tree-view` feature

**Builder parameters:**
  * `parent`:     **Required.** The tree-view parent container.
  * `position`:   The treeview position.
  * `enabled`:    If the treeview can be used by the user. It also has a grayed out look if disabled.
  * `focus`:      The control receive focus after being created
  * `flags`:      A combination of the `TreeViewFlags` values.
  * `ex_flags`:   A combination of win32 window extended flags. Unlike `flags`, ex_flags must be used straight from winapi
  * `font`:       The font used for the treeview text
  * `image_list`: Image list containing the icon to use in the tree-view

**Control events:**
  * `MousePress(_)`: Generic mouse press events on the tree view
  * `OnMouseMove`: Generic mouse move event
  * `OnMouseWheel`: Generic mouse wheel event
  * `OnTreeViewClick`: When the user has clicked the left mouse button within the control.
  * `OnTreeViewDoubleClick`: When the user has clicked the left mouse button within the control twice rapidly.
  * `OnTreeViewRightClick`: When the user has clicked the right mouse button within the control.
  * `OnTreeFocusLost`: When the control has lost the input focus
  * `OnTreeFocus`: When the control has acquired the input focus
  * `OnTreeItemDelete`: Just before an item is deleted. Also sent for all the children.
  * `OnTreeItemExpanded`: After an item was expanded or collapsed. Sends a `EventData::OnTreeItemUpdate`.
  * `OnTreeItemChanged`: After the state of an item was changed. Sends a `EventData::OnTreeItemUpdate`.
  * `OnTreeItemSelectionChanged`: After the current selection was changed. Sends a `EventData::OnTreeItemChanged`.
*/
#[derive(Default, PartialEq, Eq)]
pub struct TreeView {
    pub handle: ControlHandle
}
impl TreeView {
/// Returns a builder preconfigured with the default treeview settings
/// (100x200 size at the origin, enabled, unfocused, no font/parent set).
pub fn builder<'a>() -> TreeViewBuilder<'a> {
    TreeViewBuilder {
        size: (100, 200),
        position: (0, 0),
        enabled: true,
        focus: false,
        flags: None,
        ex_flags: 0,
        font: None,
        parent: None,
        // Only present when the optional image-list support is compiled in.
        #[cfg(feature="image-list")]
        image_list: None,
    }
}
/// Sets the image list of the treeview. Passing `None` clears the current list.
#[cfg(feature="image-list")]
pub fn set_image_list(&self, list: Option<&ImageList>) {
    use winapi::um::commctrl::{TVM_SETIMAGELIST, TVSIL_NORMAL};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    let list_handle = list.map(|l| l.handle).unwrap_or(ptr::null_mut());
    wh::send_message(handle, TVM_SETIMAGELIST, TVSIL_NORMAL, list_handle as _);
}

/// Returns the image list of the treeview or None if there is none.
/// The returned image list is not owned (dropping it does not free the handle).
#[cfg(feature="image-list")]
pub fn image_list(&self) -> Option<ImageList> {
    use winapi::um::commctrl::{TVM_GETIMAGELIST, TVSIL_NORMAL};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // A null HIMAGELIST means no list is attached to the control.
    let handle = wh::send_message(handle, TVM_GETIMAGELIST, TVSIL_NORMAL, 0) as HIMAGELIST;
    if handle.is_null() {
        None
    } else {
        Some(ImageList { handle, owned: false })
    }
}

/// Sets the image that will appear left to the item text. `index` is the index of the image in the image-list.
/// Won't do anything if the control does not have an image list or if the item is not in the tree.
/// If `on_select` is set to true, sets the icon that is used when the item is selected instead.
#[cfg(feature="image-list")]
pub fn set_item_image(&self, item: &TreeItem, index: i32, on_select: bool) {
    use winapi::um::commctrl::{TVM_SETITEMW, TVIF_IMAGE, TVIF_SELECTEDIMAGE};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // Only the field selected by `mask` is read by the control.
    let mut tree_item = blank_item();
    tree_item.hItem = item.handle;
    tree_item.mask = match on_select {
        true => TVIF_SELECTEDIMAGE,
        false => TVIF_IMAGE
    };

    match on_select {
        true => { tree_item.iSelectedImage = index; },
        false => { tree_item.iImage = index; }
    }

    wh::send_message(handle, TVM_SETITEMW, 0, &mut tree_item as *mut TVITEMW as LPARAM);
}

/// Returns the index of the image in the tree view image list.
/// If there is no image list in the control or the item is not in the control, 0 will be returned.
/// If `on_select` is set to true, returns the icon that is used when the item is selected instead.
#[cfg(feature="image-list")]
pub fn item_image(&self, item: &TreeItem, on_select: bool) -> i32 {
    use winapi::um::commctrl::{TVM_GETITEMW, TVIF_IMAGE, TVIF_SELECTEDIMAGE};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    let mut tree_item = blank_item();
    tree_item.mask = TVIF_IMAGE | TVIF_SELECTEDIMAGE;
    tree_item.hItem = item.handle;

    // TVM_GETITEMW returns 0 on failure (e.g. item not in this tree).
    match wh::send_message(handle, TVM_GETITEMW, 0, &mut tree_item as *mut TVITEMW as LPARAM) {
        0 => 0,
        _ => match on_select {
            true => tree_item.iSelectedImage,
            false => tree_item.iImage
        }
    }
}
/// Sets the text color in the treeview from its RGB components
pub fn set_text_color(&self, r: u8, g: u8, b: u8) {
    use winapi::um::commctrl::TVM_SETTEXTCOLOR;
    use winapi::um::wingdi::RGB;

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    let color = RGB(r, g, b);
    wh::send_message(handle, TVM_SETTEXTCOLOR, 0, color as _);

    // Force a repaint so the new color shows immediately.
    self.invalidate();
}

/// Returns the text color in the treeview as `[r, g, b]`
pub fn text_color(&self) -> [u8; 3] {
    use winapi::um::commctrl::TVM_GETTEXTCOLOR;
    use winapi::um::wingdi::{GetRValue, GetGValue, GetBValue};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // Unpack the COLORREF returned by the control into its channels.
    let col = wh::send_message(handle, TVM_GETTEXTCOLOR, 0, 0) as u32;

    [
        GetRValue(col),
        GetGValue(col),
        GetBValue(col),
    ]
}
/// Retrieves the amount, in pixels, that child items are indented relative to their parent items.
pub fn indent(&self) -> u32 {
    use winapi::um::commctrl::TVM_GETINDENT;

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    wh::send_message(handle, TVM_GETINDENT, 0, 0) as u32
}

/// Sets the width of indentation for a tree-view control and redraws the control to reflect the new width.
pub fn set_indent(&self, indent: u32) {
    use winapi::um::commctrl::TVM_SETINDENT;

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    wh::send_message(handle, TVM_SETINDENT, indent as _, 0);
}
/// Return the root item of the tree view if one is present.
/// If there is no root in the tree, returns `None`.
pub fn root(&self) -> Option<TreeItem> {
    use winapi::um::commctrl::TVGN_ROOT;
    // TVGN_ROOT ignores the item argument, hence the null handle.
    next_treeview_item(&self.handle, TVGN_ROOT, ptr::null_mut())
}

/// Returns the first child of an item or `None` if the item has no child or if it's not part of the tree view.
/// To iterate over all the children, use `TreeView.iter_item(&parent_item)`
pub fn first_child(&self, item: &TreeItem) -> Option<TreeItem> {
    use winapi::um::commctrl::TVGN_CHILD;
    next_treeview_item(&self.handle, TVGN_CHILD, item.handle)
}

/// Returns the next sibling in the tree or `None` if the item has no more siblings or if it's not part of the tree view
pub fn next_sibling(&self, item: &TreeItem) -> Option<TreeItem> {
    use winapi::um::commctrl::TVGN_NEXT;
    next_treeview_item(&self.handle, TVGN_NEXT, item.handle)
}

/// Returns the previous sibling in the tree or `None` if the item has no previous sibling or if it's not part of the tree view
pub fn previous_sibling(&self, item: &TreeItem) -> Option<TreeItem> {
    use winapi::um::commctrl::TVGN_PREVIOUS;
    next_treeview_item(&self.handle, TVGN_PREVIOUS, item.handle)
}

/// Returns the parent of the item in the tree or `None` if the item is root
pub fn parent(&self, item: &TreeItem) -> Option<TreeItem> {
    use winapi::um::commctrl::TVGN_PARENT;
    next_treeview_item(&self.handle, TVGN_PARENT, item.handle)
}
/// Return the currently selected item. If there is more than one selected item, returns the first one.
/// If there is no selected item, returns `None`.
pub fn selected_item(&self) -> Option<TreeItem> {
    use winapi::um::commctrl::{TVM_GETNEXTITEM, TVGN_NEXTSELECTED};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // TVGN_NEXTSELECTED with a null item yields the first selected item.
    let tree_handle = wh::send_message(handle, TVM_GETNEXTITEM, TVGN_NEXTSELECTED, 0) as HTREEITEM;
    if tree_handle.is_null() {
        None
    } else {
        Some(TreeItem { handle: tree_handle })
    }
}

/// Returns the selected items in a Treeview
/// If there are no selected items, returns an empty `Vec`.
pub fn selected_items(&self) -> Vec<TreeItem> {
    use winapi::um::commctrl::{TVM_GETNEXTITEM, TVGN_NEXTSELECTED};

    let mut items = Vec::new();

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // Walk the chain of selected items; a zero handle marks the end.
    let mut last_handle = wh::send_message(handle, TVM_GETNEXTITEM, TVGN_NEXTSELECTED, 0);
    while last_handle != 0 {
        items.push(TreeItem { handle: last_handle as _ } );
        last_handle = wh::send_message(handle, TVM_GETNEXTITEM, TVGN_NEXTSELECTED, last_handle as _);
    }

    items
}

/// Returns the number of selected items in the tree view
pub fn selected_item_count(&self) -> usize {
    use winapi::um::commctrl::{TVM_GETNEXTITEM, TVGN_NEXTSELECTED};

    let mut count = 0;

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // Same traversal as `selected_items`, counting instead of collecting.
    let mut last_handle = wh::send_message(handle, TVM_GETNEXTITEM, TVGN_NEXTSELECTED, 0);
    while last_handle != 0 {
        count += 1;
        last_handle = wh::send_message(handle, TVM_GETNEXTITEM, TVGN_NEXTSELECTED, last_handle as _);
    }

    count
}
/// Insert a new item into the TreeView and return a reference to the newly added item
pub fn insert_item<'a>(&self, new: &'a str, parent: Option<&TreeItem>, position: TreeInsert) -> TreeItem {
    use winapi::um::commctrl::{TVM_INSERTITEMW, TVINSERTSTRUCTW, TVI_FIRST, TVI_LAST, TVI_ROOT, TVI_SORT, TVIF_TEXT};
    use winapi::um::commctrl::TVINSERTSTRUCTW_u;
    use winapi::um::winnt::LPWSTR;

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // Map the high-level insert position onto the win32 pseudo-handles.
    let insert = match position {
        TreeInsert::First => TVI_FIRST,
        TreeInsert::Last => TVI_LAST,
        TreeInsert::Root => TVI_ROOT,
        TreeInsert::Sort => TVI_SORT,
        TreeInsert::After(i) => i
    };

    // `text` must stay alive until after the TVM_INSERTITEMW call below:
    // the TVITEMW only stores a raw pointer into this buffer.
    let text = to_utf16(new);

    let item = {
        let mut item: TVINSERTSTRUCTW_u = unsafe { mem::zeroed() };
        let i = unsafe { item.item_mut() };
        i.mask = TVIF_TEXT;
        i.pszText = text.as_ptr() as LPWSTR;
        item
    };

    let new_item = TVINSERTSTRUCTW {
        hParent: parent.map(|p| p.handle ).unwrap_or(ptr::null_mut()),
        hInsertAfter: insert,
        u: item
    };

    let ptr = &new_item as *const TVINSERTSTRUCTW;
    let handle = wh::send_message(handle, TVM_INSERTITEMW, 0, ptr as LPARAM) as HTREEITEM;

    self.invalidate();

    // NOTE(review): if the insertion fails the returned handle appears to be
    // null — callers can check with `TreeItem::is_null`. Confirm against the
    // win32 TVM_INSERTITEM documentation.
    TreeItem { handle }
}
/// Insert a new item with an associated lParam into the TreeView and return a reference to the newly added item
pub fn insert_item_with_param<'a>(&self, new: &'a str, parent: Option<&TreeItem>, position: TreeInsert, data: isize) -> TreeItem {
    use winapi::um::commctrl::{TVM_INSERTITEMW, TVINSERTSTRUCTW, TVI_FIRST, TVI_LAST, TVI_ROOT, TVI_SORT, TVIF_TEXT, TVIF_PARAM};
    use winapi::um::commctrl::TVINSERTSTRUCTW_u;
    use winapi::um::winnt::LPWSTR;

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // Map the high-level insert position onto the win32 pseudo-handles.
    let insert = match position {
        TreeInsert::First => TVI_FIRST,
        TreeInsert::Last => TVI_LAST,
        TreeInsert::Root => TVI_ROOT,
        TreeInsert::Sort => TVI_SORT,
        TreeInsert::After(i) => i
    };

    // `text` must stay alive until after the TVM_INSERTITEMW call below:
    // the TVITEMW only stores a raw pointer into this buffer.
    let text = to_utf16(new);

    let item = {
        let mut item: TVINSERTSTRUCTW_u = unsafe { mem::zeroed() };
        let i = unsafe { item.item_mut() };
        // Unlike `insert_item`, TVIF_PARAM is set so `data` is stored with the item.
        i.mask = TVIF_TEXT | TVIF_PARAM;
        i.pszText = text.as_ptr() as LPWSTR;
        i.lParam = data;
        item
    };

    let new_item = TVINSERTSTRUCTW {
        hParent: parent.map(|p| p.handle ).unwrap_or(ptr::null_mut()),
        hInsertAfter: insert,
        u: item
    };

    let ptr = &new_item as *const TVINSERTSTRUCTW;
    let handle = wh::send_message(handle, TVM_INSERTITEMW, 0, ptr as LPARAM) as HTREEITEM;

    self.invalidate();

    TreeItem { handle }
}
/// Remove an item and its children from the tree view
pub fn remove_item(&self, item: &TreeItem) {
    use winapi::um::commctrl::{TVM_DELETEITEM};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    wh::send_message(handle, TVM_DELETEITEM, 0, item.handle as LPARAM);
}
/// Selects the specified tree-view item and scrolls the item into view.
// NOTE(review): this only sets the TVIS_SELECTED state bit via TVM_SETITEMW;
// unlike TVM_SELECTITEM that does not obviously scroll the item into view —
// confirm the doc claim above against the win32 documentation.
pub fn select_item(&self, item: &TreeItem) {
    use winapi::um::commctrl::{TVM_SETITEMW, TVIF_STATE};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    let mut tree_item = blank_item();
    tree_item.mask = TVIF_STATE;
    tree_item.hItem = item.handle;
    // Only the bits in stateMask are affected; here SELECTED is turned on.
    tree_item.state = TVIS_SELECTED;
    tree_item.stateMask = TVIS_SELECTED;

    wh::send_message(handle, TVM_SETITEMW, 0, &mut tree_item as *mut TVITEMW as LPARAM);
}

/// Unselects an item from the treeview
pub fn unselect_item(&self, item: &TreeItem) {
    use winapi::um::commctrl::{TVM_SETITEMW, TVIF_STATE};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    let mut tree_item = blank_item();
    tree_item.mask = TVIF_STATE;
    tree_item.hItem = item.handle;
    // Same stateMask as `select_item`, but the SELECTED bit is cleared.
    tree_item.state = 0;
    tree_item.stateMask = TVIS_SELECTED;

    wh::send_message(handle, TVM_SETITEMW, 0, &mut tree_item as *mut TVITEMW as LPARAM);
}
/// Creates an iterator over the tree view items
// check_hwnd validates the control binding up front (presumably aborting with
// the NOT_BOUND / BAD_HANDLE messages) so the iterator starts from a valid state.
#[cfg(feature="tree-view-iterator")]
pub fn iter<'a>(&'a self) -> crate::TreeViewIterator<'a> {
    check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    crate::TreeViewIterator::new(self, ptr::null_mut())
}

/// Creates an iterator over the children of an item. This does not include the item itself.
#[cfg(feature="tree-view-iterator")]
pub fn iter_item<'a>(&'a self, item: &TreeItem) -> crate::TreeViewIterator<'a> {
    check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    crate::TreeViewIterator::new(self, item.handle)
}
/// Returns the text of the given item. Returns `None` if the item is not in the tree view.
/// The returned text value cannot be bigger than 260 characters.
pub fn item_text(&self, tree_item: &TreeItem) -> Option<String> {
    use winapi::um::commctrl::{TVM_GETITEMW, TVIF_TEXT, TVIF_HANDLE};

    const BUFFER_MAX: usize = 260;

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // SOUNDNESS FIX: the previous `Vec::with_capacity` + `set_len` handed
    // uninitialized memory to `from_utf16`, which is undefined behavior.
    // A zero-filled buffer is well-defined and guarantees a NUL terminator
    // even if the control writes nothing.
    let mut text_buffer = vec![0u16; BUFFER_MAX];

    let mut item: TVITEMW = blank_item();
    item.mask = TVIF_TEXT | TVIF_HANDLE;
    item.hItem = tree_item.handle;
    item.pszText = text_buffer.as_mut_ptr();
    item.cchTextMax = BUFFER_MAX as _;

    // TVM_GETITEMW returns 0 when the item could not be fetched.
    let result = wh::send_message(handle, TVM_GETITEMW, 0, &mut item as *mut TVITEMW as LPARAM);
    if result == 0 {
        return None;
    }

    Some(from_utf16(&text_buffer))
}
/// Set the text for specified item in the treeview.
pub fn set_item_text(&self, tree_item: &TreeItem, new_text: &str) {
    use winapi::um::commctrl::{TVM_SETITEMW, TVIF_TEXT};
    use winapi::um::winnt::LPWSTR;

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // `text` must stay alive until send_message returns: pszText is a raw
    // pointer into this local buffer (const cast to LPWSTR; the control
    // only reads it for a set operation).
    let text = to_utf16(new_text);

    let mut item: TVITEMW = blank_item();
    item.mask = TVIF_TEXT;
    item.hItem = tree_item.handle;
    item.pszText = text.as_ptr() as LPWSTR;

    wh::send_message(handle, TVM_SETITEMW, 0, &mut item as *mut TVITEMW as LPARAM);
}
/// Returns the lParam of the selected item. Return None if the item is not in the tree view.
pub fn item_param(&self, tree_item: &TreeItem) -> Option<isize> {
    use winapi::um::commctrl::{TVM_GETITEMW, TVIF_PARAM, TVIF_HANDLE};

    let hwnd = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    let mut query: TVITEMW = blank_item();
    query.mask = TVIF_HANDLE | TVIF_PARAM;
    query.hItem = tree_item.handle;

    // A zero result means the query failed (item not in this tree view).
    match wh::send_message(hwnd, TVM_GETITEMW, 0, &mut query as *mut TVITEMW as LPARAM) {
        0 => None,
        _ => Some(query.lParam),
    }
}
/// Returns `true` if the tree view item has children. Returns `None` if the item is not in the tree view.
pub fn item_has_children(&self, tree_item: &TreeItem) -> Option<bool> {
    use winapi::um::commctrl::{TVM_GETITEMW, TVIF_CHILDREN, TVIF_HANDLE};

    let hwnd = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    let mut query: TVITEMW = blank_item();
    query.hItem = tree_item.handle;
    query.mask = TVIF_CHILDREN | TVIF_HANDLE;

    // Zero result: the query failed, the item is not in this control.
    match wh::send_message(hwnd, TVM_GETITEMW, 0, &mut query as *mut TVITEMW as LPARAM) {
        0 => None,
        _ => Some(query.cChildren != 0),
    }
}
/// Returns the item state in the tree view or `None` if the item is not in the tree view
pub fn item_state(&self, tree_item: &TreeItem) -> Option<TreeItemState> {
    use winapi::um::commctrl::{TVM_GETITEMW, TVIF_STATE, TVIF_HANDLE};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // NOTE(review): uses mem::zeroed() directly where sibling methods use
    // blank_item(); both produce an all-zero TVITEMW, behavior is identical.
    let mut item: TVITEMW = unsafe { mem::zeroed() };
    item.hItem = tree_item.handle;
    item.mask = TVIF_STATE | TVIF_HANDLE;
    // Request all of the low state bits.
    item.stateMask = 0xFF;

    let result = wh::send_message(handle, TVM_GETITEMW, 0, &mut item as *mut TVITEMW as LPARAM);
    if result == 0 {
        return None;
    }

    // Unknown bits are silently dropped by from_bits_truncate.
    Some(TreeItemState::from_bits_truncate(item.state))
}
/// Expands or collapses the list of child items associated with the specified parent item, if any.
pub fn set_expand_state(&self, item: &TreeItem, state: ExpandState) {
    use winapi::um::commctrl::{TVM_EXPAND, TVE_COLLAPSE, TVE_COLLAPSERESET, TVE_EXPAND, TVE_EXPANDPARTIAL, TVE_TOGGLE};

    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // Map the crate-level enum onto the raw TVE_* action flags.
    let state = match state {
        ExpandState::Collapse => TVE_COLLAPSE,
        // COLLAPSERESET also discards the child items; it must be combined
        // with TVE_COLLAPSE.
        ExpandState::CollapseReset => TVE_COLLAPSE | TVE_COLLAPSERESET,
        ExpandState::Expand => TVE_EXPAND,
        ExpandState::ExpandPartial => TVE_EXPANDPARTIAL,
        ExpandState::Toggle => TVE_TOGGLE,
    };

    wh::send_message(handle, TVM_EXPAND, state as WPARAM, item.handle as LPARAM);
}
/// Ensures that a tree-view item is visible, expanding the parent item or scrolling the tree-view control, if necessary.
pub fn ensure_visible(&self, item: &TreeItem) {
    use winapi::um::commctrl::{TVM_ENSUREVISIBLE};
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    wh::send_message(handle, TVM_ENSUREVISIBLE, 0, item.handle as LPARAM);
}
/// Remove every item from the treeview by removing the root item
pub fn clear(&self) {
    use winapi::um::commctrl::{TVM_DELETEITEM, TVI_ROOT};
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    // Deleting the special TVI_ROOT pseudo-item removes all items at once.
    wh::send_message(handle, TVM_DELETEITEM, 0, TVI_ROOT as LPARAM);
}
/// Return the total number of item in the tree view
pub fn len(&self) -> usize {
    use winapi::um::commctrl::TVM_GETCOUNT;
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    wh::send_message(handle, TVM_GETCOUNT, 0, 0) as usize
}
/// Return the number of item in the tree view visible by the user
pub fn visible_len(&self) -> usize {
    use winapi::um::commctrl::TVM_GETVISIBLECOUNT;
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    wh::send_message(handle, TVM_GETVISIBLECOUNT, 0, 0) as usize
}
//
// Common methods
//
/// Invalidate the whole drawing region.
pub fn invalidate(&self) {
    use winapi::um::winuser::InvalidateRect;
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    // Null rect = whole client area; 1 = also erase the background.
    unsafe { InvalidateRect(handle, ptr::null(), 1); }
}
/// Return the font of the control, or `None` when no font is assigned.
pub fn font(&self) -> Option<Font> {
    let hwnd = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // A null font handle means the control has no font assigned.
    let font_handle = wh::get_window_font(hwnd);
    match font_handle.is_null() {
        true => None,
        false => Some(Font { handle: font_handle }),
    }
}
/// Set the font of the control
pub fn set_font(&self, font: Option<&Font>) {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    // `true` asks the control to redraw itself with the new font.
    unsafe { wh::set_window_font(handle, font.map(|f| f.handle), true); }
}
/// Return true if the control currently has the keyboard focus
pub fn focus(&self) -> bool {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    unsafe { wh::get_focus(handle) }
}
/// Set the keyboard focus on the control.
pub fn set_focus(&self) {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    unsafe { wh::set_focus(handle); }
}
/// Return true if the control user can interact with the control, return false otherwise
pub fn enabled(&self) -> bool {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    unsafe { wh::get_window_enabled(handle) }
}
/// Enable or disable the control
pub fn set_enabled(&self, v: bool) {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    unsafe { wh::set_window_enabled(handle, v) }
}
/// Return true if the control is visible to the user. Will return true even if the
/// control is outside of the parent client view (ex: at the position (10000, 10000))
pub fn visible(&self) -> bool {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    unsafe { wh::get_window_visibility(handle) }
}
/// Show or hide the control to the user
pub fn set_visible(&self, v: bool) {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    unsafe { wh::set_window_visibility(handle, v) }
}
/// Return the size of the control in the parent window
pub fn size(&self) -> (u32, u32) {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    unsafe { wh::get_window_size(handle) }
}
/// Set the size of the control in the parent window
pub fn set_size(&self, x: u32, y: u32) {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    // `false`: do not force a repaint after resizing.
    unsafe { wh::set_window_size(handle, x, y, false) }
}
/// Return the position of the control in the parent window
pub fn position(&self) -> (i32, i32) {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    unsafe { wh::get_window_position(handle) }
}
/// Set the position of the control in the parent window
pub fn set_position(&self, x: i32, y: i32) {
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    unsafe { wh::set_window_position(handle, x, y) }
}
/// Winapi class name used during control creation
pub fn class_name(&self) -> &'static str {
    winapi::um::commctrl::WC_TREEVIEW
}
/// Winapi base flags used during window creation
pub fn flags(&self) -> u32 {
    use winapi::um::commctrl::{TVS_HASBUTTONS, TVS_LINESATROOT, TVS_HASLINES, TVS_EDITLABELS};
    // NOTE(review): WS_VISIBLE, WS_TABSTOP and TVS_SHOWSELALWAYS are not in
    // the local use above — presumably imported at file scope; confirm.
    WS_VISIBLE | TVS_HASBUTTONS | TVS_LINESATROOT | TVS_HASLINES | WS_TABSTOP | TVS_SHOWSELALWAYS | TVS_EDITLABELS
}
/// Winapi flags required by the control
pub fn forced_flags(&self) -> u32 {
    use winapi::um::winuser::{WS_CHILD, WS_BORDER};
    use winapi::um::commctrl::TVS_NOTOOLTIPS;
    WS_CHILD | WS_BORDER | TVS_NOTOOLTIPS
}
/// Begins to in-place edit the specified item's text.
/// Returns `None` when the operation failed, otherwise the window handle
/// received from the control.
pub fn edit_label(&self, item: &TreeItem) -> Option<ControlHandle> {
    use winapi::um::commctrl::TVM_EDITLABELW;
    use winapi::shared::windef::HWND;

    let hwnd = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);

    // TVM_EDITLABELW returns a window handle on success, 0 on failure.
    match wh::send_message(hwnd, TVM_EDITLABELW, 0, item.handle as HTREEITEM as LPARAM) {
        0 => None,
        edit => Some(ControlHandle::Hwnd(edit as HWND)),
    }
}
/// End the in-place editing of the tree item's label.
/// The parameter f_cancel indicates whether the editing is canceled without being saved to the label.
/// If this parameter is TRUE, the system cancels editing without saving the changes. Otherwise, the system saves the changes to the label.
/// Return true if successful, otherwise return false.
pub fn end_edit_label_now(&self, f_cancel: bool) -> bool {
    use winapi::um::commctrl::TVM_ENDEDITLABELNOW;
    let handle = check_hwnd(&self.handle, NOT_BOUND, BAD_HANDLE);
    wh::send_message(handle, TVM_ENDEDITLABELNOW, f_cancel as WPARAM, 0) != 0
}
}
impl Drop for TreeView {
    // Destroy the underlying window handle when the wrapper is dropped.
    fn drop(&mut self) {
        self.handle.destroy();
    }
}
/// Builder for a TreeView
pub struct TreeViewBuilder<'a> {
    size: (i32, i32),                 // initial control size
    position: (i32, i32),             // initial position in the parent
    enabled: bool,                    // whether the control starts enabled
    focus: bool,                      // grab keyboard focus after creation
    flags: Option<TreeViewFlags>,     // window style flags override
    ex_flags: u32,                    // extended window style flags
    font: Option<&'a Font>,           // font override (global default otherwise)
    parent: Option<ControlHandle>,    // required parent control
    #[cfg(feature="image-list")]
    image_list: Option<&'a ImageList>, // optional item icon list
}
impl<'a> TreeViewBuilder<'a> {

    /// Override the window style flags used at creation.
    pub fn flags(mut self, flags: TreeViewFlags) -> TreeViewBuilder<'a> {
        self.flags = Some(flags);
        self
    }

    /// Set the extended window style flags.
    pub fn ex_flags(mut self, flags: u32) -> TreeViewBuilder<'a> {
        self.ex_flags = flags;
        self
    }

    /// Set the initial size of the control.
    pub fn size(mut self, size: (i32, i32)) -> TreeViewBuilder<'a> {
        self.size = size;
        self
    }

    /// Set the initial position of the control in its parent.
    pub fn position(mut self, pos: (i32, i32)) -> TreeViewBuilder<'a> {
        self.position = pos;
        self
    }

    /// Set whether the control starts enabled.
    pub fn enabled(mut self, e: bool) -> TreeViewBuilder<'a> {
        self.enabled = e;
        self
    }

    /// Set whether the control grabs keyboard focus after creation.
    pub fn focus(mut self, focus: bool) -> TreeViewBuilder<'a> {
        self.focus = focus;
        self
    }

    /// Set the font of the control (global default font when `None`).
    pub fn font(mut self, font: Option<&'a Font>) -> TreeViewBuilder<'a> {
        self.font = font;
        self
    }

    /// Set the parent control. Required.
    pub fn parent<C: Into<ControlHandle>>(mut self, p: C) -> TreeViewBuilder<'a> {
        self.parent = Some(p.into());
        self
    }

    /// Set the image list used for item icons.
    #[cfg(feature="image-list")]
    pub fn image_list(mut self, list: Option<&'a ImageList>) -> TreeViewBuilder<'a> {
        self.image_list = list;
        self
    }

    /// Create the tree view control and bind it to `out`.
    /// Fails with `NwgError` when no parent was set or window creation fails.
    pub fn build(self, out: &mut TreeView) -> Result<(), NwgError> {
        // Fall back to the control's default flags when none were supplied.
        let flags = self.flags.map(|f| f.bits()).unwrap_or(out.flags());

        let parent = match self.parent {
            Some(p) => Ok(p),
            None => Err(NwgError::no_parent("TreeView"))
        }?;

        // Reset `out` first so a previously-built control is destroyed
        // (see the Drop impl) before the handle is overwritten.
        *out = Default::default();

        out.handle = ControlBase::build_hwnd()
            .class_name(out.class_name())
            .forced_flags(out.forced_flags())
            .flags(flags)
            .ex_flags(self.ex_flags)
            .size(self.size)
            .position(self.position)
            .parent(Some(parent))
            .build()?;

        // Caller-provided font wins; otherwise use the global default.
        if self.font.is_some() {
            out.set_font(self.font);
        } else {
            out.set_font(Font::global_default().as_ref());
        }

        // No-op unless the "image-list" feature is enabled.
        builder_set_image_list(&self, out);

        if self.focus {
            out.set_focus();
        }

        out.set_enabled(self.enabled);

        Ok(())
    }

}
// Two TreeItem values are equal when they wrap the same HTREEITEM handle.
impl PartialEq for TreeItem {
    fn eq(&self, other: &Self) -> bool {
        self.handle == other.handle
    }
}
// Handle equality is reflexive, so full equivalence holds.
impl Eq for TreeItem {}
/// Query a related item (parent, sibling, first child, ...) via
/// `TVM_GETNEXTITEM`; `action` is the TVGN_* relation code.
/// Panics when the control handle is unbound or invalid.
fn next_treeview_item(handle: &ControlHandle, action: usize, item: HTREEITEM) -> Option<TreeItem> {
    use winapi::um::commctrl::TVM_GETNEXTITEM;

    if handle.blank() {
        panic!("{}", NOT_BOUND);
    }
    let hwnd = handle.hwnd().expect(BAD_HANDLE);

    // A null HTREEITEM result means there is no such related item.
    let next = wh::send_message(hwnd, TVM_GETNEXTITEM, action as _, item as _) as HTREEITEM;
    match next.is_null() {
        true => None,
        false => Some(TreeItem { handle: next }),
    }
}
// Apply the builder's image list to the freshly-created control, when set.
#[cfg(feature="image-list")]
fn builder_set_image_list(builder: &TreeViewBuilder, out: &TreeView) {
    if builder.image_list.is_some() {
        out.set_image_list(builder.image_list);
    }
}
// No-op stub so `build` can call this unconditionally without the feature.
#[cfg(not(feature="image-list"))]
fn builder_set_image_list(_builder: &TreeViewBuilder, _out: &TreeView) {
}
// An all-zero TVITEMW, used as the base for TVM_GETITEMW / TVM_SETITEMW
// queries. Callers fill in `mask`, `hItem` and the fields they care about.
fn blank_item() -> TVITEMW {
    TVITEMW {
        mask: 0,
        hItem: ptr::null_mut(),
        state: 0,
        stateMask: 0,
        pszText: ptr::null_mut(),
        cchTextMax: 0,
        iImage: 0,
        iSelectedImage: 0,
        cChildren: 0,
        lParam: 0
    }
}
|
//! The rectangle shape tool
use druid::{
Color, Env, EventCtx, KbKey, KeyEvent, MouseEvent, PaintCtx, Point, Rect, RenderContext,
TextLayout,
};
use crate::cubic_path::CubicPath;
use crate::design_space::DPoint;
use crate::edit_session::EditSession;
use crate::mouse::{Drag, Mouse, MouseDelegate, TaggedEvent};
use crate::point::{EntityId, PathPoint};
use crate::tools::{EditType, Tool};
/// The state of the rectangle tool.
#[derive(Debug, Clone)]
pub struct Rectangle {
    gesture: GestureState,       // current drag gesture state machine
    shift_locked: bool,          // shift held: constrain the drag to a square
    coord_text: TextLayout<String>, // cached layout for the size label
}
impl Default for Rectangle {
    fn default() -> Self {
        // Pre-configure the label layout with the UI detail font so paint()
        // only has to update the text.
        let mut layout = TextLayout::new();
        layout.set_font(crate::theme::UI_DETAIL_FONT);
        Rectangle {
            gesture: Default::default(),
            shift_locked: false,
            coord_text: layout,
        }
    }
}
// Drag gesture state machine: Ready -> Down (button pressed) ->
// Begun (drag in progress) -> Finished (path committed), then back to Ready.
#[derive(Debug, Clone, Copy, PartialEq)]
enum GestureState {
    Ready,
    Down(DPoint),
    Begun { start: DPoint, current: DPoint },
    Finished,
}
impl Rectangle {
    /// The start/end points of the in-progress drag, with the end point
    /// square-constrained when shift is held. `None` unless a drag has begun.
    fn pts_for_rect(&self) -> Option<(DPoint, DPoint)> {
        let (start, current) = match self.gesture {
            GestureState::Begun { start, current } => (start, current),
            _ => return None,
        };

        if !self.shift_locked {
            return Some((start, current));
        }

        // Constrain to a square: use |dx| for both axes, preserving the
        // vertical direction of the drag.
        let mut delta = current - start;
        let vertical_sign = if delta.y.signum() > 0.0 { 1.0 } else { -1.0 };
        delta.y = vertical_sign * delta.x.abs();
        Some((start, start + delta))
    }

    /// The current drag rectangle in screen coordinates, if any.
    fn current_drag_rect(&self, data: &EditSession) -> Option<Rect> {
        let (start, current) = self.pts_for_rect()?;
        let screen_start = data.viewport.to_screen(start);
        let screen_current = data.viewport.to_screen(current);
        Some(Rect::from_points(screen_start, screen_current))
    }
}
impl Tool for Rectangle {
    fn name(&self) -> &'static str {
        "Rectangle"
    }

    /// Abort any in-progress gesture.
    fn cancel(
        &mut self,
        mouse: &mut Mouse,
        _ctx: &mut EventCtx,
        data: &mut EditSession,
    ) -> Option<EditType> {
        // Delegates to Mouse::cancel, which calls back into our
        // MouseDelegate::cancel to reset the gesture state.
        mouse.cancel(data, self);
        None
    }

    /// Pressing shift constrains the rectangle to a square; repaint so the
    /// preview updates immediately.
    fn key_down(
        &mut self,
        key: &KeyEvent,
        ctx: &mut EventCtx,
        _: &mut EditSession,
        _: &Env,
    ) -> Option<EditType> {
        if key.key == KbKey::Shift {
            self.shift_locked = true;
            ctx.request_paint();
        }
        None
    }

    /// Releasing shift removes the square constraint.
    fn key_up(
        &mut self,
        key: &KeyEvent,
        ctx: &mut EventCtx,
        _: &mut EditSession,
        _: &Env,
    ) -> Option<EditType> {
        if key.key == KbKey::Shift {
            self.shift_locked = false;
            ctx.request_paint();
        }
        None
    }

    fn mouse_event(
        &mut self,
        event: TaggedEvent,
        mouse: &mut Mouse,
        ctx: &mut EventCtx,
        data: &mut EditSession,
        _: &Env,
    ) -> Option<EditType> {
        let pre_state = self.gesture;
        mouse.mouse_event(event, data, self);
        // Only repaint when the gesture state actually changed.
        if pre_state != self.gesture {
            ctx.request_paint();
        }
        // A finished gesture already added a path to the session (see
        // MouseDelegate::left_up); report it as an edit and rearm.
        if self.gesture == GestureState::Finished {
            self.gesture = GestureState::Ready;
            Some(EditType::Normal)
        } else {
            None
        }
    }

    /// Draw the drag preview rectangle plus a size label near its corner.
    fn paint(&mut self, ctx: &mut PaintCtx, data: &EditSession, env: &Env) {
        const LABEL_PADDING: f64 = 4.0;
        if let Some(rect) = self.current_drag_rect(data) {
            ctx.stroke(rect, &Color::BLACK, 1.0);
            // pts_for_rect is Some whenever current_drag_rect is Some.
            let (start, current) = self.pts_for_rect().unwrap();
            let size = start - current;
            let label_text = format!("{}, {}", size.x.abs(), size.y.abs());
            self.coord_text.set_text(label_text);
            self.coord_text.rebuild_if_needed(ctx.text(), env);
            let text_size = self.coord_text.size();
            let text_x = rect.x1 - text_size.width - LABEL_PADDING;
            let text_y = rect.y1 + LABEL_PADDING;
            let text_pos = Point::new(text_x, text_y);
            // Translucent backdrop so the label stays readable over glyphs.
            let rect = Rect::from_origin_size(text_pos, text_size)
                .inset(2.0)
                .to_rounded_rect(2.0);
            ctx.fill(rect, &Color::WHITE.with_alpha(0.5));
            self.coord_text.draw(ctx, text_pos);
        }
    }
}
impl MouseDelegate<EditSession> for Rectangle {
    // Reset the state machine when the gesture is cancelled.
    fn cancel(&mut self, _data: &mut EditSession) {
        self.gesture = GestureState::Ready;
    }

    // Single click arms the gesture; double clicks are ignored.
    fn left_down(&mut self, event: &MouseEvent, data: &mut EditSession) {
        if event.count == 1 {
            let pt = data.viewport.from_screen(event.pos);
            self.gesture = GestureState::Down(pt);
            self.shift_locked = event.mods.shift();
        }
    }

    // On release, commit the dragged rectangle as a new closed path.
    fn left_up(&mut self, _event: &MouseEvent, data: &mut EditSession) {
        if let Some((start, current)) = self.pts_for_rect() {
            let path = make_rect_path(start, current);
            data.paste_paths(vec![path.into()]);
            self.gesture = GestureState::Finished;
        }
    }

    // Transition Down -> Begun once the drag threshold is crossed.
    fn left_drag_began(&mut self, event: Drag, data: &mut EditSession) {
        if let GestureState::Down(start) = self.gesture {
            let current = data.viewport.from_screen(event.current.pos);
            self.gesture = GestureState::Begun { start, current };
        }
    }

    // Track the moving corner during the drag.
    fn left_drag_changed(&mut self, drag: Drag, data: &mut EditSession) {
        if let GestureState::Begun { current, .. } = &mut self.gesture {
            *current = data.viewport.from_screen(drag.current.pos);
        }
    }
}
// A new tool starts with no gesture in progress.
impl Default for GestureState {
    fn default() -> Self {
        GestureState::Ready
    }
}
/// Build a closed rectangular path from two opposite corners `p1` and `p3`.
/// `p2`/`p4` are the remaining corners derived by mixing the coordinates.
fn make_rect_path(p1: DPoint, p3: DPoint) -> CubicPath {
    let path_id = EntityId::next();
    let p2 = DPoint::new(p3.x, p1.y);
    let p4 = DPoint::new(p1.x, p3.y);
    // first point goes last in closed paths
    let points = vec![
        PathPoint::on_curve(path_id, p2),
        PathPoint::on_curve(path_id, p3),
        PathPoint::on_curve(path_id, p4),
        PathPoint::on_curve(path_id, p1),
    ];
    // `true` marks the path as closed.
    CubicPath::from_raw_parts(path_id, points, None, true)
}
|
extern crate minifb;
extern crate cpal;
extern crate futures;
#[macro_use]
extern crate clap;
extern crate combine;
extern crate rustual_boy_core;
extern crate rustual_boy_middleware;
mod argparse;
#[macro_use]
mod logging;
mod command;
mod cpal_driver;
mod emulator;
mod system_time_source;
mod wave_file_buffer_sink;
use rustual_boy_core::rom::*;
use rustual_boy_core::sram::*;
use rustual_boy_core::vsu::*;
use cpal_driver::*;
use emulator::*;
/// Emulator entry point: parse args, load ROM and SRAM, run the emulator,
/// and persist SRAM afterwards if the game used it.
fn main() {
    let config = argparse::parse_args();

    logln!("Loading ROM file {}", config.rom_path);

    // ROM loading is fatal on failure.
    let rom = Rom::load(&config.rom_path).unwrap();

    // Report size in MB when it is an exact MB multiple range, else KB.
    log!("ROM size: ");
    if rom.size() >= 1024 * 1024 {
        logln!("{}MB", rom.size() / 1024 / 1024);
    } else {
        logln!("{}KB", rom.size() / 1024);
    }

    logln!("Header info:");
    logln!(" name: \"{}\"", rom.name().unwrap());
    logln!(" maker code: \"{}\"", rom.maker_code().unwrap());
    logln!(" game code: \"{}\"", rom.game_code().unwrap());
    // NOTE(review): "{:#02}" looks like it was meant to be "{:02}"
    // (zero-padded); "#" alone has no effect here — confirm intent.
    logln!(" game version: 1.{:#02}", rom.game_version_byte());

    // Missing/corrupt SRAM is non-fatal: start with a fresh save.
    logln!("Attempting to load SRAM file: {}", config.sram_path);
    let sram = match Sram::load(&config.sram_path) {
        Ok(sram) => {
            logln!(" SRAM loaded successfully");
            sram
        }
        Err(err) => {
            logln!(" Couldn't load SRAM file: {}", err);
            Sram::new()
        }
    };

    // 100ms of audio buffering at the VSU sample rate.
    let audio_driver = CpalDriver::new(SAMPLE_RATE as _, 100).unwrap();
    let audio_buffer_sink = audio_driver.sink();
    let time_source = audio_driver.time_source();

    let mut emulator = Emulator::new(rom, sram, audio_buffer_sink, time_source);
    emulator.run();

    // Only write SRAM back when the game actually used it.
    if emulator.virtual_boy.interconnect.sram.size() > 0 {
        logln!("SRAM used, saving to {}", config.sram_path);
        emulator.virtual_boy.interconnect.sram.save(config.sram_path).unwrap();
    }
}
|
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT.
//
use super::*;
/// Stream abstractions that hide the difference between TLS and non-TLS sockets.
#[macro_use] pub mod streams;
/// Factories to create streams.
pub mod stream_factories;
include!("streaming_socket_reactor.rs");
include!("StreamingSocketCommon.rs");
include!("StreamingSocketInternetProtocolVersion4Reactor.rs");
include!("StreamingSocketInternetProtocolVersion6Reactor.rs");
include!("StreamingSocketReactor.rs");
include!("StreamingSocketUnixDomainReactor.rs");
|
use crate::tuples::Tuple4D;
use crate::matrices::Mat;
/// A 4x4 affine transformation matrix wrapper used by the ray tracer.
pub struct Transformation {
    pub transformation: Mat // the underlying 4x4 matrix
}
impl Transformation {
    /// Build a view (camera) transformation looking from `from` towards
    /// `to`, with `up` giving the approximate up direction.
    pub fn view(from: Tuple4D, to: Tuple4D, up: Tuple4D) -> Transformation {
        let fwd = to.sub(&from).normalized();
        let up_norm = up.normalized();
        let left = fwd.cross(&up_norm);
        // Recompute "true up" so the basis is orthogonal even when `up`
        // is only approximately perpendicular to the view direction.
        let true_up = left.cross(&fwd);
        let orientation = Transformation{ transformation: Mat::new(vec![
            left.x, left.y, left.z, 0.0,
            true_up.x, true_up.y, true_up.z, 0.0,
            -fwd.x, -fwd.y, -fwd.z, 0.0,
            0.0, 0.0, 0.0, 1.0
        ], 4)
        };
        // Move the eye to the origin, then rotate into the camera basis.
        let trans = Transformation::translation(Tuple4D::new_point(-from.x, -from.y, -from.z));
        // A plain slice literal avoids the needless `&vec![...]` allocation.
        Transformation::chain(&[orientation, trans])
    }

    /// Multiply the transformations together left-to-right into a single
    /// combined transformation.
    ///
    /// # Panics
    /// Panics if `transformations` is empty.
    pub fn chain(transformations: &[Transformation]) -> Transformation {
        let (first, rest) = transformations
            .split_first()
            .expect("chain requires at least one transformation");
        let transformation = rest.iter().fold(
            first.transformation.clone(),
            |acc, t| acc.mat_mul(&t.transformation),
        );
        Transformation{transformation}
    }

    /// The identity transformation (leaves every tuple unchanged).
    pub fn identity() -> Transformation {
        let transformation = Mat::new(vec![
            1.0, 0.0, 0.0, 0.0,
            0.0, 1.0, 0.0, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0
        ], 4);
        Transformation{transformation}
    }

    /// Translation by the vector components of `by`.
    pub fn translation(by: Tuple4D) -> Transformation {
        let transformation = Mat::new(vec![
            1.0, 0.0, 0.0, by.x,
            0.0, 1.0, 0.0, by.y,
            0.0, 0.0, 1.0, by.z,
            0.0, 0.0, 0.0, 1.0
        ], 4);
        Transformation{transformation}
    }

    /// Per-axis scaling by the components of `by`.
    pub fn scale(by: Tuple4D) -> Transformation {
        let transformation = Mat::new(vec![
            by.x, 0.0, 0.0, 0.0,
            0.0, by.y, 0.0, 0.0,
            0.0, 0.0, by.z, 0.0,
            0.0, 0.0, 0.0, 1.0
        ], 4);
        Transformation{transformation}
    }

    /// Rotation around the x axis; `by` is in degrees.
    pub fn rotate_x(by: f64) -> Transformation {
        let r = by.to_radians();
        let transformation = Mat::new(vec![
            1.0, 0.0, 0.0, 0.0,
            0.0, f64::cos(r), -f64::sin(r), 0.0,
            0.0, f64::sin(r), f64::cos(r), 0.0,
            0.0, 0.0, 0.0, 1.0
        ], 4);
        Transformation{transformation}
    }

    /// Rotation around the y axis; `by` is in degrees.
    pub fn rotate_y(by: f64) -> Transformation {
        let r = by.to_radians();
        let transformation = Mat::new(vec![
            f64::cos(r), 0.0, f64::sin(r), 0.0,
            0.0, 1.0, 0.0, 0.0,
            -f64::sin(r), 0.0, f64::cos(r), 0.0,
            0.0, 0.0, 0.0, 1.0
        ], 4);
        Transformation{transformation}
    }

    /// Rotation around the z axis; `by` is in degrees.
    pub fn rotate_z(by: f64) -> Transformation {
        let r = by.to_radians();
        let transformation = Mat::new(vec![
            f64::cos(r), -f64::sin(r), 0.0, 0.0,
            f64::sin(r), f64::cos(r), 0.0, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0
        ], 4);
        Transformation{transformation}
    }

    /// The inverse transformation, or `None` when the matrix is singular.
    pub fn inverse(&self) -> Option<Transformation> {
        self.transformation
            .inverse()
            .map(|mat| Transformation{transformation: mat})
    }

    /// The transposed transformation (used e.g. for normal vectors).
    pub fn transpose(&self) -> Transformation {
        Transformation {transformation: self.transformation.transpose()}
    }

    /// Apply this transformation to a tuple (point or vector).
    pub fn transform(&self, tuple: &Tuple4D) -> Tuple4D {
        self.transformation.mul(tuple)
    }
}
|
extern crate semver;
#[macro_use]
extern crate structopt;
use structopt::StructOpt;
mod filter_versions;
// CLI options: an optional semver range filter plus the versions to sort.
#[derive(StructOpt)]
#[structopt(name = "semver-cli", about = "Prints valid versions sorted by SemVer precedence")]
struct Opt {
    #[structopt(short = "r", long = "range")]
    range: Option<String>, // optional semver range used to filter versions
    #[structopt(name = "VERSION")]
    versions: Vec<String>, // positional list of version strings
}
/// Parse the CLI options, filter/sort the given versions, print one per
/// line, and exit 0 on success or 1 on a bad range.
fn main() {
    let opt = Opt::from_args();

    match filter_versions::filter_and_sort(opt.versions, opt.range) {
        Ok(versions) => {
            for v in &versions {
                println!("{}", v);
            }
            std::process::exit(0);
        }
        Err(err) => {
            eprintln!("Error parsing given range: {0}", &err);
            std::process::exit(1);
        }
    }
}
|
pub use rust::Rust;
mod rust;
/// A tree of output lines: either a single line, or a nested group that is
/// rendered one indent level deeper.
pub enum TypeSection {
    Line(String),
    Indent(Vec<TypeSection>),
}

/// Render `sections` to a string. `Line`s are prefixed with
/// `current_indent` spaces and newline-terminated; `Indent` groups recurse
/// with `indent_size` extra spaces of indentation.
fn print(sections: Vec<TypeSection>, indent_size: usize, current_indent: usize) -> String {
    let mut output = String::new();
    for section in sections {
        match section {
            TypeSection::Line(line) => {
                output.push_str(&" ".repeat(current_indent));
                output.push_str(&line);
                // push('\n') avoids a one-byte &str round-trip.
                output.push('\n');
            }
            TypeSection::Indent(inner) => {
                output.push_str(&print(inner, indent_size, current_indent + indent_size));
            }
        }
    }
    output
}
|
use input_i_scanner::{scan_with, InputIScanner};

/// Reads a length `n` and a string `s`, then counts positions `i` where
/// `s[i..i+2] == "NA"` and at least one `'N'` occurs at some index `j >= i+2`.
/// Each qualifying `i` contributes exactly 1 (the inner loop breaks on the
/// first matching `'N'`).
fn main() {
    let stdin = std::io::stdin();
    let mut _i_i = InputIScanner::from(stdin.lock());
    let n = scan_with!(_i_i, usize);
    let s: Vec<char> = scan_with!(_i_i, String).chars().collect();
    let mut ans = 0;
    // NOTE(review): `n - 2` underflows for n < 2 — presumably ruled out by
    // the problem constraints; confirm.
    for i in 0..(n - 2) {
        if s[i] != 'N' {
            continue;
        }
        if s[i + 1] != 'A' {
            continue;
        }
        // Count this "NA" prefix once if any later 'N' completes it.
        for j in (i + 2)..n {
            if s[j] == 'N' {
                ans += 1;
                break;
            }
        }
    }
    println!("{}", ans);
}
|
//! Create fake blockchain data for test purposes
use crate::Storage;
use pathfinder_common::{BlockHeader, StateUpdate};
use rand::Rng;
use starknet_gateway_types::reply::transaction as gw;
pub type StorageInitializer = Vec<StorageInitializerItem>;
pub type StorageInitializerItem = (
BlockHeader,
Vec<(gw::Transaction, gw::Receipt)>,
StateUpdate,
);
/// Initialize [`Storage`] with fake blocks and state updates
/// maintaining [**limited consistency guarantees**](crate::fake::init::with_n_blocks)
pub fn with_n_blocks(storage: &Storage, n: usize) -> StorageInitializer {
    // Convenience wrapper using a fresh thread-local rng.
    let mut rng = rand::thread_rng();
    with_n_blocks_and_rng(storage, n, &mut rng)
}
/// Same as [`with_n_blocks`] except caller can specify the rng used
pub fn with_n_blocks_and_rng(
    storage: &Storage,
    n: usize,
    rng: &mut impl Rng,
) -> StorageInitializer {
    let mut connection = storage.connection().unwrap();
    let tx = connection.transaction().unwrap();

    // Generate the fake chain first, then persist everything inside a
    // single database transaction.
    let fake_data = init::with_n_blocks_and_rng(n, rng);
    fake_data
        .iter()
        .for_each(|(header, transaction_data, state_update)| {
            tx.insert_block_header(header).unwrap();
            tx.insert_transaction_data(header.hash, header.number, transaction_data)
                .unwrap();
            // Declared classes are stored with empty definitions — the fake
            // data only needs the hashes to exist.
            state_update
                .declared_cairo_classes
                .iter()
                .for_each(|&cairo_class| tx.insert_cairo_class(cairo_class, b"").unwrap());
            state_update
                .declared_sierra_classes
                .iter()
                .for_each(|(sierra_hash, casm_hash)| {
                    tx.insert_sierra_class(sierra_hash, &[], casm_hash, &[], "1.0.alpha6")
                        .unwrap()
                });
            tx.insert_state_update(header.number, state_update).unwrap();
        });
    tx.commit().unwrap();

    // Return the generated data so tests can assert against it.
    fake_data
}
/// Raw _fake state initializers_
/// Raw _fake state initializers_
pub mod init {
    use std::collections::{HashMap, HashSet};

    use super::StorageInitializer;
    use fake::{Fake, Faker};
    use pathfinder_common::state_update::{ContractUpdate, SystemContractUpdate};
    use pathfinder_common::test_utils::fake_non_empty_with_rng;
    use pathfinder_common::ContractAddress;
    use pathfinder_common::{
        state_update::ContractClassUpdate, BlockHash, BlockHeader, BlockNumber, StateCommitment,
        StateUpdate, TransactionIndex,
    };
    use rand::Rng;
    use starknet_gateway_types::reply::transaction as gw;

    /// Create fake blocks and state updates with __limited consistency guarantees__:
    /// - block headers:
    ///     - consecutive numbering starting from genesis (`0`) up to `n-1`
    ///     - parent hash wrt previous block, genesis' parent hash is `0`
    ///     - state commitment is a hash of storage and class commitments
    /// - block bodies:
    ///     - transaction indices within a block
    ///     - transaction hashes in respective receipts
    ///     - at least 1 transaction with receipt per block
    /// - state updates:
    ///     - block hashes
    ///     - old roots wrt previous state update, genesis' old root is `0`
    ///     - replaced classes for block N point to some deployed contracts from block N-1
    ///     - each storage diff has its respective nonce update
    ///     - storage entries contain at least 1 element
    ///
    pub fn with_n_blocks(n: usize) -> StorageInitializer {
        let mut rng = rand::thread_rng();
        with_n_blocks_and_rng(n, &mut rng)
    }

    /// Same as [`with_n_blocks`] except caller can specify the rng used
    pub fn with_n_blocks_and_rng(n: usize, rng: &mut impl Rng) -> StorageInitializer {
        let mut init = Vec::with_capacity(n);

        // Pass 1: generate mostly-random blocks with correct numbering,
        // commitments, transaction indices and non-empty storage diffs.
        for i in 0..n {
            let mut header: BlockHeader = Faker.fake_with_rng(rng);
            header.number =
                BlockNumber::new_or_panic(i.try_into().expect("u64 is at least as wide as usize"));
            header.state_commitment =
                StateCommitment::calculate(header.storage_commitment, header.class_commitment);

            // There must be at least 1 transaction per block
            let transactions_and_receipts = fake_non_empty_with_rng::<Vec<_>, gw::Transaction>(rng)
                .into_iter()
                .enumerate()
                .map(|(i, t)| {
                    let transaction_hash = t.hash();
                    (
                        t,
                        gw::Receipt {
                            transaction_hash,
                            transaction_index: TransactionIndex::new_or_panic(
                                i.try_into().expect("u64 is at least as wide as usize"),
                            ),
                            ..Faker.fake_with_rng(rng)
                        },
                    )
                })
                .collect::<Vec<_>>();

            header.transaction_count = transactions_and_receipts.len();
            header.event_count = transactions_and_receipts
                .iter()
                .map(|(_, r)| r.events.len())
                .sum();

            let block_hash = header.hash;
            let state_commitment = header.state_commitment;

            init.push((
                header,
                transactions_and_receipts,
                StateUpdate {
                    block_hash,
                    state_commitment,
                    // Will be fixed in the next loop
                    parent_state_commitment: StateCommitment::ZERO,
                    declared_cairo_classes: Faker.fake_with_rng::<HashSet<_>, _>(rng),
                    declared_sierra_classes: Faker.fake_with_rng::<HashMap<_, _>, _>(rng),
                    system_contract_updates: HashMap::from([(
                        ContractAddress::ONE,
                        SystemContractUpdate {
                            storage: fake_non_empty_with_rng(rng),
                        },
                    )]),
                    contract_updates: {
                        let mut x = Faker.fake_with_rng::<HashMap<_, ContractUpdate>, _>(rng);
                        x.iter_mut().for_each(|(_, u)| {
                            // Initially generate deploys only
                            u.class = u
                                .class
                                .as_ref()
                                .map(|x| ContractClassUpdate::Deploy(x.class_hash()));
                            // Disallow empty storage entries
                            if u.storage.is_empty() {
                                u.storage = fake_non_empty_with_rng(rng);
                            }
                        });
                        x
                    },
                },
            ));
        }

        //
        // Pass 2: "fix" block headers and state updates so the chain links up
        // (parent hashes, parent state commitments, replaced classes).
        //
        if !init.is_empty() {
            // Genesis gets a zero parent hash and zero old root.
            let (header, _, state_update) = init.get_mut(0).unwrap();
            header.parent_hash = BlockHash::ZERO;
            header.state_commitment =
                StateCommitment::calculate(header.storage_commitment, header.class_commitment);
            state_update.block_hash = header.hash;
            state_update.parent_state_commitment = StateCommitment::ZERO;

            for i in 1..n {
                // Collect what block i needs from its parent before taking a
                // mutable borrow of block i.
                let (parent_hash, parent_state_commitment, deployed_in_parent) = init
                    .get(i - 1)
                    .map(|(h, _, state_update)| {
                        (
                            h.hash,
                            h.state_commitment,
                            state_update
                                .contract_updates
                                .iter()
                                .filter_map(|(&address, update)| match update.class {
                                    Some(ContractClassUpdate::Deploy(class_hash)) => {
                                        Some((address, class_hash))
                                    }
                                    Some(_) | None => None,
                                })
                                .collect::<Vec<_>>(),
                        )
                    })
                    .unwrap();

                let (header, _, state_update) = init.get_mut(i).unwrap();
                header.parent_hash = parent_hash;
                header.state_commitment =
                    StateCommitment::calculate(header.storage_commitment, header.class_commitment);
                state_update.block_hash = header.hash;

                //
                // Fix state updates
                //
                state_update.parent_state_commitment = parent_state_commitment;

                // Disallow empty storage entries
                state_update.contract_updates.iter_mut().for_each(|(_, u)| {
                    if u.storage.is_empty() {
                        u.storage
                            .insert(Faker.fake_with_rng(rng), Faker.fake_with_rng(rng));
                    }
                });

                let num_deployed_in_parent = deployed_in_parent.len();
                if num_deployed_in_parent > 0 {
                    // Add some replaced classes
                    let num_replaced = rng.gen_range(1..=num_deployed_in_parent);
                    use rand::seq::SliceRandom;

                    deployed_in_parent
                        .choose_multiple(rng, num_replaced)
                        .for_each(|(address, _)| {
                            state_update
                                .contract_updates
                                .entry(*address)
                                // It's unlikely rng has generated an update to the previously deployed class but it is still possible
                                .or_default()
                                .class =
                                Some(ContractClassUpdate::Replace(Faker.fake_with_rng(rng)))
                        })
                }
            }
        }

        init
    }
}
|
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(unused_mut)]
use std::path;
// use util::html;
// use util::format;
use util_rust::group::Grouper;
use std::fmt::Write;
use std::collections::BTreeMap;
use connectedtext::*;
use simple::*;
//#[macro_use]
//extern crate util;
// Hard-coded workstation paths for the one-off ConnectedText migration.
const FILE_FULL_EXPORT: &str = r"E:\ConnectedText Restructure 2020-10-17\Home Export One File\Wiki Export.TXT";
const FILE_IMPORT_TOOLS: &str = r"Tools.txt"; // relative: Tools project export
const FILE_IMPORT_HOME: &str = r"Home.txt";   // relative: Home project export
const PATH_HOME_ARCHIVE_PROJECT_SOURCE: &str = r"E:\ConnectedText Restructure\Home Archive Project";
const PATH_HOME_ARCHIVE_PROJECT_DEST: &str = r"E:\ConnectedText Restructure\Home Archive Project Dest";
const PATH_TOOLS_PROJECT_SOURCE: &str = r"E:\ConnectedText Restructure\Tools Project";
const PATH_CHROME_BOOKMARKS: &str = r"E:\Temp\bookmarks_1_29_20.html";
/// Exploratory driver: the commented-out calls are alternative one-off
/// tasks; only the "derived added dates" report is currently active.
fn main() {
    println!("\nConnectedText start\n");
    log::clear();
    // gen::gen_page_from_chrome_bookmarks(path::Path::new(PATH_CHROME_BOOKMARKS));
    // audible::main();
    // try_load_topics();
    // try_load_links();
    // dbg!(count_topics_in_tools());
    // catalog_attributes();
    // catalog_categories();
    // import_topics().report_added_dates();
    import_topics().report_derived_added_dates();
    //bg!(&util::log::get_sorted());
    //import::test_delimited_entries();
    println!("\nConnectedText done\n");
}
/// One-off migration step: currently only copies image files from the Home
/// Archive project to its destination. The commented lines are the other
/// source/dest pairs and steps used during earlier runs.
fn run_import() {
    let path_file_full_export = path::Path::new(FILE_FULL_EXPORT);
    // let path_source = path::Path::new(PATH_HOME_PROJECT_SOURCE);
    // let path_dest = path::Path::new(PATH_HOME_PROJECT_DEST);
    let path_source = path::Path::new(PATH_HOME_ARCHIVE_PROJECT_SOURCE);
    let path_dest = path::Path::new(PATH_HOME_ARCHIVE_PROJECT_DEST);
    // let path_source = path::Path::new(PATH_TOOLS_PROJECT_SOURCE);
    // let path_dest = path::Path::new(PATH_TOOLS_PROJECT_DEST);
    // import::fix_file_names(path_file_full_export, path_source, path_dest).ok();
    // dbg!(&import::get_image_file_names(path::Path::new(PATH_HOME_PROJECT_DEST)));
    // Errors are deliberately ignored (best-effort copy).
    import::copy_image_files(path_source, path_dest).ok();
    // dbg!(&import::get_all_topic_names(path_file_full_export));
    // dbg!(&import::reconcile_files_and_topics(path_file_full_export, path_source));
    // import::fix_file_names(path::Path::new(PATH_TOOLS_PROJECT_SOURCE), path::Path::new(PATH_TOOLS_PROJECT_DEST)).ok();
    // dbg!(&import::get_image_file_names(path::Path::new(PATH_TOOLS_PROJECT_DEST)));
    // import::copy_image_files(path::Path::new(PATH_TOOLS_PROJECT_SOURCE), path::Path::new(PATH_TOOLS_PROJECT_DEST));
}
fn count_topics_in_tools() -> usize {
import::import_topics(FILE_IMPORT_TOOLS, "Tools").topics.len()
}
fn import_topics() -> crate::model::Wiki {
let mut wiki = import::import_topics(FILE_IMPORT_TOOLS, "Tools");
wiki.append(import::import_topics(FILE_IMPORT_HOME, "Home"));
import::add_links(&mut wiki);
wiki
}
fn try_load_topics() {
let topics = import_topics();
dbg!(&topics);
}
fn try_load_links() {
let mut wiki = import_topics();
import::add_links(&mut wiki);
}
fn catalog_attributes() {
let wiki = import_topics();
let mut attributes: BTreeMap<String, AttributeForCatalog> = BTreeMap::new();
for topic in wiki.topics.values() {
// for topic in topics.iter().filter(|x| x.category.eq(&Some(CATEGORY_BOOKS.to_string()))) {
for (attr_name, attr_values) in topic.attributes.iter() {
let mut attribute = attributes.entry(attr_name.to_string()).or_insert_with(|| { AttributeForCatalog::new(attr_name) } );
attribute.count += 1;
attribute.max_values = std::cmp::max(attribute.max_values, attr_values.len());
for one_value in attr_values.iter() {
attribute.register_value(one_value);
}
}
}
dbg!(&attributes);
}
#[derive(Debug)]
struct AttributeForCatalog {
pub name: String,
pub count: usize,
pub max_values: usize,
pub values: BTreeMap<String, usize>,
}
impl AttributeForCatalog {
pub fn new(name: &str) -> Self {
Self {
name: name.to_string(),
count: 0,
max_values: 0,
values: BTreeMap::new(),
}
}
pub fn register_value(&mut self, value: &str) {
let mut entry = self.values.entry(value.to_string()).or_insert_with(|| { 0 } );
*entry += 1;
}
}
fn catalog_categories() {
let wiki = import_topics();
let mut g = Grouper::new("Categories");
for topic in wiki.topics.values() {
if let Some(category) = &topic.category {
g.record_entry(category);
}
}
g.list_by_key();
}
|
/// LineBytes
/// You want to read a file line by line.
/// That file isn't UTF8, so you cannot read into a string or use `.lines()`.
// This little library solves that problem.
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
assert_eq!(2 + 2, 4);
}
}
|
use crate::Client;
use shiplift::{builder::NetworkCreateOptionsBuilder, NetworkCreateOptions};
/// Abstraction of a Docker network
pub struct Network {
id: String,
client: Client,
}
impl Network {
/// Return a Future which resolves to a new Network.
pub async fn new(name: impl AsRef<str>) -> Result<Self, shiplift::Error> {
NetworkBuilder::new(name).build().await
}
/// Create a network using advanced configuration
pub fn builder(name: impl AsRef<str>) -> NetworkBuilder {
NetworkBuilder::new(name)
}
/// The unique id of the Docker network
pub fn id(&self) -> &str {
&self.id
}
/// Remove the Docker network
pub async fn delete(self) -> Result<(), shiplift::Error> {
self.client.networks().get(&self.id).delete().await
}
}
pub struct NetworkBuilder {
client: Client,
options: NetworkCreateOptionsBuilder,
}
impl NetworkBuilder {
fn new(name: impl AsRef<str>) -> Self {
NetworkBuilder {
client: Client::default(),
options: NetworkCreateOptions::builder(name.as_ref()),
}
}
pub async fn build(self) -> Result<Network, shiplift::Error> {
let create_info = self.client.networks().create(&self.options.build()).await?;
Ok(Network {
id: create_info.id,
client: self.client,
})
}
}
|
use nom::types::CompleteByteSlice as Input;
use value::Unit;
named!(pub unit<Input, Unit>,
alt_complete!(
// Distance units, <length> type
value!(Unit::Em, tag!("em")) |
value!(Unit::Ex, tag!("ex")) |
value!(Unit::Ch, tag!("ch")) |
value!(Unit::Rem, tag!("rem")) |
value!(Unit::Vw, tag!("vw")) |
value!(Unit::Vh, tag!("vh")) |
value!(Unit::Vmin, tag!("vmin")) |
value!(Unit::Vmax, tag!("vmax")) |
value!(Unit::Cm, tag!("cm")) |
value!(Unit::Mm, tag!("mm")) |
value!(Unit::Q, tag!("q")) |
value!(Unit::In, tag!("in")) |
value!(Unit::Pt, tag!("pt")) |
value!(Unit::Pc, tag!("pc")) |
value!(Unit::Px, tag!("px")) |
// <angle> type
value!(Unit::Deg, tag!("deg")) |
value!(Unit::Grad, tag!("grad")) |
value!(Unit::Rad, tag!("rad")) |
value!(Unit::Turn, tag!("turn")) |
// <time> type
value!(Unit::S, tag!("s")) |
value!(Unit::Ms, tag!("ms")) |
// <frequency> type
value!(Unit::Hz, tag!("Hz")) |
value!(Unit::Khz, tag!("kHz")) |
// <resolution>
value!(Unit::Dpi, tag!("dpi")) |
value!(Unit::Dpcm, tag!("dpcm")) |
value!(Unit::Dppx, tag!("dppx")) |
// Special units
value!(Unit::Percent, tag!("%")) |
value!(Unit::Fr, tag!("fr")) |
value!(Unit::None)));
|
//use serde::de::{self, Visitor};
use serde::{Deserialize, Serialize};
//use std::fmt;
//use std::marker::PhantomData;
//use std::str::FromStr;
use std::vec::Vec;
//use void::Void;
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Pair {
pub success: bool,
pub data: Vec<String>,
}
#[derive(Debug, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Trade {
pub success: bool,
#[serde(with = "string_or_struct")]
pub data: Vec<TradeInformation>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct TradeInformation {
pub market: String,
pub price: f64,
pub size: f64,
pub side: String,
pub time: i64,
pub order_id: String,
pub fee_cost: f64,
pub market_address: String,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Volume {
pub success: bool,
pub data: Vec<VolumeInformation>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct VolumeInformation {
pub volume_usd: f64,
pub volume: f64,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct OrderBook {
pub success: bool,
pub data: OrderBookInformation,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct OrderBookInformation {
pub market: String,
pub bids: Vec<PriceSize>,
pub asks: Vec<PriceSize>,
pub market_address: String,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct PriceSize {
pub price: f64,
pub size: f64,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct NullData {
pub success: String,
pub data: String,
}
pub(crate) mod string_or_struct {
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
pub fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
where
T: fmt::Display,
S: Serializer,
{
serializer.collect_str(value)
}
pub fn deserialize<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
where
D: Deserializer<'de>,
T: Deserialize<'de> + Serialize,
{
#[derive(Deserialize, Serialize)]
#[serde(untagged)]
enum StringOrStruct<T> {
String(Option<String>),
Struct(Vec<T>),
}
match StringOrStruct::deserialize(deserializer)? {
StringOrStruct::String(_) => Ok(Vec::new()),
StringOrStruct::Struct(i) => Ok(i),
}
}
}
|
use crate::rule::{engine::composition::GraphId, MatchGraph};
use crate::tokenizer::Tokenizer;
use crate::utils::regex::SerializeRegex;
use enum_dispatch::enum_dispatch;
use serde::{Deserialize, Serialize};
#[enum_dispatch]
#[derive(Serialize, Deserialize)]
pub enum Filter {
NoDisambiguationEnglishPartialPosTagFilter,
}
#[enum_dispatch(Filter)]
pub trait Filterable {
fn keep(&self, graph: &MatchGraph, tokenizer: &Tokenizer) -> bool;
}
#[derive(Serialize, Deserialize)]
pub struct NoDisambiguationEnglishPartialPosTagFilter {
pub(crate) id: GraphId,
pub(crate) regexp: SerializeRegex,
pub(crate) postag_regexp: SerializeRegex,
#[allow(dead_code)]
pub(crate) negate_postag: bool,
}
impl Filterable for NoDisambiguationEnglishPartialPosTagFilter {
fn keep(&self, graph: &MatchGraph, tokenizer: &Tokenizer) -> bool {
graph.by_id(self.id).tokens(graph.tokens()).all(|token| {
if let Some(captures) = self.regexp.captures(&token.word.text.as_ref()) {
// get group 2 because `full_match` adds one group
let tags = tokenizer.tagger().get_tags(
&captures.at(2).unwrap(),
tokenizer.options().always_add_lower_tags,
tokenizer.options().use_compound_split_heuristic,
);
tags.iter()
.any(|x| self.postag_regexp.is_match(x.pos.as_ref()))
} else {
false
}
})
}
}
|
use crate::decode::Decode;
use crate::encode::{Encode, IsNull};
use crate::io::Buf;
use crate::postgres::types::raw::sequence::PgSequenceDecoder;
use crate::postgres::{PgData, PgRawBuffer, PgValue, Postgres};
use crate::types::Type;
use byteorder::BigEndian;
pub struct PgRecordEncoder<'a> {
buf: &'a mut PgRawBuffer,
beg: usize,
num: u32,
}
impl<'a> PgRecordEncoder<'a> {
pub fn new(buf: &'a mut PgRawBuffer) -> Self {
// reserve space for a field count
buf.extend_from_slice(&(0_u32).to_be_bytes());
Self {
beg: buf.len(),
buf,
num: 0,
}
}
pub fn finish(&mut self) {
// replaces zeros with actual length
self.buf[self.beg - 4..self.beg].copy_from_slice(&self.num.to_be_bytes());
}
pub fn encode<T>(&mut self, value: T) -> &mut Self
where
T: Type<Postgres> + Encode<Postgres>,
{
let info = T::type_info();
if let Some(oid) = info.id {
// write oid
self.buf.extend(&oid.0.to_be_bytes());
} else {
// write hole for this oid
self.buf.push_type_hole(&info.name);
}
// write zeros for length
self.buf.extend(&[0; 4]);
let start = self.buf.len();
if let IsNull::Yes = value.encode_nullable(self.buf) {
self.buf[start - 4..start].copy_from_slice(&(-1_i32).to_be_bytes());
} else {
let end = self.buf.len();
let size = end - start;
// replaces zeros with actual length
self.buf[start - 4..start].copy_from_slice(&(size as u32).to_be_bytes());
}
// keep track of count
self.num += 1;
self
}
}
pub struct PgRecordDecoder<'de>(PgSequenceDecoder<'de>);
impl<'de> PgRecordDecoder<'de> {
pub fn new(value: PgValue<'de>) -> crate::Result<Self> {
let mut data = value.try_get()?;
match data {
PgData::Text(_) => {}
PgData::Binary(ref mut buf) => {
let _expected_len = buf.get_u32::<BigEndian>()?;
}
}
Ok(Self(PgSequenceDecoder::new(data, None)))
}
#[inline]
pub fn decode<T>(&mut self) -> crate::Result<T>
where
T: for<'rec> Decode<'rec, Postgres>,
T: Type<Postgres>,
{
self.0
.decode()?
.ok_or_else(|| decode_err!("no field `{0}` on {0}-element record", self.0.len()))
}
}
#[test]
fn test_encode_field() {
use std::convert::TryInto;
let value = "Foo Bar";
let mut raw_encoded = PgRawBuffer::default();
<&str as Encode<Postgres>>::encode(&value, &mut raw_encoded);
let mut field_encoded = PgRawBuffer::default();
let mut encoder = PgRecordEncoder::new(&mut field_encoded);
encoder.encode(&value);
// check oid
let oid = <&str as Type<Postgres>>::type_info().id.unwrap().0;
let field_encoded_oid = u32::from_be_bytes(field_encoded[4..8].try_into().unwrap());
assert_eq!(oid, field_encoded_oid);
// check length
let field_encoded_length = u32::from_be_bytes(field_encoded[8..12].try_into().unwrap());
assert_eq!(raw_encoded.len(), field_encoded_length as usize);
// check data
assert_eq!(&**raw_encoded, &field_encoded[12..]);
}
#[test]
fn test_decode_field() {
let value = "Foo Bar".to_string();
let mut buf = PgRawBuffer::default();
let mut encoder = PgRecordEncoder::new(&mut buf);
encoder.encode(&value);
let buf = buf.as_slice();
let mut decoder = PgRecordDecoder::new(PgValue::from_bytes(buf)).unwrap();
let value_decoded: String = decoder.decode().unwrap();
assert_eq!(value_decoded, value);
}
|
use std::thread::{sleep, spawn};
use tracy_client::*;
#[global_allocator]
static GLOBAL: ProfiledAllocator<std::alloc::System> =
ProfiledAllocator::new(std::alloc::System, 100);
fn fib(i: u16) -> u64 {
let span = Span::new(&format!("fib({})", i), "fib", file!(), line!(), 100);
let result = match i {
0 => 0,
1 => 1,
_ => fib(i - 1) + fib(i - 2),
};
span.emit_value(result);
result
}
fn main() {
message("starting T1", 10);
let t1 = spawn(|| {
for _ in 0..100 {
let span = Span::new("zone values", "zone_values", file!(), line!(), 100);
span.emit_value(42);
sleep(std::time::Duration::from_secs(1));
span.emit_value(322);
sleep(std::time::Duration::from_secs(1));
span.emit_value(101);
sleep(std::time::Duration::from_secs(1));
span.emit_value(101 - 5);
finish_continuous_frame!("T1");
}
});
message("starting T2", 10);
let t2 = spawn(|| {
for _ in 0..100 {
let span = Span::new("zone text", "zone_text", file!(), line!(), 100);
span.emit_text("sleeping first time");
std::thread::sleep(std::time::Duration::from_secs(1));
let string = format!("sleeping second time");
span.emit_text(&string);
drop(string);
std::thread::sleep(std::time::Duration::from_secs(1));
let string = format!("sleeping third time");
span.emit_text(&string);
drop(string);
std::thread::sleep(std::time::Duration::from_secs(1));
finish_continuous_frame!("T2");
}
});
message("starting t3", 10);
let t3 = spawn(|| {
for _ in 0..100 {
std::thread::sleep(std::time::Duration::from_secs(1));
finish_continuous_frame!();
finish_continuous_frame!("T3")
}
});
message("starting t4", 10);
let t4 = spawn(|| {
static PLOT: Plot = create_plot!("random numbers");
let mut seed = 42u32;
for _ in 0..100 {
seed = (seed * 1103515245 + 12345) & 0x7fffffff;
PLOT.point(seed as f64);
std::thread::sleep(std::time::Duration::from_secs(1));
finish_continuous_frame!("T4")
}
});
message("starting t5", 10);
let t5 = spawn(|| {
for i in 0..100 {
{
let _f = start_noncontinuous_frame!("making vectors");
message(&format!("making vector of {} vectors", i), 20);
let mut vec = Vec::new();
for v in (0..i).map(|v| Vec::<u8>::with_capacity(v * 100)) {
vec.push(v)
}
}
std::thread::sleep(std::time::Duration::from_secs(1));
finish_continuous_frame!("T5");
}
});
message("starting t6", 10);
let t6 = spawn(|| {
fib(25);
});
let _ = t1.join();
message("T1 joined", 10);
let _ = t2.join();
message("T2 joined", 10);
let _ = t3.join();
message("T3 joined", 10);
let _ = t4.join();
message("T4 joined", 10);
let _ = t5.join();
message("T5 joined", 10);
let _ = t6.join();
message("T6 joined", 10);
}
|
pub mod wavelet;
pub trait Sequence {}
|
use demo_macro::print_things;
print_things!();
fn main() {
println!("Hello, world!");
}
|
#[doc = "Reader of register BMCMPR6"]
pub type R = crate::R<u32, super::BMCMPR6>;
#[doc = "Writer for register BMCMPR6"]
pub type W = crate::W<u32, super::BMCMPR6>;
#[doc = "Register BMCMPR6 `reset()`'s with value 0"]
impl crate::ResetValue for super::BMCMPR6 {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `BMCMP`"]
pub type BMCMP_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `BMCMP`"]
pub struct BMCMP_W<'a> {
w: &'a mut W,
}
impl<'a> BMCMP_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff);
self.w
}
}
impl R {
#[doc = "Bits 0:15 - BMCMP"]
#[inline(always)]
pub fn bmcmp(&self) -> BMCMP_R {
BMCMP_R::new((self.bits & 0xffff) as u16)
}
}
impl W {
#[doc = "Bits 0:15 - BMCMP"]
#[inline(always)]
pub fn bmcmp(&mut self) -> BMCMP_W {
BMCMP_W { w: self }
}
}
|
#![feature(arbitrary_self_types)]
#![feature(crate_in_paths)]
#![feature(crate_visibility_modifier)]
#![feature(in_band_lifetimes)]
#![feature(existential_impl_trait)]
#![feature(underscore_lifetimes)]
#![feature(universal_impl_trait)]
mod drop_thunk;
mod test;
pub mod layer1;
use crate::drop_thunk::DropThunk;
/// Represents a "suspended" value. Suspended values may have
/// references into heap values that are owned by this `Suspend`. The
/// lifetime(s) of those references are hidden and called the
/// "existential" lifetimes.
///
/// - The type `L` is the "closed" form, a marker type in which those
/// existential lifetimes do not appear.
/// - The bound `'bound` is a bound on the overall lifetime of the
/// data that the existential lifetimes may refer to (which does not
/// otherwise appear in `Suspend`).
pub struct Suspend<'bound, L> {
/// Contains the closed over data. This `Box` *actually* stores
/// the "opened" form of the data in `L`, but we give it the
/// "closed" form of the type to hide the existential lifetime.
///
/// Always `Some` except when dtor has run.
closed_data: Option<Box<L>>,
/// A function that runs in the dtor. It is given the `Box<L>`
/// and is meant to open and free it.
free_suspended: fn(Box<L>),
/// This drop-thunk, when dropped, will cause all the hidden data
/// to be freed. The "hidden data" consists of boxes that were
/// used to build the closed-data.
drop_thunk: Box<DropThunk + 'bound>,
}
impl<'bound, L> Drop for Suspend<'bound, L> {
fn drop(&mut self) {
(self.free_suspended)(self.closed_data.take().unwrap());
}
}
impl<'bound, L> ::std::ops::Deref for Suspend<'bound, L> {
type Target = L;
fn deref(&self) -> &Self::Target {
self.closed_data.as_ref().unwrap()
}
}
|
#[doc = "Reader of register TIMCCR2"]
pub type R = crate::R<u32, super::TIMCCR2>;
#[doc = "Writer for register TIMCCR2"]
pub type W = crate::W<u32, super::TIMCCR2>;
#[doc = "Register TIMCCR2 `reset()`'s with value 0"]
impl crate::ResetValue for super::TIMCCR2 {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `TRGHLF`"]
pub type TRGHLF_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TRGHLF`"]
pub struct TRGHLF_W<'a> {
w: &'a mut W,
}
impl<'a> TRGHLF_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
self.w
}
}
#[doc = "Reader of field `GTCMP3`"]
pub type GTCMP3_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `GTCMP3`"]
pub struct GTCMP3_W<'a> {
w: &'a mut W,
}
impl<'a> GTCMP3_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
self.w
}
}
#[doc = "Reader of field `GTCMP1`"]
pub type GTCMP1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `GTCMP1`"]
pub struct GTCMP1_W<'a> {
w: &'a mut W,
}
impl<'a> GTCMP1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
self.w
}
}
#[doc = "Reader of field `FEROM`"]
pub type FEROM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `FEROM`"]
pub struct FEROM_W<'a> {
w: &'a mut W,
}
impl<'a> FEROM_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 14)) | (((value as u32) & 0x03) << 14);
self.w
}
}
#[doc = "Reader of field `BMROM`"]
pub type BMROM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `BMROM`"]
pub struct BMROM_W<'a> {
w: &'a mut W,
}
impl<'a> BMROM_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 12)) | (((value as u32) & 0x03) << 12);
self.w
}
}
#[doc = "Reader of field `ADROM`"]
pub type ADROM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ADROM`"]
pub struct ADROM_W<'a> {
w: &'a mut W,
}
impl<'a> ADROM_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10);
self.w
}
}
#[doc = "Reader of field `OUTROM`"]
pub type OUTROM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `OUTROM`"]
pub struct OUTROM_W<'a> {
w: &'a mut W,
}
impl<'a> OUTROM_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);
self.w
}
}
#[doc = "Reader of field `ROM`"]
pub type ROM_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `ROM`"]
pub struct ROM_W<'a> {
w: &'a mut W,
}
impl<'a> ROM_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6);
self.w
}
}
#[doc = "Reader of field `UDM`"]
pub type UDM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `UDM`"]
pub struct UDM_W<'a> {
w: &'a mut W,
}
impl<'a> UDM_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `DCDR`"]
pub type DCDR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCDR`"]
pub struct DCDR_W<'a> {
w: &'a mut W,
}
impl<'a> DCDR_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Reader of field `DCDS`"]
pub type DCDS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCDS`"]
pub struct DCDS_W<'a> {
w: &'a mut W,
}
impl<'a> DCDS_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Reader of field `DCDE`"]
pub type DCDE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DCDE`"]
pub struct DCDE_W<'a> {
w: &'a mut W,
}
impl<'a> DCDE_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
impl R {
#[doc = "Bit 20 - Triggered-half mode"]
#[inline(always)]
pub fn trghlf(&self) -> TRGHLF_R {
TRGHLF_R::new(((self.bits >> 20) & 0x01) != 0)
}
#[doc = "Bit 17 - Greater than Compare 3 PWM mode"]
#[inline(always)]
pub fn gtcmp3(&self) -> GTCMP3_R {
GTCMP3_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 16 - Greater than Compare 1 PWM mode"]
#[inline(always)]
pub fn gtcmp1(&self) -> GTCMP1_R {
GTCMP1_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bits 14:15 - Fault and Event Roll-Over Mode"]
#[inline(always)]
pub fn ferom(&self) -> FEROM_R {
FEROM_R::new(((self.bits >> 14) & 0x03) as u8)
}
#[doc = "Bits 12:13 - Burst Mode Roll-Over Mode"]
#[inline(always)]
pub fn bmrom(&self) -> BMROM_R {
BMROM_R::new(((self.bits >> 12) & 0x03) as u8)
}
#[doc = "Bits 10:11 - ADC Roll-Over Mode"]
#[inline(always)]
pub fn adrom(&self) -> ADROM_R {
ADROM_R::new(((self.bits >> 10) & 0x03) as u8)
}
#[doc = "Bits 8:9 - Output Roll-Over Mode"]
#[inline(always)]
pub fn outrom(&self) -> OUTROM_R {
OUTROM_R::new(((self.bits >> 8) & 0x03) as u8)
}
#[doc = "Bits 6:7 - Roll-Over Mode"]
#[inline(always)]
pub fn rom(&self) -> ROM_R {
ROM_R::new(((self.bits >> 6) & 0x03) as u8)
}
#[doc = "Bit 4 - Up-Down Mode"]
#[inline(always)]
pub fn udm(&self) -> UDM_R {
UDM_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 2 - Dual Channel DAC Reset trigger"]
#[inline(always)]
pub fn dcdr(&self) -> DCDR_R {
DCDR_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 1 - Dual Channel DAC Step trigger"]
#[inline(always)]
pub fn dcds(&self) -> DCDS_R {
DCDS_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 0 - Dual Channel DAC trigger enable"]
#[inline(always)]
pub fn dcde(&self) -> DCDE_R {
DCDE_R::new((self.bits & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 20 - Triggered-half mode"]
#[inline(always)]
pub fn trghlf(&mut self) -> TRGHLF_W {
TRGHLF_W { w: self }
}
#[doc = "Bit 17 - Greater than Compare 3 PWM mode"]
#[inline(always)]
pub fn gtcmp3(&mut self) -> GTCMP3_W {
GTCMP3_W { w: self }
}
#[doc = "Bit 16 - Greater than Compare 1 PWM mode"]
#[inline(always)]
pub fn gtcmp1(&mut self) -> GTCMP1_W {
GTCMP1_W { w: self }
}
#[doc = "Bits 14:15 - Fault and Event Roll-Over Mode"]
#[inline(always)]
pub fn ferom(&mut self) -> FEROM_W {
FEROM_W { w: self }
}
#[doc = "Bits 12:13 - Burst Mode Roll-Over Mode"]
#[inline(always)]
pub fn bmrom(&mut self) -> BMROM_W {
BMROM_W { w: self }
}
#[doc = "Bits 10:11 - ADC Roll-Over Mode"]
#[inline(always)]
pub fn adrom(&mut self) -> ADROM_W {
ADROM_W { w: self }
}
#[doc = "Bits 8:9 - Output Roll-Over Mode"]
#[inline(always)]
pub fn outrom(&mut self) -> OUTROM_W {
OUTROM_W { w: self }
}
#[doc = "Bits 6:7 - Roll-Over Mode"]
#[inline(always)]
pub fn rom(&mut self) -> ROM_W {
ROM_W { w: self }
}
#[doc = "Bit 4 - Up-Down Mode"]
#[inline(always)]
pub fn udm(&mut self) -> UDM_W {
UDM_W { w: self }
}
#[doc = "Bit 2 - Dual Channel DAC Reset trigger"]
#[inline(always)]
pub fn dcdr(&mut self) -> DCDR_W {
DCDR_W { w: self }
}
#[doc = "Bit 1 - Dual Channel DAC Step trigger"]
#[inline(always)]
pub fn dcds(&mut self) -> DCDS_W {
DCDS_W { w: self }
}
#[doc = "Bit 0 - Dual Channel DAC trigger enable"]
#[inline(always)]
pub fn dcde(&mut self) -> DCDE_W {
DCDE_W { w: self }
}
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use common_catalog::catalog_kind::CATALOG_DEFAULT;
use common_catalog::table::Table;
use common_catalog::table_context::TableContext;
use common_exception::Result;
use common_expression::types::StringType;
use common_expression::utils::FromData;
use common_expression::DataBlock;
use common_expression::TableDataType;
use common_expression::TableField;
use common_expression::TableSchemaRefExt;
use common_meta_app::schema::TableIdent;
use common_meta_app::schema::TableInfo;
use common_meta_app::schema::TableMeta;
use crate::table::AsyncOneBlockSystemTable;
use crate::table::AsyncSystemTable;
pub struct EnginesTable {
table_info: TableInfo,
}
#[async_trait::async_trait]
impl AsyncSystemTable for EnginesTable {
const NAME: &'static str = "system.engines";
fn get_table_info(&self) -> &TableInfo {
&self.table_info
}
async fn get_full_data(&self, ctx: Arc<dyn TableContext>) -> Result<DataBlock> {
// TODO passing catalog name
let table_engine_descriptors = ctx.get_catalog(CATALOG_DEFAULT)?.get_table_engines();
let mut engine_name = Vec::with_capacity(table_engine_descriptors.len());
let mut engine_comment = Vec::with_capacity(table_engine_descriptors.len());
for descriptor in &table_engine_descriptors {
engine_name.push(descriptor.engine_name.as_bytes().to_vec());
engine_comment.push(descriptor.comment.as_bytes().to_vec());
}
Ok(DataBlock::new_from_columns(vec![
StringType::from_data(engine_name),
StringType::from_data(engine_comment),
]))
}
}
impl EnginesTable {
pub fn create(table_id: u64) -> Arc<dyn Table> {
let schema = TableSchemaRefExt::create(vec![
TableField::new("Engine", TableDataType::String),
TableField::new("Comment", TableDataType::String),
]);
let table_info = TableInfo {
desc: "'system'.'engines'".to_string(),
name: "engines".to_string(),
ident: TableIdent::new(table_id, 0),
meta: TableMeta {
schema,
engine: "SystemEngines".to_string(),
..Default::default()
},
..Default::default()
};
AsyncOneBlockSystemTable::create(EnginesTable { table_info })
}
}
|
use alloc::string::String;
use alloc::vec::Vec;
use core::fmt::{Debug, Formatter};
use core::marker::PhantomData;
use core::ops::{Deref, DerefMut};
#[repr(C)]
pub struct UserPtr<T, P: Policy> {
ptr: *mut T,
mark: PhantomData<P>,
}
pub trait Policy {}
pub trait Read: Policy {}
pub trait Write: Policy {}
pub enum In {}
pub enum Out {}
pub enum InOut {}
impl Policy for In {}
impl Policy for Out {}
impl Policy for InOut {}
impl Read for In {}
impl Write for Out {}
impl Read for InOut {}
impl Write for InOut {}
pub type UserInPtr<T> = UserPtr<T, In>;
pub type UserOutPtr<T> = UserPtr<T, Out>;
pub type UserInOutPtr<T> = UserPtr<T, InOut>;
type Result<T> = core::result::Result<T, Error>;
/// The error type which is returned from user pointer.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Error {
InvalidUtf8,
InvalidPointer,
BufferTooSmall,
InvalidLength,
InvalidVectorAddress,
}
impl<T, P: Policy> Debug for UserPtr<T, P> {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
write!(f, "{:?}", self.ptr)
}
}
// FIXME: this is a workaround for `clear_child_tid`.
unsafe impl<T, P: Policy> Send for UserPtr<T, P> {}
unsafe impl<T, P: Policy> Sync for UserPtr<T, P> {}
impl<T, P: Policy> From<usize> for UserPtr<T, P> {
fn from(x: usize) -> Self {
UserPtr {
ptr: x as _,
mark: PhantomData,
}
}
}
impl<T, P: Policy> UserPtr<T, P> {
pub fn from_addr_size(addr: usize, size: usize) -> Result<Self> {
if size < core::mem::size_of::<T>() {
return Err(Error::BufferTooSmall);
}
Ok(Self::from(addr))
}
pub fn is_null(&self) -> bool {
self.ptr.is_null()
}
pub fn add(&self, count: usize) -> Self {
UserPtr {
ptr: unsafe { self.ptr.add(count) },
mark: PhantomData,
}
}
pub fn as_ptr(&self) -> *mut T {
self.ptr
}
pub fn check(&self) -> Result<()> {
if self.ptr.is_null() {
return Err(Error::InvalidPointer);
}
if (self.ptr as usize) % core::mem::align_of::<T>() != 0 {
return Err(Error::InvalidPointer);
}
Ok(())
}
}
impl<T, P: Read> UserPtr<T, P> {
    /// Borrows the pointee as a `'static` reference.
    ///
    /// The pointer is validated (non-null, aligned) first; previously this
    /// dereferenced unconditionally, so a null user pointer was undefined
    /// behavior here while `read` on the same pointer returned an error.
    pub fn as_ref(&self) -> Result<&'static T> {
        self.check()?;
        // SAFETY: checked non-null and aligned above; the caller remains
        // responsible for the address actually being valid user memory.
        Ok(unsafe { &*self.ptr })
    }
    /// Reads one `T` from user memory.
    pub fn read(&self) -> Result<T> {
        self.check()?;
        // SAFETY: pointer checked non-null and aligned above.
        Ok(unsafe { self.ptr.read() })
    }
    /// Like `read`, but a null pointer yields `Ok(None)` instead of an error.
    pub fn read_if_not_null(&self) -> Result<Option<T>> {
        if self.ptr.is_null() {
            return Ok(None);
        }
        let value = self.read()?;
        Ok(Some(value))
    }
    /// Reads `len` consecutive `T`s starting at the pointer.
    pub fn read_array(&self, len: usize) -> Result<Vec<T>> {
        if len == 0 {
            return Ok(Vec::default());
        }
        self.check()?;
        let mut ret = Vec::<T>::with_capacity(len);
        // SAFETY: `ret` has capacity for `len` elements and every slot is
        // initialized by the copy before the Vec is observed.
        unsafe {
            ret.set_len(len);
            ret.as_mut_ptr().copy_from_nonoverlapping(self.ptr, len);
        }
        Ok(ret)
    }
}
impl<P: Read> UserPtr<u8, P> {
    /// Reads exactly `len` bytes and interprets them as UTF-8.
    pub fn read_string(&self, len: usize) -> Result<String> {
        self.check()?;
        // NOTE(review): assumes `len` readable bytes at `ptr`; the caller
        // must have validated the user buffer length — confirm.
        let src = unsafe { core::slice::from_raw_parts(self.ptr, len) };
        let s = core::str::from_utf8(src).map_err(|_| Error::InvalidUtf8)?;
        Ok(String::from(s))
    }
    /// Reads a NUL-terminated C string (terminator excluded from the result).
    pub fn read_cstring(&self) -> Result<String> {
        self.check()?;
        // Scan forward for the terminating NUL; this loop is unbounded if
        // the user data has no terminator.
        let len = unsafe { (0usize..).find(|&i| *self.ptr.add(i) == 0).unwrap() };
        self.read_string(len)
    }
}
impl<P: Read> UserPtr<UserPtr<u8, P>, P> {
    /// Reads a null-terminated array of C-string pointers (e.g. `argv`),
    /// returning the strings in order.
    pub fn read_cstring_array(&self) -> Result<Vec<String>> {
        self.check()?;
        // Count entries up to (excluding) the terminating null pointer.
        let len = unsafe {
            (0usize..)
                .find(|&i| self.ptr.add(i).read().is_null())
                .unwrap()
        };
        self.read_array(len)?
            .into_iter()
            .map(|ptr| ptr.read_cstring())
            .collect()
    }
}
impl<T, P: Write> UserPtr<T, P> {
    /// Writes one `T` after validating the pointer.
    pub fn write(&mut self, value: T) -> Result<()> {
        self.check()?;
        unsafe {
            self.ptr.write(value);
        }
        Ok(())
    }
    /// Writes `value` unless the pointer is null; null is a no-op, not an
    /// error (matches optional out-parameter semantics).
    pub fn write_if_not_null(&mut self, value: T) -> Result<()> {
        if self.ptr.is_null() {
            return Ok(());
        }
        self.write(value)
    }
    /// Copies all of `values` into user memory; empty input is a no-op and
    /// skips pointer validation.
    pub fn write_array(&mut self, values: &[T]) -> Result<()> {
        if values.is_empty() {
            return Ok(());
        }
        self.check()?;
        unsafe {
            self.ptr
                .copy_from_nonoverlapping(values.as_ptr(), values.len());
        }
        Ok(())
    }
}
impl<P: Write> UserPtr<u8, P> {
    /// Copies `s` into user memory as a NUL-terminated C string.
    pub fn write_cstring(&mut self, s: &str) -> Result<()> {
        // Validate up front: `write_array` skips its own check for an empty
        // slice, which previously let the NUL write below go through a null
        // or misaligned pointer when `s` was empty.
        self.check()?;
        let bytes = s.as_bytes();
        self.write_array(bytes)?;
        // SAFETY: pointer validity checked above; append the terminator.
        unsafe {
            self.ptr.add(bytes.len()).write(0);
        }
        Ok(())
    }
}
/// One scatter/gather segment; `#[repr(C)]` keeps the layout compatible with
/// the userspace `struct iovec` it is read from.
#[derive(Debug)]
#[repr(C)]
pub struct IoVec<P: Policy> {
    /// Starting address
    ptr: UserPtr<u8, P>,
    /// Number of bytes to transfer
    len: usize,
}
/// Readable iovec (data flows from user space into the kernel).
pub type IoVecIn = IoVec<In>;
/// Writable iovec (data flows out to user space).
pub type IoVecOut = IoVec<Out>;
/// A valid IoVecs request from user
#[derive(Debug)]
pub struct IoVecs<P: Policy> {
    vec: Vec<IoVec<P>>,
}
impl<P: Policy> UserInPtr<IoVec<P>> {
pub fn read_iovecs(&self, count: usize) -> Result<IoVecs<P>> {
if self.ptr.is_null() {
return Err(Error::InvalidPointer);
}
let vec = self.read_array(count)?;
// The sum of length should not overflow.
let mut total_count = 0usize;
for io_vec in vec.iter() {
let (result, overflow) = total_count.overflowing_add(io_vec.len());
if overflow {
return Err(Error::InvalidLength);
}
total_count = result;
}
Ok(IoVecs { vec })
}
}
impl<P: Policy> IoVecs<P> {
    /// Total number of bytes across all segments (validated against
    /// overflow when the request was read).
    pub fn total_len(&self) -> usize {
        self.vec.iter().map(|vec| vec.len).sum()
    }
}
impl<P: Read> IoVecs<P> {
    /// Gathers every segment, in order, into one contiguous buffer.
    pub fn read_to_vec(&self) -> Result<Vec<u8>> {
        let mut buf = Vec::new();
        for vec in self.vec.iter() {
            buf.extend(vec.ptr.read_array(vec.len)?);
        }
        Ok(buf)
    }
}
impl<P: Write> IoVecs<P> {
    /// Scatters `buf` across the segments in order.
    ///
    /// Returns the number of bytes actually written (the whole buffer unless
    /// the segments' combined capacity is smaller).
    pub fn write_from_buf(&mut self, mut buf: &[u8]) -> Result<usize> {
        let buf_len = buf.len();
        for vec in self.vec.iter_mut() {
            if buf.is_empty() {
                // Nothing left to scatter: every remaining segment would
                // receive zero bytes, so stop instead of iterating them all.
                break;
            }
            let copy_len = vec.len.min(buf.len());
            if copy_len == 0 {
                // Zero-length segment: skip it.
                continue;
            }
            vec.ptr.write_array(&buf[..copy_len])?;
            buf = &buf[copy_len..];
        }
        Ok(buf_len - buf.len())
    }
}
// Let IoVecs be used wherever a slice of segments is expected.
impl<P: Policy> Deref for IoVecs<P> {
    type Target = [IoVec<P>];
    fn deref(&self) -> &Self::Target {
        self.vec.as_slice()
    }
}
// Mutable slice access is only available for writable iovecs.
impl<P: Write> DerefMut for IoVecs<P> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.vec.as_mut_slice()
    }
}
impl<P: Policy> IoVec<P> {
    /// Whether the segment's base address is null.
    pub fn is_null(&self) -> bool {
        self.ptr.is_null()
    }
    /// Segment length in bytes.
    pub fn len(&self) -> usize {
        self.len
    }
    /// Whether the segment transfers zero bytes.
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    /// Validates the base pointer (non-null, aligned).
    pub fn check(&self) -> Result<()> {
        self.ptr.check()
    }
    /// Views the segment as a byte slice.
    pub fn as_slice(&self) -> Result<&[u8]> {
        if self.ptr.is_null() {
            return Err(Error::InvalidVectorAddress);
        }
        // NOTE(review): assumes `len` accessible bytes at `ptr` — callers
        // must have validated the user mapping; confirm.
        let slice = unsafe { core::slice::from_raw_parts(self.ptr.as_ptr(), self.len) };
        Ok(slice)
    }
}
impl<P: Write> IoVec<P> {
    /// Views the segment as a mutable byte slice (writable iovecs only).
    pub fn as_mut_slice(&mut self) -> Result<&mut [u8]> {
        if self.ptr.is_null() {
            return Err(Error::InvalidVectorAddress);
        }
        // NOTE(review): same mapped-memory assumption as `as_slice`.
        let slice = unsafe { core::slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) };
        Ok(slice)
    }
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::helper::{get_unix_ts, is_global};
use crate::message_processor::{MessageFuture, MessageProcessor};
use crate::net::{build_network_service, SNetworkService};
use crate::{NetworkMessage, PeerEvent, PeerMessage};
use actix::prelude::*;
use anyhow::{bail, Result};
use bitflags::_core::sync::atomic::Ordering;
use bus::{Broadcast, Bus, BusActor};
use config::NodeConfig;
use crypto::{hash::CryptoHash, HashValue};
use futures::lock::Mutex;
use futures::{channel::mpsc, sink::SinkExt, stream::StreamExt};
use futures_timer::Delay;
use libp2p::multiaddr::Protocol;
use libp2p::PeerId;
use lru::LruCache;
use network_api::{messages::RawRpcRequestMessage, NetworkService};
use network_p2p::Multiaddr;
use async_trait::async_trait;
use scs::SCSCodec;
use starcoin_sync_api::sync_messages::PeerNewBlock;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::prelude::*;
use std::io::Write;
use std::iter::FromIterator;
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::time::Duration;
use tokio::runtime::Handle;
use tx_relay::*;
use types::peer_info::PeerInfo;
use types::system_events::SystemEvents;
use types::transaction::SignedUserTransaction;
// Capacity of the per-peer known-transaction/known-block caches.
const LRU_CACHE_SIZE: usize = 1024;
// File (under the node data dir) where known peer addresses are persisted.
const PEERS_FILE_NAME: &str = "peers.json";
/// Cheaply-clonable async handle to the network: wraps the actor address,
/// the outbound message channel and the shared `Inner` state.
#[derive(Clone)]
pub struct NetworkAsyncService {
    // Actor handling broadcast/system events.
    addr: Addr<NetworkActor>,
    // Matches RPC responses to pending request futures by request id.
    raw_message_processor: MessageProcessor<u128, Vec<u8>>,
    // Outbound channel consumed by the network event loop.
    tx: mpsc::UnboundedSender<NetworkMessage>,
    // Our own libp2p peer id.
    peer_id: PeerId,
    handle: Handle,
    inner: Arc<Inner>,
}
/// Shared state driven by the background receive loop.
struct Inner {
    network_service: SNetworkService,
    bus: Addr<BusActor>,
    raw_message_processor: MessageProcessor<u128, Vec<u8>>,
    handle: Handle,
    // All known peers (including ourselves), keyed by peer id.
    peers: Arc<Mutex<HashMap<PeerId, PeerInfoNet>>>,
    // Used once at startup to signal the first successful seed connection.
    connected_tx: mpsc::Sender<PeerEvent>,
    // True until the first peer-connected event has been forwarded.
    need_send_event: AtomicBool,
    node_config: Arc<NodeConfig>,
    peer_id: PeerId,
}
/// Per-peer bookkeeping: chain info plus LRU caches used to avoid
/// re-sending data the peer already knows.
#[derive(Debug)]
struct PeerInfoNet {
    peer_info: PeerInfo,
    // Transactions this peer is known to have seen.
    known_transactions: LruCache<HashValue, ()>,
    /// Holds a set of blocks known to this peer.
    known_blocks: LruCache<HashValue, ()>,
}
impl PeerInfoNet {
fn new(peer_info: PeerInfo) -> Self {
Self {
peer_info,
known_blocks: LruCache::new(LRU_CACHE_SIZE),
known_transactions: LruCache::new(LRU_CACHE_SIZE),
}
}
}
#[async_trait]
impl NetworkService for NetworkAsyncService {
    /// Encodes `msg` and queues it for delivery to `peer_id`.
    async fn send_peer_message(&self, peer_id: PeerId, msg: PeerMessage) -> Result<()> {
        let data = msg.encode()?;
        let network_message = NetworkMessage { peer_id, data };
        self.tx.unbounded_send(network_message)?;
        Ok(())
    }
    /// Hands a system event to the actor, which relays it to peers.
    async fn broadcast_system_event(&self, event: SystemEvents) -> Result<()> {
        self.addr.send(event).await?;
        Ok(())
    }
    /// Returns this node's own peer id.
    fn identify(&self) -> &PeerId {
        &self.peer_id
    }
    /// Sends a raw RPC request and awaits the matching response; a timeout
    /// task removes the pending future after `time_out` so the caller fails
    /// instead of hanging if the peer never answers.
    async fn send_request_bytes(
        &self,
        peer_id: PeerId,
        message: Vec<u8>,
        time_out: Duration,
    ) -> Result<Vec<u8>> {
        // Timestamp doubles as the request id matching request to response.
        let request_id = get_unix_ts();
        let peer_msg = PeerMessage::RawRPCRequest(request_id, message);
        let data = peer_msg.encode()?;
        let network_message = NetworkMessage {
            peer_id: peer_id.clone(),
            data,
        };
        self.tx.unbounded_send(network_message)?;
        let (tx, rx) = futures::channel::mpsc::channel(1);
        let message_future = MessageFuture::new(rx);
        self.raw_message_processor.add_future(request_id, tx).await;
        info!("send request to {} with id {}", peer_id, request_id);
        let processor = self.raw_message_processor.clone();
        let peer_id_clone = peer_id.clone();
        let task = async move {
            Delay::new(time_out).await;
            processor.remove_future(request_id).await;
            warn!(
                "send request to {} with id {} timeout",
                peer_id_clone, request_id
            );
        };
        self.handle.spawn(task);
        let response = message_future.await;
        info!("receive response from {} with id {}", peer_id, request_id);
        response
    }
    /// Returns every known peer except ourselves.
    async fn peer_set(&self) -> Result<Vec<PeerInfo>> {
        let mut result = vec![];
        for (peer_id, peer) in self.inner.peers.lock().await.iter() {
            if self.peer_id.eq(peer_id) {
                continue;
            }
            info!("peer_id is {},peer_info is {:?}", peer_id, peer);
            result.push(peer.peer_info.clone());
        }
        info!("result is {:?}", result);
        Ok(result)
    }
    /// get all peers and sort by difficulty decreasely.
    async fn best_peer_set(&self) -> Result<Vec<PeerInfo>> {
        let mut peer_infos = self.peer_set().await?;
        peer_infos.sort_by_key(|p| p.total_difficult);
        peer_infos.reverse();
        Ok(peer_infos)
    }
    /// Looks up one peer's info by id.
    async fn get_peer(&self, peer_id: &PeerId) -> Result<Option<PeerInfo>> {
        match self.inner.peers.lock().await.get(peer_id) {
            Some(peer) => Ok(Some(peer.peer_info.clone())),
            None => Ok(None),
        }
    }
    /// Returns our own entry from the peer table (inserted at launch).
    async fn get_self_peer(&self) -> Result<PeerInfo> {
        match self.inner.peers.lock().await.get(&self.peer_id) {
            Some(peer) => Ok(peer.peer_info.clone()),
            None => bail!("Can not find self peer info."),
        }
    }
    /// Returns the highest-difficulty peer other than ourselves, if any.
    async fn best_peer(&self) -> Result<Option<PeerInfo>> {
        let self_peer_id = types::peer_info::PeerId::new(self.peer_id.clone());
        let best_peer_set = self.best_peer_set().await?;
        // The set is already sorted by total difficulty (descending), so the
        // first non-self entry is the best peer. `find` replaces the former
        // `filter(..).next()` chain (clippy::filter_next).
        let best_peer = best_peer_set
            .iter()
            .find(|peer| self_peer_id != peer.get_peer_id());
        Ok(best_peer.cloned())
    }
    /// Number of peers currently tracked (including ourselves).
    async fn get_peer_set_size(&self) -> Result<usize> {
        let size = self.inner.peers.lock().await.len();
        Ok(size)
    }
}
impl NetworkAsyncService {
    /// Test-only access to the underlying actor address.
    #[cfg(test)]
    pub fn network_actor_addr(&self) -> Addr<NetworkActor> {
        self.addr.clone()
    }
}
/// Actix actor that subscribes to bus events and relays blocks/transactions
/// to connected peers. Shares `peers` with the async receive loop.
pub struct NetworkActor {
    network_service: SNetworkService,
    bus: Addr<BusActor>,
    peers: Arc<Mutex<HashMap<PeerId, PeerInfoNet>>>,
    peer_id: PeerId,
}
impl NetworkActor {
    /// Boots the whole network stack: merges persisted peers into the seed
    /// list, starts the libp2p service, spawns the receive loop, and (when
    /// seeds are configured) blocks until the first peer connects.
    pub fn launch(
        node_config: Arc<NodeConfig>,
        bus: Addr<BusActor>,
        handle: Handle,
        genesis_hash: HashValue,
        self_info: PeerInfo,
    ) -> NetworkAsyncService {
        let has_seed = !node_config.network.seeds.is_empty();
        // Load addresses persisted by a previous run (best effort).
        let path = node_config.base.data_dir();
        let file = Path::new(PEERS_FILE_NAME);
        let path = path.join(file);
        let peers_from_json = match File::open(path) {
            Ok(mut f) => {
                let mut contents = String::new();
                match f.read_to_string(&mut contents) {
                    Ok(_n) => Some(serde_json::from_str::<Vec<Multiaddr>>(&contents)),
                    Err(_e) => None,
                }
            }
            Err(_e) => {
                debug!("no peers file ");
                None
            }
        };
        // Seed set = configured seeds union persisted peers (deduplicated).
        let mut network_config = node_config.network.clone();
        let mut seeds = HashSet::new();
        for seed in network_config.seeds {
            seeds.insert(seed);
        }
        if let Some(Ok(addrs)) = peers_from_json {
            info!("load peers from file {:?}", addrs);
            for addr in addrs {
                seeds.insert(addr);
            }
        }
        network_config.seeds = Vec::from_iter(seeds.into_iter());
        let (service, tx, rx, event_rx, tx_command) = build_network_service(
            &network_config,
            handle.clone(),
            genesis_hash,
            self_info.clone(),
        );
        info!(
            "network started at {} with seed {},network address is {}",
            &node_config.network.listen,
            &node_config
                .network
                .seeds
                .iter()
                .fold(String::new(), |acc, arg| acc + arg.to_string().as_str()),
            service.identify()
        );
        let raw_message_processor = MessageProcessor::new();
        let raw_message_processor_clone = raw_message_processor.clone();
        let peer_id = service.identify().clone();
        let peer_id_clone = peer_id.clone();
        let service_clone = service.clone();
        let bus_clone = bus.clone();
        // Seed the peer table with our own info so get_self_peer works.
        let mut peers = HashMap::new();
        peers.insert(
            self_info.peer_id.clone().into(),
            PeerInfoNet::new(self_info),
        );
        let peers = Arc::new(Mutex::new(peers));
        let peers_clone = peers.clone();
        let addr = NetworkActor::create(move |_ctx: &mut Context<NetworkActor>| NetworkActor {
            network_service: service_clone,
            bus: bus_clone,
            peers: peers_clone,
            peer_id: peer_id_clone,
        });
        // When seeds exist, the first Open event must be forwarded through
        // connected_tx so the blocking wait below can complete.
        let (connected_tx, mut connected_rx) = futures::channel::mpsc::channel(1);
        let need_send_event = AtomicBool::new(false);
        if has_seed {
            need_send_event.swap(true, Ordering::Acquire);
        }
        let inner = Inner {
            network_service: service,
            bus,
            handle: handle.clone(),
            raw_message_processor: raw_message_processor_clone,
            peers,
            connected_tx,
            need_send_event,
            node_config,
            peer_id: peer_id.clone(),
        };
        let inner = Arc::new(inner);
        handle.spawn(Self::start(
            handle.clone(),
            inner.clone(),
            rx,
            event_rx,
            tx_command,
        ));
        if has_seed {
            // Block until at least one seed connection is established.
            futures::executor::block_on(async move {
                let event = connected_rx.next().await.unwrap();
                info!("receive event {:?}", event);
            });
        }
        NetworkAsyncService {
            addr,
            raw_message_processor,
            tx,
            peer_id,
            inner,
            handle,
        }
    }
    /// Receive loop: dispatches inbound messages and peer events to `Inner`
    /// until both streams end, then signals shutdown via `close_tx`.
    async fn start(
        handle: Handle,
        inner: Arc<Inner>,
        net_rx: mpsc::UnboundedReceiver<NetworkMessage>,
        event_rx: mpsc::UnboundedReceiver<PeerEvent>,
        close_tx: mpsc::UnboundedSender<()>,
    ) {
        let mut net_rx = net_rx.fuse();
        let mut event_rx = event_rx.fuse();
        loop {
            futures::select! {
                message = net_rx.select_next_some()=>{
                    handle.spawn(Inner::handle_network_receive(inner.clone(),message));
                    info!("receive net message");
                },
                event = event_rx.select_next_some()=>{
                    handle.spawn(Inner::handle_event_receive(inner.clone(),event));
                    info!("receive net event");
                },
                complete => {
                    close_tx.unbounded_send(()).unwrap();
                    warn!("all stream are complete");
                    break;
                }
            }
        }
    }
}
impl Inner {
    /// Decodes one inbound payload and dispatches it; decode failures are
    /// logged and swallowed (a bad peer message must not kill the loop).
    async fn handle_network_receive(inner: Arc<Inner>, network_msg: NetworkMessage) -> Result<()> {
        info!("receive network_message ");
        let message = PeerMessage::decode(&network_msg.data);
        match message {
            Ok(msg) => {
                inner
                    .handle_network_message(network_msg.peer_id, msg)
                    .await?
            }
            Err(e) => {
                warn!("get error {:?}", e);
            }
        }
        Ok(())
    }
    /// Routes a decoded peer message: txn relay, new block, or raw RPC.
    async fn handle_network_message(&self, peer_id: PeerId, msg: PeerMessage) -> Result<()> {
        match msg {
            PeerMessage::UserTransactions(txns) => {
                info!("receive new txn list from {:?} ", peer_id);
                if let Some(peer_info) = self.peers.lock().await.get_mut(&peer_id) {
                    for txn in &txns {
                        let id = txn.crypto_hash();
                        if !peer_info.known_transactions.contains(&id) {
                            peer_info.known_transactions.put(id, ());
                        } else {
                            // NOTE(review): the first already-known txn aborts
                            // the whole batch without broadcasting the rest —
                            // confirm this early-return is intentional.
                            return Ok(());
                        }
                    }
                }
                self.bus
                    .clone()
                    .broadcast(PeerTransactions::new(txns))
                    .await?;
            }
            PeerMessage::Block(block) => {
                let block_hash = block.header().id();
                info!(
                    "receive new block from {:?} with hash {:?}",
                    peer_id, block_hash
                );
                let block_number = block.header().number();
                let total_difficulty = block.get_total_difficulty();
                // Advance the sender's recorded chain head if this block has
                // higher total difficulty.
                if let Some(peer_info) = self.peers.lock().await.get_mut(&peer_id) {
                    debug!(
                        "total_difficulty is {},peer_info is {:?}",
                        total_difficulty, peer_info
                    );
                    if total_difficulty > peer_info.peer_info.total_difficult {
                        peer_info.peer_info.block_number = block_number;
                        peer_info.peer_info.block_id = block_hash;
                        peer_info.peer_info.total_difficult = total_difficulty;
                    }
                }
                self.bus
                    .send(Broadcast {
                        msg: PeerNewBlock::new(peer_id.into(), block.get_block().clone()),
                    })
                    .await?;
            }
            PeerMessage::RawRPCRequest(id, request) => {
                info!("do request.");
                // Forward the request on the bus; a spawned task sends the
                // eventual response back to the requesting peer.
                let (tx, mut rx) = mpsc::channel(1);
                self.bus
                    .send(Broadcast {
                        msg: RawRpcRequestMessage {
                            responder: tx,
                            request,
                        },
                    })
                    .await?;
                let network_service = self.network_service.clone();
                let task = async move {
                    let response = rx.next().await.unwrap();
                    let peer_msg = PeerMessage::RawRPCResponse(id, response);
                    let data = peer_msg.encode().unwrap();
                    network_service.send_message(peer_id, data).await.unwrap();
                };
                self.handle.spawn(task);
                info!("receive rpc request");
            }
            PeerMessage::RawRPCResponse(id, response) => {
                info!("do response.");
                // Wake the future registered under this request id.
                self.raw_message_processor
                    .send_response(id, response)
                    .await?;
            }
        }
        Ok(())
    }
    /// Reacts to connect/disconnect events and rebroadcasts them on the bus.
    async fn handle_event_receive(inner: Arc<Inner>, event: PeerEvent) -> Result<()> {
        info!("event is {:?}", event);
        match event.clone() {
            PeerEvent::Open(peer_id, peer_info) => {
                inner.on_peer_connected(peer_id.into(), peer_info).await?;
                // Forward only the first Open event to unblock `launch`.
                if inner.need_send_event.load(Ordering::Acquire) {
                    info!("send event");
                    let mut connected_tx = inner.connected_tx.clone();
                    connected_tx.send(event.clone()).await?;
                    inner.need_send_event.swap(false, Ordering::Acquire);
                }
            }
            PeerEvent::Close(peer_id) => {
                inner.on_peer_disconnected(peer_id.into()).await;
            }
        }
        inner.bus.send(Broadcast { msg: event }).await?;
        info!("already broadcast event");
        Ok(())
    }
    /// Records a new peer and rewrites the persisted peers file with every
    /// globally-routable address we know (excluding ourselves).
    async fn on_peer_connected(&self, peer_id: PeerId, peer_info: PeerInfo) -> Result<()> {
        self.peers
            .lock()
            .await
            .entry(peer_id.clone())
            .or_insert_with(|| PeerInfoNet::new(peer_info));
        let path = self.node_config.base.data_dir();
        let file = Path::new(PEERS_FILE_NAME);
        let path = path.join(file);
        let mut peers = HashSet::new();
        for peer in self.peers.lock().await.keys() {
            if !self.peer_id.eq(peer) {
                peers.insert(peer.clone());
            }
        }
        if path.exists() {
            std::fs::remove_file(path.clone())?;
        }
        let mut addrs_list = HashSet::new();
        let mut addrs_set = HashSet::new();
        for peer_id in peers {
            let addrs = self.network_service.get_address(peer_id.clone()).await;
            for addr in addrs {
                if Self::check_ip(&addr, &mut addrs_set) {
                    // Tag the address with the peer id so it can be dialed.
                    let new_addr = addr.with(Protocol::P2p(peer_id.clone().into()));
                    addrs_list.insert(new_addr);
                }
            }
        }
        let mut file = std::fs::File::create(path)?;
        let content = serde_json::to_vec(&addrs_list)?;
        file.write_all(&content)?;
        Ok(())
    }
    /// Accepts an address once: rejects duplicates, non-global IPv4, and all
    /// IPv6 addresses; inserts accepted addresses into `addrs_set`.
    fn check_ip(addr: &Multiaddr, addrs_set: &mut HashSet<Multiaddr>) -> bool {
        if addrs_set.contains(addr) {
            return false;
        }
        let components = addr.iter().collect::<Vec<_>>();
        for protocol in components {
            match protocol {
                Protocol::Ip4(ip) => {
                    if !is_global(ip) {
                        return false;
                    }
                }
                Protocol::Ip6(_ip) => {
                    return false;
                }
                _ => {}
            }
        }
        addrs_set.insert(addr.clone());
        true
    }
    /// Drops a disconnected peer from the table.
    async fn on_peer_disconnected(&self, peer_id: PeerId) {
        self.peers.lock().await.remove(&peer_id);
    }
}
impl Actor for NetworkActor {
    type Context = Context<Self>;
    /// Subscribes to txn-propagation events on startup; terminates the actor
    /// if the subscription cannot be established.
    fn started(&mut self, ctx: &mut Self::Context) {
        let txn_propagate_recipient = ctx.address().recipient::<PropagateNewTransactions>();
        self.bus
            .clone()
            .subscribe(txn_propagate_recipient)
            .into_actor(self)
            .then(|res, act, ctx| {
                if let Err(e) = res {
                    error!("fail to subscribe txn propagate events, err: {:?}", e);
                    ctx.terminate();
                }
                async {}.into_actor(act)
            })
            .wait(ctx);
        info!("Network actor started ",);
    }
}
/// Handles system events: relays a new head block to peers that have not
/// seen it and refreshes our advertised chain info.
impl Handler<SystemEvents> for NetworkActor {
    type Result = ();
    fn handle(&mut self, msg: SystemEvents, _ctx: &mut Self::Context) -> Self::Result {
        match msg {
            SystemEvents::NewHeadBlock(block) => {
                info!("broadcast a new block {:?}", block.header().id());
                let id = block.header().id();
                let peers = self.peers.clone();
                let network_service = self.network_service.clone();
                let block_hash = block.header().id();
                let block_number = block.header().number();
                let total_difficulty = block.get_total_difficulty();
                let msg = PeerMessage::Block(block);
                let bytes = msg.encode().unwrap();
                let self_info = PeerInfo::new(
                    self.peer_id.clone().into(),
                    block_number,
                    total_difficulty,
                    block_hash,
                );
                let self_id = self.peer_id.clone();
                Arbiter::spawn(async move {
                    // Keep our own peer-table entry's chain head current.
                    if let Some(peer_info) = peers.lock().await.get_mut(&self_id) {
                        debug!(
                            "total_difficulty is {},peer_info is {:?}",
                            total_difficulty, peer_info
                        );
                        if total_difficulty > peer_info.peer_info.total_difficult {
                            peer_info.peer_info.block_number = block_number;
                            peer_info.peer_info.block_id = block_hash;
                            peer_info.peer_info.total_difficult = total_difficulty;
                        }
                    }
                    // Send the block only to peers that have not seen it.
                    for (peer_id, peer_info) in peers.lock().await.iter_mut() {
                        if !peer_info.known_blocks.contains(&id) {
                            peer_info.known_blocks.put(id.clone(), ());
                        } else {
                            continue;
                        }
                        network_service
                            .send_message(peer_id.clone(), bytes.clone())
                            .await
                            .unwrap();
                    }
                });
                self.network_service.update_self_info(self_info);
            }
            SystemEvents::MinedBlock(_b) => {}
            _ => {}
        };
    }
}
/// Handles txn relay: forwards newly-seen transactions to each peer,
/// remembering what every peer already knows.
impl Handler<PropagateNewTransactions> for NetworkActor {
    type Result = <PropagateNewTransactions as Message>::Result;
    fn handle(&mut self, msg: PropagateNewTransactions, _ctx: &mut Self::Context) -> Self::Result {
        let txns = msg.transactions_to_propagate();
        // false positive
        if txns.is_empty() {
            return;
        }
        info!("propagate new txns, len: {}", txns.len());
        let peers = self.peers.clone();
        let network_service = self.network_service.clone();
        let mut txn_map: HashMap<HashValue, SignedUserTransaction> = HashMap::new();
        for txn in txns {
            txn_map.insert(txn.crypto_hash(), txn);
        }
        let self_peer_id = self.peer_id.clone();
        Arbiter::spawn(async move {
            for (peer_id, peer_info) in peers.lock().await.iter_mut() {
                // Never relay to ourselves (our own table entry).
                if peer_id.eq(&self_peer_id) {
                    continue;
                }
                let mut txns_unhandled = Vec::new();
                for (id, txn) in &txn_map {
                    if !peer_info.known_transactions.contains(id) {
                        peer_info.known_transactions.put(id.clone(), ());
                        txns_unhandled.push(txn.clone());
                    }
                }
                // Previously an empty UserTransactions message was encoded
                // and sent to peers that already knew every txn (and to
                // self); skip those useless sends.
                if txns_unhandled.is_empty() {
                    continue;
                }
                let msg = PeerMessage::UserTransactions(txns_unhandled);
                let bytes = msg.encode().unwrap();
                network_service
                    .send_message(peer_id.clone(), bytes)
                    .await
                    .unwrap();
            }
        });
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use bus::Subscription;
    use futures::sink::SinkExt;
    use futures_timer::Delay;
    use network_p2p::Multiaddr;
    use serde::{Deserialize, Serialize};
    use tokio::runtime::{Handle, Runtime};
    use tokio::task;
    use types::account_address::AccountAddress;
    use types::transaction::SignedUserTransaction;
    /// Minimal payload used for round-tripping a raw RPC request.
    #[rtype(result = "Result<()>")]
    #[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Message, Clone)]
    pub struct TestRequest {
        pub data: HashValue,
    }
    /// PeerInfo must survive an encode/decode round trip.
    #[test]
    fn test_peer_info() {
        let mut peer_info = PeerInfo::default();
        peer_info.block_number = 1;
        let data = peer_info.encode().unwrap();
        let peer_info_decode = PeerInfo::decode(&data).unwrap();
        assert_eq!(peer_info, peer_info_decode);
    }
    /// End-to-end: two nodes on localhost exchange txns and a raw RPC.
    #[ignore]
    #[stest::test]
    fn test_network_with_mock() {
        use std::time::Duration;
        let mut rt = Runtime::new().unwrap();
        let handle = rt.handle().clone();
        let local = task::LocalSet::new();
        let future = System::run_in_tokio("test", &local);
        // Node 1: no seeds, random free port.
        let mut node_config1 = NodeConfig::random_for_test();
        node_config1.network.listen =
            format!("/ip4/127.0.0.1/tcp/{}", config::get_available_port())
                .parse()
                .unwrap();
        let node_config1 = Arc::new(node_config1);
        let (network1, _addr1, _bus1) = build_network(node_config1.clone(), handle.clone());
        // Node 2 uses node 1 as its seed.
        let mut node_config2 = NodeConfig::random_for_test();
        let addr1_hex = network1.peer_id.to_base58();
        let seed: Multiaddr = format!("{}/p2p/{}", &node_config1.network.listen, addr1_hex)
            .parse()
            .unwrap();
        node_config2.network.listen =
            format!("/ip4/127.0.0.1/tcp/{}", config::get_available_port())
                .parse()
                .unwrap();
        node_config2.network.seeds = vec![seed];
        let node_config2 = Arc::new(node_config2);
        let (network2, _addr2, bus2) = build_network(node_config2.clone(), handle.clone());
        Arbiter::spawn(async move {
            // Echo actor on node 2's bus answers RPCs and records txns.
            let network_clone2 = network2.clone();
            let (tx, mut rx) = mpsc::unbounded();
            let response_actor = TestResponseActor::create(network_clone2, tx);
            let addr = response_actor.start();
            let recipient = addr.clone().recipient::<RawRpcRequestMessage>();
            bus2.send(Subscription { recipient }).await.unwrap();
            let recipient = addr.clone().recipient::<PeerEvent>();
            bus2.send(Subscription { recipient }).await.unwrap();
            // subscribe peer txns for network2
            bus2.send(Subscription {
                recipient: addr.clone().recipient::<PeerTransactions>(),
            })
            .await
            .unwrap();
            // Each node propagates one mock txn; node 2 should receive
            // node 1's batch.
            network1
                .network_actor_addr()
                .send(PropagateNewTransactions::from(vec![
                    SignedUserTransaction::mock(),
                ]))
                .await
                .unwrap();
            network2
                .network_actor_addr()
                .send(PropagateNewTransactions::from(vec![
                    SignedUserTransaction::mock(),
                ]))
                .await
                .unwrap();
            let _ = rx.next().await;
            let txns = addr.send(GetPeerTransactions).await.unwrap();
            assert_eq!(1, txns.len());
            // Raw RPC round trip from node 1 to node 2's echo actor.
            let request = TestRequest {
                data: HashValue::random(),
            };
            info!("req :{:?}", request);
            let resp = network1
                .send_request_bytes(
                    network2.identify().clone(),
                    request.encode().unwrap(),
                    Duration::from_secs(1),
                )
                .await;
            info!("resp :{:?}", resp);
            _delay(Duration::from_millis(100)).await;
            System::current().stop();
            ()
        });
        local.block_on(&mut rt, future).unwrap();
    }
    async fn _delay(duration: Duration) {
        Delay::new(duration).await;
    }
    /// Spins up one network node for tests; returns the service, the node's
    /// account address, and its bus.
    fn build_network(
        node_config: Arc<NodeConfig>,
        handle: Handle,
    ) -> (NetworkAsyncService, AccountAddress, Addr<BusActor>) {
        let bus = BusActor::launch();
        let addr =
            AccountAddress::from_public_key(&node_config.network.network_keypair().public_key);
        let network = NetworkActor::launch(
            node_config.clone(),
            bus.clone(),
            handle,
            HashValue::default(),
            PeerInfo::default(),
        );
        (network, addr, bus)
    }
    /// Test helper actor: echoes RPC requests and collects received txns.
    struct TestResponseActor {
        _network_service: NetworkAsyncService,
        peer_txns: Vec<PeerTransactions>,
        // Signals the test when a txn batch has arrived.
        event_tx: mpsc::UnboundedSender<()>,
    }
    impl TestResponseActor {
        fn create(
            network_service: NetworkAsyncService,
            event_tx: mpsc::UnboundedSender<()>,
        ) -> TestResponseActor {
            let instance = Self {
                _network_service: network_service,
                peer_txns: vec![],
                event_tx,
            };
            instance
        }
    }
    impl Actor for TestResponseActor {
        type Context = Context<Self>;
        fn started(&mut self, _ctx: &mut Self::Context) {
            info!("Test actor started ",);
        }
    }
    impl Handler<PeerTransactions> for TestResponseActor {
        type Result = ();
        fn handle(&mut self, msg: PeerTransactions, _ctx: &mut Self::Context) -> Self::Result {
            self.peer_txns.push(msg);
            self.event_tx.unbounded_send(()).unwrap();
        }
    }
    struct GetPeerTransactions;
    impl Message for GetPeerTransactions {
        type Result = Vec<PeerTransactions>;
    }
    impl Handler<GetPeerTransactions> for TestResponseActor {
        type Result = MessageResult<GetPeerTransactions>;
        fn handle(&mut self, _msg: GetPeerTransactions, _ctx: &mut Self::Context) -> Self::Result {
            MessageResult(self.peer_txns.clone())
        }
    }
    impl Handler<RawRpcRequestMessage> for TestResponseActor {
        type Result = Result<()>;
        fn handle(&mut self, msg: RawRpcRequestMessage, ctx: &mut Self::Context) -> Self::Result {
            // Echo the request bytes straight back as the response.
            let mut responder = msg.responder.clone();
            let f = async move {
                responder.send(msg.request).await.unwrap();
            };
            let f = actix::fut::wrap_future(f);
            ctx.spawn(Box::new(f));
            Ok(())
        }
    }
    impl Handler<PeerEvent> for TestResponseActor {
        type Result = Result<()>;
        fn handle(&mut self, msg: PeerEvent, _ctx: &mut Self::Context) -> Self::Result {
            info!("Event is {:?}", msg);
            Ok(())
        }
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
extern crate network_manager_cli_lib as network_manager_cli;
use failure::{format_err, Error, ResultExt};
use fidl::endpoints::{Proxy, ServiceMarker};
use fidl_fuchsia_overnet::{Peer, ServiceConsumerMarker, ServiceConsumerProxy};
use fidl_fuchsia_overnet_protocol::NodeId;
use fidl_fuchsia_router_config::{
RouterAdminMarker, RouterAdminProxy, RouterStateMarker, RouterStateProxy,
};
use fuchsia_async::{self as fasync, TimeoutExt};
use fuchsia_component::client::connect_to_service;
use fuchsia_syslog as syslog;
use fuchsia_zircon::{self as zx, prelude::DurationNum};
use network_manager_cli::{cli::*, opts::*, printer::Printer};
use std::io::{self};
use std::str;
use structopt::StructOpt;
// How long to search for a suitable overnet peer before giving up.
static OVERNET_TIMEOUT_SEC: i64 = 30;
/// Connects to the local network-manager admin and state FIDL services.
fn connect() -> Result<(RouterAdminProxy, RouterStateProxy), Error> {
    let router_admin = connect_to_service::<RouterAdminMarker>()
        .context("failed to connect to network manager admin interface")?;
    let router_state = connect_to_service::<RouterStateMarker>()
        .context("failed to connect to network manager interface")?;
    Ok((router_admin, router_state))
}
/// Opens a channel to service `name` on the given overnet `node` and wraps
/// our end for async use.
fn connect_overnet_node(
    svc: &ServiceConsumerProxy,
    name: &str,
    node: &mut NodeId,
) -> Result<fasync::Channel, Error> {
    let (ch0, ch1) = zx::Channel::create()?;
    svc.connect_to_service(node, name, ch0)?;
    fasync::Channel::from_channel(ch1).map_err(|e| e.into())
}
/// Returns true when `peer` advertises both the RouterAdmin and RouterState
/// services, i.e. it can answer network-manager requests over overnet.
fn supports_network_manager(peer: &Peer) -> bool {
    match peer.description.services {
        // No service list advertised at all.
        None => false,
        // `all` directly on the predicate replaces the former
        // `.map(..).all(|v| v)` detour.
        Some(ref services) => [RouterAdminMarker::NAME, RouterStateMarker::NAME]
            .iter()
            .all(|svc| services.contains(&svc.to_string())),
    }
}
/// Polls the overnet peer list until a peer exposing both network-manager
/// services accepts a connection; loops forever otherwise (the caller wraps
/// this future in a timeout).
async fn connect_overnet() -> Result<(RouterAdminProxy, RouterStateProxy), Error> {
    let svc = connect_to_service::<ServiceConsumerMarker>()?;
    syslog::fx_log_info!("looking for overnet peers...");
    loop {
        let peers = svc.list_peers().await?;
        for mut peer in peers {
            if !supports_network_manager(&peer) {
                continue;
            }
            match (
                connect_overnet_node(&svc, &RouterAdminMarker::NAME, &mut peer.id),
                connect_overnet_node(&svc, &RouterStateMarker::NAME, &mut peer.id),
            ) {
                // Either channel failing means this peer is unusable; try
                // the next one.
                (Err(_), _) | (_, Err(_)) => {
                    continue;
                }
                (Ok(router_admin_channel), Ok(router_state_channel)) => {
                    syslog::fx_log_info!("connected to peer {:?}", peer.id.id);
                    return Ok((
                        RouterAdminProxy::from_channel(router_admin_channel),
                        RouterStateProxy::from_channel(router_state_channel),
                    ));
                }
            }
        }
    }
}
/// CLI entry point: picks a local or overnet connection based on the
/// `--overnet` flag and runs the requested subcommand.
fn main() -> Result<(), Error> {
    syslog::init_with_tags(&["network_manager_cli"]).expect("initialising logging");
    let Opt { overnet, cmd } = Opt::from_args();
    let mut exec = fasync::Executor::new().context("error creating event loop")?;
    let fut = async {
        let (router_admin, router_state) = if overnet {
            // Overnet discovery is unbounded, so cap it with a timeout.
            connect_overnet()
                .on_timeout(fasync::Time::after(OVERNET_TIMEOUT_SEC.seconds()), || {
                    syslog::fx_log_err!("no suitable overnet peers found");
                    Err(format_err!("could not find a suitable overnet peer"))
                })
                .await?
        } else {
            connect()?
        };
        let mut printer = Printer::new(io::stdout());
        run_cmd(cmd, router_admin, router_state, &mut printer).await
    };
    exec.run_singlethreaded(fut)
}
|
// Public submodules re-exported by this crate.
pub mod device_wsi;
pub mod instance_wsi;
pub mod physical_device_properties2;
pub mod prelude;
|
use core::result::Result::Ok;
use nom::branch::{alt, permutation};
use nom::bytes::complete::tag;
use nom::character::complete::{alphanumeric1, line_ending, space0, space1};
use nom::combinator::recognize;
use nom::error::{context, VerboseError};
use nom::multi::{many0, many1};
use nom::sequence::{pair, terminated, tuple};
use nom::IResult;
/// The four MiniZinc scalar base types recognized by `base_type`.
#[derive(Debug, PartialEq, Eq)]
pub enum BaseType {
    BOOL,
    INT,
    FLOAT,
    STRING
}
/// An identifier as parsed by `ident`.
#[derive(Debug, PartialEq, Eq)]
pub struct Ident(pub String);
/// A typed identifier declaration, e.g. `int: x`.
#[derive(Debug, PartialEq, Eq)]
pub struct TiExprAndId {
    pub base_type: BaseType,
    pub ident: Ident
}
/// A parsed model: a flat list of declarations.
#[derive(Debug, PartialEq, Eq)]
pub struct Model {
    pub expressions: Vec<TiExprAndId>
}
/// Parser result type carrying nom's verbose error for better diagnostics.
type Res<T, U> = IResult<T, U, VerboseError<T>>;
/// Parses one of the MiniZinc base types: `bool`, `int`, `float`, `string`.
fn base_type(input: &str) -> Res<&str, BaseType> {
    let mut parser = context(
        "base_type", alt((tag("bool"), tag("int"), tag("float"), tag("string"))));
    let (next_input, name) = parser(input)?;
    Ok((next_input, match name {
        "bool" => BaseType::BOOL,
        "int" => BaseType::INT,
        "float" => BaseType::FLOAT,
        "string" => BaseType::STRING,
        // `alt` above can only yield one of the four tags, so this arm is
        // genuinely impossible; say so instead of a bare panic!().
        _ => unreachable!("base_type matched an unexpected tag"),
    }))
}
/// Parses an identifier: one or more alphanumeric characters or underscores.
fn ident(input: &str) -> Res<&str, Ident> {
    let mut parser =
        context("ident",
                recognize( many1(alt((alphanumeric1, tag("_"))))));
    // TODO: handle full: [A-Za-z][A-Za-z0-9_]* | ’[^’\xa\xd\x0]*’
    let (next_input, name) = parser(input)?;
    Ok((next_input, Ident(name.to_string())))
}
/// Parses a typed identifier declaration, e.g. `int: foo`.
fn ti_expr_and_id(input: &str) -> Res<&str, TiExprAndId> {
    let mut parser =
        context("ti_expr_and_id",
                tuple(( base_type, space0, tag(":"), space1, ident )));
    // Destructure the 5-tuple directly instead of indexing `.0` / `.4`.
    let (next_input, (base_type, _, _, _, ident)) = parser(input)?;
    Ok((next_input, TiExprAndId { base_type, ident }))
}
/// Parses a whole model: zero or more declarations, each terminated by `;`
/// optionally followed by spaces and line endings.
pub fn model(input: &str) -> Res<&str, Model> {
    let separator = pair(tag(";"), permutation((space0, many0(line_ending))));
    let mut parser =
        context("model",
            many0(terminated(ti_expr_and_id, separator))
        );
    parser(input).map(|(next_input, expressions)| {
        (next_input, Model { expressions })
    })
}
#[cfg(test)]
mod tests {
    use nom::{
        Err as NomErr,
        error::{ErrorKind, VerboseError, VerboseErrorKind},
    };
    use crate::minizinc::{BaseType, Ident, ti_expr_and_id, TiExprAndId, model, Model, base_type, ident};
    // --- base_type: one test per accepted keyword, plus the error shape ---
    #[test]
    fn test_base_type_bool() {
        assert_eq!(base_type("bool"), Ok(("", BaseType::BOOL)));
    }
    #[test]
    fn test_base_type_int() {
        assert_eq!(base_type("int"), Ok(("", BaseType::INT)));
    }
    #[test]
    fn test_base_type_float() {
        assert_eq!(base_type("float"), Ok(("", BaseType::FLOAT)));
    }
    #[test]
    fn test_base_type_string() {
        assert_eq!(base_type("string"), Ok(("", BaseType::STRING)));
    }
    // An unknown keyword surfaces the full nom error context stack.
    #[test]
    fn test_base_type_error() {
        assert_eq!(base_type("shazbat"), Err(NomErr::Error(VerboseError {
            errors: vec![
                ("shazbat", VerboseErrorKind::Nom(ErrorKind::Tag)),
                ("shazbat", VerboseErrorKind::Nom(ErrorKind::Alt)),
                ("shazbat", VerboseErrorKind::Context("base_type")),
            ]
        })));
    }
    #[test]
    fn test_ident_simple() {
        assert_eq!(ident("some"), Ok(("", Ident("some".to_string()))));
    }
    #[test]
    fn test_ident_simple_with_underscore() {
        assert_eq!(ident("some_ident"), Ok(("", Ident("some_ident".to_string()))));
    }
    // `many1` requires at least one character, so empty input is an error.
    #[test]
    fn test_ident_error_empty_string() {
        assert_eq!(ident(""), Err(NomErr::Error(VerboseError {
            errors: vec![
                ("", VerboseErrorKind::Nom(ErrorKind::Tag)),
                ("", VerboseErrorKind::Nom(ErrorKind::Alt)),
                ("", VerboseErrorKind::Nom(ErrorKind::Many1)),
                ("", VerboseErrorKind::Context("ident")),
            ]
        })));
    }
    #[test]
    fn test_ti_expr_and_id() {
        assert_eq!(ti_expr_and_id("int: foop"), Ok(("", TiExprAndId {
            base_type: BaseType::INT,
            ident: Ident("foop".to_string())
        })));
    }
    #[test]
    fn test_model_single_ti_expr_and_id() {
        assert_eq!(model("int: foop;"), Ok(("", Model {
            expressions: vec![
                TiExprAndId {
                    base_type: BaseType::INT,
                    ident: Ident("foop".to_string())
                }]
        })));
    }
    #[test]
    fn test_model_multi_ti_expr_and_id_on_same_line() {
        assert_eq!(model("int: foop; float: farp;"), Ok(("", Model {
            expressions: vec![
                TiExprAndId {
                    base_type: BaseType::INT,
                    ident: Ident("foop".to_string())
                },
                TiExprAndId {
                    base_type: BaseType::FLOAT,
                    ident: Ident("farp".to_string())
                }]
        })));
    }
    // Declarations may be separated by blank lines as well as spaces.
    #[test]
    fn test_model_multi_ti_expr_and_id_over_multiple_lines() {
        assert_eq!(model("int: foop;\n\nfloat: farp;"), Ok(("", Model {
            expressions: vec![
                TiExprAndId {
                    base_type: BaseType::INT,
                    ident: Ident("foop".to_string())
                },
                TiExprAndId {
                    base_type: BaseType::FLOAT,
                    ident: Ident("farp".to_string())
                }]
        })));
    }
}
|
// svd2rust-style generated accessors for the `RPR1` (rising-edge pending)
// register: `R` wraps a read snapshot of the 32-bit register, `W` a
// pending write value.
#[doc = "Reader of register RPR1"]
pub type R = crate::R<u32, super::RPR1>;
#[doc = "Writer for register RPR1"]
pub type W = crate::W<u32, super::RPR1>;
#[doc = "Register RPR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::RPR1 {
    type Type = u32;
    // Hardware reset value: 0 (no edge events pending).
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "configurable event inputs x rising edge Pending bit.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RPIF0_A {
    #[doc = "0: No trigger request occurred"]
    NOTPENDING = 0,
    #[doc = "1: Selected trigger request occurred"]
    PENDING = 1,
}
impl From<RPIF0_A> for bool {
    // Maps the enum to its raw bit value
    // (NOTPENDING -> false, PENDING -> true).
    #[inline(always)]
    fn from(variant: RPIF0_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `RPIF0`"]
pub type RPIF0_R = crate::R<bool, RPIF0_A>;
impl RPIF0_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> RPIF0_A {
        // Bit set -> an edge event is pending.
        if self.bits {
            RPIF0_A::PENDING
        } else {
            RPIF0_A::NOTPENDING
        }
    }
    #[doc = "Checks if the value of the field is `NOTPENDING`"]
    #[inline(always)]
    pub fn is_not_pending(&self) -> bool {
        self.variant() == RPIF0_A::NOTPENDING
    }
    #[doc = "Checks if the value of the field is `PENDING`"]
    #[inline(always)]
    pub fn is_pending(&self) -> bool {
        self.variant() == RPIF0_A::PENDING
    }
}
#[doc = "configurable event inputs x rising edge Pending bit.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RPIF0_AW {
    // Write-side enum: this flag is write-one-to-clear, so the only
    // meaningful write value is 1.
    #[doc = "1: Clears pending bit"]
    CLEAR = 1,
}
impl From<RPIF0_AW> for bool {
    // CLEAR (the only variant) maps to true.
    #[inline(always)]
    fn from(variant: RPIF0_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Write proxy for field `RPIF0`"]
pub struct RPIF0_W<'a> {
    // Borrowed register writer; all methods funnel into `bit` below.
    w: &'a mut W,
}
impl<'a> RPIF0_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: RPIF0_AW) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "Clears pending bit"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut W {
        self.variant(RPIF0_AW::CLEAR)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 0 of RPR1: clear it, then OR in the new value.
        const MASK: u32 = 0x01;
        let cleared = self.w.bits & !MASK;
        self.w.bits = cleared | (u32::from(value) & MASK);
        self.w
    }
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF1_A = RPIF0_A;
#[doc = "Reader of field `RPIF1`"]
pub type RPIF1_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF1_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF1`"]
pub struct RPIF1_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF1_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF1_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF2_A = RPIF0_A;
#[doc = "Reader of field `RPIF2`"]
pub type RPIF2_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF2_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF2`"]
pub struct RPIF2_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF2_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF2_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF3_A = RPIF0_A;
#[doc = "Reader of field `RPIF3`"]
pub type RPIF3_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF3_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF3`"]
pub struct RPIF3_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF3_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF3_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF4_A = RPIF0_A;
#[doc = "Reader of field `RPIF4`"]
pub type RPIF4_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF4_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF4`"]
pub struct RPIF4_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF4_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF4_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit"]
pub type RPIF5_A = RPIF0_A;
#[doc = "Reader of field `RPIF5`"]
pub type RPIF5_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit"]
pub type RPIF5_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF5`"]
pub struct RPIF5_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF5_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF5_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF6_A = RPIF0_A;
#[doc = "Reader of field `RPIF6`"]
pub type RPIF6_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF6_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF6`"]
pub struct RPIF6_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF6_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF6_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF7_A = RPIF0_A;
#[doc = "Reader of field `RPIF7`"]
pub type RPIF7_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF7_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF7`"]
pub struct RPIF7_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF7_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF7_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF8_A = RPIF0_A;
#[doc = "Reader of field `RPIF8`"]
pub type RPIF8_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF8_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF8`"]
pub struct RPIF8_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF8_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF8_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF9_A = RPIF0_A;
#[doc = "Reader of field `RPIF9`"]
pub type RPIF9_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF9_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF9`"]
pub struct RPIF9_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF9_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF9_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF10_A = RPIF0_A;
#[doc = "Reader of field `RPIF10`"]
pub type RPIF10_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF10_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF10`"]
pub struct RPIF10_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF10_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF10_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF11_A = RPIF0_A;
#[doc = "Reader of field `RPIF11`"]
pub type RPIF11_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF11_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF11`"]
pub struct RPIF11_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF11_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF11_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF12_A = RPIF0_A;
#[doc = "Reader of field `RPIF12`"]
pub type RPIF12_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF12_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF12`"]
pub struct RPIF12_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF12_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF12_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF13_A = RPIF0_A;
#[doc = "Reader of field `RPIF13`"]
pub type RPIF13_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF13_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF13`"]
pub struct RPIF13_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF13_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF13_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF14_A = RPIF0_A;
#[doc = "Reader of field `RPIF14`"]
pub type RPIF14_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF14_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF14`"]
pub struct RPIF14_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF14_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF14_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF15_A = RPIF0_A;
#[doc = "Reader of field `RPIF15`"]
pub type RPIF15_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF15_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF15`"]
pub struct RPIF15_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF15_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF15_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF16_A = RPIF0_A;
#[doc = "Reader of field `RPIF16`"]
pub type RPIF16_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF16_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF16`"]
pub struct RPIF16_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF16_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF16_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF17_A = RPIF0_A;
#[doc = "Reader of field `RPIF17`"]
pub type RPIF17_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF17_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF17`"]
pub struct RPIF17_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF17_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF17_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
self.w
}
}
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF18_A = RPIF0_A;
#[doc = "Reader of field `RPIF18`"]
pub type RPIF18_R = crate::R<bool, RPIF0_A>;
#[doc = "configurable event inputs x rising edge Pending bit."]
pub type RPIF18_AW = RPIF0_AW;
#[doc = "Write proxy for field `RPIF18`"]
pub struct RPIF18_W<'a> {
w: &'a mut W,
}
impl<'a> RPIF18_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RPIF18_AW) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Clears pending bit"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(RPIF0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
self.w
}
}
impl R {
#[doc = "Bit 0 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif0(&self) -> RPIF0_R {
RPIF0_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif1(&self) -> RPIF1_R {
RPIF1_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif2(&self) -> RPIF2_R {
RPIF2_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif3(&self) -> RPIF3_R {
RPIF3_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif4(&self) -> RPIF4_R {
RPIF4_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - configurable event inputs x rising edge Pending bit"]
#[inline(always)]
pub fn rpif5(&self) -> RPIF5_R {
RPIF5_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif6(&self) -> RPIF6_R {
RPIF6_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif7(&self) -> RPIF7_R {
RPIF7_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 8 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif8(&self) -> RPIF8_R {
RPIF8_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 9 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif9(&self) -> RPIF9_R {
RPIF9_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 10 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif10(&self) -> RPIF10_R {
RPIF10_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 11 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif11(&self) -> RPIF11_R {
RPIF11_R::new(((self.bits >> 11) & 0x01) != 0)
}
#[doc = "Bit 12 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif12(&self) -> RPIF12_R {
RPIF12_R::new(((self.bits >> 12) & 0x01) != 0)
}
#[doc = "Bit 13 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif13(&self) -> RPIF13_R {
RPIF13_R::new(((self.bits >> 13) & 0x01) != 0)
}
#[doc = "Bit 14 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif14(&self) -> RPIF14_R {
RPIF14_R::new(((self.bits >> 14) & 0x01) != 0)
}
#[doc = "Bit 15 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif15(&self) -> RPIF15_R {
RPIF15_R::new(((self.bits >> 15) & 0x01) != 0)
}
#[doc = "Bit 16 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif16(&self) -> RPIF16_R {
RPIF16_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 17 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif17(&self) -> RPIF17_R {
RPIF17_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 18 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif18(&self) -> RPIF18_R {
RPIF18_R::new(((self.bits >> 18) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif0(&mut self) -> RPIF0_W {
RPIF0_W { w: self }
}
#[doc = "Bit 1 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif1(&mut self) -> RPIF1_W {
RPIF1_W { w: self }
}
#[doc = "Bit 2 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif2(&mut self) -> RPIF2_W {
RPIF2_W { w: self }
}
#[doc = "Bit 3 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif3(&mut self) -> RPIF3_W {
RPIF3_W { w: self }
}
#[doc = "Bit 4 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif4(&mut self) -> RPIF4_W {
RPIF4_W { w: self }
}
#[doc = "Bit 5 - configurable event inputs x rising edge Pending bit"]
#[inline(always)]
pub fn rpif5(&mut self) -> RPIF5_W {
RPIF5_W { w: self }
}
#[doc = "Bit 6 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif6(&mut self) -> RPIF6_W {
RPIF6_W { w: self }
}
#[doc = "Bit 7 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif7(&mut self) -> RPIF7_W {
RPIF7_W { w: self }
}
#[doc = "Bit 8 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif8(&mut self) -> RPIF8_W {
RPIF8_W { w: self }
}
#[doc = "Bit 9 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif9(&mut self) -> RPIF9_W {
RPIF9_W { w: self }
}
#[doc = "Bit 10 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif10(&mut self) -> RPIF10_W {
RPIF10_W { w: self }
}
#[doc = "Bit 11 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif11(&mut self) -> RPIF11_W {
RPIF11_W { w: self }
}
#[doc = "Bit 12 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif12(&mut self) -> RPIF12_W {
RPIF12_W { w: self }
}
#[doc = "Bit 13 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif13(&mut self) -> RPIF13_W {
RPIF13_W { w: self }
}
#[doc = "Bit 14 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif14(&mut self) -> RPIF14_W {
RPIF14_W { w: self }
}
#[doc = "Bit 15 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif15(&mut self) -> RPIF15_W {
RPIF15_W { w: self }
}
#[doc = "Bit 16 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif16(&mut self) -> RPIF16_W {
RPIF16_W { w: self }
}
#[doc = "Bit 17 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif17(&mut self) -> RPIF17_W {
RPIF17_W { w: self }
}
#[doc = "Bit 18 - configurable event inputs x rising edge Pending bit."]
#[inline(always)]
pub fn rpif18(&mut self) -> RPIF18_W {
RPIF18_W { w: self }
}
}
|
fn main() {
    // Base vector; the clones below each own independent storage, so
    // mutating one copy does not affect the original or other copies.
    let memory: Vec<i32> = vec![1, 2, 3];

    // Independent copy: writing through `mem1` leaves `memory` untouched.
    let mut mem1 = memory.clone();
    mem1[1] = 2;

    // Second copy taken from the unmodified original; it is never
    // mutated, so it does not need `mut` (fixes the unused-mut warning).
    let mem2 = memory.clone();
    println!("{}", mem2[1])
}
|
pub struct Solution;
impl Solution {
    /// LeetCode 218 "The Skyline Problem": returns the skyline key points
    /// `[x, height]` for a set of buildings `[left, right, height]`.
    ///
    /// Strategy: `pts` is a singly linked list of not-yet-emitted skyline
    /// points to the right of the current sweep position; each building is
    /// merged into it in place. Assumes `buildings` is sorted by left edge
    /// (the LeetCode input contract) — TODO confirm at the call site.
    pub fn get_skyline(buildings: Vec<Vec<i32>>) -> Vec<Vec<i32>> {
        if buildings.is_empty() {
            return Vec::new();
        }
        let mut res = Vec::new();
        let mut pts: List = None;
        for b in buildings {
            let (l, r, h) = (b[0], b[1], b[2]);
            // Flush list points strictly left of this building's left edge:
            // they can no longer change, so emit them. The emitted height is
            // the height of the *next* point (0 if the list becomes empty).
            while pts.as_ref().map_or(false, |node| node.point.x < l) {
                let x = pts.as_ref().unwrap().point.x;
                pts = pts.unwrap().next;
                let y = pts.as_ref().map_or(0, |node| node.point.y);
                res.push(vec![x, y]);
            }
            // If this building rises above the current front height, it
            // starts a new key point at `l`; replace a point already at `l`
            // (same x, lower height) instead of duplicating it.
            if pts.as_ref().map_or(true, |node| node.point.y < h) {
                if res.last().map_or(false, |v| v[0] == l) {
                    res.pop();
                }
                res.push(vec![l, h]);
            }
            // Walk past points inside [.., r) that are taller than `h`:
            // they survive unchanged.
            let mut cur = &mut pts;
            while cur
                .as_ref()
                .map_or(false, |node| node.point.x < r && node.point.y > h)
            {
                cur = &mut cur.as_mut().unwrap().next;
            }
            // Detach the rest; drop points within the building's span that
            // it shadows (x <= r and height <= h).
            let mut tail = cur.take();
            while tail
                .as_ref()
                .map_or(false, |node| node.point.x <= r && node.point.y <= h)
            {
                tail = tail.unwrap().next;
            }
            // If the skyline actually drops at `r` (nothing taller resumes
            // there), insert the building's own right-edge point (r, h).
            if tail
                .as_ref()
                .map_or(true, |node| node.point.x > r && node.point.y < h)
            {
                *cur = Some(Box::new(ListNode::new(Point::new(r, h))));
                cur = &mut cur.as_mut().unwrap().next;
            }
            // Reattach the surviving suffix.
            *cur = tail;
        }
        // Drain remaining points; the final point's successor height is 0,
        // producing the closing `[x, 0]` key point.
        while pts.is_some() {
            let x = pts.as_ref().unwrap().point.x;
            pts = pts.unwrap().next;
            let y = pts.as_ref().map_or(0, |node| node.point.y);
            res.push(vec![x, y]);
        }
        res
    }
}
/// A 2-D skyline key point (building corner): x position and height.
#[derive(Clone, Copy)]
struct Point {
    x: i32,
    y: i32,
}
impl Point {
    /// Constructs a point from its coordinates.
    fn new(x: i32, y: i32) -> Self {
        Self { x, y }
    }
}
/// Singly linked list of skyline points; `None` is the empty list.
type List = Option<Box<ListNode>>;
struct ListNode {
    point: Point, // this node's skyline key point
    next: List,   // remainder of the list (points further right)
}
impl ListNode {
    /// Creates a detached node holding `point`.
    fn new(point: Point) -> Self {
        Self { point, next: None }
    }
}
// End-to-end cases for get_skyline, including the LeetCode sample,
// adjacent equal-height buildings (no spurious key point at the seam),
// fully overlapping buildings, and an alternating-height sequence.
#[test]
fn test0218() {
    fn case(buildings: Vec<Vec<i32>>, want: Vec<Vec<i32>>) {
        let got = Solution::get_skyline(buildings);
        assert_eq!(got, want);
    }
    case(
        vec![
            vec![2, 9, 10],
            vec![3, 7, 15],
            vec![5, 12, 12],
            vec![15, 20, 10],
            vec![19, 24, 8],
        ],
        vec![
            vec![2, 10],
            vec![3, 15],
            vec![7, 12],
            vec![12, 0],
            vec![15, 10],
            vec![20, 8],
            vec![24, 0],
        ],
    );
    case(
        vec![vec![0, 2, 3], vec![2, 5, 3]],
        vec![vec![0, 3], vec![5, 0]],
    );
    case(
        vec![vec![1, 2, 1], vec![1, 2, 2], vec![1, 2, 3]],
        vec![vec![1, 3], vec![2, 0]],
    );
    case(
        vec![
            vec![0, 5, 7],
            vec![5, 10, 7],
            vec![5, 10, 12],
            vec![10, 15, 7],
            vec![15, 20, 7],
            vec![15, 20, 12],
            vec![20, 25, 7],
        ],
        vec![
            vec![0, 7],
            vec![5, 12],
            vec![10, 7],
            vec![15, 12],
            vec![20, 7],
            vec![25, 0],
        ],
    )
}
|
use log::{error, info};
use std::error::Error;
use std::io::{Read, Write};
use std::net::TcpStream;
use std::path::Path;
use std::{fmt, fs};
/// Request-Line = Method SP Request-URI SP HTTP-Version CRLF
///
/// Parsed view of an HTTP request line; all fields borrow from the raw
/// request buffer (lifetime `'a`).
struct Request<'a> {
    method: &'a str,       // e.g. "GET" (the only method accepted)
    uri: &'a Path,         // request target, treated as a filesystem path
    http_version: &'a str, // e.g. "HTTP/1.1"
}
impl<'a> fmt::Display for Request<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{} {} {}\r\n",
self.method,
self.uri.display(),
self.http_version
)
}
}
/// Parses an HTTP Request-Line (`Method SP Request-URI SP HTTP-Version`).
///
/// # Errors
/// Returns an error if any component is missing, the method is not `GET`,
/// the target does not exist under `ROOT`, or the version is not `HTTP/1.1`.
fn parse_request_line(request: &str) -> Result<Request, Box<dyn Error>> {
    let mut parts = request.split_whitespace();

    let method = parts.next().ok_or("Method not specified")?;
    // We only accept GET requests
    if method != "GET" {
        Err("Unsupported method")?;
    }

    // Keep the raw &str: a Path constructed from a str is always valid
    // UTF-8, so no panicking `to_str().expect(..)` round-trip is needed.
    let raw_uri = parts.next().ok_or("URI not specified")?;
    let uri = Path::new(raw_uri);

    // Document root the server serves from.
    const ROOT: &str = "/root/lab/rust/linda";
    // SECURITY(review): `raw_uri` is attacker-controlled and may contain
    // `..` segments that escape ROOT (path traversal). Canonicalize the
    // joined path and verify it still starts with ROOT before serving
    // real files from it.
    if !Path::new(&format!("{}{}", ROOT, raw_uri)).exists() {
        Err("Requested resource does not exist")?;
    }

    let http_version = parts.next().ok_or("HTTP version not specified")?;
    if http_version != "HTTP/1.1" {
        Err("Unsupported HTTP version, use HTTP/1.1")?;
    }

    Ok(Request {
        method,
        uri,
        http_version,
    })
}
/// Handles one client connection: reads the request, parses its request
/// line, and answers any valid GET with the contents of `index.html`.
///
/// # Errors
/// Propagates I/O errors from the stream and from reading the body file
/// (the original version `unwrap()`ed these despite returning `Result`).
pub fn handle_connection(mut stream: TcpStream) -> Result<(), Box<dyn Error>> {
    // 512 bytes is enough for a toy HTTP server
    let mut buffer = [0; 512];
    // Remember how many bytes were actually read so we parse only real
    // data, not the zero padding left in the rest of the buffer.
    let n = stream.read(&mut buffer)?;
    let request = String::from_utf8_lossy(&buffer[..n]);
    // An empty read yields no lines; treat that as a bad request rather
    // than panicking on `unwrap()`.
    let request_line = request.lines().next().ok_or("Empty request")?;
    match parse_request_line(request_line) {
        Ok(request) => {
            info!("Request: {}", &request);
            // NOTE(review): the parsed URI is ignored — every valid GET is
            // answered with index.html.
            let contents = fs::read_to_string("index.html")?;
            let response = format!("{}{}", "HTTP/1.1 200 OK\r\n\r\n", contents);
            // write_all guarantees the whole response goes out; a plain
            // `write` may legally write only a prefix.
            stream.write_all(response.as_bytes())?;
            stream.flush()?;
        }
        Err(e) => error!("Bad request: {}", e),
    }
    Ok(())
}
|
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::match_function_call;
use clippy_utils::paths::FUTURE_FROM_GENERATOR;
use clippy_utils::source::{position_before_rarrow, snippet_block, snippet_opt};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
use rustc_hir::{
AsyncGeneratorKind, Block, Body, Expr, ExprKind, FnDecl, FnRetTy, GeneratorKind, GenericArg, GenericBound, HirId,
IsAsync, ItemKind, LifetimeName, TraitRef, Ty, TyKind, TypeBindingKind,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::{sym, Span};
declare_clippy_lint! {
    /// ### What it does
    /// It checks for manual implementations of `async` functions.
    ///
    /// ### Why is this bad?
    /// It's more idiomatic to use the dedicated syntax.
    ///
    /// ### Example
    /// ```rust
    /// use std::future::Future;
    ///
    /// fn foo() -> impl Future<Output = i32> { async { 42 } }
    /// ```
    /// Use instead:
    /// ```rust
    /// async fn foo() -> i32 { 42 }
    /// ```
    #[clippy::version = "1.45.0"]
    pub MANUAL_ASYNC_FN,
    style,
    "manual implementations of `async` functions can be simplified using the dedicated syntax"
}
// Registers `ManualAsyncFn` as the lint pass that owns MANUAL_ASYNC_FN.
declare_lint_pass!(ManualAsyncFn => [MANUAL_ASYNC_FN]);
impl<'tcx> LateLintPass<'tcx> for ManualAsyncFn {
    /// Fires on non-`async` functions that return `impl Future` and whose body
    /// is exactly one desugared `async` block, suggesting `async fn` syntax.
    fn check_fn(
        &mut self,
        cx: &LateContext<'tcx>,
        kind: FnKind<'tcx>,
        decl: &'tcx FnDecl<'_>,
        body: &'tcx Body<'_>,
        span: Span,
        _: HirId,
    ) {
        if_chain! {
            // Only plain (non-async) functions are candidates.
            if let Some(header) = kind.header();
            if header.asyncness == IsAsync::NotAsync;
            // Check that this function returns `impl Future`
            if let FnRetTy::Return(ret_ty) = decl.output;
            if let Some((trait_ref, output_lifetimes)) = future_trait_ref(cx, ret_ty);
            if let Some(output) = future_output_ty(trait_ref);
            // The rewrite is only sound if the returned future captures all
            // input lifetimes (as `async fn` does implicitly).
            if captures_all_lifetimes(decl.inputs, &output_lifetimes);
            // Check that the body of the function consists of one async block
            if let ExprKind::Block(block, _) = body.value.kind;
            if block.stmts.is_empty();
            if let Some(closure_body) = desugared_async_block(cx, block);
            then {
                // Span covering everything up to and including the return type.
                let header_span = span.with_hi(ret_ty.span.hi());
                span_lint_and_then(
                    cx,
                    MANUAL_ASYNC_FN,
                    header_span,
                    "this function can be simplified using the `async fn` syntax",
                    |diag| {
                        if_chain! {
                            if let Some(header_snip) = snippet_opt(cx, header_span);
                            if let Some(ret_pos) = position_before_rarrow(&header_snip);
                            if let Some((ret_sugg, ret_snip)) = suggested_ret(cx, output);
                            then {
                                let help = format!("make the function `async` and {}", ret_sugg);
                                // First suggestion: rewrite the signature as `async fn`.
                                diag.span_suggestion(
                                    header_span,
                                    &help,
                                    format!("async {}{}", &header_snip[..ret_pos], ret_snip),
                                    Applicability::MachineApplicable
                                );
                                // Second suggestion: inline the async block's body
                                // into the function body.
                                let body_snip = snippet_block(cx, closure_body.value.span, "..", Some(block.span));
                                diag.span_suggestion(
                                    block.span,
                                    "move the body of the async block to the enclosing function",
                                    body_snip.to_string(),
                                    Applicability::MachineApplicable
                                );
                            }
                        }
                    },
                );
            }
        }
    }
}
/// If `ty` is an `impl Future` opaque type, returns the `Future` trait ref
/// together with the lifetimes named in the opaque type's generic args.
fn future_trait_ref<'tcx>(
    cx: &LateContext<'tcx>,
    ty: &'tcx Ty<'tcx>,
) -> Option<(&'tcx TraitRef<'tcx>, Vec<LifetimeName>)> {
    if_chain! {
        if let TyKind::OpaqueDef(item_id, bounds) = ty.kind;
        let item = cx.tcx.hir().item(item_id);
        if let ItemKind::OpaqueTy(opaque) = &item.kind;
        // Take the first trait bound of the opaque type...
        if let Some(trait_ref) = opaque.bounds.iter().find_map(|bound| {
            if let GenericBound::Trait(poly, _) = bound {
                Some(&poly.trait_ref)
            } else {
                None
            }
        });
        // ...and require it to be the `Future` lang item.
        if trait_ref.trait_def_id() == cx.tcx.lang_items().future_trait();
        then {
            // Collect the lifetimes that appear among the opaque type's args.
            let output_lifetimes = bounds
                .iter()
                .filter_map(|bound| {
                    if let GenericArg::Lifetime(lt) = bound {
                        Some(lt.name)
                    } else {
                        None
                    }
                })
                .collect();
            return Some((trait_ref, output_lifetimes));
        }
    }
    None
}
fn future_output_ty<'tcx>(trait_ref: &'tcx TraitRef<'tcx>) -> Option<&'tcx Ty<'tcx>> {
if_chain! {
if let Some(segment) = trait_ref.path.segments.last();
if let Some(args) = segment.args;
if args.bindings.len() == 1;
let binding = &args.bindings[0];
if binding.ident.name == sym::Output;
if let TypeBindingKind::Equality{ty: output} = binding.kind;
then {
return Some(output)
}
}
None
}
/// Checks whether the `impl Future` return type captures every lifetime that
/// appears in the function's reference parameters.
fn captures_all_lifetimes(inputs: &[Ty<'_>], output_lifetimes: &[LifetimeName]) -> bool {
    // Gather the lifetimes of all `&`-typed parameters.
    let mut input_lifetimes = Vec::new();
    for ty in inputs {
        if let TyKind::Rptr(lt, _) = ty.kind {
            input_lifetimes.push(lt.name);
        }
    }
    // The lint should trigger in one of these cases:
    // - There are no input lifetimes
    if input_lifetimes.is_empty() {
        return true;
    }
    // - There's only one output lifetime bound using `+ '_`
    if output_lifetimes.len() == 1 && matches!(output_lifetimes[0], LifetimeName::Underscore) {
        return true;
    }
    // - All input lifetimes are explicitly bound to the output
    input_lifetimes
        .iter()
        .all(|in_lt| output_lifetimes.contains(in_lt))
}
/// If `block` consists solely of a desugared `async { .. }` expression
/// (i.e. a `from_generator(<closure>)` call), returns the closure's body.
fn desugared_async_block<'tcx>(cx: &LateContext<'tcx>, block: &'tcx Block<'tcx>) -> Option<&'tcx Body<'tcx>> {
    if_chain! {
        if let Some(block_expr) = block.expr;
        // `async { .. }` desugars to `std::future::from_generator(closure)`.
        if let Some(args) = match_function_call(cx, block_expr, &FUTURE_FROM_GENERATOR);
        if args.len() == 1;
        if let Expr{kind: ExprKind::Closure(_, _, body_id, ..), ..} = args[0];
        let closure_body = cx.tcx.hir().body(body_id);
        // Only accept generators produced by an async *block* specifically.
        if closure_body.generator_kind == Some(GeneratorKind::Async(AsyncGeneratorKind::Block));
        then {
            return Some(closure_body);
        }
    }
    None
}
/// Builds the return-type part of the `async fn` suggestion: drop the return
/// type entirely for `Output = ()`, otherwise emit `-> <Output>` directly.
fn suggested_ret(cx: &LateContext<'_>, output: &Ty<'_>) -> Option<(&'static str, String)> {
    if let TyKind::Tup(tys) = output.kind {
        if tys.is_empty() {
            return Some(("remove the return type", String::new()));
        }
    }
    let snip = snippet_opt(cx, output.span)?;
    Some(("return the output of the future directly", format!(" -> {}", snip)))
}
|
use crate::raw_volume::RawVolume;
use crate::region::Region;
use crate::sampler::Sampler;
use crate::volume::{PositionError, Volume};
use crate::voxel::Voxel;
use vek::vec3::Vec3;
// Sampler over a `RawVolume`: caches the current position, its per-axis
// validity, and (when inside the region) the linear offset into `data`.
pub struct RawVolumeSampler<'a, T>
where
    T: Voxel,
{
    // Borrowed voxel storage of the underlying volume.
    data: &'a Vec<T>,
    // Region within which positions map to valid offsets.
    valid_region: Region,
    x_pos: i32,
    y_pos: i32,
    z_pos: i32,
    // Linear index of (x_pos, y_pos, z_pos) into `data`; None when outside.
    current_offset: Option<usize>,
    current_x_valid: bool,
    current_y_valid: bool,
    current_z_valid: bool,
    // Value returned for any lookup outside the valid region.
    border_value: T,
}
impl<'a, T> RawVolumeSampler<'a, T>
where
    T: Voxel,
{
    /// Builds a sampler for `volume`, positioned at the lower corner of its
    /// region (which corresponds to linear offset 0).
    pub fn new(volume: &'a RawVolume<T>) -> Self {
        let valid_region = volume.get_region().clone();
        RawVolumeSampler {
            data: volume.get_data(),
            x_pos: valid_region.lower_x,
            y_pos: valid_region.lower_y,
            z_pos: valid_region.lower_z,
            valid_region,
            current_offset: Some(0),
            current_x_valid: true,
            current_y_valid: true,
            current_z_valid: true,
            border_value: volume.get_border_value(),
        }
    }
    /// Converts world coordinates to a linear index into `data`; fails when
    /// the point lies outside the valid region.
    fn get_offset(&self, x: i32, y: i32, z: i32) -> Result<usize, PositionError> {
        if !self.valid_region.contains_point(x, y, z) {
            return Err(PositionError {});
        }
        let corner = self.valid_region.get_lower_corner();
        let width = self.valid_region.get_width();
        let height = self.valid_region.get_height();
        let (local_x, local_y, local_z) = (x - corner.x, y - corner.y, z - corner.z);
        Ok((local_x + local_y * width + local_z * width * height) as usize)
    }
    // Boundary checks: whether one more step along an axis stays in-region.
    fn can_go_neg_x(&self, x: i32) -> bool {
        self.valid_region.lower_x < x
    }
    fn can_go_neg_y(&self, y: i32) -> bool {
        self.valid_region.lower_y < y
    }
    fn can_go_neg_z(&self, z: i32) -> bool {
        self.valid_region.lower_z < z
    }
    fn can_go_pos_x(&self, x: i32) -> bool {
        self.valid_region.upper_x > x
    }
    fn can_go_pos_y(&self, y: i32) -> bool {
        self.valid_region.upper_y > y
    }
    fn can_go_pos_z(&self, z: i32) -> bool {
        self.valid_region.upper_z > z
    }
}
// Sampler implementation: maintains a current position plus a cached linear
// offset so that single-step moves and neighbour lookups reduce to simple
// offset arithmetic (x step = 1, y step = width, z step = area).
impl<'a, T> Sampler<T> for RawVolumeSampler<'a, T>
where
    T: Voxel,
{
    fn get_position(&self) -> Vec3<i32> {
        Vec3 {
            x: self.x_pos,
            y: self.y_pos,
            z: self.z_pos,
        }
    }
    // Voxel at the current position, or the border value when the position
    // lies outside the valid region (cached offset is None in that case).
    fn get_voxel(&self) -> T {
        match self.current_offset {
            Some(offset) => self.data[offset],
            _ => self.border_value,
        }
    }
    // Jump to an arbitrary position, recomputing per-axis validity and the
    // cached offset from scratch.
    fn set_position(&mut self, x: i32, y: i32, z: i32) {
        self.x_pos = x;
        self.y_pos = y;
        self.z_pos = z;
        self.current_x_valid = self.valid_region.contains_point_in_x(x);
        self.current_y_valid = self.valid_region.contains_point_in_y(y);
        self.current_z_valid = self.valid_region.contains_point_in_z(z);
        match self.get_offset(x, y, z) {
            Ok(offset) => self.current_offset = Some(offset),
            _ => self.current_offset = None,
        }
    }
    // Single-step moves: when both the old and the new position are valid,
    // the cached offset is adjusted incrementally; otherwise it is dropped.
    fn move_positive_x(&mut self) {
        let was_valid = self.is_current_position_valid();
        self.x_pos = self.x_pos + 1;
        self.current_x_valid = self.valid_region.contains_point_in_x(self.x_pos);
        if was_valid && self.is_current_position_valid() {
            self.current_offset = Some(self.current_offset.unwrap() + 1);
        } else {
            self.current_offset = None
        }
    }
    fn move_positive_y(&mut self) {
        let was_valid = self.is_current_position_valid();
        self.y_pos = self.y_pos + 1;
        self.current_y_valid = self.valid_region.contains_point_in_y(self.y_pos);
        if was_valid && self.is_current_position_valid() {
            self.current_offset =
                Some(self.current_offset.unwrap() + self.valid_region.get_width() as usize);
        } else {
            self.current_offset = None
        }
    }
    fn move_positive_z(&mut self) {
        let was_valid = self.is_current_position_valid();
        self.z_pos = self.z_pos + 1;
        self.current_z_valid = self.valid_region.contains_point_in_z(self.z_pos);
        if was_valid && self.is_current_position_valid() {
            self.current_offset =
                Some(self.current_offset.unwrap() + self.valid_region.get_area() as usize)
        } else {
            self.current_offset = None
        }
    }
    fn move_negative_x(&mut self) {
        let was_valid = self.is_current_position_valid();
        self.x_pos = self.x_pos - 1;
        self.current_x_valid = self.valid_region.contains_point_in_x(self.x_pos);
        if was_valid && self.is_current_position_valid() {
            self.current_offset = Some(self.current_offset.unwrap() - 1);
        } else {
            self.current_offset = None
        }
    }
    fn move_negative_y(&mut self) {
        let was_valid = self.is_current_position_valid();
        self.y_pos = self.y_pos - 1;
        self.current_y_valid = self.valid_region.contains_point_in_y(self.y_pos);
        if was_valid && self.is_current_position_valid() {
            self.current_offset =
                Some(self.current_offset.unwrap() - self.valid_region.get_width() as usize);
        } else {
            self.current_offset = None
        }
    }
    fn move_negative_z(&mut self) {
        let was_valid = self.is_current_position_valid();
        self.z_pos = self.z_pos - 1;
        self.current_z_valid = self.valid_region.contains_point_in_z(self.z_pos);
        if was_valid && self.is_current_position_valid() {
            self.current_offset =
                Some(self.current_offset.unwrap() - self.valid_region.get_area() as usize)
        } else {
            self.current_offset = None
        }
    }
    fn is_current_position_valid(&self) -> bool {
        self.current_x_valid && self.current_y_valid && self.current_z_valid
    }
    // peek_voxel_* read a neighbour without moving. Naming: per axis, `1n` is
    // -1, `0p` is 0, `1p` is +1; out-of-region neighbours yield the border
    // value. NOTE(review): the offset math is done in i32 — assumes the linear
    // offset fits in i32 (i.e. volumes smaller than 2^31 voxels); confirm for
    // very large volumes.
    fn peek_voxel_1nx1ny1nz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_neg_x(self.x_pos)
            && self.can_go_neg_y(self.y_pos)
            && self.can_go_neg_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32
                - 1
                - self.valid_region.get_width()
                - self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1nx1ny0pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_neg_x(self.x_pos)
            && self.can_go_neg_y(self.y_pos)
        {
            self.data
                [(self.current_offset.unwrap() as i32 - 1 - self.valid_region.get_width()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1nx1ny1pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_neg_x(self.x_pos)
            && self.can_go_neg_y(self.y_pos)
            && self.can_go_pos_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32 - 1 - self.valid_region.get_width()
                + self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1nx0py1nz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_neg_x(self.x_pos)
            && self.can_go_neg_z(self.z_pos)
        {
            self.data
                [(self.current_offset.unwrap() as i32 - 1 - self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1nx0py0pz(&self) -> T {
        if self.is_current_position_valid() && self.can_go_neg_x(self.x_pos) {
            self.data[(self.current_offset.unwrap() as i32 - 1) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1nx0py1pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_neg_x(self.x_pos)
            && self.can_go_pos_z(self.z_pos)
        {
            self.data
                [(self.current_offset.unwrap() as i32 - 1 + self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1nx1py1nz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_neg_x(self.x_pos)
            && self.can_go_pos_y(self.y_pos)
            && self.can_go_neg_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32 - 1 + self.valid_region.get_width()
                - self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1nx1py0pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_neg_x(self.x_pos)
            && self.can_go_pos_y(self.y_pos)
        {
            self.data
                [(self.current_offset.unwrap() as i32 - 1 + self.valid_region.get_width()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1nx1py1pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_neg_x(self.x_pos)
            && self.can_go_pos_y(self.y_pos)
            && self.can_go_pos_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32 - 1
                + self.valid_region.get_width()
                + self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_0px1ny1nz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_neg_y(self.y_pos)
            && self.can_go_neg_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32
                - self.valid_region.get_width()
                - self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_0px1ny0pz(&self) -> T {
        if self.is_current_position_valid() && self.can_go_neg_y(self.y_pos) {
            self.data
                [(self.current_offset.unwrap() as i32 - self.valid_region.get_width()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_0px1ny1pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_neg_y(self.y_pos)
            && self.can_go_pos_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32 - self.valid_region.get_width()
                + self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_0px0py1nz(&self) -> T {
        if self.is_current_position_valid() && self.can_go_neg_z(self.z_pos) {
            self.data[(self.current_offset.unwrap() as i32 - self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_0px0py0pz(&self) -> T {
        if self.is_current_position_valid() {
            self.data[self.current_offset.unwrap()]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_0px0py1pz(&self) -> T {
        if self.is_current_position_valid() && self.can_go_pos_z(self.z_pos) {
            self.data[(self.current_offset.unwrap() as i32 + self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_0px1py1nz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_pos_y(self.y_pos)
            && self.can_go_neg_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32 + self.valid_region.get_width()
                - self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_0px1py0pz(&self) -> T {
        if self.is_current_position_valid() && self.can_go_pos_y(self.y_pos) {
            self.data
                [(self.current_offset.unwrap() as i32 + self.valid_region.get_width()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_0px1py1pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_pos_y(self.y_pos)
            && self.can_go_pos_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32
                + self.valid_region.get_width()
                + self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1px1ny1nz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_pos_x(self.x_pos)
            && self.can_go_neg_y(self.y_pos)
            && self.can_go_neg_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32 + 1
                - self.valid_region.get_width()
                - self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1px1ny0pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_pos_x(self.x_pos)
            && self.can_go_neg_y(self.y_pos)
        {
            self.data
                [(self.current_offset.unwrap() as i32 + 1 - self.valid_region.get_width()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1px1ny1pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_pos_x(self.x_pos)
            && self.can_go_neg_y(self.y_pos)
            && self.can_go_pos_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32 + 1 - self.valid_region.get_width()
                + self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1px0py1nz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_pos_x(self.x_pos)
            && self.can_go_neg_z(self.z_pos)
        {
            self.data
                [(self.current_offset.unwrap() as i32 + 1 - self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1px0py0pz(&self) -> T {
        if self.is_current_position_valid() && self.can_go_pos_x(self.x_pos) {
            self.data[(self.current_offset.unwrap() as i32 + 1) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1px0py1pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_pos_x(self.x_pos)
            && self.can_go_pos_z(self.z_pos)
        {
            self.data
                [(self.current_offset.unwrap() as i32 + 1 + self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1px1py1nz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_pos_x(self.x_pos)
            && self.can_go_pos_y(self.y_pos)
            && self.can_go_neg_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32 + 1 + self.valid_region.get_width()
                - self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1px1py0pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_pos_x(self.x_pos)
            && self.can_go_pos_y(self.y_pos)
        {
            self.data
                [(self.current_offset.unwrap() as i32 + 1 + self.valid_region.get_width()) as usize]
        } else {
            self.border_value
        }
    }
    fn peek_voxel_1px1py1pz(&self) -> T {
        if self.is_current_position_valid()
            && self.can_go_pos_x(self.x_pos)
            && self.can_go_pos_y(self.y_pos)
            && self.can_go_pos_z(self.z_pos)
        {
            self.data[(self.current_offset.unwrap() as i32
                + 1
                + self.valid_region.get_width()
                + self.valid_region.get_area()) as usize]
        } else {
            self.border_value
        }
    }
}
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(decl_macro)]
#![feature(raw_identifiers)]
// Compile/run test for raw identifiers (`r#...`) in macro definitions,
// invocations, and matchers. Do not "clean up" the r# tokens — exercising
// them is the point of this test.
r#macro_rules! r#struct {
    ($r#struct:expr) => { $r#struct }
}
macro_rules! old_macro {
    ($a:expr) => {$a}
}
// A `decl_macro` whose name and metavariable are raw identifiers.
macro r#decl_macro($r#fn:expr) {
    $r#fn
}
macro passthrough($id:ident) {
    $id
}
// `a` and `r#a` must match as distinct macro patterns.
macro_rules! test_pat_match {
    (a) => { 6 };
    (r#a) => { 7 };
}
pub fn main() {
    // Raw-identifier macro invocations and arguments must behave exactly like
    // their plain-identifier counterparts.
    r#println!("{struct}", r#struct = 1);
    assert_eq!(2, r#struct!(2));
    assert_eq!(3, r#old_macro!(3));
    assert_eq!(4, decl_macro!(4));
    let r#match = 5;
    assert_eq!(5, passthrough!(r#match));
    // `stringify!` must preserve the r# prefix.
    assert_eq!("r#struct", stringify!(r#struct));
    assert_eq!(6, test_pat_match!(a));
    assert_eq!(7, test_pat_match!(r#a));
}
|
#![feature(destructuring_assignment)]
#![feature(map_into_keys_values)]
#![cfg_attr(not(feature = "std"), no_std)]
use ink_lang as ink;
pub use self::newomegaranked::NewOmegaRanked;
pub use self::newomegaranked::PlayerDefence;
/// The logic for all ranked fights between players. Connected to Fight Management
/// in order to run fights, and to Storage in order to save the results and perform
/// actions according to their result.
#[ink::contract]
mod newomegaranked {
    use newomegagame::NewOmegaGame;
    use newomegastorage::NewOmegaStorage;
    use newomega::MAX_SHIPS;
    use newomega::FightResult;
    use ink_prelude::vec::Vec;
    use ink_prelude::string::String;
    use ink_storage::{
        collections::{
            HashMap as StorageHashMap,
        },
        traits::{
            PackedLayout,
            SpreadLayout,
        },
    };
    /// Commander experience awarded for each ranked win.
    const XP_PER_RANKED_WIN: u32 = 1;
    /// Describes a registered defence of a player
    #[derive(scale::Encode, scale::Decode, SpreadLayout, PackedLayout, Clone)]
    #[cfg_attr(
        feature = "std",
        derive(
            Debug,
            PartialEq,
            Eq,
            scale_info::TypeInfo,
            ink_storage::traits::StorageLayout
        )
    )]
    pub struct PlayerDefence {
        /// Fleet composition
        selection: [u8; MAX_SHIPS],
        /// Fleet variants (fittings)
        variants: [u8; MAX_SHIPS],
        /// Commander index
        commander: u8,
        /// Defender name
        name: String,
    }
    #[ink(storage)]
    pub struct NewOmegaRanked {
        /// The only account allowed to call the messages below.
        owner: AccountId,
        new_omega_game: newomegagame::NewOmegaGame,
        new_omega_storage: newomegastorage::NewOmegaStorage,
        /// Registered defences, keyed by player account.
        defences: StorageHashMap<AccountId, PlayerDefence>,
    }
    impl NewOmegaRanked {
        #[ink(constructor)]
        pub fn new(new_omega_game: NewOmegaGame, new_omega_storage: NewOmegaStorage) -> Self {
            Self {
                owner: Self::env().caller(),
                new_omega_game,
                new_omega_storage,
                defences: StorageHashMap::default(),
            }
        }
        /// Registers a fleet for Ranked Defence.
        ///
        /// # Arguments
        ///
        /// * `caller` - The account id of the player to register the defence for
        /// * `selection` - The fleet composition of the defence
        /// * `variants` - The variants (fittings) of the defence
        /// * `commander` - Index of the commander leading the defence
        /// * `name` - The defender name
        #[ink(message)]
        pub fn register_defence(&mut self, caller: AccountId, selection: [u8; MAX_SHIPS],
            variants: [u8; MAX_SHIPS], commander: u8, name: String) {
            // Only the owner (the delegator contract) may register defences.
            assert_eq!(self.env().caller(), self.owner);
            self.defences.insert(caller, PlayerDefence {
                selection,
                variants,
                commander,
                name,
            });
        }
        /// Gets the registered defence of a player.
        /// Will panic if defence has not been registered for the player.
        ///
        /// # Arguments
        ///
        /// * `caller` - The account id of the player to register the defence for
        ///
        /// # Returns
        ///
        /// * `defence` - The registered defence
        #[ink(message)]
        pub fn get_own_defence(&self, caller: AccountId) -> PlayerDefence {
            assert_eq!(self.env().caller(), self.owner);
            assert!(self.defences.get(&caller).is_some());
            // `PlayerDefence` derives `Clone`; cloning the stored value replaces
            // the previous manual field-by-field reconstruction.
            self.defences.get(&caller).unwrap().clone()
        }
        /// Gets all the registered defenders (all players).
        ///
        /// # Returns
        ///
        /// * `defenders` - The registered defenders
        #[ink(message)]
        pub fn get_all_defenders(&self) -> Vec<(AccountId, PlayerDefence)> {
            // `map` instead of `filter_map`: the closure never filtered anything
            // (it always returned `Some`).
            self.defences
                .iter()
                .map(|(&key, value)| (key, value.clone()))
                .collect()
        }
        /// Calculates a ranked fight between two players.
        ///
        /// # Arguments
        ///
        /// * `caller` - account id of the attacker
        /// * `target` - account id of the defender
        /// * `selection` - Attacker fleet composition (array with ship quantities)
        /// * `variants` - An array that holds variants of the attacker fleet
        /// * `commander` - The attacker commander
        ///
        /// # Returns
        ///
        /// * `result` - The outcome of the fight
        #[ink(message)]
        pub fn attack(&mut self, caller: AccountId, target: AccountId, selection: [u8; MAX_SHIPS],
            variants: [u8; MAX_SHIPS], commander: u8) -> FightResult {
            assert_eq!(self.env().caller(), self.owner);
            // Both parties must have registered defences.
            assert!(self.defences.get(&caller).is_some());
            assert!(self.defences.get(&target).is_some());
            // Try to get the defence
            let target_defence: &PlayerDefence = self.defences.get(&target).unwrap();
            // Determine the seed, in a naive way -> IMPROVEME: MOVE TO VRF
            let seed: u64 = self.env().block_timestamp();
            // Calculate the fight result
            let (result, _lhs_moves, _rhs_moves) =
                self.new_omega_game.fight(
                    seed,
                    false,
                    selection,
                    target_defence.selection,
                    variants,
                    target_defence.variants,
                    commander,
                    target_defence.commander);
            // Mark results of the fight on the leaderboard and adjust commander xp.
            // lhs_dead = attacker wiped out -> defender (target) wins.
            if result.lhs_dead {
                self.new_omega_storage.mark_ranked_win(target);
                self.new_omega_storage.mark_ranked_loss(caller);
                self.new_omega_storage.add_commander_xp(target,
                    target_defence.commander, XP_PER_RANKED_WIN);
            } else if result.rhs_dead {
                self.new_omega_storage.mark_ranked_win(caller);
                self.new_omega_storage.mark_ranked_loss(target);
                self.new_omega_storage.add_commander_xp(caller,
                    commander, XP_PER_RANKED_WIN);
            }
            result
        }
    }
}
|
// Convenient reexports for internal uses.
pub(crate) use errno::prelude::*;
pub(crate) use std::sync::Arc;
// Under the "sgx" feature the std sync types are replaced by their SGX-aware
// counterparts, aliased so the rest of the crate uses the ordinary names.
cfg_if::cfg_if! {
    if #[cfg(feature = "sgx")] {
        pub(crate) use std::prelude::v1::*;
        pub(crate) use std::sync::{SgxMutex as Mutex, SgxRwLock as RwLock, SgxMutexGuard as MutexGuard};
    } else {
        pub(crate) use std::sync::{Mutex, MutexGuard, RwLock};
    }
}
// Convenient type aliases for internal uses.
pub(crate) type HostFd = u32;
pub(crate) use async_io::event::{Events, Pollee, Poller};
pub(crate) use async_io::socket::{Addr, Domain};
// Expands to the name of the enclosing function as a `&'static str`.
// Works by taking `type_name` of a local `fn f` (yields ".../enclosing::f"),
// stripping the trailing "::f" (3 chars), then keeping what follows the last
// ':' — i.e. the bare enclosing-function name.
macro_rules! function {
    () => {{
        fn f() {}
        fn type_name_of<T>(_: T) -> &'static str {
            std::any::type_name::<T>()
        }
        let name = type_name_of(f);
        match &name[..name.len() - 3].rfind(':') {
            Some(pos) => &name[pos + 1..name.len() - 3],
            None => &name[..name.len() - 3],
        }
    }};
}
// Prints the current function, line, and file — a lightweight tracing aid
// built on the `function!` macro above.
macro_rules! debug_trace {
    () => {
        println!(
            "> Function = {}, Line = {}, File = {}",
            function!(),
            line!(),
            file!()
        )
    };
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.