text
stringlengths
8
4.13M
use juniper::ScalarValue;

/// A custom GraphQL scalar value enum, mirroring Juniper's default scalar
/// representation (int, float, string, boolean).
///
/// The `ScalarValue` derive presumably generates the trait implementation;
/// each `#[value(...)]` attribute names the accessor/conversion methods the
/// derive should emit for that variant.
#[derive(Clone, Debug, PartialEq, ScalarValue)]
pub enum DefaultScalarValue {
    Int(i32),
    Float(f64),
    // Generate `as_str`, `as_string` and `into_string` for string values.
    #[value(as_str, as_string, into_string)]
    String(String),
    // Generate `as_bool` for boolean values.
    #[value(as_bool)]
    Boolean(bool),
}

fn main() {}
use crate::player::PlayerName;
use serde::Serialize;
use std::error::Error as ErrorT;
use std::fmt;

/// Shorthand for results whose error type is this module's [`Error`].
pub type Result<T> = std::result::Result<T, Error>;

/// Every error condition this server reports back to clients.
#[derive(Debug, Serialize)]
pub enum Error {
    InvalidPlayerName(PlayerName),
    InvalidSession,
    InvalidSecret,
    PlayerNameTaken(PlayerName),
    InternalError,
}

impl fmt::Display for Error {
    /// Render a human-readable description of the error condition.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Error::InvalidPlayerName(name) => write!(f, "invalid player name: {}", name),
            Error::InvalidSession => write!(f, "invalid session id"),
            Error::InvalidSecret => write!(f, "invalid secret"),
            Error::PlayerNameTaken(name) => write!(f, "name {} is already taken!", name),
            Error::InternalError => write!(f, "internal error"),
        }
    }
}

impl ErrorT for Error {}
use std::cmp::Ordering; use std::collections::{BTreeSet, HashSet}; use std::sync::RwLock; use std::time::Duration; use types::{block::BlockNumber, peer_info::PeerId}; #[derive(Eq, PartialEq, Clone, Debug)] struct TTLEntry<E> where E: Ord + Clone, { data: E, expiration_time: Duration, block_number: BlockNumber, peers: HashSet<PeerId>, } impl<E> TTLEntry<E> where E: Ord + Clone, { fn _expiration_time(&self) -> Duration { self.expiration_time } fn new(peer: PeerId, block_number: BlockNumber, entry: E) -> Self { let mut peers = HashSet::new(); peers.insert(peer); TTLEntry { data: entry, expiration_time: Duration::from_secs(60 * 60), block_number, peers, } } } impl<E> PartialOrd for TTLEntry<E> where E: Ord + Clone, { fn partial_cmp(&self, other: &TTLEntry<E>) -> Option<Ordering> { Some(self.cmp(other)) } } impl<E> Ord for TTLEntry<E> where E: Ord + Clone, { fn cmp(&self, other: &Self) -> Ordering { match self.block_number.cmp(&other.block_number) { Ordering::Equal => { return self.data.cmp(&other.data); } ordering => return ordering, } } } /// thread safe pub struct TTLPool<E> where E: Ord + Clone, { data: RwLock<BTreeSet<TTLEntry<E>>>, } impl<E> TTLPool<E> where E: Ord + Clone, { pub(crate) fn new() -> Self { Self { data: RwLock::new(BTreeSet::new()), } } /// add entry to pool pub(crate) fn insert(&self, peer: PeerId, number: BlockNumber, entry: E) { let mut ttl_entry = TTLEntry::new(peer.clone(), number, entry); let mut lock = self.data.write().unwrap(); if lock.contains(&ttl_entry) { ttl_entry = lock.take(&ttl_entry).expect("entry not exist.") }; ttl_entry.peers.insert(peer); lock.insert(ttl_entry); } /// take entry from pool pub(crate) fn take(&self, size: usize) -> Vec<E> { let mut lock = self.data.write().unwrap(); let mut set_iter = lock.iter(); let mut entries = Vec::new(); loop { if entries.len() >= size { break; } let entry = set_iter.next(); if entry.is_none() { break; } let ttl_entry = entry.expect("entry is none.").clone(); entries.push(ttl_entry); } 
drop(set_iter); if !entries.is_empty() { entries.iter().for_each(|e| { lock.remove(e); }); } entries.iter().map(|e| e.data.clone()).collect() } pub(crate) fn _gc(&self, _now: Duration) -> Vec<E> { //todo unimplemented!() } pub(crate) fn _size(&self) -> usize { self.data.read().unwrap().len() } }
use hashbrown::{HashMap, HashSet}; use utils::Vector3; // #[test] pub fn run() { let input = read_input(include_str!("input/day24.txt")); println!("{}", exercise_1(&input)); println!("{}", exercise_2(&input, 100)); } enum Directions { EAST, WEST, NORTHEAST, NORTHWEST, SOUTHEAST, SOUTHWEST, } const DIRECTIONS: [Directions; 6] = [ Directions::EAST, Directions::WEST, Directions::NORTHEAST, Directions::NORTHWEST, Directions::SOUTHEAST, Directions::SOUTHWEST, ]; impl Directions { fn value(&self) -> Vector3 { match self { Directions::EAST => Vector3::new([1, -1, 0]), Directions::WEST => Vector3::new([-1, 1, 0]), Directions::NORTHEAST => Vector3::new([0, -1, 1]), Directions::NORTHWEST => Vector3::new([-1, 0, 1]), Directions::SOUTHEAST => Vector3::new([1, 0, -1]), Directions::SOUTHWEST => Vector3::new([0, 1, -1]), } } } type Input = Vec<Vec<Directions>>; fn read_input(input: &str) -> Input { input.lines().map(read_line).collect() } fn read_line(input: &str) -> Vec<Directions> { let mut v = Vec::with_capacity(input.len() * 2 / 3); let mut it = input.chars(); while let Some(c) = it.next() { v.push(match c { 'e' => Directions::EAST, 'w' => Directions::WEST, 's' => match it.next() { Some('e') => Directions::SOUTHEAST, Some('w') => Directions::SOUTHWEST, _ => unreachable!(), }, 'n' => match it.next() { Some('e') => Directions::NORTHEAST, Some('w') => Directions::NORTHWEST, _ => unreachable!(), }, _ => unreachable!(), }); } v } fn exercise_1(input: &Input) -> usize { create_set(input).len() } fn exercise_2(input: &Input, len: usize) -> usize { let mut set = create_set(input); for _ in 0..len { let mut new_set = HashSet::new(); for candidate in find_candidates(&set) { let x = DIRECTIONS .iter() .map(|x| candidate + x.value()) .filter(|x| set.contains(x)) .count(); if set.contains(&candidate) { if !(x == 0 || x > 2) { new_set.insert(candidate); } } else { if x == 2 { new_set.insert(candidate); } } } set = new_set; } set.len() } fn create_set(input: &Input) -> HashSet<Vector3> { 
let mut set = HashSet::new(); for path in input { let c = find_tile(Vector3::new([0, 0, 0]), path); if set.contains(&c) { set.remove(&c); } else { set.insert(c); } } set } fn find_tile(start: Vector3, path: &Vec<Directions>) -> Vector3 { path.iter().fold(start, |acc, x| acc + x.value()) } fn find_candidates(set: &HashSet<Vector3>) -> HashSet<Vector3> { set.iter() .flat_map(|x| DIRECTIONS.iter().map(move |d| *x + d.value())) .collect() } #[cfg(test)] mod tests { use super::*; use crate::test::Bencher; #[test] fn d18ex1() { let input = read_input(include_str!("input/day24test.txt")); assert_eq!(10, exercise_1(&input)); // assert_eq!(71, exercise_1(&input)) } #[test] fn d18ex2() { let input = read_input(include_str!("input/day24test.txt")); assert_eq!(2208, exercise_2(&input, 100)); } #[bench] fn d18_bench_ex1(b: &mut Bencher) { let input = read_input(include_str!("input/day24.txt")); b.iter(|| exercise_1(&input)); } #[bench] fn d18_bench_ex2(b: &mut Bencher) { let input = read_input(include_str!("input/day24.txt")); b.iter(|| exercise_1(&input)); } }
//! # Multireferences (aka _the inverse distributive law_) //! //! Ever wanted to get `&[T]` from `[&T]` without cloning anything? //! //! Semantically, it doesn't make sense (because a slice wraps a block of //! _contiguous_ elements). But sometimes it's very convenient to be //! able to &#8220;algebraically&#8221; extract a common lifetime //! from a bunch of references. //! //! This crate provides two helper types //! [`Slice`](struct.Slice.html) and [`Pair`](struct.Pair.html) //! that allow the following conversions: //! //! * [`&'a [&'x T] -> &'a Slice<T>`](struct.Slice.html#method.new) (and a mutable equivalent) //! * [`&'a (&'x A, &'x B) -> &'a Pair<A, B>`](struct.Pair.html#method.new) (and a mutable equivalent) //! //! Moreover, each of these types provides `.as_ref()` and `.as_mut()` //! methods (with signatures different from the ones used by the `AsRef` and //! `AsMut` traits) implementing the forward distributive law: //! //! * [`&'a Slice<T> -> &'a [&'a T]`](struct.Slice.html#method.as_ref) (and a mutable equivalent) //! * [`&'a Pair<A, B> -> &'a (&'a A, &'a B)`](struct.Pair.html#method.as_ref) (and a mutable equivalent) //! // //! Also there is a macro `declare_named_tuple!` that introduces // //! a user-defined helper type which allows to name // //! the individual wrapped references. //! //! ## Motivation //! //! _The following text is somewhat long. Unfortunately, I do not //! know any realistic uses of the inverse distributive law in situations //! not involving a formal argument in a contra-contravariant position._ //! //! ### Preliminaries //! //! Suppose you have the following trait: //! //! ``` //! trait Info { //! type RelevantPart: ?Sized; //! //! fn info<E, Info>(&self, extractor: E) -> Info where //! E: FnOnce(&Self::RelevantPart) -> Info; //! } //! ``` //! //! I.e. a type implementing `Info` can temporarily give access to //! some its part. For example: //! //! ``` //! # trait Info { type RelevantPart: ?Sized; //! 
# fn info<E, Info>(&self, extractor: E) -> Info where //! # E: FnOnce(&Self::RelevantPart) -> Info; } //! # use std::collections::HashMap; //! struct Configuration { //! fields: HashMap<String, String>, //! } //! //! impl Info for Configuration { //! type RelevantPart = str; //! //! fn info<E, Info>(&self, extractor: E) -> Info where //! E: FnOnce(&str) -> Info //! { //! match self.fields.get("name") { //! Some(name) => extractor(name), //! None => extractor("UNKNOWN"), //! } //! } //! } //! ``` //! //! If you are interested whether the continuation-passing style is necessary, //! try to write a non-cps equivalent //! //! ``` //! # struct Foo; impl Foo { //! fn info<'a>(&'a self) -> &'a str //! # { todo!() } } //! ``` //! //! for some dynamically generated string (e.g. the current timestamp) instead //! of static `"UNKNOWN"`. //! //! The only safe way to get the `&'a str` from such a string seems to be //! to embed this string directly in the `Configuration`. But it can't be done //! through a shared reference (and if it could, it would be a rather //! strange-looking solution, because this string has nothing to do with //! the configuration). //! //! ### The problem //! //! Now suppose that you want to give two fields to the extractor. //! What the `RelevantPart` would be? //! //! The laziest solution is to define //! //! ``` //! type RelevantPart = (String, String); //! ``` //! //! But such a type requires cloning the strings. It would be better to have //! //! ``` //! # trait Foo { type RelevantPart; } impl<'a> Foo for &'a () { //! type RelevantPart = (&'a str, &'a str); //! # } //! ``` //! //! but our trait doesn't have the `'a` parameter. And if it had it //! would not work either. E.g. a `&str` borrowed from a dynamically //! generated analogue of `"UNKNOWN"` must have its lifetime fully //! contained in the `info` method. But the `'a` lifetime is external //! to this method. //! //! ### A solution //! //! ``` //! # trait Info { type RelevantPart: ?Sized; //! 
# fn info<E, Info>(&self, extractor: E) -> Info where //! # E: FnOnce(&Self::RelevantPart) -> Info; } //! # use std::collections::HashMap; //! # struct Configuration { fields: HashMap<String, String> } //! fn make_error_string() -> String { unimplemented!() } //! //! use multiref::Pair; // a wrapper around (&'a A, &'a B) //! //! impl Info for Configuration { //! type RelevantPart = Pair<str, str>; // now this type supports any lifetime //! //! fn info<E, Info>(&self, extractor: E) -> Info where //! E: FnOnce(&Pair<str,str>) -> Info //! { //! let error_string = make_error_string(); //! // for simplicity we generate an error string unconditionally //! //! let foo: &str = match self.fields.get("foo") { //! Some(foo) => &foo, //! None => &error_string, //! }; //! //! let bar: &str = match self.fields.get("bar") { //! Some(bar) => &bar, //! None => &error_string, //! }; //! //! extractor( (&(foo, bar)).into() ) //! // Pair::new(&(foo, bar)) also can be used //! } //! } //! ``` //! //! //! ## Warning //! //! This crate uses some `unsafe` code with questionable soundness. It seems //! that this code is sound if DST are sound, but it may not be the case. #![no_std] mod slice; mod pair; mod named_tuple; pub use pair::Pair; pub use slice::Slice;
use std::pin::Pin;
use std::task::{Poll, Waker};

use deck_core::{ManifestId, Source};
use futures_preview::compat::{Future01CompatExt, Stream01CompatExt};
use futures_preview::future::{self, FutureExt, TryFutureExt};
use futures_preview::stream::{self, Stream, StreamExt, TryStreamExt};
use hyper::header::CONTENT_LENGTH;

use crate::local::context::Context;
use crate::progress::{Blocked, Downloading, Progress};

// NOTE: this file uses the pre-stabilization async ecosystem
// (`futures-preview`, the `await!` macro, `poll_next(&Waker)`), so it only
// builds on the nightly toolchain it was written for.

/// A stream of [`Progress`] events emitted while fetching a package source.
#[must_use = "streams do nothing unless polled"]
pub struct FetchSource(Pin<Box<dyn Stream<Item = Result<Progress, ()>> + Send>>);

impl FetchSource {
    /// Dispatch on the source kind to start the appropriate fetch.
    ///
    /// `Source::Path` is not implemented yet and panics via `unimplemented!`.
    pub fn new(ctx: Context, id: ManifestId, source: Source) -> Self {
        match source {
            Source::Git => fetch_git(ctx, id),
            Source::Path { ref path, ref hash } => unimplemented!(),
            Source::Uri { uri, hash } => fetch_uri(ctx, id, uri, hash),
        }
    }

    /// Box and pin an arbitrary progress stream into a `FetchSource`.
    fn from_stream<S: Stream<Item = Result<Progress, ()>> + Send + 'static>(inner: S) -> Self {
        FetchSource(Box::pin(inner))
    }
}

impl Stream for FetchSource {
    type Item = Result<Progress, ()>;

    // Plain delegation to the boxed inner stream.
    fn poll_next(mut self: Pin<&mut Self>, waker: &Waker) -> Poll<Option<Self::Item>> {
        self.0.as_mut().poll_next(waker)
    }
}

/// Fetch a source over HTTP: emit a `Downloading` progress event per body
/// chunk, then a final `Blocked` event once the download completes.
///
/// NOTE(review): `uri.parse().unwrap()` panics on a malformed URI, and the
/// `_hash` parameter is accepted but never verified — confirm whether
/// checksum validation is intended here.
fn fetch_uri(ctx: Context, id: ManifestId, uri: String, _hash: String) -> FetchSource {
    let future = async move {
        let get = ctx.client.get(uri.parse().unwrap()).compat();
        // Connection errors are only logged; the stream yields `Err(())`.
        let response = await!(get).map_err(|e| eprintln!("failed to connect to URI: {}", e))?;

        // Total size is taken from Content-Length when present and parseable.
        let len = response
            .headers()
            .get(CONTENT_LENGTH)
            .and_then(|len| len.to_str().ok())
            .and_then(|len| len.parse::<u64>().ok());

        let mut progress = Downloading {
            package_id: id.clone(),
            downloaded_bytes: 0,
            total_bytes: len,
            source: uri.clone(),
        };

        // Accumulate downloaded bytes as chunks arrive, emitting a snapshot
        // of the progress struct for each chunk.
        let downloading = response
            .into_body()
            .compat()
            .map_err(|_| ())
            .map_ok(move |chunk| {
                progress.downloaded_bytes += chunk.len() as u64;
                Progress::Downloading(progress.clone())
            });

        let progress = Progress::Blocked(Blocked {
            package_id: id,
            description: format!("fetched source from `{}`", uri),
        });

        // Chunk events first, then the single completion event.
        let done = downloading.chain(stream::once(future::ok(progress)));

        Ok(done)
    };

    // Flatten the future-of-stream; on error, surface it as a one-item
    // error stream instead.
    let stream = future
        .map_ok(|stream| Box::pin(stream) as Pin<Box<dyn Stream<Item = _> + Send>>)
        .unwrap_or_else(|err| Box::pin(stream::once(future::err(err))))
        .flatten_stream();

    FetchSource::from_stream(stream)
}

/// Git fetch is a stub: it emits a single `Blocked` event and never touches
/// the network.
fn fetch_git(_ctx: Context, id: ManifestId) -> FetchSource {
    FetchSource::from_stream(stream::once(future::ok(Progress::Blocked(Blocked {
        package_id: id,
        description: "checked out repository".to_string(),
    }))))
}
// modified version of original https://github.com/PonasKovas/mandelbrot/blob/master/src/main.rs extern crate image; extern crate rayon; extern crate num; use rayon::prelude::*; macro_rules! max_float { ($x: expr) => ($x); ($x: expr, $($z: expr),+) => {{ let y = max_float!($($z),*); if $x > y { $x } else { y } }} } macro_rules! min_float { ($x: expr) => ($x); ($x: expr, $($z: expr),+) => {{ let y = min_float!($($z),*); if $x < y { $x } else { y } }} } fn main() { // the return of parse() is Err let width: u32 = 1080; let height = (width as f32 * 9.0 / 16.0) as u32; let iterations_per_pixel = (width as f64 / 25_f64) as u32; // create a new image for saving let img = image::ImageBuffer::<image::Rgb<u8>, Vec<u8>>::new(width, height); // Distribute the work amongst the cores // into_vec : Consumes the image buffer and returns the underlying data as an owned buffer // so we need to clone() here let mut buffer = img.clone().into_vec(); // Split the image into rows // the par_chunks_mut(): // Returns a parallel iterator over at most size elements of self at a time. // The chunks are mutable and do not overlap. buffer.par_chunks_mut(width as usize * 3_usize) // change a vec into an iter .enumerate() // for_each(): Calls a closure on each element of an iterator. 
// which is very similar to the js arrow funciton .for_each( |(y, mut row)| { // Iterate through all pixels in this row for x in 0..width { let x = x as usize; // Check if it's in the mandelbrot set match is_point_in_set(x as u32, y, iterations_per_pixel, width, height) { Some(itr) => { // calculate the rgb value let mut r_val = (itr as f32 / iterations_per_pixel as f32) * 255_f32 * 2_f32; let mut gb_val = (1_f32 - (itr as f32 / iterations_per_pixel as f32)) * -255_f32 + 255_f32; r_val = min_float!(r_val, 255_f32); gb_val = max_float!(gb_val, 0_f32); row[x * 3_usize] = r_val as u8; row[x * 3_usize + 1_usize] = gb_val as u8; row[x * 3_usize + 2_usize] = gb_val as u8; } None => { row[x * 3_usize] = 255_u8; row[x * 3_usize + 1_usize] = 255_u8; row[x * 3_usize + 2_usize] = 255_u8; } } } }); // Save the outcome let final_image: image::RgbImage = image::ImageBuffer::from_vec(width, height, buffer).unwrap(); final_image.save("mandelbrot.png").unwrap(); } fn is_point_in_set(x: u32, y: usize, iterations: u32, width: u32, height: u32) -> Option<u32> { let c = num::complex::Complex::new( (x as f64 / (width - 1) as f64) * 3.5_f64 - 2.5_f64, (y as f64 / (height - 1) as f64) * 1.96875_f64 - 0.984375_f64); let mut z = num::complex::Complex::new(0_f64, 0_f64); for i in 0..iterations { z = z * z + c; if z.norm_sqr() > 4.0 { return Some(i); } } None }
//! Repeated constants used around pathfinder use crate::macro_prelude::block_hash; use crate::BlockHash; /// Vergen string pub const VERGEN_GIT_DESCRIBE: &str = env!("VERGEN_GIT_DESCRIBE"); /// User agent used in http clients pub const USER_AGENT: &str = concat!("starknet-pathfinder/", env!("VERGEN_GIT_DESCRIBE")); pub const TESTNET_GENESIS_HASH: BlockHash = block_hash!("07d328a71faf48c5c3857e99f20a77b18522480956d1cd5bff1ff2df3c8b427b"); pub const MAINNET_GENESIS_HASH: BlockHash = block_hash!("047C3637B57C2B079B93C61539950C17E868A28F46CDEF28F88521067F21E943"); pub const INTEGRATION_GENESIS_HASH: BlockHash = block_hash!("03ae41b0f023e53151b0c8ab8b9caafb7005d5f41c9ab260276d5bdc49726279"); pub const TESTNET2_GENESIS_HASH: BlockHash = block_hash!("04163f64ea0258f21fd05b478e2306ab2daeb541bdbd3bf29a9874dc5cd4b64e");
#![no_std]
#![no_main]

extern crate panic_halt;

use dot_games::games::SelectionScreen;

/// Firmware entry point for the Arduino Uno; never returns.
#[arduino_uno::entry]
fn main() -> ! {
    // Shared components handed to every game (see dot_games::get_components).
    let mut components = dot_games::get_components();

    // Run the Selection Screen; it returns the loop of the chosen game.
    let selected_game_loop = SelectionScreen::new().run(&mut components);

    // Run the Game Loop.
    selected_game_loop(components)
}
use crate::user::User;
use amiquip::{Connection, ConsumerMessage, ConsumerOptions, QueueDeclareOptions, Result};
use bincode::deserialize;

/// Consume at most one message from the local `user` queue, deserialize it
/// as a [`User`], ack it, and print it.
///
/// Connects to a RabbitMQ broker on localhost and returns once the first
/// received message has been handled and the connection is closed.
pub fn consume() -> Result<()> {
    let mut conn = Connection::insecure_open("amqp://localhost:5672")?;
    let ch = conn.open_channel(None)?;
    let queue = ch.queue_declare("user", QueueDeclareOptions::default())?;
    let consumer = queue.consume(ConsumerOptions::default())?;
    println!("Consumer will exit after receiving one message");

    // The original loop broke unconditionally after its first iteration and
    // never used its `enumerate` index, so only the first message matters.
    if let Some(ConsumerMessage::Delivery(delivery)) = consumer.receiver().iter().next() {
        // NOTE(review): `unwrap` panics on a malformed payload; queue content
        // is external input, so consider handling the error explicitly.
        let usr: User = deserialize(&delivery.body).unwrap();
        consumer.ack(delivery)?;
        println!("Received: {:?}", usr);
    }
    conn.close()
}
/// Read two integers a and b from stdin and print any (x, y) with
/// x + y == a and x - y == b, searching the range [-1000, 1000] for both.
fn main() {
    let stdin = std::io::stdin();
    let mut rd = ProconReader::new(stdin.lock());
    let a: i32 = rd.get();
    let b: i32 = rd.get();
    let range = -1000..=1000;
    for x in range.clone() {
        for y in range.clone() {
            if x + y == a && x - y == b {
                println!("{} {}", x, y);
                return;
            }
        }
    }
    unreachable!();
}

/// Whitespace-separated token reader for competitive-programming style input.
pub struct ProconReader<R: std::io::Read> {
    reader: R,
}

impl<R: std::io::Read> ProconReader<R> {
    pub fn new(reader: R) -> Self {
        Self { reader }
    }

    /// Read the next whitespace-delimited token and parse it as `T`.
    ///
    /// Skips any leading spaces/newlines, consumes bytes up to (and
    /// including) the next delimiter, and panics with "Parse Error." when
    /// the token does not parse as `T`.
    pub fn get<T: std::str::FromStr>(&mut self) -> T {
        use std::io::Read;
        let is_delim = |b: u8| b == b' ' || b == b'\n' || b == b'\r';
        let mut token = Vec::new();
        let mut bytes = self.reader.by_ref().bytes().map(|b| b.unwrap());
        // Skip leading delimiters until the first token byte.
        for byte in &mut bytes {
            if !is_delim(byte) {
                token.push(byte);
                break;
            }
        }
        // Accumulate until the next delimiter (which is consumed) or EOF.
        for byte in bytes {
            if is_delim(byte) {
                break;
            }
            token.push(byte);
        }
        std::str::from_utf8(&token)
            .unwrap()
            .parse()
            .ok()
            .expect("Parse Error.")
    }
}
//! Database for long clauses. use std::mem::transmute; use partial_ref::{partial, PartialRef}; use varisat_formula::Lit; use crate::{ context::{parts::*, Context}, prop::Reason, }; use super::{header::HEADER_LEN, ClauseAlloc, ClauseHeader, ClauseRef}; /// Partitions of the clause database. /// /// The long clauses are partitioned into 4 [`Tier`]s. This follows the approach described by /// Chanseok Oh in ["Between SAT and UNSAT: The Fundamental Difference in CDCL /// SAT"](https://doi.org/10.1007/978-3-319-24318-4_23), section 4. #[derive(Debug, Copy, Clone, PartialEq, Eq)] #[repr(u8)] pub enum Tier { Irred = 0, Core = 1, Mid = 2, Local = 3, } impl Tier { /// Total number of tiers. pub const fn count() -> usize { 4 } /// Cast an index into the corresponding tier. pub unsafe fn from_index(index: usize) -> Tier { debug_assert!(index < Tier::count()); transmute(index as u8) } } /// Database for long clauses. /// /// Removal of clauses from the `clauses` and the `by_tier` fields can be delayed. The clause /// header's deleted and tier fields need to be checked when iterating over these. `by_tier` may /// also contain duplicate entries. #[derive(Default)] pub struct ClauseDb { /// May contain deleted clauses, see above pub(super) clauses: Vec<ClauseRef>, /// May contain deleted and moved clauses, see above pub(super) by_tier: [Vec<ClauseRef>; Tier::count()], /// These counts should always be up to date pub(super) count_by_tier: [usize; Tier::count()], /// Size of deleted but not collected clauses pub(super) garbage_size: usize, } impl ClauseDb { /// The number of long clauses of a given tier. pub fn count_by_tier(&self, tier: Tier) -> usize { self.count_by_tier[tier as usize] } } /// Add a long clause to the database. 
pub fn add_clause(
    mut ctx: partial!(Context, mut ClauseAllocP, mut ClauseDbP, mut WatchlistsP),
    header: ClauseHeader,
    lits: &[Lit],
) -> ClauseRef {
    let tier = header.tier();
    let cref = ctx.part_mut(ClauseAllocP).add_clause(header, lits);

    // Register the clause's first two literals in the watchlists.
    ctx.part_mut(WatchlistsP)
        .watch_clause(cref, [lits[0], lits[1]]);

    let db = ctx.part_mut(ClauseDbP);

    db.clauses.push(cref);
    db.by_tier[tier as usize].push(cref);
    db.count_by_tier[tier as usize] += 1;

    cref
}

/// Change the tier of a long clause.
///
/// This is a noop for a clause already of the specified tier.
pub fn set_clause_tier(
    mut ctx: partial!(Context, mut ClauseAllocP, mut ClauseDbP),
    cref: ClauseRef,
    tier: Tier,
) {
    let (alloc, mut ctx) = ctx.split_part_mut(ClauseAllocP);
    let db = ctx.part_mut(ClauseDbP);

    let old_tier = alloc.header(cref).tier();
    if old_tier != tier {
        // Counts are kept exact; `by_tier` is only appended to, so the stale
        // entry under the old tier stays until a later collection.
        db.count_by_tier[old_tier as usize] -= 1;
        db.count_by_tier[tier as usize] += 1;

        alloc.header_mut(cref).set_tier(tier);
        db.by_tier[tier as usize].push(cref);
    }
}

/// Delete a long clause from the database.
pub fn delete_clause(
    mut ctx: partial!(Context, mut ClauseAllocP, mut ClauseDbP, mut WatchlistsP),
    cref: ClauseRef,
) {
    // TODO Don't force a rebuild of all watchlists here
    ctx.part_mut(WatchlistsP).disable();

    let (alloc, mut ctx) = ctx.split_part_mut(ClauseAllocP);
    let db = ctx.part_mut(ClauseDbP);

    let header = alloc.header_mut(cref);
    debug_assert!(
        !header.deleted(),
        "delete_clause for already deleted clause"
    );
    // Deletion is lazy: mark the header and account for the garbage so a
    // later collection can reclaim the storage.
    header.set_deleted(true);
    db.count_by_tier[header.tier() as usize] -= 1;
    db.garbage_size += header.len() + HEADER_LEN;
}

/// Delete a long clause from the database unless it is asserting.
///
/// Returns true if the clause was deleted.
pub fn try_delete_clause(
    mut ctx: partial!(
        Context,
        mut ClauseAllocP,
        mut ClauseDbP,
        mut WatchlistsP,
        ImplGraphP,
        AssignmentP,
    ),
    cref: ClauseRef,
) -> bool {
    // A clause counts as asserting when its first literal is true and the
    // implication graph records this clause as the reason for it.
    let initial_lit = ctx.part(ClauseAllocP).clause(cref).lits()[0];
    let asserting = ctx.part(AssignmentP).lit_is_true(initial_lit)
        && ctx.part(ImplGraphP).reason(initial_lit.var()) == &Reason::Long(cref);

    if !asserting {
        delete_clause(ctx.borrow(), cref);
    }

    !asserting
}

/// Iterator over all long clauses.
///
/// This filters deleted (but uncollected) clauses on the fly.
pub fn clauses_iter<'a>(
    ctx: &'a partial!('a Context, ClauseAllocP, ClauseDbP),
) -> impl Iterator<Item = ClauseRef> + 'a {
    let alloc = ctx.part(ClauseAllocP);
    ctx.part(ClauseDbP)
        .clauses
        .iter()
        .cloned()
        .filter(move |&cref| !alloc.header(cref).deleted())
}

/// Iterate over all and remove some long clauses.
///
/// Takes a closure that returns true for each clause that should be kept and false for each that
/// should be deleted.
pub fn filter_clauses<F>(
    mut ctx: partial!(Context, mut ClauseAllocP, mut ClauseDbP, mut WatchlistsP),
    mut filter: F,
) where
    F: FnMut(&mut ClauseAlloc, ClauseRef) -> bool,
{
    // Bulk deletion disables the watchlists, forcing a later rebuild.
    ctx.part_mut(WatchlistsP).disable();

    let (alloc, mut ctx) = ctx.split_part_mut(ClauseAllocP);
    let db = ctx.part_mut(ClauseDbP);

    // Borrow the bookkeeping fields separately so the closure below can
    // update them while `retain` mutably borrows `db.clauses`.
    let count_by_tier = &mut db.count_by_tier;
    let garbage_size = &mut db.garbage_size;

    db.clauses.retain(|&cref| {
        if alloc.header(cref).deleted() {
            // Already lazily deleted earlier: drop it from `clauses` now.
            false
        } else if filter(alloc, cref) {
            true
        } else {
            // Rejected by the filter: mark deleted and account for garbage.
            let header = alloc.header_mut(cref);
            header.set_deleted(true);
            count_by_tier[header.tier() as usize] -= 1;
            *garbage_size += header.len() + HEADER_LEN;
            false
        }
    })
}

#[cfg(test)]
mod tests {
    use super::*;

    use partial_ref::IntoPartialRefMut;

    use varisat_formula::cnf_formula;

    use crate::context::set_var_count;

    #[test]
    fn set_tiers_and_deletes() {
        let mut ctx = Context::default();
        let mut ctx = ctx.into_partial_ref_mut();

        let clauses = cnf_formula![
            1, 2, 3;
            4, -5, 6;
            -2, 3, -4;
            -3, 5, 2, 7, 5;
        ];

        set_var_count(ctx.borrow(), clauses.var_count());

        let tiers = vec![Tier::Irred, Tier::Core, Tier::Mid, Tier::Local];
        let new_tiers = vec![Tier::Irred, Tier::Local, Tier::Local, Tier::Core];

        let mut crefs = vec![];

        for (clause, &tier) in clauses.iter().zip(tiers.iter()) {
            let mut header = ClauseHeader::new();
            header.set_tier(tier);
            let cref = add_clause(ctx.borrow(), header, clause);
            crefs.push(cref);
        }

        for (&cref, &tier) in crefs.iter().rev().zip(new_tiers.iter().rev()) {
            set_clause_tier(ctx.borrow(), cref, tier);
        }

        // We only check presence, as deletion from these lists is delayed
        assert!(ctx.part(ClauseDbP).by_tier[Tier::Irred as usize].contains(&crefs[0]));
        assert!(ctx.part(ClauseDbP).by_tier[Tier::Core as usize].contains(&crefs[3]));
        assert!(ctx.part(ClauseDbP).by_tier[Tier::Local as usize].contains(&crefs[1]));
        assert!(ctx.part(ClauseDbP).by_tier[Tier::Local as usize].contains(&crefs[2]));

        assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Irred), 1);
        assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Core), 1);
        assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Mid), 0);
        assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Local), 2);

        delete_clause(ctx.borrow(), crefs[0]);
        delete_clause(ctx.borrow(), crefs[2]);

        assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Irred), 0);
        assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Core), 1);
        assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Mid), 0);
        assert_eq!(ctx.part(ClauseDbP).count_by_tier(Tier::Local), 1);
    }
}
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. /// A delegation signer. pub struct DelegationSigner<'a> { /// Key tag. pub key_tag: KeyTag, /// Security algorithm. pub security_algorithm: SecurityAlgorithm, /// Digest. pub digest: DnsSecDigest<'a>, }
#![feature(libc)] extern crate libc; extern { //fn plus(x: libc::c_int, y: libc::c_int) -> libc::c_int; fn plusone(x: libc::c_int) -> libc::c_int; fn current_timestamp() -> libc::c_long; } use std::env; fn run(count: libc::c_int) { unsafe { // start immediately let start = current_timestamp(); let mut x = 0; while x < count { x = plusone(x); } let end = current_timestamp(); let elapsed = end - start; println!("{}", elapsed); } } fn main() { let args: Vec<String> = env::args().collect(); if args.len() == 1 { println!("First arg (0 - 2000000000) is required."); return; } let count = args[1].parse::<i32>().unwrap(); if count <= 0 || count > 2000000000 { println!("Must be a positive number not exceeding 2 billion."); return; } run(count as libc::c_int); }
// Criterion benchmarks comparing three GF(2^8) arithmetic implementations:
// the `g2p`-generated field type, `galois_2p8`'s table-driven field, and
// `reed_solomon_erasure`'s `galois_8` module. Each operation is benchmarked
// over input slices of 64 B, 1 KiB and 16 KiB.
use criterion::{BatchSize, BenchmarkId, Criterion, criterion_group, criterion_main};
use galois_2p8;
use galois_2p8::Field;
use rand;
use rand::{Rng, RngCore};
use reed_solomon_erasure;
use g2p;

// Generates the GF(2^8) field type `GF256`.
g2p::g2p!(GF256, 8);

// Element-wise GF(2^8) sum: dest[i] = a[i] + b[i].
fn g2p_addition(a: &[u8], b: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), b.len());
    assert_eq!(b.len(), dest.len());
    for ((&l, &r), d) in Iterator::zip(Iterator::zip(a.into_iter(), b), dest) {
        *d = (GF256::from(l) + GF256::from(r)).into()
    }
}

// NOTE(review): unlike the other addition helpers this *accumulates* `a` and
// `b` into `dest` via two `add_multiword` calls, so it only matches them when
// `dest` starts zeroed — which the benchmark setup guarantees.
fn galois_2p8_addition(field: &galois_2p8::PrimitivePolynomialField, a: &[u8], b: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), b.len());
    assert_eq!(b.len(), dest.len());
    field.add_multiword(dest, a);
    field.add_multiword(dest, b);
}

fn reed_solomon_erasure_addition(a: &[u8], b: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), b.len());
    assert_eq!(b.len(), dest.len());
    for ((&l, &r), d) in Iterator::zip(Iterator::zip(a.into_iter(), b), dest) {
        *d = reed_solomon_erasure::galois_8::add(l, r);
    }
}

// Element-wise GF(2^8) product: dest[i] = a[i] * b[i].
fn g2p_multiplication(a: &[u8], b: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), b.len());
    assert_eq!(b.len(), dest.len());
    for ((&l, &r), d) in Iterator::zip(Iterator::zip(a.into_iter(), b), dest) {
        *d = (GF256::from(l) * GF256::from(r)).into()
    }
}

fn galois_2p8_multiplication(field: &galois_2p8::PrimitivePolynomialField, a: &[u8], b: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), b.len());
    assert_eq!(b.len(), dest.len());
    for ((&l, &r), d) in Iterator::zip(Iterator::zip(a.into_iter(), b), dest) {
        *d = field.mult(l, r)
    }
}

fn reed_solomon_erasure_multiplication(a: &[u8], b: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), b.len());
    assert_eq!(b.len(), dest.len());
    for ((&l, &r), d) in Iterator::zip(Iterator::zip(a.into_iter(), b), dest) {
        *d = reed_solomon_erasure::galois_8::mul(l, r);
    }
}

// Scalar product: dest[i] = a[i] * b for a single constant b.
fn g2p_multiplication_const(a: &[u8], b: u8, dest: &mut [u8]) {
    assert_eq!(a.len(), dest.len());
    for (&l, d) in Iterator::zip(a.into_iter(), dest) {
        *d = (GF256::from(l) * GF256::from(b)).into()
    }
}

// galois_2p8 only multiplies in place, so copy `a` into `dest` first.
fn galois_2p8_multiplication_const(field: &galois_2p8::PrimitivePolynomialField, a: &[u8], b: u8, dest: &mut [u8]) {
    assert_eq!(a.len(), dest.len());
    dest.copy_from_slice(a);
    field.mult_multiword(dest, b);
}

fn reed_solomon_erasure_multiplication_const(a: &[u8], b: u8, dest: &mut [u8]) {
    assert_eq!(a.len(), dest.len());
    reed_solomon_erasure::galois_8::mul_slice(b, a, dest);
}

// Element-wise GF(2^8) quotient: dest[i] = a[i] / b[i] (divisors are
// guaranteed non-zero by the benchmark setup).
fn g2p_division(a: &[u8], b: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), b.len());
    assert_eq!(b.len(), dest.len());
    for ((&l, &r), d) in Iterator::zip(Iterator::zip(a.into_iter(), b), dest) {
        *d = (GF256::from(l) / GF256::from(r)).into()
    }
}

fn galois_2p8_division(field: &galois_2p8::PrimitivePolynomialField, a: &[u8], b: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), b.len());
    assert_eq!(b.len(), dest.len());
    for ((&l, &r), d) in Iterator::zip(Iterator::zip(a.into_iter(), b), dest) {
        *d = field.div(l, r)
    }
}

fn reed_solomon_erasure_division(a: &[u8], b: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), b.len());
    assert_eq!(b.len(), dest.len());
    for ((&l, &r), d) in Iterator::zip(Iterator::zip(a.into_iter(), b), dest) {
        *d = reed_solomon_erasure::galois_8::div(l, r);
    }
}

// Scalar quotient: dest[i] = a[i] / b for a single non-zero constant b.
fn g2p_division_const(a: &[u8], b: u8, dest: &mut [u8]) {
    assert_eq!(a.len(), dest.len());
    for (&l, d) in Iterator::zip(a.into_iter(), dest) {
        *d = (GF256::from(l) / GF256::from(b)).into()
    }
}

fn galois_2p8_division_const(field: &galois_2p8::PrimitivePolynomialField, a: &[u8], b: u8, dest: &mut [u8]) {
    assert_eq!(a.len(), dest.len());
    dest.copy_from_slice(a);
    field.div_multiword(dest, b);
}

// reed_solomon_erasure has no slice-by-constant divide, so divide per element.
fn reed_solomon_erasure_division_const(a: &[u8], b: u8, dest: &mut [u8]) {
    assert_eq!(a.len(), dest.len());
    for (&l, d) in Iterator::zip(a.into_iter(), dest) {
        *d = reed_solomon_erasure::galois_8::div(l, b);
    }
}

// Multiplicative inverse computed as 1 / a[i] (inputs are non-zero).
fn g2p_inverse(a: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), dest.len());
    for (&inv, d) in Iterator::zip(a.into_iter(), dest) {
        *d = (GF256::from(1) / GF256::from(inv)).into()
    }
}

fn galois_2p8_inverse(field: &galois_2p8::PrimitivePolynomialField, a: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), dest.len());
    for (&inv, d) in Iterator::zip(a.into_iter(), dest) {
        *d = field.div(1, inv);
    }
}

fn reed_solomon_erasure_inverse(a: &[u8], dest: &mut [u8]) {
    assert_eq!(a.len(), dest.len());
    for (&inv, d) in Iterator::zip(a.into_iter(), dest) {
        *d = reed_solomon_erasure::galois_8::div(1, inv);
    }
}

// Runs every benchmark group. Inputs are cloned per iteration via
// `iter_batched` so the timed closure never observes its own prior output.
fn all_benches(c: &mut Criterion) {
    let mut rng = rand::thread_rng();
    let galois_2p8_field =
        galois_2p8::PrimitivePolynomialField::new_might_panic(galois_2p8::IrreducablePolynomial::Poly84320);
    let input_sizes = [64, 1_024, 16_384];

    let mut group = c.benchmark_group("addition");
    for &i in input_sizes.iter() {
        let mut a = vec![0; i];
        let mut b = vec![0; i];
        let dest = vec![0; i];
        rng.fill_bytes(&mut a[..]);
        rng.fill_bytes(&mut b[..]);
        group.bench_function(
            BenchmarkId::new("g2p", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), b.clone(), dest.clone()),
                    |(a, b, mut dest)| {
                        g2p_addition(&a, &b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("galois_2p8", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), b.clone(), dest.clone()),
                    |(a, b, mut dest)| {
                        galois_2p8_addition(&galois_2p8_field, &a, &b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("reed_solomon_erasure", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), b.clone(), dest.clone()),
                    |(a, b, mut dest)| {
                        reed_solomon_erasure_addition(&a, &b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
    }
    group.finish();

    let mut group = c.benchmark_group("multiplication");
    for &i in input_sizes.iter() {
        let mut a = vec![0; i];
        let mut b = vec![0; i];
        let dest = vec![0; i];
        rng.fill_bytes(&mut a[..]);
        rng.fill_bytes(&mut b[..]);
        group.bench_function(
            BenchmarkId::new("g2p", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), b.clone(), dest.clone()),
                    |(a, b, mut dest)| {
                        g2p_multiplication(&a, &b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("galois_2p8", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), b.clone(), dest.clone()),
                    |(a, b, mut dest)| {
                        galois_2p8_multiplication(&galois_2p8_field, &a, &b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("reed_solomon_erasure", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), b.clone(), dest.clone()),
                    |(a, b, mut dest)| {
                        reed_solomon_erasure_multiplication(&a, &b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
    }
    group.finish();

    let mut group = c.benchmark_group("multiplication_const");
    for &i in input_sizes.iter() {
        let mut a = vec![0; i];
        let b = rng.gen();
        let dest = vec![0; i];
        rng.fill_bytes(&mut a[..]);
        group.bench_function(
            BenchmarkId::new("g2p", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), dest.clone()),
                    |(a, mut dest)| {
                        g2p_multiplication_const(&a, b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("galois_2p8", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), dest.clone()),
                    |(a, mut dest)| {
                        galois_2p8_multiplication_const(&galois_2p8_field, &a, b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("reed_solomon_erasure", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), dest.clone()),
                    |(a, mut dest)| {
                        reed_solomon_erasure_multiplication_const(&a, b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
    }
    group.finish();

    let mut group = c.benchmark_group("inverse");
    for &i in input_sizes.iter() {
        let mut a = vec![0; i];
        let dest = vec![0; i];
        rng.fill_bytes(&mut a[..]);
        // Zero has no inverse in GF(2^8): re-roll any zero bytes.
        for divisor in &mut a {
            while *divisor == 0 {
                *divisor = rng.gen();
            }
        }
        group.bench_function(
            BenchmarkId::new("g2p", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), dest.clone()),
                    |(a, mut dest)| {
                        g2p_inverse(&a, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("galois_2p8", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), dest.clone()),
                    |(a, mut dest)| {
                        galois_2p8_inverse(&galois_2p8_field, &a, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("reed_solomon_erasure", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), dest.clone()),
                    |(a, mut dest)| {
                        reed_solomon_erasure_inverse(&a, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
    }
    group.finish();

    let mut group = c.benchmark_group("division");
    for &i in input_sizes.iter() {
        let mut a = vec![0; i];
        let mut b = vec![0; i];
        let dest = vec![0; i];
        rng.fill_bytes(&mut a[..]);
        rng.fill_bytes(&mut b[..]);
        // Divisors must be non-zero: re-roll any zero bytes.
        for divisor in &mut b {
            while *divisor == 0 {
                *divisor = rng.gen();
            }
        }
        group.bench_function(
            BenchmarkId::new("g2p", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), b.clone(), dest.clone()),
                    |(a, b, mut dest)| {
                        g2p_division(&a, &b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("galois_2p8", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), b.clone(), dest.clone()),
                    |(a, b, mut dest)| {
                        galois_2p8_division(&galois_2p8_field, &a, &b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("reed_solomon_erasure", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), b.clone(), dest.clone()),
                    |(a, b, mut dest)| {
                        reed_solomon_erasure_division(&a, &b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
    }
    group.finish();

    let mut group = c.benchmark_group("division_const");
    for &i in input_sizes.iter() {
        let mut a = vec![0; i];
        // Constant divisor drawn from 1..=255 so it is never zero.
        let b = rng.gen_range(1..=255);
        let dest = vec![0; i];
        rng.fill_bytes(&mut a[..]);
        group.bench_function(
            BenchmarkId::new("g2p", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), dest.clone()),
                    |(a, mut dest)| {
                        g2p_division_const(&a, b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("galois_2p8", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), dest.clone()),
                    |(a, mut dest)| {
                        galois_2p8_division_const(&galois_2p8_field, &a, b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
        group.bench_function(
            BenchmarkId::new("reed_solomon_erasure", i),
            |bencher| {
                bencher.iter_batched(
                    || (a.clone(), dest.clone()),
                    |(a, mut dest)| {
                        reed_solomon_erasure_division_const(&a, b, &mut dest);
                        dest
                    },
                    BatchSize::SmallInput,
                )
            });
    }
    group.finish();
}

criterion_group!(benches, all_benches);
criterion_main!(benches);
use crate::calc::raw_log::RawLog;
use crate::TimeTrackerError;
use std::cmp::max;
use std::cmp::min;
use std::collections::HashMap;
use std::convert::TryFrom;
use std::fmt;
use std::fmt::Display;
use std::fmt::Formatter;

// Two logs for the same project closer together than this are merged into a
// single span; a larger gap starts a new span.
const MAX_SECONDS_BETWEEN_RECORDS_IN_SPAN: u64 = 5 * 60;

/// A contiguous stretch of time (`start`..`end`, in seconds) attributed to
/// one project.
pub struct Span {
    pub name: String,
    pub start: u64,
    pub end: u64,
}

impl Span {
    /// Length of the span in seconds.
    pub fn duration(&self) -> u64 {
        self.end - self.start
    }
}

impl Display for Span {
    // Serializes as "name/start/end" — the format parsed back by
    // `TryFrom<&str>` below.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}/{}/{}", self.name, self.start, self.end,)
    }
}

/// Parses one span per line of previously serialized data.
pub fn spans_from(processed_data: &str) -> Result<Vec<Span>, TimeTrackerError> {
    let mut spans = vec![];
    for line in processed_data.lines() {
        spans.push(Span::try_from(line)?);
    }
    Ok(spans)
}

impl<'a> TryFrom<&'a str> for Span {
    type Error = TimeTrackerError;

    // Parses the "name/start/end" format produced by `Display`.
    fn try_from(raw_data: &'a str) -> Result<Self, Self::Error> {
        let mut parts = raw_data.split('/');
        let name = match parts.next() {
            Some(v) => v.to_string(),
            None => return Err(TimeTrackerError::InvalidLineError(raw_data.to_string())),
        };
        let start = match parts.next() {
            Some(v) => match v.parse::<u64>() {
                Ok(parsed) => parsed,
                Err(_) => return Err(TimeTrackerError::InvalidTimestampError(v.to_string())),
            },
            None => return Err(TimeTrackerError::InvalidLineError(raw_data.to_string())),
        };
        let end = match parts.next() {
            Some(v) => match v.parse::<u64>() {
                Ok(parsed) => parsed,
                Err(_) => return Err(TimeTrackerError::InvalidTimestampError(v.to_string())),
            },
            None => return Err(TimeTrackerError::InvalidLineError(raw_data.to_string())),
        };
        Ok(Span { name, start, end })
    }
}

/// Folds a chronologically ordered list of raw (name, timestamp) logs into
/// spans. Consecutive logs merge into the current span when they share a
/// name and the gap is below `MAX_SECONDS_BETWEEN_RECORDS_IN_SPAN`; a
/// project switch within that gap splits the idle time at its midpoint.
pub fn get_spans_from(mut raw_logs: Vec<RawLog>) -> Vec<Span> {
    if raw_logs.is_empty() {
        return vec![];
    }
    let mut spans = vec![];
    let first_log = raw_logs.remove(0);
    let mut span = Span {
        name: first_log.name,
        start: first_log.timestamp,
        end: first_log.timestamp,
    };
    for log in raw_logs {
        let same_name = log.name == span.name;
        let small_time_gap =
            log.timestamp.saturating_sub(span.end) < MAX_SECONDS_BETWEEN_RECORDS_IN_SPAN;
        match (same_name, small_time_gap) {
            // Same project, small gap: extend the current span.
            (true, true) => span.end = max(log.timestamp, span.end),
            (false, true) => {
                // Project changed within the gap threshold: split the idle
                // time evenly between the two projects at the midpoint.
                let mid_point_time = (max(log.timestamp, span.end) - min(log.timestamp, span.end))
                    / 2
                    + min(log.timestamp, span.end);
                span.end = mid_point_time;
                spans.push(span);
                span = Span {
                    name: log.name,
                    start: mid_point_time,
                    end: log.timestamp,
                };
            }
            (_, false) => {
                // Gap too large: close the current span and start a new one
                // at the log's own timestamp, regardless of project.
                spans.push(span);
                span = Span {
                    name: log.name,
                    start: log.timestamp,
                    end: log.timestamp,
                };
            }
        };
    }
    spans.push(span);
    spans
}

/// Maps each project name to the latest `end` timestamp seen among its spans.
pub fn get_last_timestamp_per_project(spans: &[Span]) -> HashMap<String, u64> {
    let mut map = HashMap::new();
    for span in spans {
        // TODO is there a more efficient way to do this?
        match map.remove(&span.name) {
            None => {
                map.insert(span.name.clone(), span.end);
            }
            Some(map_time) => {
                if map_time < span.end {
                    map.insert(span.name.clone(), span.end);
                } else {
                    map.insert(span.name.clone(), map_time);
                }
            }
        }
    }
    map
}

/// Flattens the (project -> last timestamp) map back into `RawLog`s sorted
/// by ascending timestamp.
pub fn get_vec_raw_logs_from_map_last_timestamp_per_project(
    map: HashMap<String, u64>,
) -> Vec<RawLog> {
    let mut raw_logs: Vec<RawLog> = map
        .into_iter()
        .map(|(project_name, timestamp)| RawLog {
            name: project_name,
            timestamp,
        })
        .collect();
    raw_logs.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));
    raw_logs
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn raw_log_to_span_no_logs() {
        let spans = get_spans_from(vec![]);
        assert_eq!(0, spans.len())
    }

    #[test]
    fn raw_log_to_span_single_project() {
        let project_name = "test_proj";
        let raw_log_1 = RawLog {
            name: String::from(project_name),
            timestamp: 0,
        };
        let raw_log_2 = RawLog {
            name: String::from(project_name),
            timestamp: 5,
        };
        let raw_log_3 = RawLog {
            name: String::from(project_name),
            timestamp: 20,
        };
        let raw_logs = vec![raw_log_1, raw_log_2, raw_log_3];
        let mut spans = get_spans_from(raw_logs);
        assert_eq!(1, spans.len());
        let span = spans.pop().unwrap();
        assert_eq!(project_name, span.name);
        assert_eq!(20, span.duration())
    }

    #[test]
    fn raw_log_to_span_two_project() {
        let project_name = "test_proj";
        let project_2_name = "test_proj2";
        let raw_log_1 = RawLog {
            name: String::from(project_name),
            timestamp: 0,
        };
        let raw_log_2 = RawLog {
            name: String::from(project_name),
            timestamp: 6,
        };
        let raw_log_3 = RawLog {
            name: String::from(project_2_name),
            timestamp: 18,
        };
        let raw_log_4 = RawLog {
            name: String::from(project_2_name),
            timestamp: 26,
        };
        let raw_logs = vec![raw_log_1, raw_log_2, raw_log_3, raw_log_4];
        let mut spans = get_spans_from(raw_logs);
        assert_eq!(2, spans.len());
        let span_1 = spans.remove(0);
        assert_eq!(project_name, span_1.name);
        assert_eq!(6 + 6, span_1.duration());
        // time between project timestamps is split equally between projects
        let span_2 = spans.remove(0);
        assert_eq!(project_2_name, span_2.name);
        assert_eq!(8 + 6, span_2.duration());
    }

    #[test]
    fn raw_log_to_span_two_projects_interleaved() {
        let project_1_name = "test_proj";
        let project_2_name = "test_proj2";
        let raw_log_1 = RawLog {
            name: String::from(project_1_name),
            timestamp: 0,
        };
        let raw_log_2 = RawLog {
            name: String::from(project_1_name),
            timestamp: 5,
        };
        let raw_log_3 = RawLog {
            name: String::from(project_2_name),
            timestamp: 20,
        };
        let raw_log_4 = RawLog {
            name: String::from(project_2_name),
            timestamp: 24,
        };
        let raw_log_5 = RawLog {
            name: String::from(project_1_name),
            timestamp: 30,
        };
        let raw_log_6 = RawLog {
            name: String::from(project_1_name),
            timestamp: 36,
        };
        let raw_logs = vec![
            raw_log_1, raw_log_2, raw_log_3, raw_log_4, raw_log_5, raw_log_6,
        ];
        let mut spans = get_spans_from(raw_logs);
        assert_eq!(3, spans.len());
        let span_1 = spans.remove(0);
        assert_eq!(project_1_name, span_1.name);
        assert_eq!(12, span_1.duration());
        let span_2 = spans.remove(0);
        assert_eq!(project_2_name, span_2.name);
        assert_eq!(15, span_2.duration());
        let span_3 = spans.remove(0);
        assert_eq!(project_1_name, span_3.name);
        assert_eq!(9, span_3.duration());
        // The three spans cover the whole 0..36 range with no overlap.
        assert_eq!(
            36,
            span_1.duration() + span_2.duration() + span_3.duration()
        );
    }

    #[test]
    fn raw_log_to_span_large_timegap() {
        let project_name = "test_proj";
        let raw_log_1 = RawLog {
            name: String::from(project_name),
            timestamp: 0,
        };
        let raw_log_2 = RawLog {
            name: String::from(project_name),
            timestamp: 5,
        };
        let raw_log_3 = RawLog {
            name: String::from(project_name),
            timestamp: 555520,
        };
        let raw_log_4 = RawLog {
            name: String::from(project_name),
            timestamp: 555526,
        };
        let raw_logs = vec![raw_log_1, raw_log_2, raw_log_3, raw_log_4];
        let mut spans = get_spans_from(raw_logs);
        assert_eq!(2, spans.len());
        let span_1 = spans.remove(0);
        assert_eq!(project_name, span_1.name);
        assert_eq!(5, span_1.duration());
        let span_2 = spans.remove(0);
        assert_eq!(project_name, span_2.name);
        assert_eq!(6, span_2.duration());
    }

    #[test]
    fn get_last_timestamp_per_project_no_spans() {
        let spans = vec![];
        let last_timestamp_per_project = get_last_timestamp_per_project(&spans);
        assert!(last_timestamp_per_project.is_empty());
    }

    #[test]
    fn get_last_timestamp_per_project_several_spans() {
        let mut spans = vec![];
        let span1a = Span {
            name: String::from("testproj1"),
            start: 0,
            end: 30,
        };
        let span1b = Span {
            name: String::from("testproj1"),
            start: 10030,
            end: 10060,
        };
        let span2a = Span {
            name: String::from("testproj2"),
            start: 530,
            end: 560,
        };
        spans.push(span1a);
        spans.push(span1b);
        spans.push(span2a);
        let last_timestamp_per_project = get_last_timestamp_per_project(&spans);
        assert_eq!(
            &10060u64,
            last_timestamp_per_project
                .get("testproj1")
                .expect("testproj1 not found")
        );
    }

    #[test]
    fn get_vec_raw_logs_from_map_last_timestamp_per_project_empty() {
        let last_timestamp_per_project = HashMap::new();
        assert_eq!(
            0,
            get_vec_raw_logs_from_map_last_timestamp_per_project(last_timestamp_per_project).len()
        );
    }

    #[test]
    fn get_vec_raw_logs_from_map_last_timestamp_per_project_several_projects() {
        let mut last_timestamp_per_project = HashMap::new();
        last_timestamp_per_project.insert(String::from("proj1"), 1);
        last_timestamp_per_project.insert(String::from("proj2"), 2);
        last_timestamp_per_project.insert(String::from("proj3"), 3);
        let last_timestamp_as_vec =
            get_vec_raw_logs_from_map_last_timestamp_per_project(last_timestamp_per_project);
        assert_eq!(3, last_timestamp_as_vec.len());
        assert_eq!(1, last_timestamp_as_vec.get(0).unwrap().timestamp);
    }
}
/*!
galbi is a library that provides shortcut types for idiomatic nested types.

## install

If cargo-edit is installed, you can install it like this:

```sh
cargo add galbi
```

If not, you have to manually add the dependency to Cargo.toml.

```toml
[dependencies]
galbi = "0.2.1"
```

## use

It can be used in the following format.
Since the automatic dereferencing trait is implemented, you can use the inner methods right away.

```rust
use galbi::*;

fn main() {
    let shared = ArcMutex::new(15);
    let get = shared.lock().unwrap();
    println!("{}", *get);
}
```

## features

- `Rc<RefCell<T>>` -> `RcCell<T>`
- `Arc<Mutex<T>>` -> `ArcMutex<T>`
- `Option<Box<T>>` -> `OptionBox<T>`
- ... more later

## link

- [document](https://docs.rs/galbi)
- [repository](https://github.com/myyrakle/galbi)
*/

// Wrapper implementations live under `src/impl/`; the explicit `#[path]`
// attributes point each module at its file there. Each module is re-exported
// wholesale so users can `use galbi::*`.
#[path = "./impl/option_box.rs"]
pub mod option_box;
pub use option_box::*;

#[path = "./impl/rc_cell.rs"]
pub mod rc_cell;
pub use rc_cell::*;

#[path = "./impl/arc_mutex.rs"]
pub mod arc_mutex;
pub use arc_mutex::*;
// Foreign-language binding declarations.
// NOTE(review): `foreigner_class!` / `foreign_enum!` appear to come from a
// SWIG-like binding generator (e.g. rust_swig/flapigen) — confirm which crate
// provides these macros.

// Opaque wrapper class around the Rust `Boo` type.
foreigner_class!(class Boo {
    self_type Boo;
    constructor Boo::new() -> Boo;
    method Boo::something(&self) -> i32;
});

// Maps foreign enum values onto `ControlItem` variants.
foreign_enum!(
    enum ControlItem {
        GNSS = ControlItem::GnssWorking,
        GPS_PROVIDER = ControlItem::AndroidGPSOn,
    }
);

// `Foo` exercises `Option` returns across the FFI boundary: primitives,
// strings, owned and borrowed objects, and enums.
foreigner_class!(class Foo {
    self_type Foo;
    constructor Foo::default() -> Foo;
    method Foo::f1(&self) -> Option<Boo>;
    method Foo::f2(&self) -> Option<f64>;
    method Foo::f3(&self) -> Option<u32>;
    method Foo::f4(&self) -> Option<usize>;
    method Foo::f5(&self) -> Option<&Boo>;
    method Foo::f6(&self) -> Option<ControlItem>;
    method Foo::f7(&self) -> Option<u64>;
    method Foo::f8(&self) -> Option<&str>;
    method Foo::f9(&self) -> Option<String>;
    method Foo::f10(&self) -> Option<bool>;
});
// Builders for the protobuf request messages of the InfluxDB storage gRPC API.
pub mod generated_types {
    pub use generated_types::influxdata::platform::storage::{read_group_request::Group, *};
}

use snafu::Snafu;

use self::generated_types::*;
use super::response::{
    FIELD_TAG_KEY_BIN, FIELD_TAG_KEY_TEXT, MEASUREMENT_TAG_KEY_BIN, MEASUREMENT_TAG_KEY_TEXT,
};
use ::generated_types::{aggregate::AggregateType, google::protobuf::*};

#[derive(Debug, Snafu)]
pub enum Error {
    // Raised when a `std::time::Duration` cannot be represented as i64
    // nanoseconds (see `read_window_aggregate`).
    #[snafu(display("duration {:?} too large", d))]
    Duration { d: std::time::Duration },
}

pub type Result<T, E = Error> = std::result::Result<T, E>;

/// Builds a `MeasurementFieldsRequest` for the given source, measurement and
/// `[start, stop]` time range.
pub fn measurement_fields(
    org_bucket: Any,
    measurement: String,
    start: i64,
    stop: i64,
    predicate: std::option::Option<Predicate>,
) -> MeasurementFieldsRequest {
    generated_types::MeasurementFieldsRequest {
        source: Some(org_bucket),
        measurement,
        range: Some(TimestampRange { start, end: stop }),
        predicate,
    }
}

/// Builds a `MeasurementTagKeysRequest` for the given source, measurement and
/// time range.
pub fn measurement_tag_keys(
    org_bucket: Any,
    measurement: String,
    start: i64,
    stop: i64,
    predicate: std::option::Option<Predicate>,
) -> MeasurementTagKeysRequest {
    generated_types::MeasurementTagKeysRequest {
        source: Some(org_bucket),
        measurement,
        range: Some(TimestampRange { start, end: stop }),
        predicate,
    }
}

/// Builds a `ReadFilterRequest` over the given time range.
pub fn read_filter(
    org_bucket: Any,
    start: i64,
    stop: i64,
    predicate: std::option::Option<Predicate>,
) -> ReadFilterRequest {
    generated_types::ReadFilterRequest {
        predicate,
        read_source: Some(org_bucket),
        range: Some(TimestampRange { start, end: stop }),
        key_sort: read_filter_request::KeySort::Unspecified as i32, // IOx doesn't support any other sort
        tag_key_meta_names: TagKeyMetaNames::Binary as i32,
    }
}

/// Builds a `ReadGroupRequest` grouping by `group_keys` with an optional
/// aggregate.
pub fn read_group(
    org_bucket: Any,
    start: i64,
    stop: i64,
    predicate: std::option::Option<Predicate>,
    aggregate: std::option::Option<AggregateType>,
    group: Group,
    group_keys: Vec<String>,
) -> ReadGroupRequest {
    generated_types::ReadGroupRequest {
        predicate,
        read_source: Some(org_bucket),
        range: Some(TimestampRange { start, end: stop }),
        aggregate: aggregate.map(|a| Aggregate { r#type: a as i32 }),
        group: group as i32,
        group_keys,
    }
}

/// Builds a `ReadWindowAggregateRequest`.
///
/// `every` and `offset` are converted to i64 nanoseconds; returns
/// `Error::Duration` if either does not fit.
#[allow(clippy::too_many_arguments)]
pub fn read_window_aggregate(
    org_bucket: Any,
    start: i64,
    stop: i64,
    predicate: std::option::Option<Predicate>,
    every: std::time::Duration,
    offset: std::time::Duration,
    aggregates: Vec<AggregateType>,
    window: std::option::Option<Window>,
) -> Result<ReadWindowAggregateRequest, Error> {
    let window_every = if every.as_nanos() > i64::MAX as u128 {
        return DurationSnafu { d: every }.fail();
    } else {
        every.as_nanos() as i64
    };

    let offset = if offset.as_nanos() > i64::MAX as u128 {
        return DurationSnafu { d: offset }.fail();
    } else {
        offset.as_nanos() as i64
    };

    // wrap in the PB message type for aggregates.
    let aggregate = aggregates
        .into_iter()
        .map(|a| Aggregate { r#type: a as i32 })
        .collect::<Vec<_>>();

    Ok(generated_types::ReadWindowAggregateRequest {
        predicate,
        read_source: Some(org_bucket),
        range: Some(TimestampRange { start, end: stop }),
        window_every,
        offset,
        aggregate,
        window,
        tag_key_meta_names: TagKeyMetaNames::Text as i32,
    })
}

/// Builds a `TagValuesRequest`; the special measurement/field tag keys are
/// translated to their binary representations.
pub fn tag_values(
    org_bucket: Any,
    start: i64,
    stop: i64,
    predicate: std::option::Option<Predicate>,
    tag_key: String,
) -> TagValuesRequest {
    let tag_key = if tag_key_is_measurement(tag_key.as_bytes()) {
        MEASUREMENT_TAG_KEY_BIN.to_vec()
    } else if tag_key_is_field(tag_key.as_bytes()) {
        FIELD_TAG_KEY_BIN.to_vec()
    } else {
        tag_key.as_bytes().to_vec()
    };

    generated_types::TagValuesRequest {
        predicate,
        tags_source: Some(org_bucket),
        range: Some(TimestampRange { start, end: stop }),
        tag_key,
    }
}

// True if `key` is the measurement meta tag key in either text or binary form.
pub(crate) fn tag_key_is_measurement(key: &[u8]) -> bool {
    (key == MEASUREMENT_TAG_KEY_TEXT) || (key == MEASUREMENT_TAG_KEY_BIN)
}

// True if `key` is the field meta tag key in either text or binary form.
pub(crate) fn tag_key_is_field(key: &[u8]) -> bool {
    (key == FIELD_TAG_KEY_TEXT) || (key == FIELD_TAG_KEY_BIN)
}

#[cfg(test)]
mod test_super {
    use std::num::NonZeroU64;

    use influxdb_storage_client::{Client, OrgAndBucket};

    use super::*;

    #[test]
    fn test_read_window_aggregate_durations() {
        let org_bucket = Client::read_source(
            &OrgAndBucket::new(
                NonZeroU64::new(123_u64).unwrap(),
                NonZeroU64::new(456_u64).unwrap(),
            ),
            0,
        );

        let got = read_window_aggregate(
            org_bucket.clone(),
            1,
            10,
            None,
            std::time::Duration::from_millis(3),
            std::time::Duration::from_millis(2),
            vec![],
            None,
        )
        .unwrap();
        assert_eq!(got.window_every, 3_000_000);
        assert_eq!(got.offset, 2_000_000);

        let got = read_window_aggregate(
            org_bucket.clone(),
            1,
            10,
            None,
            std::time::Duration::from_secs(u64::MAX),
            std::time::Duration::from_millis(2),
            vec![],
            None,
        );
        assert!(got.is_err());

        let got = read_window_aggregate(
            org_bucket,
            1,
            10,
            None,
            std::time::Duration::from_secs(3),
            std::time::Duration::from_secs(u64::MAX),
            vec![],
            None,
        );
        assert!(got.is_err());
    }
}

// TODO Add the following helpers for building requests:
//
// * tag_keys
// * tag_values_with_measurement_and_key
// * measurement_names
/*
 * CoSyNe: A CoSyNe implementation in Rust
 *
 * https://people.idsia.ch/~juergen/gomez08a.pdf
 */

use crate::vectorizable::Vectorizable;
use rand::prelude::*;

/// Cooperative synapse neuroevolution optimizer over any `Vectorizable` model.
#[derive(Clone, Debug)]
pub struct Cosyne<T: Vectorizable> {
    // NxM matrix, one column is one parameter.
    // Stored column-major: parameter p's subpopulation occupies
    // population_raw[p * subpop_size .. (p + 1) * subpop_size].
    population_raw: Vec<f64>,
    dimension: usize,
    settings: CosyneSettings,
    ctx: T::Context,
}

/// Tunable hyperparameters; construct via `CosyneSettings::default()` and the
/// builder-style setters below.
#[derive(Clone, Debug)]
pub struct CosyneSettings {
    subpop_size: usize,
    num_pop_replacement: usize,
    sigma: f64,
    shrinkage_multiplier: f64,
    sampler: CosyneSampler,
}

/// Distribution used when perturbing a parameter value.
#[derive(Clone, Copy, Debug)]
pub enum CosyneSampler {
    Uniform,
    Gaussian,
    Cauchy,
}

impl CosyneSampler {
    /// Returns `value` plus a random perturbation of scale `sigma`.
    /// For `Uniform`, `shrinkage_multiplier` shrinks the side of the range
    /// pointing away from zero; for the other samplers it scales the noise.
    pub fn sample<R: Rng>(
        &self,
        value: f64,
        sigma: f64,
        shrinkage_multiplier: f64,
        rng: &mut R,
    ) -> f64 {
        match self {
            CosyneSampler::Uniform => {
                let mut low_sigma = -sigma;
                let mut high_sigma = sigma;
                if value < 0.0 {
                    low_sigma *= shrinkage_multiplier;
                } else {
                    high_sigma *= shrinkage_multiplier;
                }
                value + rng.gen_range(low_sigma..high_sigma)
            }
            CosyneSampler::Gaussian => {
                let dist = rand_distr::Normal::new(0.0, sigma).unwrap();
                value + dist.sample(rng) * shrinkage_multiplier
            }
            CosyneSampler::Cauchy => {
                let dist = rand_distr::Cauchy::new(0.0, sigma).unwrap();
                value + dist.sample(rng) * shrinkage_multiplier
            }
        }
    }
}

/// One candidate handed out by `ask`; `idx` remembers its column slot so
/// `tell` can write mutations back into the population.
#[derive(Clone, Debug)]
pub struct CosyneCandidate<T> {
    item: T,
    score: f64,
    idx: usize,
}

impl<T: Clone> CosyneCandidate<T> {
    /// The candidate model.
    pub fn item(&self) -> &T {
        &self.item
    }

    /// The score assigned via `set_score` (0.0 until set).
    pub fn score(&self) -> f64 {
        self.score
    }

    /// Records the evaluation result before passing the candidate to `tell`.
    pub fn set_score(&mut self, score: f64) {
        self.score = score;
    }
}

impl CosyneSettings {
    pub fn default() -> Self {
        CosyneSettings {
            subpop_size: 16,
            num_pop_replacement: 10,
            sigma: 1.0,
            shrinkage_multiplier: 1.0,
            sampler: CosyneSampler::Cauchy,
        }
    }

    // shrinkage multiplier is a type of regularization
    //
    // If the current value of some parameter is X, then, when generating a new mutation of X, the
    // new value is:
    //
    // if X >= 0: random_range [-X..X * shrinkage_multiplier]
    // if X < 0: random_range [-X * shrinkage_multiplier..X]
    //
    // Typical values might be something like 0.9 or 0.95, depending on problem. Using values
    // greater than 1 might cause explosion of parameter values. Using too low value may make it
    // hard for the model to learn anything as there's very strong tendency towards 0.
    //
    // Using multiplier of 1 (which is the default) effectively disables this feature.
    pub fn shrinkage_multiplier(mut self, multiplier: f64) -> Self {
        self.shrinkage_multiplier = multiplier;
        self
    }

    /// Sets the subpopulation size per parameter. Panics if zero.
    pub fn subpop_size(mut self, subpop_size: usize) -> Self {
        if subpop_size == 0 {
            panic!("subpop_size must be greater than 0");
        }
        self.subpop_size = subpop_size;
        self
    }

    /// Sets how many candidates are replaced per generation. Panics if zero.
    pub fn num_pop_replacement(mut self, num_pop_replacement: usize) -> Self {
        if num_pop_replacement == 0 {
            panic!("num_pop_replacement must be greater than 0");
        }
        self.num_pop_replacement = num_pop_replacement;
        self
    }

    /// Sets the mutation scale. Panics if not strictly positive.
    pub fn sigma(mut self, sigma: f64) -> Self {
        if sigma <= 0.0 {
            panic!("sigma must be greater than 0");
        }
        self.sigma = sigma;
        self
    }

    pub fn sampler(mut self, sampler: CosyneSampler) -> Self {
        self.sampler = sampler;
        self
    }
}

impl<T: Clone + Vectorizable> Cosyne<T> {
    /// Builds a population seeded from `initial`: column slot 0 of every
    /// parameter holds the initial value, the rest are random samples
    /// around 0.
    pub fn new(initial: &T, settings: &CosyneSettings) -> Self {
        assert!(settings.num_pop_replacement <= settings.subpop_size);
        let (vec, ctx) = initial.to_vec();
        let mut pop: Vec<f64> = Vec::with_capacity(settings.subpop_size * vec.len());
        let mut rng = thread_rng();
        for idx in 0..settings.subpop_size * vec.len() {
            let individual_idx = idx / settings.subpop_size;
            let subpop_idx = idx % settings.subpop_size;
            if subpop_idx == 0 {
                pop.push(vec[individual_idx]);
            } else {
                let sample = settings.sampler.sample(
                    0.0,
                    settings.sigma,
                    settings.shrinkage_multiplier,
                    &mut rng,
                );
                pop.push(sample);
            }
        }
        Self {
            settings: settings.clone(),
            dimension: vec.len(),
            population_raw: pop,
            ctx,
        }
    }

    pub fn settings(&self) -> CosyneSettings {
        self.settings.clone()
    }

    pub fn set_sigma(&mut self, sigma: f64) {
        self.settings.sigma = sigma;
    }

    pub fn sigma(&self) -> f64 {
        self.settings.sigma
    }

    /// Assembles one candidate per subpopulation slot by reading column
    /// `idx` of every parameter's subpopulation.
    pub fn ask(&mut self) -> Vec<CosyneCandidate<T>> {
        let mut candidates: Vec<CosyneCandidate<T>> =
            Vec::with_capacity(self.settings.subpop_size);
        let mut candidate_vec: Vec<f64> = Vec::with_capacity(self.dimension);
        for idx in 0..self.settings.subpop_size {
            candidate_vec.truncate(0);
            for idx2 in 0..self.dimension {
                let offset = idx + idx2 * self.settings.subpop_size;
                candidate_vec.push(self.population_raw[offset]);
            }
            let item = T::from_vec(&candidate_vec, &self.ctx);
            candidates.push(CosyneCandidate {
                item,
                score: 0.0,
                idx: idx,
            });
        }
        candidates
    }

    /// Consumes scored candidates: overwrites the slots of the
    /// `num_pop_replacement` highest-scoring candidates with mutated copies
    /// of the lowest-scoring ones (lower score = better, as in the test
    /// below), then shuffles each parameter's subpopulation independently.
    pub fn tell(&mut self, mut candidates: Vec<CosyneCandidate<T>>) {
        assert_eq!(candidates.len(), self.settings.subpop_size);
        let mut rng = thread_rng();
        // Sort ascending by score; NaN scores compare as equal.
        candidates.sort_unstable_by(|a, b| {
            a.score
                .partial_cmp(&b.score)
                .unwrap_or(std::cmp::Ordering::Equal)
        });
        for idx in 0..self.settings.num_pop_replacement {
            let (cand_vec, _) = candidates[idx].item.to_vec();
            // Target slot: one of the worst-scoring candidates' columns.
            let ridx = idx + (self.settings.subpop_size - self.settings.num_pop_replacement);
            for idx2 in 0..self.dimension {
                let old_value = cand_vec[idx2];
                let sample = self.settings.sampler.sample(
                    old_value,
                    self.settings.sigma,
                    self.settings.shrinkage_multiplier,
                    &mut rng,
                );
                let offset = candidates[ridx].idx + idx2 * self.settings.subpop_size;
                self.population_raw[offset] = sample;
            }
        }
        for idx in 0..self.dimension {
            let offset_start = idx * self.settings.subpop_size;
            let offset_end = (idx + 1) * self.settings.subpop_size;
            // Shuffle
            self.population_raw[offset_start..offset_end].shuffle(&mut rng);
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[derive(Clone, Debug)]
    struct TwoPoly {
        x: f64,
        y: f64,
    }

    impl Vectorizable for TwoPoly {
        type Context = ();

        fn to_vec(&self) -> (Vec<f64>, Self::Context) {
            (vec![self.x, self.y], ())
        }

        fn from_vec(vec: &[f64], _: &Self::Context) -> Self {
            TwoPoly {
                x: vec[0],
                y: vec[1],
            }
        }
    }

    #[test]
    pub fn test_2polynomial() {
        let mut cosyne = Cosyne::new(&TwoPoly { x: 5.0, y: 6.0 }, &CosyneSettings::default());
        let mut best_seen = 100000.0;
        let mut optimized = TwoPoly { x: 100.0, y: 100.0 };
        for _ in 0..10000 {
            let mut cands = cosyne.ask();
            for cand in cands.iter_mut() {
                let item = cand.item();
                let score = (item.x - 2.0).abs() + (item.y - 8.0).abs();
                if score < best_seen {
                    best_seen = score;
                    optimized = item.clone();
                }
                cand.set_score(score);
            }
            cosyne.tell(cands);
        }
        assert!((optimized.x - 2.0).abs() < 0.01);
        assert!((optimized.y - 8.0).abs() < 0.01);
    }
}
use std::cell::RefCell;
use std::rc::Rc;

use fan::evaluator::NamedFans;
use input::evaluator::InputEvaluator;
use parser::{ Evaluator, Node };
use output::Output;
use util;

/// Evaluator for `output` configuration nodes.
///
/// An `output` node binds a named fan to an input expression. Parsing one
/// *removes* the fan from the shared `NamedFans` registry, so each fan can
/// drive at most one output.
pub struct OutputEvaluator {
    // Registry of fans declared elsewhere in the config; shared with other evaluators.
    named_fans : Rc<RefCell<NamedFans>>,
    // Evaluator for the nested input expression of the output node.
    input_evaluator : Rc<RefCell<InputEvaluator>>,
}

impl OutputEvaluator {
    /// Creates an evaluator backed by the shared fan registry and input evaluator.
    pub fn new(
        named_fans_v : Rc<RefCell<NamedFans>>,
        input_evaluator_v : Rc<RefCell<InputEvaluator>>,
    ) -> OutputEvaluator {
        OutputEvaluator {
            named_fans : named_fans_v,
            input_evaluator : input_evaluator_v,
        }
    }
}

impl Evaluator<Output> for OutputEvaluator {
    /// Parses `[fan-name, input-expression]` nodes into an `Output`.
    ///
    /// # Errors
    /// Returns an error if either node is missing or malformed, or if no fan
    /// with the given name is (still) registered.
    fn parse_nodes(&self, nodes: &[Node]) -> Result<Output, String> {
        // `?` replaces the long-deprecated `try!` macro; behavior is identical.
        let fan_name = util::get_text_node("output", nodes, 0)?;
        let input_node = util::get_node("output", nodes, 1)?;
        let mut named_fans = self.named_fans.borrow_mut();
        // `remove` (not a lookup): binding consumes the fan so it cannot be
        // bound twice. `ok_or_else` avoids building the error string on the
        // success path. Message fixed: this is a fan lookup, not a sensor one.
        let fan = named_fans
            .remove(fan_name)
            .ok_or_else(|| format!("No such fan: {}", fan_name))?;
        Ok(Output::new(fan, self.input_evaluator.borrow().parse_node(input_node)?))
    }
}
// Public submodule declarations; the implementations live in the sibling
// files effect.rs, reducer.rs, refs.rs and state.rs (or their directory
// `mod.rs` equivalents).
pub mod effect;
pub mod reducer;
pub mod refs;
pub mod state;
use crate::prelude::*;
use std::os::raw::c_void;
use std::ptr;

/// Raw FFI mirror of Vulkan's `VkPhysicalDeviceImageFormatInfo2KHR`
/// (`VK_KHR_get_physical_device_properties2`). `#[repr(C)]` and the field
/// order must match the C declaration exactly — do not reorder fields.
#[repr(C)]
#[derive(Debug)]
pub struct VkPhysicalDeviceImageFormatInfo2KHR {
    // Structure type tag; `new` always sets
    // VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR.
    pub sType: VkStructureType,
    // Extension chain pointer; `new` sets this to null.
    pub pNext: *const c_void,
    pub format: VkFormat,
    pub imageType: VkImageType,
    pub tiling: VkImageTiling,
    pub usage: VkImageUsageFlagBits,
    pub flags: VkImageCreateFlagBits,
}

impl VkPhysicalDeviceImageFormatInfo2KHR {
    /// Convenience constructor that fills in `sType` and a null `pNext`,
    /// converting `usage` and `flags` from anything convertible into the
    /// corresponding flag-bit types.
    pub fn new<T, U>(
        format: VkFormat,
        image_type: VkImageType,
        tiling: VkImageTiling,
        usage: T,
        flags: U,
    ) -> Self
    where
        T: Into<VkImageUsageFlagBits>,
        U: Into<VkImageCreateFlagBits>,
    {
        VkPhysicalDeviceImageFormatInfo2KHR {
            sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
            pNext: ptr::null(),
            format,
            imageType: image_type,
            tiling,
            usage: usage.into(),
            flags: flags.into(),
        }
    }
}
use crate::twins::mission_twin::types::Waypoint;
use serde::{Deserialize, Serialize};

/// Payload defining (or redefining) a mission: its id, display name and the
/// list of waypoints.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DefineMissionEvent {
    pub id: String,
    pub name: String,
    pub waypoints: Vec<Waypoint>,
}

/// Payload toggling the visibility flag of the mission with the given id.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ShowMissionEvent {
    pub id: String,
    pub visible: bool,
}

/// Mission event envelope, serialized as an internally tagged JSON object:
/// the variant goes into an `"eventType"` field with the variant name
/// camelCased, e.g. `{"eventType": "defineMission", "id": …, …}`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "eventType")]
#[serde(rename_all = "camelCase")]
pub enum MissionEvent {
    DefineMission(DefineMissionEvent),
    ShowMission(ShowMissionEvent),
}
mod utill;
extern crate serde;
extern crate serde_pickle;
use std::collections::BTreeMap;
use std::fs;
use std::f32::consts::PI;

fn main() {
    do_all();
}

/// Prompts the user for projectile parameters (velocity, starting height,
/// launch angle), converts the angle to radians, and pickles the values to a
/// file named `info` in the working directory.
#[no_mangle]
pub fn do_all(){
    // Map from parameter name to value; BTreeMap keeps keys in sorted order
    // for a deterministic pickle.
    let mut map = BTreeMap::new();
    // Prompt for the three inputs. `utill::next_long` appears to return an
    // f32 despite its name (the result is multiplied by the f32 constant PI
    // below) — TODO confirm against utill.rs.
    println!("Enter v value");
    let v = utill::next_long();
    println!("Enter starting height");
    let h = utill::next_long();
    println!("Enter angle of trajectory");
    let th = utill::next_long();
    // Convert the angle from degrees to radians.
    let th = th * (PI/180.0);
    // Store the parameters under their short names.
    map.insert("v".to_string(), v);
    map.insert("h".to_string(), h);
    map.insert("th".to_string(), th);
    // Serialize as a Python pickle (second arg is this serde_pickle
    // version's "use pickle protocol 3" flag) and write to disk.
    let serialized = serde_pickle::to_vec(&map, true).unwrap();
    fs::write("info", serialized).expect("Unable to write file");
}
//! Day 2
//!
//! Password-policy validation: each input line is a policy plus a password;
//! part 1 and part 2 interpret the policy's two numbers differently.

trait Solution {
    fn part_1(&self) -> usize;
    fn part_2(&self) -> usize;
}

impl Solution for str {
    // Count passwords valid under the part-1 (occurrence count) rule.
    fn part_1(&self) -> usize {
        filter_valid(
            &parsers::input(self).expect("Failed to parse the input"),
            part_1_rule,
        )
        .count()
    }

    // Count passwords valid under the part-2 (positional) rule.
    fn part_2(&self) -> usize {
        filter_valid(
            &parsers::input(self).expect("Failed to parse the input"),
            part_2_rule,
        )
        .count()
    }
}

/// Lazily yields the passwords from `data` that satisfy `is_valid` under
/// their associated policy.
fn filter_valid<'a>(
    data: &'a [(Policy, &str)],
    is_valid: impl Fn(&Policy, &str) -> bool,
) -> impl Iterator<Item = &'a str> {
    data.iter().filter_map(move |(policy, password)| {
        if is_valid(policy, password) {
            Some(*password)
        } else {
            None
        }
    })
}

/// Part 1: the policy letter must occur between `range[0]` and `range[1]`
/// times (inclusive) in the password.
fn part_1_rule(policy: &Policy, password: &str) -> bool {
    let count = password.chars().filter(|c| *c == policy.letter).count();
    count >= policy.range[0] && count <= policy.range[1]
}

/// Part 2: exactly one of the two 1-based character positions named by the
/// policy (`range[0]` and `range[1]`) must hold the policy letter.
fn part_2_rule(policy: &Policy, password: &str) -> bool {
    password
        .char_indices()
        // `ix` is 0-based; the puzzle positions are 1-based, hence `ix + 1`.
        .filter(|(ix, c)| policy.range.contains(&(ix + 1)) && *c == policy.letter)
        .count()
        == 1
}

/// A password policy: two numbers (count bounds in part 1, positions in
/// part 2) and the letter they constrain.
#[derive(Debug, PartialEq)]
pub struct Policy {
    range: [usize; 2],
    letter: char,
}

mod parsers {
    use nom::{
        bytes::complete::tag,
        character::complete::{alpha1, anychar, char, line_ending},
        error::Error,
        multi::separated_list0,
        sequence::separated_pair,
        IResult,
    };

    use crate::parsers::{finished_parser, integer};

    use super::Policy;

    /// Parses the whole input: newline-separated `"<policy>: <password>"`
    /// lines, requiring all input to be consumed.
    pub fn input(s: &str) -> Result<Vec<(Policy, &str)>, Error<&str>> {
        finished_parser(separated_list0(
            line_ending,
            separated_pair(policy, tag(": "), alpha1),
        ))(s)
    }

    /// Parses one policy of the form `"<n>-<m> <letter>"`.
    pub fn policy(s: &str) -> IResult<&str, Policy> {
        let (s, first) = integer(s)?;
        let (s, _) = char('-')(s)?;
        let (s, second) = integer(s)?;
        let (s, _) = char(' ')(s)?;
        let (s, letter) = anychar(s)?;
        Ok((
            s,
            Policy {
                range: [first, second],
                letter,
            },
        ))
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn example_policy() {
        assert_eq!(
            parsers::policy("1-3 a"),
            Ok((
                "",
                Policy {
                    range: [1, 3],
                    letter: 'a',
                }
            ))
        )
    }

    #[test]
    fn example_input() {
        assert_eq!(
            parsers::input(
                "\
1-3 a: abcde
1-3 b: cdefg
2-9 c: ccccccccc"
            ),
            Ok(vec![
                (
                    Policy {
                        range: [1, 3],
                        letter: 'a',
                    },
                    "abcde"
                ),
                (
                    Policy {
                        range: [1, 3],
                        letter: 'b',
                    },
                    "cdefg"
                ),
                (
                    Policy {
                        range: [2, 9],
                        letter: 'c',
                    },
                    "ccccccccc"
                ),
            ])
        )
    }

    #[test]
    fn example_part_1_rule() {
        assert!(part_1_rule(
            &Policy {
                range: [1, 3],
                letter: 'a',
            },
            "abcde"
        ));
        assert!(!part_1_rule(
            &Policy {
                range: [1, 3],
                letter: 'b',
            },
            "cdefg"
        ))
    }

    #[test]
    fn example_1() {
        assert_eq!(
            filter_valid(
                &parsers::input(
                    "\
1-3 a: abcde
1-3 b: cdefg
2-9 c: ccccccccc"
                )
                .unwrap(),
                part_1_rule
            )
            .collect::<Vec<_>>(),
            vec!["abcde", "ccccccccc"]
        )
    }

    // Regression pin against the accepted puzzle answer.
    #[test]
    fn part_1() {
        assert_eq!(include_str!("inputs/day_2").part_1(), 517);
    }

    #[test]
    fn example_part_2_rule() {
        assert!(part_2_rule(
            &Policy {
                range: [1, 3],
                letter: 'a',
            },
            "abcde"
        ));
        assert!(!part_2_rule(
            &Policy {
                range: [2, 9],
                letter: 'c',
            },
            "ccccccccc"
        ))
    }

    #[test]
    fn example_2() {
        assert_eq!(
            filter_valid(
                &parsers::input(
                    "\
1-3 a: abcde
1-3 b: cdefg
2-9 c: ccccccccc"
                )
                .unwrap(),
                part_2_rule
            )
            .collect::<Vec<_>>(),
            vec!["abcde"]
        )
    }

    // Regression pin against the accepted puzzle answer.
    #[test]
    fn part_2() {
        assert_eq!(include_str!("inputs/day_2").part_2(), 284);
    }
}
//! Types for Relay-compliant server mod connection_type; mod cursor; mod edge; mod page_info; use std::{fmt::Display, future::Future}; pub use connection_type::Connection; pub use cursor::{CursorType, OpaqueCursor}; pub use edge::Edge; pub use page_info::PageInfo; use crate::{Error, ObjectType, OutputType, Result, SimpleObject}; /// Empty additional fields #[derive(SimpleObject)] #[graphql(internal, fake)] pub struct EmptyFields; /// Used to specify the edge name. pub trait EdgeNameType: Send + Sync { /// Returns the edge type name. fn type_name<T: OutputType>() -> String; } /// Name the edge type by default with the default format. pub struct DefaultEdgeName; impl EdgeNameType for DefaultEdgeName { fn type_name<T: OutputType>() -> String { format!("{}Edge", T::type_name()) } } /// Used to specify the connection name. pub trait ConnectionNameType: Send + Sync { /// Returns the connection type name. fn type_name<T: OutputType>() -> String; } /// Name the connection type by default with the default format. pub struct DefaultConnectionName; impl ConnectionNameType for DefaultConnectionName { fn type_name<T: OutputType>() -> String { format!("{}Connection", T::type_name()) } } /// Parses the parameters and executes the query. 
///
/// # Examples
///
/// ```rust
/// use std::borrow::Cow;
///
/// use async_graphql::*;
/// use async_graphql::types::connection::*;
///
/// struct Query;
///
/// struct Numbers;
///
/// #[derive(SimpleObject)]
/// struct Diff {
///     diff: i32,
/// }
///
/// #[Object]
/// impl Query {
///     async fn numbers(&self,
///         after: Option<String>,
///         before: Option<String>,
///         first: Option<i32>,
///         last: Option<i32>
///     ) -> Result<Connection<usize, i32, EmptyFields, Diff>> {
///         query(after, before, first, last, |after, before, first, last| async move {
///             let mut start = after.map(|after| after + 1).unwrap_or(0);
///             let mut end = before.unwrap_or(10000);
///             if let Some(first) = first {
///                 end = (start + first).min(end);
///             }
///             if let Some(last) = last {
///                 start = if last > end - start {
///                     end
///                 } else {
///                     end - last
///                 };
///             }
///             let mut connection = Connection::new(start > 0, end < 10000);
///             connection.edges.extend(
///                 (start..end).into_iter().map(|n|
///                     Edge::with_additional_fields(n, n as i32, Diff{ diff: (10000 - n) as i32 })),
///             );
///             Ok::<_, Error>(connection)
///         }).await
///     }
/// }
///
/// # tokio::runtime::Runtime::new().unwrap().block_on(async {
/// let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
///
/// assert_eq!(schema.execute("{ numbers(first: 2) { edges { node diff } } }").await.into_result().unwrap().data, value!({
///     "numbers": {
///         "edges": [
///             {"node": 0, "diff": 10000},
///             {"node": 1, "diff": 9999},
///         ]
///     },
/// }));
///
/// assert_eq!(schema.execute("{ numbers(last: 2) { edges { node diff } } }").await.into_result().unwrap().data, value!({
///     "numbers": {
///         "edges": [
///             {"node": 9998, "diff": 2},
///             {"node": 9999, "diff": 1},
///         ]
///     },
/// }));
/// # });
/// ```
///
/// # Custom connection and edge type names
///
/// ```
/// use async_graphql::{connection::*, *};
///
/// #[derive(SimpleObject)]
/// struct MyObj {
///     a: i32,
///     b: String,
/// }
///
/// // Use to custom connection name
/// struct MyConnectionName;
///
/// impl ConnectionNameType for MyConnectionName {
///     fn type_name<T: OutputType>() -> String {
///         "MyConnection".to_string()
///     }
/// }
///
/// // Use to custom edge name
/// struct MyEdgeName;
///
/// impl EdgeNameType for MyEdgeName {
///     fn type_name<T: OutputType>() -> String {
///         "MyEdge".to_string()
///     }
/// }
///
/// struct Query;
///
/// #[Object]
/// impl Query {
///     async fn numbers(
///         &self,
///         after: Option<String>,
///         before: Option<String>,
///         first: Option<i32>,
///         last: Option<i32>,
///     ) -> Connection<usize, MyObj, EmptyFields, EmptyFields, MyConnectionName, MyEdgeName> {
///         let mut connection = Connection::new(false, false);
///         connection.edges.push(Edge::new(1, MyObj { a: 100, b: "abc".to_string() }));
///         connection
///     }
/// }
///
/// # tokio::runtime::Runtime::new().unwrap().block_on(async {
/// let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
///
/// let query = r#"{
///     numbers(first: 2) {
///         __typename
///         edges { __typename node { a b } }
///     }
/// }"#;
/// let data = schema.execute(query).await.into_result().unwrap().data;
/// assert_eq!(data, value!({
///     "numbers": {
///         "__typename": "MyConnection",
///         "edges": [
///             {"__typename": "MyEdge", "node": { "a": 100, "b": "abc" }},
///         ]
///     },
/// }));
/// # });
/// ```
pub async fn query<Name, EdgeName, Cursor, Node, ConnectionFields, EdgeFields, F, R, E>(
    after: Option<String>,
    before: Option<String>,
    first: Option<i32>,
    last: Option<i32>,
    f: F,
) -> Result<Connection<Cursor, Node, ConnectionFields, EdgeFields, Name, EdgeName>>
where
    Name: ConnectionNameType,
    EdgeName: EdgeNameType,
    Cursor: CursorType + Send + Sync,
    <Cursor as CursorType>::Error: Display + Send + Sync + 'static,
    Node: OutputType,
    ConnectionFields: ObjectType,
    EdgeFields: ObjectType,
    F: FnOnce(Option<Cursor>, Option<Cursor>, Option<usize>, Option<usize>) -> R,
    R: Future<
        Output = Result<Connection<Cursor, Node, ConnectionFields, EdgeFields, Name, EdgeName>, E>,
    >,
    E: Into<Error>,
{
    // Thin wrapper over `query_with`: identical validation/decoding, with the
    // result type fixed to this module's `Connection`.
    query_with(after, before, first, last, f).await
}

/// Parses the parameters and executes the query and return a custom
/// `Connection` type.
///
/// `Connection<T>` and `Edge<T>` have certain limitations. For example, you
/// cannot customize the name of the type, so you can use this function to
/// execute the query and return a customized `Connection` type.
///
/// # Examples
///
/// ```rust
///
/// use async_graphql::*;
/// use async_graphql::types::connection::*;
///
/// #[derive(SimpleObject)]
/// struct MyEdge {
///     cursor: usize,
///     node: i32,
///     diff: i32,
/// }
///
/// #[derive(SimpleObject)]
/// struct MyConnection {
///     edges: Vec<MyEdge>,
///     page_info: PageInfo,
/// }
///
/// struct Query;
///
/// #[Object]
/// impl Query {
///     async fn numbers(&self,
///         after: Option<String>,
///         before: Option<String>,
///         first: Option<i32>,
///         last: Option<i32>
///     ) -> Result<MyConnection> {
///         query_with(after, before, first, last, |after, before, first, last| async move {
///             let mut start = after.map(|after| after + 1).unwrap_or(0);
///             let mut end = before.unwrap_or(10000);
///             if let Some(first) = first {
///                 end = (start + first).min(end);
///             }
///             if let Some(last) = last {
///                 start = if last > end - start {
///                     end
///                 } else {
///                     end - last
///                 };
///             }
///             let connection = MyConnection {
///                 edges: (start..end).into_iter().map(|n| MyEdge {
///                     cursor: n,
///                     node: n as i32,
///                     diff: (10000 - n) as i32,
///                 }).collect(),
///                 page_info: PageInfo {
///                     has_previous_page: start > 0,
///                     has_next_page: end < 10000,
///                     start_cursor: Some(start.encode_cursor()),
///                     end_cursor: Some(end.encode_cursor()),
///                 },
///             };
///             Ok::<_, Error>(connection)
///         }).await
///     }
/// }
///
/// # tokio::runtime::Runtime::new().unwrap().block_on(async {
/// let schema = Schema::new(Query, EmptyMutation, EmptySubscription);
///
/// assert_eq!(schema.execute("{ numbers(first: 2) { edges { node diff } } }").await.into_result().unwrap().data, value!({
///     "numbers": {
///         "edges": [
///             {"node": 0, "diff": 10000},
///             {"node": 1, "diff": 9999},
///         ]
///     },
/// }));
///
/// assert_eq!(schema.execute("{ numbers(last: 2) { edges { node diff } } }").await.into_result().unwrap().data, value!({
///     "numbers": {
///         "edges": [
///             {"node": 9998, "diff": 2},
///             {"node": 9999, "diff": 1},
///         ]
///     },
/// }));
/// # });
/// ```
pub async fn query_with<Cursor, T, F, R, E>(
    after: Option<String>,
    before: Option<String>,
    first: Option<i32>,
    last: Option<i32>,
    f: F,
) -> Result<T>
where
    Cursor: CursorType + Send + Sync,
    <Cursor as CursorType>::Error: Display + Send + Sync + 'static,
    F: FnOnce(Option<Cursor>, Option<Cursor>, Option<usize>, Option<usize>) -> R,
    R: Future<Output = Result<T, E>>,
    E: Into<Error>,
{
    // `first` and `last` are mutually exclusive (per the Relay cursor
    // connections spec's pagination-argument rules).
    if first.is_some() && last.is_some() {
        return Err("The \"first\" and \"last\" parameters cannot exist at the same time".into());
    }

    // Validate and widen `first`: reject negatives, then convert to usize.
    let first = match first {
        Some(first) if first < 0 => {
            return Err("The \"first\" parameter must be a non-negative number".into());
        }
        Some(first) => Some(first as usize),
        None => None,
    };

    // Same validation for `last`.
    let last = match last {
        Some(last) if last < 0 => {
            return Err("The \"last\" parameter must be a non-negative number".into());
        }
        Some(last) => Some(last as usize),
        None => None,
    };

    // Decode the opaque cursors; any decode error propagates as `Error`
    // via the `Display` bound on `Cursor::Error`.
    let before = match before {
        Some(before) => Some(Cursor::decode_cursor(&before)?),
        None => None,
    };

    let after = match after {
        Some(after) => Some(Cursor::decode_cursor(&after)?),
        None => None,
    };

    // Hand the validated, decoded parameters to the user-supplied resolver.
    f(after, before, first, last).await.map_err(Into::into)
}
::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtClose ( hdrt : *const ::core::ffi::c_void ) -> ( ) ); ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtContinueSearch ( hsearchcontext : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Security_Cryptography"))] ::windows_sys::core::link ! ( "drtprov.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Security_Cryptography\"`*"] fn DrtCreateDerivedKey ( plocalcert : *const super::super::Security::Cryptography:: CERT_CONTEXT , pkey : *mut DRT_DATA ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Security_Cryptography"))] ::windows_sys::core::link ! ( "drtprov.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Security_Cryptography\"`*"] fn DrtCreateDerivedKeySecurityProvider ( prootcert : *const super::super::Security::Cryptography:: CERT_CONTEXT , plocalcert : *const super::super::Security::Cryptography:: CERT_CONTEXT , ppsecurityprovider : *mut *mut DRT_SECURITY_PROVIDER ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drtprov.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtCreateDnsBootstrapResolver ( port : u16 , pwszaddress : :: windows_sys::core::PCWSTR , ppmodule : *mut *mut DRT_BOOTSTRAP_PROVIDER ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drttransport.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtCreateIpv6UdpTransport ( scope : DRT_SCOPE , dwscopeid : u32 , dwlocalitythreshold : u32 , pwport : *mut u16 , phtransport : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "drtprov.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtCreateNullSecurityProvider ( ppsecurityprovider : *mut *mut DRT_SECURITY_PROVIDER ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "drtprov.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn DrtCreatePnrpBootstrapResolver ( fpublish : super::super::Foundation:: BOOL , pwzpeername : :: windows_sys::core::PCWSTR , pwzcloudname : :: windows_sys::core::PCWSTR , pwzpublishingidentity : :: windows_sys::core::PCWSTR , ppresolver : *mut *mut DRT_BOOTSTRAP_PROVIDER ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drtprov.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtDeleteDerivedKeySecurityProvider ( psecurityprovider : *const DRT_SECURITY_PROVIDER ) -> ( ) ); ::windows_sys::core::link ! ( "drtprov.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtDeleteDnsBootstrapResolver ( presolver : *const DRT_BOOTSTRAP_PROVIDER ) -> ( ) ); ::windows_sys::core::link ! ( "drttransport.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtDeleteIpv6UdpTransport ( htransport : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drtprov.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtDeleteNullSecurityProvider ( psecurityprovider : *const DRT_SECURITY_PROVIDER ) -> ( ) ); ::windows_sys::core::link ! ( "drtprov.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtDeletePnrpBootstrapResolver ( presolver : *const DRT_BOOTSTRAP_PROVIDER ) -> ( ) ); ::windows_sys::core::link ! 
( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtEndSearch ( hsearchcontext : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn DrtGetEventData ( hdrt : *const ::core::ffi::c_void , uleventdatalen : u32 , peventdata : *mut DRT_EVENT_DATA ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtGetEventDataSize ( hdrt : *const ::core::ffi::c_void , puleventdatalen : *mut u32 ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtGetInstanceName ( hdrt : *const ::core::ffi::c_void , ulcbinstancenamesize : u32 , pwzdrtinstancename : :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtGetInstanceNameSize ( hdrt : *const ::core::ffi::c_void , pulcbinstancenamesize : *mut u32 ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn DrtGetSearchPath ( hsearchcontext : *const ::core::ffi::c_void , ulsearchpathsize : u32 , psearchpath : *mut DRT_ADDRESS_LIST ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtGetSearchPathSize ( hsearchcontext : *const ::core::ffi::c_void , pulsearchpathsize : *mut u32 ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtGetSearchResult ( hsearchcontext : *const ::core::ffi::c_void , ulsearchresultsize : u32 , psearchresult : *mut DRT_SEARCH_RESULT ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtGetSearchResultSize ( hsearchcontext : *const ::core::ffi::c_void , pulsearchresultsize : *mut u32 ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn DrtOpen ( psettings : *const DRT_SETTINGS , hevent : super::super::Foundation:: HANDLE , pvcontext : *const ::core::ffi::c_void , phdrt : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtRegisterKey ( hdrt : *const ::core::ffi::c_void , pregistration : *const DRT_REGISTRATION , pvkeycontext : *const ::core::ffi::c_void , phkeyregistration : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! 
( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn DrtStartSearch ( hdrt : *const ::core::ffi::c_void , pkey : *const DRT_DATA , pinfo : *const DRT_SEARCH_INFO , timeout : u32 , hevent : super::super::Foundation:: HANDLE , pvcontext : *const ::core::ffi::c_void , hsearchcontext : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtUnregisterKey ( hkeyregistration : *const ::core::ffi::c_void ) -> ( ) ); ::windows_sys::core::link ! ( "drt.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn DrtUpdateKey ( hkeyregistration : *const ::core::ffi::c_void , pappdata : *const DRT_DATA ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerCollabAddContact ( pwzcontactdata : :: windows_sys::core::PCWSTR , ppcontact : *mut *mut PEER_CONTACT ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabAsyncInviteContact ( pccontact : *const PEER_CONTACT , pcendpoint : *const PEER_ENDPOINT , pcinvitation : *const PEER_INVITATION , hevent : super::super::Foundation:: HANDLE , phinvitation : *mut super::super::Foundation:: HANDLE ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabAsyncInviteEndpoint ( pcendpoint : *const PEER_ENDPOINT , pcinvitation : *const PEER_INVITATION , hevent : super::super::Foundation:: HANDLE , phinvitation : *mut super::super::Foundation:: HANDLE ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerCollabCancelInvitation ( hinvitation : super::super::Foundation:: HANDLE ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerCollabCloseHandle ( hinvitation : super::super::Foundation:: HANDLE ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabDeleteContact ( pwzpeername : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabDeleteEndpointData ( pcendpoint : *const PEER_ENDPOINT ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabDeleteObject ( pobjectid : *const :: windows_sys::core::GUID ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabEnumApplicationRegistrationInfo ( registrationtype : PEER_APPLICATION_REGISTRATION_TYPE , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabEnumApplications ( pcendpoint : *const PEER_ENDPOINT , papplicationid : *const :: windows_sys::core::GUID , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabEnumContacts ( phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerCollabEnumEndpoints ( pccontact : *const PEER_CONTACT , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabEnumObjects ( pcendpoint : *const PEER_ENDPOINT , pobjectid : *const :: windows_sys::core::GUID , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabEnumPeopleNearMe ( phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabExportContact ( pwzpeername : :: windows_sys::core::PCWSTR , ppwzcontactdata : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabGetAppLaunchInfo ( pplaunchinfo : *mut *mut PEER_APP_LAUNCH_INFO ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabGetApplicationRegistrationInfo ( papplicationid : *const :: windows_sys::core::GUID , registrationtype : PEER_APPLICATION_REGISTRATION_TYPE , ppapplication : *mut *mut PEER_APPLICATION_REGISTRATION_INFO ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerCollabGetContact ( pwzpeername : :: windows_sys::core::PCWSTR , ppcontact : *mut *mut PEER_CONTACT ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabGetEndpointName ( ppwzendpointname : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! 
// NOTE(review): Auto-generated FFI bindings (windows-sys / windows-bindgen) for the
// Win32 NetworkManagement::P2P namespace. Do not hand-edit; regenerate from the
// Windows metadata instead. Each `::windows_sys::core::link!` invocation declares one
// extern "system" function from p2p.dll, peerdist.dll, or p2pgraph.dll, gated by the
// crate features named in its `#[doc]`/`#[cfg]` attributes. Signatures are presumably
// transcribed from the Windows SDK headers (p2p.h / peerdist.h) — verify against the
// official Microsoft P2P API documentation before relying on parameter semantics.
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabGetEventData ( hpeerevent : *const ::core::ffi::c_void , ppeventdata : *mut *mut PEER_COLLAB_EVENT_DATA ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerCollabGetInvitationResponse ( hinvitation : super::super::Foundation:: HANDLE , ppinvitationresponse : *mut *mut PEER_INVITATION_RESPONSE ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabGetPresenceInfo ( pcendpoint : *const PEER_ENDPOINT , pppresenceinfo : *mut *mut PEER_PRESENCE_INFO ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabGetSigninOptions ( pdwsigninoptions : *mut u32 ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabInviteContact ( pccontact : *const PEER_CONTACT , pcendpoint : *const PEER_ENDPOINT , pcinvitation : *const PEER_INVITATION , ppresponse : *mut *mut PEER_INVITATION_RESPONSE ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabInviteEndpoint ( pcendpoint : *const PEER_ENDPOINT , pcinvitation : *const PEER_INVITATION , ppresponse : *mut *mut PEER_INVITATION_RESPONSE ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerCollabParseContact ( pwzcontactdata : :: windows_sys::core::PCWSTR , ppcontact : *mut *mut PEER_CONTACT ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabQueryContactData ( pcendpoint : *const PEER_ENDPOINT , ppwzcontactdata : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabRefreshEndpointData ( pcendpoint : *const PEER_ENDPOINT ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabRegisterApplication ( pcapplication : *const PEER_APPLICATION_REGISTRATION_INFO , registrationtype : PEER_APPLICATION_REGISTRATION_TYPE ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerCollabRegisterEvent ( hevent : super::super::Foundation:: HANDLE , ceventregistration : u32 , peventregistrations : *const PEER_COLLAB_EVENT_REGISTRATION , phpeerevent : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabSetEndpointName ( pwzendpointname : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabSetObject ( pcobject : *const PEER_OBJECT ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabSetPresenceInfo ( pcpresenceinfo : *const PEER_PRESENCE_INFO ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabShutdown ( ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerCollabSignin ( hwndparent : super::super::Foundation:: HWND , dwsigninoptions : u32 ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabSignout ( dwsigninoptions : u32 ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabStartup ( wversionrequested : u16 ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabSubscribeEndpointData ( pcendpoint : *const PEER_ENDPOINT ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabUnregisterApplication ( papplicationid : *const :: windows_sys::core::GUID , registrationtype : PEER_APPLICATION_REGISTRATION_TYPE ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCollabUnregisterEvent ( hpeerevent : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerCollabUnsubscribeEndpointData ( pcendpoint : *const PEER_ENDPOINT ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerCollabUpdateContact ( pcontact : *const PEER_CONTACT ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerCreatePeerName ( pwzidentity : :: windows_sys::core::PCWSTR , pwzclassifier : :: windows_sys::core::PCWSTR , ppwzpeername : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! 
// --- peerdist.dll: BranchCache / Peer Distribution (PeerDist*) client and server APIs ---
( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistClientAddContentInformation ( hpeerdist : isize , hcontenthandle : isize , cbnumberofbytes : u32 , pbuffer : *const u8 , lpoverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistClientAddData ( hpeerdist : isize , hcontenthandle : isize , cbnumberofbytes : u32 , pbuffer : *const u8 , lpoverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistClientBlockRead ( hpeerdist : isize , hcontenthandle : isize , cbmaxnumberofbytes : u32 , pbuffer : *mut u8 , dwtimeoutinmilliseconds : u32 , lpoverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistClientCancelAsyncOperation ( hpeerdist : isize , hcontenthandle : isize , poverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerDistClientCloseContent ( hpeerdist : isize , hcontenthandle : isize ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! 
( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistClientCompleteContentInformation ( hpeerdist : isize , hcontenthandle : isize , lpoverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistClientFlushContent ( hpeerdist : isize , pcontenttag : *const PEERDIST_CONTENT_TAG , hcompletionport : super::super::Foundation:: HANDLE , ulcompletionkey : usize , lpoverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerDistClientGetInformationByHandle ( hpeerdist : isize , hcontenthandle : isize , peerdistclientinfoclass : PEERDIST_CLIENT_INFO_BY_HANDLE_CLASS , dwbuffersize : u32 , lpinformation : *mut ::core::ffi::c_void ) -> u32 ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerDistClientOpenContent ( hpeerdist : isize , pcontenttag : *const PEERDIST_CONTENT_TAG , hcompletionport : super::super::Foundation:: HANDLE , ulcompletionkey : usize , phcontenthandle : *mut isize ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! 
( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistClientStreamRead ( hpeerdist : isize , hcontenthandle : isize , cbmaxnumberofbytes : u32 , pbuffer : *mut u8 , dwtimeoutinmilliseconds : u32 , lpoverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistGetOverlappedResult ( lpoverlapped : *const super::super::System::IO:: OVERLAPPED , lpnumberofbytestransferred : *mut u32 , bwait : super::super::Foundation:: BOOL ) -> super::super::Foundation:: BOOL ); ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerDistGetStatus ( hpeerdist : isize , ppeerdiststatus : *mut PEERDIST_STATUS ) -> u32 ); ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerDistGetStatusEx ( hpeerdist : isize , ppeerdiststatus : *mut PEERDIST_STATUS_INFO ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistRegisterForStatusChangeNotification ( hpeerdist : isize , hcompletionport : super::super::Foundation:: HANDLE , ulcompletionkey : usize , lpoverlapped : *const super::super::System::IO:: OVERLAPPED , ppeerdiststatus : *mut PEERDIST_STATUS ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! 
( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistRegisterForStatusChangeNotificationEx ( hpeerdist : isize , hcompletionport : super::super::Foundation:: HANDLE , ulcompletionkey : usize , lpoverlapped : *const super::super::System::IO:: OVERLAPPED , ppeerdiststatus : *mut PEERDIST_STATUS_INFO ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistServerCancelAsyncOperation ( hpeerdist : isize , cbcontentidentifier : u32 , pcontentidentifier : *const u8 , poverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerDistServerCloseContentInformation ( hpeerdist : isize , hcontentinfo : isize ) -> u32 ); ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerDistServerCloseStreamHandle ( hpeerdist : isize , hstream : isize ) -> u32 ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerDistServerOpenContentInformation ( hpeerdist : isize , cbcontentidentifier : u32 , pcontentidentifier : *const u8 , ullcontentoffset : u64 , cbcontentlength : u64 , hcompletionport : super::super::Foundation:: HANDLE , ulcompletionkey : usize , phcontentinfo : *mut isize ) -> u32 ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! 
( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerDistServerOpenContentInformationEx ( hpeerdist : isize , cbcontentidentifier : u32 , pcontentidentifier : *const u8 , ullcontentoffset : u64 , cbcontentlength : u64 , pretrievaloptions : *const PEERDIST_RETRIEVAL_OPTIONS , hcompletionport : super::super::Foundation:: HANDLE , ulcompletionkey : usize , phcontentinfo : *mut isize ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistServerPublishAddToStream ( hpeerdist : isize , hstream : isize , cbnumberofbytes : u32 , pbuffer : *const u8 , lpoverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistServerPublishCompleteStream ( hpeerdist : isize , hstream : isize , lpoverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerDistServerPublishStream ( hpeerdist : isize , cbcontentidentifier : u32 , pcontentidentifier : *const u8 , cbcontentlength : u64 , ppublishoptions : *const PEERDIST_PUBLICATION_OPTIONS , hcompletionport : super::super::Foundation:: HANDLE , ulcompletionkey : usize , phstream : *mut isize ) -> u32 ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_IO"))] ::windows_sys::core::link ! 
( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_System_IO\"`*"] fn PeerDistServerRetrieveContentInformation ( hpeerdist : isize , hcontentinfo : isize , cbmaxnumberofbytes : u32 , pbuffer : *mut u8 , lpoverlapped : *const super::super::System::IO:: OVERLAPPED ) -> u32 ); ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerDistServerUnpublish ( hpeerdist : isize , cbcontentidentifier : u32 , pcontentidentifier : *const u8 ) -> u32 ); ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerDistShutdown ( hpeerdist : isize ) -> u32 ); ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerDistStartup ( dwversionrequested : u32 , phpeerdist : *mut isize , pdwsupportedversion : *mut u32 ) -> u32 ); ::windows_sys::core::link ! ( "peerdist.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerDistUnregisterForStatusChangeNotification ( hpeerdist : isize ) -> u32 ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerEndEnumeration ( hpeerenum : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerEnumGroups ( pwzidentity : :: windows_sys::core::PCWSTR , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerEnumIdentities ( phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
// --- p2p.dll enumeration helpers, then p2pgraph.dll: Peer Graphing (PeerGraph*) APIs ---
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerFreeData ( pvdata : *const ::core::ffi::c_void ) -> ( ) ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGetItemCount ( hpeerenum : *const ::core::ffi::c_void , pcount : *mut u32 ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGetNextItem ( hpeerenum : *const ::core::ffi::c_void , pcount : *mut u32 , pppvitems : *mut *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGraphAddRecord ( hgraph : *const ::core::ffi::c_void , precord : *const PEER_RECORD , precordid : *mut :: windows_sys::core::GUID ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphClose ( hgraph : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphCloseDirectConnection ( hgraph : *const ::core::ffi::c_void , ullconnectionid : u64 ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerGraphConnect ( hgraph : *const ::core::ffi::c_void , pwzpeerid : :: windows_sys::core::PCWSTR , paddress : *const PEER_ADDRESS , pullconnectionid : *mut u64 ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! 
( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGraphCreate ( pgraphproperties : *const PEER_GRAPH_PROPERTIES , pwzdatabasename : :: windows_sys::core::PCWSTR , psecurityinterface : *const PEER_SECURITY_INTERFACE , phgraph : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphDelete ( pwzgraphid : :: windows_sys::core::PCWSTR , pwzpeerid : :: windows_sys::core::PCWSTR , pwzdatabasename : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGraphDeleteRecord ( hgraph : *const ::core::ffi::c_void , precordid : *const :: windows_sys::core::GUID , flocal : super::super::Foundation:: BOOL ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphEndEnumeration ( hpeerenum : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphEnumConnections ( hgraph : *const ::core::ffi::c_void , dwflags : u32 , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphEnumNodes ( hgraph : *const ::core::ffi::c_void , pwzpeerid : :: windows_sys::core::PCWSTR , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphEnumRecords ( hgraph : *const ::core::ffi::c_void , precordtype : *const :: windows_sys::core::GUID , pwzpeerid : :: windows_sys::core::PCWSTR , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphExportDatabase ( hgraph : *const ::core::ffi::c_void , pwzfilepath : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphFreeData ( pvdata : *const ::core::ffi::c_void ) -> ( ) ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphGetEventData ( hpeerevent : *const ::core::ffi::c_void , ppeventdata : *mut *mut PEER_GRAPH_EVENT_DATA ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphGetItemCount ( hpeerenum : *const ::core::ffi::c_void , pcount : *mut u32 ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphGetNextItem ( hpeerenum : *const ::core::ffi::c_void , pcount : *mut u32 , pppvitems : *mut *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerGraphGetNodeInfo ( hgraph : *const ::core::ffi::c_void , ullnodeid : u64 , ppnodeinfo : *mut *mut PEER_NODE_INFO ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphGetProperties ( hgraph : *const ::core::ffi::c_void , ppgraphproperties : *mut *mut PEER_GRAPH_PROPERTIES ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGraphGetRecord ( hgraph : *const ::core::ffi::c_void , precordid : *const :: windows_sys::core::GUID , pprecord : *mut *mut PEER_RECORD ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphGetStatus ( hgraph : *const ::core::ffi::c_void , pdwstatus : *mut u32 ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphImportDatabase ( hgraph : *const ::core::ffi::c_void , pwzfilepath : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphListen ( hgraph : *const ::core::ffi::c_void , dwscope : u32 , dwscopeid : u32 , wport : u16 ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! 
( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGraphOpen ( pwzgraphid : :: windows_sys::core::PCWSTR , pwzpeerid : :: windows_sys::core::PCWSTR , pwzdatabasename : :: windows_sys::core::PCWSTR , psecurityinterface : *const PEER_SECURITY_INTERFACE , crecordtypesyncprecedence : u32 , precordtypesyncprecedence : *const :: windows_sys::core::GUID , phgraph : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerGraphOpenDirectConnection ( hgraph : *const ::core::ffi::c_void , pwzpeerid : :: windows_sys::core::PCWSTR , paddress : *const PEER_ADDRESS , pullconnectionid : *mut u64 ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGraphPeerTimeToUniversalTime ( hgraph : *const ::core::ffi::c_void , pftpeertime : *const super::super::Foundation:: FILETIME , pftuniversaltime : *mut super::super::Foundation:: FILETIME ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGraphRegisterEvent ( hgraph : *const ::core::ffi::c_void , hevent : super::super::Foundation:: HANDLE , ceventregistrations : u32 , peventregistrations : *const PEER_GRAPH_EVENT_REGISTRATION , phpeerevent : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphSearchRecords ( hgraph : *const ::core::ffi::c_void , pwzcriteria : :: windows_sys::core::PCWSTR , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphSendData ( hgraph : *const ::core::ffi::c_void , ullconnectionid : u64 , ptype : *const :: windows_sys::core::GUID , cbdata : u32 , pvdata : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphSetNodeAttributes ( hgraph : *const ::core::ffi::c_void , pwzattributes : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGraphSetPresence ( hgraph : *const ::core::ffi::c_void , fpresent : super::super::Foundation:: BOOL ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphSetProperties ( hgraph : *const ::core::ffi::c_void , pgraphproperties : *const PEER_GRAPH_PROPERTIES ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphShutdown ( ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphStartup ( wversionrequested : u16 , pversiondata : *mut PEER_VERSION_DATA ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! 
( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGraphUniversalTimeToPeerTime ( hgraph : *const ::core::ffi::c_void , pftuniversaltime : *const super::super::Foundation:: FILETIME , pftpeertime : *mut super::super::Foundation:: FILETIME ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphUnregisterEvent ( hpeerevent : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGraphUpdateRecord ( hgraph : *const ::core::ffi::c_void , precord : *const PEER_RECORD ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2pgraph.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGraphValidateDeferredRecords ( hgraph : *const ::core::ffi::c_void , crecordids : u32 , precordids : *const :: windows_sys::core::GUID ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGroupAddRecord ( hgroup : *const ::core::ffi::c_void , precord : *const PEER_RECORD , precordid : *mut :: windows_sys::core::GUID ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupClose ( hgroup : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
// --- p2p.dll: Peer Grouping (PeerGroup*) APIs ---
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupCloseDirectConnection ( hgroup : *const ::core::ffi::c_void , ullconnectionid : u64 ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupConnect ( hgroup : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerGroupConnectByAddress ( hgroup : *const ::core::ffi::c_void , caddresses : u32 , paddresses : *const PEER_ADDRESS ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupCreate ( pproperties : *const PEER_GROUP_PROPERTIES , phgroup : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGroupCreateInvitation ( hgroup : *const ::core::ffi::c_void , pwzidentityinfo : :: windows_sys::core::PCWSTR , pftexpiration : *const super::super::Foundation:: FILETIME , croles : u32 , proles : *const :: windows_sys::core::GUID , ppwzinvitation : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupCreatePasswordInvitation ( hgroup : *const ::core::ffi::c_void , ppwzinvitation : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupDelete ( pwzidentity : :: windows_sys::core::PCWSTR , pwzgrouppeername : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupDeleteRecord ( hgroup : *const ::core::ffi::c_void , precordid : *const :: windows_sys::core::GUID ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupEnumConnections ( hgroup : *const ::core::ffi::c_void , dwflags : u32 , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupEnumMembers ( hgroup : *const ::core::ffi::c_void , dwflags : u32 , pwzidentity : :: windows_sys::core::PCWSTR , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupEnumRecords ( hgroup : *const ::core::ffi::c_void , precordtype : *const :: windows_sys::core::GUID , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupExportConfig ( hgroup : *const ::core::ffi::c_void , pwzpassword : :: windows_sys::core::PCWSTR , ppwzxml : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupExportDatabase ( hgroup : *const ::core::ffi::c_void , pwzfilepath : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupGetEventData ( hpeerevent : *const ::core::ffi::c_void , ppeventdata : *mut *mut PEER_GROUP_EVENT_DATA ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupGetProperties ( hgroup : *const ::core::ffi::c_void , ppproperties : *mut *mut PEER_GROUP_PROPERTIES ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGroupGetRecord ( hgroup : *const ::core::ffi::c_void , precordid : *const :: windows_sys::core::GUID , pprecord : *mut *mut PEER_RECORD ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupGetStatus ( hgroup : *const ::core::ffi::c_void , pdwstatus : *mut u32 ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGroupImportConfig ( pwzxml : :: windows_sys::core::PCWSTR , pwzpassword : :: windows_sys::core::PCWSTR , foverwrite : super::super::Foundation:: BOOL , ppwzidentity : *mut :: windows_sys::core::PWSTR , ppwzgroup : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupImportDatabase ( hgroup : *const ::core::ffi::c_void , pwzfilepath : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Security_Cryptography"))] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Security_Cryptography\"`*"] fn PeerGroupIssueCredentials ( hgroup : *const ::core::ffi::c_void , pwzsubjectidentity : :: windows_sys::core::PCWSTR , pcredentialinfo : *const PEER_CREDENTIAL_INFO , dwflags : u32 , ppwzinvitation : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupJoin ( pwzidentity : :: windows_sys::core::PCWSTR , pwzinvitation : :: windows_sys::core::PCWSTR , pwzcloud : :: windows_sys::core::PCWSTR , phgroup : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupOpen ( pwzidentity : :: windows_sys::core::PCWSTR , pwzgrouppeername : :: windows_sys::core::PCWSTR , pwzcloud : :: windows_sys::core::PCWSTR , phgroup : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Networking_WinSock")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerGroupOpenDirectConnection ( hgroup : *const ::core::ffi::c_void , pwzidentity : :: windows_sys::core::PCWSTR , paddress : *const PEER_ADDRESS , pullconnectionid : *mut u64 ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Security_Cryptography"))] ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Security_Cryptography\"`*"] fn PeerGroupParseInvitation ( pwzinvitation : :: windows_sys::core::PCWSTR , ppinvitationinfo : *mut *mut PEER_INVITATION_INFO ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupPasswordJoin ( pwzidentity : :: windows_sys::core::PCWSTR , pwzinvitation : :: windows_sys::core::PCWSTR , pwzpassword : :: windows_sys::core::PCWSTR , pwzcloud : :: windows_sys::core::PCWSTR , phgroup : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGroupPeerTimeToUniversalTime ( hgroup : *const ::core::ffi::c_void , pftpeertime : *const super::super::Foundation:: FILETIME , pftuniversaltime : *mut super::super::Foundation:: FILETIME ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGroupRegisterEvent ( hgroup : *const ::core::ffi::c_void , hevent : super::super::Foundation:: HANDLE , ceventregistration : u32 , peventregistrations : *const PEER_GROUP_EVENT_REGISTRATION , phpeerevent : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupResumePasswordAuthentication ( hgroup : *const ::core::ffi::c_void , hpeereventhandle : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupSearchRecords ( hgroup : *const ::core::ffi::c_void , pwzcriteria : :: windows_sys::core::PCWSTR , phpeerenum : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupSendData ( hgroup : *const ::core::ffi::c_void , ullconnectionid : u64 , ptype : *const :: windows_sys::core::GUID , cbdata : u32 , pvdata : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupSetProperties ( hgroup : *const ::core::ffi::c_void , pproperties : *const PEER_GROUP_PROPERTIES ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupShutdown ( ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupStartup ( wversionrequested : u16 , pversiondata : *mut PEER_VERSION_DATA ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGroupUniversalTimeToPeerTime ( hgroup : *const ::core::ffi::c_void , pftuniversaltime : *const super::super::Foundation:: FILETIME , pftpeertime : *mut super::super::Foundation:: FILETIME ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerGroupUnregisterEvent ( hpeerevent : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerGroupUpdateRecord ( hgroup : *const ::core::ffi::c_void , precord : *const PEER_RECORD ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerHostNameToPeerName ( pwzhostname : :: windows_sys::core::PCWSTR , ppwzpeername : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerIdentityCreate ( pwzclassifier : :: windows_sys::core::PCWSTR , pwzfriendlyname : :: windows_sys::core::PCWSTR , hcryptprov : usize , ppwzidentity : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerIdentityDelete ( pwzidentity : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerIdentityExport ( pwzidentity : :: windows_sys::core::PCWSTR , pwzpassword : :: windows_sys::core::PCWSTR , ppwzexportxml : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerIdentityGetCryptKey ( pwzidentity : :: windows_sys::core::PCWSTR , phcryptprov : *mut usize ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerIdentityGetDefault ( ppwzpeername : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerIdentityGetFriendlyName ( pwzidentity : :: windows_sys::core::PCWSTR , ppwzfriendlyname : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerIdentityGetXML ( pwzidentity : :: windows_sys::core::PCWSTR , ppwzidentityxml : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerIdentityImport ( pwzimportxml : :: windows_sys::core::PCWSTR , pwzpassword : :: windows_sys::core::PCWSTR , ppwzidentity : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerIdentitySetFriendlyName ( pwzidentity : :: windows_sys::core::PCWSTR , pwzfriendlyname : :: windows_sys::core::PCWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerNameToPeerHostName ( pwzpeername : :: windows_sys::core::PCWSTR , ppwzhostname : *mut :: windows_sys::core::PWSTR ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerPnrpEndResolve ( hresolve : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerPnrpGetCloudInfo ( pcnumclouds : *mut u32 , ppcloudinfo : *mut *mut PEER_PNRP_CLOUD_INFO ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerPnrpGetEndpoint ( hresolve : *const ::core::ffi::c_void , ppendpoint : *mut *mut PEER_PNRP_ENDPOINT_INFO ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerPnrpRegister ( pcwzpeername : :: windows_sys::core::PCWSTR , pregistrationinfo : *const PEER_PNRP_REGISTRATION_INFO , phregistration : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerPnrpResolve ( pcwzpeername : :: windows_sys::core::PCWSTR , pcwzcloudname : :: windows_sys::core::PCWSTR , pcendpoints : *mut u32 , ppendpoints : *mut *mut PEER_PNRP_ENDPOINT_INFO ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerPnrpShutdown ( ) -> :: windows_sys::core::HRESULT ); #[cfg(feature = "Win32_Foundation")] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] fn PeerPnrpStartResolve ( pcwzpeername : :: windows_sys::core::PCWSTR , pcwzcloudname : :: windows_sys::core::PCWSTR , cmaxendpoints : u32 , hevent : super::super::Foundation:: HANDLE , phresolve : *mut *mut ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! 
( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerPnrpStartup ( wversionrequested : u16 ) -> :: windows_sys::core::HRESULT ); ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] fn PeerPnrpUnregister ( hregistration : *const ::core::ffi::c_void ) -> :: windows_sys::core::HRESULT ); #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] ::windows_sys::core::link ! ( "p2p.dll""system" #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] fn PeerPnrpUpdateRegistration ( hregistration : *const ::core::ffi::c_void , pregistrationinfo : *const PEER_PNRP_REGISTRATION_INFO ) -> :: windows_sys::core::HRESULT ); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_BOOTSTRAPPROVIDER_IN_USE: ::windows_sys::core::HRESULT = -2141052914i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_BOOTSTRAPPROVIDER_NOT_ATTACHED: ::windows_sys::core::HRESULT = -2141052913i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_CAPABILITY_MISMATCH: ::windows_sys::core::HRESULT = -2141052657i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_DUPLICATE_KEY: ::windows_sys::core::HRESULT = -2141052919i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_FAULTED: ::windows_sys::core::HRESULT = -2141052662i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INSUFFICIENT_BUFFER: ::windows_sys::core::HRESULT = -2141052660i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_ADDRESS: ::windows_sys::core::HRESULT = -2141052923i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_BOOTSTRAP_PROVIDER: ::windows_sys::core::HRESULT = 
-2141052924i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_CERT_CHAIN: ::windows_sys::core::HRESULT = -2141057020i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_INSTANCE_PREFIX: ::windows_sys::core::HRESULT = -2141052659i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_KEY: ::windows_sys::core::HRESULT = -2141057015i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_KEY_SIZE: ::windows_sys::core::HRESULT = -2141057022i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_MAX_ADDRESSES: ::windows_sys::core::HRESULT = -2141057017i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_MAX_ENDPOINTS: ::windows_sys::core::HRESULT = -2141057007i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_MESSAGE: ::windows_sys::core::HRESULT = -2141057019i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_PORT: ::windows_sys::core::HRESULT = -2141052928i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_SCOPE: ::windows_sys::core::HRESULT = -2141052922i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_SEARCH_INFO: ::windows_sys::core::HRESULT = -2141052663i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_SEARCH_RANGE: ::windows_sys::core::HRESULT = -2141057006i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_SECURITY_MODE: ::windows_sys::core::HRESULT = -2141052658i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_SECURITY_PROVIDER: ::windows_sys::core::HRESULT = -2141052926i32; #[doc = "*Required features: 
`\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_SETTINGS: ::windows_sys::core::HRESULT = -2141052664i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_INVALID_TRANSPORT_PROVIDER: ::windows_sys::core::HRESULT = -2141052927i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_NO_ADDRESSES_AVAILABLE: ::windows_sys::core::HRESULT = -2141052920i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_NO_MORE: ::windows_sys::core::HRESULT = -2141057018i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_SEARCH_IN_PROGRESS: ::windows_sys::core::HRESULT = -2141057016i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_SECURITYPROVIDER_IN_USE: ::windows_sys::core::HRESULT = -2141052916i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_SECURITYPROVIDER_NOT_ATTACHED: ::windows_sys::core::HRESULT = -2141052915i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_STILL_IN_USE: ::windows_sys::core::HRESULT = -2141052925i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TIMEOUT: ::windows_sys::core::HRESULT = -2141057023i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORTPROVIDER_IN_USE: ::windows_sys::core::HRESULT = -2141052918i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORTPROVIDER_NOT_ATTACHED: ::windows_sys::core::HRESULT = -2141052917i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORT_ALREADY_BOUND: ::windows_sys::core::HRESULT = -2141052671i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORT_ALREADY_EXISTS_FOR_SCOPE: ::windows_sys::core::HRESULT = -2141052665i32; #[doc = "*Required features: 
`\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORT_EXECUTING_CALLBACK: ::windows_sys::core::HRESULT = -2141052666i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORT_INVALID_ARGUMENT: ::windows_sys::core::HRESULT = -2141052668i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORT_NOT_BOUND: ::windows_sys::core::HRESULT = -2141052670i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORT_NO_DEST_ADDRESSES: ::windows_sys::core::HRESULT = -2141052667i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORT_SHUTTING_DOWN: ::windows_sys::core::HRESULT = -2141052921i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORT_STILL_BOUND: ::windows_sys::core::HRESULT = -2141052661i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_E_TRANSPORT_UNEXPECTED: ::windows_sys::core::HRESULT = -2141052669i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_LINK_LOCAL_ISATAP_SCOPEID: u32 = 4294967295u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_MAX_INSTANCE_PREFIX_LEN: u32 = 128u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_MAX_PAYLOAD_SIZE: u32 = 5120u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_MAX_ROUTING_ADDRESSES: u32 = 20u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_MIN_ROUTING_ADDRESSES: u32 = 1u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_PAYLOAD_REVOKED: u32 = 1u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_S_RETRY: ::windows_sys::core::HRESULT = 6426640i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const FACILITY_DRT: u32 = 98u32; #[doc = 
"*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const NS_PNRPCLOUD: u32 = 39u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const NS_PNRPNAME: u32 = 38u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const NS_PROVIDER_PNRPCLOUD: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0x03fe89ce_766d_4976_b9c1_bb9bc42c7b4d); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const NS_PROVIDER_PNRPNAME: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0x03fe89cd_766d_4976_b9c1_bb9bc42c7b4d); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_PUBLICATION_OPTIONS_VERSION: i32 = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_PUBLICATION_OPTIONS_VERSION_1: i32 = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_PUBLICATION_OPTIONS_VERSION_2: i32 = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_READ_TIMEOUT_DEFAULT: u32 = 4294967294u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_READ_TIMEOUT_LOCAL_CACHE_ONLY: u32 = 0u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_COLLAB_OBJECTID_USER_PICTURE: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xdd15f41f_fc4e_4922_b035_4c06a754d01d); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_E_ALREADY_EXISTS: ::windows_sys::core::HRESULT = -2147024713i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_E_CLIENT_INVALID_COMPARTMENT_ID: ::windows_sys::core::HRESULT = -2147013390i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_E_CLOUD_DISABLED: ::windows_sys::core::HRESULT = -2147013394i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const 
PEER_E_CLOUD_IS_DEAD: ::windows_sys::core::HRESULT = -2147013387i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_E_CLOUD_IS_SEARCH_ONLY: ::windows_sys::core::HRESULT = -2147013391i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_E_CLOUD_NOT_FOUND: ::windows_sys::core::HRESULT = -2147013395i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_E_DISK_FULL: ::windows_sys::core::HRESULT = -2147024784i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_E_DUPLICATE_PEER_NAME: ::windows_sys::core::HRESULT = -2147013388i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_E_INVALID_IDENTITY: ::windows_sys::core::HRESULT = -2147013393i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_E_NOT_FOUND: ::windows_sys::core::HRESULT = -2147023728i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_E_TOO_MUCH_LOAD: ::windows_sys::core::HRESULT = -2147013392i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_ROLE_ADMIN: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0x04387127_aa56_450a_8ce5_4f565c6790f4); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_ROLE_INVITING_MEMBER: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0x4370fd89_dc18_4cfb_8dbf_9853a8a9f905); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_ROLE_MEMBER: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xf12dc4c7_0857_4ca0_93fc_b1bb19a3d8c2); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PNRP_ALL_LINK_CLOUDS: ::windows_sys::core::PCWSTR = ::windows_sys::w!("PEER_PNRP_ALL_LINKS"); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRPINFO_HINT: u32 = 1u32; 
#[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_MAX_ENDPOINT_ADDRESSES: u32 = 10u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_MAX_EXTENDED_PAYLOAD_BYTES: u32 = 4096u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const SVCID_PNRPCLOUD: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xc2239ce6_00c0_4fbf_bad6_18139385a49a); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const SVCID_PNRPNAME_V1: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xc2239ce5_00c0_4fbf_bad6_18139385a49a); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const SVCID_PNRPNAME_V2: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xc2239ce7_00c0_4fbf_bad6_18139385a49a); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSA_PNRP_CLIENT_INVALID_COMPARTMENT_ID: u32 = 11506u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSA_PNRP_CLOUD_DISABLED: u32 = 11502u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSA_PNRP_CLOUD_IS_DEAD: u32 = 11509u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSA_PNRP_CLOUD_IS_SEARCH_ONLY: u32 = 11505u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSA_PNRP_CLOUD_NOT_FOUND: u32 = 11501u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSA_PNRP_DUPLICATE_PEER_NAME: u32 = 11508u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSA_PNRP_ERROR_BASE: u32 = 11500u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSA_PNRP_INVALID_IDENTITY: u32 = 11503u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSA_PNRP_TOO_MUCH_LOAD: u32 = 11504u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const 
// Generated wide-string scope-name constants (WSZ_SCOPE_*) followed by the
// windows-sys representation of Win32 C enums: a `pub type Name = i32;` (or
// u32) alias plus one `pub const` per enum value (DRT_ADDRESS_FLAGS,
// DRT_EVENT_TYPE, DRT_MATCH_TYPE, DRT_SCOPE, DRT_SECURITY_MODE, DRT_STATUS,
// PEERDIST_* and PEER_* enums). Machine-generated — regenerate, don't hand-edit.
WSZ_SCOPE_GLOBAL: ::windows_sys::core::PCWSTR = ::windows_sys::w!("GLOBAL"); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSZ_SCOPE_LINKLOCAL: ::windows_sys::core::PCWSTR = ::windows_sys::w!("LINKLOCAL"); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const WSZ_SCOPE_SITELOCAL: ::windows_sys::core::PCWSTR = ::windows_sys::w!("SITELOCAL"); #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type DRT_ADDRESS_FLAGS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_ADDRESS_FLAG_ACCEPTED: DRT_ADDRESS_FLAGS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_ADDRESS_FLAG_REJECTED: DRT_ADDRESS_FLAGS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_ADDRESS_FLAG_UNREACHABLE: DRT_ADDRESS_FLAGS = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_ADDRESS_FLAG_LOOP: DRT_ADDRESS_FLAGS = 8i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_ADDRESS_FLAG_TOO_BUSY: DRT_ADDRESS_FLAGS = 16i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_ADDRESS_FLAG_BAD_VALIDATE_ID: DRT_ADDRESS_FLAGS = 32i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_ADDRESS_FLAG_SUSPECT_UNREGISTERED_ID: DRT_ADDRESS_FLAGS = 64i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_ADDRESS_FLAG_INQUIRE: DRT_ADDRESS_FLAGS = 128i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type DRT_EVENT_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_EVENT_STATUS_CHANGED: DRT_EVENT_TYPE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_EVENT_LEAFSET_KEY_CHANGED: DRT_EVENT_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const 
DRT_EVENT_REGISTRATION_STATE_CHANGED: DRT_EVENT_TYPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type DRT_LEAFSET_KEY_CHANGE_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_LEAFSET_KEY_ADDED: DRT_LEAFSET_KEY_CHANGE_TYPE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_LEAFSET_KEY_DELETED: DRT_LEAFSET_KEY_CHANGE_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type DRT_MATCH_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_MATCH_EXACT: DRT_MATCH_TYPE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_MATCH_NEAR: DRT_MATCH_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_MATCH_INTERMEDIATE: DRT_MATCH_TYPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type DRT_REGISTRATION_STATE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_REGISTRATION_STATE_UNRESOLVEABLE: DRT_REGISTRATION_STATE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type DRT_SCOPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_GLOBAL_SCOPE: DRT_SCOPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_SITE_LOCAL_SCOPE: DRT_SCOPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_LINK_LOCAL_SCOPE: DRT_SCOPE = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type DRT_SECURITY_MODE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_SECURE_RESOLVE: DRT_SECURITY_MODE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_SECURE_MEMBERSHIP: DRT_SECURITY_MODE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub 
const DRT_SECURE_CONFIDENTIALPAYLOAD: DRT_SECURITY_MODE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type DRT_STATUS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_ACTIVE: DRT_STATUS = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_ALONE: DRT_STATUS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_NO_NETWORK: DRT_STATUS = 10i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const DRT_FAULTED: DRT_STATUS = 20i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEERDIST_CLIENT_INFO_BY_HANDLE_CLASS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PeerDistClientBasicInfo: PEERDIST_CLIENT_INFO_BY_HANDLE_CLASS = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const MaximumPeerDistClientInfoByHandlesClass: PEERDIST_CLIENT_INFO_BY_HANDLE_CLASS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEERDIST_RETRIEVAL_OPTIONS_CONTENTINFO_VERSION_VALUE = u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_RETRIEVAL_OPTIONS_CONTENTINFO_VERSION_1: PEERDIST_RETRIEVAL_OPTIONS_CONTENTINFO_VERSION_VALUE = 1u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_RETRIEVAL_OPTIONS_CONTENTINFO_VERSION_2: PEERDIST_RETRIEVAL_OPTIONS_CONTENTINFO_VERSION_VALUE = 2u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_RETRIEVAL_OPTIONS_CONTENTINFO_VERSION: PEERDIST_RETRIEVAL_OPTIONS_CONTENTINFO_VERSION_VALUE = 2u32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEERDIST_STATUS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_STATUS_DISABLED: PEERDIST_STATUS = 0i32; #[doc = "*Required features: 
`\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_STATUS_UNAVAILABLE: PEERDIST_STATUS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEERDIST_STATUS_AVAILABLE: PEERDIST_STATUS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_APPLICATION_REGISTRATION_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_APPLICATION_CURRENT_USER: PEER_APPLICATION_REGISTRATION_TYPE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_APPLICATION_ALL_USERS: PEER_APPLICATION_REGISTRATION_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_CHANGE_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_CHANGE_ADDED: PEER_CHANGE_TYPE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_CHANGE_DELETED: PEER_CHANGE_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_CHANGE_UPDATED: PEER_CHANGE_TYPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_COLLAB_EVENT_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_EVENT_WATCHLIST_CHANGED: PEER_COLLAB_EVENT_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_EVENT_ENDPOINT_CHANGED: PEER_COLLAB_EVENT_TYPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_EVENT_ENDPOINT_PRESENCE_CHANGED: PEER_COLLAB_EVENT_TYPE = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_EVENT_ENDPOINT_APPLICATION_CHANGED: PEER_COLLAB_EVENT_TYPE = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_EVENT_ENDPOINT_OBJECT_CHANGED: PEER_COLLAB_EVENT_TYPE = 5i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const 
PEER_EVENT_MY_ENDPOINT_CHANGED: PEER_COLLAB_EVENT_TYPE = 6i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_EVENT_MY_PRESENCE_CHANGED: PEER_COLLAB_EVENT_TYPE = 7i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_EVENT_MY_APPLICATION_CHANGED: PEER_COLLAB_EVENT_TYPE = 8i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_EVENT_MY_OBJECT_CHANGED: PEER_COLLAB_EVENT_TYPE = 9i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_EVENT_PEOPLE_NEAR_ME_CHANGED: PEER_COLLAB_EVENT_TYPE = 10i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_EVENT_REQUEST_STATUS_CHANGED: PEER_COLLAB_EVENT_TYPE = 11i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_CONNECTION_FLAGS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_CONNECTION_NEIGHBOR: PEER_CONNECTION_FLAGS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_CONNECTION_DIRECT: PEER_CONNECTION_FLAGS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_CONNECTION_STATUS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_CONNECTED: PEER_CONNECTION_STATUS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_DISCONNECTED: PEER_CONNECTION_STATUS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_CONNECTION_FAILED: PEER_CONNECTION_STATUS = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_GRAPH_EVENT_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_EVENT_STATUS_CHANGED: PEER_GRAPH_EVENT_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_EVENT_PROPERTY_CHANGED: PEER_GRAPH_EVENT_TYPE = 
2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_EVENT_RECORD_CHANGED: PEER_GRAPH_EVENT_TYPE = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_EVENT_DIRECT_CONNECTION: PEER_GRAPH_EVENT_TYPE = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_EVENT_NEIGHBOR_CONNECTION: PEER_GRAPH_EVENT_TYPE = 5i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_EVENT_INCOMING_DATA: PEER_GRAPH_EVENT_TYPE = 6i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_EVENT_CONNECTION_REQUIRED: PEER_GRAPH_EVENT_TYPE = 7i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_EVENT_NODE_CHANGED: PEER_GRAPH_EVENT_TYPE = 8i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_EVENT_SYNCHRONIZED: PEER_GRAPH_EVENT_TYPE = 9i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_GRAPH_PROPERTY_FLAGS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_PROPERTY_HEARTBEATS: PEER_GRAPH_PROPERTY_FLAGS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_PROPERTY_DEFER_EXPIRATION: PEER_GRAPH_PROPERTY_FLAGS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_GRAPH_SCOPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_SCOPE_ANY: PEER_GRAPH_SCOPE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_SCOPE_GLOBAL: PEER_GRAPH_SCOPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_SCOPE_SITELOCAL: PEER_GRAPH_SCOPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_SCOPE_LINKLOCAL: PEER_GRAPH_SCOPE = 3i32; #[doc = 
"*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_SCOPE_LOOPBACK: PEER_GRAPH_SCOPE = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_GRAPH_STATUS_FLAGS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_STATUS_LISTENING: PEER_GRAPH_STATUS_FLAGS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_STATUS_HAS_CONNECTIONS: PEER_GRAPH_STATUS_FLAGS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GRAPH_STATUS_SYNCHRONIZED: PEER_GRAPH_STATUS_FLAGS = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_GROUP_AUTHENTICATION_SCHEME = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_GMC_AUTHENTICATION: PEER_GROUP_AUTHENTICATION_SCHEME = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_PASSWORD_AUTHENTICATION: PEER_GROUP_AUTHENTICATION_SCHEME = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_GROUP_EVENT_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_EVENT_STATUS_CHANGED: PEER_GROUP_EVENT_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_EVENT_PROPERTY_CHANGED: PEER_GROUP_EVENT_TYPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_EVENT_RECORD_CHANGED: PEER_GROUP_EVENT_TYPE = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_EVENT_DIRECT_CONNECTION: PEER_GROUP_EVENT_TYPE = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_EVENT_NEIGHBOR_CONNECTION: PEER_GROUP_EVENT_TYPE = 5i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_EVENT_INCOMING_DATA: 
PEER_GROUP_EVENT_TYPE = 6i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_EVENT_MEMBER_CHANGED: PEER_GROUP_EVENT_TYPE = 8i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_EVENT_CONNECTION_FAILED: PEER_GROUP_EVENT_TYPE = 10i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_EVENT_AUTHENTICATION_FAILED: PEER_GROUP_EVENT_TYPE = 11i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_GROUP_ISSUE_CREDENTIAL_FLAGS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_STORE_CREDENTIALS: PEER_GROUP_ISSUE_CREDENTIAL_FLAGS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_GROUP_PROPERTY_FLAGS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_MEMBER_DATA_OPTIONAL: PEER_GROUP_PROPERTY_FLAGS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_DISABLE_PRESENCE: PEER_GROUP_PROPERTY_FLAGS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_DEFER_EXPIRATION: PEER_GROUP_PROPERTY_FLAGS = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_GROUP_STATUS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_STATUS_LISTENING: PEER_GROUP_STATUS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_GROUP_STATUS_HAS_CONNECTIONS: PEER_GROUP_STATUS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_INVITATION_RESPONSE_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_INVITATION_RESPONSE_DECLINED: PEER_INVITATION_RESPONSE_TYPE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_INVITATION_RESPONSE_ACCEPTED: 
PEER_INVITATION_RESPONSE_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_INVITATION_RESPONSE_EXPIRED: PEER_INVITATION_RESPONSE_TYPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_INVITATION_RESPONSE_ERROR: PEER_INVITATION_RESPONSE_TYPE = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_MEMBER_CHANGE_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_MEMBER_CONNECTED: PEER_MEMBER_CHANGE_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_MEMBER_DISCONNECTED: PEER_MEMBER_CHANGE_TYPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_MEMBER_UPDATED: PEER_MEMBER_CHANGE_TYPE = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_MEMBER_JOINED: PEER_MEMBER_CHANGE_TYPE = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_MEMBER_LEFT: PEER_MEMBER_CHANGE_TYPE = 5i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_MEMBER_FLAGS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_MEMBER_PRESENT: PEER_MEMBER_FLAGS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_NODE_CHANGE_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_NODE_CHANGE_CONNECTED: PEER_NODE_CHANGE_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_NODE_CHANGE_DISCONNECTED: PEER_NODE_CHANGE_TYPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_NODE_CHANGE_UPDATED: PEER_NODE_CHANGE_TYPE = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_PRESENCE_STATUS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const 
PEER_PRESENCE_OFFLINE: PEER_PRESENCE_STATUS = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PRESENCE_OUT_TO_LUNCH: PEER_PRESENCE_STATUS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PRESENCE_AWAY: PEER_PRESENCE_STATUS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PRESENCE_BE_RIGHT_BACK: PEER_PRESENCE_STATUS = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PRESENCE_IDLE: PEER_PRESENCE_STATUS = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PRESENCE_BUSY: PEER_PRESENCE_STATUS = 5i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PRESENCE_ON_THE_PHONE: PEER_PRESENCE_STATUS = 6i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PRESENCE_ONLINE: PEER_PRESENCE_STATUS = 7i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_PUBLICATION_SCOPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PUBLICATION_SCOPE_NONE: PEER_PUBLICATION_SCOPE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PUBLICATION_SCOPE_NEAR_ME: PEER_PUBLICATION_SCOPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PUBLICATION_SCOPE_INTERNET: PEER_PUBLICATION_SCOPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_PUBLICATION_SCOPE_ALL: PEER_PUBLICATION_SCOPE = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_RECORD_CHANGE_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_RECORD_ADDED: PEER_RECORD_CHANGE_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_RECORD_UPDATED: PEER_RECORD_CHANGE_TYPE = 2i32; #[doc = "*Required features: 
`\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_RECORD_DELETED: PEER_RECORD_CHANGE_TYPE = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_RECORD_EXPIRED: PEER_RECORD_CHANGE_TYPE = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_RECORD_FLAGS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_RECORD_FLAG_AUTOREFRESH: PEER_RECORD_FLAGS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_RECORD_FLAG_DELETED: PEER_RECORD_FLAGS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_SIGNIN_FLAGS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_SIGNIN_NONE: PEER_SIGNIN_FLAGS = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_SIGNIN_NEAR_ME: PEER_SIGNIN_FLAGS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_SIGNIN_INTERNET: PEER_SIGNIN_FLAGS = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_SIGNIN_ALL: PEER_SIGNIN_FLAGS = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PEER_WATCH_PERMISSION = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_WATCH_BLOCKED: PEER_WATCH_PERMISSION = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PEER_WATCH_ALLOWED: PEER_WATCH_PERMISSION = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PNRP_CLOUD_FLAGS = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_NO_FLAGS: PNRP_CLOUD_FLAGS = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_NAME_LOCAL: PNRP_CLOUD_FLAGS = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_RESOLVE_ONLY: PNRP_CLOUD_FLAGS = 
2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_FULL_PARTICIPANT: PNRP_CLOUD_FLAGS = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PNRP_CLOUD_STATE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_STATE_VIRTUAL: PNRP_CLOUD_STATE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_STATE_SYNCHRONISING: PNRP_CLOUD_STATE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_STATE_ACTIVE: PNRP_CLOUD_STATE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_STATE_DEAD: PNRP_CLOUD_STATE = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_STATE_DISABLED: PNRP_CLOUD_STATE = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_STATE_NO_NET: PNRP_CLOUD_STATE = 5i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_CLOUD_STATE_ALONE: PNRP_CLOUD_STATE = 6i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PNRP_EXTENDED_PAYLOAD_TYPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_EXTENDED_PAYLOAD_TYPE_NONE: PNRP_EXTENDED_PAYLOAD_TYPE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_EXTENDED_PAYLOAD_TYPE_BINARY: PNRP_EXTENDED_PAYLOAD_TYPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_EXTENDED_PAYLOAD_TYPE_STRING: PNRP_EXTENDED_PAYLOAD_TYPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PNRP_REGISTERED_ID_STATE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_REGISTERED_ID_STATE_OK: PNRP_REGISTERED_ID_STATE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const 
PNRP_REGISTERED_ID_STATE_PROBLEM: PNRP_REGISTERED_ID_STATE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PNRP_RESOLVE_CRITERIA = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_RESOLVE_CRITERIA_DEFAULT: PNRP_RESOLVE_CRITERIA = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_RESOLVE_CRITERIA_REMOTE_PEER_NAME: PNRP_RESOLVE_CRITERIA = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_RESOLVE_CRITERIA_NEAREST_REMOTE_PEER_NAME: PNRP_RESOLVE_CRITERIA = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_RESOLVE_CRITERIA_NON_CURRENT_PROCESS_PEER_NAME: PNRP_RESOLVE_CRITERIA = 3i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_RESOLVE_CRITERIA_NEAREST_NON_CURRENT_PROCESS_PEER_NAME: PNRP_RESOLVE_CRITERIA = 4i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_RESOLVE_CRITERIA_ANY_PEER_NAME: PNRP_RESOLVE_CRITERIA = 5i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_RESOLVE_CRITERIA_NEAREST_PEER_NAME: PNRP_RESOLVE_CRITERIA = 6i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PNRP_SCOPE = i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_SCOPE_ANY: PNRP_SCOPE = 0i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_GLOBAL_SCOPE: PNRP_SCOPE = 1i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_SITE_LOCAL_SCOPE: PNRP_SCOPE = 2i32; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub const PNRP_LINK_LOCAL_SCOPE: PNRP_SCOPE = 3i32; #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub 
struct DRT_ADDRESS { pub socketAddress: super::super::Networking::WinSock::SOCKADDR_STORAGE, pub flags: u32, pub nearness: i32, pub latency: u32, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for DRT_ADDRESS {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for DRT_ADDRESS { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct DRT_ADDRESS_LIST { pub AddressCount: u32, pub AddressList: [DRT_ADDRESS; 1], } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for DRT_ADDRESS_LIST {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for DRT_ADDRESS_LIST { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct DRT_BOOTSTRAP_PROVIDER { pub pvContext: *mut ::core::ffi::c_void, pub Attach: isize, pub Detach: isize, pub InitResolve: isize, pub IssueResolve: isize, pub EndResolve: isize, pub Register: isize, pub Unregister: isize, } impl ::core::marker::Copy for DRT_BOOTSTRAP_PROVIDER {} impl ::core::clone::Clone for DRT_BOOTSTRAP_PROVIDER { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct DRT_DATA { pub cb: u32, pub pb: *mut u8, } impl ::core::marker::Copy for DRT_DATA {} impl ::core::clone::Clone for DRT_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct DRT_EVENT_DATA { pub r#type: 
DRT_EVENT_TYPE, pub hr: ::windows_sys::core::HRESULT, pub pvContext: *mut ::core::ffi::c_void, pub Anonymous: DRT_EVENT_DATA_0, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for DRT_EVENT_DATA {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for DRT_EVENT_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub union DRT_EVENT_DATA_0 { pub leafsetKeyChange: DRT_EVENT_DATA_0_0, pub registrationStateChange: DRT_EVENT_DATA_0_1, pub statusChange: DRT_EVENT_DATA_0_2, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for DRT_EVENT_DATA_0 {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for DRT_EVENT_DATA_0 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct DRT_EVENT_DATA_0_0 { pub change: DRT_LEAFSET_KEY_CHANGE_TYPE, pub localKey: DRT_DATA, pub remoteKey: DRT_DATA, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for DRT_EVENT_DATA_0_0 {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for DRT_EVENT_DATA_0_0 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct DRT_EVENT_DATA_0_1 { pub state: 
DRT_REGISTRATION_STATE, pub localKey: DRT_DATA, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for DRT_EVENT_DATA_0_1 {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for DRT_EVENT_DATA_0_1 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct DRT_EVENT_DATA_0_2 { pub status: DRT_STATUS, pub bootstrapAddresses: DRT_EVENT_DATA_0_2_0, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for DRT_EVENT_DATA_0_2 {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for DRT_EVENT_DATA_0_2 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct DRT_EVENT_DATA_0_2_0 { pub cntAddress: u32, pub pAddresses: *mut super::super::Networking::WinSock::SOCKADDR_STORAGE, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for DRT_EVENT_DATA_0_2_0 {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for DRT_EVENT_DATA_0_2_0 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct DRT_REGISTRATION { pub key: DRT_DATA, pub appData: DRT_DATA, } impl ::core::marker::Copy for DRT_REGISTRATION {} impl ::core::clone::Clone for DRT_REGISTRATION { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] 
#[cfg(feature = "Win32_Foundation")] pub struct DRT_SEARCH_INFO { pub dwSize: u32, pub fIterative: super::super::Foundation::BOOL, pub fAllowCurrentInstanceMatch: super::super::Foundation::BOOL, pub fAnyMatchInRange: super::super::Foundation::BOOL, pub cMaxEndpoints: u32, pub pMaximumKey: *mut DRT_DATA, pub pMinimumKey: *mut DRT_DATA, } #[cfg(feature = "Win32_Foundation")] impl ::core::marker::Copy for DRT_SEARCH_INFO {} #[cfg(feature = "Win32_Foundation")] impl ::core::clone::Clone for DRT_SEARCH_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct DRT_SEARCH_RESULT { pub dwSize: u32, pub r#type: DRT_MATCH_TYPE, pub pvContext: *mut ::core::ffi::c_void, pub registration: DRT_REGISTRATION, } impl ::core::marker::Copy for DRT_SEARCH_RESULT {} impl ::core::clone::Clone for DRT_SEARCH_RESULT { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct DRT_SECURITY_PROVIDER { pub pvContext: *mut ::core::ffi::c_void, pub Attach: isize, pub Detach: isize, pub RegisterKey: isize, pub UnregisterKey: isize, pub ValidateAndUnpackPayload: isize, pub SecureAndPackPayload: isize, pub FreeData: isize, pub EncryptData: isize, pub DecryptData: isize, pub GetSerializedCredential: isize, pub ValidateRemoteCredential: isize, pub SignData: isize, pub VerifyData: isize, } impl ::core::marker::Copy for DRT_SECURITY_PROVIDER {} impl ::core::clone::Clone for DRT_SECURITY_PROVIDER { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct DRT_SETTINGS { pub dwSize: u32, pub cbKey: u32, pub bProtocolMajorVersion: u8, pub bProtocolMinorVersion: u8, pub ulMaxRoutingAddresses: u32, pub pwzDrtInstancePrefix: ::windows_sys::core::PWSTR, pub hTransport: *mut ::core::ffi::c_void, pub pSecurityProvider: *mut DRT_SECURITY_PROVIDER, pub pBootstrapProvider: *mut DRT_BOOTSTRAP_PROVIDER, pub 
eSecurityMode: DRT_SECURITY_MODE, } impl ::core::marker::Copy for DRT_SETTINGS {} impl ::core::clone::Clone for DRT_SETTINGS { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] #[cfg(feature = "Win32_Foundation")] pub struct PEERDIST_CLIENT_BASIC_INFO { pub fFlashCrowd: super::super::Foundation::BOOL, } #[cfg(feature = "Win32_Foundation")] impl ::core::marker::Copy for PEERDIST_CLIENT_BASIC_INFO {} #[cfg(feature = "Win32_Foundation")] impl ::core::clone::Clone for PEERDIST_CLIENT_BASIC_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEERDIST_CONTENT_TAG { pub Data: [u8; 16], } impl ::core::marker::Copy for PEERDIST_CONTENT_TAG {} impl ::core::clone::Clone for PEERDIST_CONTENT_TAG { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEERDIST_PUBLICATION_OPTIONS { pub dwVersion: u32, pub dwFlags: u32, } impl ::core::marker::Copy for PEERDIST_PUBLICATION_OPTIONS {} impl ::core::clone::Clone for PEERDIST_PUBLICATION_OPTIONS { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEERDIST_RETRIEVAL_OPTIONS { pub cbSize: u32, pub dwContentInfoMinVersion: u32, pub dwContentInfoMaxVersion: u32, pub dwReserved: u32, } impl ::core::marker::Copy for PEERDIST_RETRIEVAL_OPTIONS {} impl ::core::clone::Clone for PEERDIST_RETRIEVAL_OPTIONS { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEERDIST_STATUS_INFO { pub cbSize: u32, pub status: PEERDIST_STATUS, pub dwMinVer: PEERDIST_RETRIEVAL_OPTIONS_CONTENTINFO_VERSION_VALUE, pub dwMaxVer: PEERDIST_RETRIEVAL_OPTIONS_CONTENTINFO_VERSION_VALUE, } impl ::core::marker::Copy for PEERDIST_STATUS_INFO {} impl ::core::clone::Clone for PEERDIST_STATUS_INFO { fn 
clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(feature = "Win32_Networking_WinSock")] pub struct PEER_ADDRESS { pub dwSize: u32, pub sin6: super::super::Networking::WinSock::SOCKADDR_IN6, } #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::marker::Copy for PEER_ADDRESS {} #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::clone::Clone for PEER_ADDRESS { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_APPLICATION { pub id: ::windows_sys::core::GUID, pub data: PEER_DATA, pub pwzDescription: ::windows_sys::core::PWSTR, } impl ::core::marker::Copy for PEER_APPLICATION {} impl ::core::clone::Clone for PEER_APPLICATION { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_APPLICATION_REGISTRATION_INFO { pub application: PEER_APPLICATION, pub pwzApplicationToLaunch: ::windows_sys::core::PWSTR, pub pwzApplicationArguments: ::windows_sys::core::PWSTR, pub dwPublicationScope: u32, } impl ::core::marker::Copy for PEER_APPLICATION_REGISTRATION_INFO {} impl ::core::clone::Clone for PEER_APPLICATION_REGISTRATION_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct PEER_APP_LAUNCH_INFO { pub pContact: *mut PEER_CONTACT, pub pEndpoint: *mut PEER_ENDPOINT, pub pInvitation: *mut PEER_INVITATION, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for PEER_APP_LAUNCH_INFO {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for PEER_APP_LAUNCH_INFO { fn clone(&self) -> Self { *self } } 
// NOTE(review): machine-generated windows-sys bindings, continued — collaboration/graph/group
// event payloads. Each *_EVENT_DATA struct pairs an event-type discriminant with an untagged
// C union; which union member is valid is determined by eventType at runtime (caller's
// responsibility, as in the underlying Win32 API). Generated code — do not edit by hand.
#[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct PEER_COLLAB_EVENT_DATA { pub eventType: PEER_COLLAB_EVENT_TYPE, pub Anonymous: PEER_COLLAB_EVENT_DATA_0, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for PEER_COLLAB_EVENT_DATA {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for PEER_COLLAB_EVENT_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub union PEER_COLLAB_EVENT_DATA_0 { pub watchListChangedData: PEER_EVENT_WATCHLIST_CHANGED_DATA, pub presenceChangedData: PEER_EVENT_PRESENCE_CHANGED_DATA, pub applicationChangedData: PEER_EVENT_APPLICATION_CHANGED_DATA, pub objectChangedData: PEER_EVENT_OBJECT_CHANGED_DATA, pub endpointChangedData: PEER_EVENT_ENDPOINT_CHANGED_DATA, pub peopleNearMeChangedData: PEER_EVENT_PEOPLE_NEAR_ME_CHANGED_DATA, pub requestStatusChangedData: PEER_EVENT_REQUEST_STATUS_CHANGED_DATA, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for PEER_COLLAB_EVENT_DATA_0 {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for PEER_COLLAB_EVENT_DATA_0 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_COLLAB_EVENT_REGISTRATION { pub eventType: PEER_COLLAB_EVENT_TYPE, pub pInstance: *mut ::windows_sys::core::GUID, } impl ::core::marker::Copy for PEER_COLLAB_EVENT_REGISTRATION {} impl ::core::clone::Clone for PEER_COLLAB_EVENT_REGISTRATION { fn clone(&self) -> Self 
{ *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(feature = "Win32_Networking_WinSock")] pub struct PEER_CONNECTION_INFO { pub dwSize: u32, pub dwFlags: u32, pub ullConnectionId: u64, pub ullNodeId: u64, pub pwzPeerId: ::windows_sys::core::PWSTR, pub address: PEER_ADDRESS, } #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::marker::Copy for PEER_CONNECTION_INFO {} #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::clone::Clone for PEER_CONNECTION_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] #[cfg(feature = "Win32_Foundation")] pub struct PEER_CONTACT { pub pwzPeerName: ::windows_sys::core::PWSTR, pub pwzNickName: ::windows_sys::core::PWSTR, pub pwzDisplayName: ::windows_sys::core::PWSTR, pub pwzEmailAddress: ::windows_sys::core::PWSTR, pub fWatch: super::super::Foundation::BOOL, pub WatcherPermissions: PEER_WATCH_PERMISSION, pub credentials: PEER_DATA, } #[cfg(feature = "Win32_Foundation")] impl ::core::marker::Copy for PEER_CONTACT {} #[cfg(feature = "Win32_Foundation")] impl ::core::clone::Clone for PEER_CONTACT { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Security_Cryptography\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Security_Cryptography"))] pub struct PEER_CREDENTIAL_INFO { pub dwSize: u32, pub dwFlags: u32, pub pwzFriendlyName: ::windows_sys::core::PWSTR, pub pPublicKey: *mut super::super::Security::Cryptography::CERT_PUBLIC_KEY_INFO, pub pwzIssuerPeerName: ::windows_sys::core::PWSTR, pub pwzIssuerFriendlyName: ::windows_sys::core::PWSTR, pub ftValidityStart: super::super::Foundation::FILETIME, pub ftValidityEnd: super::super::Foundation::FILETIME, pub cRoles: u32, pub pRoles: *mut ::windows_sys::core::GUID, } #[cfg(all(feature = "Win32_Foundation", 
feature = "Win32_Security_Cryptography"))] impl ::core::marker::Copy for PEER_CREDENTIAL_INFO {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Security_Cryptography"))] impl ::core::clone::Clone for PEER_CREDENTIAL_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_DATA { pub cbData: u32, pub pbData: *mut u8, } impl ::core::marker::Copy for PEER_DATA {} impl ::core::clone::Clone for PEER_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(feature = "Win32_Networking_WinSock")] pub struct PEER_ENDPOINT { pub address: PEER_ADDRESS, pub pwzEndpointName: ::windows_sys::core::PWSTR, } #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::marker::Copy for PEER_ENDPOINT {} #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::clone::Clone for PEER_ENDPOINT { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct PEER_EVENT_APPLICATION_CHANGED_DATA { pub pContact: *mut PEER_CONTACT, pub pEndpoint: *mut PEER_ENDPOINT, pub changeType: PEER_CHANGE_TYPE, pub pApplication: *mut PEER_APPLICATION, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for PEER_EVENT_APPLICATION_CHANGED_DATA {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for PEER_EVENT_APPLICATION_CHANGED_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_EVENT_CONNECTION_CHANGE_DATA { pub dwSize: u32, pub status: PEER_CONNECTION_STATUS, pub ullConnectionId: u64, pub ullNodeId: u64, pub ullNextConnectionId: 
u64, pub hrConnectionFailedReason: ::windows_sys::core::HRESULT, } impl ::core::marker::Copy for PEER_EVENT_CONNECTION_CHANGE_DATA {} impl ::core::clone::Clone for PEER_EVENT_CONNECTION_CHANGE_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct PEER_EVENT_ENDPOINT_CHANGED_DATA { pub pContact: *mut PEER_CONTACT, pub pEndpoint: *mut PEER_ENDPOINT, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for PEER_EVENT_ENDPOINT_CHANGED_DATA {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for PEER_EVENT_ENDPOINT_CHANGED_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_EVENT_INCOMING_DATA { pub dwSize: u32, pub ullConnectionId: u64, pub r#type: ::windows_sys::core::GUID, pub data: PEER_DATA, } impl ::core::marker::Copy for PEER_EVENT_INCOMING_DATA {} impl ::core::clone::Clone for PEER_EVENT_INCOMING_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_EVENT_MEMBER_CHANGE_DATA { pub dwSize: u32, pub changeType: PEER_MEMBER_CHANGE_TYPE, pub pwzIdentity: ::windows_sys::core::PWSTR, } impl ::core::marker::Copy for PEER_EVENT_MEMBER_CHANGE_DATA {} impl ::core::clone::Clone for PEER_EVENT_MEMBER_CHANGE_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_EVENT_NODE_CHANGE_DATA { pub dwSize: u32, pub changeType: PEER_NODE_CHANGE_TYPE, pub ullNodeId: u64, pub pwzPeerId: ::windows_sys::core::PWSTR, } impl ::core::marker::Copy for PEER_EVENT_NODE_CHANGE_DATA {} impl ::core::clone::Clone for 
PEER_EVENT_NODE_CHANGE_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct PEER_EVENT_OBJECT_CHANGED_DATA { pub pContact: *mut PEER_CONTACT, pub pEndpoint: *mut PEER_ENDPOINT, pub changeType: PEER_CHANGE_TYPE, pub pObject: *mut PEER_OBJECT, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for PEER_EVENT_OBJECT_CHANGED_DATA {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for PEER_EVENT_OBJECT_CHANGED_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(feature = "Win32_Networking_WinSock")] pub struct PEER_EVENT_PEOPLE_NEAR_ME_CHANGED_DATA { pub changeType: PEER_CHANGE_TYPE, pub pPeopleNearMe: *mut PEER_PEOPLE_NEAR_ME, } #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::marker::Copy for PEER_EVENT_PEOPLE_NEAR_ME_CHANGED_DATA {} #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::clone::Clone for PEER_EVENT_PEOPLE_NEAR_ME_CHANGED_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct PEER_EVENT_PRESENCE_CHANGED_DATA { pub pContact: *mut PEER_CONTACT, pub pEndpoint: *mut PEER_ENDPOINT, pub changeType: PEER_CHANGE_TYPE, pub pPresenceInfo: *mut PEER_PRESENCE_INFO, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for PEER_EVENT_PRESENCE_CHANGED_DATA {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for 
PEER_EVENT_PRESENCE_CHANGED_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_EVENT_RECORD_CHANGE_DATA { pub dwSize: u32, pub changeType: PEER_RECORD_CHANGE_TYPE, pub recordId: ::windows_sys::core::GUID, pub recordType: ::windows_sys::core::GUID, } impl ::core::marker::Copy for PEER_EVENT_RECORD_CHANGE_DATA {} impl ::core::clone::Clone for PEER_EVENT_RECORD_CHANGE_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(feature = "Win32_Networking_WinSock")] pub struct PEER_EVENT_REQUEST_STATUS_CHANGED_DATA { pub pEndpoint: *mut PEER_ENDPOINT, pub hrChange: ::windows_sys::core::HRESULT, } #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::marker::Copy for PEER_EVENT_REQUEST_STATUS_CHANGED_DATA {} #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::clone::Clone for PEER_EVENT_REQUEST_STATUS_CHANGED_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_EVENT_SYNCHRONIZED_DATA { pub dwSize: u32, pub recordType: ::windows_sys::core::GUID, } impl ::core::marker::Copy for PEER_EVENT_SYNCHRONIZED_DATA {} impl ::core::clone::Clone for PEER_EVENT_SYNCHRONIZED_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] #[cfg(feature = "Win32_Foundation")] pub struct PEER_EVENT_WATCHLIST_CHANGED_DATA { pub pContact: *mut PEER_CONTACT, pub changeType: PEER_CHANGE_TYPE, } #[cfg(feature = "Win32_Foundation")] impl ::core::marker::Copy for PEER_EVENT_WATCHLIST_CHANGED_DATA {} #[cfg(feature = "Win32_Foundation")] impl ::core::clone::Clone for PEER_EVENT_WATCHLIST_CHANGED_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_GRAPH_EVENT_DATA 
{ pub eventType: PEER_GRAPH_EVENT_TYPE, pub Anonymous: PEER_GRAPH_EVENT_DATA_0, } impl ::core::marker::Copy for PEER_GRAPH_EVENT_DATA {} impl ::core::clone::Clone for PEER_GRAPH_EVENT_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub union PEER_GRAPH_EVENT_DATA_0 { pub dwStatus: PEER_GRAPH_STATUS_FLAGS, pub incomingData: PEER_EVENT_INCOMING_DATA, pub recordChangeData: PEER_EVENT_RECORD_CHANGE_DATA, pub connectionChangeData: PEER_EVENT_CONNECTION_CHANGE_DATA, pub nodeChangeData: PEER_EVENT_NODE_CHANGE_DATA, pub synchronizedData: PEER_EVENT_SYNCHRONIZED_DATA, } impl ::core::marker::Copy for PEER_GRAPH_EVENT_DATA_0 {} impl ::core::clone::Clone for PEER_GRAPH_EVENT_DATA_0 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_GRAPH_EVENT_REGISTRATION { pub eventType: PEER_GRAPH_EVENT_TYPE, pub pType: *mut ::windows_sys::core::GUID, } impl ::core::marker::Copy for PEER_GRAPH_EVENT_REGISTRATION {} impl ::core::clone::Clone for PEER_GRAPH_EVENT_REGISTRATION { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_GRAPH_PROPERTIES { pub dwSize: u32, pub dwFlags: u32, pub dwScope: u32, pub dwMaxRecordSize: u32, pub pwzGraphId: ::windows_sys::core::PWSTR, pub pwzCreatorId: ::windows_sys::core::PWSTR, pub pwzFriendlyName: ::windows_sys::core::PWSTR, pub pwzComment: ::windows_sys::core::PWSTR, pub ulPresenceLifetime: u32, pub cPresenceMax: u32, } impl ::core::marker::Copy for PEER_GRAPH_PROPERTIES {} impl ::core::clone::Clone for PEER_GRAPH_PROPERTIES { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_GROUP_EVENT_DATA { pub eventType: PEER_GROUP_EVENT_TYPE, pub Anonymous: PEER_GROUP_EVENT_DATA_0, } impl ::core::marker::Copy for PEER_GROUP_EVENT_DATA {} impl ::core::clone::Clone 
for PEER_GROUP_EVENT_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub union PEER_GROUP_EVENT_DATA_0 { pub dwStatus: PEER_GROUP_STATUS, pub incomingData: PEER_EVENT_INCOMING_DATA, pub recordChangeData: PEER_EVENT_RECORD_CHANGE_DATA, pub connectionChangeData: PEER_EVENT_CONNECTION_CHANGE_DATA, pub memberChangeData: PEER_EVENT_MEMBER_CHANGE_DATA, pub hrConnectionFailedReason: ::windows_sys::core::HRESULT, } impl ::core::marker::Copy for PEER_GROUP_EVENT_DATA_0 {} impl ::core::clone::Clone for PEER_GROUP_EVENT_DATA_0 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_GROUP_EVENT_REGISTRATION { pub eventType: PEER_GROUP_EVENT_TYPE, pub pType: *mut ::windows_sys::core::GUID, } impl ::core::marker::Copy for PEER_GROUP_EVENT_REGISTRATION {} impl ::core::clone::Clone for PEER_GROUP_EVENT_REGISTRATION { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_GROUP_PROPERTIES { pub dwSize: u32, pub dwFlags: u32, pub pwzCloud: ::windows_sys::core::PWSTR, pub pwzClassifier: ::windows_sys::core::PWSTR, pub pwzGroupPeerName: ::windows_sys::core::PWSTR, pub pwzCreatorPeerName: ::windows_sys::core::PWSTR, pub pwzFriendlyName: ::windows_sys::core::PWSTR, pub pwzComment: ::windows_sys::core::PWSTR, pub ulMemberDataLifetime: u32, pub ulPresenceLifetime: u32, pub dwAuthenticationSchemes: u32, pub pwzGroupPassword: ::windows_sys::core::PWSTR, pub groupPasswordRole: ::windows_sys::core::GUID, } impl ::core::marker::Copy for PEER_GROUP_PROPERTIES {} impl ::core::clone::Clone for PEER_GROUP_PROPERTIES { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_INVITATION { pub applicationId: ::windows_sys::core::GUID, pub applicationData: PEER_DATA, pub pwzMessage: ::windows_sys::core::PWSTR, } 
// NOTE(review): machine-generated windows-sys bindings, continued — invitation/member/PNRP
// structs and the PFNPEER_* / DRT_* callback type aliases (nullable `extern "system"` function
// pointers modeled as Option<unsafe extern "system" fn ...>). Generated code — do not edit by hand.
impl ::core::marker::Copy for PEER_INVITATION {} impl ::core::clone::Clone for PEER_INVITATION { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Security_Cryptography\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Security_Cryptography"))] pub struct PEER_INVITATION_INFO { pub dwSize: u32, pub dwFlags: u32, pub pwzCloudName: ::windows_sys::core::PWSTR, pub dwScope: u32, pub dwCloudFlags: u32, pub pwzGroupPeerName: ::windows_sys::core::PWSTR, pub pwzIssuerPeerName: ::windows_sys::core::PWSTR, pub pwzSubjectPeerName: ::windows_sys::core::PWSTR, pub pwzGroupFriendlyName: ::windows_sys::core::PWSTR, pub pwzIssuerFriendlyName: ::windows_sys::core::PWSTR, pub pwzSubjectFriendlyName: ::windows_sys::core::PWSTR, pub ftValidityStart: super::super::Foundation::FILETIME, pub ftValidityEnd: super::super::Foundation::FILETIME, pub cRoles: u32, pub pRoles: *mut ::windows_sys::core::GUID, pub cClassifiers: u32, pub ppwzClassifiers: *mut ::windows_sys::core::PWSTR, pub pSubjectPublicKey: *mut super::super::Security::Cryptography::CERT_PUBLIC_KEY_INFO, pub authScheme: PEER_GROUP_AUTHENTICATION_SCHEME, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Security_Cryptography"))] impl ::core::marker::Copy for PEER_INVITATION_INFO {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Security_Cryptography"))] impl ::core::clone::Clone for PEER_INVITATION_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_INVITATION_RESPONSE { pub action: PEER_INVITATION_RESPONSE_TYPE, pub pwzMessage: ::windows_sys::core::PWSTR, pub hrExtendedInfo: ::windows_sys::core::HRESULT, } impl ::core::marker::Copy for PEER_INVITATION_RESPONSE {} impl ::core::clone::Clone for PEER_INVITATION_RESPONSE { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: 
`\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`, `\"Win32_Security_Cryptography\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock", feature = "Win32_Security_Cryptography"))] pub struct PEER_MEMBER { pub dwSize: u32, pub dwFlags: u32, pub pwzIdentity: ::windows_sys::core::PWSTR, pub pwzAttributes: ::windows_sys::core::PWSTR, pub ullNodeId: u64, pub cAddresses: u32, pub pAddresses: *mut PEER_ADDRESS, pub pCredentialInfo: *mut PEER_CREDENTIAL_INFO, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock", feature = "Win32_Security_Cryptography"))] impl ::core::marker::Copy for PEER_MEMBER {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock", feature = "Win32_Security_Cryptography"))] impl ::core::clone::Clone for PEER_MEMBER { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_NAME_PAIR { pub dwSize: u32, pub pwzPeerName: ::windows_sys::core::PWSTR, pub pwzFriendlyName: ::windows_sys::core::PWSTR, } impl ::core::marker::Copy for PEER_NAME_PAIR {} impl ::core::clone::Clone for PEER_NAME_PAIR { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(feature = "Win32_Networking_WinSock")] pub struct PEER_NODE_INFO { pub dwSize: u32, pub ullNodeId: u64, pub pwzPeerId: ::windows_sys::core::PWSTR, pub cAddresses: u32, pub pAddresses: *mut PEER_ADDRESS, pub pwzAttributes: ::windows_sys::core::PWSTR, } #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::marker::Copy for PEER_NODE_INFO {} #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::clone::Clone for PEER_NODE_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_OBJECT { pub id: ::windows_sys::core::GUID, pub data: PEER_DATA, pub 
dwPublicationScope: u32, } impl ::core::marker::Copy for PEER_OBJECT {} impl ::core::clone::Clone for PEER_OBJECT { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(feature = "Win32_Networking_WinSock")] pub struct PEER_PEOPLE_NEAR_ME { pub pwzNickName: ::windows_sys::core::PWSTR, pub endpoint: PEER_ENDPOINT, pub id: ::windows_sys::core::GUID, } #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::marker::Copy for PEER_PEOPLE_NEAR_ME {} #[cfg(feature = "Win32_Networking_WinSock")] impl ::core::clone::Clone for PEER_PEOPLE_NEAR_ME { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_PNRP_CLOUD_INFO { pub pwzCloudName: ::windows_sys::core::PWSTR, pub dwScope: PNRP_SCOPE, pub dwScopeId: u32, } impl ::core::marker::Copy for PEER_PNRP_CLOUD_INFO {} impl ::core::clone::Clone for PEER_PNRP_CLOUD_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct PEER_PNRP_ENDPOINT_INFO { pub pwzPeerName: ::windows_sys::core::PWSTR, pub cAddresses: u32, pub ppAddresses: *mut *mut super::super::Networking::WinSock::SOCKADDR, pub pwzComment: ::windows_sys::core::PWSTR, pub payload: PEER_DATA, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for PEER_PNRP_ENDPOINT_INFO {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for PEER_PNRP_ENDPOINT_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = 
"Win32_Networking_WinSock"))] pub struct PEER_PNRP_REGISTRATION_INFO { pub pwzCloudName: ::windows_sys::core::PWSTR, pub pwzPublishingIdentity: ::windows_sys::core::PWSTR, pub cAddresses: u32, pub ppAddresses: *mut *mut super::super::Networking::WinSock::SOCKADDR, pub wPort: u16, pub pwzComment: ::windows_sys::core::PWSTR, pub payload: PEER_DATA, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for PEER_PNRP_REGISTRATION_INFO {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for PEER_PNRP_REGISTRATION_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_PRESENCE_INFO { pub status: PEER_PRESENCE_STATUS, pub pwzDescriptiveText: ::windows_sys::core::PWSTR, } impl ::core::marker::Copy for PEER_PRESENCE_INFO {} impl ::core::clone::Clone for PEER_PRESENCE_INFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] #[cfg(feature = "Win32_Foundation")] pub struct PEER_RECORD { pub dwSize: u32, pub r#type: ::windows_sys::core::GUID, pub id: ::windows_sys::core::GUID, pub dwVersion: u32, pub dwFlags: u32, pub pwzCreatorId: ::windows_sys::core::PWSTR, pub pwzModifiedById: ::windows_sys::core::PWSTR, pub pwzAttributes: ::windows_sys::core::PWSTR, pub ftCreation: super::super::Foundation::FILETIME, pub ftExpiration: super::super::Foundation::FILETIME, pub ftLastModified: super::super::Foundation::FILETIME, pub securityData: PEER_DATA, pub data: PEER_DATA, } #[cfg(feature = "Win32_Foundation")] impl ::core::marker::Copy for PEER_RECORD {} #[cfg(feature = "Win32_Foundation")] impl ::core::clone::Clone for PEER_RECORD { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] #[cfg(feature = "Win32_Foundation")] pub struct 
PEER_SECURITY_INTERFACE { pub dwSize: u32, pub pwzSspFilename: ::windows_sys::core::PWSTR, pub pwzPackageName: ::windows_sys::core::PWSTR, pub cbSecurityInfo: u32, pub pbSecurityInfo: *mut u8, pub pvContext: *mut ::core::ffi::c_void, pub pfnValidateRecord: PFNPEER_VALIDATE_RECORD, pub pfnSecureRecord: PFNPEER_SECURE_RECORD, pub pfnFreeSecurityData: PFNPEER_FREE_SECURITY_DATA, pub pfnAuthFailed: PFNPEER_ON_PASSWORD_AUTH_FAILED, } #[cfg(feature = "Win32_Foundation")] impl ::core::marker::Copy for PEER_SECURITY_INTERFACE {} #[cfg(feature = "Win32_Foundation")] impl ::core::clone::Clone for PEER_SECURITY_INTERFACE { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PEER_VERSION_DATA { pub wVersion: u16, pub wHighestVersion: u16, } impl ::core::marker::Copy for PEER_VERSION_DATA {} impl ::core::clone::Clone for PEER_VERSION_DATA { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PNRPCLOUDINFO { pub dwSize: u32, pub Cloud: PNRP_CLOUD_ID, pub enCloudState: PNRP_CLOUD_STATE, pub enCloudFlags: PNRP_CLOUD_FLAGS, } impl ::core::marker::Copy for PNRPCLOUDINFO {} impl ::core::clone::Clone for PNRPCLOUDINFO { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub struct PNRPINFO_V1 { pub dwSize: u32, pub lpwszIdentity: ::windows_sys::core::PWSTR, pub nMaxResolve: u32, pub dwTimeout: u32, pub dwLifetime: u32, pub enResolveCriteria: PNRP_RESOLVE_CRITERIA, pub dwFlags: u32, pub saHint: super::super::Networking::WinSock::SOCKET_ADDRESS, pub enNameState: PNRP_REGISTERED_ID_STATE, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] impl ::core::marker::Copy for PNRPINFO_V1 {} #[cfg(all(feature = "Win32_Foundation", 
feature = "Win32_Networking_WinSock"))] impl ::core::clone::Clone for PNRPINFO_V1 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`, `\"Win32_System_Com\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock", feature = "Win32_System_Com"))] pub struct PNRPINFO_V2 { pub dwSize: u32, pub lpwszIdentity: ::windows_sys::core::PWSTR, pub nMaxResolve: u32, pub dwTimeout: u32, pub dwLifetime: u32, pub enResolveCriteria: PNRP_RESOLVE_CRITERIA, pub dwFlags: u32, pub saHint: super::super::Networking::WinSock::SOCKET_ADDRESS, pub enNameState: PNRP_REGISTERED_ID_STATE, pub enExtendedPayloadType: PNRP_EXTENDED_PAYLOAD_TYPE, pub Anonymous: PNRPINFO_V2_0, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock", feature = "Win32_System_Com"))] impl ::core::marker::Copy for PNRPINFO_V2 {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock", feature = "Win32_System_Com"))] impl ::core::clone::Clone for PNRPINFO_V2 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`, `\"Win32_System_Com\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock", feature = "Win32_System_Com"))] pub union PNRPINFO_V2_0 { pub blobPayload: super::super::System::Com::BLOB, pub pwszPayload: ::windows_sys::core::PWSTR, } #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock", feature = "Win32_System_Com"))] impl ::core::marker::Copy for PNRPINFO_V2_0 {} #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock", feature = "Win32_System_Com"))] impl ::core::clone::Clone for PNRPINFO_V2_0 { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub struct PNRP_CLOUD_ID { pub 
AddressFamily: i32, pub Scope: PNRP_SCOPE, pub ScopeId: u32, } impl ::core::marker::Copy for PNRP_CLOUD_ID {} impl ::core::clone::Clone for PNRP_CLOUD_ID { fn clone(&self) -> Self { *self } } #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`, `\"Win32_Networking_WinSock\"`*"] #[cfg(all(feature = "Win32_Foundation", feature = "Win32_Networking_WinSock"))] pub type DRT_BOOTSTRAP_RESOLVE_CALLBACK = ::core::option::Option<unsafe extern "system" fn(hr: ::windows_sys::core::HRESULT, pvcontext: *mut ::core::ffi::c_void, paddresses: *mut super::super::Networking::WinSock::SOCKET_ADDRESS_LIST, ffatalerror: super::super::Foundation::BOOL) -> ()>; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PFNPEER_FREE_SECURITY_DATA = ::core::option::Option<unsafe extern "system" fn(hgraph: *const ::core::ffi::c_void, pvcontext: *const ::core::ffi::c_void, psecuritydata: *const PEER_DATA) -> ::windows_sys::core::HRESULT>; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`*"] pub type PFNPEER_ON_PASSWORD_AUTH_FAILED = ::core::option::Option<unsafe extern "system" fn(hgraph: *const ::core::ffi::c_void, pvcontext: *const ::core::ffi::c_void) -> ::windows_sys::core::HRESULT>; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] #[cfg(feature = "Win32_Foundation")] pub type PFNPEER_SECURE_RECORD = ::core::option::Option<unsafe extern "system" fn(hgraph: *const ::core::ffi::c_void, pvcontext: *const ::core::ffi::c_void, precord: *const PEER_RECORD, changetype: PEER_RECORD_CHANGE_TYPE, ppsecuritydata: *mut *mut PEER_DATA) -> ::windows_sys::core::HRESULT>; #[doc = "*Required features: `\"Win32_NetworkManagement_P2P\"`, `\"Win32_Foundation\"`*"] #[cfg(feature = "Win32_Foundation")] pub type PFNPEER_VALIDATE_RECORD = ::core::option::Option<unsafe extern "system" fn(hgraph: *const ::core::ffi::c_void, pvcontext: *const ::core::ffi::c_void, precord: *const PEER_RECORD, changetype: 
PEER_RECORD_CHANGE_TYPE) -> ::windows_sys::core::HRESULT>;
// Copyright 2023 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Aggregate-specific hooks for the flight-exchange layer: how aggregate
//! blocks are ordered, hash-scattered across destination nodes, and
//! (de)serialized/spilled on either side of a merge or shuffle exchange.

use std::marker::PhantomData;
use std::sync::Arc;

use common_exception::ErrorCode;
use common_exception::Result;
use common_expression::BlockMetaInfoDowncast;
use common_expression::DataBlock;
use common_hashtable::FastHash;
use common_hashtable::HashtableEntryMutRefLike;
use common_hashtable::HashtableEntryRefLike;
use common_hashtable::HashtableLike;
use common_pipeline_core::pipe::Pipe;
use common_pipeline_core::pipe::PipeItem;
use common_pipeline_core::processors::port::InputPort;
use common_pipeline_core::processors::port::OutputPort;
use common_pipeline_core::processors::processor::ProcessorPtr;
use common_pipeline_core::Pipeline;
use common_pipeline_transforms::processors::transforms::TransformDummy;
use common_storage::DataOperator;
use strength_reduce::StrengthReducedU64;

use crate::api::DataExchange;
use crate::api::ExchangeInjector;
use crate::api::ExchangeSorting;
use crate::api::FlightScatter;
use crate::api::MergeExchangeParams;
use crate::api::ShuffleExchangeParams;
use crate::api::TransformExchangeDeserializer;
use crate::pipelines::processors::transforms::aggregator::aggregate_meta::AggregateMeta;
use crate::pipelines::processors::transforms::aggregator::aggregate_meta::HashTablePayload;
use crate::pipelines::processors::transforms::aggregator::serde::TransformScatterAggregateSerializer;
use crate::pipelines::processors::transforms::aggregator::serde::TransformScatterAggregateSpillWriter;
use crate::pipelines::processors::transforms::aggregator::serde::TransformScatterGroupBySerializer;
use crate::pipelines::processors::transforms::aggregator::serde::TransformScatterGroupBySpillWriter;
use crate::pipelines::processors::transforms::group_by::Area;
use crate::pipelines::processors::transforms::group_by::ArenaHolder;
use crate::pipelines::processors::transforms::group_by::HashMethodBounds;
use crate::pipelines::processors::transforms::HashTableCell;
use crate::pipelines::processors::transforms::TransformAggregateDeserializer;
use crate::pipelines::processors::transforms::TransformAggregateSerializer;
use crate::pipelines::processors::transforms::TransformAggregateSpillWriter;
use crate::pipelines::processors::transforms::TransformGroupByDeserializer;
use crate::pipelines::processors::transforms::TransformGroupBySerializer;
use crate::pipelines::processors::transforms::TransformGroupBySpillWriter;
use crate::pipelines::processors::AggregatorParams;
use crate::sessions::QueryContext;

/// [`ExchangeSorting`] implementation that orders outgoing blocks by the
/// bucket id carried in their `AggregateMeta`. Stateless; the generic
/// parameters only select which `AggregateMeta` instantiation to downcast to.
struct AggregateExchangeSorting<Method: HashMethodBounds, V: Send + Sync + 'static> {
    _phantom: PhantomData<(Method, V)>,
}

impl<Method: HashMethodBounds, V: Send + Sync + 'static> ExchangeSorting
    for AggregateExchangeSorting<Method, V>
{
    /// Returns the block's bucket number:
    /// * no meta attached           -> -1 (no ordering constraint),
    /// * `Serialized` / `HashTable` -> the payload's `bucket` field,
    /// * `Spilling` / `Spilled`     -> -1 (spill data carries no usable bucket),
    /// * `Partitioned`              -> unreachable here,
    /// * meta of any other type     -> internal error.
    fn block_number(&self, data_block: &DataBlock) -> Result<isize> {
        match data_block.get_meta() {
            None => Ok(-1),
            Some(block_meta_info) => {
                match AggregateMeta::<Method, V>::downcast_ref_from(block_meta_info) {
                    None => Err(ErrorCode::Internal(format!(
                        "Internal error, AggregateExchangeSorting only recv AggregateMeta {:?}",
                        serde_json::to_string(block_meta_info)
                    ))),
                    Some(meta_info) => match meta_info {
                        AggregateMeta::Partitioned { .. } => unreachable!(),
                        AggregateMeta::Serialized(v) => Ok(v.bucket),
                        AggregateMeta::HashTable(v) => Ok(v.bucket),
                        AggregateMeta::Spilling(_) | AggregateMeta::Spilled(_) => Ok(-1),
                    },
                }
            }
        }
    }
}

/// [`FlightScatter`] that splits one aggregate hashtable into `buckets`
/// smaller hashtables by `key hash % buckets`, one per exchange destination.
struct HashTableHashScatter<Method: HashMethodBounds, V: Copy + Send + Sync + 'static> {
    method: Method,
    // Number of destinations; equals `destination_ids.len()` of the exchange.
    buckets: usize,
    _phantom: PhantomData<V>,
}

impl<Method: HashMethodBounds, V: Copy + Send + Sync + 'static> HashTableHashScatter<Method, V> {
    /// Re-hashes every entry of `payload`'s hashtable into `self.buckets`
    /// fresh hashtables and returns one `HashTableCell` per bucket.
    ///
    /// Ownership notes (order matters):
    /// * the original cell's `_dropper` is taken and shared by every output
    ///   cell, so aggregate-state cleanup still happens exactly once per value;
    /// * the original arena is swapped out (replaced by an empty `Area`) and
    ///   wrapped in an `ArenaHolder` that each output cell also holds, keeping
    ///   the memory backing the copied `V` values alive.
    fn scatter(
        &self,
        mut payload: HashTablePayload<Method, V>,
    ) -> Result<Vec<HashTableCell<Method, V>>> {
        let mut buckets = Vec::with_capacity(self.buckets);

        for _ in 0..self.buckets {
            buckets.push(self.method.create_hash_table()?);
        }

        for item in payload.cell.hashtable.iter() {
            // NOTE(review): `StrengthReducedU64::new` is loop-invariant and
            // could be hoisted above the loop; left in place here.
            let mods = StrengthReducedU64::new(self.buckets as u64);
            let bucket_index = (item.key().fast_hash() % mods) as usize;

            unsafe {
                // Both arms store the value: Ok = new slot, Err = key already
                // present. NOTE(review): the Err arm overwrites instead of
                // merging; presumably keys within one source hashtable are
                // unique so the Err arm never fires — confirm.
                match buckets[bucket_index].insert_and_entry(item.key()) {
                    Ok(mut entry) => {
                        *entry.get_mut() = *item.get();
                    }
                    Err(mut entry) => {
                        *entry.get_mut() = *item.get();
                    }
                }
            }
        }

        let mut res = Vec::with_capacity(buckets.len());
        let dropper = payload.cell._dropper.take();
        let arena = std::mem::replace(&mut payload.cell.arena, Area::create());
        payload
            .cell
            .arena_holders
            .push(ArenaHolder::create(Some(arena)));

        for bucket_table in buckets {
            let mut cell =
                HashTableCell::<Method, V>::create(bucket_table, dropper.clone().unwrap());
            cell.arena_holders
                .extend(payload.cell.arena_holders.clone());
            res.push(cell);
        }

        Ok(res)
    }
}

impl<Method: HashMethodBounds, V: Copy + Send + Sync + 'static> FlightScatter
    for HashTableHashScatter<Method, V>
{
    /// Splits a block carrying `HashTable` or `Spilling` meta into one block
    /// per destination bucket (empty hashtables become plain empty blocks).
    /// Any other input is an internal error; `Serialized`/`Spilled`/
    /// `Partitioned` metas are not expected to reach the scatter stage.
    fn execute(&self, mut data_block: DataBlock) -> Result<Vec<DataBlock>> {
        if let Some(block_meta) = data_block.take_meta() {
            if let Some(block_meta) = AggregateMeta::<Method, V>::downcast_from(block_meta) {
                let mut blocks = Vec::with_capacity(self.buckets);
                match block_meta {
                    AggregateMeta::Spilled(_) => unreachable!(),
                    AggregateMeta::Serialized(_) => unreachable!(),
                    AggregateMeta::Partitioned { .. } => unreachable!(),
                    AggregateMeta::Spilling(payload) => {
                        let bucket = payload.bucket;
                        for hashtable_cell in self.scatter(payload)? {
                            blocks.push(match hashtable_cell.hashtable.len() == 0 {
                                true => DataBlock::empty(),
                                false => DataBlock::empty_with_meta(
                                    AggregateMeta::<Method, V>::create_spilling(
                                        bucket,
                                        hashtable_cell,
                                    ),
                                ),
                            });
                        }
                    }
                    AggregateMeta::HashTable(payload) => {
                        let bucket = payload.bucket;
                        for hashtable_cell in self.scatter(payload)? {
                            blocks.push(match hashtable_cell.hashtable.len() == 0 {
                                true => DataBlock::empty(),
                                false => DataBlock::empty_with_meta(
                                    AggregateMeta::<Method, V>::create_hashtable(
                                        bucket,
                                        hashtable_cell,
                                    ),
                                ),
                            });
                        }
                    }
                };

                return Ok(blocks);
            }
        }

        Err(ErrorCode::Internal(
            "Internal, HashTableHashScatter only recv AggregateMeta",
        ))
    }
}

/// [`ExchangeInjector`] for aggregation pipelines: supplies the hash scatter,
/// the bucket-based exchange sorting, and the spill/serialize/deserialize
/// transforms for both merge and shuffle exchanges.
pub struct AggregateInjector<Method: HashMethodBounds, V: Copy + Send + Sync + 'static> {
    method: Method,
    // Tenant id; only used to build the spill location prefix.
    tenant: String,
    aggregator_params: Arc<AggregatorParams>,
    _phantom: PhantomData<V>,
}

impl<Method: HashMethodBounds, V: Copy + Send + Sync + 'static> AggregateInjector<Method, V> {
    /// Builds the injector as a trait object for the exchange layer.
    pub fn create(
        tenant: String,
        method: Method,
        params: Arc<AggregatorParams>,
    ) -> Arc<dyn ExchangeInjector> {
        Arc::new(AggregateInjector::<Method, V> {
            method,
            tenant,
            aggregator_params: params,
            _phantom: Default::default(),
        })
    }
}

impl<Method: HashMethodBounds, V: Copy + Send + Sync + 'static> ExchangeInjector
    for AggregateInjector<Method, V>
{
    /// Only shuffle exchanges are scattered; one bucket per destination id.
    /// Merge/broadcast exchanges never reach an aggregate injector.
    fn flight_scatter(
        &self,
        _: &Arc<QueryContext>,
        exchange: &DataExchange,
    ) -> Result<Arc<Box<dyn FlightScatter>>> {
        match exchange {
            DataExchange::Merge(_) => unreachable!(),
            DataExchange::Broadcast(_) => unreachable!(),
            DataExchange::ShuffleDataExchange(exchange) => {
                Ok(Arc::new(Box::new(HashTableHashScatter::<Method, V> {
                    method: self.method.clone(),
                    buckets: exchange.destination_ids.len(),
                    _phantom: Default::default(),
                })))
            }
        }
    }

    /// Order outgoing blocks by aggregate bucket id.
    fn exchange_sorting(&self) -> Option<Arc<dyn ExchangeSorting>> {
        Some(Arc::new(AggregateExchangeSorting::<Method, V> {
            _phantom: Default::default(),
        }))
    }

    /// Before a merge exchange: append a spill-writer transform followed by a
    /// serializer transform. Group-by vs. aggregate variants are chosen by
    /// whether any aggregate functions are configured.
    fn apply_merge_serializer(
        &self,
        _: &MergeExchangeParams,
        pipeline: &mut Pipeline,
    ) -> Result<()> {
        let method = &self.method;
        let params = self.aggregator_params.clone();
        let operator = DataOperator::instance().operator();
        // Spill files are namespaced per tenant.
        let location_prefix = format!("_aggregate_spill/{}", self.tenant);

        pipeline.add_transform(|input, output| {
            Ok(ProcessorPtr::create(
                match params.aggregate_functions.is_empty() {
                    true => TransformGroupBySpillWriter::create(
                        input,
                        output,
                        method.clone(),
                        operator.clone(),
                        location_prefix.clone(),
                    ),
                    false => TransformAggregateSpillWriter::create(
                        input,
                        output,
                        method.clone(),
                        operator.clone(),
                        params.clone(),
                        location_prefix.clone(),
                    ),
                },
            ))
        })?;

        pipeline.add_transform(
            |input, output| match params.aggregate_functions.is_empty() {
                true => TransformGroupBySerializer::try_create(input, output, method.clone()),
                false => TransformAggregateSerializer::try_create(
                    input,
                    output,
                    method.clone(),
                    params.clone(),
                ),
            },
        )
    }

    /// Before a shuffle exchange: append scatter-aware spill writers and
    /// serializers. `local_pos` is this executor's index among the exchange
    /// destinations, so locally-destined data can skip serialization.
    fn apply_shuffle_serializer(
        &self,
        shuffle_params: &ShuffleExchangeParams,
        pipeline: &mut Pipeline,
    ) -> Result<()> {
        let method = &self.method;
        let params = self.aggregator_params.clone();
        let operator = DataOperator::instance().operator();
        let location_prefix = format!("_aggregate_spill/{}", self.tenant);

        pipeline.add_transform(|input, output| {
            Ok(ProcessorPtr::create(
                match params.aggregate_functions.is_empty() {
                    true => TransformScatterGroupBySpillWriter::create(
                        input,
                        output,
                        method.clone(),
                        operator.clone(),
                        location_prefix.clone(),
                    ),
                    false => TransformScatterAggregateSpillWriter::create(
                        input,
                        output,
                        method.clone(),
                        operator.clone(),
                        location_prefix.clone(),
                        params.clone(),
                    ),
                },
            ))
        })?;

        let schema = shuffle_params.schema.clone();
        let local_id = &shuffle_params.executor_id;
        // The local executor must be one of the destinations.
        let local_pos = shuffle_params
            .destination_ids
            .iter()
            .position(|x| x == local_id)
            .unwrap();

        pipeline.add_transform(
            |input, output| match params.aggregate_functions.is_empty() {
                true => TransformScatterGroupBySerializer::try_create(
                    input,
                    output,
                    method.clone(),
                    schema.clone(),
                    local_pos,
                ),
                false => TransformScatterAggregateSerializer::try_create(
                    input,
                    output,
                    method.clone(),
                    schema.clone(),
                    local_pos,
                    params.clone(),
                ),
            },
        )
    }

    /// After a merge exchange: two pipes are appended. Pipe 1 passes local
    /// inputs through `TransformDummy` and runs remote inputs through the
    /// generic flight deserializer; pipe 2 again passes local inputs through
    /// and decodes remote blocks back into group-by/aggregate metas.
    /// The first `local_inputs` output ports are assumed to be the local ones.
    fn apply_merge_deserializer(
        &self,
        remote_inputs: usize,
        params: &MergeExchangeParams,
        pipeline: &mut Pipeline,
    ) -> Result<()> {
        let local_inputs = pipeline.output_len() - remote_inputs;
        let mut items = Vec::with_capacity(pipeline.output_len());

        for _index in 0..local_inputs {
            let input = InputPort::create();
            let output = OutputPort::create();
            items.push(PipeItem::create(
                TransformDummy::create(input.clone(), output.clone()),
                vec![input],
                vec![output],
            ));
        }

        for _index in 0..remote_inputs {
            let input = InputPort::create();
            let output = OutputPort::create();
            let schema = &params.schema;
            items.push(PipeItem::create(
                TransformExchangeDeserializer::create(input.clone(), output.clone(), schema),
                vec![input],
                vec![output],
            ));
        }

        pipeline.add_pipe(Pipe::create(items.len(), items.len(), items));

        let mut items = Vec::with_capacity(pipeline.output_len());

        for _index in 0..local_inputs {
            let input = InputPort::create();
            let output = OutputPort::create();
            items.push(PipeItem::create(
                TransformDummy::create(input.clone(), output.clone()),
                vec![input],
                vec![output],
            ));
        }

        for _index in 0..remote_inputs {
            let input = InputPort::create();
            let output = OutputPort::create();
            let proc = match self.aggregator_params.aggregate_functions.is_empty() {
                true => TransformGroupByDeserializer::<Method>::try_create(
                    input.clone(),
                    output.clone(),
                ),
                false => TransformAggregateDeserializer::<Method>::try_create(
                    input.clone(),
                    output.clone(),
                ),
            }?;

            items.push(PipeItem::create(proc, vec![input], vec![output]));
        }

        pipeline.add_pipe(Pipe::create(items.len(), items.len(), items));
        Ok(())
    }

    /// After a shuffle exchange: identical two-pipe layout to
    /// [`Self::apply_merge_deserializer`], differing only in the params type.
    fn apply_shuffle_deserializer(
        &self,
        remote_inputs: usize,
        params: &ShuffleExchangeParams,
        pipeline: &mut Pipeline,
    ) -> Result<()> {
        let local_inputs = pipeline.output_len() - remote_inputs;
        let mut items = Vec::with_capacity(pipeline.output_len());

        for _index in 0..local_inputs {
            let input = InputPort::create();
            let output = OutputPort::create();
            items.push(PipeItem::create(
                TransformDummy::create(input.clone(), output.clone()),
                vec![input],
                vec![output],
            ));
        }

        for _index in 0..remote_inputs {
            let input = InputPort::create();
            let output = OutputPort::create();
            let schema = &params.schema;
            items.push(PipeItem::create(
                TransformExchangeDeserializer::create(input.clone(), output.clone(), schema),
                vec![input],
                vec![output],
            ));
        }

        pipeline.add_pipe(Pipe::create(items.len(), items.len(), items));

        let mut items = Vec::with_capacity(pipeline.output_len());

        for _index in 0..local_inputs {
            let input = InputPort::create();
            let output = OutputPort::create();
            items.push(PipeItem::create(
                TransformDummy::create(input.clone(), output.clone()),
                vec![input],
                vec![output],
            ));
        }

        for _index in 0..remote_inputs {
            let input = InputPort::create();
            let output = OutputPort::create();
            let proc = match self.aggregator_params.aggregate_functions.is_empty() {
                true => TransformGroupByDeserializer::<Method>::try_create(
                    input.clone(),
                    output.clone(),
                ),
                false => TransformAggregateDeserializer::<Method>::try_create(
                    input.clone(),
                    output.clone(),
                ),
            }?;

            items.push(PipeItem::create(proc, vec![input], vec![output]));
        }

        pipeline.add_pipe(Pipe::create(items.len(), items.len(), items));
        Ok(())
    }
}
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use alloc::string::String; use alloc::vec::Vec; use core::mem; use super::qlib::auxv::*; use super::qlib::common::*; use super::task::*; // StackLayout describes the location of the arguments and environment on the // stack. #[derive(Default)] pub struct StackLayout { // ArgvStart is the beginning of the argument vector. pub ArgvStart: u64, // ArgvEnd is the end of the argument vector. pub ArgvEnd: u64, // EnvvStart is the beginning of the environment vector. pub EnvvStart: u64, // EnvvEnd is the end of the environment vector. 
pub EvvvEnd: u64, } pub struct Stack { pub sp: u64 } impl Stack { pub fn New(addr: u64) -> Self { return Stack { sp: addr } } pub fn PushType<T: Copy>(&mut self, task: &Task, data: &T) -> Result<u64> { let size = mem::size_of::<T>(); self.sp -= size as u64; task.CopyOutObj(data, self.sp).unwrap(); return Ok(self.sp); } pub fn PopType<T: Copy>(&mut self, task: &Task, data: &mut T) -> Result<u64> { let size = mem::size_of::<T>(); *data = task.CopyInObj(self.sp)?; self.sp += size as u64; return Ok(self.sp); } pub fn PushStr(&mut self, task: &Task, str: &str) -> Result<u64> { let len = str.len(); self.sp = self.sp - len as u64 - 1; task.CopyOutString(self.sp, len + 1, str)?; return Ok(self.sp) } pub fn PushU64(&mut self, task: &Task, val: u64) -> Result<u64> { self.sp = self.sp - 8; task.CopyOutObj(&val, self.sp)?; return Ok(self.sp) } pub fn PushU32(&mut self, task: &Task, val: u32) -> Result<u64> { self.sp = self.sp - 4; task.CopyOutObj(&val, self.sp)?; return Ok(self.sp) } pub fn PushU16(&mut self, task: &Task, val: u16) -> Result<u64> { self.sp = self.sp - 2; task.CopyOutObj(&val, self.sp)?; return Ok(self.sp) } pub fn PushU8(&mut self, task: &Task, val: u8) -> Result<u64> { self.sp = self.sp - 1; task.CopyOutObj(&val, self.sp)?; return Ok(self.sp) } pub fn Pad16(&mut self, _task: &Task) -> Result<u64> { let offset = self.sp & 0xf; self.sp -= offset; return Ok(self.sp) } // LoadEnv pushes the given args, env and aux vector to the stack using the // well-known format for a new executable. It returns the start and end // of the argument and environment vectors. pub fn LoadEnv(&mut self, task: &Task, envs: &[String], args: &[String], auxv: &[AuxEntry]) -> Result<StackLayout> { let mut l = StackLayout::default(); // Make sure we start with a 16-byte alignment. self.Pad16(task)?; // Push the environment vector so the end of the argument vector is adjacent to // the beginning of the environment vector. 
// While the System V abi for x86_64 does not specify an ordering to the // Information Block (the block holding the arg, env, and aux vectors), // support features like setproctitle(3) naturally expect these segments // to be in this order. See: https://www.uclibc.org/docs/psABI-x86_64.pdf // page 29. l.EvvvEnd = self.sp; let mut envAddrs = Vec::new(); for i in 0..envs.len() { let idx = envs.len() - i - 1; let addr = self.PushStr(task, envs[idx].as_str())?; envAddrs.push(addr); } l.EnvvStart = self.sp; // Push our args. l.ArgvEnd = self.sp; let mut argAddrs: Vec<u64> = Vec::new(); for i in 0..args.len() { let idx = args.len() - i - 1; let addr = self.PushStr(task, args[idx].as_str())?; argAddrs.push(addr); } l.ArgvStart = self.sp; // We need to align the arguments appropriately. // // We must finish on a 16-byte alignment, but we'll play it // conservatively and finish at 32-bytes. It would be nice to be able // to call Align here, but unfortunately we need to align the stack // with all the variable sized arrays pushed. So we just need to do // some calculations. let argvSize = 8 * (args.len() + 1); let envvSize = 8 * (envs.len() + 1); let auxvSize = 8 * 2 * (auxv.len() + 1); let total = argvSize + envvSize + auxvSize + 8; let expectedBottom = self.sp - total as u64; if expectedBottom % 32 != 0 { self.sp -= expectedBottom % 32; } for i in 0..auxv.len() { self.PushU64(task, auxv[i].Val)?; self.PushU64(task, auxv[i].Key as u64)?; } self.PushU64(task, 0)?; /*env*/ for i in 0..envAddrs.len() { self.PushU64(task, envAddrs[i])?; } self.PushU64(task, 0)?; /*argv*/ for i in 0..argAddrs.len() { self.PushU64(task, argAddrs[i])?; } /*argc*/ self.PushU64(task, argAddrs.len() as u64)?; return Ok(l) } }
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. pub fn parse_http_generic_error( response: &http::Response<bytes::Bytes>, ) -> Result<smithy_types::Error, smithy_json::deserialize::Error> { crate::json_errors::parse_generic_error(response.body(), response.headers()) } pub fn deser_structure_concurrent_modification_exceptionjson_err( input: &[u8], mut builder: crate::error::concurrent_modification_exception::Builder, ) -> Result<crate::error::concurrent_modification_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_limit_exceeded_exceptionjson_err( input: &[u8], mut builder: crate::error::limit_exceeded_exception::Builder, ) -> Result<crate::error::limit_exceeded_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? 
{ Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_not_found_exceptionjson_err( input: &[u8], mut builder: crate::error::not_found_exception::Builder, ) -> Result<crate::error::not_found_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_device_not_registered_exceptionjson_err( input: &[u8], mut builder: crate::error::device_not_registered_exception::Builder, ) -> Result<crate::error::device_not_registered_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_skill_not_linked_exceptionjson_err( input: &[u8], mut builder: crate::error::skill_not_linked_exception::Builder, ) -> Result<crate::error::skill_not_linked_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_already_exists_exceptionjson_err( input: &[u8], mut builder: crate::error::already_exists_exception::Builder, ) -> Result<crate::error::already_exists_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_create_address_book( input: &[u8], mut builder: crate::output::create_address_book_output::Builder, ) -> Result<crate::output::create_address_book_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "AddressBookArn" => { builder = builder.set_address_book_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_create_business_report_schedule( input: &[u8], mut builder: crate::output::create_business_report_schedule_output::Builder, ) -> Result< crate::output::create_business_report_schedule_output::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "ScheduleArn" => { builder = builder.set_schedule_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_create_conference_provider( input: &[u8], mut builder: crate::output::create_conference_provider_output::Builder, ) -> Result< crate::output::create_conference_provider_output::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "ConferenceProviderArn" => { builder = builder.set_conference_provider_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_create_contact( input: &[u8], mut builder: crate::output::create_contact_output::Builder, ) -> Result<crate::output::create_contact_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "ContactArn" => { builder = builder.set_contact_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_create_gateway_group( input: &[u8], mut builder: crate::output::create_gateway_group_output::Builder, ) -> Result<crate::output::create_gateway_group_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "GatewayGroupArn" => { builder = builder.set_gateway_group_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_invalid_certificate_authority_exceptionjson_err( input: &[u8], mut builder: crate::error::invalid_certificate_authority_exception::Builder, ) -> Result< crate::error::invalid_certificate_authority_exception::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_invalid_service_linked_role_state_exceptionjson_err( input: &[u8], mut builder: crate::error::invalid_service_linked_role_state_exception::Builder, ) -> Result< crate::error::invalid_service_linked_role_state_exception::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `CreateNetworkProfile` JSON response body: reads "NetworkProfileArn" into the output builder, skipping unknown keys. */
pub fn deser_operation_create_network_profile( input: &[u8], mut builder: crate::output::create_network_profile_output::Builder, ) -> Result<crate::output::create_network_profile_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "NetworkProfileArn" => { builder = builder.set_network_profile_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `CreateProfile` JSON response body: reads "ProfileArn" into the output builder, skipping unknown keys. */
pub fn deser_operation_create_profile( input: &[u8], mut builder: crate::output::create_profile_output::Builder, ) -> Result<crate::output::create_profile_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "ProfileArn" => { builder = builder.set_profile_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `CreateRoom` JSON response body: reads "RoomArn" into the output builder, skipping unknown keys. */
pub fn deser_operation_create_room( input: &[u8], mut builder: crate::output::create_room_output::Builder, ) -> Result<crate::output::create_room_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "RoomArn" => { builder = builder.set_room_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `CreateSkillGroup` JSON response body: reads "SkillGroupArn" into the output builder, skipping unknown keys. */
pub fn deser_operation_create_skill_group( input: &[u8], mut builder: crate::output::create_skill_group_output::Builder, ) -> Result<crate::output::create_skill_group_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "SkillGroupArn" => { builder = builder.set_skill_group_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `ResourceInUseException` JSON error body: reads "Message" and "ClientRequestToken" string fields into the builder, skipping unknown keys. */
pub fn deser_structure_resource_in_use_exceptionjson_err( input: &[u8], mut builder: crate::error::resource_in_use_exception::Builder, ) -> Result<crate::error::resource_in_use_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ClientRequestToken" => { builder = builder.set_client_request_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `CreateUser` JSON response body: reads "UserArn" into the output builder, skipping unknown keys. */
pub fn deser_operation_create_user( input: &[u8], mut builder: crate::output::create_user_output::Builder, ) -> Result<crate::output::create_user_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "UserArn" => { builder = builder.set_user_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `ResourceAssociatedException` JSON error body: reads the "Message" string field into the builder, skipping unknown keys. */
pub fn deser_structure_resource_associated_exceptionjson_err( input: &[u8], mut builder: crate::error::resource_associated_exception::Builder, ) -> Result<crate::error::resource_associated_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetAddressBook` JSON response body: delegates the "AddressBook" object to `deser_structure_address_book`, skipping unknown keys. */
pub fn deser_operation_get_address_book( input: &[u8], mut builder: crate::output::get_address_book_output::Builder, ) -> Result<crate::output::get_address_book_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "AddressBook" => { builder = builder.set_address_book( crate::json_deser::deser_structure_address_book(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetConferencePreference` JSON response body: delegates "Preference" to `deser_structure_conference_preference`, skipping unknown keys. */
pub fn deser_operation_get_conference_preference( input: &[u8], mut builder: crate::output::get_conference_preference_output::Builder, ) -> Result<crate::output::get_conference_preference_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()?
/* continuation: match arms of the deserializer started on the previous line */ { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Preference" => { builder = builder.set_preference( crate::json_deser::deser_structure_conference_preference(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetConferenceProvider` JSON response body: delegates "ConferenceProvider" to `deser_structure_conference_provider`, skipping unknown keys. */
pub fn deser_operation_get_conference_provider( input: &[u8], mut builder: crate::output::get_conference_provider_output::Builder, ) -> Result<crate::output::get_conference_provider_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, ..
/* continuation: key-dispatch arm of the deserializer started on the previous line */ }) => { match key.to_unescaped()?.as_ref() { "ConferenceProvider" => { builder = builder.set_conference_provider( crate::json_deser::deser_structure_conference_provider(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetContact` JSON response body: delegates "Contact" to `deser_structure_contact`, skipping unknown keys. */
pub fn deser_operation_get_contact( input: &[u8], mut builder: crate::output::get_contact_output::Builder, ) -> Result<crate::output::get_contact_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, ..
/* continuation: key-dispatch arm of the deserializer started on the previous line */ }) => { match key.to_unescaped()?.as_ref() { "Contact" => { builder = builder .set_contact(crate::json_deser::deser_structure_contact(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetDevice` JSON response body: delegates "Device" to `deser_structure_device`, skipping unknown keys. */
pub fn deser_operation_get_device( input: &[u8], mut builder: crate::output::get_device_output::Builder, ) -> Result<crate::output::get_device_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Device" => { builder = builder.set_device(crate::json_deser::deser_structure_device(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetGateway` JSON response body: delegates "Gateway" to `deser_structure_gateway`, skipping unknown keys. */
pub fn deser_operation_get_gateway( input: &[u8], mut builder: crate::output::get_gateway_output::Builder, ) -> Result<crate::output::get_gateway_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()?
/* continuation: match arms of the deserializer started on the previous line */ { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Gateway" => { builder = builder .set_gateway(crate::json_deser::deser_structure_gateway(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetGatewayGroup` JSON response body: delegates "GatewayGroup" to `deser_structure_gateway_group`, skipping unknown keys. */
pub fn deser_operation_get_gateway_group( input: &[u8], mut builder: crate::output::get_gateway_group_output::Builder, ) -> Result<crate::output::get_gateway_group_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, ..
/* continuation: key-dispatch arm of the deserializer started on the previous line */ }) => { match key.to_unescaped()?.as_ref() { "GatewayGroup" => { builder = builder.set_gateway_group( crate::json_deser::deser_structure_gateway_group(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetInvitationConfiguration` JSON response body: reads "OrganizationName", "ContactEmail", and "PrivateSkillIds" into the output builder, skipping unknown keys. */
pub fn deser_operation_get_invitation_configuration( input: &[u8], mut builder: crate::output::get_invitation_configuration_output::Builder, ) -> Result< crate::output::get_invitation_configuration_output::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "OrganizationName" => { builder = builder.set_organization_name( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ContactEmail" => { builder = builder.set_contact_email( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: finish "ContactEmail", then the "PrivateSkillIds" list field, then close the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "PrivateSkillIds" => { builder = builder.set_private_skill_ids( crate::json_deser::deser_list_short_skill_id_list(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `InvalidSecretsManagerResourceException` JSON error body: reads the "Message" string field into the builder, skipping unknown keys. */
pub fn deser_structure_invalid_secrets_manager_resource_exceptionjson_err( input: &[u8], mut builder: crate::error::invalid_secrets_manager_resource_exception::Builder, ) -> Result< crate::error::invalid_secrets_manager_resource_exception::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetNetworkProfile` JSON response body: delegates "NetworkProfile" to `deser_structure_network_profile`, skipping unknown keys. */
pub fn deser_operation_get_network_profile( input: &[u8], mut builder: crate::output::get_network_profile_output::Builder, ) -> Result<crate::output::get_network_profile_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "NetworkProfile" => { builder = builder.set_network_profile( crate::json_deser::deser_structure_network_profile(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetProfile` JSON response body: delegates "Profile" to `deser_structure_profile`, skipping unknown keys. */
pub fn deser_operation_get_profile( input: &[u8], mut builder: crate::output::get_profile_output::Builder, ) -> Result<crate::output::get_profile_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()?
/* continuation: match arms of the deserializer started on the previous line */ { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Profile" => { builder = builder .set_profile(crate::json_deser::deser_structure_profile(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetRoom` JSON response body: delegates "Room" to `deser_structure_room`, skipping unknown keys. */
pub fn deser_operation_get_room( input: &[u8], mut builder: crate::output::get_room_output::Builder, ) -> Result<crate::output::get_room_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, ..
/* continuation: key-dispatch arm of the deserializer started on the previous line */ }) => { match key.to_unescaped()?.as_ref() { "Room" => { builder = builder.set_room(crate::json_deser::deser_structure_room(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetRoomSkillParameter` JSON response body: delegates "RoomSkillParameter" to `deser_structure_room_skill_parameter`, skipping unknown keys. */
pub fn deser_operation_get_room_skill_parameter( input: &[u8], mut builder: crate::output::get_room_skill_parameter_output::Builder, ) -> Result<crate::output::get_room_skill_parameter_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, ..
/* continuation: key-dispatch arm of the deserializer started on the previous line */ }) => { match key.to_unescaped()?.as_ref() { "RoomSkillParameter" => { builder = builder.set_room_skill_parameter( crate::json_deser::deser_structure_room_skill_parameter(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `GetSkillGroup` JSON response body: delegates "SkillGroup" to `deser_structure_skill_group`, skipping unknown keys. */
pub fn deser_operation_get_skill_group( input: &[u8], mut builder: crate::output::get_skill_group_output::Builder, ) -> Result<crate::output::get_skill_group_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, ..
/* continuation: key-dispatch arm of the deserializer started on the previous line */ }) => { match key.to_unescaped()?.as_ref() { "SkillGroup" => { builder = builder.set_skill_group( crate::json_deser::deser_structure_skill_group(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `ListBusinessReportSchedules` JSON response body: reads the "BusinessReportSchedules" list and the "NextToken" pagination cursor into the output builder, skipping unknown keys. */
pub fn deser_operation_list_business_report_schedules( input: &[u8], mut builder: crate::output::list_business_report_schedules_output::Builder, ) -> Result< crate::output::list_business_report_schedules_output::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "BusinessReportSchedules" => { builder = builder.set_business_report_schedules( crate::json_deser::deser_list_business_report_schedule_list(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `ListConferenceProviders` JSON response body: reads the "ConferenceProviders" list and "NextToken" into the output builder, skipping unknown keys. */
pub fn deser_operation_list_conference_providers( input: &[u8], mut builder: crate::output::list_conference_providers_output::Builder, ) -> Result<crate::output::list_conference_providers_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "ConferenceProviders" => { builder = builder.set_conference_providers( crate::json_deser::deser_list_conference_providers_list(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `ListDeviceEvents` JSON response body: reads the "DeviceEvents" list and "NextToken" into the output builder, skipping unknown keys. */
pub fn deser_operation_list_device_events( input: &[u8], mut builder: crate::output::list_device_events_output::Builder, ) -> Result<crate::output::list_device_events_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "DeviceEvents" => { builder = builder.set_device_events( crate::json_deser::deser_list_device_event_list(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `ListGatewayGroups` JSON response body: reads the "GatewayGroups" summaries list and "NextToken" into the output builder, skipping unknown keys. */
pub fn deser_operation_list_gateway_groups( input: &[u8], mut builder: crate::output::list_gateway_groups_output::Builder, ) -> Result<crate::output::list_gateway_groups_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "GatewayGroups" => { builder = builder.set_gateway_groups( crate::json_deser::deser_list_gateway_group_summaries(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `ListGateways` JSON response body: reads the "Gateways" summaries list and "NextToken" into the output builder, skipping unknown keys. */
pub fn deser_operation_list_gateways( input: &[u8], mut builder: crate::output::list_gateways_output::Builder, ) -> Result<crate::output::list_gateways_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Gateways" => { builder = builder .set_gateways(crate::json_deser::deser_list_gateway_summaries(tokens)?); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `ListSkills` JSON response body: reads the "SkillSummaries" list and "NextToken" into the output builder, skipping unknown keys. */
pub fn deser_operation_list_skills( input: &[u8], mut builder: crate::output::list_skills_output::Builder, ) -> Result<crate::output::list_skills_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "SkillSummaries" => { builder = builder.set_skill_summaries( crate::json_deser::deser_list_skill_summary_list(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
/* continuation: closes the deserializer started on the previous line */ .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/** Deserializes the `ListSkillsStoreCategories` JSON response body: reads the "CategoryList" and "NextToken" into the output builder, skipping unknown keys. */
pub fn deser_operation_list_skills_store_categories( input: &[u8], mut builder: crate::output::list_skills_store_categories_output::Builder, ) -> Result< crate::output::list_skills_store_categories_output::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "CategoryList" => { builder = builder.set_category_list( crate::json_deser::deser_list_category_list(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_list_skills_store_skills_by_category( input: &[u8], mut builder: crate::output::list_skills_store_skills_by_category_output::Builder, ) -> Result< crate::output::list_skills_store_skills_by_category_output::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "SkillsStoreSkills" => { builder = builder.set_skills_store_skills( crate::json_deser::deser_list_skills_store_skill_list(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_list_smart_home_appliances( input: &[u8], mut builder: crate::output::list_smart_home_appliances_output::Builder, ) -> Result< crate::output::list_smart_home_appliances_output::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "SmartHomeAppliances" => { builder = builder.set_smart_home_appliances( crate::json_deser::deser_list_smart_home_appliance_list(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_list_tags( input: &[u8], mut builder: crate::output::list_tags_output::Builder, ) -> Result<crate::output::list_tags_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Tags" => { builder = builder.set_tags(crate::json_deser::deser_list_tag_list(tokens)?); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* UnauthorizedException error body -> error builder (reads "Message"; generated code). */
pub fn deser_structure_unauthorized_exceptionjson_err( input: &[u8], mut builder: crate::error::unauthorized_exception::Builder, ) -> Result<crate::error::unauthorized_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* InvalidDeviceException error body -> error builder (reads "Message"). */
pub fn deser_structure_invalid_device_exceptionjson_err( input: &[u8], mut builder: crate::error::invalid_device_exception::Builder, ) -> Result<crate::error::invalid_device_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* RegisterAVSDevice response -> output builder ("DeviceArn"). */
pub fn deser_operation_register_avs_device( input: &[u8], mut builder: crate::output::register_avs_device_output::Builder, ) -> Result<crate::output::register_avs_device_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "DeviceArn" => { builder = builder.set_device_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* ResolveRoom response -> output builder ("RoomArn", "RoomName", "RoomSkillParameters"). */
pub fn deser_operation_resolve_room( input: &[u8], mut builder: crate::output::resolve_room_output::Builder, ) -> Result<crate::output::resolve_room_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "RoomArn" => { builder = builder.set_room_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "RoomName" => { builder = builder.set_room_name( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "RoomSkillParameters" => { builder = builder.set_room_skill_parameters( crate::json_deser::deser_list_room_skill_parameters(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* SearchAddressBooks response -> output builder ("AddressBooks", "NextToken", "TotalCount"). Generated code. */
pub fn deser_operation_search_address_books( input: &[u8], mut builder: crate::output::search_address_books_output::Builder, ) -> Result<crate::output::search_address_books_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "AddressBooks" => { builder = builder.set_address_books( crate::json_deser::deser_list_address_book_data_list(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "TotalCount" => { builder = builder.set_total_count( smithy_json::deserialize::token::expect_number_or_null(tokens.next())?
.map(|v| v.to_i32()), ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* SearchContacts response -> output builder ("Contacts", "NextToken", "TotalCount"). */
pub fn deser_operation_search_contacts( input: &[u8], mut builder: crate::output::search_contacts_output::Builder, ) -> Result<crate::output::search_contacts_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Contacts" => { builder = builder .set_contacts(crate::json_deser::deser_list_contact_data_list(tokens)?); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "TotalCount" => { builder = builder.set_total_count( smithy_json::deserialize::token::expect_number_or_null(tokens.next())?
.map(|v| v.to_i32()), ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* SearchDevices response -> output builder ("Devices", "NextToken", "TotalCount"). */
pub fn deser_operation_search_devices( input: &[u8], mut builder: crate::output::search_devices_output::Builder, ) -> Result<crate::output::search_devices_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Devices" => { builder = builder .set_devices(crate::json_deser::deser_list_device_data_list(tokens)?); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "TotalCount" => { builder = builder.set_total_count( smithy_json::deserialize::token::expect_number_or_null(tokens.next())?
.map(|v| v.to_i32()), ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* SearchNetworkProfiles response -> output builder ("NetworkProfiles", "NextToken", "TotalCount"). */
pub fn deser_operation_search_network_profiles( input: &[u8], mut builder: crate::output::search_network_profiles_output::Builder, ) -> Result<crate::output::search_network_profiles_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "NetworkProfiles" => { builder = builder.set_network_profiles( crate::json_deser::deser_list_network_profile_data_list(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "TotalCount" => { builder = builder.set_total_count( smithy_json::deserialize::token::expect_number_or_null(tokens.next())?
.map(|v| v.to_i32()), ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* SearchProfiles response -> output builder ("Profiles", "NextToken", "TotalCount"). */
pub fn deser_operation_search_profiles( input: &[u8], mut builder: crate::output::search_profiles_output::Builder, ) -> Result<crate::output::search_profiles_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Profiles" => { builder = builder .set_profiles(crate::json_deser::deser_list_profile_data_list(tokens)?); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "TotalCount" => { builder = builder.set_total_count( smithy_json::deserialize::token::expect_number_or_null(tokens.next())?
.map(|v| v.to_i32()), ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* SearchRooms response -> output builder ("Rooms", "NextToken", "TotalCount"). Generated code. */
pub fn deser_operation_search_rooms( input: &[u8], mut builder: crate::output::search_rooms_output::Builder, ) -> Result<crate::output::search_rooms_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Rooms" => { builder = builder .set_rooms(crate::json_deser::deser_list_room_data_list(tokens)?); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "TotalCount" => { builder = builder.set_total_count( smithy_json::deserialize::token::expect_number_or_null(tokens.next())?
.map(|v| v.to_i32()), ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* SearchSkillGroups response -> output builder ("SkillGroups", "NextToken", "TotalCount"). */
pub fn deser_operation_search_skill_groups( input: &[u8], mut builder: crate::output::search_skill_groups_output::Builder, ) -> Result<crate::output::search_skill_groups_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "SkillGroups" => { builder = builder.set_skill_groups( crate::json_deser::deser_list_skill_group_data_list(tokens)?, ); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "TotalCount" => { builder = builder.set_total_count( smithy_json::deserialize::token::expect_number_or_null(tokens.next())?
.map(|v| v.to_i32()), ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* SearchUsers response -> output builder ("Users", "NextToken", "TotalCount"). */
pub fn deser_operation_search_users( input: &[u8], mut builder: crate::output::search_users_output::Builder, ) -> Result<crate::output::search_users_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Users" => { builder = builder .set_users(crate::json_deser::deser_list_user_data_list(tokens)?); } "NextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "TotalCount" => { builder = builder.set_total_count( smithy_json::deserialize::token::expect_number_or_null(tokens.next())?
.map(|v| v.to_i32()), ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* SendAnnouncement response -> output builder ("AnnouncementArn"). */
pub fn deser_operation_send_announcement( input: &[u8], mut builder: crate::output::send_announcement_output::Builder, ) -> Result<crate::output::send_announcement_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "AnnouncementArn" => { builder = builder.set_announcement_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* InvalidUserStatusException error body -> error builder (reads "Message"). Generated code. */
pub fn deser_structure_invalid_user_status_exceptionjson_err( input: &[u8], mut builder: crate::error::invalid_user_status_exception::Builder, ) -> Result<crate::error::invalid_user_status_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* NameInUseException error body -> error builder (reads "Message"). */
pub fn deser_structure_name_in_use_exceptionjson_err( input: &[u8], mut builder: crate::error::name_in_use_exception::Builder, ) -> Result<crate::error::name_in_use_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) }
/* Substitutes the empty JSON document for an empty response body so the token
   iterator always sees a well-formed object. */
pub fn or_empty_doc(data: &[u8]) -> &[u8] { if data.is_empty() { b"{}" } else { data } }
/* Nested AddressBook structure: JSON null -> Ok(None), object -> Ok(Some(model));
   reads "AddressBookArn", "Name", "Description"; unknown keys skipped. */
pub fn deser_structure_address_book<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::AddressBook>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { ..
}) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::AddressBook::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "AddressBookArn" => { builder = builder.set_address_book_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Name" => { builder = builder.set_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } }
/* Nested ConferencePreference structure: null -> None, object -> Some(model);
   reads "DefaultConferenceProviderArn". Generated code. */
pub fn deser_structure_conference_preference<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::ConferencePreference>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::ConferencePreference::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, ..
}) => { match key.to_unescaped()?.as_ref() { "DefaultConferenceProviderArn" => { builder = builder.set_default_conference_provider_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } }
/* Nested ConferenceProvider structure: null -> None, object -> Some(model).
   "Type" is mapped through ConferenceProviderType::from; IPDialIn/PSTNDialIn/
   MeetingSetting delegate to their own structure deserializers. */
pub fn deser_structure_conference_provider<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::ConferenceProvider>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::ConferenceProvider::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Name" => { builder = builder.set_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Type" => { builder = builder.set_type( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )?
.map(|s| { s.to_unescaped().map(|u| { crate::model::ConferenceProviderType::from(u.as_ref()) }) }) .transpose()?, ); } "IPDialIn" => { builder = builder.set_ip_dial_in( crate::json_deser::deser_structure_ip_dial_in(tokens)?, ); } "PSTNDialIn" => { builder = builder.set_pstn_dial_in( crate::json_deser::deser_structure_pstn_dial_in(tokens)?, ); } "MeetingSetting" => { builder = builder.set_meeting_setting( crate::json_deser::deser_structure_meeting_setting(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } }
/* Nested Contact structure: null -> None, object -> Some(model); string fields plus
   PhoneNumbers/SipAddresses list delegates. */
pub fn deser_structure_contact<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::Contact>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::Contact::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "ContactArn" => { builder = builder.set_contact_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DisplayName" => { builder = builder.set_display_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "FirstName" => { builder = builder.set_first_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "LastName" => { builder = builder.set_last_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "PhoneNumber" => { builder = builder.set_phone_number( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "PhoneNumbers" => { builder = builder.set_phone_numbers( crate::json_deser::deser_list_phone_number_list(tokens)?, ); } "SipAddresses" => { builder = builder.set_sip_addresses( crate::json_deser::deser_list_sip_address_list(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } }
/* Nested Device structure: null -> None, object -> Some(model). String fields plus
   DeviceStatus (mapped through DeviceStatus::from) and nested status/network-profile
   structures. Generated code. */
pub fn deser_structure_device<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::Device>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::Device::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "DeviceArn" => { builder = builder.set_device_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DeviceSerialNumber" => { builder = builder.set_device_serial_number( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DeviceType" => { builder = builder.set_device_type( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DeviceName" => { builder = builder.set_device_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SoftwareVersion" => { builder = builder.set_software_version( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "MacAddress" => { builder = builder.set_mac_address( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "RoomArn" => { builder = builder.set_room_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DeviceStatus" => { builder = builder.set_device_status( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )?
.map(|s| { s.to_unescaped() .map(|u| crate::model::DeviceStatus::from(u.as_ref())) }) .transpose()?, ); } "DeviceStatusInfo" => { builder = builder.set_device_status_info( crate::json_deser::deser_structure_device_status_info(tokens)?, ); } "NetworkProfileInfo" => { builder = builder.set_network_profile_info( crate::json_deser::deser_structure_device_network_profile_info( tokens, )?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } }
/* Nested Gateway structure: null -> None, object -> Some(model); string fields
   ("Arn", "Name", "Description", ...). Continues past this chunk. */
pub fn deser_structure_gateway<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::Gateway>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::Gateway::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Name" => { builder = builder.set_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "GatewayGroupArn" => { builder = builder.set_gateway_group_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SoftwareVersion" => { builder = builder.set_software_version( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_gateway_group<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::GatewayGroup>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::GatewayGroup::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Name" => { builder = builder.set_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
// --- tail of deser_structure_gateway_group (function opened on an earlier line) ---
// Finishes the "Description" member: unescape the JSON string and take an
// owned String, then fall through to the shared skip/unwind skeleton.
.map(|s| s.to_unescaped().map(|u| u.into_owned()))
.transpose()?,
);
}
// Unknown object member: consume and discard its value so the
// token stream stays aligned with the object structure.
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
return Err(smithy_json::deserialize::Error::custom(
"expected object key or end object",
))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start object or null",
)),
}
}
/// Deserializes a JSON array of strings (skill ids) from the token stream.
///
/// Returns `Ok(None)` for JSON `null`, `Ok(Some(vec))` for an array, and an
/// error for any other leading token. `null` elements inside the array are
/// dropped (only `Some` values are pushed).
///
/// NOTE(review): this file appears to be machine-generated (smithy codegen);
/// prefer regenerating from the model over hand-editing.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_short_skill_id_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        // peek() just confirmed Ok(EndArray), so this next()
                        // cannot yield an Err; the unwrap is infallible here.
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value =
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes a `crate::model::NetworkProfile` from the token stream.
/// `Ok(None)` for JSON `null`; otherwise expects a JSON object and feeds each
/// recognized member into the model builder. (Body continues on the
/// following source line.)
pub fn deser_structure_network_profile<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::NetworkProfile>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::NetworkProfile::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { ..
}) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "NetworkProfileArn" => { builder = builder.set_network_profile_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "NetworkProfileName" => { builder = builder.set_network_profile_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Ssid" => { builder = builder.set_ssid( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SecurityType" => { builder = builder.set_security_type( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::NetworkSecurityType::from(u.as_ref()) }) }) .transpose()?, ); } "EapMethod" => { builder = builder.set_eap_method( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::NetworkEapMethod::from(u.as_ref()) }) }) .transpose()?, ); } "CurrentPassword" => { builder = builder.set_current_password( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "NextPassword" => { builder = builder.set_next_password( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "CertificateAuthorityArn" => { builder = builder.set_certificate_authority_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "TrustAnchors" => { builder = builder.set_trust_anchors( crate::json_deser::deser_list_trust_anchor_list(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_profile<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::Profile>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::Profile::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "ProfileArn" => { builder = builder.set_profile_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ProfileName" => { builder = builder.set_profile_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "IsDefault" => { builder = builder.set_is_default( smithy_json::deserialize::token::expect_bool_or_null( tokens.next(), )?, ); } "Address" => { builder = builder.set_address( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Timezone" => { builder = builder.set_timezone( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DistanceUnit" => { builder = builder.set_distance_unit( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped() .map(|u| crate::model::DistanceUnit::from(u.as_ref())) }) .transpose()?, ); } "TemperatureUnit" => { builder = builder.set_temperature_unit( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::TemperatureUnit::from(u.as_ref()) }) }) .transpose()?, ); } "WakeWord" => { builder = builder.set_wake_word( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped() .map(|u| crate::model::WakeWord::from(u.as_ref())) }) .transpose()?, ); } "Locale" => { builder = builder.set_locale( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SetupModeDisabled" => { builder = builder.set_setup_mode_disabled( smithy_json::deserialize::token::expect_bool_or_null( tokens.next(), )?, ); } "MaxVolumeLimit" => { builder = builder.set_max_volume_limit( smithy_json::deserialize::token::expect_number_or_null( tokens.next(), )? .map(|v| v.to_i32()), ); } "PSTNEnabled" => { builder = builder.set_pstn_enabled( smithy_json::deserialize::token::expect_bool_or_null( tokens.next(), )?, ); } "DataRetentionOptIn" => { builder = builder.set_data_retention_opt_in( smithy_json::deserialize::token::expect_bool_or_null( tokens.next(), )?, ); } "AddressBookArn" => { builder = builder.set_address_book_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "MeetingRoomConfiguration" => { builder = builder.set_meeting_room_configuration( crate::json_deser::deser_structure_meeting_room_configuration( tokens, )?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_room<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::Room>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::Room::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "RoomArn" => { builder = builder.set_room_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "RoomName" => { builder = builder.set_room_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ProviderCalendarId" => { builder = builder.set_provider_calendar_id( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ProfileArn" => { builder = builder.set_profile_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_room_skill_parameter<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::RoomSkillParameter>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::RoomSkillParameter::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "ParameterKey" => { builder = builder.set_parameter_key( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ParameterValue" => { builder = builder.set_parameter_value( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_skill_group<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::SkillGroup>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::SkillGroup::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "SkillGroupArn" => { builder = builder.set_skill_group_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SkillGroupName" => { builder = builder.set_skill_group_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_business_report_schedule_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result< Option<std::vec::Vec<crate::model::BusinessReportSchedule>>, smithy_json::deserialize::Error, > where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_business_report_schedule(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_conference_providers_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::ConferenceProvider>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. 
// --- tail of deser_list_conference_providers_list (opened on an earlier line) ---
})) => {
    // peek() just confirmed Ok(EndArray); consuming it cannot fail.
    tokens.next().transpose().unwrap();
    break;
}
_ => {
    // Delegate each element to the structure deserializer;
    // null elements come back as None and are dropped.
    let value = crate::json_deser::deser_structure_conference_provider(tokens)?;
    if let Some(value) = value {
        items.push(value);
    }
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
/// Deserializes a JSON array of `crate::model::DeviceEvent` structures.
///
/// Returns `Ok(None)` for JSON `null`, `Ok(Some(vec))` for an array, and an
/// error for any other leading token. Null elements are skipped.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_device_event_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::DeviceEvent>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        // Infallible: peek() confirmed Ok(EndArray) above.
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = crate::json_deser::deser_structure_device_event(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes a JSON array of `crate::model::GatewayGroupSummary`
/// structures; same null/array/error contract as the other list
/// deserializers in this file. (Body continues on the following source line.)
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_gateway_group_summaries<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::GatewayGroupSummary>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { ..
})) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_gateway_group_summary(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_gateway_summaries<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::GatewaySummary>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_gateway_summary(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_skill_summary_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::SkillSummary>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. 
})) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_skill_summary(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_category_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::Category>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_category(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_skills_store_skill_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::SkillsStoreSkill>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. 
})) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_skills_store_skill(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_smart_home_appliance_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::SmartHomeAppliance>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_smart_home_appliance(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_tag_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::Tag>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. 
// --- tail of deser_list_tag_list (opened on an earlier line) ---
})) => {
    // peek() just confirmed Ok(EndArray); consuming it cannot fail.
    tokens.next().transpose().unwrap();
    break;
}
_ => {
    // Delegate each element to the Tag structure deserializer;
    // null elements come back as None and are dropped.
    let value = crate::json_deser::deser_structure_tag(tokens)?;
    if let Some(value) = value {
        items.push(value);
    }
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom(
"expected start array or null",
)),
}
}
/// Deserializes a JSON array of `crate::model::RoomSkillParameter`
/// structures.
///
/// Returns `Ok(None)` for JSON `null`, `Ok(Some(vec))` for an array, and an
/// error for any other leading token. Null elements are skipped.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_room_skill_parameters<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::RoomSkillParameter>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        // Infallible: peek() confirmed Ok(EndArray) above.
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value =
                            crate::json_deser::deser_structure_room_skill_parameter(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}
/// Deserializes a JSON array of `crate::model::AddressBookData` structures;
/// same null/array/error contract as the other list deserializers in this
/// file. (Body continues on the following source line.)
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_address_book_data_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::AddressBookData>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { ..
})) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_address_book_data(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_contact_data_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::ContactData>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_contact_data(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_device_data_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::DeviceData>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. 
})) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_device_data(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_network_profile_data_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::NetworkProfileData>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_network_profile_data(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_profile_data_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::ProfileData>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. 
// Tail of `deser_list_profile_data_list` (its definition starts before this span).
                    })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = crate::json_deser::deser_structure_profile_data(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}

/// Deserializes a JSON array of [`crate::model::RoomData`].
///
/// Returns `Ok(None)` for JSON `null`, `Ok(Some(items))` for an array
/// (elements that deserialize to `None` are skipped), and an error for
/// any other leading token.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_room_data_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::RoomData>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    // End of array: consume the EndArray token and stop collecting.
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    // Otherwise parse one element; `None` elements are dropped.
                    _ => {
                        let value = crate::json_deser::deser_structure_room_data(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}

/// Deserializes a JSON array of [`crate::model::SkillGroupData`].
/// (Definition continues past this span.)
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_skill_group_data_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::SkillGroupData>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { ..
// Tail of `deser_list_skill_group_data_list` (its definition starts before this span).
                    })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = crate::json_deser::deser_structure_skill_group_data(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}

/// Deserializes a JSON array of [`crate::model::UserData`].
///
/// Returns `Ok(None)` for JSON `null`, `Ok(Some(items))` for an array
/// (elements that deserialize to `None` are skipped), and an error for
/// any other leading token.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_user_data_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::UserData>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    // End of array: consume the EndArray token and stop collecting.
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    // Otherwise parse one element; `None` elements are dropped.
                    _ => {
                        let value = crate::json_deser::deser_structure_user_data(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}

/// Deserializes a JSON object into [`crate::model::IpDialIn`] via its builder.
/// (Definition continues past this span.)
pub fn deser_structure_ip_dial_in<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::IpDialIn>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::IpDialIn::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, ..
// Tail of `deser_structure_ip_dial_in` (its definition starts before this span):
// dispatches on the object key, setting builder fields; unknown keys are skipped.
                    }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Endpoint" => {
                                builder = builder.set_endpoint(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "CommsProtocol" => {
                                builder = builder.set_comms_protocol(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::CommsProtocol::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}

/// Deserializes a JSON object into [`crate::model::PstnDialIn`] via its builder.
///
/// Returns `Ok(None)` for JSON `null`; unknown keys are skipped, and an
/// unexpected leading token produces an error.
pub fn deser_structure_pstn_dial_in<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::PstnDialIn>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::PstnDialIn::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "CountryCode" => {
                                builder = builder.set_country_code(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "PhoneNumber" => {
                                builder = builder.set_phone_number(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "OneClickIdDelay" => {
                                builder = builder.set_one_click_id_delay(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "OneClickPinDelay" => {
                                builder = builder.set_one_click_pin_delay(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}

/// Deserializes a JSON object into [`crate::model::MeetingSetting`] via its builder.
/// (Definition continues past this span.)
pub fn deser_structure_meeting_setting<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::MeetingSetting>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::MeetingSetting::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "RequirePin" => {
                                builder = builder.set_require_pin(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
// Tail of `deser_structure_meeting_setting` (its definition starts before this
// span): finishes the "RequirePin" enum conversion and closes out the object loop.
                                    .map(|s| {
                                        s.to_unescaped()
                                            .map(|u| crate::model::RequirePin::from(u.as_ref()))
                                    })
                                    .transpose()?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}

/// Deserializes a JSON array of [`crate::model::PhoneNumber`].
///
/// Returns `Ok(None)` for JSON `null`, `Ok(Some(items))` for an array
/// (elements that deserialize to `None` are skipped), and an error for
/// any other leading token.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_phone_number_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::PhoneNumber>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    // End of array: consume the EndArray token and stop collecting.
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    // Otherwise parse one element; `None` elements are dropped.
                    _ => {
                        let value = crate::json_deser::deser_structure_phone_number(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}

/// Deserializes a JSON array of [`crate::model::SipAddress`].
/// (Definition continues past this span.)
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_sip_address_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::SipAddress>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { ..
// Tail of `deser_list_sip_address_list` (its definition starts before this span).
                    })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = crate::json_deser::deser_structure_sip_address(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}

/// Deserializes a JSON object into [`crate::model::DeviceStatusInfo`] via its builder.
///
/// Handles "DeviceStatusDetails" (nested list), "ConnectionStatus" (enum from
/// string), and "ConnectionStatusUpdatedTime" (epoch-seconds timestamp);
/// unknown keys are skipped. Returns `Ok(None)` for JSON `null`.
pub fn deser_structure_device_status_info<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::DeviceStatusInfo>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::DeviceStatusInfo::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "DeviceStatusDetails" => {
                                builder = builder.set_device_status_details(
                                    crate::json_deser::deser_list_device_status_details(tokens)?,
                                );
                            }
                            "ConnectionStatus" => {
                                builder = builder.set_connection_status(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| {
                                        s.to_unescaped().map(|u| {
                                            crate::model::ConnectionStatus::from(u.as_ref())
                                        })
                                    })
                                    .transpose()?,
                                );
                            }
                            "ConnectionStatusUpdatedTime" => {
                                builder = builder.set_connection_status_updated_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}

/// Deserializes a JSON object into [`crate::model::DeviceNetworkProfileInfo`]
/// via its builder. (Definition continues past this span.)
pub fn deser_structure_device_network_profile_info<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::DeviceNetworkProfileInfo>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::DeviceNetworkProfileInfo::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "NetworkProfileArn" => {
                                builder = builder.set_network_profile_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "CertificateArn" => {
                                builder = builder.set_certificate_arn(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
// Tail of `deser_structure_device_network_profile_info` (its definition starts
// before this span): finishes "CertificateArn", then handles the epoch-seconds
// "CertificateExpirationTime" and closes out the object loop.
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            "CertificateExpirationTime" => {
                                builder = builder.set_certificate_expiration_time(
                                    smithy_json::deserialize::token::expect_timestamp_or_null(
                                        tokens.next(),
                                        smithy_types::instant::Format::EpochSeconds,
                                    )?,
                                );
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}

/// Deserializes a JSON array of strings (trust anchors).
///
/// Returns `Ok(None)` for JSON `null`, `Ok(Some(items))` for an array
/// (JSON `null` elements are dropped), and an error for any other
/// leading token.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list_trust_anchor_list<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    // End of array: consume the EndArray token and stop collecting.
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    // Elements are plain strings, unescaped into owned values.
                    _ => {
                        let value =
                            smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
                                .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                .transpose()?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start array or null",
        )),
    }
}

/// Deserializes a JSON object into [`crate::model::MeetingRoomConfiguration`]
/// via its builder. (Definition continues past this span.)
pub fn deser_structure_meeting_room_configuration<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::MeetingRoomConfiguration>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { ..
}) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::MeetingRoomConfiguration::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "RoomUtilizationMetricsEnabled" => { builder = builder.set_room_utilization_metrics_enabled( smithy_json::deserialize::token::expect_bool_or_null( tokens.next(), )?, ); } "EndOfMeetingReminder" => { builder = builder.set_end_of_meeting_reminder( crate::json_deser::deser_structure_end_of_meeting_reminder( tokens, )?, ); } "InstantBooking" => { builder = builder.set_instant_booking( crate::json_deser::deser_structure_instant_booking(tokens)?, ); } "RequireCheckIn" => { builder = builder.set_require_check_in( crate::json_deser::deser_structure_require_check_in(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_business_report_schedule<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::BusinessReportSchedule>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::BusinessReportSchedule::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. 
}) => { match key.to_unescaped()?.as_ref() { "ScheduleArn" => { builder = builder.set_schedule_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ScheduleName" => { builder = builder.set_schedule_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "S3BucketName" => { builder = builder.set_s3_bucket_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "S3KeyPrefix" => { builder = builder.set_s3_key_prefix( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Format" => { builder = builder.set_format( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::BusinessReportFormat::from(u.as_ref()) }) }) .transpose()?, ); } "ContentRange" => { builder = builder.set_content_range( crate::json_deser::deser_structure_business_report_content_range(tokens)? 
); } "Recurrence" => { builder = builder.set_recurrence( crate::json_deser::deser_structure_business_report_recurrence( tokens, )?, ); } "LastBusinessReport" => { builder = builder.set_last_business_report( crate::json_deser::deser_structure_business_report(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_device_event<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::DeviceEvent>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::DeviceEvent::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Type" => { builder = builder.set_type( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::DeviceEventType::from(u.as_ref()) }) }) .transpose()?, ); } "Value" => { builder = builder.set_value( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Timestamp" => { builder = builder.set_timestamp( smithy_json::deserialize::token::expect_timestamp_or_null( tokens.next(), smithy_types::instant::Format::EpochSeconds, )?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_gateway_group_summary<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::GatewayGroupSummary>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::GatewayGroupSummary::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Name" => { builder = builder.set_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_gateway_summary<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::GatewaySummary>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::GatewaySummary::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Name" => { builder = builder.set_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "GatewayGroupArn" => { builder = builder.set_gateway_group_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SoftwareVersion" => { builder = builder.set_software_version( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_skill_summary<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::SkillSummary>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::SkillSummary::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "SkillId" => { builder = builder.set_skill_id( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SkillName" => { builder = builder.set_skill_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SupportsLinking" => { builder = builder.set_supports_linking( smithy_json::deserialize::token::expect_bool_or_null( tokens.next(), )?, ); } "EnablementType" => { builder = builder.set_enablement_type( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| { s.to_unescaped() .map(|u| crate::model::EnablementType::from(u.as_ref())) }) .transpose()?, ); } "SkillType" => { builder = builder.set_skill_type( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped() .map(|u| crate::model::SkillType::from(u.as_ref())) }) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_category<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::Category>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::Category::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "CategoryId" => { builder = builder.set_category_id( smithy_json::deserialize::token::expect_number_or_null( tokens.next(), )? .map(|v| v.to_i64()), ); } "CategoryName" => { builder = builder.set_category_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_skills_store_skill<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::SkillsStoreSkill>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::SkillsStoreSkill::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "SkillId" => { builder = builder.set_skill_id( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SkillName" => { builder = builder.set_skill_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ShortDescription" => { builder = builder.set_short_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "IconUrl" => { builder = builder.set_icon_url( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SampleUtterances" => { builder = builder.set_sample_utterances( crate::json_deser::deser_list_sample_utterances(tokens)?, ); } "SkillDetails" => { builder = builder.set_skill_details( crate::json_deser::deser_structure_skill_details(tokens)?, ); } "SupportsLinking" => { builder = builder.set_supports_linking( smithy_json::deserialize::token::expect_bool_or_null( tokens.next(), )?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_smart_home_appliance<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::SmartHomeAppliance>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::SmartHomeAppliance::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "FriendlyName" => { builder = builder.set_friendly_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ManufacturerName" => { builder = builder.set_manufacturer_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_tag<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::Tag>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::Tag::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Key" => { builder = builder.set_key( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Value" => { builder = builder.set_value( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_address_book_data<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::AddressBookData>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::AddressBookData::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "AddressBookArn" => { builder = builder.set_address_book_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Name" => { builder = builder.set_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_contact_data<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::ContactData>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::ContactData::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "ContactArn" => { builder = builder.set_contact_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DisplayName" => { builder = builder.set_display_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "FirstName" => { builder = builder.set_first_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "LastName" => { builder = builder.set_last_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "PhoneNumber" => { builder = builder.set_phone_number( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "PhoneNumbers" => { builder = builder.set_phone_numbers( crate::json_deser::deser_list_phone_number_list(tokens)?, ); } "SipAddresses" => { builder = builder.set_sip_addresses( crate::json_deser::deser_list_sip_address_list(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_device_data<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::DeviceData>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::DeviceData::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "DeviceArn" => { builder = builder.set_device_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DeviceSerialNumber" => { builder = builder.set_device_serial_number( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DeviceType" => { builder = builder.set_device_type( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DeviceName" => { builder = builder.set_device_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SoftwareVersion" => { builder = builder.set_software_version( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "MacAddress" => { builder = builder.set_mac_address( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DeviceStatus" => { builder = builder.set_device_status( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped() .map(|u| crate::model::DeviceStatus::from(u.as_ref())) }) .transpose()?, ); } "NetworkProfileArn" => { builder = builder.set_network_profile_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "NetworkProfileName" => { builder = builder.set_network_profile_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "RoomArn" => { builder = builder.set_room_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "RoomName" => { builder = builder.set_room_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DeviceStatusInfo" => { builder = builder.set_device_status_info( crate::json_deser::deser_structure_device_status_info(tokens)?, ); } "CreatedTime" => { builder = builder.set_created_time( smithy_json::deserialize::token::expect_timestamp_or_null( tokens.next(), smithy_types::instant::Format::EpochSeconds, )?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_network_profile_data<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::NetworkProfileData>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::NetworkProfileData::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "NetworkProfileArn" => { builder = builder.set_network_profile_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "NetworkProfileName" => { builder = builder.set_network_profile_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Ssid" => { builder = builder.set_ssid( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SecurityType" => { builder = builder.set_security_type( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::NetworkSecurityType::from(u.as_ref()) }) }) .transpose()?, ); } "EapMethod" => { builder = builder.set_eap_method( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::NetworkEapMethod::from(u.as_ref()) }) }) .transpose()?, ); } "CertificateAuthorityArn" => { builder = builder.set_certificate_authority_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_profile_data<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::ProfileData>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::ProfileData::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. 
}) => { match key.to_unescaped()?.as_ref() { "ProfileArn" => { builder = builder.set_profile_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ProfileName" => { builder = builder.set_profile_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "IsDefault" => { builder = builder.set_is_default( smithy_json::deserialize::token::expect_bool_or_null( tokens.next(), )?, ); } "Address" => { builder = builder.set_address( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Timezone" => { builder = builder.set_timezone( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "DistanceUnit" => { builder = builder.set_distance_unit( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped() .map(|u| crate::model::DistanceUnit::from(u.as_ref())) }) .transpose()?, ); } "TemperatureUnit" => { builder = builder.set_temperature_unit( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::TemperatureUnit::from(u.as_ref()) }) }) .transpose()?, ); } "WakeWord" => { builder = builder.set_wake_word( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped() .map(|u| crate::model::WakeWord::from(u.as_ref())) }) .transpose()?, ); } "Locale" => { builder = builder.set_locale( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_room_data<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::RoomData>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::RoomData::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "RoomArn" => { builder = builder.set_room_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "RoomName" => { builder = builder.set_room_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ProviderCalendarId" => { builder = builder.set_provider_calendar_id( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ProfileArn" => { builder = builder.set_profile_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ProfileName" => { builder = builder.set_profile_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_skill_group_data<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::SkillGroupData>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::SkillGroupData::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "SkillGroupArn" => { builder = builder.set_skill_group_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "SkillGroupName" => { builder = builder.set_skill_group_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Description" => { builder = builder.set_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_user_data<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::UserData>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::UserData::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "UserArn" => { builder = builder.set_user_arn( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "FirstName" => { builder = builder.set_first_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "LastName" => { builder = builder.set_last_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Email" => { builder = builder.set_email( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "EnrollmentStatus" => { builder = builder.set_enrollment_status( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::EnrollmentStatus::from(u.as_ref()) }) }) .transpose()?, ); } "EnrollmentId" => { builder = builder.set_enrollment_id( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_phone_number<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::PhoneNumber>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::PhoneNumber::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Number" => { builder = builder.set_number( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Type" => { builder = builder.set_type( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| { s.to_unescaped().map(|u| { crate::model::PhoneNumberType::from(u.as_ref()) }) }) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_sip_address<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::SipAddress>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::SipAddress::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Uri" => { builder = builder.set_uri( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Type" => { builder = builder.set_type( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| { s.to_unescaped() .map(|u| crate::model::SipType::from(u.as_ref())) }) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_device_status_details<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<crate::model::DeviceStatusDetail>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = crate::json_deser::deser_structure_device_status_detail(tokens)?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } pub fn deser_structure_end_of_meeting_reminder<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::EndOfMeetingReminder>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::EndOfMeetingReminder::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. 
}) => break,
// Each ObjectKey token is matched against the known member names of the
// EndOfMeetingReminder structure; unrecognized keys are skipped below.
Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
    match key.to_unescaped()?.as_ref() {
        "ReminderAtMinutes" => {
            builder = builder.set_reminder_at_minutes(
                crate::json_deser::deser_list_end_of_meeting_reminder_minutes_list(tokens)?
            );
        }
        "ReminderType" => {
            builder = builder.set_reminder_type(
                smithy_json::deserialize::token::expect_string_or_null(
                    tokens.next(),
                )?
                .map(|s| {
                    // Unescape the JSON string, then convert it into the
                    // generated enum type via its From<&str> impl.
                    s.to_unescaped().map(|u| {
                        crate::model::EndOfMeetingReminderType::from(u.as_ref())
                    })
                })
                .transpose()?,
            );
        }
        "Enabled" => {
            builder = builder.set_enabled(
                smithy_json::deserialize::token::expect_bool_or_null(
                    tokens.next(),
                )?,
            );
        }
        // Forward compatibility: ignore members this client doesn't know about.
        _ => smithy_json::deserialize::token::skip_value(tokens)?,
    }
}
_ => {
    return Err(smithy_json::deserialize::Error::custom(
        "expected object key or end object",
    ))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
    "expected start object or null",
)),
}
}

/// Deserializes an `InstantBooking` structure from a JSON token stream.
///
/// Consumes tokens from `tokens` starting at the value position: a JSON
/// `null` yields `Ok(None)`, an object is read key-by-key into the
/// generated builder, and any other token kind produces a custom
/// deserialize error. Unknown object keys are skipped so newer service
/// responses still parse.
pub fn deser_structure_instant_booking<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::InstantBooking>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::InstantBooking::builder();
            // Read members until the matching EndObject token is seen.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "DurationInMinutes" => {
                                builder = builder.set_duration_in_minutes(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
// (continuation) Finish the "DurationInMinutes" member: the JSON number,
// if present, is converted to i32 for the builder.
.map(|v| v.to_i32()),
);
}
"Enabled" => {
    builder = builder.set_enabled(
        smithy_json::deserialize::token::expect_bool_or_null(
            tokens.next(),
        )?,
    );
}
// Forward compatibility: skip members this client doesn't know about.
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
    return Err(smithy_json::deserialize::Error::custom(
        "expected object key or end object",
    ))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
    "expected start object or null",
)),
}
}

/// Deserializes a `RequireCheckIn` structure from a JSON token stream.
///
/// A JSON `null` at the value position yields `Ok(None)`; an object is
/// read member-by-member into the generated builder; any other token is
/// reported as a custom deserialize error. Unknown keys are skipped.
pub fn deser_structure_require_check_in<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::RequireCheckIn>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::RequireCheckIn::builder();
            // Read members until the matching EndObject token is seen.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "ReleaseAfterMinutes" => {
                                builder = builder.set_release_after_minutes(
                                    smithy_json::deserialize::token::expect_number_or_null(
                                        tokens.next(),
                                    )?
// (continuation) Finish the "ReleaseAfterMinutes" member: the JSON
// number, if present, is converted to i32 for the builder.
.map(|v| v.to_i32()),
);
}
"Enabled" => {
    builder = builder.set_enabled(
        smithy_json::deserialize::token::expect_bool_or_null(
            tokens.next(),
        )?,
    );
}
// Forward compatibility: skip members this client doesn't know about.
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
    return Err(smithy_json::deserialize::Error::custom(
        "expected object key or end object",
    ))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
    "expected start object or null",
)),
}
}

/// Deserializes a `BusinessReportContentRange` structure from a JSON
/// token stream.
///
/// `null` yields `Ok(None)`; an object is read member-by-member into the
/// generated builder; any other token kind is a deserialize error.
/// Unknown keys are skipped for forward compatibility.
pub fn deser_structure_business_report_content_range<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::BusinessReportContentRange>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::BusinessReportContentRange::builder();
            // Read members until the matching EndObject token is seen.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "Interval" => {
                                builder = builder.set_interval(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
// (continuation) Finish the "Interval" member: unescape the JSON string
// and convert it into the generated BusinessReportInterval enum via its
// From<&str> impl.
.map(|s| {
    s.to_unescaped().map(|u| {
        crate::model::BusinessReportInterval::from(u.as_ref())
    })
})
.transpose()?,
);
}
// Forward compatibility: skip members this client doesn't know about.
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
    return Err(smithy_json::deserialize::Error::custom(
        "expected object key or end object",
    ))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom(
    "expected start object or null",
)),
}
}

/// Deserializes a `BusinessReportRecurrence` structure from a JSON token
/// stream.
///
/// `null` yields `Ok(None)`; an object is read member-by-member into the
/// generated builder ("StartDate" is the only member handled here; other
/// keys are skipped); any other token kind is a deserialize error.
pub fn deser_structure_business_report_recurrence<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::BusinessReportRecurrence>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::BusinessReportRecurrence::builder();
            // Read members until the matching EndObject token is seen.
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "StartDate" => {
                                builder = builder.set_start_date(
                                    smithy_json::deserialize::token::expect_string_or_null(
                                        tokens.next(),
                                    )?
                                    .map(|s| s.to_unescaped().map(|u| u.into_owned()))
                                    .transpose()?,
                                );
                            }
                            // Forward compatibility: skip unknown members.
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom(
                            "expected object key or end object",
                        ))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom(
            "expected start object or null",
        )),
    }
}

/// Deserializes a `BusinessReport` structure from a JSON token stream:
/// `null` becomes `Ok(None)`, an object is read into the generated
/// builder, anything else is a deserialize error.
pub fn deser_structure_business_report<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::BusinessReport>, smithy_json::deserialize::Error>
where
    I: Iterator<
        Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>,
    >,
{
    match tokens.next().transpose()?
{ Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::BusinessReport::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Status" => { builder = builder.set_status( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::BusinessReportStatus::from(u.as_ref()) }) }) .transpose()?, ); } "FailureCode" => { builder = builder.set_failure_code( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped().map(|u| { crate::model::BusinessReportFailureCode::from( u.as_ref(), ) }) }) .transpose()?, ); } "S3Location" => { builder = builder.set_s3_location( crate::json_deser::deser_structure_business_report_s3_location( tokens, )?, ); } "DeliveryTime" => { builder = builder.set_delivery_time( smithy_json::deserialize::token::expect_timestamp_or_null( tokens.next(), smithy_types::instant::Format::EpochSeconds, )?, ); } "DownloadUrl" => { builder = builder.set_download_url( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_sample_utterances<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } pub fn deser_structure_skill_details<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::SkillDetails>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::SkillDetails::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. 
}) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "ProductDescription" => { builder = builder.set_product_description( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "InvocationPhrase" => { builder = builder.set_invocation_phrase( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "ReleaseDate" => { builder = builder.set_release_date( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "EndUserLicenseAgreement" => { builder = builder.set_end_user_license_agreement( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "GenericKeywords" => { builder = builder.set_generic_keywords( crate::json_deser::deser_list_generic_keywords(tokens)?, ); } "BulletPoints" => { builder = builder.set_bullet_points( crate::json_deser::deser_list_bullet_points(tokens)?, ); } "NewInThisVersionBulletPoints" => { builder = builder.set_new_in_this_version_bullet_points( crate::json_deser::deser_list_new_in_this_version_bullet_points(tokens)? 
); } "SkillTypes" => { builder = builder.set_skill_types( crate::json_deser::deser_list_skill_types(tokens)?, ); } "Reviews" => { builder = builder .set_reviews(crate::json_deser::deser_map_reviews(tokens)?); } "DeveloperInfo" => { builder = builder.set_developer_info( crate::json_deser::deser_structure_developer_info(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_device_status_detail<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::DeviceStatusDetail>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::DeviceStatusDetail::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Feature" => { builder = builder.set_feature( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped() .map(|u| crate::model::Feature::from(u.as_ref())) }) .transpose()?, ); } "Code" => { builder = builder.set_code( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| { s.to_unescaped().map(|u| { crate::model::DeviceStatusDetailCode::from(u.as_ref()) }) }) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_end_of_meeting_reminder_minutes_list<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<i32>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = smithy_json::deserialize::token::expect_number_or_null(tokens.next())? .map(|v| v.to_i32()); if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } pub fn deser_structure_business_report_s3_location<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::BusinessReportS3Location>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::BusinessReportS3Location::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. 
}) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "Path" => { builder = builder.set_path( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "BucketName" => { builder = builder.set_bucket_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_generic_keywords<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_bullet_points<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_new_in_this_version_bullet_points<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = smithy_json::deserialize::token::expect_string_or_null(tokens.next())? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list_skill_types<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_map_reviews<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result< Option<std::collections::HashMap<std::string::String, std::string::String>>, smithy_json::deserialize::Error, > where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { let mut map = std::collections::HashMap::new(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. 
}) => { let key = key.to_unescaped().map(|u| u.into_owned())?; let value = smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?; if let Some(value) = value { map.insert(key, value); } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(map)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_developer_info<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::DeveloperInfo>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::DeveloperInfo::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "DeveloperName" => { builder = builder.set_developer_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "PrivacyPolicy" => { builder = builder.set_privacy_policy( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Email" => { builder = builder.set_email( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "Url" => { builder = builder.set_url( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } }
use ethers::contract::Abigen;

// NOTE: `cargo:rerun-if-changed` does not understand glob patterns. The old
// value `./abis/*.json` registered a literal, non-existent path, which makes
// Cargo fall back to re-running the build script on every build. Pointing the
// directive at the directory itself makes Cargo scan its contents for changes.
fn main() {
    // Only re-run the builder script if the contract ABIs change.
    println!("cargo:rerun-if-changed=./abis");
    // bindgen("BalanceSheet");
    // bindgen("UniswapV2Pair");
}

/// Generates Rust bindings for the contract ABI stored at
/// `./abis/<file_name>.json` and writes them to `./src/<file_name>.rs`
/// (lower-cased).
///
/// Panics if the ABI cannot be read, the bindings cannot be generated, or the
/// output file cannot be written — acceptable in a build script, where a panic
/// simply fails the build with the given message.
#[allow(dead_code)]
fn bindgen(file_name: &str) {
    let bindings = Abigen::new(file_name, format!("./abis/{}.json", file_name))
        .expect("Could not instantiate Abigen")
        .generate()
        .expect("Could not generate bindings");

    bindings
        .write_to_file(format!("./src/{}.rs", file_name.to_lowercase()))
        .expect("Could not write bindings to file");
}
use std::any::type_name;

#[cfg(feature = "use_serde")]
use serde::{Deserialize, Serialize};

use crate::clickhouse::compacted_tables::schema::{
    AggregationMethod, ClickhouseDataType, CompressionMethod, ValidateSchema,
};
use crate::{Error, Named};

/// Describes a single column of a compacted-tables schema, including how its
/// values behave when data is aggregated to coarser H3 resolutions.
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "use_serde", derive(Serialize, Deserialize))]
pub enum ColumnDefinition {
    /// a simple column which just stores data.
    /// The data will not get modified when the values get aggregated to coarser resolutions.
    Simple(SimpleColumn),

    /// a column storing an h3index
    /// h3 indexes will always be brought the resolution of the coarser table when generating parent
    /// resolutions
    H3Index,

    /// data stored in this column will be aggregated using the specified aggregation
    /// method when the coarser resolutions are generated
    ///
    /// Aggregation only happens **within** the batch written to
    /// the tables.
    WithAggregation(SimpleColumn, AggregationMethod),
}

impl ColumnDefinition {
    /// The ClickHouse datatype stored in this column.
    ///
    /// `H3Index` columns are always stored as `UInt64`; the other variants use
    /// the datatype configured on their embedded `SimpleColumn`.
    pub fn datatype(&self) -> ClickhouseDataType {
        match self {
            Self::H3Index => ClickhouseDataType::UInt64,
            Self::Simple(sc) => sc.datatype,
            Self::WithAggregation(sc, _) => sc.datatype,
        }
    }

    /// position in the sorting key (`ORDER BY`) in MergeTree tables
    /// which can be understood as a form of a primary key. Please consult
    /// <https://clickhouse.tech/docs/en/engines/table-engines/mergetree-family/mergetree/>
    /// for more
    ///
    /// `H3Index` columns always come first (`Some(0)`); other columns use the
    /// position configured on their `SimpleColumn`, or `None` when they are
    /// not part of the sorting key.
    pub fn order_key_position(&self) -> Option<u8> {
        match self {
            Self::H3Index => Some(0),
            Self::Simple(sc) => sc.order_key_position,
            Self::WithAggregation(sc, _) => sc.order_key_position,
        }
    }

    /// The per-column compression method, if one was configured.
    /// `H3Index` columns never carry a compression override.
    pub fn compression_method(&self) -> Option<&CompressionMethod> {
        match self {
            ColumnDefinition::Simple(sc) => sc.compression_method.as_ref(),
            ColumnDefinition::H3Index => None,
            ColumnDefinition::WithAggregation(sc, _) => sc.compression_method.as_ref(),
        }
    }

    /// Whether this column may contain NULL values.
    /// `H3Index` columns are always non-nullable.
    pub fn nullable(&self) -> bool {
        match self {
            ColumnDefinition::Simple(sc) => sc.nullable,
            ColumnDefinition::H3Index => false,
            ColumnDefinition::WithAggregation(sc, _) => sc.nullable,
        }
    }

    /// Whether this column prevents table compaction. Only aggregated columns
    /// can do so; the decision is delegated to the `AggregationMethod`.
    pub fn disables_compaction(&self) -> bool {
        match self {
            ColumnDefinition::WithAggregation(_, am) => am.disables_compaction(),
            _ => false,
        }
    }
}

impl ValidateSchema for ColumnDefinition {
    /// Validates that the configured aggregation method is applicable to the
    /// column's datatype (taking nullability into account). Only
    /// `WithAggregation` columns carry constraints; all other variants are
    /// always valid.
    fn validate(&self) -> Result<(), Error> {
        if let Self::WithAggregation(simple_column, aggregation_method) = self {
            // the aggregation method must support the (possibly nullable)
            // datatype it is applied to
            if !(aggregation_method
                .is_applicable_to_datatype(&simple_column.datatype, simple_column.nullable))
            {
                return Err(Error::SchemaValidationError(
                    type_name::<Self>(),
                    format!(
                        "aggregation {} can not be applied to {} datatype {}",
                        aggregation_method.name(),
                        if simple_column.nullable {
                            "nullable"
                        } else {
                            "non-nullable"
                        },
                        simple_column.datatype.name()
                    ),
                ));
            }
        }
        Ok(())
    }
}

/// serde default for `SimpleColumn::nullable`: columns are non-nullable unless
/// explicitly configured otherwise.
#[cfg(feature = "use_serde")]
const fn default_nullable() -> bool {
    false
}

/// Plain data column without any aggregation behavior of its own.
#[derive(Debug, PartialEq, Eq, Clone)]
#[cfg_attr(feature = "use_serde", derive(Serialize, Deserialize))]
pub struct SimpleColumn {
    // the ClickHouse datatype of the column
    datatype: ClickhouseDataType,
    /// position in the sorting key (`ORDER BY`) in MergeTree tables
    /// which can be understood as a form of a primary key. Please consult
    /// <https://clickhouse.tech/docs/en/engines/table-engines/mergetree-family/mergetree/>
    /// for more
    order_key_position: Option<u8>,
    // optional per-column compression override
    compression_method: Option<CompressionMethod>,
    // whether the column may contain NULLs; defaults to false when deserialized
    #[cfg_attr(feature = "use_serde", serde(default = "default_nullable"))]
    nullable: bool,
}

impl SimpleColumn {
    /// Creates a new simple column definition.
    pub fn new(
        datatype: ClickhouseDataType,
        order_key_position: Option<u8>,
        compression_method: Option<CompressionMethod>,
        nullable: bool,
    ) -> Self {
        Self {
            datatype,
            order_key_position,
            compression_method,
            nullable,
        }
    }
}
#![allow(missing_docs)]

//! Document tree traversal to walk a shared borrow of a document tree.
//!
//! Each method of the [`Visit`] trait is a hook that can be overridden
//! to customize the behavior when visiting the corresponding type of node.
//! By default, every method recursively visits the substructure of the
//! input by invoking the right visitor method of each of its fields.
//!
//! ```
//! # use toml_edit::{Item, ArrayOfTables, Table, Value};
//!
//! pub trait Visit<'doc> {
//!     /* ... */
//!
//!     fn visit_item(&mut self, i: &'doc Item) {
//!         visit_item(self, i);
//!     }
//!
//!     /* ... */
//!     # fn visit_value(&mut self, i: &'doc Value);
//!     # fn visit_table(&mut self, i: &'doc Table);
//!     # fn visit_array_of_tables(&mut self, i: &'doc ArrayOfTables);
//! }
//!
//! pub fn visit_item<'doc, V>(v: &mut V, node: &'doc Item)
//! where
//!     V: Visit<'doc> + ?Sized,
//! {
//!     match node {
//!         Item::None => {}
//!         Item::Value(value) => v.visit_value(value),
//!         Item::Table(table) => v.visit_table(table),
//!         Item::ArrayOfTables(array) => v.visit_array_of_tables(array),
//!     }
//! }
//! ```
//!
//! The API is modeled after [`syn::visit`](https://docs.rs/syn/1/syn/visit).
//!
//! # Examples
//!
//! This visitor stores every string in the document.
//!
//! ```
//! # use toml_edit::*;
//! use toml_edit::visit::*;
//!
//! #[derive(Default)]
//! struct StringCollector<'doc> {
//!     strings: Vec<&'doc str>,
//! }
//!
//! impl<'doc> Visit<'doc> for StringCollector<'doc> {
//!     fn visit_string(&mut self, node: &'doc Formatted<String>) {
//!         self.strings.push(node.value().as_str());
//!     }
//! }
//!
//! let input = r#"
//! laputa = "sky-castle"
//! the-force = { value = "surrounds-you" }
//! "#;
//!
//! let mut document: Document = input.parse().unwrap();
//! let mut visitor = StringCollector::default();
//! visitor.visit_document(&document);
//!
//! assert_eq!(visitor.strings, vec!["sky-castle", "surrounds-you"]);
//! ```
//!
//! For a more complex example where the visitor has internal state, see `examples/visit.rs`
//! [on GitHub](https://github.com/ordian/toml_edit/blob/master/examples/visit.rs).

use crate::{
    Array, ArrayOfTables, Datetime, Document, Formatted, InlineTable, Item, Table, TableLike,
    Value,
};

/// Document tree traversal to walk a shared borrow of a document tree.
///
/// (Every method takes a `&'doc` shared reference; the previous doc comment
/// describing an "exclusive borrow ... in-place" was copy-pasted from the
/// mutable counterpart of this trait.)
///
/// See the [module documentation](self) for details.
pub trait Visit<'doc> {
    fn visit_document(&mut self, node: &'doc Document) {
        visit_document(self, node);
    }

    fn visit_item(&mut self, node: &'doc Item) {
        visit_item(self, node);
    }

    fn visit_table(&mut self, node: &'doc Table) {
        visit_table(self, node);
    }

    fn visit_inline_table(&mut self, node: &'doc InlineTable) {
        visit_inline_table(self, node)
    }

    /// Shared hook: both [`Table`] and [`InlineTable`] funnel through here by
    /// default, so overriding this one method covers both.
    fn visit_table_like(&mut self, node: &'doc dyn TableLike) {
        visit_table_like(self, node);
    }

    fn visit_table_like_kv(&mut self, key: &'doc str, node: &'doc Item) {
        visit_table_like_kv(self, key, node);
    }

    fn visit_array(&mut self, node: &'doc Array) {
        visit_array(self, node);
    }

    fn visit_array_of_tables(&mut self, node: &'doc ArrayOfTables) {
        visit_array_of_tables(self, node);
    }

    fn visit_value(&mut self, node: &'doc Value) {
        visit_value(self, node);
    }

    fn visit_boolean(&mut self, node: &'doc Formatted<bool>) {
        visit_boolean(self, node)
    }

    fn visit_datetime(&mut self, node: &'doc Formatted<Datetime>) {
        visit_datetime(self, node);
    }

    fn visit_float(&mut self, node: &'doc Formatted<f64>) {
        visit_float(self, node)
    }

    fn visit_integer(&mut self, node: &'doc Formatted<i64>) {
        visit_integer(self, node)
    }

    fn visit_string(&mut self, node: &'doc Formatted<String>) {
        visit_string(self, node)
    }
}

/// Default behavior: a document is visited as its root table.
pub fn visit_document<'doc, V>(v: &mut V, node: &'doc Document)
where
    V: Visit<'doc> + ?Sized,
{
    v.visit_table(node.as_table());
}

/// Default behavior: dispatch on the item kind; `Item::None` has nothing to visit.
pub fn visit_item<'doc, V>(v: &mut V, node: &'doc Item)
where
    V: Visit<'doc> + ?Sized,
{
    match node {
        Item::None => {}
        Item::Value(value) => v.visit_value(value),
        Item::Table(table) => v.visit_table(table),
        Item::ArrayOfTables(array) => v.visit_array_of_tables(array),
    }
}

pub fn visit_table<'doc, V>(v: &mut V, node: &'doc Table)
where
    V: Visit<'doc> + ?Sized,
{
    v.visit_table_like(node)
}

pub fn visit_inline_table<'doc, V>(v: &mut V, node: &'doc InlineTable)
where
    V: Visit<'doc> + ?Sized,
{
    v.visit_table_like(node)
}

/// Default behavior: visit every key/value pair of the table-like node.
pub fn visit_table_like<'doc, V>(v: &mut V, node: &'doc dyn TableLike)
where
    V: Visit<'doc> + ?Sized,
{
    for (key, item) in node.iter() {
        v.visit_table_like_kv(key, item)
    }
}

pub fn visit_table_like_kv<'doc, V>(v: &mut V, _key: &'doc str, node: &'doc Item)
where
    V: Visit<'doc> + ?Sized,
{
    v.visit_item(node)
}

pub fn visit_array<'doc, V>(v: &mut V, node: &'doc Array)
where
    V: Visit<'doc> + ?Sized,
{
    for value in node.iter() {
        v.visit_value(value);
    }
}

pub fn visit_array_of_tables<'doc, V>(v: &mut V, node: &'doc ArrayOfTables)
where
    V: Visit<'doc> + ?Sized,
{
    for table in node.iter() {
        v.visit_table(table);
    }
}

/// Default behavior: dispatch to the visitor method matching the value's variant.
pub fn visit_value<'doc, V>(v: &mut V, node: &'doc Value)
where
    V: Visit<'doc> + ?Sized,
{
    match node {
        Value::String(s) => v.visit_string(s),
        Value::Integer(i) => v.visit_integer(i),
        Value::Float(f) => v.visit_float(f),
        Value::Boolean(b) => v.visit_boolean(b),
        Value::Datetime(dt) => v.visit_datetime(dt),
        Value::Array(array) => v.visit_array(array),
        Value::InlineTable(table) => v.visit_inline_table(table),
    }
}

// Leaf nodes have no substructure, so their default free function is a no-op.
macro_rules! empty_visit {
    ($name: ident, $t: ty) => {
        fn $name<'doc, V>(_v: &mut V, _node: &'doc $t)
        where
            V: Visit<'doc> + ?Sized,
        {
        }
    };
}

empty_visit!(visit_boolean, Formatted<bool>);
empty_visit!(visit_datetime, Formatted<Datetime>);
empty_visit!(visit_float, Formatted<f64>);
empty_visit!(visit_integer, Formatted<i64>);
empty_visit!(visit_string, Formatted<String>);
use projecteuler::partition;
use projecteuler::primes;

fn main() {
    //dbg!(binomial::binomial_coefficient(100, 50));
    dbg!(solve(4));
    dbg!(solve(5));
    dbg!(solve(6));
    dbg!(solve(100));
    dbg!(solve(5_000));
}

/// Returns the smallest value that can be written as a sum of primes in more
/// than `n` different ways (this looks like Project Euler problem 77 —
/// confirm against the surrounding project).
///
/// NOTE(review): `primes_iterator()` apparently yields candidate items
/// carrying an `is_prime` flag; the `filter` keeps only actual primes —
/// verify against the `primes` module.
fn solve(n: usize) -> usize {
    let primes: Vec<usize> = primes::primes_iterator()
        .filter(|p| p.is_prime())
        .map(|p| p.get_prime())
        //since partition_into overflows for values higher than 416, we don't have to bother with them.
        .take_while(|p| *p <= 416)
        .collect();

    // count upwards until the number of prime partitions of `i` exceeds n
    let mut i = 0;
    loop {
        i += 1;

        if partition::partition_into::<_, _, usize>(i, &primes) > n {
            return i;
        }
    }
}
// Crate-internal submodules, split by the async primitive they deal with
// (futures, sinks, streams). `pub(crate)` keeps them out of the public API.
pub(crate) mod future;
pub(crate) mod sink;
pub(crate) mod stream;
use bitflags::bitflags;

/// Terminal colors supported by styles.
///
/// `Reset` restores the terminal's default color, `Rgb` is a 24-bit
/// truecolor value, and `Indexed` selects an entry of the 256-color palette.
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum Color {
    Reset,
    Black,
    Red,
    Green,
    Yellow,
    Blue,
    Magenta,
    Cyan,
    Gray,
    DarkGray,
    LightRed,
    LightGreen,
    LightYellow,
    LightBlue,
    LightMagenta,
    LightCyan,
    White,
    /// 24-bit color: red, green, blue components.
    Rgb(u8, u8, u8),
    /// One of the 256 indexed palette colors.
    Indexed(u8),
}

bitflags! {
    /// Text attribute flags; combine them with the bitwise operators
    /// provided by `bitflags` (e.g. `Modifier::BOLD | Modifier::ITALIC`).
    #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
    pub struct Modifier: u16 {
        const BOLD = 0b0000_0000_0001;
        const DIM = 0b0000_0000_0010;
        const ITALIC = 0b0000_0000_0100;
        const UNDERLINED = 0b0000_0000_1000;
        const SLOW_BLINK = 0b0000_0001_0000;
        const RAPID_BLINK = 0b0000_0010_0000;
        const REVERSED = 0b0000_0100_0000;
        const HIDDEN = 0b0000_1000_0000;
        const CROSSED_OUT = 0b0001_0000_0000;
    }
}

/// The visual style of a rendered element: foreground and background colors
/// plus a set of text modifiers.
#[derive(Debug, Clone, Copy, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Style {
    pub fg: Color,
    pub bg: Color,
    pub modifier: Modifier,
}

impl Default for Style {
    // The default style is the neutral style produced by `Style::new`.
    fn default() -> Style {
        Style::new()
    }
}

impl Style {
    /// Neutral style: default terminal colors and no modifiers.
    /// `const` so styles can be built in constants/statics.
    pub const fn new() -> Self {
        Style {
            fg: Color::Reset,
            bg: Color::Reset,
            modifier: Modifier::empty(),
        }
    }

    /// Resets this style in place to the neutral state (same fields as
    /// [`Style::new`]).
    pub fn reset(&mut self) {
        self.fg = Color::Reset;
        self.bg = Color::Reset;
        self.modifier = Modifier::empty();
    }

    /// Builder-style setter: returns a copy with the foreground color replaced.
    pub const fn fg(mut self, color: Color) -> Style {
        self.fg = color;
        self
    }

    /// Builder-style setter: returns a copy with the background color replaced.
    pub const fn bg(mut self, color: Color) -> Style {
        self.bg = color;
        self
    }

    /// Builder-style setter: returns a copy with the whole modifier set replaced
    /// (it does not OR into the existing flags).
    pub const fn modifier(mut self, modifier: Modifier) -> Style {
        self.modifier = modifier;
        self
    }
}
/// Placeholder entry point: just prints a greeting for "Day 1".
fn main() {
    println!("Hello, world! Day 1.");
}
/// This part of the program is shared between `ibsc` and `ibswm` to easily manage command. If you /// modify this part, please also check the `ibswm` package. use clap::Clap; #[derive(Clap, Debug)] #[clap(version = "1.0", author = "Cyril Mizzi <me@p1ngouin.com>")] pub struct Opts { /// Verbosity. By default, will only log ERROR level. #[clap(short, long, parse(from_occurrences))] pub verbose: i32, #[clap(short, long, default_value = "/tmp/ibswm.sock")] pub socket: String, #[clap(subcommand)] pub command: SubCommand, } #[derive(Clap, Debug)] pub enum SubCommand { /// Configure a monitor. #[clap(version = "1.0", author = "Cyril Mizzi <me@p1ngouin.com>")] Config(Config), /// Define a monitor. #[clap(version = "1.0", author = "Cyril Mizzi <me@p1ngouin.com>")] Monitor(Monitor), /// Apply a window rule. #[clap(version = "1.0", author = "Cyril Mizzi <me@p1ngouin.com>")] Rule(Rule), } #[derive(Clap, Debug)] pub struct Config { /// Apply configuration on a specific monitor. #[clap(short, long)] pub monitor: Option<String>, /// Apply configuration on a specific monitor. pub key: String, /// Apply configuration on a specific monitor. pub value: Option<String>, } #[derive(Clap, Debug)] pub struct Monitor { /// Monitor name. pub monitor: String, /// Custom monitor name. #[clap(short, long)] pub name: Option<String>, /// List of desktops to create. #[clap(short, long)] pub desktops: Vec<String>, } #[derive(Clap, Debug)] pub struct Rule { /// Default states to define on new application matching the <application> name. #[clap(short, long)] pub state: Option<Vec<String>>, /// Default desktop to map new application matching the <application> name. #[clap(short, long)] pub desktop: Option<String>, /// Application X11 name. pub application: String, }
//! Tests that use `liblumen_otp` with external dependencies to check that `lumen` can compile the //! external dependencies and that `liblumen_otp` is supplying all the needed BIFs for those //! external dependencies. These would normally be tests in the dependencies since they are built //! on top of OTP, but since the dependencies don't know that `lumen` exists, we test them here //! instead. #[macro_use] #[path = "test.rs"] mod test; #[path = "external/lumen.rs"] mod lumen;
/// Fuel required to launch a module of mass `in_val`: `floor(mass / 3) - 2`.
///
/// Uses exact integer Euclidean division instead of the previous `f32`
/// round-trip: converting an `i32` to `f32` silently loses precision for
/// magnitudes above 2^24, while `div_euclid(3)` matches `floor(x / 3.0)` for
/// every `i32` input.
fn calc_fuel(in_val: i32) -> i32 {
    in_val.div_euclid(3) - 2
}

/// Total fuel for a module, accounting for the mass of the fuel itself:
/// repeatedly feed the previous fuel amount back through [`calc_fuel`] and
/// accumulate until the additional requirement drops below zero.
fn calc_fuel_2(in_val: i32) -> i32 {
    let mut fuel_total = 0;
    let mut next_fuel_val = in_val;

    loop {
        next_fuel_val = calc_fuel(next_fuel_val);
        // a negative requirement contributes nothing further
        if next_fuel_val < 0 {
            break;
        }
        fuel_total += next_fuel_val;
    }

    fuel_total
}

/// Reads one module mass per line from the embedded puzzle input and prints
/// the part 1 and part 2 fuel totals. Panics on a malformed input line.
fn main() {
    let mass_vals = include_str!("../input/day_1.txt");
    println!(
        "Part 1: {}",
        mass_vals
            .lines()
            .map(|l| calc_fuel(l.parse::<i32>().unwrap()))
            .sum::<i32>()
    );
    println!(
        "Part 2: {}",
        mass_vals
            .lines()
            .map(|l| calc_fuel_2(l.parse::<i32>().unwrap()))
            .sum::<i32>()
    );
}

#[test]
fn part_1_tests() {
    assert_eq!(calc_fuel(12), 2);
    assert_eq!(calc_fuel(14), 2);
    assert_eq!(calc_fuel(1969), 654);
    assert_eq!(calc_fuel(100756), 33583);
}

#[test]
fn part_2_test() {
    assert_eq!(calc_fuel_2(14), 2);
    assert_eq!(calc_fuel_2(1969), 966);
    assert_eq!(calc_fuel_2(100756), 50346);
}
/// Placeholder entry point: prints the program name and version string only.
fn main() {
    println!("vis 0.0.0");
}
use indexmap::IndexMap; use crate::dynamic::{ base::{BaseContainer, BaseField}, schema::SchemaInner, InputObject, Interface, SchemaError, Type, }; impl SchemaInner { pub(crate) fn check(&self) -> Result<(), SchemaError> { self.check_types_exists()?; self.check_root_types()?; self.check_objects()?; self.check_input_objects()?; self.check_interfaces()?; self.check_unions()?; Ok(()) } fn check_root_types(&self) -> Result<(), SchemaError> { if let Some(ty) = self.types.get(&self.env.registry.query_type) { if !matches!(ty, Type::Object(_)) { return Err("The query root must be an object".into()); } } if let Some(mutation_type) = &self.env.registry.mutation_type { if let Some(ty) = self.types.get(mutation_type) { if !matches!(ty, Type::Object(_)) { return Err("The mutation root must be an object".into()); } } } if let Some(subscription_type) = &self.env.registry.subscription_type { if let Some(ty) = self.types.get(subscription_type) { if !matches!(ty, Type::Subscription(_)) { return Err("The subsciprtion root must be an subscription object".into()); } } } Ok(()) } fn check_types_exists(&self) -> Result<(), SchemaError> { fn check<I: IntoIterator<Item = T>, T: AsRef<str>>( types: &IndexMap<String, Type>, type_names: I, ) -> Result<(), SchemaError> { for name in type_names { if !types.contains_key(name.as_ref()) { return Err(format!("Type \"{0}\" not found", name.as_ref()).into()); } } Ok(()) } check( &self.types, std::iter::once(self.env.registry.query_type.as_str()) .chain(self.env.registry.mutation_type.as_deref()), )?; for ty in self.types.values() { match ty { Type::Object(obj) => check( &self.types, obj.fields .values() .map(|field| { std::iter::once(field.ty.type_name()) .chain(field.arguments.values().map(|arg| arg.ty.type_name())) }) .flatten() .chain(obj.implements.iter().map(AsRef::as_ref)), )?, Type::InputObject(obj) => { check( &self.types, obj.fields.values().map(|field| field.ty.type_name()), )?; } Type::Interface(interface) => check( &self.types, interface 
.fields .values() .map(|field| { std::iter::once(field.ty.type_name()) .chain(field.arguments.values().map(|arg| arg.ty.type_name())) }) .flatten(), )?, Type::Union(union) => check(&self.types, &union.possible_types)?, Type::Subscription(subscription) => check( &self.types, subscription .fields .values() .map(|field| { std::iter::once(field.ty.type_name()) .chain(field.arguments.values().map(|arg| arg.ty.type_name())) }) .flatten(), )?, Type::Scalar(_) | Type::Enum(_) => {} } } Ok(()) } fn check_objects(&self) -> Result<(), SchemaError> { // https://spec.graphql.org/October2021/#sec-Objects.Type-Validation for ty in self.types.values() { if let Type::Object(obj) = ty { // An Object type must define one or more fields. if obj.fields.is_empty() { return Err( format!("Object \"{}\" must define one or more fields", obj.name).into(), ); } for field in obj.fields.values() { // The field must not have a name which begins with the characters "__" (two // underscores) if field.name.starts_with("__") { return Err(format!("Field \"{}.{}\" must not have a name which begins with the characters \"__\" (two underscores)", obj.name, field.name).into()); } // The field must return a type where IsOutputType(fieldType) returns true. if let Some(ty) = self.types.get(field.ty.type_name()) { if !ty.is_output_type() { return Err(format!( "Field \"{}.{}\" must return a output type", obj.name, field.name ) .into()); } } for arg in field.arguments.values() { // The argument must not have a name which begins with the characters "__" // (two underscores). if arg.name.starts_with("__") { return Err(format!("Argument \"{}.{}.{}\" must not have a name which begins with the characters \"__\" (two underscores)", obj.name, field.name, arg.name).into()); } // The argument must accept a type where // IsInputType(argumentType) returns true. 
if let Some(ty) = self.types.get(arg.ty.type_name()) { if !ty.is_input_type() { return Err(format!( "Argument \"{}.{}.{}\" must accept a input type", obj.name, field.name, arg.name ) .into()); } } } } for interface_name in &obj.implements { if let Some(ty) = self.types.get(interface_name) { let interface = ty.as_interface().ok_or_else(|| { format!("Type \"{}\" is not interface", interface_name) })?; check_is_valid_implementation(obj, interface)?; } } } } Ok(()) } fn check_input_objects(&self) -> Result<(), SchemaError> { // https://spec.graphql.org/October2021/#sec-Input-Objects.Type-Validation for ty in self.types.values() { if let Type::InputObject(obj) = ty { for field in obj.fields.values() { // The field must not have a name which begins with the characters "__" (two // underscores) if field.name.starts_with("__") { return Err(format!("Field \"{}.{}\" must not have a name which begins with the characters \"__\" (two underscores)", obj.name, field.name).into()); } // The input field must accept a type where IsInputType(inputFieldType) returns // true. if let Some(ty) = self.types.get(field.ty.type_name()) { if !ty.is_input_type() { return Err(format!( "Field \"{}.{}\" must accept a input type", obj.name, field.name ) .into()); } } if obj.oneof { // The type of the input field must be nullable. if !field.ty.is_nullable() { return Err(format!( "Field \"{}.{}\" must be nullable", obj.name, field.name ) .into()); } // The input field must not have a default value. if field.default_value.is_some() { return Err(format!( "Field \"{}.{}\" must not have a default value", obj.name, field.name ) .into()); } } } // If an Input Object references itself either directly or // through referenced Input Objects, at least one of the // fields in the chain of references must be either a // nullable or a List type. 
self.check_input_object_reference(&obj.name, &obj)?; } } Ok(()) } fn check_input_object_reference( &self, current: &str, obj: &InputObject, ) -> Result<(), SchemaError> { for field in obj.fields.values() { if field.ty.type_name() == current { if !field.ty.is_named() && !field.ty.is_list() { return Err(format!("\"{}\" references itself either directly or through referenced Input Objects, at least one of the fields in the chain of references must be either a nullable or a List type.", current).into()); } } else if let Some(obj) = self .types .get(field.ty.type_name()) .and_then(Type::as_input_object) { self.check_input_object_reference(current, obj)?; } } Ok(()) } fn check_interfaces(&self) -> Result<(), SchemaError> { // https://spec.graphql.org/October2021/#sec-Interfaces.Type-Validation for ty in self.types.values() { if let Type::Interface(interface) = ty { for field in interface.fields.values() { // The field must not have a name which begins with the characters "__" (two // underscores) if field.name.starts_with("__") { return Err(format!("Field \"{}.{}\" must not have a name which begins with the characters \"__\" (two underscores)", interface.name, field.name).into()); } // The field must return a type where IsOutputType(fieldType) returns true. if let Some(ty) = self.types.get(field.ty.type_name()) { if !ty.is_output_type() { return Err(format!( "Field \"{}.{}\" must return a output type", interface.name, field.name ) .into()); } } // The field must return a type where IsOutputType(fieldType) returns true. if let Some(ty) = self.types.get(field.ty.type_name()) { if !ty.is_output_type() { return Err(format!( "Field \"{}.{}\" must return a output type", interface.name, field.name ) .into()); } } for arg in field.arguments.values() { // The argument must not have a name which begins with the characters "__" // (two underscores). 
if arg.name.starts_with("__") { return Err(format!("Argument \"{}.{}.{}\" must not have a name which begins with the characters \"__\" (two underscores)", interface.name, field.name, arg.name).into()); } // The argument must accept a type where // IsInputType(argumentType) returns true. if let Some(ty) = self.types.get(arg.ty.type_name()) { if !ty.is_input_type() { return Err(format!( "Argument \"{}.{}.{}\" must accept a input type", interface.name, field.name, arg.name ) .into()); } } } // An interface type may declare that it implements one or more unique // interfaces, but may not implement itself. if interface.implements.contains(&interface.name) { return Err(format!( "Interface \"{}\" may not implement itself", interface.name ) .into()); } // An interface type must be a super-set of all interfaces // it implements for interface_name in &interface.implements { if let Some(ty) = self.types.get(interface_name) { let implemenented_type = ty.as_interface().ok_or_else(|| { format!("Type \"{}\" is not interface", interface_name) })?; check_is_valid_implementation(interface, implemenented_type)?; } } } } } Ok(()) } fn check_unions(&self) -> Result<(), SchemaError> { // https://spec.graphql.org/October2021/#sec-Unions.Type-Validation for ty in self.types.values() { if let Type::Union(union) = ty { // The member types of a Union type must all be Object base // types; Scalar, Interface and Union types must not be member // types of a Union. Similarly, wrapping types must not be // member types of a Union. 
for type_name in &union.possible_types { if let Some(ty) = self.types.get(type_name) { if ty.as_object().is_none() { return Err(format!( "Member \"{}\" of union \"{}\" is not an object", type_name, union.name ) .into()); } } } } } Ok(()) } } fn check_is_valid_implementation( implementing_type: &impl BaseContainer, implemented_type: &Interface, ) -> Result<(), SchemaError> { for field in implemented_type.fields.values() { let impl_field = implementing_type.field(&field.name).ok_or_else(|| { format!( "{} \"{}\" requires field \"{}\" defined by interface \"{}\"", implementing_type.graphql_type(), implementing_type.name(), field.name, implemented_type.name ) })?; for arg in field.arguments.values() { let impl_arg = match impl_field.argument(&arg.name) { Some(impl_arg) => impl_arg, None if !arg.ty.is_nullable() => { return Err(format!( "Field \"{}.{}\" requires argument \"{}\" defined by interface \"{}.{}\"", implementing_type.name(), field.name, arg.name, implemented_type.name, field.name, ) .into()); } None => continue, }; if !arg.ty.is_subtype(&impl_arg.ty) { return Err(format!( "Argument \"{}.{}.{}\" is not sub-type of \"{}.{}.{}\"", implemented_type.name, field.name, arg.name, implementing_type.name(), field.name, arg.name ) .into()); } } // field must return a type which is equal to or a sub-type of (covariant) the // return type of implementedField field’s return type if !impl_field.ty().is_subtype(&field.ty) { return Err(format!( "Field \"{}.{}\" is not sub-type of \"{}.{}\"", implementing_type.name(), field.name, implemented_type.name, field.name, ) .into()); } } Ok(()) }
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// This crate attempts to enumerate the various scenarios for how a
// type can define fields and methods with various visibilities and
// stabilities.
//
// The basic stability pattern in this file has four cases:
// 1. no stability attribute at all
// 2. a stable attribute (feature "unit_test")
// 3. an unstable attribute that the unit test declares (feature "unstable_declared")
// 4. an unstable attribute that the unit test fails to declare (feature "unstable_undeclared")
//
// This file also covers four kinds of visibility: private,
// pub(module), pub(crate), and pub.
//
// However, since stability attributes can only be observed in
// cross-crate linkage scenarios, there is little reason to take the
// cross-product (4 stability cases * 4 visibility cases), because the
// first three visibility cases cannot be accessed outside this crate,
// and therefore stability is only relevant when the visibility is pub
// to the whole universe.
//
// (The only reason to do so would be if one were worried about the
// compiler having some subtle bug where adding a stability attribute
// introduces a privacy violation.
// As a way to provide evidence that
// this is not occurring, I have put stability attributes on some
// non-pub fields, marked with SILLY below)

// NOTE(review): this is a compiler test fixture; the exact combinations of
// stability attributes and visibilities below are deliberate — do not
// "clean up" the SILLY cases.

#![feature(staged_api)]

#![stable(feature = "unit_test", since = "1.0.0")]

#[stable(feature = "unit_test", since = "1.0.0")]
pub use m::{Record, Trait, Tuple};

mod m {
    #[derive(Default)]
    #[stable(feature = "unit_test", since = "1.0.0")]
    pub struct Record {
        #[stable(feature = "unit_test", since = "1.0.0")]
        pub a_stable_pub: i32,
        #[unstable(feature = "unstable_declared", issue = "38412")]
        pub a_unstable_declared_pub: i32,
        #[unstable(feature = "unstable_undeclared", issue = "38412")]
        pub a_unstable_undeclared_pub: i32,
        // Stability attributes on non-pub fields are unobservable cross-crate.
        #[unstable(feature = "unstable_undeclared", issue = "38412")] // SILLY
        pub(crate) b_crate: i32,
        #[unstable(feature = "unstable_declared", issue = "38412")] // SILLY
        pub(in m) c_mod: i32,
        #[stable(feature = "unit_test", since = "1.0.0")] // SILLY
        d_priv: i32
    }

    #[derive(Default)]
    #[stable(feature = "unit_test", since = "1.0.0")]
    pub struct Tuple(
        #[stable(feature = "unit_test", since = "1.0.0")]
        pub i32,
        #[unstable(feature = "unstable_declared", issue = "38412")]
        pub i32,
        #[unstable(feature = "unstable_undeclared", issue = "38412")]
        pub i32,

        pub(crate) i32,
        pub(in m) i32,
        i32);

    impl Record {
        #[stable(feature = "unit_test", since = "1.0.0")]
        pub fn new() -> Self { Default::default() }
    }

    impl Tuple {
        #[stable(feature = "unit_test", since = "1.0.0")]
        pub fn new() -> Self { Default::default() }
    }

    #[stable(feature = "unit_test", since = "1.0.0")]
    pub trait Trait {
        #[stable(feature = "unit_test", since = "1.0.0")]
        type Type;
        #[stable(feature = "unit_test", since = "1.0.0")]
        fn stable_trait_method(&self) -> Self::Type;
        #[unstable(feature = "unstable_undeclared", issue = "38412")]
        fn unstable_undeclared_trait_method(&self) -> Self::Type;
        #[unstable(feature = "unstable_declared", issue = "38412")]
        fn unstable_declared_trait_method(&self) -> Self::Type;
    }

    #[stable(feature = "unit_test", since = "1.0.0")]
    impl Trait for Record {
        type Type = i32;
        fn stable_trait_method(&self) -> i32 { self.d_priv }
        fn unstable_undeclared_trait_method(&self) -> i32 { self.d_priv }
        fn unstable_declared_trait_method(&self) -> i32 { self.d_priv }
    }

    #[stable(feature = "unit_test", since = "1.0.0")]
    impl Trait for Tuple {
        type Type = i32;
        fn stable_trait_method(&self) -> i32 { self.3 }
        fn unstable_undeclared_trait_method(&self) -> i32 { self.3 }
        fn unstable_declared_trait_method(&self) -> i32 { self.3 }
    }

    impl Record {
        #[unstable(feature = "unstable_undeclared", issue = "38412")]
        pub fn unstable_undeclared(&self) -> i32 { self.d_priv }
        #[unstable(feature = "unstable_declared", issue = "38412")]
        pub fn unstable_declared(&self) -> i32 { self.d_priv }
        #[stable(feature = "unit_test", since = "1.0.0")]
        pub fn stable(&self) -> i32 { self.d_priv }

        // Stability attributes on non-pub methods are likewise unobservable.
        #[unstable(feature = "unstable_undeclared", issue = "38412")] // SILLY
        pub(crate) fn pub_crate(&self) -> i32 { self.d_priv }
        #[unstable(feature = "unstable_declared", issue = "38412")] // SILLY
        pub(in m) fn pub_mod(&self) -> i32 { self.d_priv }
        #[stable(feature = "unit_test", since = "1.0.0")] // SILLY
        fn private(&self) -> i32 { self.d_priv }
    }

    impl Tuple {
        #[unstable(feature = "unstable_undeclared", issue = "38412")]
        pub fn unstable_undeclared(&self) -> i32 { self.0 }
        #[unstable(feature = "unstable_declared", issue = "38412")]
        pub fn unstable_declared(&self) -> i32 { self.0 }
        #[stable(feature = "unit_test", since = "1.0.0")]
        pub fn stable(&self) -> i32 { self.0 }

        pub(crate) fn pub_crate(&self) -> i32 { self.0 }
        pub(in m) fn pub_mod(&self) -> i32 { self.0 }
        fn private(&self) -> i32 { self.0 }
    }
}
// Massbit dependencies use serde::{Deserialize}; #[allow(dead_code)] pub struct IndexManager { http_addr: String, } #[derive(Clone, Debug, Deserialize)] pub struct DeployParams { pub(crate) index_name: String, pub(crate) config_path: String, pub(crate) mapping_path: String, pub(crate) model_path: String, pub(crate) deploy_type: DeployType, } #[derive(Clone, Debug, Deserialize)] pub enum DeployType { Local, Ipfs, }
// NOTE(review): svd2rust-generated register accessors for the timer OR1
// (option register). Any hand edits here will be lost on regeneration from
// the SVD file — change the SVD/codegen instead.
#[doc = "Reader of register OR1"]
pub type R = crate::R<u32, super::OR1>;
#[doc = "Writer for register OR1"]
pub type W = crate::W<u32, super::OR1>;
#[doc = "Register OR1 `reset()`'s with value 0"]
impl crate::ResetValue for super::OR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `ITR1_RMP`"]
pub type ITR1_RMP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ITR1_RMP`"]
pub struct ITR1_RMP_W<'a> {
    w: &'a mut W,
}
impl<'a> ITR1_RMP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at position 0.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `TI4_RMP`"]
pub type TI4_RMP_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TI4_RMP`"]
pub struct TI4_RMP_W<'a> {
    w: &'a mut W,
}
impl<'a> TI4_RMP_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Two-bit field at positions 2..=3.
        self.w.bits = (self.w.bits & !(0x03 << 2)) | (((value as u32) & 0x03) << 2);
        self.w
    }
}
#[doc = "Reader of field `ETR1_RMP`"]
pub type ETR1_RMP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ETR1_RMP`"]
pub struct ETR1_RMP_W<'a> {
    w: &'a mut W,
}
impl<'a> ETR1_RMP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Single-bit field at position 1.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
impl R {
    #[doc = "Bit 0 - Internal trigger 1 remap"]
    #[inline(always)]
    pub fn itr1_rmp(&self) -> ITR1_RMP_R {
        ITR1_RMP_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bits 2:3 - Input Capture 4 remap"]
    #[inline(always)]
    pub fn ti4_rmp(&self) -> TI4_RMP_R {
        TI4_RMP_R::new(((self.bits >> 2) & 0x03) as u8)
    }
    #[doc = "Bit 1 - External trigger remap"]
    #[inline(always)]
    pub fn etr1_rmp(&self) -> ETR1_RMP_R {
        ETR1_RMP_R::new(((self.bits >> 1) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - Internal trigger 1 remap"]
    #[inline(always)]
    pub fn itr1_rmp(&mut self) -> ITR1_RMP_W {
        ITR1_RMP_W { w: self }
    }
    #[doc = "Bits 2:3 - Input Capture 4 remap"]
    #[inline(always)]
    pub fn ti4_rmp(&mut self) -> TI4_RMP_W {
        TI4_RMP_W { w: self }
    }
    #[doc = "Bit 1 - External trigger remap"]
    #[inline(always)]
    pub fn etr1_rmp(&mut self) -> ETR1_RMP_W {
        ETR1_RMP_W { w: self }
    }
}
use crate::actor::Actor;
use crate::Battle;
use lazy_static::lazy_static;
use std::collections::HashMap;

/// Builds a `CardTemplate` from an existing template: clones `$parent`
/// and then overrides the listed attributes via struct-update syntax.
macro_rules! new_card {
    ($name:expr, $parent:expr, $($atr:ident = $set:expr),*) => {
        CardTemplate {
            name : $name,
            $($atr : $set,)*
            ..(*$parent).clone() // Clone the parent, then move the remaining fields out of the clone
        }
    }
}

/// Shorthand for building an `EffectPair`: effect variant, target, magnitude.
#[macro_export]
macro_rules! pair {
    ($eff:ident, $target:ident, $mag:expr) => {
        crate::card::EffectPair::new(Effect::$eff($mag), Target::$target) // Shorthand for effect, target, magnitude construction
    }
}

lazy_static! {
    /// The global card catalogue. NOTE: push order matters — the `new_card!`
    /// entries below clone `&v[0]` ("Defend"), so it must be pushed first.
    pub static ref CARDS: Vec<CardTemplate> = {
        let mut v = Vec::new();
        v.push(CardTemplate::new(
            "Defend",
            CardType::Skill,
            vec![pair![Block, Player, 5]],
            1,
            false,
            false,
        ));
        v.push(CardTemplate::new(
            "Neutralize",
            CardType::Attack,
            vec![pair![Attack, Single, 3], pair![Weak, Single, 1]],
            0,
            false,
            false,
        ));
        v.push(CardTemplate::new(
            "Strike",
            CardType::Attack,
            vec![pair![Attack, Single, 6]],
            1,
            false,
            false,
        ));
        v.push(new_card![
            "Survivor",
            &v[0],
            effects = vec![pair![Block, Player, 8], pair![Discard, Player, 1]]
        ]);
        v.push(new_card![
            "Acrobatics",
            &v[0],
            effects = vec![pair![Draw, Player, 3], pair![Discard, Player, 1]]
        ]);
        v
    };

    /// Card id (index into `CARDS`) -> card name.
    pub static ref NAMES: HashMap<usize, &'static str> = {
        let mut h = HashMap::new();
        for id in 0..CARDS.len() {
            h.insert(id, CARDS[id].name);
        }
        h
    };

    /// Card name -> card id (inverse of `NAMES`).
    pub static ref IDS: HashMap<&'static str, usize> = {
        let mut h = HashMap::new();
        for id in 0..CARDS.len() {
            h.insert(CARDS[id].name, id);
        }
        h
    };
}

/// Anything that can be played during a battle.
pub trait Playable: std::fmt::Debug {
    fn play(&self, env: &mut Battle, target_id: Option<usize>);
    fn get_type(&self) -> CardType;
    fn get_name(&self) -> &'static str;
}

/// An effect together with who it applies to.
#[derive(Debug, Clone)]
pub struct EffectPair {
    pub effect: Effect,
    pub target: Target,
}

impl EffectPair {
    pub fn new(effect: Effect, target: Target) -> EffectPair {
        EffectPair { effect, target }
    }

    // Convenience constructors for each target kind.
    pub fn new_single(eff: Effect) -> EffectPair {
        Self::new(eff, Target::Single)
    }

    pub fn new_multi(eff: Effect) -> EffectPair {
        Self::new(eff, Target::Multi)
    }

    pub fn new_player(eff: Effect) -> EffectPair {
        Self::new(eff, Target::Player)
    }
}

/// Who an effect applies to: the player, one enemy, or all enemies.
#[derive(Debug, Clone)]
pub enum Target {
    Player,
    Single,
    Multi,
}

// NOTE(review): `Debuff` is not referenced in this file; presumably used by
// the actor/battle code — confirm before removing.
#[derive(Debug)]
pub enum Debuff {
    Weak(i32),
}

/// A card effect with its magnitude.
#[derive(Clone, Debug)]
pub enum Effect {
    Block(i32),
    Attack(i32),
    Weak(i32),
    Draw(usize),
    Discard(usize),
    Strength(i32),
}

#[derive(Clone, Copy, Debug)]
pub enum CardType {
    Attack,
    Skill,
    Power,
}

/// Immutable description of a card; actual card instances are built from these.
#[derive(Clone, Debug)]
pub struct CardTemplate {
    pub name: &'static str,
    ty: CardType,
    pub effects: Vec<EffectPair>,
    // Energy cost to play the card.
    base_cost: u32,
    // ethereal: exhausts if still in hand at end of turn; exhaust: removed after play.
    ethereal: bool,
    exhaust: bool,
}

impl CardTemplate {
    pub fn new(
        name: &'static str,
        ty: CardType,
        effects: Vec<EffectPair>,
        base_cost: u32,
        ethereal: bool,
        exhaust: bool,
    ) -> CardTemplate {
        CardTemplate {
            name,
            ty,
            effects,
            base_cost,
            ethereal,
            exhaust,
        }
    }
}
use std::io::Write;
use std::time::Duration;

use serialport::{DataBits, StopBits};

/// Opens COM3 at 115200 8N1 and writes a fixed payload once per second,
/// echoing each successful write and ignoring timeouts.
fn main() {
    const PORT_NAME: &str = "COM3";
    const BAUD_RATE: u32 = 115200;
    const RATE_HZ: i32 = 1; // writes per second; 0 means "write once and exit"
    const PAYLOAD: &str = "rust";

    let builder = serialport::new(PORT_NAME, BAUD_RATE)
        .data_bits(DataBits::Eight)
        .stop_bits(StopBits::One);
    println!("{:?}", builder);

    let mut port = builder.open().unwrap_or_else(|_e| {
        eprintln!("Failed to Open Port");
        ::std::process::exit(1);
    });

    println!("Writing Data");

    loop {
        match port.write(PAYLOAD.as_bytes()) {
            Ok(_) => {
                println!("{}", PAYLOAD);
                std::io::stdout().flush().unwrap();
            }
            // Write timeouts are expected on a slow device; just retry.
            Err(ref e) if e.kind() == std::io::ErrorKind::TimedOut => (),
            Err(e) => println!("{:?}", e),
        }

        if RATE_HZ == 0 {
            return;
        }
        std::thread::sleep(Duration::from_millis((1000.0 / (RATE_HZ as f32)) as u64));
    }
}
/// Gnome sort: returns a sorted copy of `list`, leaving the input untouched.
///
/// Accepts any slice (`&Vec<T>` arguments still work via deref coercion,
/// so existing callers are unaffected).
///
/// Walks the data with a single index: steps forward while adjacent elements
/// are in order, otherwise swaps them and steps back. O(n²) worst case.
pub fn sort<T: Ord + Clone>(list: &[T]) -> Vec<T> {
    let mut sorted = list.to_vec();
    let mut i = 1;
    while i < sorted.len() {
        // `i == 0` happens after backing up past a swap at the front.
        if i == 0 || sorted[i - 1] <= sorted[i] {
            i += 1;
        } else {
            sorted.swap(i - 1, i);
            i -= 1;
        }
    }
    sorted
}
use crate::thread_worker::Worker;
use crate::types::*;
use crossbeam_channel::{Receiver, Sender, TryRecvError};
use jsonrpc_core::{self, Call, Output};
use std::collections::HashMap;
use std::io::{self, BufRead, BufReader, BufWriter, Error, ErrorKind, Read, Write};
use std::process::{Command, Stdio};

/// Worker threads wrapping a spawned language-server process:
/// one for outgoing messages, one for incoming messages, one for stderr.
pub struct LanguageServerTransport {
    // The field order is important as it defines the order of drop.
    // We want to exit a writer loop first (after sending exit notification),
    // then close all pipes and wait until child process is finished.
    // That helps to ensure that reader loop is not stuck trying to read from the language server.
    pub to_lang_server: Worker<ServerMessage, Void>,
    pub from_lang_server: Worker<Void, ServerMessage>,
    pub errors: Worker<Void, Void>,
}

/// Spawns `cmd args…` with piped stdio and starts the three worker threads.
/// Returns a human-readable error string if the process cannot be started.
pub fn start(cmd: &str, args: &[String]) -> Result<LanguageServerTransport, String> {
    info!("Starting Language server `{} {}`", cmd, args.join(" "));
    let mut child = match Command::new(cmd)
        .args(args)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()
    {
        Ok(c) => c,
        Err(err) => {
            return Err(match err.kind() {
                // "not found"/"permission denied" are clearer with the command name attached.
                ErrorKind::NotFound | ErrorKind::PermissionDenied => format!("{}: {}", err, cmd),
                _ => format!("{}", err),
            })
        }
    };

    let writer = BufWriter::new(child.stdin.take().expect("Failed to open stdin"));
    let reader = BufReader::new(child.stdout.take().expect("Failed to open stdout"));
    // NOTE 1024 is arbitrary
    let channel_capacity = 1024;

    // XXX temporary way of tracing language server errors
    let mut stderr = BufReader::new(child.stderr.take().expect("Failed to open stderr"));
    let errors = Worker::spawn(
        "Language server errors",
        channel_capacity,
        move |receiver, _| loop {
            // A disconnected control channel means the transport is shutting down.
            if let Err(TryRecvError::Disconnected) = receiver.try_recv() {
                return;
            }
            let mut buf = String::new();
            // Blocks until the server closes stderr (EOF), then logs whatever was captured.
            match stderr.read_to_string(&mut buf) {
                Ok(_) => {
                    if buf.is_empty() {
                        return;
                    }
                    error!("Language server error: {}", buf);
                }
                Err(e) => {
                    error!("Failed to read from language server stderr: {}", e);
                    return;
                }
            }
        },
    );
    // XXX
    let from_lang_server = Worker::spawn(
        "Messages from language server",
        channel_capacity,
        move |receiver, sender| {
            if let Err(msg) = reader_loop(reader, receiver, &sender) {
                error!("{}", msg);
            }
        },
    );

    let to_lang_server = Worker::spawn(
        "Messages to language server",
        channel_capacity,
        move |receiver, _| {
            if writer_loop(writer, &receiver).is_err() {
                error!("Failed to write message to language server");
            }
            // NOTE prevent zombie: close all pipes, then give the server a
            // grace period (2 x 1s) to exit on its own before killing it.
            debug!("Waiting for language server process end");
            drop(child.stdin.take());
            drop(child.stdout.take());
            drop(child.stderr.take());
            std::thread::sleep(std::time::Duration::from_secs(1));
            match child.try_wait() {
                Ok(None) => {
                    std::thread::sleep(std::time::Duration::from_secs(1));
                    if let Ok(None) = child.try_wait() {
                        // Okay, we asked politely enough and waited long enough.
                        child.kill().unwrap();
                    }
                }
                Err(_) => {
                    error!("Language server wasn't running was it?!");
                }
                _ => {}
            }
        },
    );

    Ok(LanguageServerTransport {
        from_lang_server,
        to_lang_server,
        errors,
    })
}

/// Reads LSP base-protocol framed messages ("Content-Length" headers + JSON
/// body) from the server and forwards them as `ServerMessage`s.
fn reader_loop(
    mut reader: impl BufRead,
    receiver: Receiver<Void>,
    sender: &Sender<ServerMessage>,
) -> io::Result<()> {
    let mut headers: HashMap<String, String> = HashMap::default();
    loop {
        // Stop when the transport's control channel has been dropped.
        if let Err(TryRecvError::Disconnected) = receiver.try_recv() {
            return Ok(());
        }
        headers.clear();
        // Header section: "Name: value" lines terminated by an empty line.
        loop {
            let mut header = String::new();
            if reader.read_line(&mut header)? == 0 {
                debug!("Language server closed pipe, stopping reading");
                return Ok(());
            }
            let header = header.trim();
            if header.is_empty() {
                break;
            }
            let parts: Vec<&str> = header.split(": ").collect();
            if parts.len() != 2 {
                return Err(Error::new(ErrorKind::Other, "Failed to parse header"));
            }
            headers.insert(parts[0].to_string(), parts[1].to_string());
        }
        let content_len = headers
            .get("Content-Length")
            .ok_or_else(|| Error::new(ErrorKind::Other, "Failed to get Content-Length header"))?
            .parse()
            .map_err(|_| Error::new(ErrorKind::Other, "Failed to parse Content-Length header"))?;
        let mut content = vec![0; content_len];
        reader.read_exact(&mut content)?;
        let msg = String::from_utf8(content)
            .map_err(|_| Error::new(ErrorKind::Other, "Failed to read content as UTF-8 string"))?;
        debug!("From server: {}", msg);
        // Try to parse as a response first; anything else must be a
        // server-to-client request/notification (`Call`).
        let output: serde_json::Result<Output> = serde_json::from_str(&msg);
        match output {
            Ok(output) => {
                if sender.send(ServerMessage::Response(output)).is_err() {
                    return Err(Error::new(ErrorKind::Other, "Failed to send response"));
                }
            }
            Err(_) => {
                let msg: Call = serde_json::from_str(&msg).map_err(|_| {
                    Error::new(ErrorKind::Other, "Failed to parse language server message")
                })?;
                if sender.send(ServerMessage::Request(msg)).is_err() {
                    return Err(Error::new(ErrorKind::Other, "Failed to send response"));
                }
            }
        }
    }
}

/// Serializes each outgoing message with the LSP base-protocol framing and
/// flushes it to the server's stdin; returns when the channel closes.
fn writer_loop(mut writer: impl Write, receiver: &Receiver<ServerMessage>) -> io::Result<()> {
    for request in receiver {
        let request = match request {
            ServerMessage::Request(request) => serde_json::to_string(&request),
            ServerMessage::Response(response) => serde_json::to_string(&response),
        }?;
        debug!("To server: {}", request);
        write!(
            writer,
            "Content-Length: {}\r\n\r\n{}",
            request.len(),
            request
        )?;
        writer.flush()?;
    }
    // NOTE we rely on the assumption that language server will exit when its stdin is closed
    // without need to kill child process
    debug!("Received signal to stop language server, closing pipe");
    Ok(())
}
//! Command `tag` use crate::{registry::Registry, result::Result}; /// Exec comamnd `tag` pub fn exec(limit: usize, update: bool) -> Result<()> { let registry = Registry::new()?; if update { println!("Fetching registry..."); registry.update()?; } // Get tags let mut tags = registry.tag()?; let last = if limit < tags.len() || limit < 1 { limit } else { tags.len() }; tags.reverse(); println!("{}", &tags[..last].join("\n")); Ok(()) }
extern crate hound; #[macro_use] extern crate nom; extern crate portaudio as pa; extern crate sample; pub mod expr; pub mod eval; pub mod numeral; pub mod ops; pub mod parser; pub mod player; pub mod signal; pub mod wav;
use serde_json::Result;
use std::fmt; // for the fmt::Display impl below
use serde::{Deserialize, Serialize};
use crate::api::message::meta::*;
use crate::api::message::amount::{Amount, string_or_struct};
use std::error::Error;

/// JSON-RPC `tx` command: look a transaction up by hash.
#[derive(Serialize, Deserialize, Debug)]
pub struct RequestTxCommand {
    #[serde(rename="id")]
    id: u64,
    #[serde(rename="command")]
    command: String,
    #[serde(rename="transaction")]
    hash: String,
}

impl RequestTxCommand {
    /// Builds a `tx` command for the given transaction hash
    /// (id is fixed to 1 and the command name to "tx").
    pub fn with_params(hash: String) -> Box<Self> {
        Box::new(
            RequestTxCommand {
                id: 1,
                command: "tx".to_string(),
                hash: hash,
            }
        )
    }

    /// Serializes the command to its JSON wire form.
    pub fn to_string(&self) -> Result<String> {
        let j = serde_json::to_string(&self)?;
        Ok(j)
    }
}

/// Successful `tx` response payload. Field names mirror the server's JSON keys.
#[derive(Serialize, Deserialize, Debug)]
pub struct RequestTxResponse {
    #[serde(rename="Account")]
    pub account: String,

    #[serde(rename="Fee")]
    pub fee: String,

    #[serde(rename="Flags")]
    pub flags: i32,

    #[serde(rename="Sequence")]
    pub sequence: u64,

    #[serde(rename="SigningPubKey")]
    pub signing_pub_key: String,

    #[serde(rename="Timestamp")]
    pub timestamp: u64,

    #[serde(rename="TransactionType")]
    pub transaction_type: String,

    #[serde(rename="TxnSignature")]
    pub txn_signature: String,

    #[serde(rename="date")]
    pub date: u64,

    #[serde(rename="hash")]
    pub hash: String,

    #[serde(rename="inLedger")]
    pub in_ledger: u64,

    #[serde(rename="ledger_index")]
    pub ledger_index: u64,

    #[serde(rename="meta")]
    pub meta: Option<Meta>,

    #[serde(rename="validated")]
    pub validated: bool,

    // Amounts may arrive either as a plain string or as a structured object;
    // `string_or_struct` handles both shapes.
    #[serde(rename="TakerGets")]
    #[serde(deserialize_with = "string_or_struct")]
    pub taker_gets: Amount,

    #[serde(rename="TakerPays")]
    #[serde(deserialize_with = "string_or_struct")]
    pub taker_pays: Amount,

    #[serde(rename="Memos")]
    pub memos: Option<Vec<String>>,
}

/// Error payload returned when the `tx` command fails; carries the original
/// request back alongside the error details.
#[derive(Debug, Serialize, Deserialize)]
pub struct SpecTxSideKick {
    pub error : String,
    pub error_code : i32,
    pub error_message : String,
    pub id : u32,
    pub request : RequestTxCommand,
    pub status : String,
    #[serde(rename="type")]
    pub rtype : String,
}

impl fmt::Display for SpecTxSideKick {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "SpecTxSideKick is here!")
    }
}

impl Error for SpecTxSideKick {
    // NOTE(review): `Error::description` has been deprecated since Rust 1.42
    // in favour of `Display`; kept as-is since callers may still invoke it.
    fn description(&self) -> &str {
        "I'm SpecTxSideKick side kick"
    }
}
#![feature(iterator_fold_self)] #[macro_use] extern crate lazy_static; extern crate num; pub mod vigenere;
//! A library for reading and writing TAR archives //! //! This library provides utilities necessary to manage [TAR archives][1] //! abstracted over a reader or writer. Great strides are taken to ensure that //! an archive is never required to be fully resident in memory, and all objects //! provide largely a streaming interface to read bytes from. //! //! [1]: http://en.wikipedia.org/wiki/Tar_%28computing%29 // More docs about the detailed tar format can also be found here: // http://www.freebsd.org/cgi/man.cgi?query=tar&sektion=5&manpath=FreeBSD+8-current // NB: some of the coding patterns and idioms here may seem a little strange. // This is currently attempting to expose a super generic interface while // also not forcing clients to codegen the entire crate each time they use // it. To that end lots of work is done to ensure that concrete // implementations are all found in this crate and the generic functions are // all just super thin wrappers (e.g. easy to codegen). #![doc(html_root_url = "https://docs.rs/tar/0.4")] #![deny(missing_docs)] #![cfg_attr(test, deny(warnings))] use std::io::{Error, ErrorKind}; pub use crate::archive::{Archive, Entries}; pub use crate::builder::Builder; pub use crate::entry::{Entry, Unpacked}; pub use crate::entry_type::EntryType; pub use crate::header::GnuExtSparseHeader; pub use crate::header::{GnuHeader, GnuSparseHeader, Header, HeaderMode, OldHeader, UstarHeader}; pub use crate::pax::{PaxExtension, PaxExtensions}; mod archive; mod builder; mod entry; mod entry_type; mod error; mod header; mod pax; fn other(msg: &str) -> Error { Error::new(ErrorKind::Other, msg) }
fn main(){ proconio::input!{n:u32,m:usize,ss:[(usize, char);m]}; let ans = (0..10i32.pow(n)) .filter(|x| { let x = x.to_string().as_bytes().to_vec(); x.len() == n as usize && ss .iter() .all(|(s,c)| matches!(x.get(*s-1), Some(x) if *x as char == *c)) }) .next() .unwrap_or(-1); println!("{}",ans); }
extern crate rand; #[cfg(feature="blas")] extern crate blas_sys; extern crate time; extern crate libc; extern crate rustc_serialize; extern crate bincode; #[macro_use] extern crate log; mod model; pub use model::Model; pub mod dictionary; pub use dictionary::Dict; pub mod matrix; pub use matrix::Matrix; mod utils; pub use utils::{Argument, parse_arguments, Command}; pub mod train; pub use train::train; const SIGMOID_TABLE_SIZE: usize = 512; const MAX_SIGMOID: f32 = 8f32; const NEGATIVE_TABLE_SIZE: usize = 10000000; const LOG_TABLE_SIZE: usize = 512; mod w2v; pub use w2v::Word2vec; pub use utils::W2vError; #[macro_use] extern crate clap;
use std::string::String; use std::vec::Vec; use unicode_normalization::UnicodeNormalization; const ALPHABET_LENGTH: u8 = 26; pub fn encrypt(message: &String, key: u8) -> String { let mut result: Vec<char> = Vec::new(); for grapheme in message.nfd() { if grapheme.is_alphabetic() { let origin = if grapheme.is_uppercase() { b'A'} else { b'a'}; result.push((origin + (grapheme as u8 - origin + key) % ALPHABET_LENGTH) as char); } else { result.push(grapheme); } } result.iter().map(|c| *c).collect::<String>() } pub fn decrypt(message: &String, key: u8) -> String{ encrypt(message, ALPHABET_LENGTH - key) }
use super::*; #[test] fn with_number_or_atom_second_returns_second() { run!( |arc_process| { ( strategy::term::local_reference(arc_process.clone()), strategy::term::number_or_atom(arc_process.clone()), ) }, |(first, second)| { prop_assert_eq!(result(first, second), second); Ok(()) }, ); } #[test] fn with_lesser_local_reference_second_returns_second() { min(|_, process| process.reference(0), Second); } #[test] fn with_same_local_reference_second_returns_first() { min(|first, _| first, First); } #[test] fn with_same_value_local_reference_second_returns_first() { min(|_, process| process.reference(1), First); } #[test] fn with_greater_local_reference_second_returns_first() { min(|_, process| process.reference(2), First); } #[test] fn with_function_port_pid_tuple_map_list_or_bitstring_second_returns_first() { run!( |arc_process| { ( strategy::term::local_reference(arc_process.clone()), strategy::term::function_port_pid_tuple_map_list_or_bitstring(arc_process.clone()), ) }, |(first, second)| { prop_assert_eq!(result(first, second), first); Ok(()) }, ); } fn min<R>(second: R, which: FirstSecond) where R: FnOnce(Term, &Process) -> Term, { super::min(|process| process.reference(1), second, which); }
use std::collections::LinkedList; #[derive(Debug, Clone)] pub struct FlattenProjectDependency { pub name : String, pub version : String, pub repositoryUrl : String } impl FlattenProjectDependency { pub fn new(name : String, version : String, repositoryUrl : String) -> FlattenProjectDependency { FlattenProjectDependency { name: name, version: version, repositoryUrl: repositoryUrl } } } #[derive(Debug, Clone)] pub struct FlattenProjectDependencyContainer { pub dependencies : LinkedList<FlattenProjectDependency> } impl FlattenProjectDependencyContainer { pub fn new() -> FlattenProjectDependencyContainer { FlattenProjectDependencyContainer { dependencies: LinkedList::new() } } pub fn addFlattenDependency(&mut self, dependency : FlattenProjectDependency) -> () { self.dependencies.push_back(dependency); } }
use criterion::{criterion_group, criterion_main, Criterion}; use weather_util_rust::{weather_data::WeatherData, weather_forecast::WeatherForecast}; pub fn criterion_benchmark(c: &mut Criterion) { let buf = include_str!("../tests/weather.json"); let data: WeatherData = serde_json::from_str(buf).unwrap(); c.bench_function("weather_data", |b| b.iter(|| data.get_current_conditions())); let buf = include_str!("../tests/forecast.json"); let data: WeatherForecast = serde_json::from_str(buf).unwrap(); c.bench_function("weather_forecast", |b| b.iter(|| data.get_forecast())); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches);
use crate::commands::WholeStreamCommand; use crate::context::CommandRegistry; use crate::prelude::*; use nu_errors::ShellError; use nu_protocol::{ColumnPath, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value}; use nu_value_ext::ValueExt; pub struct Edit; #[derive(Deserialize)] pub struct EditArgs { field: ColumnPath, replacement: Value, } impl WholeStreamCommand for Edit { fn name(&self) -> &str { "edit" } fn signature(&self) -> Signature { Signature::build("edit") .required( "field", SyntaxShape::ColumnPath, "the name of the column to edit", ) .required( "replacement value", SyntaxShape::Any, "the new value to give the cell(s)", ) } fn usage(&self) -> &str { "Edit an existing column to have a new value." } fn run( &self, args: CommandArgs, registry: &CommandRegistry, ) -> Result<OutputStream, ShellError> { args.process(registry, edit)?.run() } } fn edit( EditArgs { field, replacement }: EditArgs, RunnableContext { input, .. }: RunnableContext, ) -> Result<OutputStream, ShellError> { let mut input = input; let stream = async_stream! { match input.next().await { Some(obj @ Value { value: UntaggedValue::Row(_), .. }) => match obj.replace_data_at_column_path(&field, replacement.clone()) { Some(v) => yield Ok(ReturnSuccess::Value(v)), None => { yield Err(ShellError::labeled_error( "edit could not find place to insert column", "column name", obj.tag, )) } }, Some(Value { tag, ..}) => { yield Err(ShellError::labeled_error( "Unrecognized type in stream", "original value", tag, )) } _ => {} } }; Ok(stream.to_output_stream()) }
use winapi::um::winuser::*; use event::{KeyId, RawEvent, State}; #[derive(Clone, Debug, PartialEq, Eq)] enum KeyPos { Left, Right, } pub fn process_keyboard_data(raw_data: &RAWKEYBOARD, id: usize) -> Vec<RawEvent> { let mut output: Vec<RawEvent> = Vec::new(); let flags = raw_data.Flags as u32; let key = raw_data.VKey as i32; let mut key_opt: Option<KeyId> = None; let key_state: State; let key_pos: KeyPos; if flags & RI_KEY_BREAK != 0 { key_state = State::Released; } else { key_state = State::Pressed; } if flags & RI_KEY_E0 == 0 { key_pos = KeyPos::Left; } else { key_pos = KeyPos::Right; } if key == VK_TAB { key_opt = Some(KeyId::Tab); } if key == VK_SHIFT { key_opt = Some(KeyId::Shift); } if key == VK_CONTROL && key_pos == KeyPos::Left { key_opt = Some(KeyId::LeftCtrl); } if key == VK_CONTROL && key_pos == KeyPos::Right { key_opt = Some(KeyId::RightCtrl); } if key == VK_MENU && key_pos == KeyPos::Left { key_opt = Some(KeyId::LeftAlt); } if key == VK_MENU && key_pos == KeyPos::Right { key_opt = Some(KeyId::RightAlt); } if key == VK_ESCAPE { key_opt = Some(KeyId::Escape); } if key == VK_RETURN { key_opt = Some(KeyId::Return); } if key == VK_BACK { key_opt = Some(KeyId::Backspace); } if key == VK_LEFT { key_opt = Some(KeyId::Left); } if key == VK_RIGHT { key_opt = Some(KeyId::Right); } if key == VK_UP { key_opt = Some(KeyId::Up); } if key == VK_DOWN { key_opt = Some(KeyId::Down); } if key == VK_SPACE { key_opt = Some(KeyId::Space); } if key == VK_LCONTROL { key_opt = Some(KeyId::LeftCtrl); } if key == VK_RCONTROL { key_opt = Some(KeyId::RightCtrl); } if key == VK_LMENU { key_opt = Some(KeyId::LeftAlt); } if key == VK_RMENU { key_opt = Some(KeyId::RightAlt); } if key == 0x30 { key_opt = Some(KeyId::Zero); } if key == 0x31 { key_opt = Some(KeyId::One); } if key == 0x32 { key_opt = Some(KeyId::Two); } if key == 0x33 { key_opt = Some(KeyId::Three); } if key == 0x34 { key_opt = Some(KeyId::Four); } if key == 0x35 { key_opt = Some(KeyId::Five); } if key == 0x36 { key_opt = 
Some(KeyId::Six); } if key == 0x37 { key_opt = Some(KeyId::Seven); } if key == 0x38 { key_opt = Some(KeyId::Eight); } if key == 0x39 { key_opt = Some(KeyId::Nine); } if key == 0x41 { key_opt = Some(KeyId::A); } if key == 0x42 { key_opt = Some(KeyId::B); } if key == 0x43 { key_opt = Some(KeyId::C); } if key == 0x44 { key_opt = Some(KeyId::D); } if key == 0x45 { key_opt = Some(KeyId::E); } if key == 0x46 { key_opt = Some(KeyId::F); } if key == 0x47 { key_opt = Some(KeyId::G); } if key == 0x48 { key_opt = Some(KeyId::H); } if key == 0x49 { key_opt = Some(KeyId::I); } if key == 0x4A { key_opt = Some(KeyId::J); } if key == 0x4B { key_opt = Some(KeyId::K); } if key == 0x4C { key_opt = Some(KeyId::L); } if key == 0x4D { key_opt = Some(KeyId::M); } if key == 0x4E { key_opt = Some(KeyId::N); } if key == 0x4F { key_opt = Some(KeyId::O); } if key == 0x50 { key_opt = Some(KeyId::P); } if key == 0x51 { key_opt = Some(KeyId::Q); } if key == 0x52 { key_opt = Some(KeyId::R); } if key == 0x53 { key_opt = Some(KeyId::S); } if key == 0x54 { key_opt = Some(KeyId::T); } if key == 0x55 { key_opt = Some(KeyId::U); } if key == 0x56 { key_opt = Some(KeyId::V); } if key == 0x57 { key_opt = Some(KeyId::W); } if key == 0x58 { key_opt = Some(KeyId::X); } if key == 0x59 { key_opt = Some(KeyId::Y); } if key == 0x5A { key_opt = Some(KeyId::Z); } if key == VK_CAPITAL { key_opt = Some(KeyId::CapsLock); } if key == VK_PAUSE { key_opt = Some(KeyId::Pause); } if key == VK_NEXT { key_opt = Some(KeyId::PageUp); } if key == VK_PRIOR { key_opt = Some(KeyId::PageDown); } if key == VK_SNAPSHOT { key_opt = Some(KeyId::PrintScreen); } if key == VK_INSERT { key_opt = Some(KeyId::Insert); } if key == VK_END { key_opt = Some(KeyId::End); } if key == VK_HOME { key_opt = Some(KeyId::Home); } if key == VK_DELETE { key_opt = Some(KeyId::Delete); } if key == VK_ADD { key_opt = Some(KeyId::Add); } if key == VK_SUBTRACT { key_opt = Some(KeyId::Subtract); } if key == VK_MULTIPLY { key_opt = Some(KeyId::Multiply); } if 
key == VK_DIVIDE { key_opt = Some(KeyId::Divide); } if key == VK_SEPARATOR { key_opt = Some(KeyId::Separator); } if key == VK_DECIMAL { key_opt = Some(KeyId::Decimal); } if key == VK_F1 { key_opt = Some(KeyId::F1); } if key == VK_F2 { key_opt = Some(KeyId::F2); } if key == VK_F3 { key_opt = Some(KeyId::F3); } if key == VK_F4 { key_opt = Some(KeyId::F4); } if key == VK_F5 { key_opt = Some(KeyId::F5); } if key == VK_F6 { key_opt = Some(KeyId::F6); } if key == VK_F7 { key_opt = Some(KeyId::F7); } if key == VK_F8 { key_opt = Some(KeyId::F8); } if key == VK_F9 { key_opt = Some(KeyId::F9); } if key == VK_F10 { key_opt = Some(KeyId::F10); } if key == VK_F11 { key_opt = Some(KeyId::F11); } if key == VK_F12 { key_opt = Some(KeyId::F12); } if key == VK_NUMLOCK { key_opt = Some(KeyId::Numlock); } if key == VK_OEM_PLUS { key_opt = Some(KeyId::Plus); } if key == VK_OEM_MINUS { key_opt = Some(KeyId::Minus); } if key == VK_OEM_COMMA { key_opt = Some(KeyId::Comma); } if key == VK_OEM_PERIOD { key_opt = Some(KeyId::FullStop); } if key == VK_OEM_1 { key_opt = Some(KeyId::SemiColon); } if key == VK_OEM_2 { key_opt = Some(KeyId::ForwardSlash); } if key == VK_OEM_3 { key_opt = Some(KeyId::Apostrophe); } if key == VK_OEM_4 { key_opt = Some(KeyId::LeftSquareBracket); } if key == VK_OEM_5 { key_opt = Some(KeyId::BackSlash); } if key == VK_OEM_6 { key_opt = Some(KeyId::RightSquareBracket); } if key == VK_OEM_7 { key_opt = Some(KeyId::Hash); } if key == VK_OEM_8 { key_opt = Some(KeyId::BackTick); } if let Some(key_id) = key_opt { output.push(RawEvent::KeyboardEvent(id, key_id, key_state)); } output }
#![no_std]
#![no_main]

extern crate panic_halt;
use atmega168_hal::prelude::*;

// Two-floor elevator controller on an ATmega168:
//  * two push buttons (PD0/PD1) select the target level,
//  * two reflective opto couplers on ADC channels PC0/PC1 report position,
//  * a 4-coil stepper motor (PB2, PC2..PC4) moves the car,
//  * the LED on PB1 is lit while the car idles at the target level.
#[atmega168_hal::entry]
fn main() -> ! {
    // Floor the car is at / wants to reach (0 or 1).
    let mut current_level = 0;
    let mut target_level = 0;

    let dp = atmega168_hal::pac::Peripherals::take().unwrap();
    let mut port_b = dp.PORTB.split();
    let mut port_c = dp.PORTC.split();
    let mut port_d = dp.PORTD.split();
    let mut adc = atmega168_hal::adc::Adc::new(dp.ADC, Default::default());

    // LED
    let mut led = port_b.pb1.into_output(&mut port_b.ddr);

    // Reflective Opto Coupler
    let mut optocoupler_0 = port_c.pc0.into_analog_input(&mut adc);
    let mut optocoupler_1 = port_c.pc1.into_analog_input(&mut adc);

    // Push Button Switch
    // NOTE(review): inputs use internal pull-ups but are treated as
    // active-high below (`is_high()` == pressed); confirm the buttons are
    // wired to VCC, otherwise the polarity is inverted.
    let button_0 = port_d.pd0.into_pull_up_input(&mut port_d.ddr);
    let button_1 = port_d.pd1.into_pull_up_input(&mut port_d.ddr);

    // Stepper Motor
    // `step` selects one of four coil phases via `step % 4`; the initial
    // value 23 (== 3 mod 4) just picks an arbitrary starting phase.
    let mut step: i8 = 23;
    let mut c1 = port_b.pb2.into_output(&mut port_b.ddr);
    let mut c2 = port_c.pc2.into_output(&mut port_c.ddr);
    let mut c3 = port_c.pc3.into_output(&mut port_c.ddr);
    let mut c4 = port_c.pc4.into_output(&mut port_c.ddr);

    loop {
        led.set_low().void_unwrap();
        if current_level == target_level {
            // Idle at target: light the LED, park the phase (step = -1
            // matches no arm of the `match` below, so the coils are left
            // unchanged) and poll the buttons for a new target.
            step = -1;
            led.set_high().void_unwrap();
            if button_0.is_high().unwrap_or(false) {
                target_level = 0;
            }
            if button_1.is_high().unwrap_or(false) {
                target_level = 1;
            }
        } else if current_level < target_level {
            // Move up: advance the drive sequence one phase per loop pass.
            step += 1;
            step %= 4;
        } else if current_level > target_level {
            // Move down: walk the sequence backwards, wrapping 0 -> 4
            // (4 % 4 == 0 keeps the phase pattern continuous).
            step -= 1;
            if step <= 0 {
                step = 4
            }
        }

        let opto_0_value: u16 = nb::block!(adc.read(&mut optocoupler_0)).void_unwrap();
        let opto_1_value: u16 = nb::block!(adc.read(&mut optocoupler_1)).void_unwrap();

        if current_level == target_level {
            // ignore changes in level sensors when the elevator sits in the correct position
        } else if opto_0_value > 5 {
            // NOTE(review): `> 5` treats any non-trivial ADC reading as
            // "sensor triggered" — threshold presumably tuned empirically.
            current_level = 0
        } else if opto_1_value > 5 {
            current_level = 1
        }

        // Energize the coils for the current phase of the 4-step sequence.
        match step % 4 {
            0 => {
                c1.set_high().void_unwrap();
                c2.set_low().void_unwrap();
                c3.set_high().void_unwrap();
                c4.set_low().void_unwrap()
            }
            1 => {
                c1.set_low().void_unwrap();
                c2.set_high().void_unwrap();
                c3.set_high().void_unwrap();
                c4.set_low().void_unwrap()
            }
            2 => {
                c1.set_low().void_unwrap();
                c2.set_high().void_unwrap();
                c3.set_low().void_unwrap();
                c4.set_high().void_unwrap()
            }
            3 => {
                c1.set_high().void_unwrap();
                c2.set_low().void_unwrap();
                c3.set_low().void_unwrap();
                c4.set_high().void_unwrap()
            }
            // Unreachable for step % 4, but also covers the parked step = -1
            // case on the pass where the car first reaches its target.
            _ => (),
        }
    }
}
use std::cell::RefCell;
use std::rc::Rc;

use super::row::{PrivateSqliteRow, SqliteRow};
use super::stmt::StatementUse;
use crate::result::QueryResult;

/// Lazy iterator over the rows produced by a prepared SQLite statement.
///
/// Rows are handed out as views directly into the statement whenever
/// possible; values are only copied out of SQLite's buffers when the caller
/// keeps a previous `SqliteRow` alive across an iteration step.
#[allow(missing_debug_implementations)]
pub struct StatementIterator<'a> {
    // Current state of the iteration (see `PrivateStatementIterator`).
    inner: PrivateStatementIterator<'a>,
    // Column names, cached lazily the first time a row must be duplicated.
    column_names: Option<Rc<[Option<String>]>>,
    // Number of columns in the result set; captured on the first step.
    field_count: usize,
}

// State machine for `StatementIterator`.
enum PrivateStatementIterator<'a> {
    // `step()` has not been called yet.
    NotStarted(StatementUse<'a>),
    // At least one row has been produced; the statement now lives inside the
    // shared row handed out to the caller.
    Started(Rc<RefCell<PrivateSqliteRow<'a>>>),
    // Transient placeholder used while ownership is moved around (via
    // `std::mem::replace`) inside `next()`.
    TemporaryEmpty,
}

impl<'a> StatementIterator<'a> {
    // Wraps a prepared statement; no stepping happens until `next()`.
    pub fn new(stmt: StatementUse<'a>) -> Self {
        Self {
            inner: PrivateStatementIterator::NotStarted(stmt),
            column_names: None,
            field_count: 0,
        }
    }
}

impl<'a> Iterator for StatementIterator<'a> {
    type Item = QueryResult<SqliteRow<'a>>;

    fn next(&mut self) -> Option<Self::Item> {
        use PrivateStatementIterator::{NotStarted, Started, TemporaryEmpty};
        match std::mem::replace(&mut self.inner, TemporaryEmpty) {
            // First call: perform the initial step and record the column count.
            NotStarted(stmt) => match stmt.step() {
                Err(e) => Some(Err(e)),
                Ok(None) => None,
                Ok(Some(stmt)) => {
                    let field_count = stmt.column_count() as usize;
                    self.field_count = field_count;
                    let inner = Rc::new(RefCell::new(PrivateSqliteRow::Direct(stmt)));
                    self.inner = Started(inner.clone());
                    Some(Ok(SqliteRow { inner, field_count }))
                }
            },
            Started(mut last_row) => {
                // There was already at least one iteration step
                // We check here if the caller already released the row value or not
                // by checking if our Rc owns the data or not
                if let Some(last_row_ref) = Rc::get_mut(&mut last_row) {
                    // We own the statement, there is no other reference here.
                    // This means we don't need to copy out values from the sqlite provided
                    // datastructures for now
                    // We don't need to use the runtime borrowing system of the RefCell here
                    // as we have a mutable reference, so all of this below is checked at
                    // compile time
                    if let PrivateSqliteRow::Direct(stmt) = std::mem::replace(
                        last_row_ref.get_mut(),
                        PrivateSqliteRow::TemporaryEmpty,
                    ) {
                        match stmt.step() {
                            Err(e) => Some(Err(e)),
                            Ok(None) => None,
                            Ok(Some(stmt)) => {
                                let field_count = self.field_count;
                                // Put the advanced statement back into the shared cell
                                // and hand out a fresh view of it.
                                (*last_row_ref.get_mut()) = PrivateSqliteRow::Direct(stmt);
                                self.inner = Started(last_row.clone());
                                Some(Ok(SqliteRow {
                                    inner: last_row,
                                    field_count,
                                }))
                            }
                        }
                    } else {
                        // any other state than `PrivateSqliteRow::Direct` is invalid here
                        // and should not happen. If this ever happens this is a logic error
                        // in the code above
                        unreachable!(
                            "You've reached an impossible internal state. \
                             If you ever see this error message please open \
                             an issue at https://github.com/diesel-rs/diesel \
                             providing example code how to trigger this error."
                        )
                    }
                } else {
                    // We don't own the statement. There is another existing reference,
                    // likely because a user stored the row in some long-lived container
                    // before calling next another time.
                    // In this case we copy out the current values into a temporary store
                    // and advance the statement iterator internally afterwards
                    let last_row = {
                        let mut last_row = match last_row.try_borrow_mut() {
                            Ok(o) => o,
                            Err(_e) => {
                                // A field/value borrow is still outstanding; restore the
                                // state we took above so the iterator remains usable.
                                self.inner = Started(last_row.clone());
                                return Some(Err(crate::result::Error::DeserializationError(
                                    "Failed to reborrow row. Try to release any `SqliteField` or `SqliteValue` \
                                     that exists at this point"
                                        .into(),
                                )));
                            }
                        };
                        let last_row = &mut *last_row;
                        // Copy the shared row's values out (filling the cached column
                        // names as needed), leaving the `Direct` statement behind here.
                        let duplicated = last_row.duplicate(&mut self.column_names);
                        std::mem::replace(last_row, duplicated)
                    };
                    if let PrivateSqliteRow::Direct(stmt) = last_row {
                        match stmt.step() {
                            Err(e) => Some(Err(e)),
                            Ok(None) => None,
                            Ok(Some(stmt)) => {
                                let field_count = self.field_count;
                                let last_row =
                                    Rc::new(RefCell::new(PrivateSqliteRow::Direct(stmt)));
                                self.inner = Started(last_row.clone());
                                Some(Ok(SqliteRow {
                                    inner: last_row,
                                    field_count,
                                }))
                            }
                        }
                    } else {
                        // any other state than `PrivateSqliteRow::Direct` is invalid here
                        // and should not happen. If this ever happens this is a logic error
                        // in the code above
                        unreachable!(
                            "You've reached an impossible internal state. \
                             If you ever see this error message please open \
                             an issue at https://github.com/diesel-rs/diesel \
                             providing example code how to trigger this error."
                        )
                    }
                }
            }
            // `next()` re-entered while the state was parked; nothing to yield.
            TemporaryEmpty => None,
        }
    }
}
use crate::common::{boxvec::BoxVec, lock::PyMutex}; use crate::{ builtins::{ asyncgenerator::PyAsyncGenWrappedValue, function::{PyCell, PyCellRef, PyFunction}, tuple::{PyTuple, PyTupleTyped}, PyBaseExceptionRef, PyCode, PyCoroutine, PyDict, PyDictRef, PyGenerator, PyList, PySet, PySlice, PyStr, PyStrInterned, PyStrRef, PyTraceback, PyType, }, bytecode, convert::{IntoObject, ToPyResult}, coroutine::Coro, exceptions::ExceptionCtor, function::{ArgMapping, Either, FuncArgs}, protocol::{PyIter, PyIterReturn}, scope::Scope, source_code::SourceLocation, stdlib::builtins, vm::{Context, PyMethod}, AsObject, Py, PyObject, PyObjectRef, PyPayload, PyRef, PyResult, TryFromObject, VirtualMachine, }; use indexmap::IndexMap; use itertools::Itertools; #[cfg(feature = "threading")] use std::sync::atomic; use std::{fmt, iter::zip}; #[derive(Clone, Debug)] struct Block { /// The type of block. typ: BlockType, /// The level of the value stack when the block was entered. level: usize, } #[derive(Clone, Debug)] enum BlockType { Loop, TryExcept { handler: bytecode::Label, }, Finally { handler: bytecode::Label, }, /// Active finally sequence FinallyHandler { reason: Option<UnwindReason>, prev_exc: Option<PyBaseExceptionRef>, }, ExceptHandler { prev_exc: Option<PyBaseExceptionRef>, }, } pub type FrameRef = PyRef<Frame>; /// The reason why we might be unwinding a block. /// This could be return of function, exception being /// raised, a break or continue being hit, etc.. #[derive(Clone, Debug)] enum UnwindReason { /// We are returning a value from a return statement. Returning { value: PyObjectRef }, /// We hit an exception, so unwind any try-except and finally blocks. The exception should be /// on top of the vm exception stack. Raising { exception: PyBaseExceptionRef }, // NoWorries, /// We are unwinding blocks, since we hit break Break { target: bytecode::Label }, /// We are unwinding blocks since we hit a continue statements. 
Continue { target: bytecode::Label }, } #[derive(Debug)] struct FrameState { // We need 1 stack per frame /// The main data frame of the stack machine stack: BoxVec<PyObjectRef>, /// Block frames, for controlling loops and exceptions blocks: Vec<Block>, /// index of last instruction ran #[cfg(feature = "threading")] lasti: u32, } #[cfg(feature = "threading")] type Lasti = atomic::AtomicU32; #[cfg(not(feature = "threading"))] type Lasti = std::cell::Cell<u32>; #[pyclass(module = false, name = "frame")] pub struct Frame { pub code: PyRef<PyCode>, pub fastlocals: PyMutex<Box<[Option<PyObjectRef>]>>, pub(crate) cells_frees: Box<[PyCellRef]>, pub locals: ArgMapping, pub globals: PyDictRef, pub builtins: PyDictRef, // on feature=threading, this is a duplicate of FrameState.lasti, but it's faster to do an // atomic store than it is to do a fetch_add, for every instruction executed /// index of last instruction ran pub lasti: Lasti, /// tracer function for this frame (usually is None) pub trace: PyMutex<PyObjectRef>, state: PyMutex<FrameState>, // member pub trace_lines: PyMutex<bool>, pub temporary_refs: PyMutex<Vec<PyObjectRef>>, } impl PyPayload for Frame { fn class(ctx: &Context) -> &'static Py<PyType> { ctx.types.frame_type } } // Running a frame can result in one of the below: pub enum ExecutionResult { Return(PyObjectRef), Yield(PyObjectRef), } /// A valid execution result, or an exception type FrameResult = PyResult<Option<ExecutionResult>>; impl Frame { pub(crate) fn new( code: PyRef<PyCode>, scope: Scope, builtins: PyDictRef, closure: &[PyCellRef], vm: &VirtualMachine, ) -> Frame { let cells_frees = std::iter::repeat_with(|| PyCell::default().into_ref(&vm.ctx)) .take(code.cellvars.len()) .chain(closure.iter().cloned()) .collect(); let state = FrameState { stack: BoxVec::new(code.max_stackdepth as usize), blocks: Vec::new(), #[cfg(feature = "threading")] lasti: 0, }; Frame { fastlocals: PyMutex::new(vec![None; code.varnames.len()].into_boxed_slice()), cells_frees, 
locals: scope.locals, globals: scope.globals, builtins, code, lasti: Lasti::new(0), state: PyMutex::new(state), trace: PyMutex::new(vm.ctx.none()), trace_lines: PyMutex::new(true), temporary_refs: PyMutex::new(vec![]), } } pub fn current_location(&self) -> SourceLocation { self.code.locations[self.lasti() as usize - 1] } pub fn lasti(&self) -> u32 { #[cfg(feature = "threading")] { self.lasti.load(atomic::Ordering::Relaxed) } #[cfg(not(feature = "threading"))] { self.lasti.get() } } pub fn locals(&self, vm: &VirtualMachine) -> PyResult<ArgMapping> { let locals = &self.locals; let code = &**self.code; let map = &code.varnames; let j = std::cmp::min(map.len(), code.varnames.len()); if !code.varnames.is_empty() { let fastlocals = self.fastlocals.lock(); for (&k, v) in zip(&map[..j], &**fastlocals) { match locals.mapping().ass_subscript(k, v.clone(), vm) { Ok(()) => {} Err(e) if e.fast_isinstance(vm.ctx.exceptions.key_error) => {} Err(e) => return Err(e), } } } if !code.cellvars.is_empty() || !code.freevars.is_empty() { let map_to_dict = |keys: &[&PyStrInterned], values: &[PyCellRef]| { for (&k, v) in zip(keys, values) { if let Some(value) = v.get() { locals.mapping().ass_subscript(k, Some(value), vm)?; } else { match locals.mapping().ass_subscript(k, None, vm) { Ok(()) => {} Err(e) if e.fast_isinstance(vm.ctx.exceptions.key_error) => {} Err(e) => return Err(e), } } } Ok(()) }; map_to_dict(&code.cellvars, &self.cells_frees)?; if code.flags.contains(bytecode::CodeFlags::IS_OPTIMIZED) { map_to_dict(&code.freevars, &self.cells_frees[code.cellvars.len()..])?; } } Ok(locals.clone()) } } impl Py<Frame> { #[inline(always)] fn with_exec<R>(&self, f: impl FnOnce(ExecutingFrame) -> R) -> R { let mut state = self.state.lock(); let exec = ExecutingFrame { code: &self.code, fastlocals: &self.fastlocals, cells_frees: &self.cells_frees, locals: &self.locals, globals: &self.globals, builtins: &self.builtins, lasti: &self.lasti, object: self, state: &mut state, }; f(exec) } // 
#[cfg_attr(feature = "flame-it", flame("Frame"))] pub fn run(&self, vm: &VirtualMachine) -> PyResult<ExecutionResult> { self.with_exec(|mut exec| exec.run(vm)) } pub(crate) fn resume( &self, value: Option<PyObjectRef>, vm: &VirtualMachine, ) -> PyResult<ExecutionResult> { self.with_exec(|mut exec| { if let Some(value) = value { exec.push_value(value) } exec.run(vm) }) } pub(crate) fn gen_throw( &self, vm: &VirtualMachine, exc_type: PyObjectRef, exc_val: PyObjectRef, exc_tb: PyObjectRef, ) -> PyResult<ExecutionResult> { self.with_exec(|mut exec| exec.gen_throw(vm, exc_type, exc_val, exc_tb)) } pub fn yield_from_target(&self) -> Option<PyObjectRef> { self.with_exec(|exec| exec.yield_from_target().map(PyObject::to_owned)) } pub fn is_internal_frame(&self) -> bool { let code = self.f_code(); let filename = code.co_filename(); filename.as_str().contains("importlib") && filename.as_str().contains("_bootstrap") } pub fn next_external_frame(&self, vm: &VirtualMachine) -> Option<FrameRef> { self.f_back(vm).map(|mut back| loop { back = if let Some(back) = back.to_owned().f_back(vm) { back } else { break back; }; if !back.is_internal_frame() { break back; } }) } } /// An executing frame; essentially just a struct to combine the immutable data outside the mutex /// with the mutable data inside struct ExecutingFrame<'a> { code: &'a PyRef<PyCode>, fastlocals: &'a PyMutex<Box<[Option<PyObjectRef>]>>, cells_frees: &'a [PyCellRef], locals: &'a ArgMapping, globals: &'a PyDictRef, builtins: &'a PyDictRef, object: &'a Py<Frame>, lasti: &'a Lasti, state: &'a mut FrameState, } impl fmt::Debug for ExecutingFrame<'_> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("ExecutingFrame") .field("code", self.code) // .field("scope", self.scope) .field("state", self.state) .finish() } } impl ExecutingFrame<'_> { #[inline(always)] fn update_lasti(&mut self, f: impl FnOnce(&mut u32)) { #[cfg(feature = "threading")] { f(&mut self.state.lasti); self.lasti 
.store(self.state.lasti, atomic::Ordering::Relaxed); } #[cfg(not(feature = "threading"))] { let mut lasti = self.lasti.get(); f(&mut lasti); self.lasti.set(lasti); } } #[inline(always)] fn lasti(&self) -> u32 { #[cfg(feature = "threading")] { self.state.lasti } #[cfg(not(feature = "threading"))] { self.lasti.get() } } fn run(&mut self, vm: &VirtualMachine) -> PyResult<ExecutionResult> { flame_guard!(format!("Frame::run({})", self.code.obj_name)); // Execute until return or exception: let instrs = &self.code.instructions; let mut arg_state = bytecode::OpArgState::default(); loop { let idx = self.lasti() as usize; self.update_lasti(|i| *i += 1); let bytecode::CodeUnit { op, arg } = instrs[idx]; let arg = arg_state.extend(arg); let mut do_extend_arg = false; let result = self.execute_instruction(op, arg, &mut do_extend_arg, vm); match result { Ok(None) => {} Ok(Some(value)) => { break Ok(value); } // Instruction raised an exception Err(exception) => { #[cold] fn handle_exception( frame: &mut ExecutingFrame, exception: PyBaseExceptionRef, idx: usize, vm: &VirtualMachine, ) -> FrameResult { // 1. Extract traceback from exception's '__traceback__' attr. // 2. Add new entry with current execution position (filename, lineno, code_object) to traceback. // 3. Unwind block stack till appropriate handler is found. let loc = frame.code.locations[idx]; let next = exception.traceback(); let new_traceback = PyTraceback::new(next, frame.object.to_owned(), frame.lasti(), loc.row); vm_trace!("Adding to traceback: {:?} {:?}", new_traceback, loc.row()); exception.set_traceback(Some(new_traceback.into_ref(&vm.ctx))); vm.contextualize_exception(&exception); frame.unwind_blocks(vm, UnwindReason::Raising { exception }) } match handle_exception(self, exception, idx, vm) { Ok(None) => {} Ok(Some(result)) => break Ok(result), // TODO: append line number to traceback? 
// traceback.append(); Err(exception) => break Err(exception), } } } if !do_extend_arg { arg_state.reset() } } } fn yield_from_target(&self) -> Option<&PyObject> { if let Some(bytecode::CodeUnit { op: bytecode::Instruction::YieldFrom, .. }) = self.code.instructions.get(self.lasti() as usize) { Some(self.last_value_ref()) } else { None } } /// Ok(Err(e)) means that an error occurred while calling throw() and the generator should try /// sending it fn gen_throw( &mut self, vm: &VirtualMachine, exc_type: PyObjectRef, exc_val: PyObjectRef, exc_tb: PyObjectRef, ) -> PyResult<ExecutionResult> { if let Some(gen) = self.yield_from_target() { // borrow checker shenanigans - we only need to use exc_type/val/tb if the following // variable is Some let thrower = if let Some(coro) = self.builtin_coro(gen) { Some(Either::A(coro)) } else { vm.get_attribute_opt(gen.to_owned(), "throw")? .map(Either::B) }; if let Some(thrower) = thrower { let ret = match thrower { Either::A(coro) => coro .throw(gen, exc_type, exc_val, exc_tb, vm) .to_pyresult(vm), // FIXME: Either::B(meth) => meth.call((exc_type, exc_val, exc_tb), vm), }; return ret.map(ExecutionResult::Yield).or_else(|err| { self.pop_value(); self.update_lasti(|i| *i += 1); if err.fast_isinstance(vm.ctx.exceptions.stop_iteration) { let val = vm.unwrap_or_none(err.get_arg(0)); self.push_value(val); self.run(vm) } else { let (ty, val, tb) = vm.split_exception(err); self.gen_throw(vm, ty, val, tb) } }); } } let exception = vm.normalize_exception(exc_type, exc_val, exc_tb)?; match self.unwind_blocks(vm, UnwindReason::Raising { exception }) { Ok(None) => self.run(vm), Ok(Some(result)) => Ok(result), Err(exception) => Err(exception), } } fn unbound_cell_exception(&self, i: usize, vm: &VirtualMachine) -> PyBaseExceptionRef { if let Some(&name) = self.code.cellvars.get(i) { vm.new_exception_msg( vm.ctx.exceptions.unbound_local_error.to_owned(), format!("local variable '{name}' referenced before assignment"), ) } else { let name = 
self.code.freevars[i - self.code.cellvars.len()]; vm.new_name_error( format!("free variable '{name}' referenced before assignment in enclosing scope"), name.to_owned(), ) } } /// Execute a single instruction. #[inline(always)] fn execute_instruction( &mut self, instruction: bytecode::Instruction, arg: bytecode::OpArg, extend_arg: &mut bool, vm: &VirtualMachine, ) -> FrameResult { vm.check_signals()?; flame_guard!(format!("Frame::execute_instruction({:?})", instruction)); #[cfg(feature = "vm-tracing-logging")] { trace!("======="); /* TODO: for frame in self.frames.iter() { trace!(" {:?}", frame); } */ trace!(" {:#?}", self); trace!(" Executing op code: {:?}", instruction); trace!("======="); } #[cold] fn name_error(name: &'static PyStrInterned, vm: &VirtualMachine) -> PyBaseExceptionRef { vm.new_name_error(format!("name '{name}' is not defined"), name.to_owned()) } match instruction { bytecode::Instruction::LoadConst { idx } => { self.push_value(self.code.constants[idx.get(arg) as usize].clone().into()); Ok(None) } bytecode::Instruction::ImportName { idx } => { self.import(vm, Some(self.code.names[idx.get(arg) as usize]))?; Ok(None) } bytecode::Instruction::ImportNameless => { self.import(vm, None)?; Ok(None) } bytecode::Instruction::ImportStar => { self.import_star(vm)?; Ok(None) } bytecode::Instruction::ImportFrom { idx } => { let obj = self.import_from(vm, idx.get(arg))?; self.push_value(obj); Ok(None) } bytecode::Instruction::LoadFast(idx) => { #[cold] fn reference_error( varname: &'static PyStrInterned, vm: &VirtualMachine, ) -> PyBaseExceptionRef { vm.new_exception_msg( vm.ctx.exceptions.unbound_local_error.to_owned(), format!("local variable '{varname}' referenced before assignment",), ) } let idx = idx.get(arg) as usize; let x = self.fastlocals.lock()[idx] .clone() .ok_or_else(|| reference_error(self.code.varnames[idx], vm))?; self.push_value(x); Ok(None) } bytecode::Instruction::LoadNameAny(idx) => { let name = self.code.names[idx.get(arg) as usize]; let 
result = self.locals.mapping().subscript(name, vm); match result { Ok(x) => self.push_value(x), Err(e) if e.fast_isinstance(vm.ctx.exceptions.key_error) => { self.push_value(self.load_global_or_builtin(name, vm)?); } Err(e) => return Err(e), } Ok(None) } bytecode::Instruction::LoadGlobal(idx) => { let name = &self.code.names[idx.get(arg) as usize]; let x = self.load_global_or_builtin(name, vm)?; self.push_value(x); Ok(None) } bytecode::Instruction::LoadDeref(i) => { let i = i.get(arg) as usize; let x = self.cells_frees[i] .get() .ok_or_else(|| self.unbound_cell_exception(i, vm))?; self.push_value(x); Ok(None) } bytecode::Instruction::LoadClassDeref(i) => { let i = i.get(arg) as usize; let name = self.code.freevars[i - self.code.cellvars.len()]; let value = self.locals.mapping().subscript(name, vm).ok(); self.push_value(match value { Some(v) => v, None => self.cells_frees[i] .get() .ok_or_else(|| self.unbound_cell_exception(i, vm))?, }); Ok(None) } bytecode::Instruction::StoreFast(idx) => { let value = self.pop_value(); self.fastlocals.lock()[idx.get(arg) as usize] = Some(value); Ok(None) } bytecode::Instruction::StoreLocal(idx) => { let name = self.code.names[idx.get(arg) as usize]; let value = self.pop_value(); self.locals.mapping().ass_subscript(name, Some(value), vm)?; Ok(None) } bytecode::Instruction::StoreGlobal(idx) => { let value = self.pop_value(); self.globals .set_item(self.code.names[idx.get(arg) as usize], value, vm)?; Ok(None) } bytecode::Instruction::StoreDeref(i) => { let value = self.pop_value(); self.cells_frees[i.get(arg) as usize].set(Some(value)); Ok(None) } bytecode::Instruction::DeleteFast(idx) => { let mut fastlocals = self.fastlocals.lock(); let idx = idx.get(arg) as usize; if fastlocals[idx].is_none() { return Err(vm.new_exception_msg( vm.ctx.exceptions.unbound_local_error.to_owned(), format!( "local variable '{}' referenced before assignment", self.code.varnames[idx] ), )); } fastlocals[idx] = None; Ok(None) } 
bytecode::Instruction::DeleteLocal(idx) => { let name = self.code.names[idx.get(arg) as usize]; let res = self.locals.mapping().ass_subscript(name, None, vm); match res { Ok(()) => {} Err(e) if e.fast_isinstance(vm.ctx.exceptions.key_error) => { return Err(name_error(name, vm)) } Err(e) => return Err(e), } Ok(None) } bytecode::Instruction::DeleteGlobal(idx) => { let name = self.code.names[idx.get(arg) as usize]; match self.globals.del_item(name, vm) { Ok(()) => {} Err(e) if e.fast_isinstance(vm.ctx.exceptions.key_error) => { return Err(name_error(name, vm)) } Err(e) => return Err(e), } Ok(None) } bytecode::Instruction::DeleteDeref(i) => { self.cells_frees[i.get(arg) as usize].set(None); Ok(None) } bytecode::Instruction::LoadClosure(i) => { let value = self.cells_frees[i.get(arg) as usize].clone(); self.push_value(value.into()); Ok(None) } bytecode::Instruction::Subscript => self.execute_subscript(vm), bytecode::Instruction::StoreSubscript => self.execute_store_subscript(vm), bytecode::Instruction::DeleteSubscript => self.execute_delete_subscript(vm), bytecode::Instruction::Pop => { // Pop value from stack and ignore. 
self.pop_value(); Ok(None) } bytecode::Instruction::Duplicate => { // Duplicate top of stack let value = self.last_value(); self.push_value(value); Ok(None) } bytecode::Instruction::Duplicate2 => { // Duplicate top 2 of stack let top = self.last_value(); let second_to_top = self.nth_value(1).to_owned(); self.push_value(second_to_top); self.push_value(top); Ok(None) } // splitting the instructions like this offloads the cost of "dynamic" dispatch (on the // amount to rotate) to the opcode dispatcher, and generates optimized code for the // concrete cases we actually have bytecode::Instruction::Rotate2 => self.execute_rotate(2), bytecode::Instruction::Rotate3 => self.execute_rotate(3), bytecode::Instruction::BuildString { size } => { let s = self .pop_multiple(size.get(arg) as usize) .as_slice() .iter() .map(|pyobj| pyobj.payload::<PyStr>().unwrap().as_ref()) .collect::<String>(); let str_obj = vm.ctx.new_str(s); self.push_value(str_obj.into()); Ok(None) } bytecode::Instruction::BuildList { size } => { let elements = self.pop_multiple(size.get(arg) as usize).collect(); let list_obj = vm.ctx.new_list(elements); self.push_value(list_obj.into()); Ok(None) } bytecode::Instruction::BuildListUnpack { size } => { let elements = self.unpack_elements(vm, size.get(arg) as usize)?; let list_obj = vm.ctx.new_list(elements); self.push_value(list_obj.into()); Ok(None) } bytecode::Instruction::BuildSet { size } => { let set = PySet::new_ref(&vm.ctx); { for element in self.pop_multiple(size.get(arg) as usize) { set.add(element, vm)?; } } self.push_value(set.into()); Ok(None) } bytecode::Instruction::BuildSetUnpack { size } => { let set = PySet::new_ref(&vm.ctx); { for element in self.pop_multiple(size.get(arg) as usize) { vm.map_iterable_object(&element, |x| set.add(x, vm))??; } } self.push_value(set.into()); Ok(None) } bytecode::Instruction::BuildTuple { size } => { let elements = self.pop_multiple(size.get(arg) as usize).collect(); let list_obj = vm.ctx.new_tuple(elements); 
self.push_value(list_obj.into()); Ok(None) } bytecode::Instruction::BuildTupleUnpack { size } => { let elements = self.unpack_elements(vm, size.get(arg) as usize)?; let list_obj = vm.ctx.new_tuple(elements); self.push_value(list_obj.into()); Ok(None) } bytecode::Instruction::BuildMap { size } => self.execute_build_map(vm, size.get(arg)), bytecode::Instruction::BuildMapForCall { size } => { self.execute_build_map_for_call(vm, size.get(arg)) } bytecode::Instruction::DictUpdate => { let other = self.pop_value(); let dict = self .last_value_ref() .downcast_ref::<PyDict>() .expect("exact dict expected"); dict.merge_object(other, vm)?; Ok(None) } bytecode::Instruction::BuildSlice { step } => { self.execute_build_slice(vm, step.get(arg)) } bytecode::Instruction::ListAppend { i } => { let item = self.pop_value(); let obj = self.nth_value(i.get(arg)); let list: &Py<PyList> = unsafe { // SAFETY: trust compiler obj.downcast_unchecked_ref() }; list.append(item); Ok(None) } bytecode::Instruction::SetAdd { i } => { let item = self.pop_value(); let obj = self.nth_value(i.get(arg)); let set: &Py<PySet> = unsafe { // SAFETY: trust compiler obj.downcast_unchecked_ref() }; set.add(item, vm)?; Ok(None) } bytecode::Instruction::MapAdd { i } => { let value = self.pop_value(); let key = self.pop_value(); let obj = self.nth_value(i.get(arg)); let dict: &Py<PyDict> = unsafe { // SAFETY: trust compiler obj.downcast_unchecked_ref() }; dict.set_item(&*key, value, vm)?; Ok(None) } bytecode::Instruction::BinaryOperation { op } => self.execute_binop(vm, op.get(arg)), bytecode::Instruction::BinaryOperationInplace { op } => { self.execute_binop_inplace(vm, op.get(arg)) } bytecode::Instruction::LoadAttr { idx } => self.load_attr(vm, idx.get(arg)), bytecode::Instruction::StoreAttr { idx } => self.store_attr(vm, idx.get(arg)), bytecode::Instruction::DeleteAttr { idx } => self.delete_attr(vm, idx.get(arg)), bytecode::Instruction::UnaryOperation { op } => self.execute_unop(vm, op.get(arg)), 
bytecode::Instruction::TestOperation { op } => self.execute_test(vm, op.get(arg)), bytecode::Instruction::CompareOperation { op } => self.execute_compare(vm, op.get(arg)), bytecode::Instruction::ReturnValue => { let value = self.pop_value(); self.unwind_blocks(vm, UnwindReason::Returning { value }) } bytecode::Instruction::YieldValue => { let value = self.pop_value(); let value = if self.code.flags.contains(bytecode::CodeFlags::IS_COROUTINE) { PyAsyncGenWrappedValue(value).into_pyobject(vm) } else { value }; Ok(Some(ExecutionResult::Yield(value))) } bytecode::Instruction::YieldFrom => self.execute_yield_from(vm), bytecode::Instruction::SetupAnnotation => self.setup_annotations(vm), bytecode::Instruction::SetupLoop => { self.push_block(BlockType::Loop); Ok(None) } bytecode::Instruction::SetupExcept { handler } => { self.push_block(BlockType::TryExcept { handler: handler.get(arg), }); Ok(None) } bytecode::Instruction::SetupFinally { handler } => { self.push_block(BlockType::Finally { handler: handler.get(arg), }); Ok(None) } bytecode::Instruction::EnterFinally => { self.push_block(BlockType::FinallyHandler { reason: None, prev_exc: vm.current_exception(), }); Ok(None) } bytecode::Instruction::EndFinally => { // Pop the finally handler from the stack, and recall // what was the reason we were in this finally clause. let block = self.pop_block(); if let BlockType::FinallyHandler { reason, prev_exc } = block.typ { vm.set_exception(prev_exc); if let Some(reason) = reason { self.unwind_blocks(vm, reason) } else { Ok(None) } } else { self.fatal( "Block type must be finally handler when reaching EndFinally instruction!", ); } } bytecode::Instruction::SetupWith { end } => { let context_manager = self.pop_value(); let error_string = || -> String { format!( "'{:.200}' object does not support the context manager protocol", context_manager.class().name(), ) }; let enter_res = vm .get_special_method(&context_manager, identifier!(vm, __enter__))? 
.ok_or_else(|| vm.new_type_error(error_string()))? .invoke((), vm)?; let exit = context_manager .get_attr(identifier!(vm, __exit__), vm) .map_err(|_exc| { vm.new_type_error({ format!("'{} (missed __exit__ method)", error_string()) }) })?; self.push_value(exit); self.push_block(BlockType::Finally { handler: end.get(arg), }); self.push_value(enter_res); Ok(None) } bytecode::Instruction::BeforeAsyncWith => { let mgr = self.pop_value(); let error_string = || -> String { format!( "'{:.200}' object does not support the asynchronous context manager protocol", mgr.class().name(), ) }; let aenter_res = vm .get_special_method(&mgr, identifier!(vm, __aenter__))? .ok_or_else(|| vm.new_type_error(error_string()))? .invoke((), vm)?; let aexit = mgr .get_attr(identifier!(vm, __aexit__), vm) .map_err(|_exc| { vm.new_type_error({ format!("'{} (missed __aexit__ method)", error_string()) }) })?; self.push_value(aexit); self.push_value(aenter_res); Ok(None) } bytecode::Instruction::SetupAsyncWith { end } => { let enter_res = self.pop_value(); self.push_block(BlockType::Finally { handler: end.get(arg), }); self.push_value(enter_res); Ok(None) } bytecode::Instruction::WithCleanupStart => { let block = self.current_block().unwrap(); let reason = match block.typ { BlockType::FinallyHandler { reason, .. 
} => reason, _ => self.fatal("WithCleanupStart expects a FinallyHandler block on stack"), }; let exc = match reason { Some(UnwindReason::Raising { exception }) => Some(exception), _ => None, }; let exit = self.pop_value(); let args = if let Some(exc) = exc { vm.split_exception(exc) } else { (vm.ctx.none(), vm.ctx.none(), vm.ctx.none()) }; let exit_res = exit.call(args, vm)?; self.push_value(exit_res); Ok(None) } bytecode::Instruction::WithCleanupFinish => { let block = self.pop_block(); let (reason, prev_exc) = match block.typ { BlockType::FinallyHandler { reason, prev_exc } => (reason, prev_exc), _ => self.fatal("WithCleanupFinish expects a FinallyHandler block on stack"), }; let suppress_exception = self.pop_value().try_to_bool(vm)?; vm.set_exception(prev_exc); if suppress_exception { Ok(None) } else if let Some(reason) = reason { self.unwind_blocks(vm, reason) } else { Ok(None) } } bytecode::Instruction::PopBlock => { self.pop_block(); Ok(None) } bytecode::Instruction::GetIter => { let iterated_obj = self.pop_value(); let iter_obj = iterated_obj.get_iter(vm)?; self.push_value(iter_obj.into()); Ok(None) } bytecode::Instruction::GetAwaitable => { let awaited_obj = self.pop_value(); let awaitable = if awaited_obj.payload_is::<PyCoroutine>() { awaited_obj } else { let await_method = vm.get_method_or_type_error( awaited_obj.clone(), identifier!(vm, __await__), || { format!( "object {} can't be used in 'await' expression", awaited_obj.class().name(), ) }, )?; await_method.call((), vm)? 
}; self.push_value(awaitable); Ok(None) } bytecode::Instruction::GetAIter => { let aiterable = self.pop_value(); let aiter = vm.call_special_method(&aiterable, identifier!(vm, __aiter__), ())?; self.push_value(aiter); Ok(None) } bytecode::Instruction::GetANext => { let aiter = self.last_value(); let awaitable = vm.call_special_method(&aiter, identifier!(vm, __anext__), ())?; let awaitable = if awaitable.payload_is::<PyCoroutine>() { awaitable } else { vm.call_special_method(&awaitable, identifier!(vm, __await__), ())? }; self.push_value(awaitable); Ok(None) } bytecode::Instruction::EndAsyncFor => { let exc = self.pop_value(); self.pop_value(); // async iterator we were calling __anext__ on if exc.fast_isinstance(vm.ctx.exceptions.stop_async_iteration) { vm.take_exception().expect("Should have exception in stack"); Ok(None) } else { Err(exc.downcast().unwrap()) } } bytecode::Instruction::ForIter { target } => self.execute_for_iter(vm, target.get(arg)), bytecode::Instruction::MakeFunction(flags) => { self.execute_make_function(vm, flags.get(arg)) } bytecode::Instruction::CallFunctionPositional { nargs } => { let args = self.collect_positional_args(nargs.get(arg)); self.execute_call(args, vm) } bytecode::Instruction::CallFunctionKeyword { nargs } => { let args = self.collect_keyword_args(nargs.get(arg)); self.execute_call(args, vm) } bytecode::Instruction::CallFunctionEx { has_kwargs } => { let args = self.collect_ex_args(vm, has_kwargs.get(arg))?; self.execute_call(args, vm) } bytecode::Instruction::LoadMethod { idx } => { let obj = self.pop_value(); let method_name = self.code.names[idx.get(arg) as usize]; let method = PyMethod::get(obj, method_name, vm)?; let (target, is_method, func) = match method { PyMethod::Function { target, func } => (target, true, func), PyMethod::Attribute(val) => (vm.ctx.none(), false, val), }; // TODO: figure out a better way to communicate PyMethod::Attribute - CPython uses // target==NULL, maybe we could use a sentinel value or 
something? self.push_value(target); self.push_value(vm.ctx.new_bool(is_method).into()); self.push_value(func); Ok(None) } bytecode::Instruction::CallMethodPositional { nargs } => { let args = self.collect_positional_args(nargs.get(arg)); self.execute_method_call(args, vm) } bytecode::Instruction::CallMethodKeyword { nargs } => { let args = self.collect_keyword_args(nargs.get(arg)); self.execute_method_call(args, vm) } bytecode::Instruction::CallMethodEx { has_kwargs } => { let args = self.collect_ex_args(vm, has_kwargs.get(arg))?; self.execute_method_call(args, vm) } bytecode::Instruction::Jump { target } => { self.jump(target.get(arg)); Ok(None) } bytecode::Instruction::JumpIfTrue { target } => self.jump_if(vm, target.get(arg), true), bytecode::Instruction::JumpIfFalse { target } => { self.jump_if(vm, target.get(arg), false) } bytecode::Instruction::JumpIfTrueOrPop { target } => { self.jump_if_or_pop(vm, target.get(arg), true) } bytecode::Instruction::JumpIfFalseOrPop { target } => { self.jump_if_or_pop(vm, target.get(arg), false) } bytecode::Instruction::Raise { kind } => self.execute_raise(vm, kind.get(arg)), bytecode::Instruction::Break { target } => self.unwind_blocks( vm, UnwindReason::Break { target: target.get(arg), }, ), bytecode::Instruction::Continue { target } => self.unwind_blocks( vm, UnwindReason::Continue { target: target.get(arg), }, ), bytecode::Instruction::PrintExpr => self.print_expr(vm), bytecode::Instruction::LoadBuildClass => { self.push_value(vm.builtins.get_attr(identifier!(vm, __build_class__), vm)?); Ok(None) } bytecode::Instruction::UnpackSequence { size } => { self.unpack_sequence(size.get(arg), vm) } bytecode::Instruction::UnpackEx { args } => { let args = args.get(arg); self.execute_unpack_ex(vm, args.before, args.after) } bytecode::Instruction::FormatValue { conversion } => { self.format_value(conversion.get(arg), vm) } bytecode::Instruction::PopException {} => { let block = self.pop_block(); if let BlockType::ExceptHandler { 
prev_exc } = block.typ { vm.set_exception(prev_exc); Ok(None) } else { self.fatal("block type must be ExceptHandler here.") } } bytecode::Instruction::Reverse { amount } => { let stack_len = self.state.stack.len(); self.state.stack[stack_len - amount.get(arg) as usize..stack_len].reverse(); Ok(None) } bytecode::Instruction::ExtendedArg => { *extend_arg = true; Ok(None) } } } #[inline] fn load_global_or_builtin(&self, name: &Py<PyStr>, vm: &VirtualMachine) -> PyResult { self.globals .get_chain(self.builtins, name, vm)? .ok_or_else(|| { vm.new_name_error(format!("name '{name}' is not defined"), name.to_owned()) }) } #[cfg_attr(feature = "flame-it", flame("Frame"))] fn unpack_elements(&mut self, vm: &VirtualMachine, size: usize) -> PyResult<Vec<PyObjectRef>> { let mut result = Vec::<PyObjectRef>::new(); for element in self.pop_multiple(size) { let items: Vec<_> = element.try_to_value(vm)?; result.extend(items); } Ok(result) } #[cfg_attr(feature = "flame-it", flame("Frame"))] fn import(&mut self, vm: &VirtualMachine, module: Option<&Py<PyStr>>) -> PyResult<()> { let module = module.unwrap_or(vm.ctx.empty_str); let from_list = <Option<PyTupleTyped<PyStrRef>>>::try_from_object(vm, self.pop_value())?; let level = usize::try_from_object(vm, self.pop_value())?; let module = vm.import(module, from_list, level)?; self.push_value(module); Ok(()) } #[cfg_attr(feature = "flame-it", flame("Frame"))] fn import_from(&mut self, vm: &VirtualMachine, idx: bytecode::NameIdx) -> PyResult { let module = self.last_value(); let name = self.code.names[idx as usize]; let err = || vm.new_import_error(format!("cannot import name '{name}'"), name.to_owned()); // Load attribute, and transform any error into import error. if let Some(obj) = vm.get_attribute_opt(module.clone(), name)? 
{ return Ok(obj); } // fallback to importing '{module.__name__}.{name}' from sys.modules let mod_name = module .get_attr(identifier!(vm, __name__), vm) .map_err(|_| err())?; let mod_name = mod_name.downcast::<PyStr>().map_err(|_| err())?; let full_mod_name = format!("{mod_name}.{name}"); let sys_modules = vm.sys_module.get_attr("modules", vm).map_err(|_| err())?; sys_modules.get_item(&full_mod_name, vm).map_err(|_| err()) } #[cfg_attr(feature = "flame-it", flame("Frame"))] fn import_star(&mut self, vm: &VirtualMachine) -> PyResult<()> { let module = self.pop_value(); // Grab all the names from the module and put them in the context if let Some(dict) = module.dict() { let filter_pred: Box<dyn Fn(&str) -> bool> = if let Ok(all) = dict.get_item(identifier!(vm, __all__), vm) { let all: Vec<PyStrRef> = all.try_to_value(vm)?; let all: Vec<String> = all .into_iter() .map(|name| name.as_str().to_owned()) .collect(); Box::new(move |name| all.contains(&name.to_owned())) } else { Box::new(|name| !name.starts_with('_')) }; for (k, v) in dict { let k = PyStrRef::try_from_object(vm, k)?; if filter_pred(k.as_str()) { self.locals.mapping().ass_subscript(&k, Some(v), vm)?; } } } Ok(()) } /// Unwind blocks. /// The reason for unwinding gives a hint on what to do when /// unwinding a block. /// Optionally returns an exception. 
    /// Walks the block stack from the top, giving each block a chance to
    /// intercept the unwind (loops absorb break/continue, finally/try
    /// blocks redirect control to their handlers); exception state is
    /// saved/restored around handler blocks.
    #[cfg_attr(feature = "flame-it", flame("Frame"))]
    fn unwind_blocks(&mut self, vm: &VirtualMachine, reason: UnwindReason) -> FrameResult {
        // First unwind all existing blocks on the block stack:
        while let Some(block) = self.current_block() {
            match block.typ {
                BlockType::Loop => match reason {
                    UnwindReason::Break { target } => {
                        self.pop_block();
                        self.jump(target);
                        return Ok(None);
                    }
                    UnwindReason::Continue { target } => {
                        // Continue keeps the loop block on the stack.
                        self.jump(target);
                        return Ok(None);
                    }
                    _ => {
                        self.pop_block();
                    }
                },
                BlockType::Finally { handler } => {
                    self.pop_block();
                    // Remember the previously-active exception so it can be
                    // restored when the finally handler block is popped.
                    let prev_exc = vm.current_exception();
                    if let UnwindReason::Raising { exception } = &reason {
                        vm.set_exception(Some(exception.clone()));
                    }
                    // Stash `reason` so EndFinally can resume this unwind
                    // after the finally body runs.
                    self.push_block(BlockType::FinallyHandler {
                        reason: Some(reason),
                        prev_exc,
                    });
                    self.jump(handler);
                    return Ok(None);
                }
                BlockType::TryExcept { handler } => {
                    self.pop_block();
                    // Only a raised exception enters the except handler;
                    // other unwind reasons fall through to outer blocks.
                    if let UnwindReason::Raising { exception } = reason {
                        self.push_block(BlockType::ExceptHandler {
                            prev_exc: vm.current_exception(),
                        });
                        vm.contextualize_exception(&exception);
                        vm.set_exception(Some(exception.clone()));
                        // The handler expects the exception on the stack.
                        self.push_value(exception.into());
                        self.jump(handler);
                        return Ok(None);
                    }
                }
                BlockType::FinallyHandler { prev_exc, .. }
                | BlockType::ExceptHandler { prev_exc } => {
                    // Unwinding *through* a handler: restore the exception
                    // that was active before the handler was entered.
                    self.pop_block();
                    vm.set_exception(prev_exc);
                }
            }
        }

        // We do not have any more blocks to unwind. Inspect the reason we are here:
        match reason {
            UnwindReason::Raising { exception } => Err(exception),
            UnwindReason::Returning { value } => Ok(Some(ExecutionResult::Return(value))),
            UnwindReason::Break { .. } | UnwindReason::Continue { .. } => {
                // The compiler only emits break/continue inside a loop, so
                // reaching here without a Loop block is an interpreter bug.
                self.fatal("break or continue must occur within a loop block.")
            } // UnwindReason::NoWorries => Ok(None),
        }
    }

    /// Rotate the top `amount` stack values: TOS moves down below the
    /// other `amount - 1` values, which each shift up one slot.
    #[inline(always)]
    fn execute_rotate(&mut self, amount: usize) -> FrameResult {
        let i = self.state.stack.len() - amount;
        self.state.stack[i..].rotate_right(1);
        Ok(None)
    }

    /// Binary subscript: pops index (TOS) and object, pushes `obj[index]`.
    fn execute_subscript(&mut self, vm: &VirtualMachine) -> FrameResult {
        let b_ref = self.pop_value();
        let a_ref = self.pop_value();
        let value = a_ref.get_item(&*b_ref, vm)?;
        self.push_value(value);
        Ok(None)
    }

    /// Store subscript: pops index (TOS), object, then value, and performs
    /// `obj[index] = value`.
    fn execute_store_subscript(&mut self, vm: &VirtualMachine) -> FrameResult {
        let idx = self.pop_value();
        let obj = self.pop_value();
        let value = self.pop_value();
        obj.set_item(&*idx, value, vm)?;
        Ok(None)
    }

    /// Delete subscript: pops index (TOS) and object, performs `del obj[index]`.
    fn execute_delete_subscript(&mut self, vm: &VirtualMachine) -> FrameResult {
        let idx = self.pop_value();
        let obj = self.pop_value();
        obj.del_item(&*idx, vm)?;
        Ok(None)
    }

    /// Build a dict from the top `2 * size` stack values, consumed as
    /// alternating key/value pairs.
    fn execute_build_map(&mut self, vm: &VirtualMachine, size: u32) -> FrameResult {
        let size = size as usize;
        let map_obj = vm.ctx.new_dict();
        for (key, value) in self.pop_multiple(2 * size).tuples() {
            map_obj.set_item(&*key, value, vm)?;
        }
        self.push_value(map_obj.into());
        Ok(None)
    }

    /// Merge `size` mappings popped from the stack into a single dict for
    /// a `**kwargs`-style call, raising TypeError for non-mappings and for
    /// duplicate keyword arguments.
    fn execute_build_map_for_call(&mut self, vm: &VirtualMachine, size: u32) -> FrameResult {
        let size = size as usize;
        let map_obj = vm.ctx.new_dict();
        for obj in self.pop_multiple(size) {
            // Take all key-value pairs from the dict:
            let dict: PyDictRef = obj.downcast().map_err(|obj| {
                vm.new_type_error(format!("'{}' object is not a mapping", obj.class().name()))
            })?;
            for (key, value) in dict {
                if map_obj.contains_key(&*key, vm) {
                    let key_repr = &key.repr(vm)?;
                    let msg = format!(
                        "got multiple values for keyword argument {}",
                        key_repr.as_str()
                    );
                    return Err(vm.new_type_error(msg));
                }
                map_obj.set_item(&*key, value, vm)?;
            }
        }
        self.push_value(map_obj.into());
        Ok(None)
    }

    /// Build a slice object from stack values: `stop` then `start`, with an
    /// optional `step` on top when the `step` flag is set.
    fn execute_build_slice(&mut self, vm: &VirtualMachine, step: bool) -> FrameResult {
        let step = if step { Some(self.pop_value()) } else { None };
        let stop = self.pop_value();
        let start = self.pop_value();
        let obj = PySlice {
            start: Some(start),
            stop,
            step,
        }
        .into_ref(&vm.ctx);
        self.push_value(obj.into());
        Ok(None)
    }

    /// Pop `nargs` positional arguments from the stack into a `FuncArgs`
    /// with no keyword arguments.
    fn collect_positional_args(&mut self, nargs: u32) -> FuncArgs {
        FuncArgs {
            args: self.pop_multiple(nargs as usize).collect(),
            kwargs: IndexMap::new(),
        }
    }

    /// Pop a tuple of keyword-argument names (TOS) followed by `nargs`
    /// argument values, combining them via `FuncArgs::with_kwargs_names`.
    fn collect_keyword_args(&mut self, nargs: u32) -> FuncArgs {
        let kwarg_names = self
            .pop_value()
            .downcast::<PyTuple>()
            .expect("kwarg names should be tuple of strings")
        let args = self.pop_multiple(nargs as usize);
        let kwarg_names = kwarg_names
            .as_slice()
            .iter()
            .map(|pyobj| pyobj.payload::<PyStr>().unwrap().as_ref().to_owned());
        FuncArgs::with_kwargs_names(args, kwarg_names)
    }

    /// Collect arguments for an `f(*args, **kwargs)`-style call: when
    /// `has_kwargs` is set, a kwargs dict is popped first (all keys must
    /// be strings), then the positional-arguments object is popped and
    /// converted to a value list.
    fn collect_ex_args(&mut self, vm: &VirtualMachine, has_kwargs: bool) -> PyResult<FuncArgs> {
        let kwargs = if has_kwargs {
            let kw_dict: PyDictRef = self.pop_value().downcast().map_err(|_| {
                // TODO: check collections.abc.Mapping
                vm.new_type_error("Kwargs must be a dict.".to_owned())
            })?;
            let mut kwargs = IndexMap::new();
            for (key, value) in kw_dict.into_iter() {
                let key = key
                    .payload_if_subclass::<PyStr>(vm)
                    .ok_or_else(|| vm.new_type_error("keywords must be strings".to_owned()))?;
                kwargs.insert(key.as_str().to_owned(), value);
            }
            kwargs
        } else {
            IndexMap::new()
        };
        let args = self.pop_value();
        let args = args.try_to_value(vm)?;
        Ok(FuncArgs { args, kwargs })
    }

    /// Pop the callable from the stack, invoke it with `args`, and push
    /// the result.
    #[inline]
    fn execute_call(&mut self, args: FuncArgs, vm: &VirtualMachine) -> FrameResult {
        let func_ref = self.pop_value();
        let value = func_ref.call(args, vm)?;
        self.push_value(value);
        Ok(None)
    }

    /// Invoke a method prepared by `LoadMethod`: the stack holds (top to
    /// bottom) the function, an is-method boolean, and the target object.
    #[inline]
    fn execute_method_call(&mut self, args: FuncArgs, vm: &VirtualMachine) -> FrameResult {
        let func = self.pop_value();
        let is_method = self.pop_value().is(&vm.ctx.true_value);
        let target = self.pop_value();
        // TODO: It was PyMethod before #4873. Check if it's correct.
let func = if is_method { if let Some(descr_get) = func.class().mro_find_map(|cls| cls.slots.descr_get.load()) { let cls = target.class().to_owned().into(); descr_get(func, Some(target), Some(cls), vm)? } else { func } } else { drop(target); // should be None func }; let value = func.call(args, vm)?; self.push_value(value); Ok(None) } fn execute_raise(&mut self, vm: &VirtualMachine, kind: bytecode::RaiseKind) -> FrameResult { let cause = match kind { bytecode::RaiseKind::RaiseCause => { let val = self.pop_value(); Some(if vm.is_none(&val) { // if the cause arg is none, we clear the cause None } else { // if the cause arg is an exception, we overwrite it let ctor = ExceptionCtor::try_from_object(vm, val).map_err(|_| { vm.new_type_error( "exception causes must derive from BaseException".to_owned(), ) })?; Some(ctor.instantiate(vm)?) }) } // if there's no cause arg, we keep the cause as is bytecode::RaiseKind::Raise | bytecode::RaiseKind::Reraise => None, }; let exception = match kind { bytecode::RaiseKind::RaiseCause | bytecode::RaiseKind::Raise => { ExceptionCtor::try_from_object(vm, self.pop_value())?.instantiate(vm)? 
} bytecode::RaiseKind::Reraise => vm .topmost_exception() .ok_or_else(|| vm.new_runtime_error("No active exception to reraise".to_owned()))?, }; info!("Exception raised: {:?} with cause: {:?}", exception, cause); if let Some(cause) = cause { exception.set_cause(cause); } Err(exception) } fn builtin_coro<'a>(&self, coro: &'a PyObject) -> Option<&'a Coro> { match_class!(match coro { ref g @ PyGenerator => Some(g.as_coro()), ref c @ PyCoroutine => Some(c.as_coro()), _ => None, }) } fn _send( &self, gen: &PyObject, val: PyObjectRef, vm: &VirtualMachine, ) -> PyResult<PyIterReturn> { match self.builtin_coro(gen) { Some(coro) => coro.send(gen, val, vm), // FIXME: turn return type to PyResult<PyIterReturn> then ExecutionResult will be simplified None if vm.is_none(&val) => PyIter::new(gen).next(vm), None => { let meth = gen.get_attr("send", vm)?; PyIterReturn::from_pyresult(meth.call((val,), vm), vm) } } } fn execute_yield_from(&mut self, vm: &VirtualMachine) -> FrameResult { // Value send into iterator: let val = self.pop_value(); let coro = self.last_value_ref(); let result = self._send(coro, val, vm)?; // PyIterReturn returned from e.g. 
// gen.__next__() or gen.send()
match result {
    PyIterReturn::Return(value) => {
        // Set back program counter:
        self.update_lasti(|i| *i -= 1);
        Ok(Some(ExecutionResult::Yield(value)))
    }
    PyIterReturn::StopIteration(value) => {
        // Sub-iterator finished: replace it on the stack with its final value
        // and resume normal execution.
        let value = vm.unwrap_or_none(value);
        self.pop_value();
        self.push_value(value);
        Ok(None)
    }
}
}

/// UNPACK_EX: unpack an iterable into `before` leading values, a list holding
/// the remainder (the starred target), and `after` trailing values.
/// Stack result is pushed right-to-left so the leftmost target pops first.
fn execute_unpack_ex(&mut self, vm: &VirtualMachine, before: u8, after: u8) -> FrameResult {
    let (before, after) = (before as usize, after as usize);
    let value = self.pop_value();
    let elements: Vec<_> = value.try_to_value(vm)?;
    let min_expected = before + after;

    // `middle` is how many elements the starred target receives; underflow
    // means there were not enough values overall.
    let middle = elements.len().checked_sub(min_expected).ok_or_else(|| {
        vm.new_value_error(format!(
            "not enough values to unpack (expected at least {}, got {})",
            min_expected,
            elements.len()
        ))
    })?;

    let mut elements = elements;
    // Elements on stack from right-to-left:
    self.state
        .stack
        .extend(elements.drain(before + middle..).rev());

    let middle_elements = elements.drain(before..).collect();
    let t = vm.ctx.new_list(middle_elements);
    self.push_value(t.into());

    // Lastly the first reversed values:
    self.state.stack.extend(elements.into_iter().rev());

    Ok(None)
}

/// Unconditional jump: set the program counter to the label's target.
#[inline]
fn jump(&mut self, label: bytecode::Label) {
    let target_pc = label.0;
    vm_trace!("jump from {:?} to {:?}", self.lasti(), target_pc);
    self.update_lasti(|i| *i = target_pc);
}

/// Pop TOS, convert to bool, and jump when it equals `flag`.
#[inline]
fn jump_if(&mut self, vm: &VirtualMachine, target: bytecode::Label, flag: bool) -> FrameResult {
    let obj = self.pop_value();
    let value = obj.try_to_bool(vm)?;
    if value == flag {
        self.jump(target);
    }
    Ok(None)
}

/// JUMP_IF_{TRUE,FALSE}_OR_POP: peek at TOS; jump (leaving TOS in place) when
/// its truthiness equals `flag`, otherwise pop it.
#[inline]
fn jump_if_or_pop(
    &mut self,
    vm: &VirtualMachine,
    target: bytecode::Label,
    flag: bool,
) -> FrameResult {
    let obj = self.last_value();
    let value = obj.try_to_bool(vm)?;
    if value == flag {
        self.jump(target);
    } else {
        self.pop_value();
    }
    Ok(None)
}

/// The top of stack contains the iterator, lets push it forward
fn execute_for_iter(&mut self, vm: &VirtualMachine, target: bytecode::Label) -> FrameResult {
    let top_of_stack = PyIter::new(self.last_value());
    let next_obj = top_of_stack.next(vm);

    // Check the next object:
    match next_obj {
        Ok(PyIterReturn::Return(value)) => {
            self.push_value(value);
            Ok(None)
        }
        Ok(PyIterReturn::StopIteration(_)) => {
            // Pop iterator from stack:
            self.pop_value();
            // End of for loop
            self.jump(target);
            Ok(None)
        }
        Err(next_error) => {
            // Pop iterator from stack:
            self.pop_value();
            Err(next_error)
        }
    }
}

/// MAKE_FUNCTION: assemble a function object from values on the stack.
/// Pop order (top first): qualified name, code object, then — depending on
/// `flags` — closure tuple, annotations dict, kw-only defaults dict, defaults
/// tuple. Sets the standard dunder attributes before pushing the function.
fn execute_make_function(
    &mut self,
    vm: &VirtualMachine,
    flags: bytecode::MakeFunctionFlags,
) -> FrameResult {
    let qualified_name = self
        .pop_value()
        .downcast::<PyStr>()
        .expect("qualified name to be a string");
    let code_obj: PyRef<PyCode> = self
        .pop_value()
        .downcast()
        .expect("Second to top value on the stack must be a code object");
    let closure = if flags.contains(bytecode::MakeFunctionFlags::CLOSURE) {
        Some(PyTupleTyped::try_from_object(vm, self.pop_value()).unwrap())
    } else {
        None
    };
    let annotations = if flags.contains(bytecode::MakeFunctionFlags::ANNOTATIONS) {
        self.pop_value()
    } else {
        vm.ctx.new_dict().into()
    };
    let kw_only_defaults = if flags.contains(bytecode::MakeFunctionFlags::KW_ONLY_DEFAULTS) {
        Some(
            self.pop_value()
                .downcast::<PyDict>()
                .expect("Stack value for keyword only defaults expected to be a dict"),
        )
    } else {
        None
    };
    let defaults = if flags.contains(bytecode::MakeFunctionFlags::DEFAULTS) {
        Some(
            self.pop_value()
                .downcast::<PyTuple>()
                .expect("Stack value for defaults expected to be a tuple"),
        )
    } else {
        None
    };

    // pop argc arguments
    // argument: name, args, globals
    // let scope = self.scope.clone();
    let func_obj = PyFunction::new(
        code_obj,
        self.globals.clone(),
        closure,
        defaults,
        kw_only_defaults,
        PyMutex::new(qualified_name.clone()),
    )
    .into_pyobject(vm);

    func_obj.set_attr(identifier!(vm, __doc__), vm.ctx.none(), vm)?;

    // __name__ is the last dotted component of the qualified name.
    let name = qualified_name.as_str().split('.').next_back().unwrap();
    func_obj.set_attr(identifier!(vm, __name__), vm.new_pyobj(name), vm)?;
    func_obj.set_attr(identifier!(vm, __qualname__), qualified_name, vm)?;
    // __module__ comes from the frame's globals, defaulting to None.
    let module = vm.unwrap_or_none(self.globals.get_item_opt(identifier!(vm, __name__), vm)?);
    func_obj.set_attr(identifier!(vm, __module__), module, vm)?;
    func_obj.set_attr(identifier!(vm, __annotations__), annotations, vm)?;

    self.push_value(func_obj);
    Ok(None)
}

/// Pop the two operands (rhs first), apply the binary operator via the VM's
/// protocol methods, and push the result.
#[cfg_attr(feature = "flame-it", flame("Frame"))]
fn execute_binop(&mut self, vm: &VirtualMachine, op: bytecode::BinaryOperator) -> FrameResult {
    let b_ref = &self.pop_value();
    let a_ref = &self.pop_value();
    let value = match op {
        bytecode::BinaryOperator::Subtract => vm._sub(a_ref, b_ref),
        bytecode::BinaryOperator::Add => vm._add(a_ref, b_ref),
        bytecode::BinaryOperator::Multiply => vm._mul(a_ref, b_ref),
        bytecode::BinaryOperator::MatrixMultiply => vm._matmul(a_ref, b_ref),
        bytecode::BinaryOperator::Power => vm._pow(a_ref, b_ref, vm.ctx.none.as_object()),
        bytecode::BinaryOperator::Divide => vm._truediv(a_ref, b_ref),
        bytecode::BinaryOperator::FloorDivide => vm._floordiv(a_ref, b_ref),
        bytecode::BinaryOperator::Modulo => vm._mod(a_ref, b_ref),
        bytecode::BinaryOperator::Lshift => vm._lshift(a_ref, b_ref),
        bytecode::BinaryOperator::Rshift => vm._rshift(a_ref, b_ref),
        bytecode::BinaryOperator::Xor => vm._xor(a_ref, b_ref),
        bytecode::BinaryOperator::Or => vm._or(a_ref, b_ref),
        bytecode::BinaryOperator::And => vm._and(a_ref, b_ref),
    }?;

    self.push_value(value);
    Ok(None)
}

/// In-place variant of `execute_binop` (`+=`, `*=`, …) using the `_i*`
/// protocol methods, which fall back to the regular operators when the type
/// has no in-place slot.
fn execute_binop_inplace(
    &mut self,
    vm: &VirtualMachine,
    op: bytecode::BinaryOperator,
) -> FrameResult {
    let b_ref = &self.pop_value();
    let a_ref = &self.pop_value();
    let value = match op {
        bytecode::BinaryOperator::Subtract => vm._isub(a_ref, b_ref),
        bytecode::BinaryOperator::Add => vm._iadd(a_ref, b_ref),
        bytecode::BinaryOperator::Multiply => vm._imul(a_ref, b_ref),
        bytecode::BinaryOperator::MatrixMultiply => vm._imatmul(a_ref, b_ref),
        bytecode::BinaryOperator::Power => vm._ipow(a_ref, b_ref, vm.ctx.none.as_object()),
        bytecode::BinaryOperator::Divide => vm._itruediv(a_ref, b_ref),
        bytecode::BinaryOperator::FloorDivide => vm._ifloordiv(a_ref, b_ref),
        bytecode::BinaryOperator::Modulo => vm._imod(a_ref, b_ref),
        bytecode::BinaryOperator::Lshift => vm._ilshift(a_ref, b_ref),
        bytecode::BinaryOperator::Rshift => vm._irshift(a_ref, b_ref),
        bytecode::BinaryOperator::Xor => vm._ixor(a_ref, b_ref),
        bytecode::BinaryOperator::Or => vm._ior(a_ref, b_ref),
        bytecode::BinaryOperator::And => vm._iand(a_ref, b_ref),
    }?;

    self.push_value(value);
    Ok(None)
}

/// Pop one operand, apply the unary operator, and push the result.
/// `Not` goes through truthiness conversion rather than a type slot.
#[cfg_attr(feature = "flame-it", flame("Frame"))]
fn execute_unop(&mut self, vm: &VirtualMachine, op: bytecode::UnaryOperator) -> FrameResult {
    let a = self.pop_value();
    let value = match op {
        bytecode::UnaryOperator::Minus => vm._neg(&a)?,
        bytecode::UnaryOperator::Plus => vm._pos(&a)?,
        bytecode::UnaryOperator::Invert => vm._invert(&a)?,
        bytecode::UnaryOperator::Not => {
            let value = a.try_to_bool(vm)?;
            vm.ctx.new_bool(!value).into()
        }
    };
    self.push_value(value);
    Ok(None)
}

/// SETUP_ANNOTATIONS: ensure `__annotations__` exists in the local namespace,
/// creating an empty dict when it is missing.
#[cold]
fn setup_annotations(&mut self, vm: &VirtualMachine) -> FrameResult {
    let __annotations__ = identifier!(vm, __annotations__);
    // Try using locals as dict first, if not, fallback to generic method.
    let has_annotations = match self
        .locals
        .clone()
        .into_object()
        .downcast_exact::<PyDict>(vm)
    {
        Ok(d) => d.contains_key(__annotations__, vm),
        Err(o) => {
            let needle = __annotations__.to_object();
            self._in(vm, needle, &o)?
        }
    };
    if !has_annotations {
        self.locals
            .as_object()
            .set_item(__annotations__, vm.ctx.new_dict().into(), vm)?;
    }
    Ok(None)
}

/// PRINT_EXPR: feed TOS to `sys.displayhook` (interactive-mode echo).
fn print_expr(&mut self, vm: &VirtualMachine) -> FrameResult {
    let expr = self.pop_value();
    let displayhook = vm
        .sys_module
        .get_attr("displayhook", vm)
        .map_err(|_| vm.new_runtime_error("lost sys.displayhook".to_owned()))?;
    displayhook.call((expr,), vm)?;
    Ok(None)
}

/// UNPACK_SEQUENCE: pop an iterable and push exactly `size` elements in
/// reverse order; errors on too many / too few values.
fn unpack_sequence(&mut self, size: u32, vm: &VirtualMachine) -> FrameResult {
    let value = self.pop_value();
    let elements: Vec<_> = value.try_to_value(vm).map_err(|e| {
        // Rewrap TypeError with the standard unpack wording.
        if e.class().is(vm.ctx.exceptions.type_error) {
            vm.new_type_error(format!(
                "cannot unpack non-iterable {} object",
                value.class().name()
            ))
        } else {
            e
        }
    })?;
    let msg = match elements.len().cmp(&(size as usize)) {
        std::cmp::Ordering::Equal => {
            self.state.stack.extend(elements.into_iter().rev());
            return Ok(None);
        }
        std::cmp::Ordering::Greater => {
            format!("too many values to unpack (expected {size})")
        }
        std::cmp::Ordering::Less => format!(
            "not enough values to unpack (expected {}, got {})",
            size,
            elements.len()
        ),
    };
    Err(vm.new_value_error(msg))
}

/// FORMAT_VALUE (f-strings): apply the `!s`/`!r`/`!a` conversion to TOS,
/// then format it with the spec string beneath it on the stack.
fn format_value(
    &mut self,
    conversion: bytecode::ConversionFlag,
    vm: &VirtualMachine,
) -> FrameResult {
    use bytecode::ConversionFlag;
    let value = self.pop_value();
    let value = match conversion {
        ConversionFlag::Str => value.str(vm)?.into(),
        ConversionFlag::Repr => value.repr(vm)?.into(),
        ConversionFlag::Ascii => vm.ctx.new_str(builtins::ascii(value, vm)?).into(),
        ConversionFlag::None => value,
    };

    let spec = self.pop_value();
    let formatted = vm.format(&value, spec.downcast::<PyStr>().unwrap())?;
    self.push_value(formatted.into());
    Ok(None)
}

/// Membership test via the VM's `__contains__` protocol, coerced to bool.
fn _in(&self, vm: &VirtualMachine, needle: PyObjectRef, haystack: &PyObject) -> PyResult<bool> {
    let found = vm._contains(haystack, needle)?;
    found.try_to_bool(vm)
}

/// Negated membership test (`not in`).
#[inline(always)]
fn _not_in(
    &self,
    vm: &VirtualMachine,
    needle: PyObjectRef,
    haystack: &PyObject,
) -> PyResult<bool> {
    Ok(!self._in(vm, needle, haystack)?)
}

/// Identity / membership / exception-match tests; pops both operands and
/// pushes a bool.
#[cfg_attr(feature = "flame-it", flame("Frame"))]
fn execute_test(&mut self, vm: &VirtualMachine, op: bytecode::TestOperator) -> FrameResult {
    let b = self.pop_value();
    let a = self.pop_value();
    let value = match op {
        bytecode::TestOperator::Is => a.is(&b),
        bytecode::TestOperator::IsNot => !a.is(&b),
        bytecode::TestOperator::In => self._in(vm, a, &b)?,
        bytecode::TestOperator::NotIn => self._not_in(vm, a, &b)?,
        bytecode::TestOperator::ExceptionMatch => a.is_instance(&b, vm)?,
    };

    self.push_value(vm.ctx.new_bool(value).into());
    Ok(None)
}

/// Rich comparison (`<`, `==`, …) of the top two stack values.
#[cfg_attr(feature = "flame-it", flame("Frame"))]
fn execute_compare(
    &mut self,
    vm: &VirtualMachine,
    op: bytecode::ComparisonOperator,
) -> FrameResult {
    let b = self.pop_value();
    let a = self.pop_value();
    let value = a.rich_compare(b, op.into(), vm)?;
    self.push_value(value);
    Ok(None)
}

/// LOAD_ATTR: replace TOS with `TOS.<names[attr]>`.
fn load_attr(&mut self, vm: &VirtualMachine, attr: bytecode::NameIdx) -> FrameResult {
    let attr_name = self.code.names[attr as usize];
    let parent = self.pop_value();
    let obj = parent.get_attr(attr_name, vm)?;
    self.push_value(obj);
    Ok(None)
}

/// STORE_ATTR: pop target then value, assign `target.<name> = value`.
fn store_attr(&mut self, vm: &VirtualMachine, attr: bytecode::NameIdx) -> FrameResult {
    let attr_name = self.code.names[attr as usize];
    let parent = self.pop_value();
    let value = self.pop_value();
    parent.set_attr(attr_name, value, vm)?;
    Ok(None)
}

/// DELETE_ATTR: pop target and delete `target.<name>`.
fn delete_attr(&mut self, vm: &VirtualMachine, attr: bytecode::NameIdx) -> FrameResult {
    let attr_name = self.code.names[attr as usize];
    let parent = self.pop_value();
    parent.del_attr(attr_name, vm)?;
    Ok(None)
}

/// Push a new block frame, recording the stack depth to restore on pop.
fn push_block(&mut self, typ: BlockType) {
    self.state.blocks.push(Block {
        typ,
        level: self.state.stack.len(),
    });
}

/// Pop the innermost block and truncate the value stack to its saved level.
fn pop_block(&mut self) -> Block {
    let block = self.state.blocks.pop().expect("No more blocks to pop!");
    self.state.stack.truncate(block.level);
    block
}

/// Peek at the innermost block, if any.
#[inline]
fn current_block(&self) -> Option<Block> {
    self.state.blocks.last().cloned()
}

/// Push onto the bounded value stack; overflowing max_stackdepth is a
/// compiler-invariant violation, hence fatal.
#[inline]
fn push_value(&mut self, obj: PyObjectRef) {
    match self.state.stack.try_push(obj) {
        Ok(()) => {}
        Err(_e) => self.fatal("tried to push value onto stack but overflowed max_stackdepth"),
    }
}

/// Pop from the value stack; an empty stack here is a bytecode bug.
#[inline]
fn pop_value(&mut self) -> PyObjectRef {
    match self.state.stack.pop() {
        Some(x) => x,
        None => self.fatal("tried to pop value but there was nothing on the stack"),
    }
}

/// Drain the top `count` values (oldest first in the returned iterator).
fn pop_multiple(&mut self, count: usize) -> crate::common::boxvec::Drain<PyObjectRef> {
    let stack_len = self.state.stack.len();
    self.state.stack.drain(stack_len - count..)
}

/// Clone of the top stack value without popping.
#[inline]
fn last_value(&self) -> PyObjectRef {
    self.last_value_ref().to_owned()
}

/// Borrow of the top stack value; empty stack is fatal.
#[inline]
fn last_value_ref(&self) -> &PyObject {
    match &*self.state.stack {
        [.., last] => last,
        [] => self.fatal("tried to get top of stack but stack is empty"),
    }
}

/// Borrow the value `depth` entries below the top (0 = TOS).
#[inline]
fn nth_value(&self, depth: u32) -> &PyObject {
    let stack = &self.state.stack;
    &stack[stack.len() - depth as usize - 1]
}

/// Abort on a broken interpreter invariant, dumping the frame first.
#[cold]
#[inline(never)]
fn fatal(&self, msg: &'static str) -> ! {
    dbg!(self);
    panic!("{}", msg)
}
}

impl fmt::Debug for Frame {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let state = self.state.lock();
        let stack_str = state
            .stack
            .iter()
            .map(|elem| {
                // Avoid recursing into nested frames on the stack.
                if elem.payload_is::<Frame>() {
                    "\n > {frame}".to_owned()
                } else {
                    format!("\n > {elem:?}")
                }
            })
            .collect::<String>();
        let block_str = state
            .blocks
            .iter()
            .map(|elem| format!("\n > {elem:?}"))
            .collect::<String>();
        // TODO: fix this up
        let locals = self.locals.clone();
        write!(
            f,
            "Frame Object {{ \n Stack:{}\n Blocks:{}\n Locals:{:?}\n}}",
            stack_str,
            block_str,
            locals.into_object()
        )
    }
}
//! Data structures to export a type as though it were a module.

use std::fmt::{self, Display};

use core_extensions::SelfOps;

use crate::{reflection::ModReflMode, type_layout::*};

/// One exported item: a field, function, or nested module view of a type.
#[derive(Debug, Serialize, Deserialize)]
pub struct MRItem {
    // Name of the item as exposed to reflection consumers.
    item_name: String,
    // Printable type of the item.
    type_: String,
    // How the field is reached (direct, accessor method, opaque, …).
    field_accessor: MRFieldAccessor,
    // Flattened so the variant's fields serialize at this level.
    #[serde(flatten)]
    variant: MRItemVariant,
}

/// A `name: type` pair (function parameter or return value).
#[derive(Debug, Serialize, Deserialize)]
pub struct MRNameType {
    name: String,
    type_: String,
}

/// What kind of item an `MRItem` is; tagged as `variant` in serialized form.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "variant")]
pub enum MRItemVariant {
    Function(MRFunction),
    Module(MRModule),
    Static,
}

/// Reflection view of a function signature.
#[derive(Debug, Serialize, Deserialize)]
pub struct MRFunction {
    params: Vec<MRNameType>,
    returns: MRNameType,
}

/// Reflection view of a type treated as a module of items.
#[derive(Debug, Serialize, Deserialize)]
pub struct MRModule {
    mod_refl_mode: MRModReflMode,
    items: Vec<MRItem>,
}

/// Serializable mirror of `ModReflMode` (payloads dropped).
#[derive(Debug, Serialize, Deserialize)]
pub enum MRModReflMode {
    Module,
    Opaque,
    DelegateDeref,
}

/// Serializable mirror of `FieldAccessor`.
#[repr(u8)]
#[derive(Debug, Serialize, Deserialize)]
pub enum MRFieldAccessor {
    /// Accessible with `self.field_name`
    Direct,
    /// Accessible with `fn field_name(&self)->FieldType`
    Method { name: Option<String> },
    /// Accessible with `fn field_name(&self)->Option<FieldType>`
    MethodOption,
    /// This field is completely inaccessible.
    Opaque,
}

impl MRItem {
    /// Builds the root reflection item ("root") for a type's layout.
    pub fn from_type_layout(layout: &'static TypeLayout) -> Self {
        let type_ = layout.full_type().to_string();
        let variant = Self::get_item_variant(layout);
        Self {
            item_name: "root".into(),
            type_,
            field_accessor: MRFieldAccessor::Direct,
            variant,
        }
    }

    /// Classifies a layout: module-mode structs/prefix types become `Module`
    /// (recursing into non-opaque fields), opaque layouts become `Static`,
    /// and deref-delegating layouts recurse into their target layout.
    fn get_item_variant(layout: &'static TypeLayout) -> MRItemVariant {
        match layout.mod_refl_mode() {
            ModReflMode::Module => {
                // Only structs and prefix types have reflectable fields.
                let fields = match layout.data() {
                    TLData::Struct { fields } => fields,
                    TLData::PrefixType(prefix) => prefix.fields,
                    TLData::Primitive { .. }
                    | TLData::Opaque { .. }
                    | TLData::Union { .. }
                    | TLData::Enum { .. } => return MRItemVariant::Static,
                };

                let items = fields
                    .iter()
                    // Opaque fields are deliberately not exported.
                    .filter(|f| f.field_accessor() != FieldAccessor::Opaque)
                    .map(|field| {
                        let (type_, variant) = if field.is_function() {
                            let func = MRFunction::from(&field.function_range().index(0));
                            (func.to_string(), MRItemVariant::Function(func))
                        } else {
                            let layout = field.layout();
                            (
                                layout.full_type().to_string(),
                                Self::get_item_variant(layout),
                            )
                        };

                        MRItem {
                            item_name: field.name().to_string(),
                            type_,
                            field_accessor: field.field_accessor().into(),
                            variant,
                        }
                    })
                    .collect::<Vec<_>>();

                MRItemVariant::Module(MRModule {
                    mod_refl_mode: layout.mod_refl_mode().into(),
                    items,
                })
            }
            ModReflMode::Opaque => MRItemVariant::Static,
            ModReflMode::DelegateDeref { layout_index } => {
                // Follow the deref target recorded in the shared vars table.
                let delegate_to = layout.shared_vars().type_layouts()[layout_index as usize];
                let inner_layout = delegate_to();
                Self::get_item_variant(inner_layout)
            }
        }
    }
}

///////////////////////////////////////////////////////////////////////////////

impl<'a> From<&'a TLFunction> for MRFunction {
    fn from(this: &'a TLFunction) -> Self {
        Self {
            params: this.get_params().map(MRNameType::from).collect::<Vec<_>>(),
            returns: this.get_return().into_::<MRNameType>(),
        }
    }
}

impl Display for MRFunction {
    /// Renders as `fn(a:T, b:U)->R`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "fn(")?;

        let param_count = self.params.len();
        for (param_i, param) in self.params.iter().enumerate() {
            Display::fmt(param, f)?;
            if param_i + 1 != param_count {
                Display::fmt(&", ", f)?;
            }
        }
        write!(f, ")")?;

        let returns = &self.returns;
        Display::fmt(&"->", f)?;
        Display::fmt(returns, f)?;
        Ok(())
    }
}

///////////////////////////////////////////////////////////////////////////////

impl From<TLField> for MRNameType {
    fn from(field: TLField) -> Self {
        let name = field.name().to_string();
        let type_ = if field.is_function() {
            field.function_range().index(0).to_string()
        } else {
            field.layout().full_type().to_string()
        };

        Self { name, type_ }
    }
}

impl Display for MRNameType {
    /// Renders as `name:type`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}:{}", self.name, self.type_)
    }
}

///////////////////////////////////////////////////////////////////////////////

impl From<ModReflMode> for MRModReflMode {
    fn from(this: ModReflMode) -> Self {
        match this {
            ModReflMode::Module { .. } => MRModReflMode::Module,
            ModReflMode::Opaque { .. } => MRModReflMode::Opaque,
            ModReflMode::DelegateDeref { .. } => MRModReflMode::DelegateDeref,
        }
    }
}

///////////////////////////////////////////////////////////////////////////////

impl From<FieldAccessor> for MRFieldAccessor {
    fn from(this: FieldAccessor) -> MRFieldAccessor {
        match this {
            FieldAccessor::Direct => MRFieldAccessor::Direct,
            FieldAccessor::Method => MRFieldAccessor::Method { name: None },
            FieldAccessor::MethodNamed { name } => MRFieldAccessor::Method {
                name: Some(name.to_string()),
            },
            FieldAccessor::MethodOption => MRFieldAccessor::MethodOption,
            FieldAccessor::Opaque => MRFieldAccessor::Opaque,
        }
    }
}
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. pub(crate) trait U8Slice<'a>: Sized { #[inline(always)] fn start_pointer(self) -> usize { self.slice_().as_ptr() as usize } #[inline(always)] fn end_pointer(self) -> usize { self.start_pointer() + self.len_() } /// RFC 4034, Appendix B. #[inline(always)] fn key_tag(self) -> KeyTag { #[inline(always)] fn accumulate(data: &[u8], length: usize) -> u32 { let mut accumulator: u32 = 0; for index in 0 .. length { let value = data.u16_as_u32(index); accumulator += value; } accumulator } let length = self.len_(); let slice = self.slice_(); let accumulator = if length % 2 == 0 { accumulate(slice, length) } else { let last = length - 1; accumulate(slice, last) + self.u8_as_u32(last) << 8 }; let accumulator = accumulator + ((accumulator >> 16) & 0xFFFF); KeyTag((accumulator & 0xFFFF) as u16) } #[inline(always)] fn cast<T>(self, offset: usize) -> &'a T { unsafe { & * (self.get_::<T>(offset)) } } #[inline(always)] fn u8(self, offset: usize) -> u8 { self.value::<u8>(offset) } #[inline(always)] fn u8_as_u32(self, offset: usize) -> u32 { self.u8(offset) as u32 } #[inline(always)] fn u8_as_usize(self, offset: usize) -> usize { self.u8(offset) as usize } #[inline(always)] fn u16(self, offset: usize) -> u16 { self.value::<[u8; size_of::<u16>()]>(offset).from_network_endian_to_native_endian() } #[inline(always)] fn u16_as_u32(self, offset: usize) -> u32 { self.u16(offset) as u32 } #[inline(always)] fn 
u16_as_usize(self, offset: usize) -> usize { self.u16(offset) as usize } #[inline(always)] fn u32(self, offset: usize) -> u32 { self.value::<[u8; size_of::<u32>()]>(offset).from_network_endian_to_native_endian() } #[inline(always)] fn u64(self, offset: usize) -> u64 { self.value::<[u8; size_of::<u64>()]>(offset).from_network_endian_to_native_endian() } #[inline(always)] fn u16_network_endian(self, offset: usize) -> u16 { self.value::<u16>(offset) } #[inline(always)] fn value<T: Copy>(self, offset: usize) -> T { unsafe { * self.get_::<T>(offset) } } #[doc(hidden)] #[inline(always)] fn get_<T>(self, offset: usize) -> *const T { (unsafe { self.slice_().get_unchecked(offset) }).unsafe_cast::<T>() } #[doc(hidden)] #[inline(always)] fn len_(self) -> usize { self.slice_().len() } #[doc(hidden)] fn slice_(self) -> &'a [u8]; } impl<'a> U8Slice<'a> for &'a [u8] { #[inline(always)] fn slice_(self) -> &'a [u8] { self } } impl<'a> U8Slice<'a> for &'a mut [u8] { #[inline(always)] fn slice_(self) -> &'a [u8] { self } } impl<'a> U8Slice<'a> for &'a [u8; 2] { #[inline(always)] fn slice_(self) -> &'a [u8] { &self[..] } } impl<'a> U8Slice<'a> for &'a mut [u8; 2] { #[inline(always)] fn slice_(self) -> &'a [u8] { &self[..] } } impl<'a> U8Slice<'a> for &'a [u8; 4] { #[inline(always)] fn slice_(self) -> &'a [u8] { &self[..] } } impl<'a> U8Slice<'a> for &'a mut [u8; 4] { #[inline(always)] fn slice_(self) -> &'a [u8] { &self[..] } }
/*!
    Native Windows GUI menu base.
*/
use winapi::shared::windef::{HMENU, HWND};
use winapi::shared::minwindef::UINT;
use super::base_helper::{CUSTOM_ID_BEGIN, to_utf16};
use crate::controls::ControlHandle;
use crate::{NwgError};
use std::{mem, ptr};
use std::sync::atomic::{AtomicU32, Ordering};

// Monotonic source of unique menu-item command ids, starting above the
// reserved range.
static MENU_ITEMS_ID: AtomicU32 = AtomicU32::new(CUSTOM_ID_BEGIN);

/// Build a system menu
///
/// Creates one of four things depending on the flags: a separator (`separator`),
/// a popup/context menu (`popup`), a menu item (`item`), or a sub-menu, attached
/// either to a window (`hwnd`) or a parent menu (`hmenu`).
pub unsafe fn build_hmenu_control(text: Option<String>, item: bool, separator: bool, popup: bool, hmenu: Option<HMENU>, hwnd: Option<HWND>) -> Result<ControlHandle, NwgError> {
    use winapi::um::winuser::{CreateMenu, CreatePopupMenu, GetMenu, SetMenu, DrawMenuBar, AppendMenuW};
    use winapi::um::winuser::{MF_STRING, MF_POPUP};

    // Separators can only live inside an existing menu.
    if separator {
        if hmenu.is_none() { return Err(NwgError::menu_create("Separator without parent")); }
        return Ok(build_hmenu_separator(hmenu.unwrap()));
    }

    // Popup (context) menus are owned by a window, not appended anywhere.
    if popup {
        if hwnd.is_none() { return Err(NwgError::menu_create("Popup menu without parent")); }
        let menu = CreatePopupMenu();
        if menu.is_null() { return Err(NwgError::menu_create("Popup menu creation failed")); }
        use_menu_command(menu);
        return Ok(ControlHandle::PopMenu(hwnd.unwrap(), menu));
    }

    let mut parent_menu: HMENU = ptr::null_mut();
    let mut menu: HMENU = ptr::null_mut();
    let mut item_id = 0;
    let mut flags = MF_STRING;
    // A sub-menu (as opposed to a leaf item) must carry MF_POPUP.
    if !item { flags |= MF_POPUP; }
    let text = to_utf16(text.unwrap_or("".to_string()).as_ref());

    if hwnd.is_some() {
        // Attach to a window's menu bar.
        let hwnd = hwnd.unwrap();
        let mut menubar = GetMenu(hwnd);
        if menubar.is_null() {
            // If the window do not have a menu bar, create one
            menubar = CreateMenu();
            use_menu_command(menubar);
            SetMenu(hwnd, menubar);
        }
        if item {
            menu = menubar;
            item_id = MENU_ITEMS_ID.fetch_add(1, Ordering::SeqCst);
            AppendMenuW(menubar, flags, item_id as usize, text.as_ptr());
        } else {
            parent_menu = menubar;
            menu = CreateMenu();
            if menu.is_null() { return Err(NwgError::menu_create("Menu without parent")); }
            use_menu_command(menu);
            // With MF_POPUP the uIDNewItem parameter carries the sub-menu handle.
            AppendMenuW(menubar, flags, mem::transmute(menu), text.as_ptr());
        }
        // Draw the menu bar to make sure the changes are visible
        DrawMenuBar(hwnd);
    } else if hmenu.is_some() {
        // Attach to an existing parent menu.
        let parent = hmenu.unwrap();
        if item {
            menu = parent;
            item_id = MENU_ITEMS_ID.fetch_add(1, Ordering::SeqCst);
            AppendMenuW(parent, flags, item_id as usize, text.as_ptr());
        } else {
            parent_menu = parent;
            menu = CreateMenu();
            if menu.is_null() { return Err(NwgError::menu_create("Menu without parent")); }
            use_menu_command(menu);
            // With MF_POPUP the uIDNewItem parameter carries the sub-menu handle.
            AppendMenuW(parent, flags, mem::transmute(menu), text.as_ptr());
        }
    }

    if item {
        Ok(ControlHandle::MenuItem(menu, item_id))
    } else {
        Ok(ControlHandle::Menu(parent_menu, menu))
    }
}

/**
    Enable or disable a menuitem at the selected position or using the selected ID.
    If the position is None and id is None, the last item is selected.
*/
pub unsafe fn enable_menuitem(h: HMENU, pos: Option<UINT>, id: Option<UINT>, enabled: bool) {
    use winapi::um::winuser::{MENUITEMINFOW, MIIM_STATE, MFS_DISABLED, MFS_ENABLED};
    use winapi::um::winuser::{SetMenuItemInfoW, GetMenuItemCount};
    use winapi::shared::minwindef::BOOL;

    // `id` takes precedence over `pos` when both are provided.
    let use_position = id.is_none();
    let choice = if use_position { pos } else { id };
    let value = match choice {
        Some(p) => p,
        None => (GetMenuItemCount(h) - 1) as u32
    };

    let state = match enabled {
        true => MFS_ENABLED,
        false => MFS_DISABLED
    };

    // Only MIIM_STATE is set, so every other field is ignored by the API.
    let mut info = MENUITEMINFOW {
        cbSize: mem::size_of::<MENUITEMINFOW>() as UINT,
        fMask: MIIM_STATE,
        fType: 0,
        fState: state,
        wID: 0,
        hSubMenu: ptr::null_mut(),
        hbmpChecked: ptr::null_mut(),
        hbmpUnchecked: ptr::null_mut(),
        dwItemData: 0,
        dwTypeData: ptr::null_mut(),
        cch: 0,
        hbmpItem: ptr::null_mut()
    };

    SetMenuItemInfoW(h, value, use_position as BOOL, &mut info);
}

/**
    Return the state of a menuitem.

    Panic if both pos and id are None.
*/
pub unsafe fn is_menuitem_enabled(h: HMENU, pos: Option<UINT>, id: Option<UINT>) -> bool {
    use winapi::um::winuser::{MENUITEMINFOW, MIIM_STATE, MFS_DISABLED};
    use winapi::um::winuser::GetMenuItemInfoW;
    use winapi::shared::minwindef::BOOL;

    if id.is_none() && pos.is_none() {
        panic!("Both pos and id are None");
    }

    // `id` takes precedence over `pos` when both are provided.
    let use_position = id.is_none();
    let choice = if use_position { pos } else { id };
    let value = match choice {
        Some(p) => p,
        None => unreachable!()
    };

    let mut info = MENUITEMINFOW {
        cbSize: mem::size_of::<MENUITEMINFOW>() as UINT,
        fMask: MIIM_STATE,
        fType: 0,
        fState: 0,
        wID: 0,
        hSubMenu: ptr::null_mut(),
        hbmpChecked: ptr::null_mut(),
        hbmpUnchecked: ptr::null_mut(),
        dwItemData: 0,
        dwTypeData: ptr::null_mut(),
        cch: 0,
        hbmpItem: ptr::null_mut()
    };

    GetMenuItemInfoW(h, value, use_position as BOOL, &mut info);

    // Enabled means the disabled bit is NOT set.
    (info.fState & MFS_DISABLED) != MFS_DISABLED
}

/// Set the state of a menuitem
pub unsafe fn enable_menu(parent_menu: HMENU, menu: HMENU, e: bool) {
    let menu_index = menu_index_in_parent(parent_menu, menu);
    enable_menuitem(parent_menu, Some(menu_index), None, e);
}

/// Return the state of a menu.
pub unsafe fn is_menu_enabled(parent_menu: HMENU, menu: HMENU) -> bool {
    let menu_index = menu_index_in_parent(parent_menu, menu);
    is_menuitem_enabled(parent_menu, Some(menu_index), None)
}

/// Check or uncheck the menu item identified by its command `id`.
pub unsafe fn check_menu_item(parent_menu: HMENU, id: u32, check: bool) {
    use winapi::um::winuser::{CheckMenuItem, MF_BYCOMMAND, MF_CHECKED, MF_UNCHECKED};

    let check = match check {
        true => MF_CHECKED,
        false => MF_UNCHECKED
    };

    CheckMenuItem(parent_menu, id, MF_BYCOMMAND | check);
}

/// Return whether the menu item identified by its command `id` is checked.
pub unsafe fn menu_item_checked(parent_menu: HMENU, id: u32) -> bool {
    use winapi::um::winuser::{GetMenuState, MF_BYCOMMAND, MF_CHECKED};
    GetMenuState(parent_menu, id, MF_BYCOMMAND) & MF_CHECKED == MF_CHECKED
}

/// Appends a separator to `menu` and gives it a unique id so events can be
/// routed for it.
unsafe fn build_hmenu_separator(menu: HMENU) -> ControlHandle {
    use winapi::um::winuser::{GetMenuItemCount, SetMenuItemInfoW, AppendMenuW};
    use winapi::um::winuser::{MENUITEMINFOW, MF_SEPARATOR, MIIM_ID};
    use winapi::shared::minwindef::{BOOL};

    let item_id = MENU_ITEMS_ID.fetch_add(1, Ordering::SeqCst);

    // MF_SEPARATOR ignore the lpNewItem and uIDNewItem parameters, so they must be set using SetMenuItemInfo
    AppendMenuW(menu, MF_SEPARATOR, 0, ptr::null());

    // Set the unique id of the separator
    let pos = GetMenuItemCount(menu) - 1;
    let mut info = MENUITEMINFOW {
        cbSize: mem::size_of::<MENUITEMINFOW>() as UINT,
        fMask: MIIM_ID,
        fType: 0,
        fState: 0,
        wID: item_id,
        hSubMenu: ptr::null_mut(),
        hbmpChecked: ptr::null_mut(),
        hbmpUnchecked: ptr::null_mut(),
        dwItemData: 0,
        dwTypeData: ptr::null_mut(),
        cch: 0,
        hbmpItem: ptr::null_mut()
    };

    SetMenuItemInfoW(menu, pos as UINT, true as BOOL, &mut info);

    ControlHandle::MenuItem(menu, item_id)
}

/**
    Configure the menu to use a WM_MENUCOMMAND instead of a WM_COMMAND when its action are triggered.
    Required in order to allow nwg to dispatch the events correctly
*/
unsafe fn use_menu_command(h: HMENU) {
    use winapi::um::winuser::{MENUINFO, MNS_NOTIFYBYPOS, MIM_STYLE, SetMenuInfo};
    use winapi::shared::minwindef::DWORD;

    let mut info = MENUINFO {
        cbSize: mem::size_of::<MENUINFO>() as DWORD,
        fMask: MIM_STYLE,
        dwStyle: MNS_NOTIFYBYPOS,
        cyMax: 0,
        hbrBack: ptr::null_mut(),
        dwContextHelpID: 0,
        dwMenuData: 0
    };

    SetMenuInfo(h, &mut info);
}

/**
    Return the index of a children menu/menuitem in a parent menu.
    Panic if the menu is not found in the parent.
*/
pub unsafe fn menu_index_in_parent(parent: HMENU, menu: HMENU) -> UINT {
    use winapi::um::winuser::{GetMenuItemCount, GetSubMenu};

    let children_count = GetMenuItemCount(parent);
    let mut sub_menu: HMENU;

    for i in 0..children_count {
        sub_menu = GetSubMenu(parent, i as i32);
        if sub_menu.is_null() { continue; }
        else if sub_menu == menu { return i as UINT; }
    }

    panic!("Menu/MenuItem not found in parent!")
}
// Definition of response structure.

use std::cmp;
use std::fmt;
use std::ops::Index;

/// Representation of a header.
///
/// For convenience, the header value is trimmed at parsing time (optional spaces are
/// removed from the beginning and the end of the value).
#[derive(PartialEq, Eq, Debug)]
pub struct Header {
    name: String,
    value: Option<String>,
}

/// Builds a `Header` from any string-like name and value.
pub fn new_header<K: Into<String>, V: Into<String>>(name: K, value: V) -> Header {
    Header {
        name: name.into(),
        value: Some(value.into()),
    }
}

impl fmt::Display for Header {
    /// Renders as `Name: value`; a missing value renders as an empty string.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "{}: {}",
            self.name,
            // `as_deref` avoids the explicit `map(|s| s.as_str())`.
            self.value.as_deref().unwrap_or("")
        )
    }
}

/// Representation of an HTTP response.
#[derive(PartialEq, Eq, Debug)]
pub struct HttpResponse {
    version: (u32, u32),
    status: u32,
    headers: Vec<Header>,
    body: Vec<u8>,
}

/// Builds a response with the given version, status and headers, and an empty body.
pub fn new_response(version: (u32, u32), status: u32, headers: Vec<Header>) -> HttpResponse {
    HttpResponse {
        version,
        status,
        headers,
        body: Vec::new(),
    }
}

impl HttpResponse {
    /// Returns the status code of this response.
    pub fn status(&self) -> u32 {
        self.status
    }

    /// Returns true if this response has a header with the given `name`
    /// that matches the expected `value`.
    ///
    /// Comparisons are made in a case-insensitive manner.
    pub fn is<K: AsRef<str>, V: AsRef<str>>(&self, name: K, expected: V) -> bool {
        self[name.as_ref()].as_ref().map_or(false, |candidate| {
            candidate.eq_ignore_ascii_case(expected.as_ref())
        })
    }

    /// Returns true if this response has a header with the given `name`
    /// that has a comma-separated list of values, and one of those values
    /// matches the `expected` value.
    ///
    /// Comparisons are made in a case-insensitive manner. Each value of the comma-separated
    /// list is trimmed before comparison.
    pub fn has<K: AsRef<str>, V: AsRef<str>>(&self, name: K, expected: V) -> bool {
        self[name.as_ref()].as_ref().map_or(false, |candidate| {
            candidate.split(',').any(|item| {
                item.trim().eq_ignore_ascii_case(expected.as_ref())
            })
        })
    }

    /// Returns true if this response has a 1xx Informational status code.
    pub fn is_informational(&self) -> bool {
        self.status >= 100 && self.status < 200
    }

    /// Returns true if this response has a 2xx Successful status code.
    pub fn is_successful(&self) -> bool {
        self.status >= 200 && self.status < 300
    }

    /// Returns true if this response has a 3xx Redirection status code.
    pub fn is_redirection(&self) -> bool {
        self.status >= 300 && self.status < 400
    }

    /// Returns true if this response has a 4xx Client Error status code.
    pub fn is_client_error(&self) -> bool {
        self.status >= 400 && self.status < 500
    }

    /// Returns true if this response has a 5xx Server Error status code.
    pub fn is_server_error(&self) -> bool {
        self.status >= 500 && self.status < 600
    }

    /// Returns response body as a byte slice
    pub fn get_body(&self) -> &[u8] {
        &self.body
    }
}

/// Appends data to this response's body.
pub fn append<A: AsRef<[u8]>>(res: &mut HttpResponse, buf: A) {
    res.body.extend_from_slice(buf.as_ref());
}

// Shared "absent header" value so `Index` can hand out a reference.
const NONE: &'static Option<String> = &None;

impl<'a> Index<&'a str> for HttpResponse {
    type Output = Option<String>;

    /// Retrieve the header with the given name.
    ///
    /// Comparison is made in a case-insensitive manner.
    fn index(&self, name: &str) -> &Option<String> {
        self.headers
            .iter()
            .find(|header| name.eq_ignore_ascii_case(&header.name))
            .map(|header| &header.value)
            .unwrap_or(NONE)
    }
}

impl fmt::Display for HttpResponse {
    /// Status line, then one header per line, then up to the first 30 body
    /// bytes rendered as characters.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(
            f,
            "HTTP/{}.{} {}",
            self.version.0, self.version.1, self.status
        )?;
        for header in &self.headers {
            writeln!(f, "{}", header)?;
        }
        write!(f, "body: {} bytes = [", self.body.len())?;
        for byte in &self.body[0..cmp::min(self.body.len(), 30)] {
            write!(f, "{}", *byte as char)?;
        }
        writeln!(f, "...]")
    }
}
use core::JsRuntime;

/// Minimal embedding example: boots a `JsRuntime`, then runs a script that
/// instantiates a tiny hand-assembled WebAssembly module (a single exported
/// `add(i32, i32) -> i32` function) and calls it from JavaScript.
fn main() {
    let mut runtime = JsRuntime::new(Default::default());
    runtime
        .execute(
            "<init>",
            r#"
        const module = new WebAssembly.Module(new Uint8Array([
            0x00, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00, 0x01, 0x07, 0x01,
            0x60, 0x02, 0x7f, 0x7f, 0x01, 0x7f, 0x03, 0x02, 0x01, 0x00, 0x07,
            0x07, 0x01, 0x03, 0x61, 0x64, 0x64, 0x00, 0x00, 0x0a, 0x09, 0x01,
            0x07, 0x00, 0x20, 0x00, 0x20, 0x01, 0x6a, 0x0b
        ]));
        const instance = new WebAssembly.Instance(module);
        const a = instance.exports.add(2, 2);
        const b = instance.exports.add(4, 4);
        const c = instance.exports.add(a, b);
        runtime.print("'C' calculated from WebAssembly is: " + c);
        "#,
        )
        .unwrap();
}
#[macro_use]
mod macros;

use itertools::EitherOrBoth;
use itertools::Itertools;
#[cfg(feature = "regex")]
pub use regex;
use std::collections::{BTreeMap, HashMap};
use std::fmt;
use std::iter::FromIterator;
use std::ops::{Deref, DerefMut};

/// Splits `str` on `/` and drops empty pieces, so leading, trailing and
/// repeated slashes are ignored.
fn segmented(str: &str) -> impl Iterator<Item = &str> {
    str.split('/').filter(|seg| !seg.is_empty())
}

/// Captures from a successful [`Path::matches`] call: maps each dynamic
/// segment's parameter name to the text it matched.
pub struct PathMatch(HashMap<&'static str, String>);

impl Default for PathMatch {
    fn default() -> Self {
        Self(HashMap::new())
    }
}

impl Deref for PathMatch {
    type Target = HashMap<&'static str, String>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for PathMatch {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

/// A route pattern: an ordered list of literal and dynamic segments.
#[derive(Debug, Clone)]
pub struct Path(pub Vec<PathSegment>);

impl Path {
    /// Matches `path` against this pattern segment by segment.
    ///
    /// Returns the captured parameters on success; `None` when any segment
    /// mismatches or when the pattern and path have different segment counts
    /// (`zip_longest` then yields a one-sided element, hitting the `_` arm).
    pub fn matches(&self, path: &str) -> Option<PathMatch> {
        let mut params = PathMatch::default();
        for el in self.iter().zip_longest(segmented(path)) {
            match el {
                EitherOrBoth::Both(expected, actual) => {
                    if !expected.matches(actual) {
                        return None;
                    }
                    if let PathSegment::Dynamic(param) = expected {
                        params.insert(param.name, actual.to_owned());
                    }
                }
                _ => return None,
            }
        }
        Some(params)
    }

    /// Renders this pattern into a concrete path, substituting each dynamic
    /// segment from `params`.
    ///
    /// Consumed entries are removed from a local copy of `params`; whatever
    /// remains becomes the query string. Returns `None` when a required
    /// parameter is missing.
    pub fn replace(
        &self,
        params: &BTreeMap<&'static str, &'static str>,
    ) -> Option<PathAndQuery> {
        let mut segments = vec![];
        let mut params = params.clone();
        segments.push(""); // Workaround for leading slash
        for segment in self.iter() {
            match segment {
                PathSegment::Literal(str) => segments.push(str),
                PathSegment::Dynamic(param) => {
                    let value = params.remove(param.name)?;
                    segments.push(&value)
                }
            }
        }
        Some(PathAndQuery::new(segments).with_query(params))
    }
}

impl Deref for Path {
    type Target = Vec<PathSegment>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl fmt::Display for Path {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "/{}",
            self.iter().format_with("/", |segment, f| f(segment))
        )
    }
}

impl From<&'static str> for Path {
    fn from(str: &'static str) -> Self {
        Self(segmented(str).map(PathSegment::Literal).collect())
    }
}
/// Matcher for the value of one dynamic path segment.
#[derive(Debug, Clone)]
pub enum PathToken {
    Any,
    #[cfg(feature = "regex")]
    Regex(regex::Regex),
}

impl PathToken {
    /// Does `path` satisfy this token?
    pub fn matches(&self, path: &str) -> bool {
        match *self {
            Self::Any => true,
            #[cfg(feature = "regex")]
            Self::Regex(ref re) => re.is_match(path),
        }
    }
}

impl fmt::Display for PathToken {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Self::Any => f.write_str("*"),
            #[cfg(feature = "regex")]
            Self::Regex(re) => write!(f, "{}", re),
        }
    }
}

/// A named dynamic segment (`:name`), constrained by its token.
#[derive(Debug, Clone)]
pub struct PathParam {
    name: &'static str,
    token: PathToken,
}

impl PathParam {
    pub fn new(name: &'static str, token: PathToken) -> Self {
        PathParam { name, token }
    }
}

impl fmt::Display for PathParam {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, ":{}", self.name)
    }
}

/// One segment of a route pattern: exact text or a dynamic parameter.
#[derive(Debug, Clone)]
pub enum PathSegment {
    Literal(&'static str),
    Dynamic(PathParam),
}

impl PathSegment {
    /// A literal must equal `path` exactly; a dynamic segment defers to its
    /// token.
    fn matches(&self, path: &str) -> bool {
        match self {
            PathSegment::Literal(expected) => *expected == path,
            PathSegment::Dynamic(param) => param.token.matches(path),
        }
    }
}

impl fmt::Display for PathSegment {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            PathSegment::Literal(text) => write!(f, "{}", text),
            PathSegment::Dynamic(param) => write!(f, "{}", param),
        }
    }
}

/// A rendered path plus optional query parameters, ready to display.
#[derive(Debug, Clone)]
pub struct PathAndQuery<'a> {
    segments: Vec<&'a str>,
    query: Vec<(&'static str, &'a str)>,
}

impl<'a> PathAndQuery<'a> {
    pub fn new(segments: Vec<&'a str>) -> Self {
        PathAndQuery {
            segments,
            query: Vec::new(),
        }
    }

    /// Attaches query parameters, replacing any previously set.
    pub fn with_query(
        mut self,
        query: impl IntoIterator<Item = (&'static str, &'a str)>,
    ) -> Self {
        self.query = query.into_iter().collect();
        self
    }
}

impl<'a> fmt::Display for PathAndQuery<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Segments joined by "/"; the customary leading "" segment yields
        // the leading slash.
        for (i, segment) in self.segments.iter().enumerate() {
            if i > 0 {
                f.write_str("/")?;
            }
            f.write_str(segment)?;
        }
        // "?k=v&k2=v2" suffix, only when there is a query at all.
        if !self.query.is_empty() {
            f.write_str("?")?;
            for (i, (key, value)) in self.query.iter().enumerate() {
                if i > 0 {
                    f.write_str("&")?;
                }
                write!(f, "{}={}", key, value)?;
            }
        }
        Ok(())
    }
}
/// Sums `size` bytes starting at offset `addr` in the module's linear
/// memory and returns the total.
///
/// Two large scratch vectors are allocated first purely to exercise the
/// allocator; they are never read.
#[no_mangle]
pub fn sum(addr: i32, size: i32) -> i64 {
    // Allocate the scratch buffers in one shot. The previous version grew
    // each vec one `push` at a time from a length-1 `vec![0u8]`, paying
    // repeated reallocations (and ending up one element longer than
    // intended).
    let v_size: usize = 1_000_000_000;
    let _v = vec![0u8; v_size];
    let _u = vec![0u8; v_size];

    // SAFETY: the wasm host must pass an `addr`/`size` pair that describes
    // a valid, readable range of linear memory.
    let input = unsafe { std::slice::from_raw_parts(addr as *const u8, size as usize) };

    // Iterator summation: no per-element bounds checks.
    input.iter().map(|&byte| i64::from(byte)).sum()
}

/// Returns `adder1 + adder2`; minimal exported function for smoke tests.
#[no_mangle]
pub fn add(adder1: i32, adder2: i32) -> i32 {
    adder1 + adder2
}
/// Quadrant of a [`Rect`], named relative to the rectangle's centre.
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub enum Region {
    TL,
    TR,
    BL,
    BR,
}

/// Axis-aligned rectangle described by its centre and full extents.
#[derive(Copy, Clone, Debug)]
pub struct Rect {
    pub cx: f32,
    pub cy: f32,
    pub width: f32,
    pub height: f32,
}

impl Rect {
    /// Quadrant containing (x, y); points exactly on a centre line fall to
    /// the right / top side respectively (strict `<` comparisons).
    fn quad(self, x: f32, y: f32) -> Region {
        let is_left = x < self.cx;
        let is_bottom = y < self.cy;
        match (is_bottom, is_left) {
            (true, true) => Region::BL,
            (true, false) => Region::BR,
            (false, true) => Region::TL,
            (false, false) => Region::TR,
        }
    }

    fn top_left(self) -> Rect {
        Rect {
            cx: self.cx - self.width / 4.,
            cy: self.cy + self.height / 4.,
            width: self.width / 2.,
            height: self.height / 2.,
        }
    }

    fn top_right(self) -> Rect {
        Rect {
            cx: self.cx + self.width / 4.,
            cy: self.cy + self.height / 4.,
            width: self.width / 2.,
            height: self.height / 2.,
        }
    }

    fn bottom_left(self) -> Rect {
        Rect {
            cx: self.cx - self.width / 4.,
            cy: self.cy - self.height / 4.,
            width: self.width / 2.,
            height: self.height / 2.,
        }
    }

    fn bottom_right(self) -> Rect {
        Rect {
            cx: self.cx + self.width / 4.,
            cy: self.cy - self.height / 4.,
            width: self.width / 2.,
            height: self.height / 2.,
        }
    }

    /// Quarter of this rect covering the given quadrant.
    pub fn sub_rect(self, region: Region) -> Rect {
        match region {
            Region::TL => self.top_left(),
            Region::TR => self.top_right(),
            Region::BL => self.bottom_left(),
            Region::BR => self.bottom_right(),
        }
    }

    pub fn left(self) -> f32 {
        self.cx - self.width / 2.
    }

    pub fn bottom(self) -> f32 {
        self.cy - self.height / 2.
    }

    pub fn width(self) -> f32 {
        self.width
    }

    pub fn height(self) -> f32 {
        self.height
    }
}

/// Index of a [`Node`] inside [`Quadtree::nodes`].
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub struct NodeId {
    index: usize,
}

impl NodeId {
    pub fn new(index: usize) -> NodeId {
        NodeId { index }
    }
}

/// One child slot of a node: vacant, a point with a multiplicity counter,
/// or a link to a subdivided child node.
#[derive(Copy, Clone, Debug)]
pub enum Element {
    Empty,
    Leaf {
        x: f32,
        y: f32,
        n: usize,
        value: f32,
    },
    Node {
        node_id: NodeId,
    },
}

/// Internal node: four child slots, the region of space it covers, and
/// caller-defined payload.
#[derive(Clone, Debug)]
pub struct Node<T> {
    top_left: Box<Element>,
    top_right: Box<Element>,
    bottom_left: Box<Element>,
    bottom_right: Box<Element>,
    rect: Rect,
    data: T,
}

/// Point quadtree backed by a flat `Vec` of nodes addressed by [`NodeId`].
#[derive(Clone, Debug)]
pub struct Quadtree<T> {
    root: NodeId,
    nodes: Vec<Node<T>>,
}

impl<T: Default> Node<T> {
    pub fn new(rect: Rect) -> Node<T> {
        Node {
            top_left: Box::new(Element::Empty),
            top_right: Box::new(Element::Empty),
            bottom_left: Box::new(Element::Empty),
            bottom_right: Box::new(Element::Empty),
            rect,
            data: T::default(),
        }
    }

    /// Copy of the child element in `region` (`Element` is `Copy`).
    pub fn child(&self, region: Region) -> Element {
        match region {
            Region::TL => *self.top_left,
            Region::TR => *self.top_right,
            Region::BL => *self.bottom_left,
            Region::BR => *self.bottom_right,
        }
    }

    /// Overwrites the child slot in `region`.
    pub fn insert(&mut self, region: Region, element: Element) {
        match region {
            Region::TL => self.top_left = Box::new(element),
            Region::TR => self.top_right = Box::new(element),
            Region::BL => self.bottom_left = Box::new(element),
            Region::BR => self.bottom_right = Box::new(element),
        }
    }
}

impl<T: Default> Quadtree<T> {
    pub fn new(rect: Rect) -> Quadtree<T> {
        let mut nodes = Vec::new();
        nodes.push(Node::<T>::new(rect));
        Quadtree {
            root: NodeId { index: 0 },
            nodes,
        }
    }

    pub fn root(&self) -> NodeId {
        self.root
    }

    /// Descends from `u` toward (x, y), stopping at the deepest node whose
    /// relevant child slot is not another node. Returns that node and the
    /// quadrant the point falls in.
    pub fn find(&self, u: NodeId, x: f32, y: f32) -> (NodeId, Region) {
        let node = &self.nodes[u.index];
        let region = node.rect.quad(x, y);
        let child = node.child(region);
        match child {
            Element::Node { node_id: v } => self.find(v, x, y),
            _ => (u, region),
        }
    }

    /// Inserts the point (x, y) with `value`, subdividing as needed.
    ///
    /// Re-inserting an existing point increments that leaf's counter.
    /// Returns the node and quadrant where the point ended up.
    pub fn insert(&mut self, u: NodeId, x: f32, y: f32, value: f32) -> (NodeId, Region) {
        let (v, region) = self.find(u, x, y);
        match self.nodes[v.index].child(region) {
            Element::Empty => self.insert_to_empty(v, region, x, y, value),
            Element::Leaf {
                x: x0,
                y: y0,
                n,
                value,
            } => {
                // BUG FIX: this previously tested `x == x` (always true), so
                // any two distinct points sharing a y coordinate were merged
                // into one leaf instead of triggering a subdivision.
                if x == x0 && y == y0 {
                    self.increment_leaf(v, region, x, y, n, value)
                } else {
                    self.insert_to_leaf(v, region, x, y, x0, y0, n, value)
                }
            }
            _ => {
                // `find` only ever stops at an Empty or Leaf child.
                panic!("unexpected");
            }
        }
    }

    /// Places a fresh leaf (count 1) into an empty child slot.
    fn insert_to_empty(
        &mut self,
        u: NodeId,
        region: Region,
        x: f32,
        y: f32,
        value: f32,
    ) -> (NodeId, Region) {
        let node = self.nodes.get_mut(u.index).unwrap();
        node.insert(
            region,
            Element::Leaf {
                x,
                y,
                n: 1,
                value,
            },
        );
        (u, region)
    }

    /// Splits an occupied slot: allocates a child node covering that
    /// quadrant, re-homes the existing leaf (x0, y0) inside it, then
    /// recurses to place the new point. NOTE(review): two distinct but
    /// extremely close points recurse until float subdivision separates
    /// them — confirm inputs are well separated.
    fn insert_to_leaf(
        &mut self,
        u: NodeId,
        region: Region,
        x: f32,
        y: f32,
        x0: f32,
        y0: f32,
        n: usize,
        value: f32,
    ) -> (NodeId, Region) {
        let index = self.nodes.len();
        let rect = self.nodes[u.index].rect.sub_rect(region);
        self.nodes.push(Node::new(rect));
        let new_node = NodeId { index };
        self.nodes[u.index].insert(region, Element::Node { node_id: new_node });
        let region = self.nodes[new_node.index].rect.quad(x0, y0);
        self.nodes[new_node.index].insert(
            region,
            Element::Leaf {
                x: x0,
                y: y0,
                n,
                value,
            },
        );
        self.insert(new_node, x, y, value)
    }

    /// Bumps the multiplicity of an existing leaf for a repeated point.
    fn increment_leaf(
        &mut self,
        u: NodeId,
        region: Region,
        x: f32,
        y: f32,
        n: usize,
        value: f32,
    ) -> (NodeId, Region) {
        self.nodes[u.index].insert(
            region,
            Element::Leaf {
                x,
                y,
                n: n + 1,
                value,
            },
        );
        (u, region)
    }

    pub fn rect(&self, u: NodeId) -> Rect {
        self.nodes[u.index].rect
    }

    pub fn element(&self, u: NodeId, region: Region) -> Element {
        self.nodes[u.index].child(region)
    }

    /// All four child slots of `u`, each paired with its quadrant.
    pub fn elements(&self, u: NodeId) -> [(Box<Element>, Region); 4] {
        let node = &self.nodes[u.index];
        [
            (node.top_left.clone(), Region::TL),
            (node.top_right.clone(), Region::TR),
            (node.bottom_left.clone(), Region::BL),
            (node.bottom_right.clone(), Region::BR),
        ]
    }

    pub fn data(&self, u: NodeId) -> &T {
        &self.nodes[u.index].data
    }

    pub fn data_mut(&mut self, u: NodeId) -> &mut T {
        &mut self.nodes[u.index].data
    }
}

#[cfg(test)]
mod tests {
    use super::{Element, Quadtree, Rect, Region};

    fn make_tree() -> Quadtree<()> {
        Quadtree::new(Rect {
            cx: 0.,
            cy: 0.,
            width: 100.,
            height: 100.,
        })
    }

    #[test]
    fn test_find() {
        let tree = make_tree();
        let root = tree.root();
        let (node_id, region) = tree.find(root, 10., 10.);
        assert!(node_id.index == 0);
        assert!(region == Region::TR);
    }

    #[test]
    fn test_insert() {
        let mut tree = make_tree();
        let root = tree.root();
        let (node_id, region) = tree.insert(root, 10., 10., 0.);
        assert!(node_id.index == 0);
        assert!(region == Region::TR);
        let (node_id, region) = tree.insert(root, 20., 40., 0.);
        assert!(node_id.index == 1);
        assert!(region == Region::TL);
        let (node_id, region) = tree.insert(root, 10., 30., 0.);
        assert!(node_id.index == 2);
        assert!(region == Region::BL);
    }

    #[test]
    fn test_elements() {
        let tree = make_tree();
        let root = tree.root();
        for &(ref e, _) in tree.elements(root).iter() {
            assert!(match **e {
                Element::Empty => true,
                _ => false,
            });
        }
    }
}
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::alloc::Layout; use std::cmp::Ordering; use std::fmt; use std::marker::PhantomData; use std::ops::Sub; use std::sync::Arc; use common_arrow::arrow::bitmap::Bitmap; use common_exception::ErrorCode; use common_exception::Result; use common_expression::type_check::check_number; use common_expression::types::number::Number; use common_expression::types::number::UInt8Type; use common_expression::types::ArgType; use common_expression::types::BooleanType; use common_expression::types::DataType; use common_expression::types::DateType; use common_expression::types::NumberDataType; use common_expression::types::NumberType; use common_expression::types::TimestampType; use common_expression::types::ValueType; use common_expression::with_integer_mapped_type; use common_expression::Column; use common_expression::ColumnBuilder; use common_expression::Expr; use common_expression::FunctionContext; use common_expression::Scalar; use common_io::prelude::*; use num_traits::AsPrimitive; use serde::de::DeserializeOwned; use serde::Deserialize; use serde::Serialize; use super::AggregateFunctionRef; use super::AggregateNullVariadicAdaptor; use super::StateAddr; use crate::aggregates::aggregate_function_factory::AggregateFunctionDescription; use crate::aggregates::assert_unary_params; use crate::aggregates::assert_variadic_arguments; use crate::aggregates::AggregateFunction; use crate::BUILTIN_FUNCTIONS; 
/// Intermediate state for `window_funnel`: the raw (timestamp, event-level)
/// pairs observed so far, plus a flag recording whether the list is already
/// sorted so the sort can be skipped at merge/finalize time.
#[derive(Serialize, Deserialize)]
struct AggregateWindowFunnelState<T> {
    #[serde(bound(deserialize = "T: DeserializeOwned"))]
    pub events_list: Vec<(T, u8)>,
    pub sorted: bool,
}

// Note: `AsPrimitive` implies `Copy + 'static`, which is what lets
// `merge` move pairs out of `events_list` by index below.
impl<T> AggregateWindowFunnelState<T>
where T: Ord
        + Sub<Output = T>
        + AsPrimitive<u64>
        + Serialize
        + DeserializeOwned
        + Clone
        + Send
        + Sync
{
    pub fn new() -> Self {
        Self {
            events_list: Vec::new(),
            sorted: true,
        }
    }

    /// Records one event occurrence, keeping the `sorted` flag accurate by
    /// comparing against the current tail (timestamp first, then level).
    #[inline(always)]
    fn add(&mut self, timestamp: T, event: u8) {
        if self.sorted && !self.events_list.is_empty() {
            // Guarded by `self.sorted`, so once the flag turns false a later
            // in-order push can never flip it back to true.
            let last = self.events_list.last().unwrap();
            if last.0 == timestamp {
                self.sorted = last.1 <= event;
            } else {
                self.sorted = last.0 <= timestamp;
            }
        }
        self.events_list.push((timestamp, event));
    }

    /// Merges another state into this one: sorts both sides, then performs
    /// a classic two-way merge so the combined list stays sorted.
    #[inline(always)]
    fn merge(&mut self, other: &mut Self) {
        if other.events_list.is_empty() {
            return;
        }
        let l1 = self.events_list.len();
        let l2 = other.events_list.len();
        self.sort();
        other.sort();
        let mut merged = Vec::with_capacity(self.events_list.len() + other.events_list.len());
        // Order by timestamp, breaking ties by event level.
        let cmp = |a: &(T, u8), b: &(T, u8)| {
            let ord = a.0.cmp(&b.0);
            if ord == Ordering::Equal {
                a.1.cmp(&b.1)
            } else {
                ord
            }
        };
        {
            let mut i = 0;
            let mut j = 0;
            while i < l1 && j < l2 {
                if cmp(&self.events_list[i], &other.events_list[j]) == Ordering::Less {
                    merged.push(self.events_list[i]);
                    i += 1;
                } else {
                    merged.push(other.events_list[j]);
                    j += 1;
                }
            }
            // At most one of the two tails is non-empty here.
            if i < l1 {
                merged.extend(self.events_list[i..].iter());
            }
            if j < l2 {
                merged.extend(other.events_list[j..].iter());
            }
        }
        self.events_list = merged;
    }

    /// Sorts by (timestamp, event level); no-op when already sorted.
    #[inline(always)]
    fn sort(&mut self) {
        let cmp = |a: &(T, u8), b: &(T, u8)| {
            let ord = a.0.cmp(&b.0);
            if ord == Ordering::Equal {
                a.1.cmp(&b.1)
            } else {
                ord
            }
        };
        if !self.sorted {
            self.events_list.sort_by(cmp);
        }
    }

    fn serialize(&self, writer: &mut Vec<u8>) -> Result<()> {
        serialize_into_buf(writer, self)
    }

    fn deserialize(&mut self, reader: &mut &[u8]) -> Result<()> {
        *self = deserialize_from_slice(reader)?;
        Ok(())
    }
}

/// `window_funnel(window)(timestamp, event_1, ..., event_N)` aggregate:
/// finds the deepest event chain 1 -> 2 -> ... reached within a sliding
/// time window (see `get_event_level`).
#[derive(Clone)]
pub struct AggregateWindowFunnelFunction<T> {
    display_name: String,
    _arguments: Vec<DataType>,
    // Number of boolean event-condition arguments (columns 1..=event_size).
    event_size: usize,
    // Window length, in the same units as the timestamp column.
    window: u64,
    t: PhantomData<T>,
}

impl<T> AggregateFunction for AggregateWindowFunnelFunction<T>
where
    T: ArgType + Send + Sync,
    T::Scalar: Number + Ord + Sub<Output = T::Scalar> + AsPrimitive<u64> + Clone + Serialize + DeserializeOwned + 'static,
{
    fn name(&self) -> &str {
        "AggregateWindowFunnelFunction"
    }

    fn return_type(&self) -> Result<DataType> {
        Ok(DataType::Number(NumberDataType::UInt8))
    }

    fn init_state(&self, place: StateAddr) {
        place.write(AggregateWindowFunnelState::<T::Scalar>::new);
    }

    fn state_layout(&self) -> Layout {
        Layout::new::<AggregateWindowFunnelState<T::Scalar>>()
    }

    /// Batch accumulate into a single state: column 0 is the timestamp,
    /// columns 1..=event_size the boolean conditions. A true bit at row
    /// `row` for condition `i` records event level `i + 1` at that row's
    /// timestamp; rows invalidated by `validity` are skipped.
    fn accumulate(
        &self,
        place: StateAddr,
        columns: &[Column],
        validity: Option<&Bitmap>,
        _input_rows: usize,
    ) -> Result<()> {
        let mut dcolumns = Vec::with_capacity(self.event_size);
        for i in 0..self.event_size {
            let dcolumn = BooleanType::try_downcast_column(&columns[i + 1]).unwrap();
            dcolumns.push(dcolumn);
        }

        let tcolumn = T::try_downcast_column(&columns[0]).unwrap();

        let state = place.get::<AggregateWindowFunnelState<T::Scalar>>();
        match validity {
            Some(bitmap) => {
                for ((row, timestamp), valid) in
                    T::iter_column(&tcolumn).enumerate().zip(bitmap.iter())
                {
                    if valid {
                        let timestamp = T::to_owned_scalar(timestamp);
                        for (i, filter) in dcolumns.iter().enumerate() {
                            if filter.get_bit(row) {
                                state.add(timestamp, (i + 1) as u8);
                            }
                        }
                    }
                }
            }
            None => {
                for (row, timestamp) in T::iter_column(&tcolumn).enumerate() {
                    let timestamp = T::to_owned_scalar(timestamp);
                    for (i, filter) in dcolumns.iter().enumerate() {
                        if filter.get_bit(row) {
                            state.add(timestamp, (i + 1) as u8);
                        }
                    }
                }
            }
        }
        Ok(())
    }

    /// Grouped accumulate: row `row` contributes to the state located at
    /// `places[row] + offset`.
    fn accumulate_keys(
        &self,
        places: &[StateAddr],
        offset: usize,
        columns: &[Column],
        _input_rows: usize,
    ) -> Result<()> {
        let mut dcolumns = Vec::with_capacity(self.event_size);
        for i in 0..self.event_size {
            let dcolumn = BooleanType::try_downcast_column(&columns[i + 1]).unwrap();
            dcolumns.push(dcolumn);
        }
        let tcolumn = T::try_downcast_column(&columns[0]).unwrap();

        for ((row, timestamp), place) in T::iter_column(&tcolumn).enumerate().zip(places.iter()) {
            let state = (place.next(offset)).get::<AggregateWindowFunnelState<T::Scalar>>();
            let timestamp = T::to_owned_scalar(timestamp);
            for (i, filter) in dcolumns.iter().enumerate() {
                if filter.get_bit(row) {
                    state.add(timestamp, (i + 1) as u8);
                }
            }
        }
        Ok(())
    }

    /// Accumulates a single input row.
    fn accumulate_row(&self, place: StateAddr, columns: &[Column], row: usize) -> Result<()> {
        let tcolumn = T::try_downcast_column(&columns[0]).unwrap();
        // `row` is assumed in-bounds for every column — guaranteed by the
        // caller (hence the unchecked index below).
        let timestamp = unsafe { T::index_column_unchecked(&tcolumn, row) };
        let timestamp = T::to_owned_scalar(timestamp);
        let state = place.get::<AggregateWindowFunnelState<T::Scalar>>();
        for i in 0..self.event_size {
            let dcolumn = BooleanType::try_downcast_column(&columns[i + 1]).unwrap();
            if dcolumn.get_bit(row) {
                state.add(timestamp, (i + 1) as u8);
            }
        }
        Ok(())
    }

    fn serialize(&self, place: StateAddr, writer: &mut Vec<u8>) -> Result<()> {
        let state = place.get::<AggregateWindowFunnelState<T::Scalar>>();
        AggregateWindowFunnelState::<T::Scalar>::serialize(state, writer)
    }

    fn deserialize(&self, place: StateAddr, reader: &mut &[u8]) -> Result<()> {
        let state = place.get::<AggregateWindowFunnelState<T::Scalar>>();
        state.deserialize(reader)
    }

    fn merge(&self, place: StateAddr, rhs: StateAddr) -> Result<()> {
        let rhs = rhs.get::<AggregateWindowFunnelState<T::Scalar>>();
        let state = place.get::<AggregateWindowFunnelState<T::Scalar>>();
        state.merge(rhs);
        Ok(())
    }

    #[allow(unused_mut)]
    fn merge_result(&self, place: StateAddr, builder: &mut ColumnBuilder) -> Result<()> {
        let builder = UInt8Type::try_downcast_builder(builder).unwrap();
        let result = self.get_event_level(place);
        builder.push(result);
        Ok(())
    }

    // The state owns a heap-allocated Vec, so it cannot be dropped by
    // simply freeing the arena.
    fn need_manual_drop_state(&self) -> bool {
        true
    }

    unsafe fn drop_state(&self, place: StateAddr) {
        let state = place.get::<AggregateWindowFunnelState<T::Scalar>>();
        std::ptr::drop_in_place(state);
    }

    fn get_own_null_adaptor(
        &self,
        _nested_function: AggregateFunctionRef,
        _params: Vec<Scalar>,
        _arguments: Vec<DataType>,
    ) -> Result<Option<AggregateFunctionRef>> {
        Ok(Some(AggregateNullVariadicAdaptor::<false>::create(
            Arc::new(self.clone()),
        )))
    }
}

impl<T> fmt::Display for AggregateWindowFunnelFunction<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.display_name)
    }
}

impl<T> AggregateWindowFunnelFunction<T>
where
    T: ArgType + Send + Sync,
    T::Scalar: Number + Ord + Sub<Output = T::Scalar> + AsPrimitive<u64> + Clone + Serialize + DeserializeOwned + 'static,
{
    /// Builds the function: the single parameter is evaluated as a constant
    /// and cast to UInt64 to become the sliding-window length.
    pub fn try_create(
        display_name: &str,
        params: Vec<Scalar>,
        arguments: Vec<DataType>,
    ) -> Result<AggregateFunctionRef> {
        let event_size = arguments.len() - 1;
        let window = check_number(
            None,
            FunctionContext::default(),
            &Expr::<usize>::Cast {
                span: None,
                is_try: false,
                expr: Box::new(Expr::Constant {
                    span: None,
                    scalar: params[0].clone(),
                    data_type: params[0].as_ref().infer_data_type(),
                }),
                dest_type: DataType::Number(NumberDataType::UInt64),
            },
            &BUILTIN_FUNCTIONS,
        )?;
        Ok(Arc::new(Self {
            display_name: display_name.to_owned(),
            _arguments: arguments,
            event_size,
            window,
            t: PhantomData,
        }))
    }

    /// Loop through the entire events_list, updating the event timestamp values.
    /// The level path must be 1---2---3---...---check_events_size; find the max event level that satisfied the path in the sliding window.
    /// If found, returns the max event level, else returns 0.
    /// The algorithm complexity is O(n).
    /// Computes the funnel depth reached for this state.
    ///
    /// After sorting, the event list is walked once. A level-1 event
    /// (re)starts a chain at its timestamp; a level-k event extends the
    /// chain only when a level-(k-1) timestamp is recorded and this event
    /// lies within `window` of the chain's starting timestamp (which is
    /// propagated forward through the slots). The answer is the highest
    /// filled slot; O(n) after the sort.
    fn get_event_level(&self, place: StateAddr) -> u8 {
        let state = place.get::<AggregateWindowFunnelState<T::Scalar>>();
        if state.events_list.is_empty() {
            return 0;
        }
        // A single-condition funnel is trivially satisfied by any event.
        if self.event_size == 1 {
            return 1;
        }

        state.sort();

        // events_timestamp[k] holds the chain-start timestamp once an event
        // of level k+1 has been reached within the window.
        let mut events_timestamp: Vec<Option<T::Scalar>> = Vec::with_capacity(self.event_size);
        for _i in 0..self.event_size {
            events_timestamp.push(None);
        }
        for (timestamp, event) in state.events_list.iter() {
            let event_idx = (event - 1) as usize;
            if event_idx == 0 {
                // Level 1 restarts the chain at this (later) timestamp.
                events_timestamp[event_idx] = Some(timestamp.to_owned());
            } else if let Some(v) = events_timestamp[event_idx - 1] {
                // we already sort the events_list, so `timestamp >= v` here
                let window: u64 = timestamp.sub(v).as_();
                if window <= self.window {
                    // Propagate the chain-start timestamp to this level.
                    events_timestamp[event_idx] = events_timestamp[event_idx - 1];
                }
            }
        }

        // Highest filled slot = deepest level reached.
        for i in (0..self.event_size).rev() {
            if events_timestamp[i].is_some() {
                return i as u8 + 1;
            }
        }
        0
    }
}

/// Factory entry point: validates that there is exactly one parameter (the
/// window), that the argument count is within (1, 32), and that every
/// argument after the timestamp is boolean, then instantiates the function
/// for the concrete timestamp column type.
pub fn try_create_aggregate_window_funnel_function(
    display_name: &str,
    params: Vec<Scalar>,
    arguments: Vec<DataType>,
) -> Result<AggregateFunctionRef> {
    assert_unary_params(display_name, params.len())?;
    assert_variadic_arguments(display_name, arguments.len(), (1, 32))?;
    // Every argument after the timestamp must be a boolean event condition.
    for (idx, arg) in arguments[1..].iter().enumerate() {
        if !arg.is_boolean() {
            return Err(ErrorCode::BadDataValueType(format!(
                "Illegal type of the argument {:?} in AggregateWindowFunnelFunction, must be boolean, got: {:?}",
                idx + 1, arg
            )));
        }
    }

    // Dispatch on the timestamp column type: any integer, Date, or Timestamp.
    with_integer_mapped_type!(|NUM_TYPE| match &arguments[0] {
        DataType::Number(NumberDataType::NUM_TYPE) => AggregateWindowFunnelFunction::<
            NumberType<NUM_TYPE>,
        >::try_create(
            display_name, params, arguments
        ),
        DataType::Date =>
            AggregateWindowFunnelFunction::<DateType>::try_create(display_name, params, arguments),
        DataType::Timestamp => AggregateWindowFunnelFunction::<TimestampType>::try_create(
            display_name, params, arguments
        ),
        _ => Err(ErrorCode::BadDataValueType(format!(
            "AggregateWindowFunnelFunction does not support type '{:?}'",
            arguments[0]
        ))),
    })
}

pub fn aggregate_window_funnel_function_desc() -> AggregateFunctionDescription {
    AggregateFunctionDescription::creator(Box::new(try_create_aggregate_window_funnel_function))
}
use std::env;
use std::fmt;
use std::io::BufReader;
use std::io::BufRead;
use std::fs::File;
use std::ops::Add;
use std::ops::Div;
use std::collections::HashMap;
use std::collections::HashSet;

/// An unsigned 2-D grid coordinate.
#[derive(Copy,Clone,Hash,Eq,PartialEq,PartialOrd,Ord,Debug)]
struct Point {
    x: u32,
    y: u32,
}

/// L1 (taxicab) distance between two points; computed with explicit
/// branches because the coordinates are unsigned.
fn manhattan_distance(p: &Point, q: &Point) -> u32 {
    let delta_x = if p.x >= q.x { p.x - q.x } else { q.x - p.x };
    let delta_y = if p.y >= q.y { p.y - q.y } else { q.y - p.y };
    delta_x + delta_y
}

impl fmt::Display for Point {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "({},{})", self.x, self.y)
    }
}

impl Add<u32> for Point {
    type Output = Point;

    /// Adds a scalar to both coordinates.
    fn add(self, other: u32) -> Point {
        Point { x: self.x + other, y: self.y + other }
    }
}

impl<'a> Add<&'a Point> for Point {
    type Output = Point;

    fn add(self, other: &'a Point) -> Point {
        Point {
            x: self.x + other.x,
            y: self.y + other.y,
        }
    }
}

impl Div<u32> for Point {
    type Output = Point;

    /// Divides both coordinates by a scalar (integer division).
    fn div(self, other: u32) -> Point {
        Point { x: self.x / other, y: self.y / other }
    }
}

impl<'a> Div<&'a Point> for Point {
    type Output = Point;

    fn div(self, other: &'a Point) -> Point {
        Point {
            x: self.x / other.x,
            y: self.y / other.y,
        }
    }
}

impl std::str::FromStr for Point {
    type Err = std::num::ParseIntError;

    /// Parses "x,y" with optional surrounding parentheses and whitespace.
    /// NOTE: input with no comma still panics on indexing; the error type
    /// only covers integer parsing.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let coords: Vec<&str> = s.trim_matches(|p| p == '(' || p == ')' )
            .split(',')
            .map(|x| x.trim())
            .collect();

        let x_fromstr = coords[0].parse::<u32>()?;
        let y_fromstr = coords[1].parse::<u32>()?;

        Ok(Point { x: x_fromstr, y: y_fromstr })
    }
}

/// AoC 2018 day 6 part 1: prints the point owning the largest finite
/// closest-point area and that area's size.
///
/// Cells equidistant to several points belong to no one; a point whose area
/// touches the bounding box is treated as infinite and excluded.
fn do_part_one(points: Vec<Point>) {
    let min_x = 0;
    let min_y = 0;
    let max_x = points.iter().map(|p| p.x).max().unwrap();
    let max_y = points.iter().map(|p| p.y).max().unwrap();

    let mut infinite = HashSet::new();
    let mut areas = HashMap::new();
    for i in min_x..=max_x {
        for j in min_y..=max_y {
            let q = Point { x: i, y: j };
            let distances: Vec<u32> = points.iter()
                .map(|p| manhattan_distance(p, &q))
                .collect();
            let min_distance = distances.iter().min().unwrap();
            let closest: Vec<Point> = points.iter().zip(distances.iter())
                .filter(|(_, d)| *d == min_distance)
                .map(|(p, _)| *p)
                .collect();
            // Only unambiguous (non-tied) cells count toward an area.
            if closest.len() == 1 {
                let p = closest[0];
                let area = areas.entry(p).or_insert(0);
                *area += 1;
                // Owning a border cell means the area extends forever.
                if i == min_x || i == max_x || j == min_y || j == max_y {
                    infinite.insert(p);
                }
            }
        }
    }

    for p in infinite.iter() {
        areas.remove(p);
    }

    let max_p = areas.keys().max_by_key(|p| areas.get(*p).unwrap()).unwrap();
    let max_area = areas.get(max_p).unwrap();
    println!("{}: {}", max_p, max_area);
}

/// AoC 2018 day 6 part 2: prints how many cells have a total distance to
/// all points strictly less than `max_dist`.
fn do_part_two(points: Vec<Point>, max_dist: u32) {
    let min_x = 0;
    let min_y = 0;
    let max_x = points.iter().map(|p| p.x).max().unwrap();
    let max_y = points.iter().map(|p| p.y).max().unwrap();

    let mut region = HashSet::new();
    for i in min_x..=max_x {
        for j in min_y..=max_y {
            let q = Point { x: i, y: j };
            // Sum directly; no need to materialize the distance vector.
            let total_distance: u32 = points.iter()
                .map(|p| manhattan_distance(p, &q))
                .sum();
            if total_distance < max_dist {
                region.insert(q);
            }
        }
    }
    println!("{}", region.len());
}

/// Usage: `<prog> <input-file> <part> [max-dist]` (max-dist required for
/// part 2).
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 3 {
        panic!("Too few arguments!")
    }

    let f = File::open(&args[1]).expect("File not found!");
    let reader = BufReader::new(&f);
    let part: u32 = args[2].parse().expect("Invalid part!");

    let points: Vec<Point> = reader
        .lines()
        .map(|l| l.unwrap().trim().parse::<Point>().expect("Invalid point!"))
        .collect();

    if part == 1 {
        do_part_one(points);
    } else {
        // Part 2 needs a fourth argument; fail with a clear message instead
        // of an out-of-bounds index panic.
        let max_dist: u32 = args
            .get(3)
            .expect("Too few arguments!")
            .parse()
            .expect("Invalid max distance!");
        do_part_two(points, max_dist);
    }
}
use crate::constants::*;
use crate::obstacle::Obstacle;
use crate::rocket::Rocket;
use crate::target::Target;
use rand::seq::SliceRandom;
use sdl2::rect::Point;

pub const ROCKET_HEIGHT: u32 = 15;
pub const ROCKET_WIDTH: u32 = 3;

/// A generation of rockets evolved with a simple genetic algorithm using
/// fitness-proportionate selection via a pool of repeated indices.
pub struct Population {
    pub rockets: Vec<Rocket>,
    // Spawn point shared by every rocket in every generation.
    origin: Point,
    // Rocket indices, each repeated proportionally to its normalized
    // fitness; parents are drawn uniformly from this pool.
    mating_pool: Vec<usize>,
    generation: u32,
}

impl Population {
    /// Creates `capacity` rockets named "Rocket#<i>", all spawned at (x, y)
    /// with fresh (None) DNA.
    ///
    /// NOTE(review): `evaluate`/`natural_selection` iterate over
    /// `POPULATION_SIZE`, not `capacity`; the two must agree or the index
    /// accesses below will panic — confirm callers pass POPULATION_SIZE.
    pub fn new(capacity: usize, x: i32, y: i32) -> Self {
        let mut rockets = Vec::new();
        for i in 0..capacity {
            let mut name = String::from("Rocket#");
            name.push_str(&i.to_string());
            rockets.push(Rocket::new(
                name,
                Point::new(x, y),
                ROCKET_HEIGHT,
                ROCKET_WIDTH,
                None,
            ));
        }
        Population {
            rockets,
            origin: Point::new(x, y),
            mating_pool: Vec::new(),
            generation: 0,
        }
    }

    /// Evaluates every rocket based on its fitness
    ///
    /// Computes each rocket's fitness, prints generation statistics,
    /// normalizes fitness by the generation maximum, then rebuilds the
    /// mating pool with each rocket appearing `fitness * 100` times.
    pub fn evaluate(&mut self, target: &Target, obstacle: &Obstacle) {
        let mut max_fitness = 0.0;
        self.mating_pool = Vec::new();
        self.generation += 1;
        println!("\nGeneration #{:} finished:", self.generation);

        let mut average = 0.0;
        let mut num_reached = 0;

        // Iterate over the entire population
        for i in 0..POPULATION_SIZE {
            // Calculate each one's fitness
            self.rockets[i].calculate_fitness(target, obstacle);

            // And calculate the maximum fitness
            if self.rockets[i].fitness > max_fitness {
                max_fitness = self.rockets[i].fitness;
            }

            average += self.rockets[i].fitness;
            if self.rockets[i].reached {
                num_reached += 1;
            }
        }

        println!(
            " - Average fitness: {:.2}",
            average / POPULATION_SIZE as f64
        );
        println!(" - Maximum fitness: {:.2}", max_fitness);
        println!(" - {:} rockets hit the target\n", num_reached);

        // NOTE(review): if every rocket scored 0 this divides by zero
        // (f64 -> NaN/inf), leaving the mating pool empty and making the
        // `choose(...).unwrap()` in natural_selection panic — confirm
        // calculate_fitness always yields a positive value for some rocket.
        for i in 0..POPULATION_SIZE {
            self.rockets[i].fitness /= max_fitness;
        }

        // Fitness-proportionate pool: index i appears ~fitness*100 times.
        for i in 0..POPULATION_SIZE {
            let n = (self.rockets[i].fitness * 100.0) as i32;
            for _ in 0..n {
                self.mating_pool.push(i);
            }
        }
    }

    /// Runs a natural selection on the current mating pool
    ///
    /// Replaces every rocket with a child of two parents drawn uniformly at
    /// random from the mating pool (crossover + low-probability mutation),
    /// spawned at the population origin.
    pub fn natural_selection(&mut self) {
        for i in 0..POPULATION_SIZE {
            // Choose two random parents
            let a = self.mating_pool.choose(&mut rand::thread_rng()).unwrap();
            let b = self.mating_pool.choose(&mut rand::thread_rng()).unwrap();

            // Pick each one's DNA
            let parent_a = &self.rockets[*a].dna;
            let parent_b = &self.rockets[*b].dna;

            // Cross both DNAs over
            let mut child = parent_a.crossover(parent_b);

            // Apply some random low-probability mutation
            child.mutate();

            let mut name = String::from("Rocket#");
            name.push_str(&i.to_string());

            self.rockets[i] = Rocket::new(
                name,
                Point::new(self.origin.x(), self.origin.y()),
                ROCKET_HEIGHT,
                ROCKET_WIDTH,
                Some(child),
            );
        }
    }
}
use crate::mechanics::damage::DamageMultiplier;
use crate::types::effectiveness::effectiveness_maps::EffectivenessMaps;
use crate::types::MonsterType;

mod effectiveness_maps;

lazy_static! {
    // Global type-effectiveness lookup table, built lazily on first access.
    pub static ref EFFECTIVENESS_MAPS: EffectivenessMaps = EffectivenessMaps::default();
}

impl MonsterType {
    /// Returns a `DamageMultiplier` that indicates the effectiveness of `self` on another type
    pub fn effectiveness_on_type<T: AsRef<MonsterType>>(&self, other_type: T) -> DamageMultiplier {
        EFFECTIVENESS_MAPS.get_effectiveness_for_type(self, other_type.as_ref())
    }
}

#[cfg(test)]
mod tests {
    use crate::mechanics::damage::DamageMultiplier;
    use crate::types::effectiveness::*;
    use crate::types::MonsterType;

    #[test]
    fn test_effectiveness_on_type_fire_on_water() {
        assert_eq!(
            MonsterType::FIRE.effectiveness_on_type(MonsterType::WATER),
            DamageMultiplier::HALF
        );
    }

    #[test]
    fn test_effectiveness_on_type_fire_on_plant() {
        assert_eq!(
            MonsterType::FIRE.effectiveness_on_type(MonsterType::PLANT),
            DamageMultiplier::DOUBLE
        );
    }

    #[test]
    fn test_effectiveness_maps() {
        // Exercises the map directly, bypassing the MonsterType wrapper.
        let eff: DamageMultiplier =
            EFFECTIVENESS_MAPS.get_effectiveness_for_type(&MonsterType::FIRE, &MonsterType::WATER);
        assert_eq!(eff, DamageMultiplier::HALF)
    }
}
use parser::{Parser, Error as ParseError};
use std::fs::File;
use std::io::Error as IoError;
use std::io::prelude::*;
use std::path::Path;

/// Represents the contents of a material file that has been loaded into memory but has not been
/// sent to the renderer.
#[derive(Debug, PartialEq, Eq)]
pub struct MaterialSource {
    pub properties: Vec<PropertySource>,
    pub programs: Vec<ProgramSource>,
}

impl MaterialSource {
    /// Reads the file at `path` and parses it as material source.
    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<MaterialSource, Error> {
        let mut file = File::open(&path)?;
        let mut contents = String::new();
        file.read_to_string(&mut contents)?;
        MaterialSource::from_str(&*contents)
    }

    /// Parses material source text.
    ///
    /// NOTE(review): this inherent method shadows the `str::parse`/`FromStr`
    /// convention; consider implementing `std::str::FromStr` instead.
    pub fn from_str<T: AsRef<str>>(source: T) -> Result<MaterialSource, Error> {
        let mut parser = Parser::new(source.as_ref());
        parser.parse().map_err(|error| error.into())
    }
}

/// Represents a program item parsed from a material file.
///
/// TODO: Document the different variants.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ProgramSource {
    Vertex(String),
    Fragment(String),
}

impl ProgramSource {
    /// Checks if the program source is a vertex shader.
    pub fn is_vertex(&self) -> bool {
        match *self {
            ProgramSource::Vertex(_) => true,
            _ => false,
        }
    }

    /// Checks if the program source is a fragment shader.
    pub fn is_fragment(&self) -> bool {
        match *self {
            ProgramSource::Fragment(_) => true,
            _ => false,
        }
    }

    /// Returns the raw shader source text, whichever variant this is.
    pub fn source(&self) -> &str {
        match *self {
            ProgramSource::Vertex(ref source) => &*source,
            ProgramSource::Fragment(ref source) => &*source,
        }
    }
}

/// A named, typed property declaration parsed from a material file.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct PropertySource {
    pub name: String,
    pub property_type: PropertyType,
}

/// The declared type of a material property.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[allow(bad_style)]
pub enum PropertyType {
    Color,
    Texture2d,
    f32,
    Vector3,
}

/// Represents an error in parsing a material source file.
#[derive(Debug)]
pub enum Error {
    IoError(IoError),
    ParseError(ParseError),
}

impl PartialEq for Error {
    /// I/O errors never compare equal (std `io::Error` is not `PartialEq`);
    /// parse errors compare by value.
    ///
    /// NOTE(review): `parse_error` is bound by value out of `*self`, which
    /// requires `ParseError: Copy` — confirm the parser error type is Copy.
    fn eq(&self, other: &Error) -> bool {
        match *self {
            Error::IoError(_) => false,
            Error::ParseError(parse_error) => match *other {
                Error::IoError(_) => false,
                Error::ParseError(other_parse_error) => parse_error == other_parse_error
            }
        }
    }
}

impl From<ParseError> for Error {
    fn from(error: ParseError) -> Error {
        Error::ParseError(error)
    }
}

impl From<IoError> for Error {
    fn from(error: IoError) -> Error {
        Error::IoError(error)
    }
}
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::hash::Hash; // `len()` returns an Option thus `is_empty()` can not be provided. #[allow(clippy::len_without_is_empty)] pub trait Filter: Sized { type CodecError: std::error::Error; /// The number of **cardinal** keys built into the filter. /// /// An implementation that does not store keys count should return `None`. fn len(&self) -> Option<usize> { None } /// Check if the given key is in the filter. /// /// False positive: returning `true` only a key **probably** presents. fn contains<K: ?Sized + Hash>(&self, key: &K) -> bool; /// Check if the pre-computed digest is in the filter. fn contains_digest(&self, digest: u64) -> bool; /// Serialize the filter. fn to_bytes(&self) -> Result<Vec<u8>, Self::CodecError>; /// Deserialize the binary array to a filter. fn from_bytes(buf: &[u8]) -> Result<(Self, usize), Self::CodecError>; } pub trait FilterBuilder { type Filter: Filter; type Error: std::error::Error; /// Add a key into the filter for building. fn add_key<K: Hash>(&mut self, key: &K); /// Add several keys into the filter for building. /// /// This methods can be called more than once. fn add_keys<K: Hash>(&mut self, keys: &[K]); /// Populate with pre-compute collection of 64-bit digests. fn add_digests<'i, I: IntoIterator<Item = &'i u64>>(&mut self, digests: I); /// Build the filter with added keys. fn build(&mut self) -> Result<Self::Filter, Self::Error>; }
/// 01-matrix: returns, for each cell of `mat`, the distance to the nearest 0
/// (cells containing 0 have distance 0; unreachable cells stay `i32::MAX`).
///
/// Implemented as a multi-source BFS seeded with every zero cell, so each
/// cell is finalized the first time it is reached. Runs in O(rows * cols).
///
/// Changes vs. the previous version: drops the unnecessary `RefCell`
/// (single-threaded, plain `mut` suffices), uses a FIFO queue for true BFS
/// instead of LIFO re-relaxation, and no longer panics on an empty matrix.
pub fn update_matrix(mat: Vec<Vec<i32>>) -> Vec<Vec<i32>> {
    if mat.is_empty() || mat[0].is_empty() {
        return mat;
    }

    let (rows, cols) = (mat.len(), mat[0].len());
    let mut distance = vec![vec![i32::MAX; cols]; rows];
    let mut queue = std::collections::VecDeque::new();

    // Every zero cell is a BFS source at distance 0.
    for r in 0..rows {
        for c in 0..cols {
            if mat[r][c] == 0 {
                distance[r][c] = 0;
                queue.push_back((r, c));
            }
        }
    }

    const DIRS: [(i32, i32); 4] = [(-1, 0), (0, -1), (1, 0), (0, 1)];
    while let Some((r, c)) = queue.pop_front() {
        for &(dr, dc) in DIRS.iter() {
            // Casting a negative result to usize wraps to a huge value, so the
            // bounds checks below also reject out-of-range neighbors.
            let (nr, nc) = ((r as i32 + dr) as usize, (c as i32 + dc) as usize);
            if nr < rows && nc < cols && distance[nr][nc] > distance[r][c] + 1 {
                distance[nr][nc] = distance[r][c] + 1;
                queue.push_back((nr, nc));
            }
        }
    }

    distance
}

#[cfg(test)]
mod update_matrix_tests {
    use super::*;

    #[test]
    fn update_matrix_test_one() {
        assert_eq!(
            update_matrix(vec![vec![0, 0, 0], vec![0, 1, 0], vec![1, 1, 1]]),
            vec![vec![0, 0, 0], vec![0, 1, 0], vec![1, 2, 1]]
        );
    }
}
// Copyright 2016 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.

//! Server configuration: listening/advertise addresses, gRPC tuning knobs,
//! and the embedded storage / raftstore sub-configs, with defaults.

use util::collections::HashMap;

use super::Result;
pub use raftstore::store::Config as RaftStoreConfig;
pub use storage::Config as StorageConfig;

pub const DEFAULT_CLUSTER_ID: u64 = 0;
pub const DEFAULT_LISTENING_ADDR: &'static str = "127.0.0.1:20160";
// Empty string means "advertise the listening address instead".
const DEFAULT_ADVERTISE_LISTENING_ADDR: &'static str = "";
const DEFAULT_NOTIFY_CAPACITY: usize = 40960;
const DEFAULT_END_POINT_CONCURRENCY: usize = 8;
const DEFAULT_GRPC_CONCURRENCY: usize = 4;
const DEFAULT_GRPC_CONCURRENT_STREAM: usize = 1024;
const DEFAULT_GRPC_RAFT_CONN_NUM: usize = 10;
const DEFAULT_GRPC_STREAM_INITIAL_WINDOW_SIZE: usize = 2 * 1024 * 1024;
const DEFAULT_MESSAGES_PER_TICK: usize = 4096;

#[derive(Clone, Debug)]
pub struct Config {
    pub cluster_id: u64,

    // Server listening address.
    pub addr: String,

    // Server labels to specify some attributes about this server.
    pub labels: HashMap<String, String>,

    // Server advertise listening address for outer communication.
    // If not set, we will use listening address instead.
    pub advertise_addr: String,
    pub notify_capacity: usize,
    pub messages_per_tick: usize,
    pub grpc_concurrency: usize,
    pub grpc_concurrent_stream: usize,
    pub grpc_raft_conn_num: usize,
    pub grpc_stream_initial_window_size: usize,
    pub storage: StorageConfig,
    pub raft_store: RaftStoreConfig,
    pub end_point_concurrency: usize,
}

impl Default for Config {
    // Builds a config from the DEFAULT_* constants above plus the sub-configs'
    // own defaults.
    fn default() -> Config {
        Config {
            cluster_id: DEFAULT_CLUSTER_ID,
            addr: DEFAULT_LISTENING_ADDR.to_owned(),
            labels: HashMap::default(),
            advertise_addr: DEFAULT_ADVERTISE_LISTENING_ADDR.to_owned(),
            notify_capacity: DEFAULT_NOTIFY_CAPACITY,
            messages_per_tick: DEFAULT_MESSAGES_PER_TICK,
            grpc_concurrency: DEFAULT_GRPC_CONCURRENCY,
            grpc_concurrent_stream: DEFAULT_GRPC_CONCURRENT_STREAM,
            grpc_raft_conn_num: DEFAULT_GRPC_RAFT_CONN_NUM,
            grpc_stream_initial_window_size: DEFAULT_GRPC_STREAM_INITIAL_WINDOW_SIZE,
            end_point_concurrency: DEFAULT_END_POINT_CONCURRENCY,
            storage: StorageConfig::default(),
            raft_store: RaftStoreConfig::default(),
        }
    }
}

impl Config {
    pub fn new() -> Config {
        Config::default()
    }

    // Validates this config and its raftstore sub-config.
    // `try!` is kept because this file predates the `?` operator.
    pub fn validate(&self) -> Result<()> {
        try!(self.raft_store.validate());
        // NOTE(review): the message reads "server.server.end-point-concurrency";
        // the doubled "server." looks like a typo, but tools may match on this
        // exact string — confirm before changing it.
        if self.end_point_concurrency == 0 {
            return Err(box_err!("server.server.end-point-concurrency: {} is invalid, \
                                 shouldn't be 0",
                                self.end_point_concurrency));
        }
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_config_validate() {
        let mut cfg = Config::new();
        assert!(cfg.validate().is_ok());

        // Zero heartbeat ticks must be rejected by the raftstore sub-config.
        cfg.raft_store.raft_heartbeat_ticks = 0;
        assert!(cfg.validate().is_err());
    }
}
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // pretty-expanded FIXME #23616 #![feature(slice_patterns)] struct Foo(isize, isize, isize, isize); struct Bar{a: isize, b: isize, c: isize, d: isize} pub fn main() { let Foo(..) = Foo(5, 5, 5, 5); let Foo(..) = Foo(5, 5, 5, 5); let Bar{..} = Bar{a: 5, b: 5, c: 5, d: 5}; let (..) = (5, 5, 5, 5); let Foo(a, b, ..) = Foo(5, 5, 5, 5); let Foo(.., d) = Foo(5, 5, 5, 5); let (a, b, ..) = (5, 5, 5, 5); let (.., c, d) = (5, 5, 5, 5); let Bar{b: b, ..} = Bar{a: 5, b: 5, c: 5, d: 5}; match [5, 5, 5, 5] { [..] => { } } match [5, 5, 5, 5] { [a, ..] => { } } match [5, 5, 5, 5] { [.., b] => { } } match [5, 5, 5, 5] { [a, .., b] => { } } match [5, 5, 5] { [..] => { } } match [5, 5, 5] { [a, ..] => { } } match [5, 5, 5] { [.., a] => { } } match [5, 5, 5] { [a, .., b] => { } } }
// Copyright 2021 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /// catalog_interface is the module defining `Catalog` trait mod interface; /// the catalog manager implementation mod manager; pub use interface::Catalog; pub use interface::StorageDescription; pub use manager::CatalogManager; pub use manager::CATALOG_DEFAULT;
mod datatypes; mod sig; mod values; #[macro_use] extern crate serde_derive; use ethabi::param_type::ParamType; use ethabi::{decode, Address}; use serde::Serialize; use values::{token_to_value, Value}; use wasm_bindgen::prelude::*; #[cfg(feature = "wee_alloc")] #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[derive(Serialize, Debug)] #[serde(tag = "t", content = "c")] pub enum JsResult<T> { Ok(T), Err(String), } impl<T> From<Result<T, String>> for JsResult<T> { fn from(result: Result<T, String>) -> Self { match result { Ok(v) => JsResult::Ok(v), Err(e) => JsResult::Err(e), } } } impl<T> From<JsResult<T>> for JsValue where T: Serialize, { fn from(result: JsResult<T>) -> JsValue { to_js_result(&result) } } fn to_js_result<T: Serialize>(result: &JsResult<T>) -> JsValue { JsValue::from_serde(result).expect("Failed to serialize JsResult") } #[derive(Serialize, Debug)] pub struct DecodedParameters(Vec<Value>); #[wasm_bindgen] pub fn abi_decode_parameters(data: &[u8], type_list: Box<[JsValue]>) -> JsValue { let params = type_list .iter() .map(|v| match v.as_string() { Some(s) => datatypes::parse_param_type(&s), None => Err(format!("Type argument {:?} is not a string", v)), }) .collect::<Result<Vec<ParamType>, String>>(); let decoded = match params { Ok(params) => match decode(&params[..], data) { Ok(tokens) => JsResult::Ok(DecodedParameters( tokens.iter().map(|t| token_to_value(&t)).collect(), )), Err(err) => JsResult::Err(format!("Failed to decode: {:?}", err)), }, Err(e) => JsResult::Err(format!("Failed to decode: {:?}", e)), }; to_js_result(&decoded) } #[wasm_bindgen] pub fn parse_function_signature(signature: String) -> JsValue { to_js_result(&sig::parse_signature(&signature, false).into()) } #[wasm_bindgen] pub fn parse_event_signature(signature: String) -> JsValue { to_js_result(&sig::parse_signature(&signature, true).into()) } #[wasm_bindgen] pub fn is_valid_param_type(type_str: String) -> bool { match 
datatypes::parse_param_type(&type_str) { Ok(_) => true, Err(_) => false, } } #[wasm_bindgen] pub fn is_array_type(data_type: String) -> JsValue { to_js_result(&match datatypes::parse_param_type(&data_type) { Ok(ParamType::Array(_)) => JsResult::Ok(true), Ok(ParamType::FixedArray(_, _)) => JsResult::Ok(true), Ok(_) => JsResult::Ok(false), Err(e) => JsResult::Err(format!("{}", e)), }) } #[derive(Serialize, Debug)] pub struct ParamDataSize { length: usize, exact: bool, } #[wasm_bindgen] pub fn get_data_size(type_str: String) -> JsValue { to_js_result(&match datatypes::parse_param_type(&type_str) { Ok(t) => match datatypes::get_data_size(&t) { (size, exact) => JsResult::Ok(ParamDataSize { length: size, exact: exact, }), }, Err(err) => JsResult::Err(err), }) } #[wasm_bindgen] pub fn to_checksum_address(address_str: String) -> JsValue { to_js_result(&to_checksum_address_internal(address_str).into()) } pub fn to_checksum_address_internal(address_str: String) -> Result<String, String> { if !address_str.starts_with("0x") { return Err(format!( "Invalid address {:?} (expected \"0x\" prefix)", address_str )); } let decoded = match values::from_hex(&address_str) { Ok(v) => Ok(v), Err(_) => Err(format!("Invalid address {:?}", address_str)), }?; if decoded.len() != 20 { return Err(format!("Invalid size of address {}", address_str)); } let address: Address = Address::from_slice(&decoded); Ok(values::to_checksum(&address)) } const NULL_SHA3: &'static str = "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"; #[wasm_bindgen] pub fn sha3(s: String) -> Option<String> { let hash = if s.starts_with("0x") { match values::from_hex(&s) { Ok(v) => tiny_keccak::keccak256(&v), Err(_) => tiny_keccak::keccak256(s.as_bytes()), } } else { tiny_keccak::keccak256(s.as_bytes()) }; let result = values::to_hex(&hash, true); if result == NULL_SHA3 { None } else { Some(result) } } #[wasm_bindgen(start)] pub fn init_panic_hook() { // When the `console_error_panic_hook` feature is 
enabled, we can call the // `set_panic_hook` function at least once during initialization, and then // we will get better error messages if our code ever panics. // // For more details see // https://github.com/rustwasm/console_error_panic_hook#readme #[cfg(feature = "console_error_panic_hook")] console_error_panic_hook::set_once(); }
/// The kind of a status effect.
///
/// Discriminants are explicit and contiguous, suggesting they round-trip
/// through an integer field somewhere: `Unknown = -1` is the sentinel for
/// unrecognized values and `Count` equals the number of real variants —
/// TODO confirm against whatever serializes these.
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub enum Type {
    Unknown = -1,
    Burning = 0,
    Empowered = 1,
    Frozen = 2,
    Hastened = 3,
    Shielded = 4,
    Count = 5,
}

/// A status effect instance applied to a wizard.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Status {
    // Unique id of this status instance — presumably a persistent row id; verify.
    pub id: i64,
    pub type_: Type,
    // Owning wizard and player — assumed to be foreign keys; confirm with schema.
    pub wizard_id: i64,
    pub player_id: i64,
    // Remaining lifetime, measured in game ticks.
    pub remaining_duration_ticks: i32,
}
#![allow(dead_code)]
pub mod cpu;
pub mod bus;
pub mod nes;
pub mod instructions;
pub mod opcodes;

use crate::nes::NES;

#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate bitflags;

/// Entry point: builds a NES instance, loads a tiny hand-written test ROM,
/// resets the CPU, and decodes/executes it.
fn main() {
    let mut nes = NES::new();
    // Three raw 6502 bytes used as a smoke test — presumably opcode 0xC4
    // (CPY zero page) plus operands; confirm against the opcode table.
    let testrom = vec![0xc4, 0x00, 0x01];
    nes.cpu.load_rom(testrom);
    nes.cpu.reset();
    nes.cpu.decode();
}
//! Windows system calls in the `io` module. use crate::backend::c; use crate::backend::conv::{borrowed_fd, ret}; use crate::backend::fd::LibcFd; use crate::fd::{BorrowedFd, RawFd}; use crate::io; use core::mem::MaybeUninit; pub(crate) unsafe fn close(raw_fd: RawFd) { let _ = c::close(raw_fd as LibcFd); } pub(crate) fn ioctl_fionread(fd: BorrowedFd<'_>) -> io::Result<u64> { let mut nread = MaybeUninit::<c::c_ulong>::uninit(); unsafe { ret(c::ioctl(borrowed_fd(fd), c::FIONREAD, nread.as_mut_ptr()))?; Ok(u64::from(nread.assume_init())) } } pub(crate) fn ioctl_fionbio(fd: BorrowedFd<'_>, value: bool) -> io::Result<()> { unsafe { let mut data = value as c::c_uint; ret(c::ioctl(borrowed_fd(fd), c::FIONBIO, &mut data)) } }
#[doc = "Reader of register MMIS"]
pub type R = crate::R<u32, super::MMIS>;
#[doc = "Reader of field `MIS`"]
pub type MIS_R = crate::R<bool, bool>;
#[doc = "Reader of field `CLKMIS`"]
pub type CLKMIS_R = crate::R<bool, bool>;
// NOTE(review): this follows the svd2rust register-reader layout and is
// presumably machine-generated from an SVD file — prefer regenerating over
// hand-editing; confirm with the crate's build process.
impl R {
    #[doc = "Bit 0 - Masked Interrupt Status"]
    #[inline(always)]
    pub fn mis(&self) -> MIS_R {
        MIS_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Clock Timeout Masked Interrupt Status"]
    #[inline(always)]
    pub fn clkmis(&self) -> CLKMIS_R {
        CLKMIS_R::new(((self.bits >> 1) & 0x01) != 0)
    }
}
use crate::MIXER_PARAMS;
use rand::{Rng, thread_rng};
use typenum::{U16};
use pairing::bn256::{Fr};
use crate::native::{Note, WithdrawPub, WithdrawSec};
use fawkes_crypto::native::{
    num::Num,
    poseidon::{MerkleProof, poseidon_merkle_proof_root}
};

/// Builds a random but internally consistent (public, secret) input pair for
/// the withdraw circuit: a random note placed in a random depth-16 Merkle
/// path, with the matching root and nullifier derived via `MIXER_PARAMS`.
pub fn gen_test_data() -> (WithdrawPub<Fr>, WithdrawSec<Fr, U16>){
    let mut rng = thread_rng();
    let ref params = MIXER_PARAMS;

    // Random Merkle path of depth 16 (siblings plus left/right path bits).
    let proof = MerkleProof::<Fr,U16> {
        sibling: (0..16).map(|_| rng.gen()).collect(),
        path: (0..16).map(|_| rng.gen()).collect()
    };
    let note = Note::<Fr> {
        secret: rng.gen()
    };

    // Public values are derived from the secret note so the pair verifies.
    let hash = note.hash(params);
    let nullifier = note.nullifier(params);
    let root = poseidon_merkle_proof_root(hash, &proof, &params.compress);
    // `memo` is unconstrained here — presumably free-form public data; confirm.
    let memo: Num<Fr> = rng.gen();

    (WithdrawPub {root, nullifier, memo}, WithdrawSec {proof, note})
}
#![feature(test)]
extern crate test;
extern crate infotree;

use infotree::cursor;
use infotree::traits::Leaf;

type CursorMut<L> = cursor::CursorMut<L, ()>;

use test::Bencher;
use std::collections::BTreeSet;

// Number of elements pre-loaded into each structure before measuring.
const TOTAL: usize = 8192;

// Leaf carrying no aggregate info, so the benches measure raw tree overhead.
#[derive(Clone)]
struct TestLeaf(usize);

impl Leaf for TestLeaf {
    type Info = ();

    fn compute_info(&self) { }
}

// Baseline: single-element insert+remove on std's BTreeSet.
#[bench]
fn btreeset_insert_remove(b: &mut Bencher) {
    let mut tree = (1..TOTAL).collect::<BTreeSet<_>>();
    b.iter(|| {
        tree.insert(0);
        tree.remove(&0)
    })
}

// Cursor positioned once at the first leaf and kept there between iterations.
#[bench]
fn cm_insert_remove_local(b: &mut Bencher) {
    let mut cm = (1..TOTAL).map(|e| TestLeaf(e)).collect::<CursorMut<_>>();
    cm.reset();
    cm.first_leaf();
    b.iter(|| {
        cm.insert_leaf(TestLeaf(0), false);
        cm.remove_leaf()
    })
}

// Cursor reset each iteration — presumably each insert then pays the cost of
// descending from the root again; confirm against CursorMut's docs.
#[bench]
fn cm_insert_remove_reset(b: &mut Bencher) {
    let mut cm = (1..TOTAL).map(|e| TestLeaf(e)).collect::<CursorMut<_>>();
    b.iter(|| {
        cm.reset();
        cm.insert_leaf(TestLeaf(0), false);
        cm.remove_leaf()
    })
}

// Like cm_insert_remove_local, but clones the leaf-positioned cursor each
// iteration, adding clone overhead to the measurement.
#[bench]
fn cm_insert_remove_local_cloned(b: &mut Bencher) {
    let mut cm = (1..TOTAL).map(|e| TestLeaf(e)).collect::<CursorMut<_>>();
    cm.reset();
    cm.first_leaf();
    b.iter(|| {
        let mut cm = cm.clone();
        cm.insert_leaf(TestLeaf(0), false);
        cm.remove_leaf()
    })
}

// Clones the cursor while parked at the root rather than at a leaf.
#[bench]
fn cm_insert_remove_root_cloned(b: &mut Bencher) {
    let mut cm = (1..TOTAL).map(|e| TestLeaf(e)).collect::<CursorMut<_>>();
    cm.reset();
    b.iter(|| {
        let mut cm = cm.clone();
        cm.insert_leaf(TestLeaf(0), false);
        cm.remove_leaf()
    })
}
// fn main() { // let v = vec![1, 2, 3]; // because we've given i32 values, // Rust can infer that the type of v is Vec<i32>, // and the type annotation isn't necessary... // let v: Vec<i32> = Vec::new(); // let mut v = Vec::new(); // v.push(5); // v.push(6); // v.push(7); // v.push(8); // } // like any other struct, a vector will be freed when it goes out of scope // fn main() { // let v = vec![1, 2, 3, 4, 5]; // // this gives us a reference // let third: &i32 = &v[2]; // // this gives us an Option<&T> // let third: Option<&i32> = v.get(2); // // two ways to reference an element so you can choose // // how the program behaves when you try to use // // an index value that the vector doesn't have // // an element for // } fn main() { let v = vec![1, 2, 3, 4, 5]; // use this method when you want a fatal error // let does_not_exist = &v[100]; // use this if accessing an element beyond the range of the vector // happens occasionally under normal circumstances // i.e. if a user inputs a larger num than is present // you can then print all possible values to the user // and prompt them to input again instead of crashing the program // let does_not_exist = v.get(100); // immutable references let v = vec![100, 32, 57]; for i in &v { println!("{}", i); } // mutable references let mut v = vec![100, 32, 57]; for i in &mut v { // to change the value that the mutable reference refers to, // we have to use the dereference operator `*` // to get to the value in `i` before we can use the `+=` operator *i += 50; } }
#[derive(Clone, Debug)] pub(crate) struct BinRegistry { bins: std::collections::BTreeMap<String, crate::schema::Bin>, fallback: bool, } impl BinRegistry { pub(crate) fn new() -> Self { Self { bins: Default::default(), fallback: true, } } pub(crate) fn register_bin(&mut self, name: String, bin: crate::schema::Bin) { self.bins.insert(name, bin); } pub(crate) fn register_bins( &mut self, bins: impl Iterator<Item = (String, crate::schema::Bin)>, ) { self.bins.extend(bins); } pub(crate) fn resolve_bin( &self, bin: crate::schema::Bin, ) -> Result<crate::schema::Bin, crate::Error> { match bin { crate::schema::Bin::Path(path) => { let bin = crate::schema::Bin::Path(path); Ok(bin) } crate::schema::Bin::Name(name) => { let bin = self.resolve_name(&name); Ok(bin) } crate::schema::Bin::Ignore => Ok(crate::schema::Bin::Ignore), crate::schema::Bin::Error(err) => Err(err), } } pub(crate) fn resolve_name(&self, name: &str) -> crate::schema::Bin { if let Some(path) = self.bins.get(name) { return path.clone(); } if self.fallback { let path = crate::cargo::cargo_bin(name); if path.exists() { return crate::schema::Bin::Path(path); } } crate::schema::Bin::Name(name.to_owned()) } } impl Default for BinRegistry { fn default() -> Self { Self::new() } }
//! Steam Workshop publishing for gmpublisher: error types, content-path and
//! icon validation, and the tauri commands that pack a GMA and upload it.

use crate::{
	gma::{GMAEntry, GMAFile, GMAFilePointers, GMAMetadata},
	Transaction, GMOD_APP_ID,
};
use image::{DynamicImage, GenericImageView, ImageError, ImageFormat};
use parking_lot::Mutex;
use path_slash::PathBufExt;
use std::{
	fs::File,
	io::BufReader,
	mem::MaybeUninit,
	path::{Path, PathBuf},
	sync::Arc,
};
use steamworks::{PublishedFileId, SteamError};
use walkdir::WalkDir;

#[cfg(not(target_os = "windows"))]
use std::collections::HashSet;

/// Everything that can go wrong while publishing; `Display` renders each
/// variant as an `ERR_*` code string that the frontend matches on.
pub enum PublishError {
	NotWhitelisted(Vec<String>),
	NoEntries,
	DuplicateEntry(String),
	InvalidContentPath,
	MultipleGMAs,
	IconTooLarge,
	IconTooSmall,
	IconInvalidFormat,
	IOError,
	SteamError(SteamError),
	ImageError(ImageError),
}

impl std::fmt::Display for PublishError {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		match self {
			PublishError::NotWhitelisted(whitelisted) => write!(f, "ERR_WHITELIST:{}", whitelisted.join("\n")),
			PublishError::NoEntries => write!(f, "ERR_NO_ENTRIES"),
			PublishError::DuplicateEntry(path) => write!(f, "ERR_DUPLICATE_ENTRIES:{}", path),
			PublishError::InvalidContentPath => write!(f, "ERR_INVALID_CONTENT_PATH"),
			PublishError::MultipleGMAs => write!(f, "ERR_MULTIPLE_GMAS"),
			PublishError::IconTooLarge => write!(f, "ERR_ICON_TOO_LARGE"),
			PublishError::IconTooSmall => write!(f, "ERR_ICON_TOO_SMALL"),
			PublishError::IconInvalidFormat => write!(f, "ERR_ICON_INVALID_FORMAT"),
			PublishError::IOError => write!(f, "ERR_IO_ERROR"),
			PublishError::SteamError(error) => write!(f, "ERR_STEAM_ERROR:{}", error.to_string()),
			PublishError::ImageError(error) => write!(f, "ERR_IMAGE_ERROR:{}", error.to_string()),
		}
	}
}

// Serialized to the frontend as the ERR_* string produced by Display.
impl serde::Serialize for PublishError {
	fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
	where
		S: serde::Serializer,
	{
		serializer.serialize_str(&self.to_string())
	}
}

impl From<SteamError> for PublishError {
	fn from(error: SteamError) -> PublishError {
		PublishError::SteamError(error)
	}
}

impl From<ImageError> for PublishError {
	fn from(error: ImageError) -> PublishError {
		PublishError::ImageError(error)
	}
}

// The concrete io::Error is discarded; only the generic code survives.
impl From<std::io::Error> for PublishError {
	fn from(_: std::io::Error) -> PublishError {
		PublishError::IOError
	}
}

use super::Steam;

/// Path to the single `.gma` file found inside a content directory.
pub struct ContentPath(PathBuf);
impl std::ops::Deref for ContentPath {
	type Target = PathBuf;
	fn deref(&self) -> &Self::Target {
		&self.0
	}
}
impl Into<PathBuf> for ContentPath {
	fn into(self) -> PathBuf {
		self.0
	}
}
impl ContentPath {
	/// Scans `path` for exactly one `.gma` file and wraps its path.
	/// Errors with `InvalidContentPath` if `path` is not a directory and
	/// `MultipleGMAs` if more than one `.gma` is present.
	pub fn new(path: PathBuf) -> Result<ContentPath, PublishError> {
		if !path.is_dir() {
			return Err(PublishError::InvalidContentPath);
		}

		// NOTE(review): if the directory contains NO .gma file the loop never
		// writes into `gma_path`, and the `assume_init()` below is undefined
		// behavior — confirm all callers guarantee at least one .gma exists
		// (the `publish` command writes one first); returning an error for the
		// empty case would be safer.
		let mut gma_path: MaybeUninit<PathBuf> = MaybeUninit::uninit();
		for (i, path) in path
			.read_dir()?
			.filter_map(|entry| {
				entry.ok().and_then(|entry| {
					let path = entry.path();
					let extension = path.extension()?;
					if extension == "gma" {
						Some(path)
					} else {
						None
					}
				})
			})
			.enumerate()
		{
			// A second .gma makes the content ambiguous.
			if i > 0 {
				return Err(PublishError::MultipleGMAs);
			}
			unsafe {
				gma_path.as_mut_ptr().write(path);
			}
		}

		Ok(ContentPath(unsafe { gma_path.assume_init() }))
	}
}

// Workshop icon constraints (bytes).
const WORKSHOP_ICON_MAX_SIZE: u64 = 1000000;
const WORKSHOP_ICON_MIN_SIZE: u64 = 16;
const WORKSHOP_DEFAULT_ICON: &'static [u8] = include_bytes!("../../../public/img/gmpublisher_default_icon.png");

/// A user-supplied icon (with optional upscaling) or the bundled default.
pub enum WorkshopIcon {
	Custom {
		image: DynamicImage,
		path: PathBuf,
		format: ImageFormat,
		width: u32,
		height: u32,
		upscale: bool,
	},
	Default,
}
impl WorkshopIcon {
	/// Upscaling applies to non-GIF images that are smaller than 512px in
	/// either dimension or not square.
	pub fn can_upscale(width: u32, height: u32, format: ImageFormat) -> bool {
		!matches!(format, ImageFormat::Gif) && ((width < 512 || height < 512) || (width != height))
	}
}
// Materializes the icon as a file path Steam can upload: either the original
// file, an upscaled temp copy, or the default icon written to the temp dir.
impl Into<PathBuf> for WorkshopIcon {
	fn into(self) -> PathBuf {
		match self {
			WorkshopIcon::Custom {
				path,
				image,
				width,
				height,
				upscale,
				format,
			} => {
				if upscale && WorkshopIcon::can_upscale(width, height, format) {
					// GIFs never reach this branch (can_upscale rejects them),
					// so only PNG/JPEG are possible here.
					let format_extension = match format {
						ImageFormat::Png => "png",
						ImageFormat::Jpeg => "jpg",
						_ => unreachable!(),
					};

					let mut temp_img = app_data!().temp_dir().to_owned();
					temp_img.push(format!("gmpublisher_upscaled_icon.{}", format_extension));

					let image = image.resize_exact(512, 512, image::imageops::FilterType::CatmullRom);
					// If saving the upscaled copy fails, silently fall back to
					// the original file.
					match image.save_with_format(&temp_img, format) {
						Ok(_) => temp_img,
						Err(_) => path,
					}
				} else {
					path
				}
			}
			WorkshopIcon::Default => {
				let mut path = app_data!().temp_dir().to_owned();
				path.push("gmpublisher_default_icon.png");
				// NOTE(review): with `&&`, the size check only runs when the
				// file does NOT exist (where metadata() fails anyway) — an
				// existing-but-corrupt file is never rewritten. `||` looks
				// like the intended operator; confirm before changing.
				if !path.is_file() && path.metadata().map(|metadata| metadata.len()).unwrap_or(0) != WORKSHOP_DEFAULT_ICON.len() as u64 {
					std::fs::write(&path, WORKSHOP_DEFAULT_ICON).expect("Failed to write default icon to temp directory!");
				}
				path
			}
		}
	}
}
impl WorkshopIcon {
	/// Validates size (16 B – 1 MB) and format (png/gif/jpeg by extension,
	/// defaulting to jpg when there is none), then loads the image.
	pub fn new<P: AsRef<Path>>(path: P, upscale: bool) -> Result<WorkshopIcon, PublishError> {
		let path = path.as_ref();

		let len = path.metadata()?.len();
		if len > WORKSHOP_ICON_MAX_SIZE {
			return Err(PublishError::IconTooLarge);
		} else if len < WORKSHOP_ICON_MIN_SIZE {
			return Err(PublishError::IconTooSmall);
		}

		let file_extension = path.extension().and_then(|x| x.to_str()).unwrap_or("jpg").to_ascii_lowercase();
		let image_format = match file_extension.as_str() {
			"png" => ImageFormat::Png,
			"gif" => ImageFormat::Gif,
			"jpeg" | "jpg" => ImageFormat::Jpeg,
			_ => return Err(PublishError::IconInvalidFormat),
		};

		let image = image::load(BufReader::new(File::open(path)?), image_format)?;

		Ok(WorkshopIcon::Custom {
			path: path.to_path_buf(),
			width: image.width(),
			height: image.height(),
			format: image_format,
			upscale,
			image,
		})
	}
}

/// Parameters for creating a brand-new workshop item vs. updating an
/// existing one (where the icon and changelog are optional).
pub enum WorkshopUpdateType {
	Creation {
		title: String,
		path: ContentPath,
		tags: Vec<String>,
		addon_type: String,
		preview: WorkshopIcon,
	},
	Update {
		title: String,
		path: ContentPath,
		tags: Vec<String>,
		addon_type: String,
		preview: Option<WorkshopIcon>,
		changes: Option<String>,
	},
}

impl Steam {
	/// Pushes an item update to Steam and blocks, pumping Steam callbacks and
	/// reporting progress into `transaction`, until the upload completes.
	/// Returns whether the user must still accept the workshop legal agreement.
	pub fn update(&self, id: PublishedFileId, details: WorkshopUpdateType, transaction: Transaction) -> Result<bool, PublishError> {
		use WorkshopUpdateType::*;

		// The steamworks submit callback writes the result in here; the loop
		// below polls it while pumping callbacks.
		let result = Arc::new(Mutex::new(None));
		let result_ref = result.clone();
		let update_handle = match details {
			Creation {
				title,
				path,
				mut tags,
				addon_type,
				preview,
			} => {
				// Every item is tagged "Addon" plus its addon type.
				tags.reserve(tags.len() + 2);
				tags.push("Addon".to_string());
				tags.push(addon_type);
				self.client()
					.ugc()
					.start_item_update(GMOD_APP_ID, id)
					.content_path(&path)
					.title(&title)
					.preview_path(&Into::<PathBuf>::into(preview))
					.tags(tags)
					.description("Uploaded with [url=https://gmpublisher.download]gmpublisher[/url]")
					.submit(None, move |result| {
						*result_ref.lock() = Some(result);
					})
			}
			Update {
				title,
				path,
				tags,
				addon_type,
				preview,
				changes,
			} => {
				let mut tags = tags;
				tags.reserve(tags.len() + 2);
				tags.push("Addon".to_string());
				tags.push(addon_type);

				// Only set a preview when the caller supplied a new icon.
				let preview_path: Option<PathBuf> = match preview {
					Some(preview) => Some(preview.into()),
					None => None,
				};

				let update = self.client().ugc().start_item_update(GMOD_APP_ID, id);
				match preview_path {
					Some(preview_path) => update.preview_path(&preview_path),
					None => update,
				}
				.content_path(&path)
				.tags(tags)
				.title(&title)
				.submit(changes.as_deref(), move |result| {
					*result_ref.lock() = Some(result);
				})
			}
		};

		// Busy-wait: translate Steam's upload phases into transaction status
		// strings and progress until the submit callback fires.
		let result = loop {
			let (processed, progress, total) = update_handle.progress();
			if !matches!(processed, steamworks::UpdateStatus::Invalid) {
				transaction.status(match processed {
					steamworks::UpdateStatus::Invalid => unreachable!(),
					steamworks::UpdateStatus::PreparingConfig => "PUBLISH_PREPARING_CONFIG",
					steamworks::UpdateStatus::PreparingContent => "PUBLISH_PREPARING_CONTENT",
					steamworks::UpdateStatus::UploadingContent => "PUBLISH_UPLOADING_CONTENT",
					steamworks::UpdateStatus::UploadingPreviewFile => "PUBLISH_UPLOADING_PREVIEW_FILE",
					steamworks::UpdateStatus::CommittingChanges => "PUBLISH_COMMITTING_CHANGES",
				});
			}

			if total == 0 {
				transaction.progress(0.);
			} else {
				transaction.data(total);
				transaction.progress(progress as f64 / total as f64);
			}

			if !result.is_locked() && result.lock().is_some() {
				break Arc::try_unwrap(result).unwrap().into_inner().unwrap();
			} else {
				// Keep the Steam callback pump running until the result lands.
				self.run_callbacks();
			}
		};

		match result {
			Ok((_, legal_agreement)) => {
				transaction.progress(1.);
				Ok(legal_agreement)
			}
			Err(error) => Err(PublishError::SteamError(error)),
		}
	}

	/// Creates a new workshop item, then uploads its content via [`Self::update`].
	/// Returns the new item's id (if creation succeeded) alongside the upload result.
	pub fn publish(&self, details: WorkshopUpdateType, transaction: Transaction) -> (Option<PublishedFileId>, Result<bool, PublishError>) {
		debug_assert!(matches!(details, WorkshopUpdateType::Creation { .. }));

		let published = Arc::new(Mutex::new(None));
		let published_ref = published.clone();
		self.client()
			.ugc()
			.create_item(GMOD_APP_ID, steamworks::FileType::Community, move |result| {
				*published_ref.lock() = Some(result);
			});

		// Pump callbacks until create_item's callback delivers a result.
		loop {
			if let Some(published_ref) = published.try_lock() {
				if published_ref.is_some() {
					break;
				}
			}
			self.run_callbacks();
		}

		let id = match Arc::try_unwrap(published).unwrap().into_inner().unwrap() {
			Ok((id, _)) => id,
			Err(error) => return (None, Err(PublishError::SteamError(error))),
		};

		(Some(id), self.update(id, details, transaction))
	}
}

/// Walks `path` and checks every (non-ignored) file against the GMA filename
/// whitelist. Returns the whitelisted entries and their total size, or a
/// `NotWhitelisted` error listing up to 9 offenders (plus "..." if truncated).
#[tauri::command]
fn verify_whitelist(path: PathBuf) -> Result<(Vec<GMAEntry>, u64), PublishError> {
	if !path.is_dir() || !path.is_absolute() {
		return Err(PublishError::InvalidContentPath);
	}

	// Byte length of the root prefix (plus slash) to strip from entry paths.
	let root_path_strip_len = path.to_slash_lossy().len() + 1;

	// User ignore globs, NUL-terminated — presumably the terminator is the
	// format crate::gma::whitelist::is_ignored expects; confirm there.
	let ignore: Vec<String> = app_data!().settings.read().ignore_globs.iter().cloned().map(|mut glob| {
		glob.push('\0');
		glob
	}).collect();

	let mut size = 0;
	let mut failed_extra = false;
	let mut failed = Vec::with_capacity(10);
	let mut files = Vec::new();

	// On case-sensitive filesystems two paths may lowercase to the same GMA
	// entry name; Windows cannot produce such duplicates.
	#[cfg(not(target_os = "windows"))]
	let mut dedup: HashSet<String> = HashSet::new();

	for (path, relative_path) in WalkDir::new(&path)
		.contents_first(true)
		.into_iter()
		.filter_map(|entry| {
			let path = match entry {
				Ok(entry) => entry.into_path(),
				Err(err) => match err.path() {
					Some(path) => path.to_path_buf(),
					None => return None,
				},
			};
			if path.is_dir() {
				return None;
			}
			// GMA entry paths are root-relative, forward-slashed, lowercase.
			let relative_path = {
				let mut relative_path = path.to_slash_lossy();
				if relative_path.len() < root_path_strip_len {
					return None;
				}
				relative_path.split_off(root_path_strip_len).to_lowercase()
			};
			Some((path, relative_path))
		})
		.filter(|(_, relative_path)| crate::gma::whitelist::filter_default_ignored(relative_path))
		.filter(|(_, relative_path)| !crate::gma::whitelist::is_ignored(relative_path, &ignore))
	{
		#[cfg(not(target_os = "windows"))]
		{
			if !dedup.insert(relative_path.to_owned()) {
				return Err(PublishError::DuplicateEntry(relative_path));
			}
		}

		if !crate::gma::whitelist::check(&relative_path) {
			// Collect at most 9 failures, then flag that more exist.
			if failed.len() == 9 {
				failed_extra = true;
				break;
			} else {
				failed.push(relative_path);
			}
		} else if failed.is_empty() {
			// Only bother accumulating entries while nothing has failed.
			let entry_size = path.metadata().map(|metadata| metadata.len()).unwrap_or(0);
			size += entry_size;
			files.push(GMAEntry {
				path: relative_path,
				size: entry_size,
				crc: 0,
				index: 0,
			});
		}
	}

	// TODO some tasks shouldnt be cancelable (i.e. showing the cross button)

	if failed.is_empty() {
		if files.is_empty() {
			Err(PublishError::NoEntries)
		} else {
			Ok((files, size))
		}
	} else {
		failed.sort_unstable();
		if failed_extra {
			failed.push("...".to_string());
		}
		Err(PublishError::NotWhitelisted(failed))
	}
}

/// Tauri command: packs `content_path_src` into a GMA in the temp dir and
/// publishes (or, with `update_id`, updates) it on the workshop. Runs on a
/// rayon thread; returns the transaction id the frontend polls for progress.
#[tauri::command]
pub fn publish(
	content_path_src: PathBuf,
	icon_path: Option<PathBuf>,
	title: String,
	tags: Vec<String>,
	addon_type: String,
	upscale: bool,
	update_id: Option<PublishedFileId>,
	changes: Option<String>,
) -> u32 {
	let transaction = transaction!();
	let id = transaction.id;

	rayon::spawn(move || {
		// New items without a custom icon get the bundled default; updates
		// without one keep their existing workshop icon.
		let preview = match icon_path {
			Some(icon_path) => {
				transaction.status("PUBLISH_PROCESSING_ICON");
				match WorkshopIcon::new(icon_path, upscale) {
					Ok(icon) => Some(icon),
					Err(error) => {
						transaction.error(error.to_string(), turbonone!());
						return;
					}
				}
			}
			None => {
				if update_id.is_none() {
					Some(WorkshopIcon::Default)
				} else {
					None
				}
			}
		};

		transaction.status("PUBLISH_PACKING");

		let mut path = app_data!().temp_dir().to_owned();
		path.pop();
		path.push("gmpublisher_publishing");
		if let Err(_) = std::fs::create_dir_all(&path) {
			transaction.error("ERR_IO_ERROR", turbonone!());
			return;
		}
		path.push("gmpublisher.gma");

		{
			let gma = GMAFile {
				// TODO convert to GMAFile::new()
				path: path.clone(),
				size: 0,
				id: None,
				metadata: Some(GMAMetadata::Standard {
					title: title.clone(),
					addon_type: addon_type.clone(),
					tags: tags.clone(),
					ignore: app_data!().settings.read().ignore_globs.to_owned(),
				}),
				entries: None,
				pointers: GMAFilePointers::default(),
				version: 3,
				extracted_name: String::new(),
				modified: None,
				membuffer: None,
			};
			if let Err(error) = gma.create(&content_path_src, transaction.clone()) {
				if !transaction.aborted() {
					transaction.error(error.to_string(), turbonone!());
				}
				return;
			}
		}

		// Steam uploads the directory containing the freshly written GMA.
		let mut content_path = path.clone();
		content_path.pop();
		let content_path = match ContentPath::new(content_path) {
			Ok(content_path) => content_path,
			Err(error) => {
				transaction.error(error.to_string(), turbonone!());
				return;
			}
		};

		transaction.status("PUBLISH_STARTING");

		let (id, result) = if let Some(id) = update_id {
			(
				update_id,
				steam!().update(
					id,
					WorkshopUpdateType::Update {
						title,
						path: content_path,
						tags,
						addon_type,
						preview,
						changes,
					},
					transaction.clone(),
				),
			)
		} else {
			steam!().publish(
				WorkshopUpdateType::Creation {
					title,
					path: content_path,
					tags,
					addon_type,
					// Always Some here: update_id is None on this branch, so
					// the preview match above produced the default icon.
					preview: preview.unwrap(),
				},
				transaction.clone(),
			)
		};

		// Best-effort cleanup of the temp GMA.
		ignore! { std::fs::remove_file(path) };

		match result {
			Ok(legal_agreement) => {
				if legal_agreement {
					crate::path::open("https://steamcommunity.com/workshop/workshoplegalagreement");
				}

				let id = id.unwrap();
				crate::path::open(format!("https://steamcommunity.com/sharedfiles/filedetails/?id={}", id.0));

				transaction.finished(turbonone!());

				// Remember where this workshop item's source lives locally.
				app_data!().settings.write().my_workshop_local_paths.insert(id, content_path_src);
				ignore! { app_data!().settings.read().save() };
				app_data!().send();
			}
			Err(error) => {
				transaction.error(error.to_string(), turbonone!());
				// A created-but-unuploaded item would be orphaned; delete it.
				if let Some(id) = id {
					steam!().client().ugc().delete_item(id, |_| {});
				}
			}
		};
	});

	id
}

/// Tauri command: validates an icon file and returns it as a base64 data URL
/// together with whether it is eligible for upscaling; errors are reported
/// through a failed transaction.
#[tauri::command]
pub fn verify_icon(path: PathBuf) -> Result<(String, bool), Transaction> {
	WorkshopIcon::new(&path, false)
		.and_then(|icon| {
			let (prefix, can_upscale) = match icon {
				WorkshopIcon::Custom {
					format,
					width,
					height,
					..
				} => (
					format!(
						"data:image/{};base64,",
						match format {
							ImageFormat::Png => "png",
							ImageFormat::Jpeg => "jpeg",
							ImageFormat::Gif => "gif",
							// WorkshopIcon::new only produces these three formats.
							_ => unreachable!(),
						}
					),
					WorkshopIcon::can_upscale(width, height, format),
				),
				// WorkshopIcon::new never returns the Default variant.
				_ => unreachable!(),
			};
			let base64 = base64::encode(std::fs::read(path)?);
			Ok((prefix + &base64, can_upscale))
		})
		.map_err(|error| {
			let transaction = transaction!();
			transaction.error(error.to_string(), turbonone!());
			transaction
		})
}
// /* // * Copyright 2020 Fluence Labs Limited // * // * Licensed under the Apache License, Version 2.0 (the "License"); // * you may not use this file except in compliance with the License. // * You may obtain a copy of the License at // * // * http://www.apache.org/licenses/LICENSE-2.0 // * // * Unless required by applicable law or agreed to in writing, software // * distributed under the License is distributed on an "AS IS" BASIS, // * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // * See the License for the specific language governing permissions and // * limitations under the License. // */ // // #![recursion_limit = "1024"] // // pub mod constants; // // use crate::constants::NODES; // // use fluence_client::client::Client; // // use async_std::future::timeout; // use async_std::task; // use futures::channel::mpsc; // use futures::select; // use futures::stream::{FuturesUnordered, StreamExt}; // // use bencher::stats::Stats; // use futures::channel::mpsc::TrySendError; // use itertools::Itertools; // use fluence_client::ClientEvent; // use libp2p::PeerId; // use parity_multiaddr::Multiaddr; // use std::error::Error; // use std::time::{Duration, SystemTime}; // // const MSG_COUNT: u32 = 5u32; // // #[derive(Clone)] // struct Node { // address: Multiaddr, // peer_id: PeerId, // } // // impl Node { // fn new(address: Multiaddr, peer_id: PeerId) -> Self { // Node { address, peer_id } // } // } // // fn nodes() -> Vec<Node> { // NODES // .iter() // .map(|(address, peer_id)| Node::new(address.parse().unwrap(), peer_id.parse().unwrap())) // .collect() // } // // #[allow(dead_code)] // fn local_nodes() -> Vec<Node> { // vec![ // Node::new( // "/ip4/127.0.0.1/tcp/9990/ws".parse().unwrap(), // "QmY28NSCefB532XbERtnKHadexGuNzAfYnh5fJk6qhLsSi" // .parse() // .unwrap(), // ), // Node::new( // "/ip4/127.0.0.1/tcp/9991/ws".parse().unwrap(), // "Qme4XRbTMzYax1NyKp4dR4NS1bcVkF1Xw3fpWE8J1eCyRe" // .parse() // .unwrap(), // ), // ] // } // // type 
TaskId = usize; // // #[derive(Clone, Debug)] // enum Action { // Sent, // Received(Duration), // Finished, // Started, // Iterated(TaskId, u32), // Timeout, // } // // // cargo test -- --nocapture // #[test] // pub fn measure_relay_test() { // task::block_on(run_measures()).unwrap() // } // // async fn run_measures() -> Result<(), Box<dyn Error>> { // let (stat_outlet, stat_inlet) = mpsc::unbounded(); // let clients = nodes() // .into_iter() // // Take all possible 2-combinations of nodes // .tuple_combinations() // // .take(1) // // .tuples::<(_, _)>() // uncomment to take only pairs // // Create & connect clients to a corresponding nodes // .map(|(a, b)| connect_clients(a, b)) // .collect::<FuturesUnordered<_>>() // .collect::<Vec<_>>(); // // // Convert Vec of Results to Result of Vec // let mut clients: Vec<_> = clients.await.into_iter().collect::<Result<_, _>>()?; // println!( // "All clients connected. Total pairs count: {}", // clients.len() // ); // // // TODO: it works only when there's a pause between connection and message sending :( Why? 
// task::sleep(Duration::from_millis(1000)).await; // // // convert client pairs to the measure tasks // let futures: FuturesUnordered<_> = clients // .iter_mut() // .enumerate() // // Asynchronously run measuring task // .map(|(i, (a, b))| run_measure(i, a, b, MSG_COUNT, stat_outlet.clone())) // .collect(); // println!("Spawned."); // // drop(stat_outlet); // // // TODO: stats outlet is dropped before all events are received by inlet // // https://book.async.rs/tutorial/handling_disconnection.html // let mut stats = stat_inlet.fuse(); // let mut measures: Vec<Action> = vec![]; // // Stream of tasks // let mut tasks = futures.fuse(); // // async move { // loop { // select!( // stat = stats.next() => { // match stat { // Some(s) => { // measures.push(s.clone()); // match s { // Action::Iterated(id, i) => {}, // println!("task {} iteration {}", id, i), // _ => {} // } // } // None => { // // When stats channel is closed, print measures one last time // print_measures(&measures); // // No more stats writers => break the loop // println!("All stats received."); // break // } // } // } // // task = tasks.next() => { // match task { // // print measures every time a task is finished // Some(Ok(_)) => print_measures(&measures), // Some(Err(e)) => eprintln!("Task finished with error {:?}", e), // None => { // println!("All tasks finished.") // } // } // } // ) // } // } // .await; // Ok(()) // } // // /// Creates 2 clients and connects them to nodes correspondingly // async fn connect_clients(node1: Node, node2: Node) -> Result<(Client, Client), Box<dyn Error>> { // let (client1, _) = Client::connect(node1.address).await?; // let (client2, _) = Client::connect(node2.address).await?; // Ok((client1, client2)) // } // // async fn run_measure( // id: TaskId, // client1: &Client, // client2: &mut Client, // count: u32, // stat: mpsc::UnboundedSender<Action>, // ) -> Result<(), TrySendError<Action>> { // stat.unbounded_send(Action::Started)?; // // for i in 0..count { // 
send_and_wait(client1, client2, &stat).await?; // stat.unbounded_send(Action::Iterated(id, i))?; // } // // stat.unbounded_send(Action::Finished)?; // // Ok(()) // } // // async fn send_and_wait( // client1: &Client, // client2: &mut Client, // stat: &mpsc::UnboundedSender<Action>, // ) -> Result<(), TrySendError<Action>> { // client1.send(Command::Relay { // dst: client2.peer_id.clone(), // data: now().as_millis().to_string(), // }); // // stat.unbounded_send(Action::Sent)?; // // // TODO: move timeout to arguments // let result = timeout(Duration::from_secs(5), client2.receive_one()).await; // let now = now(); // // match result { // Ok(Some(ClientEvent::FunctionCall { call, sender })) => { // let sent = call // .arguments // .get("sent") // .and_then(|v| v.as_u64()) // .map(Duration::from_millis) // .expect("parse 'sent' from arguments"); // // let passed = now - sent; // stat.unbounded_send(Action::Received(passed))?; // } // Err(_) => stat.unbounded_send(Action::Timeout)?, // _ => {} // } // // Ok(()) // } // // fn now() -> Duration { // std::time::SystemTime::now() // .duration_since(SystemTime::UNIX_EPOCH) // .expect("Error on now()") // } // // fn print_measures(measures: &Vec<Action>) { // fn pp(f: f64) -> u128 { // Duration::from_secs_f64(f).as_millis() // } // // let received: Vec<_> = measures // .iter() // .filter_map(|m| match m { // Action::Received(d) => Some(d.as_secs_f64()), // _ => None, // }) // .collect(); // // let (mut sent, mut started, mut finished, mut timeout) = (0u32, 0u32, 0u32, 0u32); // for m in measures { // match m { // Action::Sent => sent += 1, // Action::Started => started += 1, // Action::Finished => finished += 1, // Action::Timeout => timeout += 1, // _ => {} // } // } // // println!( // "\n\nstartd\tfinshd\tsent\trcvd\ttimeout\n{}\t{}\t{}\t{}\t{}", // started, // finished, // sent, // received.len(), // timeout, // ); // // if !received.is_empty() { // println!( // 
"mean\tmedian\tvar\t.75\t.95\t.99\tmax\tmin\n{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}", // pp(received.mean()), // pp(received.median()), // pp(received.var()), // pp(received.percentile(75.0)), // pp(received.percentile(95.0)), // pp(received.percentile(99.0)), // pp(received.max()), // pp(received.min()) // ); // } // }
use std::cmp::max; use itertools::all; use quickcheck::TestResult; #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } } #[cfg(test)] mod properties { use super::*; use std::collections::BTreeSet; #[quickcheck] // 1. Create a random list fn iterator_parity(mut xs: Vec<usize>) -> bool { // 2. Create an empty AVL tree and BTree // 3. For each element in the list, insert it to both tree let avl_set = xs.iter().cloned().collect::<AvlTreeSet<_>>(); let btree_set = xs.iter().cloned().collect::<BTreeSet<_>>(); // 4. Both AVL and BTree iterator should be equal item per item equal(avl_set.iter(), btree_set.iter()) } } // #[quickcheck] // fn insert_parity(mut btree_set: BTreeSet<u8>, x: u8) -> bool { // let mut avl_set = btree_set.iter().cloned().collect::<AvlTreeSet<_>>(); // avl_set.insert(x) == btree_set.insert(x) // } #[quickcheck] fn node_height(set: AvlTreeSet<u16>) -> bool { all(set.node_iter(), |node| { node.height == 1 + max(node.left_height(), node.right_height()) }) } #[quickcheck] fn rotate_right_preserves_order(btree: BTreeSet<u8>) -> TestResult { let mut set = btree.iter().cloned().collect::<AvlTreeSet<_>>(); if !set.root.is_some() { return TestResult::discard(); } if !set.root.as_mut().unwrap().rotate_right() { return TestResult::discard(); } TestResult::from_bool(equal(set.iter(), btree.iter())) } #[quickcheck] fn rotate_right_tils_balance_factor(xs: Vec<u32>) -> TestResult { let mut set = xs.iter().cloned().collect::<AvlTreeSet<_>>(); if !set.root.is_some() { return TestResult::discard(); } let root_node = set.root.as_mut().unwrap(); let balance_factor = root_node.balance_factor(); if !root_node.rotate_right() { return TestResult::discard(); } let tilted_factor = root_node.balance_factor(); TestResult::from_bool(balance_factor - tilted_factor == 2) } #[quickcheck] fn rotate_left_and_rotate_left_identity(set: AvlTreeSet<u8>) -> TestResult { if set.root.is_none() { return TestResult::discard(); } let mut rotated_set = set.clone(); let 
root_node = rotated_set.root.as_mut().unwrap(); if root_node.rotate_left() { root_node.rotate_right(); } else { root_node.rotate_right(); root_node.rotate_left(); } TestResult::from_bool(rotated_set == set) } #[quickcheck] fn balanced_nodes(set: AvlTreeSet<u16>) -> bool { all(set.node_iter(), |node| node.balance_factor().abs() < 2) } #[quickcheck] fn contains_parity(xs: Vec<isize>) -> bool { let evens = xs .iter() .cloned() .filter(|x| x % 2 == 0) .collect::<Vec<_>>(); let avl_set = evens.iter().cloned().collect::<AvlTreeSet<_>>(); let btree_set = evens.iter().cloned().collect::<BTreeSet<_>>(); all(xs.iter(), |x| avl_set.contains(x) == btree_set.contains(x)) } #[quickcheck] fn take_parity(xs: Vec<usize>) -> bool { let odds = xs .iter() .cloned() .filter(|x| x % 2 == 1) .collect::<Vec<_>>(); let mut avl_set = odds.iter().cloned().collect::<AvlTreeSet<_>>(); let mut btree_set = odds.iter().cloned().collect::<BTreeSet<_>>(); all(xs.iter(), |x| avl_set.take(x) == btree_set.take(x)) } #[quickcheck] fn take_iterator_parity(xs: Vec<i16>) -> bool { let fives = xs .iter() .cloned() .filter(|x| x % 5 == 0) .collect::<Vec<_>>(); let mut avl_set = xs.iter().cloned().collect::<AvlTreeSet<_>>(); let mut btree_set = xs.iter().cloned().collect::<BTreeSet<_>>(); for five in fives { assert_eq!(avl_set.take(&five), btree_set.take(&five)); } equal(avl_set.iter(), btree_set.iter()) } #[quickcheck] fn take_height_nodes(xs: Vec<isize>) -> bool { let negatives = xs.iter().cloned().filter(|&x| x < 0).collect::<Vec<_>>(); let mut set = xs.iter().cloned().collect::<AvlTreeSet<_>>(); for negative in negatives { set.take(&negative); } all(set.node_iter(), |node| { node.height == 1 + max(node.left_height(), node.right_height()) }) } #[quickcheck] fn take_balanced_nodes(xs: Vec<usize>) -> bool { let odds = xs .iter() .cloned() .filter(|x| x % 2 == 1) .collect::<Vec<_>>();n let mut set = xs.iter().cloned().collect::<AvlTreeSet<_>>(); for odd in odds { set.take(&odd); } all(set.node_iter(), |node| 
node.balance_factor().abs() < 2) } #[bench] fn setup_random_btree_set(b: &mut Bencher) { let mut rng = thread_rng(); let mut set = BTreeSet::new(); b.iter(|| { set.insert(rng.gen::<usize>()); }); } #[bench] fn setup_random_avltree_set(b: &mut Bencher) { let mut rng = thread_rng(); let mut set = AvlTreeSet::new(); b.iter(|| { set.insert(rng.gen::<usize>()); }); }
/** El objetivo del ejercicio es implementar un programa de consola para jugar al ahorcado. Bienvenido al ahorcado de FIUBA! La palabra hasta el momento es: _ _ _ _ _ _ Adivinaste las siguientes letras: Te quedan 5 intentos. Ingresa una letra: r La palabra hasta el momento es: _ _ _ _ _ r Adivinaste las siguientes letras: r Te quedan 5 intentos. Ingresa una letra: c Si se ingresa una letra que no forma parte de la palabra, se pierde un intento. La lista de palabras se debe leer de un archivo de texto, donde cada línea del archivo contendrá una palabra. De esa lista, se deberá elegir una palabra (puede ser una selección secuencial de palabras). El programa termina cuando se adivina correctamente la palabra pensada, o cuando se acabaron los intentos. Mostrar las letras que se ingresaron y que no forman parte de la palabra (las que hacen que se pierda un intento). Verificar si se ingresó nuevamente una letra que ya estaba. **/ mod game; mod error; use std::collections::HashMap; use std::fs::File; use std::io::{self, BufRead}; use std::path::Path; use crate::error::GameError; use crate::game::game_mod::{Ahorcado, GameStatus}; fn get_char() -> char { let mut input = String::new(); std::io::stdin().read_line(&mut input).expect("Failed to read line"); input.chars().next().unwrap() } fn print_word(result_word: &[String]) { println!("{}", result_word.join("")); } fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>> where P: AsRef<Path>, { let file = File::open(filename)?; Ok(io::BufReader::new(file).lines()) } fn print_wrong_characters(hash_wrong_chars: &HashMap<char, ()>) { let characters = hash_wrong_chars.keys().map(|s| s.to_string()).collect::<Vec<_>>().join(", "); println!("{}", characters); } fn play(ahorcado: &mut Ahorcado) { loop { println!("turnos restantes: {}", ahorcado.get_remaining_attempts()); println!("Ingresa una letra"); let input_char = get_char(); let game = ahorcado.play(input_char); match &game { Ok(stats) => { match stats.status 
{ GameStatus::Success => { println!("Felicitaciones adivino la palabra"); println!("La palabra era: {}", ahorcado.word); break; } GameStatus::Pending => { println!("Mal!!."); } GameStatus::GameOver => { println!("Game over"); break; } GameStatus::CharGuessed => { println!("Adivino la siguiente letra: {}", stats.guessed_char); println!("La palabra hasta ahora es: {}", stats.get_guessed_word()); } } }, Err(e) => { match e { GameError::NoChancesAvailable => { eprintln!("Error: {}", e); break; }, GameError::CharacterIsAlreadyUsed => eprintln!("Error: {}", e), } } } } } fn main(){ println!("Bienvenido al ahorcado de FIUBA!"); let lines = read_lines("./palabras.txt"); match lines { Ok(lines) => { for line in lines { if let Ok(guess_word) = line { let mut ahorcado = game::game_mod::Ahorcado::new(guess_word); play(&mut ahorcado); } } }, Err(err) => println!("Error, {}", err) } }
/*!
    Example: updating a GUI control from a background thread.

    A worker thread produces messages over an mpsc channel and wakes the GUI
    thread through an `nwg::Notice`; the `OnNotice` handler drains the channel
    and appends the text to a text box.
    (The original header said "show your name in a message box", which does
    not match this code.)
*/

extern crate native_windows_gui as nwg;
extern crate native_windows_derive as nwd;

use nwd::NwgUi;
use nwg::NativeUi;

use std::thread;
use std::sync::mpsc::{channel, Receiver};
use std::cell::RefCell;

/// Main window state. The `NwgUi` derive generates the UI-building code from
/// the `#[nwg_control]` / `#[nwg_events]` attributes on each field.
#[derive(Default, NwgUi)]
pub struct BasicApp {
    // Top-level window; `setup` runs on init, `say_goodbye` on close.
    #[nwg_control(size: (300, 300), position: (300, 300), title: "Basic example", flags: "WINDOW|VISIBLE")]
    #[nwg_events( OnInit: [BasicApp::setup], OnWindowClose: [BasicApp::say_goodbye] )]
    window: nwg::Window,

    // Text area that accumulates the messages sent by the worker thread.
    #[nwg_control(size: (280, 280), position: (10, 10), focus: true, flags: "VISIBLE|AUTOVSCROLL")]
    text_box: nwg::TextBox,

    // Notice object: its sender can be cloned into another thread, and
    // firing it triggers `update_text` on the GUI thread via `OnNotice`.
    #[nwg_control]
    #[nwg_events(OnNotice: [BasicApp::update_text])]
    update_text: nwg::Notice,

    // Receiving end of the worker channel. Wrapped in `RefCell<Option<..>>`
    // because event handlers only get `&self` and the receiver is installed
    // after construction (in `setup`).
    text_receiver: RefCell<Option<Receiver<String>>>,
}

impl BasicApp {
    /// `OnInit` handler: spawns the worker thread and stores the channel
    /// receiver for later draining in `update_text`.
    fn setup(&self) {
        let (sender, receiver) = channel();

        // Creates a sender to trigger the `OnNotice` event
        let notice_sender = self.update_text.sender();

        thread::spawn(move || {
            let mut counter = 0;
            loop {
                counter += 1;
                sender.send(format!("ID: {}\r\n", counter)).unwrap();

                // Calling the notice function will trigger the OnNotice event on the gui thread
                notice_sender.notice();

                // Produce one message every 500 ms.
                thread::sleep(::std::time::Duration::from_millis(500));
            }
        });

        *self.text_receiver.borrow_mut() = Some(receiver);
    }

    /// `OnNotice` handler (runs on the GUI thread): drains every pending
    /// message from the channel and appends it to the text box, scrolling
    /// to the last line each time.
    fn update_text(&self) {
        let mut receiver_ref = self.text_receiver.borrow_mut();
        let receiver = receiver_ref.as_mut().unwrap();
        while let Ok(data) = receiver.try_recv() {
            let mut new_text = self.text_box.text();
            new_text.push_str(&data);
            self.text_box.set_text(&new_text);
            self.text_box.scroll_lastline();
        }
    }

    /// `OnWindowClose` handler: stops the event loop so `main` can return.
    fn say_goodbye(&self) {
        nwg::stop_thread_dispatch();
    }
}

fn main() {
    nwg::init().expect("Failed to init Native Windows GUI");
    nwg::Font::set_global_family("Segoe UI").expect("Failed to set default font");
    let _app = BasicApp::build_ui(Default::default()).expect("Failed to build UI");
    nwg::dispatch_thread_events();
}
/*
 * Rust traits (similar to interfaces in C# or Java).
 * Three ways to find the largest element of a slice, differing only in the
 * trait bounds they require: Copy, Clone, or neither (returning a reference).
 * CreatedAt: 2019-06-28
 */
fn main() {
    let values = vec![7, 3, 9, 1, 2];
    println!("{:?}: {}", values, largest_copy(&values));
    let values = vec!['d', 'f', 'a', 'z', 'c'];
    println!("{:?}: {}", values, largest_copy(&values));
    let values = vec!["d", "f", "a", "z", "c"];
    println!("{:?}: {}", values, largest_copy(&values));

    let values = vec![7, 3, 9, 1, 2];
    println!("{:?}: {}", values, largest_clone(&values));
    let values = vec!["d", "f", "a", "z", "c"];
    println!("{:?}: {}", values, largest_clone(&values));

    let values = vec![7, 3, 9, 1, 2];
    println!("{:?}: {}", values, largest(&values));
    let values = vec!["d", "f", "a", "z", "c"];
    println!("{:?}: {}", values, largest(&values));
}

/// Returns the largest element by value; requires `Copy`.
/// Ties keep the earliest element (strict `<` comparison).
/// Panics if `list` is empty (the fold seed indexes `list[0]`).
fn largest_copy<T: PartialOrd + Copy>(list: &[T]) -> T {
    list.iter()
        .copied()
        .fold(list[0], |best, candidate| {
            if best < candidate {
                candidate
            } else {
                best
            }
        })
}

/// Returns the largest element by cloning it once at the end; requires
/// `Clone`. The scan itself only compares references.
/// Panics if `list` is empty.
fn largest_clone<T: PartialOrd + Clone>(list: &[T]) -> T {
    list.iter()
        .fold(&list[0], |best, candidate| {
            if best < candidate {
                candidate
            } else {
                best
            }
        })
        .clone()
}

/// Returns a reference to the largest element; needs no `Copy`/`Clone`.
/// Panics if `list` is empty.
fn largest<T: PartialOrd>(list: &[T]) -> &T {
    let mut best_index = 0;
    for index in 1..list.len() {
        if list[best_index] < list[index] {
            best_index = index;
        }
    }
    &list[best_index]
}

// Two alternative `largest_clone` sketches from the original file are not
// reproduced here; they were commented out (both reused the `largest_clone`
// name, so at most one could be enabled at a time).
// Lowering from CSTs (concrete syntax trees) to ASTs, exposed as a salsa
// query group so results are cached and invalidated per module.
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};

use crate::{
    ast::{
        self, AssignmentBody, Ast, AstError, AstKind, AstString, Function, Identifier, Int, List,
        Match, MatchCase, OrPattern, Struct, Symbol, Text, TextPart,
    },
    cst::{self, Cst, CstDb, CstKind, UnwrapWhitespaceAndComment},
    error::CompilerError,
    module::Module,
    position::Offset,
    rcst_to_cst::RcstToCst,
    string_to_rcst::ModuleError,
    utils::AdjustCasingOfFirstLetter,
};
use std::{ops::Range, sync::Arc};

#[salsa::query_group(CstToAstStorage)]
pub trait CstToAst: CstDb + RcstToCst {
    // Transparent queries: thin wrappers over `ast`, not separately cached.
    #[salsa::transparent]
    fn ast_to_cst_id(&self, id: ast::Id) -> Option<cst::Id>;
    #[salsa::transparent]
    fn ast_id_to_span(&self, id: ast::Id) -> Option<Range<Offset>>;
    #[salsa::transparent]
    fn ast_id_to_display_span(&self, id: ast::Id) -> Option<Range<Offset>>;

    #[salsa::transparent]
    fn cst_to_ast_id(&self, module: Module, id: cst::Id) -> Vec<ast::Id>;

    // The main query: lower a whole module's CST to ASTs.
    fn ast(&self, module: Module) -> AstResult;
}

// The lowered ASTs for a module plus the AST-id -> CST-id mapping produced
// during lowering, or the error from parsing the module's source.
pub type AstResult = Result<(Arc<Vec<Ast>>, Arc<FxHashMap<ast::Id, cst::Id>>), ModuleError>;

/// Maps an AST id back to the CST id it was lowered from.
/// Returns `None` if the module failed to parse or the id is unknown.
fn ast_to_cst_id(db: &dyn CstToAst, id: ast::Id) -> Option<cst::Id> {
    let (_, ast_to_cst_id_mapping) = db.ast(id.module.clone()).ok()?;
    ast_to_cst_id_mapping.get(&id).cloned()
}

/// Source span of the CST node an AST node was lowered from.
fn ast_id_to_span(db: &dyn CstToAst, id: ast::Id) -> Option<Range<Offset>> {
    let cst_id = db.ast_to_cst_id(id.clone())?;
    Some(db.find_cst(id.module, cst_id).data.span)
}

/// Like `ast_id_to_span`, but using the CST node's display span.
fn ast_id_to_display_span(db: &dyn CstToAst, id: ast::Id) -> Option<Range<Offset>> {
    let cst_id = db.ast_to_cst_id(id.clone())?;
    Some(db.find_cst(id.module, cst_id).display_span())
}

/// Reverse lookup: all AST ids that were lowered from the given CST id.
/// Note: this is a linear scan over the whole id mapping; returns an empty
/// vec if the module failed to parse.
fn cst_to_ast_id(db: &dyn CstToAst, module: Module, id: cst::Id) -> Vec<ast::Id> {
    if let Ok((_, ast_to_cst_id_mapping)) = db.ast(module) {
        ast_to_cst_id_mapping
            .iter()
            .filter_map(|(key, &value)| if value == id { Some(key) } else { None })
            .cloned()
            .collect_vec()
    } else {
        vec![]
    }
}

/// Lowers the module's CST to ASTs (continues below).
fn ast(db: &dyn CstToAst, module: Module) -> AstResult {
    let mut context = LoweringContext::new(module.clone());
db.cst(module).map(|cst| { let cst = cst.unwrap_whitespace_and_comment(); let asts = context.lower_csts(&cst); (Arc::new(asts), Arc::new(context.id_mapping)) }) } #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] enum LoweringType { Expression, Pattern, PatternLiteralPart, } struct LoweringContext { module: Module, next_id: usize, id_mapping: FxHashMap<ast::Id, cst::Id>, } impl LoweringContext { fn new(module: Module) -> Self { LoweringContext { module, next_id: 0, id_mapping: FxHashMap::default(), } } fn lower_csts(&mut self, csts: &[Cst]) -> Vec<Ast> { csts.iter() .map(|it| self.lower_cst(it, LoweringType::Expression)) .collect() } fn lower_cst(&mut self, cst: &Cst, lowering_type: LoweringType) -> Ast { match &cst.kind { CstKind::EqualsSign | CstKind::Comma | CstKind::Dot | CstKind::Colon | CstKind::ColonEqualsSign | CstKind::Bar | CstKind::OpeningParenthesis | CstKind::ClosingParenthesis | CstKind::OpeningBracket | CstKind::ClosingBracket | CstKind::OpeningCurlyBrace | CstKind::ClosingCurlyBrace | CstKind::Arrow | CstKind::SingleQuote | CstKind::DoubleQuote | CstKind::Percent | CstKind::Octothorpe => self.create_ast( cst.data.id, AstKind::Error { child: None, errors: vec![self.create_error(cst, AstError::UnexpectedPunctuation)], }, ), CstKind::Whitespace(_) | CstKind::Newline(_) | CstKind::Comment { .. } | CstKind::TrailingWhitespace { .. 
} => { panic!("Whitespace should have been removed before lowering to AST.") } CstKind::Identifier(identifier) => { let string = self.create_string(cst.data.id, identifier.to_string()); let mut kind = AstKind::Identifier(Identifier(string)); if lowering_type == LoweringType::PatternLiteralPart { kind = AstKind::Error { child: None, errors: vec![self.create_error( cst, AstError::PatternLiteralPartContainsInvalidExpression, )], }; }; self.create_ast(cst.data.id, kind) } CstKind::Symbol(symbol) => { let string = self.create_string(cst.data.id, symbol.to_string()); self.create_ast(cst.data.id, AstKind::Symbol(Symbol(string))) } CstKind::Int { value, .. } => { self.create_ast(cst.data.id, AstKind::Int(Int(value.clone()))) } CstKind::Text { opening, parts, closing, } => { let mut errors = vec![]; let opening_single_quote_count = match &opening.kind { CstKind::OpeningText { opening_single_quotes, opening_double_quote: box Cst { kind: CstKind::DoubleQuote, .. } } if opening_single_quotes .iter() .all(|single_quote| single_quote.kind.is_single_quote()) => opening_single_quotes.len(), _ => panic!("Text needs to start with any number of single quotes followed by a double quote, but started with {}.", opening) }; let mut lowered_parts = vec![]; for part in parts { match &part.kind { CstKind::TextPart(text) => { let string = self.create_string(part.data.id, text.clone()); let text_part = self.create_ast(part.data.id, AstKind::TextPart(TextPart(string))); lowered_parts.push(text_part); }, CstKind::TextInterpolation { opening_curly_braces, expression, closing_curly_braces, } => { if lowering_type != LoweringType::Expression { return self.create_ast_for_invalid_expression_in_pattern(cst); }; if opening_curly_braces.len() != (opening_single_quote_count + 1) || !opening_curly_braces .iter() .all(|opening_curly_brace| opening_curly_brace.kind.is_opening_curly_brace()) { panic!( "Text interpolation needs to start with {} opening curly braces, but started with {}.", 
opening_single_quote_count + 1, opening_curly_braces.iter().map(|cst| format!("{}", cst)).join(""), ) } let ast = self.lower_cst(expression, LoweringType::Expression); if closing_curly_braces.len() == opening_single_quote_count + 1 && closing_curly_braces .iter() .all(|closing_curly_brace| closing_curly_brace.kind.is_closing_curly_brace()) { lowered_parts.push(ast); } else { let error = self.create_ast( part.data.id, AstKind::Error { child: Some(Box::new(ast)), errors: vec![CompilerError { module: self.module.clone(), span: part.data.span.clone(), payload: AstError::TextInterpolationMissesClosingCurlyBraces.into(), }], }, ); lowered_parts.push(error); } }, CstKind::Error { error, .. } => errors.push(CompilerError { module: self.module.clone(), span: part.data.span.clone(), payload: error.clone().into(), }), _ => panic!("Text contains non-TextPart. Whitespaces should have been removed already."), } } let text = self.create_ast(cst.data.id, AstKind::Text(Text(lowered_parts))); if !matches!( &closing.kind, CstKind::ClosingText { closing_double_quote: box Cst { kind: CstKind::DoubleQuote, .. }, closing_single_quotes } if closing_single_quotes .iter() .all(|single_quote| single_quote.kind.is_single_quote()) && opening_single_quote_count == closing_single_quotes.len() ) { errors.push(self.create_error(closing, AstError::TextMissesClosingQuote)); } self.wrap_in_errors(cst.data.id, text, errors) } CstKind::OpeningText { .. } => panic!("OpeningText should only occur in Text."), CstKind::ClosingText { .. } => panic!("ClosingText should only occur in Text."), CstKind::TextPart(_) => panic!("TextPart should only occur in Text."), CstKind::TextInterpolation { .. } => { panic!("TextInterpolation should only occur in Text.") } CstKind::BinaryBar { left, bar, right } => { match lowering_type { // In an expression context, a bar introduces a call. 
LoweringType::Expression => { let left = self.lower_cst(left, LoweringType::Expression); assert!( bar.kind.is_bar(), "BinaryBar must contain a bar, but instead contained a {bar}.", ); let call = self.lower_cst(right, LoweringType::Expression); let call = match call { Ast { kind: AstKind::Call(ast::Call { receiver, mut arguments, .. }), .. } if right.kind.is_call() => { arguments.insert(0, left); ast::Call { receiver, arguments, is_from_pipe: true, } } call => ast::Call { receiver: Box::new(call), arguments: vec![left], is_from_pipe: true, }, }; self.create_ast(cst.data.id, AstKind::Call(call)) } // In a pattern context, a bar represents an or pattern. LoweringType::Pattern | LoweringType::PatternLiteralPart => { let mut patterns = vec![]; let mut cst = cst; while let Cst { kind: CstKind::BinaryBar { left, bar, right }, .. } = cst { patterns.push(self.lower_cst(right, LoweringType::Pattern)); assert!( bar.kind.is_bar(), "Expected a bar after the left side of an or pattern, but found {bar}.", ); cst = left; } patterns.push(self.lower_cst(left, LoweringType::Pattern)); patterns.reverse(); let mut errors = vec![]; let captured_identifiers = patterns .iter() .map(|it| it.captured_identifiers()) .collect_vec(); let all_identifiers = captured_identifiers .iter() .flat_map(|it| it.keys()) .collect::<FxHashSet<_>>(); for identifier in all_identifiers { let number_of_missing_captures = captured_identifiers .iter() .filter(|it| !it.contains_key(identifier)) .count(); if number_of_missing_captures == 0 { continue; } let empty_vec = vec![]; let all_captures = captured_identifiers .iter() .flat_map(|it| it.get(identifier).unwrap_or(&empty_vec)) .filter_map(|it| self.id_mapping.get(it).cloned()) .collect_vec(); errors.push(self.create_error( left, AstError::OrPatternIsMissingIdentifiers { identifier: identifier.to_owned(), number_of_missing_captures: number_of_missing_captures.try_into().unwrap(), all_captures, }, )) } let ast = self.create_ast(cst.data.id, 
AstKind::OrPattern(OrPattern(patterns))); self.wrap_in_errors(cst.data.id, ast, errors) } } } CstKind::Parenthesized { opening_parenthesis, inner, closing_parenthesis, } => { match lowering_type { LoweringType::Expression => {} LoweringType::Pattern | LoweringType::PatternLiteralPart => { return self.create_ast( cst.data.id, AstKind::Error { child: None, errors: vec![ self.create_error(cst, AstError::ParenthesizedInPattern) ], }, ); } } let mut ast = self.lower_cst(inner, LoweringType::Expression); assert!( opening_parenthesis.kind.is_opening_parenthesis(), "Parenthesized needs to start with opening parenthesis, but started with {opening_parenthesis}.", ); if !closing_parenthesis.kind.is_closing_parenthesis() { ast = self.create_ast( closing_parenthesis.data.id, AstKind::Error { child: Some(Box::new(ast)), errors: vec![self.create_error( closing_parenthesis, AstError::ParenthesizedMissesClosingParenthesis, )], }, ); } ast } CstKind::Call { receiver, arguments, } => { let mut receiver_kind = &receiver.kind; loop { receiver_kind = match receiver_kind { CstKind::Parenthesized { opening_parenthesis, inner, closing_parenthesis, } => { assert!( opening_parenthesis.kind.is_opening_parenthesis(), "Parenthesized needs to start with opening parenthesis, but started with {opening_parenthesis}.", ); assert!( closing_parenthesis.kind.is_closing_parenthesis(), "Parenthesized for a call receiver needs to end with closing parenthesis, but ended with {closing_parenthesis}.", ); &inner.kind } _ => break, }; } let receiver = self.lower_cst(receiver, LoweringType::Expression); let arguments = self.lower_csts(arguments); self.create_ast( cst.data.id, AstKind::Call(ast::Call { receiver: receiver.into(), arguments, is_from_pipe: false, }), ) } CstKind::List { opening_parenthesis, items, closing_parenthesis, } => { let mut errors = vec![]; if lowering_type == LoweringType::PatternLiteralPart { errors.push( self.create_error( cst, AstError::PatternLiteralPartContainsInvalidExpression, ), 
); }; assert!( opening_parenthesis.kind.is_opening_parenthesis(), "List should always have an opening parenthesis, but instead had {opening_parenthesis}.", ); let mut ast_items = vec![]; if items.len() == 1 && let CstKind::Comma = items[0].kind { // Empty list (`(,)`), do nothing. } else { for item in items { let CstKind::ListItem { value, comma, } = &item.kind else { errors.push(self.create_error(cst, AstError::ListWithNonListItem)); continue; }; let mut value = self.lower_cst(&value.clone(), lowering_type); if let Some(comma) = comma && !comma.kind.is_comma() { value = self.create_ast( comma.data.id, AstKind::Error { child: Some(Box::new(value)), errors: vec![self.create_error(comma, AstError::ListItemMissesComma)], }, ); } ast_items.push(value); } } if !closing_parenthesis.kind.is_closing_parenthesis() { errors.push( self.create_error( closing_parenthesis, AstError::ListMissesClosingParenthesis, ), ); } let ast = self.create_ast(cst.data.id, AstKind::List(List(ast_items))); self.wrap_in_errors(cst.data.id, ast, errors) } CstKind::ListItem { .. } => panic!("ListItem should only appear in List."), CstKind::Struct { opening_bracket, fields, closing_bracket, } => { let mut errors = vec![]; if lowering_type == LoweringType::PatternLiteralPart { errors.push( self.create_error( cst, AstError::PatternLiteralPartContainsInvalidExpression, ), ); }; assert!( opening_bracket.kind.is_opening_bracket(), "Struct should always have an opening bracket, but instead had {opening_bracket}.", ); let fields = fields .iter() .filter_map(|field| { let CstKind::StructField { key_and_colon, value, comma, } = &field.kind else { errors.push(self.create_error(cst, AstError::StructWithNonStructField)); return None; }; if let Some(box (key, colon)) = key_and_colon { // Normal syntax, e.g. `[foo: bar]`. 
let key_lowering_type = match lowering_type { LoweringType::Expression => LoweringType::Expression, LoweringType::Pattern | LoweringType::PatternLiteralPart => { LoweringType::PatternLiteralPart } }; let mut key = self.lower_cst(key, key_lowering_type); if !colon.kind.is_colon() { key = self.create_ast( colon.data.id, AstKind::Error { child: Some(Box::new(key)), errors: vec![self .create_error(colon, AstError::StructKeyMissesColon)], }, ) } let mut value = self.lower_cst(&value.clone(), lowering_type); if let Some(comma) = comma && !comma.kind.is_comma() { value = self.create_ast( comma.data.id, AstKind::Error { child: Some(Box::new(value)), errors: vec![self.create_error( comma, AstError::StructValueMissesComma, )], }, ) } Some((Some(key), value)) } else { // Shorthand syntax, e.g. `[foo]`. let mut ast = self.lower_cst(&value.clone(), lowering_type); if !ast.kind.is_identifier() { ast = self.create_ast( value.data.id, AstKind::Error { child: Some(Box::new(ast)), errors: vec![self.create_error( value, AstError::StructShorthandWithNotIdentifier, )], }, ) } if let Some(comma) = comma && !comma.kind.is_comma() { ast = self.create_ast( comma.data.id, AstKind::Error { child: Some(Box::new(ast)), errors: vec![self.create_error( comma, AstError::StructValueMissesComma, )], }, ) } Some((None, ast)) } }) .collect(); if !closing_bracket.kind.is_closing_bracket() { errors.push( self.create_error(closing_bracket, AstError::StructMissesClosingBrace), ); } let ast = self.create_ast(cst.data.id, AstKind::Struct(Struct { fields })); self.wrap_in_errors(cst.data.id, ast, errors) } CstKind::StructField { .. 
} => panic!("StructField should only appear in Struct."), CstKind::StructAccess { struct_, dot, key } => { if lowering_type != LoweringType::Expression { return self.create_ast_for_invalid_expression_in_pattern(cst); }; self.lower_struct_access(cst.data.id, struct_, dot, key) } CstKind::Match { expression, percent, cases, } => { if lowering_type != LoweringType::Expression { return self.create_ast_for_invalid_expression_in_pattern(expression); }; let expression = self.lower_cst(expression, LoweringType::Expression); assert!( percent.kind.is_percent(), "Expected a percent sign after the expression to match over, but found {percent}.", ); let cases = self.lower_csts(cases); self.create_ast( cst.data.id, AstKind::Match(Match { expression: Box::new(expression), cases, }), ) } CstKind::MatchCase { pattern, arrow: _, body, } => { if lowering_type != LoweringType::Expression { return self.create_ast_for_invalid_expression_in_pattern(pattern); }; let pattern = self.lower_cst(pattern, LoweringType::Pattern); // TODO: handle error in arrow let body = self.lower_csts(body); self.create_ast( cst.data.id, AstKind::MatchCase(MatchCase { pattern: Box::new(pattern), body, }), ) } CstKind::Function { opening_curly_brace, parameters_and_arrow, body, closing_curly_brace, } => { if lowering_type != LoweringType::Expression { return self.create_ast_for_invalid_expression_in_pattern(cst); } assert!( opening_curly_brace.kind.is_opening_curly_brace(), "Expected an opening curly brace at the beginning of a function, but found {opening_curly_brace}.", ); let mut errors = vec![]; let (parameters, mut parameter_errors) = if let Some((parameters, arrow)) = parameters_and_arrow { assert!( arrow.kind.is_arrow(), "Expected an arrow after the parameters in a function, but found `{arrow}`.", ); self.lower_parameters(parameters) } else { (vec![], vec![]) }; errors.append(&mut parameter_errors); let body = self.lower_csts(body); if !closing_curly_brace.kind.is_closing_curly_brace() { 
errors.push(self.create_error( closing_curly_brace, AstError::FunctionMissesClosingCurlyBrace, )); } let ast = self.create_ast( cst.data.id, AstKind::Function(Function { parameters, body, fuzzable: false, }), ); self.wrap_in_errors(cst.data.id, ast, errors) } CstKind::Assignment { left, assignment_sign, body, } => { if lowering_type != LoweringType::Expression { return self.create_ast_for_invalid_expression_in_pattern(cst); }; assert!( matches!(assignment_sign.kind, CstKind::EqualsSign | CstKind::ColonEqualsSign), "Expected an equals sign or colon equals sign for the assignment, but found {assignment_sign} instead.", ); let body = self.lower_csts(body); let (body, errors) = if let CstKind::Call { receiver: name, arguments: parameters, } = &left.kind { let name = match &name.kind { CstKind::Identifier(identifier) => { self.create_string(name.data.id.to_owned(), identifier.to_owned()) } CstKind::Error { error, .. } => { return self.create_ast( cst.data.id, AstKind::Error { child: None, errors: vec![CompilerError { module: self.module.clone(), span: name.data.span.clone(), payload: error.clone().into(), }], }, ); } _ => { return self.create_ast( cst.data.id, AstKind::Error { child: None, errors: vec![CompilerError { module: self.module.clone(), span: name.data.span.clone(), payload: AstError::ExpectedNameOrPatternInAssignment.into(), }], }, ); } }; let (parameters, errors) = self.lower_parameters(parameters); let body = AssignmentBody::Function { name, function: Function { parameters, body, fuzzable: true, }, }; (body, errors) } else { let body = AssignmentBody::Body { pattern: Box::new(self.lower_cst(left, LoweringType::Pattern)), body, }; (body, vec![]) }; let ast = self.create_ast( cst.data.id, AstKind::Assignment(ast::Assignment { is_public: assignment_sign.kind.is_colon_equals_sign(), body, }), ); self.wrap_in_errors(cst.data.id, ast, errors) } CstKind::Error { error, .. 
            // A parser-level error node: surface it as an AST error carrying the
            // parser's error payload (converted via `into()`).
            } => self.create_ast(
                cst.data.id,
                AstKind::Error {
                    child: None,
                    errors: vec![CompilerError {
                        module: self.module.clone(),
                        span: cst.data.span.clone(),
                        payload: error.clone().into(),
                    }],
                },
            ),
        }
    }

    /// Lowers a struct access (`struct_.key`) into an [`AstKind::StructAccess`].
    ///
    /// The key's first letter is uppercased via `uppercase_first_letter()` —
    /// presumably because struct keys are symbol-like in this language; confirm
    /// against the `ast::StructAccess` consumers. A parser error in the key
    /// position becomes an AST error node; any other CST kind in key position
    /// is a lowering bug (hence the `panic!`).
    fn lower_struct_access(&mut self, id: cst::Id, struct_: &Cst, dot: &Cst, key: &Cst) -> Ast {
        let struct_ = self.lower_cst(struct_, LoweringType::Expression);
        assert!(
            dot.kind.is_dot(),
            "Struct access should always have a dot, but instead had {dot}.",
        );
        // NOTE(review): cloning the whole `key.kind` is avoidable by matching
        // on `&key.kind`; left untouched here (documentation-only change).
        match key.kind.clone() {
            CstKind::Identifier(identifier) => {
                let key =
                    self.create_string(key.data.id.to_owned(), identifier.uppercase_first_letter());
                self.create_ast(
                    id,
                    AstKind::StructAccess(ast::StructAccess {
                        struct_: Box::new(struct_),
                        key,
                    }),
                )
            }
            CstKind::Error { error, .. } => self.create_ast(
                id.to_owned(),
                AstKind::Error {
                    child: None,
                    errors: vec![CompilerError {
                        module: self.module.clone(),
                        span: key.data.span.clone(),
                        payload: error.into(),
                    }],
                },
            ),
            _ => panic!(
                "Expected an identifier after the dot in a struct access, but found `{}`.",
                key
            ),
        }
    }

    /// Lowers a list of parameter CSTs, returning the parameter names together
    /// with any errors encountered.
    ///
    /// An invalid parameter still yields a placeholder name (`<invalid#N>`,
    /// where `N` is its position) so that parameter arity and positions stay
    /// stable; its error is pushed onto the returned error list instead.
    fn lower_parameters(&mut self, csts: &[Cst]) -> (Vec<AstString>, Vec<CompilerError>) {
        let mut errors = vec![];
        let parameters = csts
            .iter()
            .enumerate()
            // `box` pattern (unstable `box_patterns` feature) moves the error
            // out of the `Box` without an explicit dereference.
            .map(|(index, it)| match self.lower_parameter(it) {
                Ok(parameter) => parameter,
                Err(box error) => {
                    errors.push(error);
                    self.create_string(it.data.id, format!("<invalid#{index}>"))
                }
            })
            .collect();
        (parameters, errors)
    }

    /// Lowers a single parameter CST, which must be a plain identifier;
    /// anything else produces an [`AstError::ExpectedParameter`].
    fn lower_parameter(&mut self, cst: &Cst) -> Result<AstString, Box<CompilerError>> {
        if let CstKind::Identifier(identifier) = &cst.kind {
            Ok(self.create_string(cst.data.id.to_owned(), identifier.clone()))
        } else {
            Err(Box::new(
                self.create_error(cst, AstError::ExpectedParameter),
            ))
        }
    }

    /// Builds an [`Ast`] node with a freshly allocated id mapped back to `cst_id`.
    fn create_ast(&mut self, cst_id: cst::Id, kind: AstKind) -> Ast {
        Ast {
            id: self.create_next_id(cst_id),
            kind,
        }
    }

    /// Builds an [`AstString`] with a freshly allocated id mapped back to `cst_id`.
    fn create_string(&mut self, cst_id: cst::Id, value: String) -> AstString {
        AstString {
            id: self.create_next_id(cst_id),
            value,
        }
    }

    /// Allocates the next AST id and records the AST-id → CST-id mapping.
    ///
    /// Panics (via `assert!`) if the id was already present in `id_mapping`,
    /// which would indicate duplicate id allocation — a compiler bug.
    fn create_next_id(&mut self, cst_id: cst::Id) -> ast::Id {
        let id = self.create_next_id_without_mapping();
        assert!(self.id_mapping.insert(id.clone(), cst_id).is_none());
        id
    }

    /// Allocates the next sequential AST id for this module without touching
    /// the CST mapping (used when no originating CST node exists to map to).
    fn create_next_id_without_mapping(&mut self) -> ast::Id {
        let id = ast::Id::new(self.module.clone(), self.next_id);
        self.next_id += 1;
        id
    }

    /// Wraps `ast` in an [`AstKind::Error`] node carrying `errors`; returns
    /// `ast` unchanged when `errors` is empty.
    fn wrap_in_errors(&mut self, cst_id: cst::Id, ast: Ast, errors: Vec<CompilerError>) -> Ast {
        if errors.is_empty() {
            return ast;
        }
        self.create_ast(
            cst_id,
            AstKind::Error {
                child: Some(Box::new(ast)),
                errors,
            },
        )
    }

    /// Builds a [`CompilerError`] spanning `cst` with the given [`AstError`]
    /// as payload.
    fn create_error(&self, cst: &Cst, error: AstError) -> CompilerError {
        CompilerError {
            module: self.module.clone(),
            span: cst.data.span.clone(),
            payload: error.into(),
        }
    }

    /// Builds a childless AST error node reporting that an expression kind is
    /// not allowed inside a pattern.
    fn create_ast_for_invalid_expression_in_pattern(&mut self, cst: &Cst) -> Ast {
        self.create_ast(
            cst.data.id,
            AstKind::Error {
                child: None,
                errors: vec![self.create_error(cst, AstError::PatternContainsInvalidExpression)],
            },
        )
    }
}