text
stringlengths
8
4.13M
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use common_arrow::arrow::compute::merge_sort::MergeSlice; use itertools::Itertools; use crate::types::array::ArrayColumnBuilder; use crate::types::decimal::DecimalColumn; use crate::types::map::KvColumnBuilder; use crate::types::nullable::NullableColumn; use crate::types::number::NumberColumn; use crate::types::AnyType; use crate::types::ArgType; use crate::types::ArrayType; use crate::types::BooleanType; use crate::types::DataType; use crate::types::DateType; use crate::types::MapType; use crate::types::NumberType; use crate::types::StringType; use crate::types::TimestampType; use crate::types::ValueType; use crate::types::VariantType; use crate::with_decimal_type; use crate::with_number_mapped_type; use crate::BlockEntry; use crate::Column; use crate::ColumnBuilder; use crate::DataBlock; use crate::Scalar; use crate::Value; // Block idx, row idx in the block, repeat times pub type BlockRowIndex = (usize, usize, usize); impl DataBlock { pub fn take_blocks(blocks: &[DataBlock], indices: &[BlockRowIndex]) -> Self { debug_assert!(!blocks.is_empty()); let num_columns = blocks[0].num_columns(); let result_size = indices.iter().map(|(_, _, c)| *c).sum(); let result_columns = (0..num_columns) .map(|index| { let columns = blocks .iter() .map(|block| (block.get_by_offset(index), block.num_rows())) .collect_vec(); let ty = columns[0].0.data_type.clone(); if ty.is_null() { return BlockEntry { 
data_type: ty, value: Value::Scalar(Scalar::Null), }; } // if they are all same scalars if matches!(columns[0].0.value, Value::Scalar(_)) { let all_same_scalar = columns.iter().map(|(entry, _)| &entry.value).all_equal(); if all_same_scalar { return (*columns[0].0).clone(); } } let full_columns: Vec<Column> = columns .iter() .map(|(entry, rows)| match &entry.value { Value::Scalar(s) => { let builder = ColumnBuilder::repeat(&s.as_ref(), *rows, &entry.data_type); builder.build() } Value::Column(c) => c.clone(), }) .collect(); let column = Column::take_column_indices(&full_columns, ty.clone(), indices, result_size); BlockEntry { data_type: ty, value: Value::Column(column), } }) .collect(); DataBlock::new(result_columns, result_size) } pub fn take_by_slice_limit( block: &DataBlock, slice: (usize, usize), limit: Option<usize>, ) -> Self { let columns = block .columns() .iter() .map(|entry| { Self::take_column_by_slices_limit(&[entry.clone()], &[(0, slice.0, slice.1)], limit) }) .collect::<Vec<_>>(); let num_rows = block.num_rows().min(slice.1.min(limit.unwrap_or(slice.1))); DataBlock::new(columns, num_rows) } pub fn take_by_slices_limit_from_blocks( blocks: &[DataBlock], slices: &[MergeSlice], limit: Option<usize>, ) -> Self { debug_assert!(!blocks.is_empty()); let total_num_rows: usize = blocks.iter().map(|c| c.num_rows()).sum(); let result_size: usize = slices.iter().map(|(_, _, c)| *c).sum(); let result_size = total_num_rows.min(result_size.min(limit.unwrap_or(result_size))); let mut result_columns = Vec::with_capacity(blocks[0].num_columns()); for index in 0..blocks[0].num_columns() { let cols = blocks .iter() .map(|c| c.get_by_offset(index).clone()) .collect::<Vec<_>>(); let merged_col = Self::take_column_by_slices_limit(&cols, slices, limit); result_columns.push(merged_col); } DataBlock::new(result_columns, result_size) } pub fn take_column_by_slices_limit( columns: &[BlockEntry], slices: &[MergeSlice], limit: Option<usize>, ) -> BlockEntry { 
assert!(!columns.is_empty()); let ty = &columns[0].data_type; let num_rows = limit .unwrap_or(usize::MAX) .min(slices.iter().map(|(_, _, c)| *c).sum()); let mut builder = ColumnBuilder::with_capacity(ty, num_rows); let mut remain = num_rows; for (index, start, len) in slices { let len = (*len).min(remain); remain -= len; let col = &columns[*index]; match &col.value { Value::Scalar(scalar) => { let other = ColumnBuilder::repeat(&scalar.as_ref(), len, &col.data_type); builder.append_column(&other.build()); } Value::Column(c) => { let c = c.slice(*start..(*start + len)); builder.append_column(&c); } } if remain == 0 { break; } } let col = builder.build(); BlockEntry { data_type: ty.clone(), value: Value::Column(col), } } } impl Column { pub fn take_column_indices( columns: &[Column], datatype: DataType, indices: &[BlockRowIndex], result_size: usize, ) -> Column { match &columns[0] { Column::Null { .. } => Column::Null { len: result_size }, Column::EmptyArray { .. } => Column::EmptyArray { len: result_size }, Column::EmptyMap { .. 
} => Column::EmptyMap { len: result_size }, Column::Number(column) => with_number_mapped_type!(|NUM_TYPE| match column { NumberColumn::NUM_TYPE(_) => { let builder = NumberType::<NUM_TYPE>::create_builder(result_size, &[]); Self::take_block_value_types::<NumberType<NUM_TYPE>>(columns, builder, indices) } }), Column::Decimal(column) => with_decimal_type!(|DECIMAL_TYPE| match column { DecimalColumn::DECIMAL_TYPE(_, size) => { let columns = columns .iter() .map(|col| match col { Column::Decimal(DecimalColumn::DECIMAL_TYPE(col, _)) => col, _ => unreachable!(), }) .collect_vec(); let mut builder = Vec::with_capacity(result_size); for &(block_index, row, times) in indices { let val = unsafe { columns[block_index].get_unchecked(row) }; for _ in 0..times { builder.push(*val); } } Column::Decimal(DecimalColumn::DECIMAL_TYPE(builder.into(), *size)) } }), Column::Boolean(_) => { let builder = BooleanType::create_builder(result_size, &[]); Self::take_block_value_types::<BooleanType>(columns, builder, indices) } Column::String(_) => { let builder = StringType::create_builder(result_size, &[]); Self::take_block_value_types::<StringType>(columns, builder, indices) } Column::Timestamp(_) => { let builder = TimestampType::create_builder(result_size, &[]); Self::take_block_value_types::<TimestampType>(columns, builder, indices) } Column::Date(_) => { let builder = DateType::create_builder(result_size, &[]); Self::take_block_value_types::<DateType>(columns, builder, indices) } Column::Array(column) => { let mut offsets = Vec::with_capacity(result_size + 1); offsets.push(0); let builder = ColumnBuilder::with_capacity(&column.values.data_type(), result_size); let builder = ArrayColumnBuilder { builder, offsets }; Self::take_block_value_types::<ArrayType<AnyType>>(columns, builder, indices) } Column::Map(column) => { let mut offsets = Vec::with_capacity(result_size + 1); offsets.push(0); let builder = ColumnBuilder::from_column( ColumnBuilder::with_capacity(&column.values.data_type(), 
result_size).build(), ); let (key_builder, val_builder) = match builder { ColumnBuilder::Tuple(fields) => (fields[0].clone(), fields[1].clone()), _ => unreachable!(), }; let builder = KvColumnBuilder { keys: key_builder, values: val_builder, }; let builder = ArrayColumnBuilder { builder, offsets }; Self::take_block_value_types::<MapType<AnyType, AnyType>>(columns, builder, indices) } Column::Nullable(_) => { let inner_ty = datatype.as_nullable().unwrap(); let inner_columns = columns .iter() .map(|c| match c { Column::Nullable(c) => c.column.clone(), _ => unreachable!(), }) .collect::<Vec<_>>(); let inner_bitmaps = columns .iter() .map(|c| match c { Column::Nullable(c) => Column::Boolean(c.validity.clone()), _ => unreachable!(), }) .collect::<Vec<_>>(); let inner_column = Self::take_column_indices( &inner_columns, *inner_ty.clone(), indices, result_size, ); let inner_bitmap = Self::take_column_indices( &inner_bitmaps, DataType::Boolean, indices, result_size, ); Column::Nullable(Box::new(NullableColumn { column: inner_column, validity: BooleanType::try_downcast_column(&inner_bitmap).unwrap(), })) } Column::Tuple { .. 
} => { let inner_ty = datatype.as_tuple().unwrap(); let inner_columns = columns .iter() .map(|c| match c { Column::Tuple(fields) => fields.clone(), _ => unreachable!(), }) .collect::<Vec<_>>(); let fields: Vec<Column> = inner_ty .iter() .enumerate() .map(|(idx, ty)| { let sub_columns = inner_columns .iter() .map(|c| c[idx].clone()) .collect::<Vec<_>>(); Self::take_column_indices(&sub_columns, ty.clone(), indices, result_size) }) .collect(); Column::Tuple(fields) } Column::Variant(_) => { let builder = VariantType::create_builder(result_size, &[]); Self::take_block_value_types::<VariantType>(columns, builder, indices) } } } fn take_block_value_types<T: ValueType>( columns: &[Column], mut builder: T::ColumnBuilder, indices: &[BlockRowIndex], ) -> Column { let columns = columns .iter() .map(|col| T::try_downcast_column(col).unwrap()) .collect_vec(); for &(block_index, row, times) in indices { let val = unsafe { T::index_column_unchecked(&columns[block_index], row) }; for _ in 0..times { T::push_item(&mut builder, val.clone()) } } T::upcast_column(T::build_column(builder)) } }
/// Returns a freshly boxed zero, used as the heap-allocated accumulator
/// seed for `sum`. Kept as a separate function for API compatibility.
fn mk_int() -> Box<i32> {
    Box::new(0)
}

/// Sums the inclusive integer range `lo..=hi` into a boxed accumulator.
///
/// Fix: the original iterated `0..(hi + 1)`, silently ignoring `lo` and
/// summing from zero instead; it now sums exactly the requested range.
/// An empty range (`lo > hi`) yields 0. The unused `'a` lifetime
/// parameters were removed (call sites are unaffected).
fn sum(lo: i32, hi: i32) -> Box<i32> {
    let mut acc = mk_int();
    for i in lo..=hi {
        *acc += i;
    }
    acc
}
use std::io::ErrorKind::WouldBlock; use std::io::BufWriter; use std::io::prelude::*; use std::fs::File; use std::thread; use std::time::Duration; use image; use scrap::{self, Display, Capturer}; fn main() { list_displays(); take_screenshot(); } fn list_displays() { println!("Listing displays:"); let displays = Display::all().unwrap(); for (i, display) in displays.iter().enumerate() { println!(" Display {} [{}x{}]", i + 1, display.width(), display.height()); } } fn take_screenshot() { let one_second = Duration::new(1, 0); let one_frame = one_second / 60; let display = Display::primary().expect("Couldn't find primary display."); let mut capturer = Capturer::new(display).expect("Couldn't begin capture."); let (w, h) = (capturer.width(), capturer.height()); loop { // Wait until there's a frame. let buffer = match capturer.frame() { Ok(buffer) => buffer, Err(error) => { if error.kind() == WouldBlock { // Keep spinning. thread::sleep(one_frame); continue; } else { panic!("Error: {}", error); } } }; println!("Captured! Saving..."); // Flip the ARGB image into a BGRA image. let mut bitflipped = Vec::with_capacity(w * h * 4); let stride = buffer.len() / h; for y in 0..h { for x in 0..w { let i = stride * y + 4 * x; bitflipped.extend_from_slice(&[ buffer[i + 2], buffer[i + 1], buffer[i], 255, ]); } } // Save the image. image::save_buffer("s.png", &buffer, w as u32, h as u32, image::ColorType::Rgba8); // image::save_buffer("s.png", &buffer, w as u32, h as u32, image::ColorType::Bgra8); println!("Image saved to s.png."); dump_buffer(&buffer, h, w); break; } } fn dump_buffer(buffer: &[u8], h: usize, w: usize) { /* TODO: confused with about BufWriter.write ... 
bfw.write(&buffer); */ let f = File::create("buf.txt").expect("Unable to create file"); let mut bfw = BufWriter::new(f); let stride = buffer.len() / h; for y in 0..h { for x in 0..w { let i = stride * y + 4 * x; bfw.write(buffer[i].to_string().as_bytes()); bfw.write(b":"); bfw.write(buffer[i + 1].to_string().as_bytes()); bfw.write(b":"); bfw.write(buffer[i + 2].to_string().as_bytes()); bfw.write(b":"); bfw.write(buffer[i + 3].to_string().as_bytes()); bfw.write(b" "); } bfw.write(b"\n"); } }
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(catch_expr)] fn use_val<T: Sized>(_x: T) {} pub fn main() { let cfg_res; let _: Result<(), ()> = do catch { Err(())?; cfg_res = 5; Ok::<(), ()>(())?; use_val(cfg_res); }; assert_eq!(cfg_res, 5); //~ ERROR use of possibly uninitialized variable }
use core::cmp::Ordering::{self, Equal, Greater, Less}; use num_bigint::algorithms; use num_bigint::{BigUint, RandPrime}; use num_traits::{FromPrimitive, One}; use rand::prelude::*; use rand::Rng; use crate::errors::{Error, Result}; use num_bigint::algorithms::{__add2, __sub2rev, add2, sub2, sub2rev}; use num_bigint::algorithms::{mac_with_carry, mul3, scalar_mul}; /// This program implements the DSA algorithm defined in FIPS 186-3. /// /// The parameter represents the parameter of the key. /// The bit length of Q must be a multiple of 8. struct Parameters { P: BigUint, Q: BigUint, G: BigUint, } /// PublicKey represents a DSA public key. struct PublicKey { P: BigUint, Q: BigUint, G: BigUint, Y: BigUint, } /// PrivateKey represents a DSA private key. struct PrivateKey { P: BigUint, Q: BigUint, G: BigUint, Y: BigUint, X: BigUint, } const L1024N160: i8 = 0; const L2048N224: i8 = 1; const L2048N256: i8 = 2; const L3072N256: i8 = 3; /// numMRTests is the largest recommendation number selected from Table C.1 of FIPS 186-3. /// It is the quantity used to perform the Miller-Rabin prime number test. 
const numTests: i8 = 64; fn GenerateParameters<L, N>(params: &mut Parameters, rand: Rng, sizes: i8) -> Result<Vec<u8>> { let mut L; let mut N; match sizes { L1024N160 => { let L = 1024; let N = 160; } L2048N224 => { let L = 2048; let N = 224; } L2048N256 => { let L = 2048; let N = 256; } L2048N256 => { let L = 3072; let N = 256; } _ => { return Err(Error::EncodeError { reason: "dsa: invalid ParameterSizes".into(), }) } } let mut qBytes = vec![0u8; N / 8]; let mut pBytes = vec![0u8; L / 8]; let mut q: BigUint; let mut p: BigUint; let mut rem: BigUint; let mut one = BigUint::one(); 'GeneratePrimes: loop { rand.read(&mut qBytes)?; qBytes[qBytes.len() - 1] |= 1; qBytes[0] = 0x80; if !q.probablyprime(numTests) { continue; } for i in 0..4 * L { rand.read(&mut pBytes)?; pBytes[pBytes.len() - 1] |= 1; pBytes[0] |= 0x80; rem.modpow(p, q); rem.Sub(rem, one); p.Sub(p, rem); if p.bits() < L { i += 1; continue; } if !probably_prime(q, numTests) { i += 1; continue; } params.P = p; params.Q = q; break 'GeneratePrimes; } } let mut h: BigUint; let mut g: BigUint; let mut pm1: BigUint; pm1.Sub(p, one); let mut e: BigUint; e.Div(pm1, q); loop { if g.cmp(one) == 0 { h.add(h, one); continue; } params.G = g; return None; } } // GenerateKey generates a public&private key pair. fn GenerateKey(priva: PrivateKey, rand: Rng) -> Result<Vec<u8>> { if priva.P == None || priva.Q == None || priva.G == None { return Err(Error::ParametersNotSet); } let mut x: BigUint; let mut xBytes = vec![0u8; priva.Q.bits() / 8]; loop { rand.read(&mut xBytes)?; if x.Sign() != 0 && x.cmp(priva.Q) < 0 { break; } } priva.X = x; priva.Y.exp(priva.G, x, priva.P); return None; } // fermatInverse calculates the inverse of GP(k). fn fermatInverse(k: BigUint, P: BigUint) -> BigUint { let mut two: BigUint; let mut pMinus2: BigUint; pMinus2.Sub(P, two); return BigUint.exp(k, pMinus2, P); } // Find the sign of the BigUint variable. 
fn Sign(a: BigUint) -> i8 { if a.data == 0 { return 0; } if a.sign < 0 { return -1; } return 1; } /// Signature uses the private key priva to sign a hash of any length. /// It returns the signature as a pair of integers. /// The security of the private key depends on the entropy of rand. fn Signature(rand: Rng, priva: PrivateKey, hash: &[i8]) -> (BigUint, BigUint) { let mut n = priva.Q.bits(); if Sign(priva.Q) <= 0 || Sign(priva.P) <= 0 || Sign(priva.G) <= 0 || Sign(priva.X) <= 0 || n % 8 != 0 { return Err(Error::InvalidPublicKey); } n >>= 3; let mut attwmpts = 10; let mut r: BigUint; let mut s: BigUint; while attwmpts > 0 { let mut k: BigUint; let mut buf = vec![0u8; n]; loop { rand.read(buf); if Sign(k) > 0 && priva.Q.cmp(k) > 0 { break; } } let mut kInv = fermatInverse(k, priva.Q); r.exp(priva.G, k, priva.P); r.modpow(r, priva.Q); if Sign(r) == 0 { attwmpts -= 1; continue; } let mut z = k.SetBytes(hash); s.Mul(priva.X, r); s.add(s, z); s.modpow(s, priva.Q); s.Mul(s, kInv); s.modpow(s, priva.Q); if Sign(s) != 0 { break; } attwmpts -= 1; } if attwmpts == 0 { return (None, None); } return (r, s); } /// Use the public key to verify the signature in the hash r, s. /// It reports whether the signature is valid. fn Verify(publ: PublicKey, hash: &[u8], r: BigUint, s: BigUint) -> bool { if Sign(publ.P) == 0 { return false; } if Sign(r) < 1 || r.cmp(publ.Q) >= 0 { return false; } if Sign(s) < 1 || s.cmp(publ.Q) >= 0 { return false; } let mut w: BigUint; w.modpow(s, publ.Q); if w == None { return false; } let mut n = publ.Q.bits(); if n % 8 != 0 { return false; } let mut z: BigUint; let mut u1: BigUint; u1.Mul(z, w); u1.modpow(u1, publ.Q); let mut u2 = w.Mul(r, w); u2.modpow(u2, publ.Q); let mut v = u1.Exp(publ.G, u1, publ.P); u2.Exp(publ.Y, u2, publ.P); v.Mul(v, u2); v.modpow(v, publ.P); v.modpow(v, publ.Q); return v.cmp(r) == 0; }
extern crate libc; use libc::{pthread_self, pthread_getname_np, pthread_setname_np}; use std::ffi::{CString, CStr, OsStr}; use std::io::{self, Error}; fn main() { #[cfg(target_os = "macos")] let lalala = "la la la la la la la123456789012345678901234567890 la la la l63"; #[cfg(not(any(target_os = "windows", target_os = "macos")))] let lalala = "123456789012345"; println!("{:?}.len() = {}", lalala, lalala.len()); let _original = getname().expect("getname default"); setname(lalala).expect("setname"); let new = getname().expect("getname changed"); assert_eq!(lalala, new); /*unsafe { libc::getchar(); }*/ } fn getname() -> std::io::Result<String> { unsafe { let my_id = pthread_self(); let mut name = [0i8; 256]; let ret = pthread_getname_np(my_id, name.as_mut_ptr(), name.len()); let name = CStr::from_ptr(name.as_ptr()) .to_string_lossy() .to_owned() .into_owned(); println!("{} => {:?}", ret, name); if ret == 0 { Ok(name) } else { Err(Error::last_os_error()) } } } /// This function will truncate the new name to 63 Bytes on mac os x #[cfg(target_os = "macos")] fn setname(new: &str) -> io::Result<()> { let new = CString::new(&new[0..63]).expect("invalide str supplied"); unsafe { let ret = pthread_setname_np(new.as_ptr() as *const i8); if ret == 0 { Ok(()) } else { Err(Error::last_os_error()) } } } #[cfg(target_os = "windows")] fn setname(new: &str) -> io::Result<()> { } /// This function will truncate the new name to to 15 Bytes on linux. #[cfg(not(any(target_os = "windows", target_os = "macos")))] fn setname(new: &str) -> io::Result<()> { let new = CString::new(&new[0..15]).expect("invalide str supplied"); unsafe { let my_id = pthread_self(); let ret = pthread_setname_np(my_id, new.as_ptr() as *const i8); if ret == 0 { Ok(()) } else { println!("setname ret = {}", ret); Err(Error::last_os_error()) } } }
use super::input_to_string; use super::strings::{sass_string, sass_string_dq, sass_string_sq}; use super::util::{opt_spacelike, spacelike2}; use nom::types::CompleteByteSlice as Input; use selectors::{Selector, SelectorPart, Selectors}; named!(pub selectors<Input, Selectors>, map!(separated_nonempty_list!( complete!(do_parse!(tag!(",") >> opt!(is_a!(", \t\n")) >> ())), selector), Selectors::new)); named!(pub selector<Input, Selector>, map!(many1!(selector_part), |s: Vec<SelectorPart>| { let mut s = s; if s.last() == Some(&SelectorPart::Descendant) { s.pop(); } Selector(s) })); named!(selector_part<Input, SelectorPart>, alt_complete!( map!(sass_string, SelectorPart::Simple) | value!(SelectorPart::Simple("*".into()), tag!("*")) | do_parse!(tag!("::") >> name: sass_string >> arg: opt!(delimited!(tag!("("), selectors, tag!(")"))) >> (SelectorPart::PseudoElement { name, arg, })) | do_parse!(tag!(":") >> name: sass_string >> arg: opt!(delimited!(tag!("("), selectors, tag!(")"))) >> (SelectorPart::Pseudo { name, arg, })) | do_parse!(tag!("[") >> opt_spacelike >> name: sass_string >> opt_spacelike >> op: map_res!( alt_complete!(tag!("*=") | tag!("|=") | tag!("=")), input_to_string ) >> opt_spacelike >> val: alt_complete!(sass_string_dq | sass_string_sq | sass_string) >> opt_spacelike >> tag!("]") >> (SelectorPart::Attribute { name, op, val, })) | do_parse!(tag!("[") >> opt_spacelike >> name: sass_string >> opt_spacelike >> tag!("]") >> (SelectorPart::Attribute { name, op: "".to_string(), val: "".into(), })) | value!(SelectorPart::BackRef, tag!("&")) | delimited!(opt_spacelike, alt!(value!(SelectorPart::RelOp(b'>'), tag!(">")) | value!(SelectorPart::RelOp(b'+'), tag!("+")) | value!(SelectorPart::RelOp(b'~'), tag!("~")) | value!(SelectorPart::RelOp(b'\\'), tag!("\\"))), opt_spacelike) | value!(SelectorPart::Descendant, spacelike2) )); #[cfg(test)] mod test { use super::*; use sass::{SassString, StringPart}; use value::Quotes; #[test] fn simple_selector() { assert_eq!( 
selector(Input(b"foo ")), Ok(( Input(b""), Selector(vec![SelectorPart::Simple("foo".into())]) )) ) } #[test] fn escaped_simple_selector() { assert_eq!( selector(Input(b"\\E9m ")), Ok(( Input(b""), Selector(vec![SelectorPart::Simple("\\E9m".into())]) )) ) } #[test] fn selector2() { assert_eq!( selector(Input(b"foo bar ")), Ok(( Input(b""), Selector(vec![ SelectorPart::Simple("foo".into()), SelectorPart::Descendant, SelectorPart::Simple("bar".into()), ]) )) ) } #[test] fn child_selector() { assert_eq!( selector(Input(b"foo > bar ")), Ok(( Input(b""), Selector(vec![ SelectorPart::Simple("foo".into()), SelectorPart::RelOp(b'>'), SelectorPart::Simple("bar".into()), ]) )) ) } #[test] fn foo1_selector() { assert_eq!( selector(Input(b"[data-icon='test-1'] ")), Ok(( Input(b""), Selector(vec![SelectorPart::Attribute { name: "data-icon".into(), op: "=".into(), val: SassString::new( vec![StringPart::Raw("test-1".into())], Quotes::Single, ), }]) )) ) } #[test] fn pseudo_selector() { assert_eq!( selector(Input(b":before ")), Ok(( Input(b""), Selector(vec![SelectorPart::Pseudo { name: "before".into(), arg: None, }]) )) ) } #[test] fn pseudo_on_simple_selector() { assert_eq!( selector(Input(b"figure:before ")), Ok(( Input(b""), Selector(vec![ SelectorPart::Simple("figure".into()), SelectorPart::Pseudo { name: "before".into(), arg: None, }, ]) )) ) } #[test] fn selectors_simple() { assert_eq!( selectors(Input(b"foo, bar ")), Ok(( Input(b""), Selectors::new(vec![ Selector(vec![SelectorPart::Simple("foo".into())]), Selector(vec![SelectorPart::Simple("bar".into())]), ]) )) ) } }
#![warn(clippy::all)]
#![warn(clippy::pedantic)]

/// Entry point — delegates to `run`.
fn main() {
    run();
}

/// Finds the largest palindromic product of two three-digit factors
/// (Project Euler #4) and prints it with the elapsed time in nanoseconds.
fn run() {
    let start = std::time::Instant::now();

    let mut best = 0;
    for a in 100..1000 {
        for b in 100..1000 {
            let product = a * b;
            if product > best && is_palindrome(product) {
                best = product;
            }
        }
    }

    let elapsed = start.elapsed().as_nanos();
    println!("{} {}", best, elapsed);
}

/// Returns true when `n`'s decimal digits read the same forwards and
/// backwards, by building the digit-reversed value and comparing.
fn is_palindrome(n: u64) -> bool {
    let mut remaining = n;
    let mut reversed: u64 = 0;
    while remaining != 0 {
        reversed = reversed * 10 + remaining % 10;
        remaining /= 10;
    }
    reversed == n
}
// -*- coding: utf-8; mode: rust; -*- // // To the extent possible under law, the authors have waived all // copyright and related or neighboring rights to zkp, // using the Creative Commons "CC0" public domain dedication. See // <http://creativecommons.org/publicdomain/zero/1.0/> for full // details. // // Authors: // - Henry de Valence <hdevalence@hdevalence.ca> #![feature(test)] extern crate bincode; extern crate curve25519_dalek; extern crate serde; #[macro_use] extern crate serde_derive; extern crate sha2; #[macro_use] extern crate zkp; extern crate test; mod cmz { // Proof statement for "credential presentation with 10 hidden attributes" from CMZ'13. define_proof! { cred_show_10, "CMZ cred show n=10", (m_1, m_2, m_3, m_4, m_5, m_6, m_7, m_8, m_9, m_10, z_1, z_2, z_3, z_4, z_5, z_6, z_7, z_8, z_9, z_10, minus_z_Q), (C_1, C_2, C_3, C_4, C_5, C_6, C_7, C_8, C_9, C_10, P, Q, V), (X_1, X_2, X_3, X_4, X_5, X_6, X_7, X_8, X_9, X_10, A, B) : C_1 = (P ^ m_1 * A ^ z_1 ) && C_2 = (P ^ m_2 * A ^ z_2 ) && C_3 = (P ^ m_3 * A ^ z_3 ) && C_4 = (P ^ m_4 * A ^ z_4 ) && C_5 = (P ^ m_5 * A ^ z_5 ) && C_6 = (P ^ m_6 * A ^ z_6 ) && C_7 = (P ^ m_7 * A ^ z_7 ) && C_8 = (P ^ m_8 * A ^ z_8 ) && C_9 = (P ^ m_9 * A ^ z_9 ) && C_10 = (P ^ m_10 * A ^ z_10 ) && V = (X_1^m_1 * X_2 ^ m_2* X_3 ^ m_3* X_4 ^ m_4* X_5 ^ m_5* X_6 ^ m_6 * X_7 ^ m_7* X_8 ^ m_8* X_9 ^ m_9* X_10 ^ m_10* Q ^ minus_z_Q) } } define_proof! {dleq, "DLEQ proof", (x), (A, B, H), (G) : A = (G ^ x) && B = (H ^ x) }
use glam::{Mat3, Mat4, Quat, Vec2}; use log::debug; use serde_derive::{Deserialize, Serialize}; use std::collections::VecDeque; /// Transform of an element to place it on the screen #[derive(Debug, Clone, Copy, Serialize, Deserialize, Default)] pub struct Transform { /// Translation along x-y pub translation: Vec2, /// Scale along x-y pub scale: Vec2, /// rotation along z pub rotation: f32, #[serde(default = "default_dirty")] pub dirty: bool, } impl Transform { /// Get the model matrix for the transform pub fn to_model(&self) -> Mat4 { Mat4::from_scale_rotation_translation( self.scale.extend(0.0), Quat::from_rotation_z(self.rotation), self.translation.extend(0.0), ) } pub fn to_mat(&self) -> glam::Mat3 { glam::Mat3::from_scale_angle_translation(self.scale, self.rotation, self.translation) } pub fn translate(&mut self, translation: Vec2) { self.translation += translation; self.dirty = true; } } /// Transform relative the the parent component. #[derive(Debug, Serialize, Deserialize, Clone)] pub struct LocalTransform { pub translation: Vec2, pub scale: Vec2, pub rotation: f32, #[serde(default = "default_dirty")] pub dirty: bool, } impl From<Transform> for LocalTransform { fn from(t: Transform) -> Self { Self { translation: t.translation, scale: t.scale, rotation: t.rotation, dirty: true, } } } fn default_dirty() -> bool { true } impl LocalTransform { pub fn new(translation: Vec2, rotation: f32, scale: Vec2) -> Self { Self { translation, scale, rotation, dirty: true, } } pub fn to_model(&self) -> glam::Mat4 { glam::Mat4::from_scale_rotation_translation( self.scale.extend(0.0), Quat::from_rotation_z(self.rotation), self.translation.extend(0.0), ) } pub fn to_mat(&self) -> glam::Mat3 { glam::Mat3::from_scale_angle_translation(self.scale, self.rotation, self.translation) } } pub struct HasParent { pub entity: hecs::Entity, } pub struct HasChildren { pub children: Vec<hecs::Entity>, } pub fn update_transforms(world: &mut hecs::World) { let mut to_process = VecDeque::new(); 
// first gather the entities to update. for (e, (transform, has_children)) in world.query::<(&mut Transform, &HasChildren)>().iter() { // Root entities. if let Ok(_) = world.get::<HasParent>(e) { continue; } debug!("Will process {:?}", e); // Process all parents even if their transform is not dirty. The reason is that children // can be moved independently, so we would need to update their children. for child in &has_children.children { to_process.push_back((transform.clone(), *child)); } transform.dirty = false; } debug!("Local Transform to update = {:?}", to_process); // process in order of insertion. while let Some((t, child)) = to_process.pop_front() { let parent_matrix = t.to_mat(); // First, calculate the new transform. let mut global_transform = world .get_mut::<Transform>(child) .expect("Child component should have a global transform"); let mut local_transform = world .get_mut::<LocalTransform>(child) .expect("Child component should have a local transform"); if local_transform.dirty || t.dirty { debug!("Will update transforms"); // Need to recalculate the global transform. let local_matrix = local_transform.to_mat(); debug!("Local Matrix = {:#?}", local_matrix); let new_global_matrix = Mat3::from_scale_angle_translation(Vec2::one(), t.rotation, t.translation) * local_matrix; debug!("parent Matrix = {:#?}", parent_matrix); debug!("new global Matrix = {:#?}", new_global_matrix); let (rot, translation) = decompose_mat3(new_global_matrix); global_transform.rotation = rot; global_transform.translation = translation; global_transform.dirty = true; } if let Ok(children) = world.get::<HasChildren>(child) { for child_of_child in &children.children { to_process.push_back((*global_transform, *child_of_child)); } } global_transform.dirty = false; local_transform.dirty = false; } } /// assume scale is always 1 to simplify. Only true in that specific case of course. 
/// Extracts the (rotation angle, translation) pair from an affine 2D
/// transform matrix. Assumes the scale component is always 1 to simplify —
/// only true in that specific caller's case, of course.
fn decompose_mat3(mat: Mat3) -> (f32, Vec2) {
    // With unit scale, the x basis vector is (cos θ, sin θ), so the
    // rotation angle falls straight out of atan2.
    let x_basis = mat.x_axis;
    let rotation = f32::atan2(x_basis.y, x_basis.x);
    // The third column of an affine Mat3 carries the translation.
    let offset = glam::vec2(mat.z_axis.x, mat.z_axis.y);
    (rotation, offset)
}
use crate::common::get_rng;
use rand::prelude::*;

// A grid of cells scored by how well adjacent cells' edges line up.
// NOTE(review): the score appears to be an energy minimized by a
// simulated-annealing style search (see `random_neighbor`) — confirm against caller.
#[derive(Debug, Clone, PartialEq)]
pub struct Matrix {
    cells: Vec<Cell>,
    width: usize,
    height: usize,
    // Cached result of `calculate_score`, kept in sync by the constructors.
    score: f64,
}

// One grid cell: empty, fully filled, or a diagonal half-fill named by the
// corner of the cell that is filled.
#[derive(Debug, Clone, PartialEq)]
pub enum Cell {
    Empty,
    Filled,
    TopRight,
    BottomRight,
    BottomLeft,
    TopLeft,
}

impl Matrix {
    // Builds a uniformly random matrix and computes its score once.
    pub fn random(width: usize, height: usize) -> Matrix {
        let mut rng = get_rng();
        let matrix = Matrix {
            cells: (0..width * height).map(|_| rng.gen()).collect(),
            width,
            height,
            score: 0.0,
        };
        let score = matrix.calculate_score();
        Matrix { score, ..matrix }
    }

    // Bounds-checked cell access: anything outside the grid reads as `Filled`,
    // so the border behaves like a solid frame during scoring.
    pub fn get(&self, x: isize, y: isize) -> &Cell {
        if x < 0 || self.width as isize <= x || y < 0 || self.height as isize <= y {
            &Cell::Filled
        } else {
            &self.cells[y as usize * self.width + x as usize]
        }
    }

    // Rows as contiguous slices, top to bottom.
    pub fn rows(&self) -> impl Iterator<Item = &[Cell]> {
        self.cells.chunks(self.width)
    }

    // Clone with exactly one randomly chosen cell re-rolled; score recomputed.
    pub fn random_neighbor(&self) -> Matrix {
        let mut rng = get_rng();
        let mut neighbor = self.clone();
        *neighbor.cells.iter_mut().choose(&mut rng).unwrap() = rng.gen();
        neighbor.score = neighbor.calculate_score();
        neighbor
    }

    // Counts edge-compatibility violations between each cell and its right,
    // bottom, top-right and bottom-right neighbors, plus an isolation penalty,
    // then blends the error count (75%) with fill density (25%), normalized by
    // cell count. The ranges run -1..=dim so the solid border is scored too.
    // CAUTION: the match tables below are order-sensitive (guarded arms and
    // wildcards shadow later arms) — do not reorder.
    fn calculate_score(&self) -> f64 {
        let mut errors = 0;
        for y in -1..=self.height as isize {
            for x in -1..=self.width as isize {
                let center = self.get(x, y);
                let top = self.get(x, y - 1);
                let top_right = self.get(x + 1, y - 1);
                let right = self.get(x + 1, y);
                let bottom_right = self.get(x + 1, y + 1);
                let bottom = self.get(x, y + 1);
                let bottom_left = self.get(x - 1, y + 1);
                let left = self.get(x - 1, y);
                let top_left = self.get(x - 1, y - 1);

                // Horizontal edge: does `center`'s right edge match `right`'s left edge?
                match (center, right) {
                    (Cell::Empty, _) => {}
                    (_, Cell::Empty) => {}
                    (Cell::Filled, Cell::TopRight) | (Cell::Filled, Cell::BottomRight) => {
                        errors += 1
                    }
                    (Cell::Filled, Cell::Filled)
                    | (Cell::Filled, Cell::BottomLeft)
                    | (Cell::Filled, Cell::TopLeft) => {}
                    (Cell::TopRight, Cell::TopRight) | (Cell::TopRight, Cell::BottomRight) => {
                        errors += 1
                    }
                    (Cell::TopRight, Cell::Filled)
                    | (Cell::TopRight, Cell::BottomLeft)
                    | (Cell::TopRight, Cell::TopLeft) => {}
                    (Cell::BottomRight, Cell::TopRight)
                    | (Cell::BottomRight, Cell::BottomRight) => errors += 1,
                    (Cell::BottomRight, Cell::Filled)
                    | (Cell::BottomRight, Cell::BottomLeft)
                    | (Cell::BottomRight, Cell::TopLeft) => {}
                    (Cell::BottomLeft, Cell::Filled)
                    | (Cell::BottomLeft, Cell::TopRight)
                    | (Cell::BottomLeft, Cell::BottomLeft)
                    | (Cell::BottomLeft, Cell::TopLeft) => errors += 1,
                    (Cell::BottomLeft, Cell::BottomRight) => {}
                    (Cell::TopLeft, Cell::Filled)
                    | (Cell::TopLeft, Cell::BottomRight)
                    | (Cell::TopLeft, Cell::BottomLeft)
                    | (Cell::TopLeft, Cell::TopLeft) => errors += 1,
                    (Cell::TopLeft, Cell::TopRight) => {}
                }

                // Vertical edge: `center`'s bottom edge against `bottom`'s top edge.
                match (center, bottom) {
                    (Cell::Empty, _) => {}
                    (_, Cell::Empty) => {}
                    (Cell::Filled, Cell::Filled)
                    | (Cell::Filled, Cell::TopRight)
                    | (Cell::Filled, Cell::TopLeft) => {}
                    (Cell::Filled, Cell::BottomRight) | (Cell::Filled, Cell::BottomLeft) => {
                        errors += 1
                    }
                    (Cell::TopRight, Cell::BottomRight) => {}
                    (Cell::TopRight, Cell::Filled)
                    | (Cell::TopRight, Cell::TopRight)
                    | (Cell::TopRight, Cell::BottomLeft)
                    | (Cell::TopRight, Cell::TopLeft) => errors += 1,
                    (Cell::BottomRight, Cell::Filled)
                    | (Cell::BottomRight, Cell::TopRight)
                    | (Cell::BottomRight, Cell::TopLeft) => {}
                    (Cell::BottomRight, Cell::BottomRight)
                    | (Cell::BottomRight, Cell::BottomLeft) => errors += 1,
                    (Cell::BottomLeft, Cell::Filled)
                    | (Cell::BottomLeft, Cell::TopRight)
                    | (Cell::BottomLeft, Cell::TopLeft) => {}
                    (Cell::BottomLeft, Cell::BottomRight)
                    | (Cell::BottomLeft, Cell::BottomLeft) => errors += 1,
                    (Cell::TopLeft, Cell::BottomLeft) => {}
                    (Cell::TopLeft, Cell::Filled)
                    | (Cell::TopLeft, Cell::TopRight)
                    | (Cell::TopLeft, Cell::BottomRight)
                    | (Cell::TopLeft, Cell::TopLeft) => errors += 1,
                }

                // Diagonal corner: `center` vs its top-right neighbor; guards allow
                // configurations that are bridged by the shared `top`/`right` cells.
                match (center, top_right) {
                    (Cell::BottomRight, Cell::BottomRight) => {}
                    (Cell::TopLeft, Cell::TopLeft) => {}
                    (Cell::BottomRight, Cell::TopLeft)
                        if top == &Cell::TopRight && right == &Cell::BottomLeft => {}
                    (Cell::BottomRight, Cell::TopLeft) => errors += 1,
                    (Cell::TopLeft, Cell::BottomRight)
                        if right == &Cell::TopRight && top == &Cell::BottomLeft => {}
                    (Cell::TopLeft, Cell::BottomRight) => errors += 1,
                    (Cell::BottomRight, _) if top == &Cell::TopRight => {}
                    (Cell::BottomRight, _) => errors += 1,
                    (Cell::TopLeft, _) if right == &Cell::TopRight => {}
                    (Cell::TopLeft, _) => errors += 1,
                    (_, Cell::BottomRight) if top == &Cell::BottomLeft => {}
                    (_, Cell::BottomRight) => errors += 1,
                    (_, Cell::TopLeft) if right == &Cell::BottomLeft => {}
                    (_, Cell::TopLeft) => errors += 1,
                    (Cell::Empty, _) => {}
                    (_, Cell::Empty) => {}
                    (Cell::Filled, Cell::Filled) | (Cell::Filled, Cell::BottomLeft) => {}
                    (Cell::Filled, Cell::TopRight) => errors += 1,
                    (Cell::TopRight, Cell::Filled) | (Cell::TopRight, Cell::BottomLeft) => {}
                    (Cell::TopRight, Cell::TopRight) => errors += 1,
                    (Cell::BottomLeft, Cell::TopRight) => {}
                    (Cell::BottomLeft, Cell::Filled) | (Cell::BottomLeft, Cell::BottomLeft) => {
                        errors += 1
                    }
                }

                // Mirror of the previous table for the bottom-right diagonal.
                match (center, bottom_right) {
                    (Cell::TopRight, Cell::TopRight) => {}
                    (Cell::BottomLeft, Cell::BottomLeft) => {}
                    (Cell::BottomLeft, Cell::TopRight)
                        if right == &Cell::BottomRight && bottom == &Cell::TopLeft => {}
                    (Cell::BottomLeft, Cell::TopRight) => errors += 1,
                    (Cell::TopRight, Cell::BottomLeft)
                        if bottom == &Cell::BottomRight && right == &Cell::TopLeft => {}
                    (Cell::TopRight, Cell::BottomLeft) => errors += 1,
                    (Cell::TopRight, _) if bottom == &Cell::BottomRight => {}
                    (Cell::TopRight, _) => errors += 1,
                    (Cell::BottomLeft, _) if right == &Cell::BottomRight => {}
                    (Cell::BottomLeft, _) => errors += 1,
                    (_, Cell::TopRight) if bottom == &Cell::TopLeft => {}
                    (_, Cell::TopRight) => errors += 1,
                    (_, Cell::BottomLeft) if right == &Cell::TopLeft => {}
                    (_, Cell::BottomLeft) => errors += 1,
                    (Cell::Empty, _) => {}
                    (_, Cell::Empty) => {}
                    (Cell::Filled, Cell::Filled) | (Cell::Filled, Cell::TopLeft) => {}
                    (Cell::Filled, Cell::BottomRight) => errors += 1,
                    (Cell::BottomRight, Cell::Filled) | (Cell::BottomRight, Cell::TopLeft) => {}
                    (Cell::BottomRight, Cell::BottomRight) => errors += 1,
                    (Cell::TopLeft, Cell::BottomRight) => {}
                    (Cell::TopLeft, Cell::Filled)
                    | (Cell::TopLeft, Cell::TopLeft) => errors += 1,
                }

                // Isolation penalty: a non-empty cell whose filled side faces a run of
                // three empty neighbors costs 3. For `Filled`, every 3-neighbor window
                // around the cell is checked (the list wraps by repeating top/top_right).
                match center {
                    Cell::Empty => {}
                    Cell::Filled => {
                        for window in [
                            top, top_right, right, bottom_right, bottom, bottom_left, left,
                            top_left, top, top_right,
                        ]
                        .windows(3)
                        {
                            if window == [&Cell::Empty; 3] {
                                errors += 3
                            }
                        }
                    }
                    Cell::TopRight => {
                        if [top, top_right, right] == [&Cell::Empty; 3] {
                            errors += 3
                        }
                    }
                    Cell::BottomRight => {
                        if [right, bottom_right, bottom] == [&Cell::Empty; 3] {
                            errors += 3
                        }
                    }
                    Cell::BottomLeft => {
                        if [bottom, bottom_left, left] == [&Cell::Empty; 3] {
                            errors += 3
                        }
                    }
                    Cell::TopLeft => {
                        if [left, top_left, top] == [&Cell::Empty; 3] {
                            errors += 3
                        }
                    }
                }
            }
        }

        let cells_len = self.cells.len() as f64;
        // Half-filled cells count as 0.5 toward density.
        let density = self
            .cells
            .iter()
            .map(|cell| match cell {
                Cell::Empty => 0.0,
                Cell::Filled => 1.0,
                _ => 0.5,
            })
            .sum::<f64>();

        (f64::from(errors) * 0.75 + density * 0.25) / cells_len
    }

    // The cached score computed at construction time.
    pub fn score(&self) -> f64 {
        self.score
    }
}

// Lets `rng.gen()` produce a uniformly random `Cell`.
impl Distribution<Cell> for rand::distributions::Standard {
    fn sample<R>(&self, rng: &mut R) -> Cell
    where
        R: Rng + ?Sized,
    {
        [
            Cell::Empty,
            Cell::Filled,
            Cell::TopRight,
            Cell::BottomRight,
            Cell::BottomLeft,
            Cell::TopLeft,
        ]
        .choose(rng)
        .unwrap()
        .clone()
    }
}
use std;
use std::os::raw::c_void;
use core::structs::MLP;
use core::points::array_to_point;
use core::mlp::*;

// C-callable constructor for an MLP.
// `entries` is expected to point to `length` consecutive i32 layer sizes;
// each size is incremented by one (presumably to make room for a bias unit —
// TODO confirm against `init_weights`). The returned pointer is heap-owned;
// the caller is responsible for freeing it.
#[no_mangle]
pub extern fn create_model(entries: *mut c_void, length: i32, class: bool) -> *mut MLP {
    let mlp: &[i32];
    // SAFETY: relies on the FFI caller passing a valid, aligned i32 buffer of
    // at least `length` elements that outlives this borrow — not checkable here.
    unsafe{
        mlp = std::slice::from_raw_parts(entries as *mut i32, length as usize);
    }
    let mut max: i32 = 0;
    let mut neurals = mlp.to_vec();
    // Add the extra unit per layer and track the widest layer.
    for nb in neurals.iter_mut() {
        *nb += 1;
        if *nb > max {
            max = *nb;
        }
    }
    let weights = init_weights(&neurals, max);
    let output = init_all(&neurals);
    let delta = init_all(&neurals);
    let m = MLP {
        weights,
        output,
        neurals: neurals,
        delta,
        classification : class
    };
    // Leak the box so ownership crosses the FFI boundary as a raw pointer.
    Box::into_raw(Box::new(m))
}

// Runs `nb` training epochs over the flat (x, y, label?) buffer `pts`.
// SAFETY contract: `mlp` must come from `create_model` and `pts` must point to
// `length` valid f64s; `array_to_point` defines the per-point layout.
#[no_mangle]
pub unsafe extern fn train_weights(mlp: *mut MLP, nb: i32, pts: *mut c_void, length: i32) {
    let points = array_to_point(std::slice::from_raw_parts(pts as *mut f64, length as usize));
    for _i in 0..nb{
        for j in 0..points.len(){
            let point = &points[j as usize];
            train_neural(mlp, &point);
        }
    }
}

// Forward pass for a single 2-D input; returns the first output neuron.
// Indexing starts at 1 because slot 0 of each layer is the bias unit added in
// `create_model` (presumably — TODO confirm against `default_bottom`).
#[no_mangle]
pub unsafe extern fn prediction(mlp: *mut MLP, point: [f64; 2]) -> f64 {
    default_bottom(mlp);
    // Load the input pair into the first layer.
    (*mlp).output[0][1] = point[0];
    (*mlp).output[0][2] = point[1];
    // Propagate layer by layer.
    for i in 1..(*mlp).neurals.len() {
        let nb = (*mlp).neurals[i];
        for j in 1..nb {
            (*mlp).output[i][j as usize] = calculate_output_prediction(mlp, i as i32,j);
        }
    }
    (*mlp).output[(*mlp).neurals.len() - 1][1]
}
use std::{collections::HashSet, fmt};

use crossterm::style::Color;
use lazy_static::lazy_static;
use strum::IntoEnumIterator;
use strum_macros::EnumIter;

lazy_static! {
    pub static ref ALL_VALUES: HashSet<Value> = Value::iter().collect();
}

/// A digit 1–9, declared in ascending order.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug, EnumIter)]
pub enum Value {
    One,
    Two,
    Three,
    Four,
    Five,
    Six,
    Seven,
    Eight,
    Nine,
}

impl Value {
    /// The terminal color used to render this value.
    pub fn color(self) -> Color {
        // Tiny constructor so every arm reads as a flat (r, g, b) triple.
        let rgb = |r, g, b| Color::Rgb { r, g, b };
        match self {
            Self::One => rgb(255, 0, 0),
            Self::Two => rgb(255, 120, 0),
            Self::Three => rgb(255, 255, 0),
            Self::Four => rgb(120, 255, 0),
            Self::Five => rgb(0, 180, 80),
            Self::Six => rgb(0, 150, 255),
            Self::Seven => rgb(60, 80, 220),
            Self::Eight => rgb(140, 40, 255),
            Self::Nine => rgb(240, 20, 255),
        }
    }
}

impl fmt::Display for Value {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Variants are declared in order with default discriminants 0..=8,
        // so the discriminant maps directly onto the digits '1'..='9'.
        write!(f, "{}", char::from(b'1' + *self as u8))
    }
}

impl From<u8> for Value {
    /// Panics when `number` is outside 1..=9.
    fn from(number: u8) -> Self {
        match number {
            1 => Self::One,
            2 => Self::Two,
            3 => Self::Three,
            4 => Self::Four,
            5 => Self::Five,
            6 => Self::Six,
            7 => Self::Seven,
            8 => Self::Eight,
            9 => Self::Nine,
            _ => panic!("value cannot be less than 1, or greater than 9"),
        }
    }
}
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::ops::{Index, IndexMut};

/// Two-field struct indexed by `isize`: index 0 selects `x`,
/// anything else selects `y`.
struct Foo {
    x: isize,
    y: isize,
}

impl Index<isize> for Foo {
    type Output = isize;

    fn index(&self, z: isize) -> &isize {
        match z {
            0 => &self.x,
            _ => &self.y,
        }
    }
}

impl IndexMut<isize> for Foo {
    fn index_mut(&mut self, z: isize) -> &mut isize {
        match z {
            0 => &mut self.x,
            _ => &mut self.y,
        }
    }
}

/// Helper trait with one receiver of each kind (`self`, `&self`, `&mut self`)
/// so indexing expressions can be used as every kind of method receiver.
trait Int {
    fn get(self) -> isize;
    fn get_from_ref(&self) -> isize;
    fn inc(&mut self);
}

impl Int for isize {
    fn get(self) -> isize {
        self
    }
    fn get_from_ref(&self) -> isize {
        *self
    }
    fn inc(&mut self) {
        *self += 1;
    }
}

fn main() {
    let mut foo = Foo { x: 1, y: 2 };

    // Plain reads and writes through Index / IndexMut.
    assert_eq!(foo[1], 2);
    foo[0] = 3;
    assert_eq!(foo[0], 3);

    // Borrowing the indexed place mutably, then immutably.
    {
        let slot = &mut foo[1];
        *slot = 4;
    }
    {
        let slot = &foo[1];
        assert_eq!(*slot, 4);
    }

    // Indexing expressions as `&mut self`, `self`, and `&self` receivers.
    foo[1].inc();
    assert_eq!(foo[1].get(), 5);
    assert_eq!(foo[1].get_from_ref(), 5);
}
use async_graphql::*;
use futures_util::Stream;
use serde::Deserialize;

// Hiding a type with `#[graphql(visible = false)]` makes introspection return
// null for it and removes it from `__schema.types`.
#[tokio::test]
pub async fn test_type_visible() {
    #[derive(SimpleObject)]
    #[graphql(visible = false)]
    struct MyObj {
        a: i32,
    }

    struct Query;

    #[Object]
    #[allow(unreachable_code)]
    impl Query {
        async fn obj(&self) -> MyObj {
            todo!()
        }
    }

    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyObj") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({
            "__type": null,
        })
    );

    // Deserialization targets for the introspection response.
    #[derive(Deserialize)]
    struct QueryResponse {
        #[serde(rename = "__schema")]
        schema: SchemaResponse,
    }

    #[derive(Deserialize)]
    struct SchemaResponse {
        types: Vec<TypeResponse>,
    }

    #[derive(Deserialize)]
    struct TypeResponse {
        name: String,
    }

    let resp: QueryResponse = from_value(
        schema
            .execute(r#"{ __schema { types { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
    )
    .unwrap();
    assert!(!resp.schema.types.into_iter().any(|ty| ty.name == "MyObj"));
}

// Hidden fields are dropped from both object types and the root Query type.
#[tokio::test]
pub async fn test_field_visible() {
    #[derive(SimpleObject)]
    struct MyObj {
        a: i32,
        #[graphql(visible = false)]
        b: i32,
    }

    struct Query;

    #[Object]
    #[allow(unreachable_code)]
    impl Query {
        async fn obj(&self) -> MyObj {
            todo!()
        }

        #[graphql(visible = false)]
        async fn c(&self) -> i32 {
            todo!()
        }
    }

    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);

    #[derive(Debug, Deserialize)]
    struct QueryResponse {
        #[serde(rename = "__type")]
        ty: TypeResponse,
    }

    #[derive(Debug, Deserialize)]
    struct TypeResponse {
        fields: Vec<FieldResposne>,
    }

    #[derive(Debug, Deserialize)]
    struct FieldResposne {
        name: String,
    }

    // Only `a` survives on MyObj; `b` is hidden.
    let resp: QueryResponse = from_value(
        schema
            .execute(r#"{ __type(name: "MyObj") { fields { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
    )
    .unwrap();
    assert_eq!(
        resp.ty
            .fields
            .iter()
            .map(|field| field.name.as_str())
            .collect::<Vec<_>>(),
        vec!["a"]
    );

    // Only `obj` survives on Query; `c` is hidden.
    let resp: QueryResponse = from_value(
        schema
            .execute(r#"{ __type(name: "Query") { fields { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
    )
    .unwrap();
    assert_eq!(
        resp.ty
            .fields
            .iter()
            .map(|field| field.name.as_str())
            .collect::<Vec<_>>(),
        vec!["obj"]
    );
}

// Hidden enum variants are excluded from `enumValues`.
#[tokio::test]
pub async fn test_enum_value_visible() {
    #[derive(Enum, Eq, PartialEq, Copy, Clone)]
    enum MyEnum {
        A,
        B,
        #[graphql(visible = false)]
        C,
    }

    struct Query;

    #[Object]
    #[allow(unreachable_code)]
    impl Query {
        async fn e(&self) -> MyEnum {
            todo!()
        }
    }

    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);

    #[derive(Debug, Deserialize)]
    struct QueryResponse {
        #[serde(rename = "__type")]
        ty: TypeResponse,
    }

    #[derive(Debug, Deserialize)]
    #[serde(rename_all = "camelCase")]
    struct TypeResponse {
        enum_values: Vec<EnumValueResponse>,
    }

    #[derive(Debug, Deserialize)]
    struct EnumValueResponse {
        name: String,
    }

    let resp: QueryResponse = from_value(
        schema
            .execute(r#"{ __type(name: "MyEnum") { enumValues { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
    )
    .unwrap();
    assert_eq!(
        resp.ty
            .enum_values
            .iter()
            .map(|value| value.name.as_str())
            .collect::<Vec<_>>(),
        vec!["A", "B"]
    );
}

// `visible` can point at a function evaluated per request; visibility then
// depends on request data (here an IsAdmin flag).
#[tokio::test]
pub async fn test_visible_fn() {
    mod nested {
        use async_graphql::Context;

        pub struct IsAdmin(pub bool);

        pub fn is_admin(ctx: &Context<'_>) -> bool {
            ctx.data_unchecked::<IsAdmin>().0
        }
    }

    use nested::IsAdmin;

    #[derive(SimpleObject)]
    #[graphql(visible = "nested::is_admin")]
    struct MyObj {
        a: i32,
    }

    struct Query;

    #[Object]
    #[allow(unreachable_code)]
    impl Query {
        async fn obj(&self) -> MyObj {
            todo!()
        }
    }

    let schema = Schema::new(Query, EmptyMutation, EmptySubscription);

    // Non-admin request: type is hidden.
    assert_eq!(
        schema
            .execute(Request::new(r#"{ __type(name: "MyObj") { name } }"#).data(IsAdmin(false)))
            .await
            .into_result()
            .unwrap()
            .data,
        value!({
            "__type": null,
        })
    );

    // Admin request: type is visible.
    assert_eq!(
        schema
            .execute(Request::new(r#"{ __type(name: "MyObj") { name } }"#).data(IsAdmin(true)))
            .await
            .into_result()
            .unwrap()
            .data,
        value!({
            "__type": {
                "name": "MyObj",
            },
        })
    );
}

// Types that are only reachable through hidden fields/arguments/directives
// must themselves disappear from introspection.
#[tokio::test]
pub async fn test_indirect_hiding_type() {
    #[derive(Enum, Eq, PartialEq, Copy, Clone)]
    enum MyEnum1 {
        A,
    }

    #[derive(Enum, Eq, PartialEq, Copy, Clone)]
    enum MyEnum2 {
        A,
    }

    struct MyDirective;

    impl CustomDirective for MyDirective {}

    // MyEnum1 is reachable via a visible directive; MyEnum2 only via a hidden one.
    #[Directive(location = "field")]
    fn my_directive1(_a: MyEnum1) -> impl CustomDirective {
        MyDirective
    }

    #[Directive(location = "field", visible = false)]
    fn my_directive2(_a: MyEnum2) -> impl CustomDirective {
        MyDirective
    }

    #[derive(SimpleObject)]
    struct MyObj1 {
        a: i32,
        b: MyObj2,
        c: MyObj3,
    }

    #[derive(SimpleObject)]
    struct MyObj2 {
        a: i32,
    }

    #[derive(SimpleObject)]
    struct MyObj3 {
        a: i32,
        #[graphql(visible = false)]
        b: MyObj5,
    }

    #[derive(SimpleObject)]
    #[graphql(visible = false)]
    struct MyObj4 {
        a: i32,
    }

    #[derive(SimpleObject)]
    struct MyObj5 {
        a: i32,
    }

    #[derive(InputObject)]
    struct MyInputObj1 {
        a: i32,
        b: MyInputObj2,
        c: MyInputObj3,
    }

    #[derive(InputObject)]
    struct MyInputObj2 {
        a: i32,
    }

    #[derive(InputObject)]
    struct MyInputObj3 {
        a: i32,
        #[graphql(visible = false)]
        b: MyInputObj4,
    }

    #[derive(InputObject)]
    struct MyInputObj4 {
        a: i32,
    }

    #[derive(InputObject)]
    struct MyInputObj5 {
        a: i32,
    }

    #[derive(Union)]
    enum MyUnion {
        MyObj3(MyObj3),
        MyObj4(MyObj4),
    }

    #[derive(Interface)]
    #[graphql(field(name = "a", type = "&i32"))]
    enum MyInterface {
        MyObj3(MyObj3),
        MyObj4(MyObj4),
    }

    #[derive(Interface)]
    #[graphql(visible = false, field(name = "a", type = "&i32"))]
    enum MyInterface2 {
        MyObj3(MyObj3),
        MyObj4(MyObj4),
    }

    struct Query;

    #[Object]
    #[allow(unreachable_code)]
    impl Query {
        #[graphql(visible = false)]
        async fn obj1(&self) -> MyObj1 {
            todo!()
        }

        async fn obj3(&self) -> MyObj3 {
            todo!()
        }

        #[graphql(visible = false)]
        async fn input_obj1(&self, _obj: MyInputObj1) -> i32 {
            todo!()
        }

        async fn input_obj3(&self, _obj: MyInputObj3) -> i32 {
            todo!()
        }

        async fn input_obj5(&self, #[graphql(visible = false)] _obj: Option<MyInputObj5>) -> i32 {
            todo!()
        }

        async fn union1(&self) -> MyUnion {
            todo!()
        }

        async fn interface1(&self) -> MyInterface {
            todo!()
        }

        async fn interface2(&self) -> MyInterface2 {
            todo!()
        }
    }

    let schema = Schema::build(Query, EmptyMutation, EmptySubscription)
        .directive(my_directive1)
        .directive(my_directive2)
        .finish();

    // MyObj1/MyObj2 are only reachable via the hidden obj1 field -> hidden.
    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyObj1") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyObj2") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );

    // MyObj3 is reachable via the visible obj3 field -> visible.
    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyObj3") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": { "name": "MyObj3" } })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyInputObj1") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyInputObj2") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyInputObj3") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": { "name": "MyInputObj3" } })
    );

    // Hidden members are removed from a union's possibleTypes.
    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyUnion") { possibleTypes { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": { "possibleTypes": [{ "name": "MyObj3" }] } })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyInterface") { possibleTypes { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": { "possibleTypes": [{ "name": "MyObj3" }] } })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyInterface2") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyObj3") { interfaces { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": { "interfaces": [{ "name": "MyInterface" }] } })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyObj3") { fields { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": { "fields": [
            { "name": "a" },
        ]}})
    );

    // MyObj5 is only referenced by MyObj3's hidden field -> hidden.
    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyObj5") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyInputObj3") { inputFields { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": { "inputFields": [
            { "name": "a" },
        ]}})
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyInputObj4") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );

    // MyInputObj5 only appears as a hidden argument -> hidden.
    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyInputObj5") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );

    // MyEnum1 rides on the visible directive; MyEnum2 on the hidden one.
    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyEnum1") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": { "name": "MyEnum1" } })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "MyEnum2") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );
}

// Hiding the root Mutation/Subscription types nulls them out in __schema,
// while the Query root stays visible.
#[tokio::test]
pub async fn root() {
    struct Query;

    #[Object]
    #[allow(unreachable_code)]
    impl Query {
        async fn value(&self) -> i32 {
            todo!()
        }
    }

    struct Mutation;

    #[Object(visible = false)]
    #[allow(unreachable_code)]
    impl Mutation {
        async fn value(&self) -> i32 {
            todo!()
        }
    }

    struct Subscription;

    #[Subscription(visible = false)]
    #[allow(unreachable_code)]
    impl Subscription {
        async fn value(&self) -> impl Stream<Item = i32> {
            futures_util::stream::iter(vec![1, 2, 3])
        }
    }

    let schema = Schema::new(Query, Mutation, Subscription);

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "Mutation") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );

    let schema = Schema::new(Query, Mutation, Subscription);
    assert_eq!(
        schema
            .execute(r#"{ __schema { mutationType { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__schema": { "mutationType": null } })
    );

    assert_eq!(
        schema
            .execute(r#"{ __type(name: "Subscription") { name } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__type": null })
    );

    let schema = Schema::new(Query, Mutation, Subscription);
    assert_eq!(
        schema
            .execute(r#"{ __schema { subscriptionType { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__schema": { "subscriptionType": null } })
    );

    let schema = Schema::new(Query, Mutation, Subscription);
    assert_eq!(
        schema
            .execute(r#"{ __schema { queryType { name } } }"#)
            .await
            .into_result()
            .unwrap()
            .data,
        value!({ "__schema": { "queryType": { "name": "Query" } } })
    );
}
pub mod mutations;
pub mod pending;
pub mod queries;
pub mod service;

use crate::auth::pwd::create_pwd_hash;
use crate::datastore::prelude::*;
use chrono::prelude::*;
use juniper::ID;

// NOTE: the `///` comments on fields double as GraphQL descriptions via
// juniper, so they are part of the exposed schema — do not edit casually.
#[derive(juniper::GraphQLObject, Debug, Clone)]
#[graphql(description = "A user in a taskach system")]
pub struct User {
    pub id: ID,
    /// firstname
    pub first_name: String,
    /// lastname
    pub last_name: String,
    /// email
    pub email: String,
    /// phone
    pub phone: String,
    #[graphql(skip)]
    /// user is enabled
    pub active: bool,
    // Never exposed over GraphQL.
    #[graphql(skip)]
    pub password_hash: String,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

// Maps a raw datastore entity onto the GraphQL model; each DbValue accessor
// names the entity property it reads.
impl From<&Entity> for User {
    fn from(entity: &Entity) -> Self {
        Self {
            id: DbValue::Id(entity).into(),
            first_name: DbValue::Str("first_name", entity).into(),
            last_name: DbValue::Str("last_name", entity).into(),
            email: DbValue::Str("email", entity).into(),
            phone: DbValue::Str("phone", entity).into(),
            active: DbValue::Bool("active", entity).into(),
            password_hash: DbValue::Blob("password_hash", entity).into(),
            created_at: DbValue::Timestamp("created_at", entity).into(),
            updated_at: DbValue::Timestamp("updated_at", entity).into(),
        }
    }
}

#[derive(juniper::GraphQLInputObject, Clone)]
pub struct NewUserInput {
    first_name: String,
    last_name: String,
    email: String,
    phone: String,
    password: String,
}

#[derive(juniper::GraphQLInputObject, Default)]
pub struct UpdateUserInput {
    first_name: Option<String>,
    last_name: Option<String>,
    phone: Option<String>,
    /// For internal usage (You cannot change it)
    active: Option<bool>,
}

impl User {
    // Turns a signup input into datastore properties: hashes the password
    // (propagating hashing errors as String) and enables the account.
    fn new(user: NewUserInput) -> Result<DbProperties, String> {
        let password = create_pwd_hash(user.password)?;
        let db_values = fields_to_db_values(&[
            AppValue::Str("first_name", Some(user.first_name)),
            AppValue::Str("last_name", Some(user.last_name)),
            AppValue::Str("email", Some(user.email)),
            AppValue::Str("phone", Some(user.phone)),
            AppValue::Byte("password_hash", Some(password)),
            AppValue::Bool("active", Some(true)),
        ]);
        Ok(db_values)
    }

    // Partial update: only the mutable profile fields; `None` values are
    // passed through (presumably skipped by `fields_to_db_values` — TODO confirm).
    fn update(user: UpdateUserInput) -> DbProperties {
        fields_to_db_values(&[
            AppValue::Str("first_name", user.first_name),
            AppValue::Str("last_name", user.last_name),
            AppValue::Str("phone", user.phone),
        ])
    }
}
use bit_set::BitSet;
use std::vec::IntoIter;

/// A joinable data source: `open` yields the set of live indices and `get`
/// returns the value stored at one of those indices.
pub trait Join {
    type Item;
    fn open(&self) -> BitSet;
    fn join(self) -> JoinIterator<Self>
    where
        Self: Sized,
    {
        JoinIterator::new(self)
    }
    fn get(&self, index: usize) -> Self::Item;
}

/// Iterates over every index in the join's key set, yielding joined items in
/// ascending index order.
pub struct JoinIterator<T: Join> {
    keys: IntoIter<usize>,
    join: T,
}

impl<T> JoinIterator<T>
where
    T: Join,
{
    pub fn new(join: T) -> Self {
        // Materialize the key set once up front (BitSet iteration is ascending).
        let keys: Vec<usize> = join.open().iter().collect();
        JoinIterator {
            keys: keys.into_iter(),
            join,
        }
    }
}

impl<T> Iterator for JoinIterator<T>
where
    T: Join,
{
    type Item = T::Item;

    fn next(&mut self) -> Option<T::Item> {
        self.keys.next().map(|idx| self.join.get(idx))
    }
}

// Implements Join for tuples of Joins: the key set is the INTERSECTION of all
// component key sets, and `get` gathers one item per component.
macro_rules! impl_data {
    ( $first:ident $(, $ty:ident)* ) => {
        impl<$first, $($ty),*> Join for ( $first, $( $ty , )* )
            where
                $first : Join,
                $( $ty : Join ),*
        {
            type Item = ( $first::Item, $($ty::Item,)* );

            fn open(&self) -> BitSet {
                #![allow(unused_variables, non_snake_case)]
                // BUG FIX: this used to start from `BitSet::new()` (the empty
                // set) and intersect every component into it, which made every
                // tuple join unconditionally empty. Seed the intersection with
                // the first component's key set instead.
                let ( $first, $($ty, )* ) = self;
                let mut base = $first.open();
                $( base.intersect_with(&$ty.open()); )*
                base
            }

            fn get(&self, index: usize) -> Self::Item {
                #![allow(unused_variables, non_snake_case)]
                let ( $first, $($ty,)* ) = self;
                ( $first.get(index), $( $ty.get(index), )* )
            }
        }
    };
}

mod impl_data {
    #![cfg_attr(rustfmt, rustfmt_skip)]

    use super::*;
    impl_data!(A);
    impl_data!(A, B);
    impl_data!(A, B, C);
    impl_data!(A, B, C, D);
    impl_data!(A, B, C, D, E);
    impl_data!(A, B, C, D, E, F);
    impl_data!(A, B, C, D, E, F, G);
    impl_data!(A, B, C, D, E, F, G, H);
    impl_data!(A, B, C, D, E, F, G, H, I);
    impl_data!(A, B, C, D, E, F, G, H, I, J);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y);
    impl_data!(A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z);
}
// Benchmarks comparing three ways to return one of several iterator types:
// a single unboxed iterator (baseline), `Box<dyn Iterator>`, and the
// `auto_enums`-generated enum. Requires nightly (`test`, `box_syntax`).
#![feature(test)]
#![feature(box_syntax)]
#![cfg_attr(feature = "unstable", feature(try_trait))]
#![warn(unsafe_code)]
#![warn(rust_2018_idioms)]

extern crate test;

use auto_enums::auto_enum;
use rand::Rng;
use test::Bencher;

// Baseline: one concrete iterator type, no branching on `x`.
fn iter_no_branch(_x: u32) -> impl Iterator<Item = i64> {
    (0..).map(|x| x + 2 - 1)
}

// Trait-object versions with 2 and 16 variants.
fn iter_boxed2(x: u32) -> Box<dyn Iterator<Item = i64>> {
    match x {
        0 => box (0..).map(|x| (x * 2) - 1),
        _ => box (0..).map(|x| (x + 1) / 2),
    }
}

fn iter_boxed16(x: u32) -> Box<dyn Iterator<Item = i64>> {
    match x {
        0 => box (0..).map(|x| (x * 2) * 2),
        1 => box (0..).map(|x| (x - 1) / 2),
        2 => box (0..).map(|x| (x * 2) - 1),
        3 => box (0..).map(|x| (x + 1) + 2),
        4 => box (0..).map(|x| (x / 2) - 1),
        5 => box (0..).map(|x| (x * 2) - 1),
        6 => box (0..).map(|x| (x / 2) + 1),
        7 => box (0..).map(|x| (x * 2) + 1),
        8 => box (0..).map(|x| (x / 2) / 2),
        9 => box (0..).map(|x| (x / 2) - 2),
        10 => box (0..).map(|x| (x + 2) * 2),
        11 => box (0..).map(|x| (x + 2) / 2),
        12 => box (0..).map(|x| (x - 1) - 2),
        13 => box (0..).map(|x| (x - 2) * 2),
        14 => box (0..).map(|x| (x - 2) / 2),
        _ => box (0..).map(|x| (x + 1) / 2),
    }
}

// `#[auto_enum]` versions with 2, 4, 8, 12 and 16 variants: each closure has a
// distinct type, so the macro wraps them in a generated enum that itself
// implements Iterator (static dispatch per variant).
#[auto_enum(Iterator)]
fn iter_enum2(x: u32) -> impl Iterator<Item = i64> {
    match x {
        0 => (0..).map(|x| (x * 2) - 1),
        _ => (0..).map(|x| (x + 1) / 2),
    }
}

#[auto_enum(Iterator)]
fn iter_enum4(x: u32) -> impl Iterator<Item = i64> {
    match x {
        0 => (0..).map(|x| (x * 2) * 2),
        1 => (0..).map(|x| (x - 1) / 2),
        2 => (0..).map(|x| (x * 2) - 1),
        _ => (0..).map(|x| (x + 1) / 2),
    }
}

#[auto_enum(Iterator)]
fn iter_enum8(x: u32) -> impl Iterator<Item = i64> {
    match x {
        0 => (0..).map(|x| (x * 2) * 2),
        1 => (0..).map(|x| (x - 1) / 2),
        2 => (0..).map(|x| (x * 2) - 1),
        3 => (0..).map(|x| (x + 1) + 2),
        4 => (0..).map(|x| (x / 2) - 1),
        5 => (0..).map(|x| (x * 2) - 1),
        6 => (0..).map(|x| (x / 2) + 1),
        _ => (0..).map(|x| (x + 1) / 2),
    }
}

#[auto_enum(Iterator)]
fn iter_enum12(x: u32) -> impl Iterator<Item = i64> {
    match x {
        0 => (0..).map(|x| (x * 2) * 2),
        1 => (0..).map(|x| (x - 1) / 2),
        2 => (0..).map(|x| (x * 2) - 1),
        3 => (0..).map(|x| (x + 1) + 2),
        4 => (0..).map(|x| (x / 2) - 1),
        5 => (0..).map(|x| (x * 2) - 1),
        6 => (0..).map(|x| (x / 2) + 1),
        7 => (0..).map(|x| (x * 2) + 1),
        8 => (0..).map(|x| (x / 2) / 2),
        9 => (0..).map(|x| (x / 2) - 2),
        10 => (0..).map(|x| (x / 2) + 1),
        _ => (0..).map(|x| (x + 1) / 2),
    }
}

#[auto_enum(Iterator)]
fn iter_enum16(x: u32) -> impl Iterator<Item = i64> {
    match x {
        0 => (0..).map(|x| (x * 2) * 2),
        1 => (0..).map(|x| (x - 1) / 2),
        2 => (0..).map(|x| (x * 2) - 1),
        3 => (0..).map(|x| (x + 1) + 2),
        4 => (0..).map(|x| (x / 2) - 1),
        5 => (0..).map(|x| (x * 2) - 1),
        6 => (0..).map(|x| (x / 2) + 1),
        7 => (0..).map(|x| (x * 2) + 1),
        8 => (0..).map(|x| (x / 2) / 2),
        9 => (0..).map(|x| (x / 2) - 2),
        10 => (0..).map(|x| (x + 2) * 2),
        11 => (0..).map(|x| (x + 2) / 2),
        12 => (0..).map(|x| (x - 1) - 2),
        13 => (0..).map(|x| (x - 2) * 2),
        14 => (0..).map(|x| (x - 2) / 2),
        _ => (0..).map(|x| (x + 1) / 2),
    }
}

// Each macro stamps out `#[bench]` fns: pick a random variant per iteration,
// then drive the iterator `$num` times.
// `bench_next`: measure per-`next()` dispatch cost.
macro_rules! bench_next {
    ($($fn:ident, $iter:ident, $max:expr, $num:expr,)*) => {$(
        #[bench]
        fn $fn(b: &mut Bencher) {
            let mut rng = rand::thread_rng();
            b.iter(|| {
                let mut iter = $iter(rng.gen_range(0, $max));
                (0..$num).for_each(|_| assert!(iter.next().is_some()))
            })
        }
    )*};
}

// `bench_fold`: internal iteration; the fold result is returned to the
// bencher, so the sum cannot be optimized away.
macro_rules! bench_fold {
    ($($fn:ident, $iter:ident, $max:expr, $num:expr,)*) => {$(
        #[bench]
        fn $fn(b: &mut Bencher) {
            let mut rng = rand::thread_rng();
            b.iter(|| {
                let iter = $iter(rng.gen_range(0, $max));
                iter.take($num).fold(0, |sum, x| sum + x)
            })
        }
    )*};
}

// `bench_fold_semi`: identical but the result is discarded (trailing `;`),
// showing how much the optimizer removes when the value is unused.
macro_rules! bench_fold_semi {
    ($($fn:ident, $iter:ident, $max:expr, $num:expr,)*) => {$(
        #[bench]
        fn $fn(b: &mut Bencher) {
            let mut rng = rand::thread_rng();
            b.iter(|| {
                let iter = $iter(rng.gen_range(0, $max));
                iter.take($num).fold(0, |sum, x| sum + x);
            })
        }
    )*};
}

bench_next! {
    bench_next100_boxed02, iter_boxed2, 2, 100,
    bench_next100_boxed16, iter_boxed16, 16, 100,
    bench_next1000_boxed02, iter_boxed2, 2, 1000,
    bench_next1000_boxed16, iter_boxed16, 16, 1000,
    bench_next100_enum02, iter_enum2, 2, 100,
    bench_next100_enum16, iter_enum16, 16, 100,
    bench_next1000_enum02, iter_enum2, 2, 1000,
    bench_next1000_enum16, iter_enum16, 16, 1000,
    bench_next100_no_branch, iter_no_branch, 10, 100,
    bench_next1000_no_branch, iter_no_branch, 10, 1000,
}

bench_fold! {
    bench_fold100_boxed02, iter_boxed2, 2, 100,
    bench_fold100_boxed16, iter_boxed16, 16, 100,
    bench_fold1000_boxed02, iter_boxed2, 2, 1000,
    bench_fold1000_boxed16, iter_boxed16, 16, 1000,
    bench_fold100_enum02, iter_enum2, 2, 100,
    bench_fold100_enum04, iter_enum4, 4, 100,
    bench_fold100_enum08, iter_enum8, 8, 100,
    bench_fold100_enum12, iter_enum12, 12, 100,
    bench_fold100_enum16, iter_enum16, 16, 100,
    bench_fold1000_enum02, iter_enum2, 2, 1000,
    bench_fold1000_enum04, iter_enum4, 4, 1000,
    bench_fold1000_enum08, iter_enum8, 8, 1000,
    bench_fold1000_enum12, iter_enum12, 12, 1000,
    bench_fold1000_enum16, iter_enum16, 16, 1000,
    bench_fold100_no_branch, iter_no_branch, 10, 100,
    bench_fold1000_no_branch, iter_no_branch, 10, 1000,
}

bench_fold_semi! {
    bench_fold100semi_boxed02, iter_boxed2, 2, 100,
    bench_fold100semi_boxed16, iter_boxed16, 16, 100,
    bench_fold1000semi_boxed02, iter_boxed2, 2, 1000,
    bench_fold1000semi_boxed16, iter_boxed16, 16, 1000,
    bench_fold100semi_enum02, iter_enum2, 2, 100,
    bench_fold100semi_enum16, iter_enum16, 16, 100,
    bench_fold1000semi_enum02, iter_enum2, 2, 1000,
    bench_fold1000semi_enum16, iter_enum16, 16, 1000,
    bench_fold100semi_no_branch, iter_no_branch, 10, 100,
    bench_fold1000semi_no_branch, iter_no_branch, 10, 1000,
}

// Recorded results from one historical run (machine/toolchain unspecified).
/*
running 36 tests
test bench_fold1000_boxed02      ... bench:       2,111 ns/iter (+/- 177)
test bench_fold1000_boxed16      ... bench:       2,466 ns/iter (+/- 152)
test bench_fold1000_enum02       ... bench:         136 ns/iter (+/- 2)
test bench_fold1000_enum04       ... bench:         249 ns/iter (+/- 14)
test bench_fold1000_enum08       ... bench:       1,114 ns/iter (+/- 51)
test bench_fold1000_enum12      ... bench:       1,144 ns/iter (+/- 67)
test bench_fold1000_enum16       ... bench:       1,523 ns/iter (+/- 135)
test bench_fold1000_no_branch    ... bench:          14 ns/iter (+/- 0)
test bench_fold1000semi_boxed02  ... bench:       2,100 ns/iter (+/- 420)
test bench_fold1000semi_boxed16  ... bench:       2,133 ns/iter (+/- 193)
test bench_fold1000semi_enum02   ... bench:          19 ns/iter (+/- 0)
test bench_fold1000semi_enum16   ... bench:          19 ns/iter (+/- 0)
test bench_fold1000semi_no_branch ... bench:         14 ns/iter (+/- 0)
test bench_fold100_boxed02       ... bench:         309 ns/iter (+/- 28)
test bench_fold100_boxed16       ... bench:         320 ns/iter (+/- 35)
test bench_fold100_enum02        ... bench:          20 ns/iter (+/- 0)
test bench_fold100_enum04        ... bench:          22 ns/iter (+/- 0)
test bench_fold100_enum08        ... bench:         135 ns/iter (+/- 3)
test bench_fold100_enum12        ... bench:         185 ns/iter (+/- 14)
test bench_fold100_enum16        ... bench:         176 ns/iter (+/- 8)
test bench_fold100_no_branch     ... bench:          14 ns/iter (+/- 0)
test bench_fold100semi_boxed02   ... bench:         342 ns/iter (+/- 26)
test bench_fold100semi_boxed16   ... bench:         318 ns/iter (+/- 23)
test bench_fold100semi_enum02    ... bench:          20 ns/iter (+/- 0)
test bench_fold100semi_enum16    ... bench:          20 ns/iter (+/- 0)
test bench_fold100semi_no_branch ... bench:          14 ns/iter (+/- 0)
test bench_next1000_boxed02      ... bench:       2,105 ns/iter (+/- 597)
test bench_next1000_boxed16      ... bench:       2,124 ns/iter (+/- 108)
test bench_next1000_enum02       ... bench:          20 ns/iter (+/- 0)
test bench_next1000_enum16       ... bench:          20 ns/iter (+/- 1)
test bench_next1000_no_branch    ... bench:          14 ns/iter (+/- 0)
test bench_next100_boxed02       ... bench:         336 ns/iter (+/- 35)
test bench_next100_boxed16       ... bench:         313 ns/iter (+/- 18)
test bench_next100_enum02        ... bench:          20 ns/iter (+/- 0)
test bench_next100_enum16        ... bench:          20 ns/iter (+/- 0)
test bench_next100_no_branch     ... bench:          14 ns/iter (+/- 0)

test result: ok. 0 passed; 0 failed; 0 ignored; 36 measured; 0 filtered out
*/
/*
 * Copyright (C) 2020-2022 Zixiao Han
 */

// Engine identification strings.
pub static ENGINE_NAME: &str = "FoxSEE";
pub static VERSION: &str = "v8.5";
pub static AUTHOR: &str = "Zixiao Han";

// Transposition-table sizing (in MB and in entry-count units).
pub const DEFAULT_HASH_SIZE_MB: usize = 128;
pub const DEFAULT_HASH_SIZE_UNIT: usize = 4194304;
pub const MIN_HASH_SIZE_MB: usize = 1;
pub const MIN_HASH_SIZE_UNIT: usize = 32768;
pub const MAX_HASH_SIZE_MB: usize = 512;

// Board geometry.
pub const BOARD_SIZE: usize = 64;
pub const DIM_SIZE: usize = 8;
pub const PIECE_CODE_RANGE: usize = 131;

// Capture/move list capacities.
pub const MAX_CAP_COUNT: usize = 64;
pub const MAX_MOV_COUNT: usize = 128;

// Move kinds.
pub const MOV_REG: u8 = 1;
pub const MOV_PROMO: u8 = 2;
pub const MOV_CAS: u8 = 3;
pub const MOV_ENP: u8 = 4;
pub const MOV_CR_ENP: u8 = 5;

// King destination squares after castling.
pub const CAS_SQUARE_WK: usize = 6;
pub const CAS_SQUARE_WQ: usize = 2;
pub const CAS_SQUARE_BK: usize = 62;
pub const CAS_SQUARE_BQ: usize = 58;

// Side bits; XOR-ing with PLAYER_SWITCH flips the side to move.
pub const PLAYER_W: u8 = 0b10;
pub const PLAYER_B: u8 = 0b01;
pub const PLAYER_SWITCH: u8 = 0b11;

// Piece-kind flag bits: exactly one bit per kind.
pub const P: u8 = 0b100;
pub const N: u8 = 0b1000;
pub const B: u8 = 0b10000;
pub const R: u8 = 0b100000;
pub const Q: u8 = 0b1000000;
pub const K: u8 = 0b10000000;

// Concrete piece codes: the kind flag OR'd with the owning side's bit.
// (Same numeric values as before, e.g. WP == 6, BK == 129.)
pub const WP: u8 = P | PLAYER_W;
pub const WN: u8 = N | PLAYER_W;
pub const WB: u8 = B | PLAYER_W;
pub const WR: u8 = R | PLAYER_W;
pub const WQ: u8 = Q | PLAYER_W;
pub const WK: u8 = K | PLAYER_W;

pub const BP: u8 = P | PLAYER_B;
pub const BN: u8 = N | PLAYER_B;
pub const BB: u8 = B | PLAYER_B;
pub const BR: u8 = R | PLAYER_B;
pub const BQ: u8 = Q | PLAYER_B;
pub const BK: u8 = K | PLAYER_B;

/// Flips the side-to-move bit pattern (white <-> black).
#[inline]
pub const fn get_opposite_player(player: u8) -> u8 {
    player ^ PLAYER_SWITCH
}

/// True when `piece_code` carries the side bit of `player`.
#[inline]
pub const fn on_same_side(player: u8, piece_code: u8) -> bool {
    (player & piece_code) == player
}

/// Rank of `index` seen from `player`'s pawn-advance direction
/// (0 = own back rank, 7 = promotion rank).
#[inline]
pub fn get_passer_rank(player: u8, index: usize) -> usize {
    let rank = index / DIM_SIZE;
    if player == PLAYER_W { rank } else { 7 - rank }
}

// Piece-kind predicates: each tests a single kind-flag bit.

#[inline]
pub const fn is_k(piece_code: u8) -> bool {
    (piece_code & K) != 0
}

#[inline]
pub const fn is_q(piece_code: u8) -> bool {
    (piece_code & Q) != 0
}

#[inline]
pub const fn is_r(piece_code: u8) -> bool {
    (piece_code & R) != 0
}

#[inline]
pub const fn is_b(piece_code: u8) -> bool {
    (piece_code & B) != 0
}

#[inline]
pub const fn is_n(piece_code: u8) -> bool {
    (piece_code & N) != 0
}

#[inline]
pub const fn is_p(piece_code: u8) -> bool {
    (piece_code & P) != 0
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_switch_player() {
        assert_eq!(PLAYER_B, get_opposite_player(PLAYER_W));
        assert_eq!(PLAYER_W, get_opposite_player(PLAYER_B));
    }

    #[test]
    fn test_get_passer_rank() {
        for &(index, rank) in &[(6, 0), (9, 1), (62, 7), (58, 7)] {
            assert_eq!(rank, get_passer_rank(PLAYER_W, index));
        }
        for &(index, rank) in &[(2, 7), (35, 3)] {
            assert_eq!(rank, get_passer_rank(PLAYER_B, index));
        }
    }

    #[test]
    fn test_piece_type() {
        for &code in &[WK, BK] {
            assert!(is_k(code));
        }
        for &code in &[WQ, BQ] {
            assert!(is_q(code));
        }
        for &code in &[WR, BR] {
            assert!(is_r(code));
        }
        for &code in &[WB, BB] {
            assert!(is_b(code));
        }
        for &code in &[WN, BN] {
            assert!(is_n(code));
        }
        for &code in &[WP, BP] {
            assert!(is_p(code));
        }

        // Spot-check that unrelated kind bits are not set.
        assert!(!is_q(BK));
        assert!(!is_n(WK));
        assert!(!is_b(BR));
        assert!(!is_k(WP));
        assert!(!is_q(BB));
        assert!(!is_p(WN));

        // The king flag is exclusive to kings.
        for &code in &[WQ, BQ, WR, BR, WB, BB, WN, BN, WP, BP] {
            assert!(!is_k(code));
        }
    }

    #[test]
    fn test_checkside() {
        assert!(!on_same_side(PLAYER_W, 0));
        assert!(!on_same_side(PLAYER_B, 0));

        for &code in &[WK, WQ, WR, WB, WN, WP] {
            assert!(on_same_side(PLAYER_W, code));
            assert!(!on_same_side(PLAYER_B, code));
        }
        for &code in &[BK, BQ, BR, BB, BN, BP] {
            assert!(on_same_side(PLAYER_B, code));
            assert!(!on_same_side(PLAYER_W, code));
        }
    }
}
use super::Number;
use std::ops::{Add, Sub, AddAssign, SubAssign};

/// A 2D vector with numeric components of type `N` (defaults to `f32`).
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Vector<N: Number = f32> {
    pub x: N,
    pub y: N,
}

// Semantic aliases: all share the same representation and operations.
pub type Position<N = f32> = Vector<N>;
pub type Point<N = f32> = Vector<N>;
pub type Scale<N = f32> = Vector<N>;
pub type Delta<N = f32> = Vector<N>;

impl<N: Number> Vector<N> {
    /// Creates a vector from its components.
    pub fn new(x: N, y: N) -> Self {
        Self { x, y }
    }

    /// The zero vector.
    pub fn zero() -> Self {
        Self::new(N::zero(), N::zero())
    }

    /// Overwrites both components in place.
    pub fn set(&mut self, x: N, y: N) {
        self.x = x;
        self.y = y;
    }

    /// Copies both components from `other`.
    pub fn set_with(&mut self, other: &Self) {
        self.x = other.x;
        self.y = other.y;
    }
}

impl<N: Number> Add for Vector<N> {
    type Output = Self;

    /// Component-wise addition.
    fn add(self, other: Self) -> Self::Output {
        Self::new(self.x + other.x, self.y + other.y)
    }
}

impl<N: Number> Sub for Vector<N> {
    type Output = Self;

    /// Component-wise subtraction.
    fn sub(self, other: Self) -> Self::Output {
        Self::new(self.x - other.x, self.y - other.y)
    }
}

impl<N: Number> AddAssign for Vector<N> {
    fn add_assign(&mut self, other: Self) {
        self.x += other.x;
        self.y += other.y;
    }
}

impl<N: Number> SubAssign for Vector<N> {
    fn sub_assign(&mut self, other: Self) {
        self.x -= other.x;
        self.y -= other.y;
    }
}

impl<N: Number> From<(N, N)> for Vector<N> {
    fn from((x, y): (N, N)) -> Self {
        Self::new(x, y)
    }
}

// FIX (clippy::from_over_into): implement `From<Vector<N>> for (N, N)`
// instead of a manual `Into` impl. The blanket `impl<T, U: From<T>> Into<U>
// for T` in core still provides `Vector<N>: Into<(N, N)>`, so existing
// `.into()` call sites keep working, and `<(N, N)>::from(v)` now works too.
impl<N: Number> From<Vector<N>> for (N, N) {
    fn from(v: Vector<N>) -> Self {
        (v.x, v.y)
    }
}

#[cfg(test)]
mod tests {
    use super::Vector;

    #[test]
    fn operator() {
        let mut vec = Vector::<f32>::new(100.0, 50.0);
        vec = vec + Vector::<f32>::new(40.0, 20.0);
        assert_eq!(vec, Vector::<f32>::new(140.0, 70.0));
        vec = vec - Vector::<f32>::new(40.0, 70.0);
        assert_eq!(vec, Vector::<f32>::new(100.0, 0.0));
        vec += Vector::<f32>::new(20.0, 50.0);
        assert_eq!(vec, Vector::new(120.0, 50.0));
        vec -= Vector::<f32>::new(80.0, 10.0);
        assert_eq!(vec, Vector::new(40.0, 40.0));
    }

    #[test]
    fn conversion() {
        // Round-trip through the tuple conversions in both directions.
        let v: Vector<f32> = (3.0, 4.0).into();
        assert_eq!(v, Vector::new(3.0, 4.0));
        let t: (f32, f32) = v.into();
        assert_eq!(t, (3.0, 4.0));
    }
}
use std::default::Default;
use std::string::ToString;
use yew::prelude::*;
use yew::services::ConsoleService;

use super::editor_row::PrefabEditorRow;
use crate::components::prefab::scene::form::Form as SceneForm;
use crate::components::prefab::ui::form::Form as UIForm;

/// Messages handled by [`PrefabEditorWindow::update`].
#[derive(Debug, Clone, PartialEq)]
pub enum WindowMsg {
    // Append a new editor row to the window.
    AddRow,
    // Switch the window to the given prefab form.
    ShowPrefabForm(PrefabForms),
}

/// The kinds of prefab form the window can display.
#[derive(Debug, Clone, PartialEq)]
pub enum PrefabForms {
    UI,
    Scene,
}

/// Primary window for the Prefab Editor
pub struct PrefabEditorWindow {
    // Editor rows currently shown (name/value pairs).
    pub rows: Vec<PrefabEditorRow>,
    // Fired to request a new row; forwarded to each row's `on_add`.
    pub add_row_callback: Callback<()>,
    // Which prefab form is visible, if any.
    pub selected_prefab: Option<PrefabForms>,
    // Fired when a prefab should be created/shown.
    pub on_prefab_create: Callback<WindowMsg>,
    // Browser-console logger (old-Yew service API).
    console: ConsoleService,
}

/// Props for the Prefab Editor Window
#[derive(Clone, PartialEq)]
pub struct PrefabEditorProps {
    pub on_prefab_create: Callback<WindowMsg>,
    pub rows: Vec<PrefabEditorRow>,
    pub add_row_callback: Callback<()>,
    pub selected_prefab: Option<PrefabForms>,
}

impl Default for PrefabEditorProps {
    // No-op callbacks and empty state so the component can mount without a parent wiring.
    fn default() -> Self {
        PrefabEditorProps {
            on_prefab_create: Callback::from(|_| return),
            rows: vec![],
            add_row_callback: Callback::from(|_| return),
            selected_prefab: None,
        }
    }
}

impl Default for PrefabEditorWindow {
    fn default() -> Self {
        PrefabEditorWindow {
            console: ConsoleService::new(),
            rows: vec![],
            add_row_callback: Callback::from(|_| return),
            selected_prefab: None,
            on_prefab_create: Callback::from(|_| return),
        }
    }
}

impl PrefabEditorWindow {
    /// Appends a row with the given name/value; `add_row_callback` becomes the
    /// row's `on_add` handler.
    pub fn add_row(&mut self, name: String, value: String, add_row_callback: Callback<()>) {
        let row = PrefabEditorRow {
            name,
            value,
            on_add: add_row_callback,
        };
        self.rows.push(row)
    }
}

// Identity passthrough; presumably a placeholder for prefab-selection handling — TODO confirm.
pub fn on_prefab_select(value: String) -> String {
    value
}

impl Component for PrefabEditorWindow {
    type Message = WindowMsg;
    type Properties = PrefabEditorProps;

    /// Builds the component from props and seeds it with one placeholder row.
    /// NOTE(review): `on_prefab_create` is wired to always send the Scene form,
    /// regardless of the callback's input — confirm this is intended.
    fn create(props: Self::Properties, mut link: ComponentLink<Self>) -> Self {
        let mut window = PrefabEditorWindow {
            console: ConsoleService::new(),
            rows: props.rows,
            add_row_callback: link.send_back(|_| WindowMsg::AddRow),
            selected_prefab: None,
            on_prefab_create: link.send_back(|_| WindowMsg::ShowPrefabForm(PrefabForms::Scene)),
        };
        window.add_row(
            "Field Name".to_string(),
            "Field Value".to_string(),
            window.add_row_callback.clone(),
        );
        window
    }

    /// Handles messages; always returns `true` to trigger a re-render.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        self.console.log("Message received");
        match msg {
            WindowMsg::AddRow => {
                // Placeholder row contents used for every AddRow message.
                self.add_row(
                    "Test1".to_string(),
                    "Test2".to_string(),
                    self.add_row_callback.clone(),
                );
            }
            WindowMsg::ShowPrefabForm(form) => {
                self.console.log("Received show prefab form");
                self.selected_prefab = Some(form);
            }
        };
        true
    }

    /// Adopts every incoming prop unconditionally and re-renders.
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        self.rows = props.rows;
        self.add_row_callback = props.add_row_callback;
        self.selected_prefab = props.selected_prefab;
        self.on_prefab_create = props.on_prefab_create;
        true
    }
}

impl Renderable<PrefabEditorWindow> for PrefabEditorWindow {
    /// Renders the selected prefab form (UI or Scene), or an empty `<div>`
    /// when no prefab is selected.
    fn view(&self) -> Html<Self> {
        let f = {
            match &self.selected_prefab {
                Some(f) => match f {
                    PrefabForms::UI => {
                        html! { <UIForm: /> }
                    }
                    PrefabForms::Scene => {
                        html! { <SceneForm: /> }
                    }
                },
                None => {
                    html! { <div /> }
                }
            }
        };
        html! {
            <div>
                { f }
            </div>
        }
    }
}
// Generates a compiler-rt style float-pow-integer intrinsic using binary
// (square-and-multiply) exponentiation: O(log |b|) multiplications.
// Relies on the `sdiv!` macro (defined elsewhere in this crate) for the
// signed divide-by-2 step.
macro_rules! pow {
    ($intrinsic:ident: $fty:ty, $ity:ident) => {
        /// Returns `a` raised to the power `b`
        #[cfg_attr(not(test), no_mangle)]
        pub extern "C" fn $intrinsic(a: $fty, b: $ity) -> $fty {
            let (mut a, mut b) = (a, b);
            // Negative exponent: compute a^|b| and take the reciprocal at the end.
            let recip = b < 0;
            let mut r: $fty = 1.0;
            loop {
                // Low bit set => this power-of-two factor contributes.
                // (For negative b in two's complement, `b & 1` still tests oddness,
                // and `sdiv!` truncates toward zero, so b converges to 0.)
                if (b & 1) != 0 {
                    r *= a;
                }
                b = sdiv!($ity, b, 2);
                if b == 0 {
                    break;
                }
                a *= a;
            }
            // Flip for negative exponents.
            if recip { 1.0 / r } else { r }
        }
    }
}

// `a^b` for f32 and f64 with an i32 exponent (LLVM `powisf2` / `powidf2`).
pow!(__powisf2: f32, i32);
pow!(__powidf2: f64, i32);
// Copyright 2019. The Tari Project // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that the // following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following // disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the // following disclaimer in the documentation and/or other materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote // products derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
use crate::{output_manager_service::TxId, transaction_service::error::TransactionStorageError}; use chrono::{NaiveDateTime, Utc}; use log::*; use serde::{Deserialize, Serialize}; use std::{ collections::HashMap, convert::TryFrom, fmt::{Display, Error, Formatter}, sync::Arc, }; use tari_comms::types::CommsPublicKey; use tari_core::transactions::{ tari_amount::{uT, MicroTari}, transaction::Transaction, types::{BlindingFactor, Commitment}, ReceiverTransactionProtocol, SenderTransactionProtocol, }; const LOG_TARGET: &str = "wallet::transaction_service::database"; /// This trait defines the required behaviour that a storage backend must provide for the Transactionservice. /// Data is passed to and from the backend via the [DbKey], [DbValue], and [DbValueKey] enums. If new data types are /// required to be supported by the backends then these enums can be updated to reflect this requirement and the trait /// will remain the same pub trait TransactionBackend: Send + Sync { /// Retrieve the record associated with the provided DbKey fn fetch(&self, key: &DbKey) -> Result<Option<DbValue>, TransactionStorageError>; /// Check if a record with the provided key exists in the backend. fn contains(&self, key: &DbKey) -> Result<bool, TransactionStorageError>; /// Modify the state the of the backend with a write operation fn write(&self, op: WriteOperation) -> Result<Option<DbValue>, TransactionStorageError>; /// Check if a transaction exists in any of the collections fn transaction_exists(&self, tx_id: TxId) -> Result<bool, TransactionStorageError>; /// Complete outbound transaction, this operation must delete the `OutboundTransaction` with the provided /// `TxId` and insert the provided `CompletedTransaction` into `CompletedTransactions`. 
fn complete_outbound_transaction( &self, tx_id: TxId, completed_transaction: CompletedTransaction, ) -> Result<(), TransactionStorageError>; /// Complete inbound transaction, this operation must delete the `InboundTransaction` with the provided /// `TxId` and insert the provided `CompletedTransaction` into `CompletedTransactions`. fn complete_inbound_transaction( &self, tx_id: TxId, completed_transaction: CompletedTransaction, ) -> Result<(), TransactionStorageError>; /// Complete pending coinbase transaction, this operation must delete the `PendingCoinbaseTransaction` with the /// provided `TxId` and insert the provided `CompletedTransaction` into `CompletedTransactions`. fn complete_coinbase_transaction( &self, tx_id: TxId, completed_transaction: CompletedTransaction, ) -> Result<(), TransactionStorageError>; /// Indicated that a completed transaction has been broadcast to the mempools fn broadcast_completed_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError>; /// Indicated that a completed transaction has been detected as mined on the base layer fn mine_completed_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError>; /// Cancel Completed transaction, this will update the transaction status fn cancel_completed_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError>; /// Cancel Completed transaction, this will update the transaction status fn cancel_pending_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError>; /// Update a completed transactions timestamp for use in test data generation #[cfg(feature = "test_harness")] fn update_completed_transaction_timestamp( &self, tx_id: TxId, timestamp: NaiveDateTime, ) -> Result<(), TransactionStorageError>; } #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub enum TransactionStatus { /// This transaction has been completed between the parties but has not been broadcast to the base layer network. 
Completed, /// This transaction has been broadcast to the base layer network and is currently in one or more base node /// mempools. Broadcast, /// This transaction has been mined and included in a block. Mined, /// This transaction was generated as part of importing a spendable UTXO Imported, /// This transaction is still being negotiated by the parties Pending, /// This transaction has been cancelled Cancelled, } impl TryFrom<i32> for TransactionStatus { type Error = TransactionStorageError; fn try_from(value: i32) -> Result<Self, Self::Error> { match value { 0 => Ok(TransactionStatus::Completed), 1 => Ok(TransactionStatus::Broadcast), 2 => Ok(TransactionStatus::Mined), 3 => Ok(TransactionStatus::Imported), 4 => Ok(TransactionStatus::Pending), 5 => Ok(TransactionStatus::Cancelled), _ => Err(TransactionStorageError::ConversionError), } } } impl Default for TransactionStatus { fn default() -> Self { TransactionStatus::Pending } } impl Display for TransactionStatus { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> { // No struct or tuple variants write!(f, "{:?}", self) } } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct InboundTransaction { pub tx_id: TxId, pub source_public_key: CommsPublicKey, pub amount: MicroTari, pub receiver_protocol: ReceiverTransactionProtocol, pub status: TransactionStatus, pub message: String, pub timestamp: NaiveDateTime, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct OutboundTransaction { pub tx_id: TxId, pub destination_public_key: CommsPublicKey, pub amount: MicroTari, pub fee: MicroTari, pub sender_protocol: SenderTransactionProtocol, pub status: TransactionStatus, pub message: String, pub timestamp: NaiveDateTime, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub struct PendingCoinbaseTransaction { pub tx_id: TxId, pub amount: MicroTari, pub commitment: Commitment, pub timestamp: NaiveDateTime, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] pub 
struct CompletedTransaction { pub tx_id: TxId, pub source_public_key: CommsPublicKey, pub destination_public_key: CommsPublicKey, pub amount: MicroTari, pub fee: MicroTari, pub transaction: Transaction, pub status: TransactionStatus, pub message: String, pub timestamp: NaiveDateTime, } #[derive(Debug, Clone, PartialEq)] pub enum DbKey { PendingOutboundTransaction(TxId), PendingInboundTransaction(TxId), CompletedTransaction(TxId), PendingCoinbaseTransaction(TxId), PendingOutboundTransactions, PendingInboundTransactions, PendingCoinbaseTransactions, CompletedTransactions, } #[derive(Debug)] pub enum DbValue { PendingOutboundTransaction(Box<OutboundTransaction>), PendingInboundTransaction(Box<InboundTransaction>), PendingCoinbaseTransaction(Box<PendingCoinbaseTransaction>), CompletedTransaction(Box<CompletedTransaction>), PendingOutboundTransactions(HashMap<TxId, OutboundTransaction>), PendingInboundTransactions(HashMap<TxId, InboundTransaction>), PendingCoinbaseTransactions(HashMap<TxId, PendingCoinbaseTransaction>), CompletedTransactions(HashMap<TxId, CompletedTransaction>), } pub enum DbKeyValuePair { PendingOutboundTransaction(TxId, Box<OutboundTransaction>), PendingInboundTransaction(TxId, Box<InboundTransaction>), PendingCoinbaseTransaction(TxId, Box<PendingCoinbaseTransaction>), CompletedTransaction(TxId, Box<CompletedTransaction>), } pub enum WriteOperation { Insert(DbKeyValuePair), Remove(DbKey), } impl From<CompletedTransaction> for InboundTransaction { fn from(ct: CompletedTransaction) -> Self { Self { tx_id: ct.tx_id, source_public_key: ct.source_public_key, amount: ct.amount, receiver_protocol: ReceiverTransactionProtocol::new_placeholder(), status: ct.status, message: ct.message, timestamp: ct.timestamp, } } } impl From<CompletedTransaction> for OutboundTransaction { fn from(ct: CompletedTransaction) -> Self { Self { tx_id: ct.tx_id, destination_public_key: ct.destination_public_key, amount: ct.amount, fee: ct.fee, sender_protocol: 
SenderTransactionProtocol::new_placeholder(), status: ct.status, message: ct.message, timestamp: ct.timestamp, } } } // Private macro that pulls out all the boiler plate of extracting a DB query result from its variants macro_rules! fetch { ($db:ident, $key_val:expr, $key_var:ident) => {{ let key = DbKey::$key_var($key_val); match $db.fetch(&key) { Ok(None) => Err(TransactionStorageError::ValueNotFound(key)), Ok(Some(DbValue::$key_var(k))) => Ok(*k), Ok(Some(other)) => unexpected_result(key, other), Err(e) => log_error(key, e), } }}; } /// This structure holds an inner type that implements the `TransactionBackend` trait and contains the more complex /// data access logic required by the module built onto the functionality defined by the trait #[derive(Clone)] pub struct TransactionDatabase<T> where T: TransactionBackend + 'static { db: Arc<T>, } impl<T> TransactionDatabase<T> where T: TransactionBackend + 'static { pub fn new(db: T) -> Self { Self { db: Arc::new(db) } } pub async fn add_pending_inbound_transaction( &self, tx_id: TxId, inbound_tx: InboundTransaction, ) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || { db_clone.write(WriteOperation::Insert(DbKeyValuePair::PendingInboundTransaction( tx_id, Box::new(inbound_tx), ))) }) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(()) } pub async fn add_pending_outbound_transaction( &self, tx_id: TxId, outbound_tx: OutboundTransaction, ) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || { db_clone.write(WriteOperation::Insert(DbKeyValuePair::PendingOutboundTransaction( tx_id, Box::new(outbound_tx), ))) }) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(()) } pub async fn remove_pending_outbound_transaction(&self, tx_id: TxId) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); 
tokio::task::spawn_blocking(move || { db_clone.write(WriteOperation::Remove(DbKey::PendingOutboundTransaction(tx_id))) }) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(()) } pub async fn add_pending_coinbase_transaction( &self, tx_id: TxId, coinbase_tx: PendingCoinbaseTransaction, ) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || { db_clone.write(WriteOperation::Insert(DbKeyValuePair::PendingCoinbaseTransaction( tx_id, Box::new(coinbase_tx), ))) }) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(()) } /// Check if a transaction with the specified TxId exists in any of the collections pub async fn transaction_exists(&self, tx_id: TxId) -> Result<bool, TransactionStorageError> { let db_clone = self.db.clone(); let tx_id_clone = tx_id; tokio::task::spawn_blocking(move || db_clone.transaction_exists(tx_id_clone)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string()))) .and_then(|inner_result| inner_result) } pub async fn insert_completed_transaction( &self, tx_id: TxId, transaction: CompletedTransaction, ) -> Result<Option<DbValue>, TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || { db_clone.write(WriteOperation::Insert(DbKeyValuePair::CompletedTransaction( tx_id, Box::new(transaction), ))) }) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string()))) .and_then(|inner_result| inner_result) } pub async fn get_pending_outbound_transaction( &self, tx_id: TxId, ) -> Result<OutboundTransaction, TransactionStorageError> { let db_clone = self.db.clone(); let result = tokio::task::spawn_blocking(move || fetch!(db_clone, tx_id, PendingOutboundTransaction)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(result) } pub async fn 
get_pending_inbound_transaction( &self, tx_id: TxId, ) -> Result<InboundTransaction, TransactionStorageError> { let db_clone = self.db.clone(); let result = tokio::task::spawn_blocking(move || fetch!(db_clone, tx_id, PendingInboundTransaction)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(result) } pub async fn get_pending_coinbase_transaction( &self, tx_id: TxId, ) -> Result<PendingCoinbaseTransaction, TransactionStorageError> { let db_clone = self.db.clone(); let result = tokio::task::spawn_blocking(move || fetch!(db_clone, tx_id, PendingCoinbaseTransaction)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(result) } pub async fn get_completed_transaction( &self, tx_id: TxId, ) -> Result<CompletedTransaction, TransactionStorageError> { let db_clone = self.db.clone(); let result = tokio::task::spawn_blocking(move || fetch!(db_clone, tx_id, CompletedTransaction)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(result) } pub async fn get_pending_inbound_transactions( &self, ) -> Result<HashMap<TxId, InboundTransaction>, TransactionStorageError> { let db_clone = self.db.clone(); let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::PendingInboundTransactions) { Ok(None) => log_error( DbKey::PendingInboundTransactions, TransactionStorageError::UnexpectedResult( "Could not retrieve pending inbound transactions".to_string(), ), ), Ok(Some(DbValue::PendingInboundTransactions(pt))) => Ok(pt), Ok(Some(other)) => unexpected_result(DbKey::PendingInboundTransactions, other), Err(e) => log_error(DbKey::PendingInboundTransactions, e), }) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(t) } pub async fn get_pending_outbound_transactions( &self, ) -> Result<HashMap<TxId, OutboundTransaction>, TransactionStorageError> { let db_clone = self.db.clone(); let t = 
tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::PendingOutboundTransactions) { Ok(None) => log_error( DbKey::PendingOutboundTransactions, TransactionStorageError::UnexpectedResult( "Could not retrieve pending outbound transactions".to_string(), ), ), Ok(Some(DbValue::PendingOutboundTransactions(pt))) => Ok(pt), Ok(Some(other)) => unexpected_result(DbKey::PendingOutboundTransactions, other), Err(e) => log_error(DbKey::PendingOutboundTransactions, e), }) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(t) } pub async fn get_pending_coinbase_transactions( &self, ) -> Result<HashMap<TxId, PendingCoinbaseTransaction>, TransactionStorageError> { let db_clone = self.db.clone(); let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::PendingCoinbaseTransactions) { Ok(None) => log_error( DbKey::PendingCoinbaseTransactions, TransactionStorageError::UnexpectedResult( "Could not retrieve pending coinbase transactions".to_string(), ), ), Ok(Some(DbValue::PendingCoinbaseTransactions(pt))) => Ok(pt), Ok(Some(other)) => unexpected_result(DbKey::PendingCoinbaseTransactions, other), Err(e) => log_error(DbKey::PendingCoinbaseTransactions, e), }) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(t) } pub async fn get_completed_transactions( &self, ) -> Result<HashMap<TxId, CompletedTransaction>, TransactionStorageError> { let db_clone = self.db.clone(); let t = tokio::task::spawn_blocking(move || match db_clone.fetch(&DbKey::CompletedTransactions) { Ok(None) => log_error( DbKey::CompletedTransactions, TransactionStorageError::UnexpectedResult("Could not retrieve completed transactions".to_string()), ), Ok(Some(DbValue::CompletedTransactions(pt))) => Ok(pt), Ok(Some(other)) => unexpected_result(DbKey::CompletedTransactions, other), Err(e) => log_error(DbKey::CompletedTransactions, e), }) .await .or_else(|err| 
Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(t) } /// This method moves a `PendingOutboundTransaction` to the `CompleteTransaction` collection. pub async fn complete_outbound_transaction( &self, tx_id: TxId, transaction: CompletedTransaction, ) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || db_clone.complete_outbound_transaction(tx_id, transaction)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string()))) .and_then(|inner_result| inner_result) } /// This method moves a `PendingInboundTransaction` to the `CompleteTransaction` collection. pub async fn complete_inbound_transaction( &self, tx_id: TxId, transaction: CompletedTransaction, ) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || db_clone.complete_inbound_transaction(tx_id, transaction)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string()))) .and_then(|inner_result| inner_result) } /// This method moves a `PendingCoinbaseTransaction` to the `CompleteTransaction` collection. 
pub async fn complete_coinbase_transaction( &self, tx_id: TxId, transaction: CompletedTransaction, ) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || db_clone.complete_coinbase_transaction(tx_id, transaction)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string()))) .and_then(|inner_result| inner_result) } pub async fn cancel_coinbase_transaction(&mut self, tx_id: TxId) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || { db_clone.write(WriteOperation::Remove(DbKey::PendingCoinbaseTransaction(tx_id))) }) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(()) } pub async fn cancel_completed_transaction(&mut self, tx_id: TxId) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || db_clone.cancel_completed_transaction(tx_id)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(()) } pub async fn cancel_pending_transaction(&mut self, tx_id: TxId) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || db_clone.cancel_pending_transaction(tx_id)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(()) } /// Indicated that the specified completed transaction has been broadcast into the mempool pub async fn broadcast_completed_transaction(&mut self, tx_id: TxId) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || db_clone.broadcast_completed_transaction(tx_id)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string()))) .and_then(|inner_result| inner_result) } /// Indicated that the specified completed transaction has been detected as mined on the base layer pub async fn 
mine_completed_transaction(&mut self, tx_id: TxId) -> Result<(), TransactionStorageError> { let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || db_clone.mine_completed_transaction(tx_id)) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string()))) .and_then(|inner_result| inner_result) } #[allow(clippy::erasing_op)] // this is for 0 * uT pub async fn add_utxo_import_transaction( &mut self, tx_id: TxId, amount: MicroTari, source_public_key: CommsPublicKey, comms_public_key: CommsPublicKey, message: String, ) -> Result<(), TransactionStorageError> { let transaction = CompletedTransaction { tx_id, source_public_key: source_public_key.clone(), destination_public_key: comms_public_key.clone(), amount, fee: 0 * uT, transaction: Transaction::new(Vec::new(), Vec::new(), Vec::new(), BlindingFactor::default()), status: TransactionStatus::Imported, message, timestamp: Utc::now().naive_utc(), }; let db_clone = self.db.clone(); tokio::task::spawn_blocking(move || { db_clone.write(WriteOperation::Insert(DbKeyValuePair::CompletedTransaction( tx_id, Box::new(transaction), ))) }) .await .or_else(|err| Err(TransactionStorageError::BlockingTaskSpawnError(err.to_string())))??; Ok(()) } } impl Display for DbKey { fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { match self { DbKey::PendingOutboundTransaction(_) => f.write_str(&"Pending Outbound Transaction".to_string()), DbKey::PendingInboundTransaction(_) => f.write_str(&"Pending Inbound Transaction".to_string()), DbKey::PendingCoinbaseTransaction(_) => f.write_str(&"Pending Pending Coinbase Transaction".to_string()), DbKey::CompletedTransaction(_) => f.write_str(&"Completed Transaction".to_string()), DbKey::PendingOutboundTransactions => f.write_str(&"All Pending Outbound Transactions".to_string()), DbKey::PendingInboundTransactions => f.write_str(&"All Pending Inbound Transactions".to_string()), DbKey::CompletedTransactions => f.write_str(&"All Complete 
Transactions".to_string()), DbKey::PendingCoinbaseTransactions => f.write_str(&"All Pending Coinbase Transactions".to_string()), } } } impl Display for DbValue { fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { match self { DbValue::PendingOutboundTransaction(_) => f.write_str(&"Pending Outbound Transaction".to_string()), DbValue::PendingInboundTransaction(_) => f.write_str(&"Pending Inbound Transaction".to_string()), DbValue::PendingCoinbaseTransaction(_) => f.write_str(&"Pending Coinbase Transaction".to_string()), DbValue::CompletedTransaction(_) => f.write_str(&"Completed Transaction".to_string()), DbValue::PendingOutboundTransactions(_) => f.write_str(&"All Pending Outbound Transactions".to_string()), DbValue::PendingInboundTransactions(_) => f.write_str(&"All Pending Inbound Transactions".to_string()), DbValue::CompletedTransactions(_) => f.write_str(&"All Complete Transactions".to_string()), DbValue::PendingCoinbaseTransactions(_) => f.write_str(&"All Pending Coinbase Transactions".to_string()), } } } fn log_error<T>(req: DbKey, err: TransactionStorageError) -> Result<T, TransactionStorageError> { error!( target: LOG_TARGET, "Database access error on request: {}: {}", req, err.to_string() ); Err(err) } fn unexpected_result<T>(req: DbKey, res: DbValue) -> Result<T, TransactionStorageError> { let msg = format!("Unexpected result for database query {}. Response: {}", req, res); error!(target: LOG_TARGET, "{}", msg); Err(TransactionStorageError::UnexpectedResult(msg)) }
// --- smithy-rs generated JSON error deserializers ----------------------------
// NOTE(review): machine-generated code (banner below says DO NOT EDIT); kept
// byte-identical. `parse_http_generic_error` delegates the whole HTTP response
// to crate::json_errors::parse_generic_error. Each
// `deser_structure_*_exceptionjson_err` follows one fixed pattern:
//   1. tokenize the body (empty bodies become "{}" via or_empty_doc),
//   2. expect a start-object token,
//   3. loop over object keys, copying the unescaped "message" string (or null)
//      into the error builder and skipping every unknown key,
//   4. error on any non-key/non-end token, and on trailing tokens after the
//      closing brace.
// This chunk ends inside `deser_operation_configure_logs`, which continues in
// the next chunk.
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. pub fn parse_http_generic_error( response: &http::Response<bytes::Bytes>, ) -> Result<smithy_types::Error, smithy_json::deserialize::Error> { crate::json_errors::parse_generic_error(response.body(), response.headers()) } pub fn deser_structure_forbidden_exceptionjson_err( input: &[u8], mut builder: crate::error::forbidden_exception::Builder, ) -> Result<crate::error::forbidden_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_internal_server_error_exceptionjson_err( input: &[u8], mut builder: crate::error::internal_server_error_exception::Builder, ) -> Result<crate::error::internal_server_error_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()?
{ Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_not_found_exceptionjson_err( input: &[u8], mut builder: crate::error::not_found_exception::Builder, ) -> Result<crate::error::not_found_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_service_unavailable_exceptionjson_err( input: &[u8], mut builder: crate::error::service_unavailable_exception::Builder, ) -> Result<crate::error::service_unavailable_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_too_many_requests_exceptionjson_err( input: &[u8], mut builder: crate::error::too_many_requests_exception::Builder, ) -> Result<crate::error::too_many_requests_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_structure_unprocessable_entity_exceptionjson_err( input: &[u8], mut builder: crate::error::unprocessable_entity_exception::Builder, ) -> Result<crate::error::unprocessable_entity_exception::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "message" => { builder = builder.set_message( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_configure_logs( input: &[u8], mut builder: crate::output::configure_logs_output::Builder, ) -> Result<crate::output::configure_logs_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "authorization" => { builder = builder.set_authorization( crate::json_deser::deser_structure_authorization(tokens)?, ); } "domainName" => { builder = builder.set_domain_name( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "egressAccessLogs" => { builder = builder.set_egress_access_logs( crate::json_deser::deser_structure_egress_access_logs(tokens)?, ); } "id" => { builder = builder.set_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
// NOTE(review): generated code continues; this chunk begins mid-expression
// (the `.map`/`.transpose` tail of a string-field read started in the
// previous chunk — a line comment between tokens is legal Rust). It covers
// the operation-output deserializers for ConfigureLogs (tail), CreateAsset,
// CreatePackagingConfiguration, CreatePackagingGroup and DescribeAsset
// (head). Shared pattern: expect start object, dispatch on the unescaped
// key, read scalar strings via expect_string_or_null, delegate nested shapes
// to deser_structure_* / deser_list_* / deser_map_tags, skip unknown keys,
// and reject trailing tokens after the closing brace.
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "tags" => { builder = builder.set_tags(crate::json_deser::deser_map_tags(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_create_asset( input: &[u8], mut builder: crate::output::create_asset_output::Builder, ) -> Result<crate::output::create_asset_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "createdAt" => { builder = builder.set_created_at( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "egressEndpoints" => { builder = builder.set_egress_endpoints( crate::json_deser::deser_list___list_of_egress_endpoint(tokens)?, ); } "id" => { builder = builder.set_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "packagingGroupId" => { builder = builder.set_packaging_group_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "resourceId" => { builder = builder.set_resource_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "sourceArn" => { builder = builder.set_source_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "sourceRoleArn" => { builder = builder.set_source_role_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "tags" => { builder = builder.set_tags(crate::json_deser::deser_map_tags(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_create_packaging_configuration( input: &[u8], mut builder: crate::output::create_packaging_configuration_output::Builder, ) -> Result< crate::output::create_packaging_configuration_output::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "cmafPackage" => { builder = builder.set_cmaf_package( crate::json_deser::deser_structure_cmaf_package(tokens)?, ); } "dashPackage" => { builder = builder.set_dash_package( crate::json_deser::deser_structure_dash_package(tokens)?, ); } "hlsPackage" => { builder = builder.set_hls_package( crate::json_deser::deser_structure_hls_package(tokens)?, ); } "id" => { builder = builder.set_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "mssPackage" => { builder = builder.set_mss_package( crate::json_deser::deser_structure_mss_package(tokens)?, ); } "packagingGroupId" => { builder = builder.set_packaging_group_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "tags" => { builder = builder.set_tags(crate::json_deser::deser_map_tags(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_create_packaging_group( input: &[u8], mut builder: crate::output::create_packaging_group_output::Builder, ) -> Result<crate::output::create_packaging_group_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, ..
}) => { match key.to_unescaped()?.as_ref() { "arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "authorization" => { builder = builder.set_authorization( crate::json_deser::deser_structure_authorization(tokens)?, ); } "domainName" => { builder = builder.set_domain_name( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "egressAccessLogs" => { builder = builder.set_egress_access_logs( crate::json_deser::deser_structure_egress_access_logs(tokens)?, ); } "id" => { builder = builder.set_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "tags" => { builder = builder.set_tags(crate::json_deser::deser_map_tags(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_describe_asset( input: &[u8], mut builder: crate::output::describe_asset_output::Builder, ) -> Result<crate::output::describe_asset_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, ..
}) => { match key.to_unescaped()?.as_ref() { "arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "createdAt" => { builder = builder.set_created_at( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "egressEndpoints" => { builder = builder.set_egress_endpoints( crate::json_deser::deser_list___list_of_egress_endpoint(tokens)?, ); } "id" => { builder = builder.set_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "packagingGroupId" => { builder = builder.set_packaging_group_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "resourceId" => { builder = builder.set_resource_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "sourceArn" => { builder = builder.set_source_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "sourceRoleArn" => { builder = builder.set_source_role_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
// NOTE(review): generated code continues; this chunk begins mid-expression
// (tail of a string-field read from the previous chunk) and covers the
// operation-output deserializers for DescribePackagingConfiguration (tail),
// DescribePackagingGroup, ListAssets, ListPackagingConfigurations,
// ListPackagingGroups, ListTagsForResource, and the head of
// UpdatePackagingGroup. The list operations additionally read "nextToken"
// for pagination and delegate element lists to deser_list___list_of_*.
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "mssPackage" => { builder = builder.set_mss_package( crate::json_deser::deser_structure_mss_package(tokens)?, ); } "packagingGroupId" => { builder = builder.set_packaging_group_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "tags" => { builder = builder.set_tags(crate::json_deser::deser_map_tags(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_describe_packaging_group( input: &[u8], mut builder: crate::output::describe_packaging_group_output::Builder, ) -> Result<crate::output::describe_packaging_group_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "authorization" => { builder = builder.set_authorization( crate::json_deser::deser_structure_authorization(tokens)?, ); } "domainName" => { builder = builder.set_domain_name( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "egressAccessLogs" => { builder = builder.set_egress_access_logs( crate::json_deser::deser_structure_egress_access_logs(tokens)?, ); } "id" => { builder = builder.set_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "tags" => { builder = builder.set_tags(crate::json_deser::deser_map_tags(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_list_assets( input: &[u8], mut builder: crate::output::list_assets_output::Builder, ) -> Result<crate::output::list_assets_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "assets" => { builder = builder.set_assets( crate::json_deser::deser_list___list_of_asset_shallow(tokens)?, ); } "nextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_list_packaging_configurations( input: &[u8], mut builder: crate::output::list_packaging_configurations_output::Builder, ) -> Result< crate::output::list_packaging_configurations_output::Builder, smithy_json::deserialize::Error, > { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "nextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "packagingConfigurations" => { builder = builder.set_packaging_configurations( crate::json_deser::deser_list___list_of_packaging_configuration( tokens, )?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_list_packaging_groups( input: &[u8], mut builder: crate::output::list_packaging_groups_output::Builder, ) -> Result<crate::output::list_packaging_groups_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "nextToken" => { builder = builder.set_next_token( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "packagingGroups" => { builder = builder.set_packaging_groups( crate::json_deser::deser_list___list_of_packaging_group(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_list_tags_for_resource( input: &[u8], mut builder: crate::output::list_tags_for_resource_output::Builder, ) -> Result<crate::output::list_tags_for_resource_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, ..
}) => { match key.to_unescaped()?.as_ref() { "tags" => { builder = builder .set_tags(crate::json_deser::deser_map___map_of__string(tokens)?); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } if tokens.next().is_some() { return Err(smithy_json::deserialize::Error::custom( "found more JSON tokens after completing parsing", )); } Ok(builder) } pub fn deser_operation_update_packaging_group( input: &[u8], mut builder: crate::output::update_packaging_group_output::Builder, ) -> Result<crate::output::update_packaging_group_output::Builder, smithy_json::deserialize::Error> { let mut tokens_owned = smithy_json::deserialize::json_token_iter(crate::json_deser::or_empty_doc(input)) .peekable(); let tokens = &mut tokens_owned; smithy_json::deserialize::token::expect_start_object(tokens.next())?; loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "arn" => { builder = builder.set_arn( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "authorization" => { builder = builder.set_authorization( crate::json_deser::deser_structure_authorization(tokens)?, ); } "domainName" => { builder = builder.set_domain_name( smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "egressAccessLogs" => { builder = builder.set_egress_access_logs( crate::json_deser::deser_structure_egress_access_logs(tokens)?, ); } "id" => { builder = builder.set_id( smithy_json::deserialize::token::expect_string_or_null(tokens.next())?
.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
}
"tags" => {
    builder = builder.set_tags(crate::json_deser::deser_map_tags(tokens)?);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
    return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
}
}
}
// Any tokens left after the top-level object closes mean the payload was malformed.
if tokens.next().is_some() {
    return Err(smithy_json::deserialize::Error::custom("found more JSON tokens after completing parsing",));
}
Ok(builder)
}

/// Substitutes an empty JSON document (`{}`) for an empty response body so the
/// tokenizer always receives a parseable document.
pub fn or_empty_doc(data: &[u8]) -> &[u8] {
    if data.is_empty() { b"{}" } else { data }
}

/// Deserializes `crate::model::Authorization` from a JSON token stream.
/// Returns `Ok(None)` on JSON null; errors unless the next token opens an object.
/// Unknown keys are skipped.
pub fn deser_structure_authorization<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::Authorization>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::Authorization::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "cdnIdentifierSecret" => {
                                builder = builder.set_cdn_identifier_secret(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "secretsRoleArn" => {
                                builder = builder.set_secrets_role_arn(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes `crate::model::EgressAccessLogs` (single `logGroupName` member).
pub fn deser_structure_egress_access_logs<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::EgressAccessLogs>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::EgressAccessLogs::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "logGroupName" => {
                                builder = builder.set_log_group_name(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes the `tags` map (string -> string). Entries with JSON-null
/// values are dropped rather than inserted.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_map_tags<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::collections::HashMap<std::string::String, std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            let mut map = std::collections::HashMap::new();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        let key = key.to_unescaped().map(|u| u.into_owned())?;
                        let value = smithy_json::deserialize::token::expect_string_or_null(tokens.next())?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?;
                        if let Some(value) = value {
                            map.insert(key, value);
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(map))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes a JSON array of `EgressEndpoint` structures; null elements
/// produced by the element deserializer are omitted.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list___list_of_egress_endpoint<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::EgressEndpoint>>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { ..
})) => {
    // Peeked EndArray: consume it (already validated Ok by the peek) and stop.
    tokens.next().transpose().unwrap();
    break;
}
_ => {
    let value = crate::json_deser::deser_structure_egress_endpoint(tokens)?;
    if let Some(value) = value {
        items.push(value);
    }
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom("expected start array or null",)),
}
}

/// Deserializes `crate::model::CmafPackage`. Unknown keys are skipped.
pub fn deser_structure_cmaf_package<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::CmafPackage>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::CmafPackage::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "encryption" => {
                                builder = builder.set_encryption(crate::json_deser::deser_structure_cmaf_encryption(tokens)?,);
                            }
                            "hlsManifests" => {
                                builder = builder.set_hls_manifests(crate::json_deser::deser_list___list_of_hls_manifest(tokens)?,);
                            }
                            "includeEncoderConfigurationInSegments" => {
                                builder = builder.set_include_encoder_configuration_in_segments(smithy_json::deserialize::token::expect_bool_or_null(tokens.next(),)?,);
                            }
                            "segmentDurationSeconds" => {
                                builder = builder.set_segment_duration_seconds(smithy_json::deserialize::token::expect_number_or_null(tokens.next(),)?.map(|v| v.to_i32()),);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes `crate::model::DashPackage`. Unknown keys are skipped.
pub fn deser_structure_dash_package<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::DashPackage>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::DashPackage::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "dashManifests" => {
                                builder = builder.set_dash_manifests(crate::json_deser::deser_list___list_of_dash_manifest(tokens)?,);
                            }
                            "encryption" => {
                                builder = builder.set_encryption(crate::json_deser::deser_structure_dash_encryption(tokens)?,);
                            }
                            "includeEncoderConfigurationInSegments" => {
                                builder = builder.set_include_encoder_configuration_in_segments(smithy_json::deserialize::token::expect_bool_or_null(tokens.next(),)?,);
                            }
                            "periodTriggers" => {
                                builder = builder.set_period_triggers(crate::json_deser::deser_list___list_of__period_triggers_element(tokens)?);
                            }
                            "segmentDurationSeconds" => {
                                builder = builder.set_segment_duration_seconds(smithy_json::deserialize::token::expect_number_or_null(tokens.next(),)?.map(|v| v.to_i32()),);
                            }
                            "segmentTemplateFormat" => {
                                builder = builder.set_segment_template_format(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?
.map(|s| { s.to_unescaped().map(|u| { crate::model::SegmentTemplateFormat::from(u.as_ref()) }) }).transpose()?,);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
    return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
}
}

/// Deserializes `crate::model::HlsPackage`. Unknown keys are skipped.
pub fn deser_structure_hls_package<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::HlsPackage>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::HlsPackage::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "encryption" => {
                                builder = builder.set_encryption(crate::json_deser::deser_structure_hls_encryption(tokens)?,);
                            }
                            "hlsManifests" => {
                                builder = builder.set_hls_manifests(crate::json_deser::deser_list___list_of_hls_manifest(tokens)?,);
                            }
                            "segmentDurationSeconds" => {
                                builder = builder.set_segment_duration_seconds(smithy_json::deserialize::token::expect_number_or_null(tokens.next(),)?.map(|v| v.to_i32()),);
                            }
                            "useAudioRenditionGroup" => {
                                builder = builder.set_use_audio_rendition_group(smithy_json::deserialize::token::expect_bool_or_null(tokens.next(),)?,);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes `crate::model::MssPackage`. Unknown keys are skipped.
pub fn deser_structure_mss_package<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::MssPackage>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::MssPackage::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "encryption" => {
                                builder = builder.set_encryption(crate::json_deser::deser_structure_mss_encryption(tokens)?,);
                            }
                            "mssManifests" => {
                                builder = builder.set_mss_manifests(crate::json_deser::deser_list___list_of_mss_manifest(tokens)?,);
                            }
                            "segmentDurationSeconds" => {
                                builder = builder.set_segment_duration_seconds(smithy_json::deserialize::token::expect_number_or_null(tokens.next(),)?.map(|v| v.to_i32()),);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes a JSON array of `AssetShallow` structures; null elements are omitted.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list___list_of_asset_shallow<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::AssetShallow>>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = crate::json_deser::deser_structure_asset_shallow(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start array or null",)),
    }
}

/// Deserializes a JSON array of `PackagingConfiguration` structures; null elements are omitted.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list___list_of_packaging_configuration<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::PackagingConfiguration>>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { ..
})) => {
    // Peeked EndArray: consume it (already validated Ok by the peek) and stop.
    tokens.next().transpose().unwrap();
    break;
}
_ => {
    let value = crate::json_deser::deser_structure_packaging_configuration(tokens)?;
    if let Some(value) = value {
        items.push(value);
    }
}
}
}
Ok(Some(items))
}
_ => Err(smithy_json::deserialize::Error::custom("expected start array or null",)),
}
}

/// Deserializes a JSON array of `PackagingGroup` structures; null elements are omitted.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list___list_of_packaging_group<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::PackagingGroup>>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = crate::json_deser::deser_structure_packaging_group(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start array or null",)),
    }
}

/// Deserializes a generic string-to-string JSON map; entries with null values are dropped.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_map___map_of__string<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::collections::HashMap<std::string::String, std::string::String>>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            let mut map = std::collections::HashMap::new();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        let key = key.to_unescaped().map(|u| u.into_owned())?;
                        let value = smithy_json::deserialize::token::expect_string_or_null(tokens.next())?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?;
                        if let Some(value) = value {
                            map.insert(key, value);
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(map))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes `crate::model::EgressEndpoint`. Unknown keys are skipped.
pub fn deser_structure_egress_endpoint<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::EgressEndpoint>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::EgressEndpoint::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "packagingConfigurationId" => {
                                builder = builder.set_packaging_configuration_id(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "url" => {
                                builder = builder.set_url(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes `crate::model::CmafEncryption`. Unknown keys are skipped.
pub fn deser_structure_cmaf_encryption<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::CmafEncryption>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::CmafEncryption::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "constantInitializationVector" => {
                                builder = builder.set_constant_initialization_vector(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?
.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
}
"spekeKeyProvider" => {
    builder = builder.set_speke_key_provider(crate::json_deser::deser_structure_speke_key_provider(tokens)?,);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
    return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
}
}

/// Deserializes a JSON array of `HlsManifest` structures; null elements are omitted.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list___list_of_hls_manifest<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::HlsManifest>>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = crate::json_deser::deser_structure_hls_manifest(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start array or null",)),
    }
}

/// Deserializes a JSON array of `DashManifest` structures; null elements are omitted.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list___list_of_dash_manifest<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::DashManifest>>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = crate::json_deser::deser_structure_dash_manifest(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start array or null",)),
    }
}

/// Deserializes `crate::model::DashEncryption` (single `spekeKeyProvider` member).
pub fn deser_structure_dash_encryption<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::DashEncryption>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::DashEncryption::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "spekeKeyProvider" => {
                                builder = builder.set_speke_key_provider(crate::json_deser::deser_structure_speke_key_provider(tokens)?,);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes a JSON array of `PeriodTriggersElement` enum values parsed
/// from their string representation; null elements are omitted.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list___list_of__period_triggers_element<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::PeriodTriggersElement>>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = smithy_json::deserialize::token::expect_string_or_null(tokens.next())?.map(|s| { s.to_unescaped().map(|u| { crate::model::PeriodTriggersElement::from(u.as_ref()) }) }).transpose()?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start array or null",)),
    }
}

/// Deserializes `crate::model::HlsEncryption`. Unknown keys are skipped.
pub fn deser_structure_hls_encryption<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::HlsEncryption>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::HlsEncryption::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "constantInitializationVector" => {
                                builder = builder.set_constant_initialization_vector(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "encryptionMethod" => {
                                builder = builder.set_encryption_method(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?
.map(|s| { s.to_unescaped().map(|u| { crate::model::EncryptionMethod::from(u.as_ref()) }) }).transpose()?,);
}
"spekeKeyProvider" => {
    builder = builder.set_speke_key_provider(crate::json_deser::deser_structure_speke_key_provider(tokens)?,);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
    return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
}
}

/// Deserializes `crate::model::MssEncryption` (single `spekeKeyProvider` member).
pub fn deser_structure_mss_encryption<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::MssEncryption>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::MssEncryption::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "spekeKeyProvider" => {
                                builder = builder.set_speke_key_provider(crate::json_deser::deser_structure_speke_key_provider(tokens)?,);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes a JSON array of `MssManifest` structures; null elements are omitted.
#[allow(clippy::type_complexity, non_snake_case)]
pub fn deser_list___list_of_mss_manifest<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<std::vec::Vec<crate::model::MssManifest>>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartArray { .. }) => {
            let mut items = Vec::new();
            loop {
                match tokens.peek() {
                    Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => {
                        tokens.next().transpose().unwrap();
                        break;
                    }
                    _ => {
                        let value = crate::json_deser::deser_structure_mss_manifest(tokens)?;
                        if let Some(value) = value {
                            items.push(value);
                        }
                    }
                }
            }
            Ok(Some(items))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start array or null",)),
    }
}

/// Deserializes `crate::model::AssetShallow`. Unknown keys are skipped.
pub fn deser_structure_asset_shallow<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::AssetShallow>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::AssetShallow::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "arn" => {
                                builder = builder.set_arn(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "createdAt" => {
                                builder = builder.set_created_at(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "id" => {
                                builder = builder.set_id(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "packagingGroupId" => {
                                builder = builder.set_packaging_group_id(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "resourceId" => {
                                builder = builder.set_resource_id(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "sourceArn" => {
                                builder = builder.set_source_arn(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "sourceRoleArn" => {
                                builder = builder.set_source_role_arn(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "tags" => {
                                builder = builder.set_tags(crate::json_deser::deser_map_tags(tokens)?);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes `crate::model::PackagingConfiguration`. Unknown keys are skipped.
pub fn deser_structure_packaging_configuration<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::PackagingConfiguration>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::PackagingConfiguration::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "arn" => {
                                builder = builder.set_arn(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "cmafPackage" => {
                                builder = builder.set_cmaf_package(crate::json_deser::deser_structure_cmaf_package(tokens)?,);
                            }
                            "dashPackage" => {
                                builder = builder.set_dash_package(crate::json_deser::deser_structure_dash_package(tokens)?,);
                            }
                            "hlsPackage" => {
                                builder = builder.set_hls_package(crate::json_deser::deser_structure_hls_package(tokens)?,);
                            }
                            "id" => {
                                builder = builder.set_id(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?
.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
}
"mssPackage" => {
    builder = builder.set_mss_package(crate::json_deser::deser_structure_mss_package(tokens)?,);
}
"packagingGroupId" => {
    builder = builder.set_packaging_group_id(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
}
"tags" => {
    builder = builder.set_tags(crate::json_deser::deser_map_tags(tokens)?);
}
_ => smithy_json::deserialize::token::skip_value(tokens)?,
}
}
_ => {
    return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
}
}
}
Ok(Some(builder.build()))
}
_ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
}
}

/// Deserializes `crate::model::PackagingGroup`. Unknown keys are skipped.
pub fn deser_structure_packaging_group<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::PackagingGroup>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::PackagingGroup::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "arn" => {
                                builder = builder.set_arn(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "authorization" => {
                                builder = builder.set_authorization(crate::json_deser::deser_structure_authorization(tokens)?,);
                            }
                            "domainName" => {
                                builder = builder.set_domain_name(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "egressAccessLogs" => {
                                builder = builder.set_egress_access_logs(crate::json_deser::deser_structure_egress_access_logs(tokens)?,);
                            }
                            "id" => {
                                builder = builder.set_id(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "tags" => {
                                builder = builder.set_tags(crate::json_deser::deser_map_tags(tokens)?);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes `crate::model::SpekeKeyProvider`. Unknown keys are skipped.
pub fn deser_structure_speke_key_provider<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::SpekeKeyProvider>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::SpekeKeyProvider::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "roleArn" => {
                                builder = builder.set_role_arn(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "systemIds" => {
                                builder = builder.set_system_ids(crate::json_deser::deser_list___list_of__string(tokens)?,);
                            }
                            "url" => {
                                builder = builder.set_url(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            _ => smithy_json::deserialize::token::skip_value(tokens)?,
                        }
                    }
                    _ => {
                        return Err(smithy_json::deserialize::Error::custom("expected object key or end object",))
                    }
                }
            }
            Ok(Some(builder.build()))
        }
        _ => Err(smithy_json::deserialize::Error::custom("expected start object or null",)),
    }
}

/// Deserializes `crate::model::HlsManifest`. Unknown keys are skipped.
pub fn deser_structure_hls_manifest<'a, I>(
    tokens: &mut std::iter::Peekable<I>,
) -> Result<Option<crate::model::HlsManifest>, smithy_json::deserialize::Error>
where
    I: Iterator<Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>>,
{
    match tokens.next().transpose()? {
        Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None),
        Some(smithy_json::deserialize::Token::StartObject { .. }) => {
            #[allow(unused_mut)]
            let mut builder = crate::model::HlsManifest::builder();
            loop {
                match tokens.next().transpose()? {
                    Some(smithy_json::deserialize::Token::EndObject { .. }) => break,
                    Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => {
                        match key.to_unescaped()?.as_ref() {
                            "adMarkers" => {
                                builder = builder.set_ad_markers(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| { s.to_unescaped().map(|u| crate::model::AdMarkers::from(u.as_ref())) }).transpose()?,);
                            }
                            "includeIframeOnlyStream" => {
                                builder = builder.set_include_iframe_only_stream(smithy_json::deserialize::token::expect_bool_or_null(tokens.next(),)?,);
                            }
                            "manifestName" => {
                                builder = builder.set_manifest_name(smithy_json::deserialize::token::expect_string_or_null(tokens.next(),)?.map(|s| s.to_unescaped().map(|u| u.into_owned())).transpose()?,);
                            }
                            "programDateTimeIntervalSeconds" => {
                                builder = builder.set_program_date_time_interval_seconds(smithy_json::deserialize::token::expect_number_or_null(tokens.next(),)?
.map(|v| v.to_i32()), ); } "repeatExtXKey" => { builder = builder.set_repeat_ext_x_key( smithy_json::deserialize::token::expect_bool_or_null( tokens.next(), )?, ); } "streamSelection" => { builder = builder.set_stream_selection( crate::json_deser::deser_structure_stream_selection(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_dash_manifest<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::DashManifest>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::DashManifest::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "manifestLayout" => { builder = builder.set_manifest_layout( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped() .map(|u| crate::model::ManifestLayout::from(u.as_ref())) }) .transpose()?, ); } "manifestName" => { builder = builder.set_manifest_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "minBufferTimeSeconds" => { builder = builder.set_min_buffer_time_seconds( smithy_json::deserialize::token::expect_number_or_null( tokens.next(), )? 
.map(|v| v.to_i32()), ); } "profile" => { builder = builder.set_profile( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped() .map(|u| crate::model::Profile::from(u.as_ref())) }) .transpose()?, ); } "streamSelection" => { builder = builder.set_stream_selection( crate::json_deser::deser_structure_stream_selection(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } pub fn deser_structure_mss_manifest<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::MssManifest>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. }) => { #[allow(unused_mut)] let mut builder = crate::model::MssManifest::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "manifestName" => { builder = builder.set_manifest_name( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? 
.map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?, ); } "streamSelection" => { builder = builder.set_stream_selection( crate::json_deser::deser_structure_stream_selection(tokens)?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } } #[allow(clippy::type_complexity, non_snake_case)] pub fn deser_list___list_of__string<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<std::vec::Vec<std::string::String>>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartArray { .. }) => { let mut items = Vec::new(); loop { match tokens.peek() { Some(Ok(smithy_json::deserialize::Token::EndArray { .. })) => { tokens.next().transpose().unwrap(); break; } _ => { let value = smithy_json::deserialize::token::expect_string_or_null(tokens.next())? .map(|s| s.to_unescaped().map(|u| u.into_owned())) .transpose()?; if let Some(value) = value { items.push(value); } } } } Ok(Some(items)) } _ => Err(smithy_json::deserialize::Error::custom( "expected start array or null", )), } } pub fn deser_structure_stream_selection<'a, I>( tokens: &mut std::iter::Peekable<I>, ) -> Result<Option<crate::model::StreamSelection>, smithy_json::deserialize::Error> where I: Iterator< Item = Result<smithy_json::deserialize::Token<'a>, smithy_json::deserialize::Error>, >, { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::ValueNull { .. }) => Ok(None), Some(smithy_json::deserialize::Token::StartObject { .. 
}) => { #[allow(unused_mut)] let mut builder = crate::model::StreamSelection::builder(); loop { match tokens.next().transpose()? { Some(smithy_json::deserialize::Token::EndObject { .. }) => break, Some(smithy_json::deserialize::Token::ObjectKey { key, .. }) => { match key.to_unescaped()?.as_ref() { "maxVideoBitsPerSecond" => { builder = builder.set_max_video_bits_per_second( smithy_json::deserialize::token::expect_number_or_null( tokens.next(), )? .map(|v| v.to_i32()), ); } "minVideoBitsPerSecond" => { builder = builder.set_min_video_bits_per_second( smithy_json::deserialize::token::expect_number_or_null( tokens.next(), )? .map(|v| v.to_i32()), ); } "streamOrder" => { builder = builder.set_stream_order( smithy_json::deserialize::token::expect_string_or_null( tokens.next(), )? .map(|s| { s.to_unescaped() .map(|u| crate::model::StreamOrder::from(u.as_ref())) }) .transpose()?, ); } _ => smithy_json::deserialize::token::skip_value(tokens)?, } } _ => { return Err(smithy_json::deserialize::Error::custom( "expected object key or end object", )) } } } Ok(Some(builder.build())) } _ => Err(smithy_json::deserialize::Error::custom( "expected start object or null", )), } }
use crate::{std_types::RStr, type_layout::MonoTypeLayout}; /// Represents the layout of a prefix-type,for use in error messages. #[repr(C)] #[derive(Debug, Copy, Clone, StableAbi)] // #[derive(Debug, Copy, Clone, PartialEq, StableAbi)] pub struct PTStructLayout { /// The stringified generic parameters. pub generics: RStr<'static>, /// The layout information of the type which doesn't depend on generic parameters pub mono_layout: &'static MonoTypeLayout, } ////////////////////////////////////////////////////////////// impl PTStructLayout { /// Constructs a `PTStructLayout`. pub const fn new(generics: RStr<'static>, mono_layout: &'static MonoTypeLayout) -> Self { Self { generics, mono_layout, } } /// Gets an iterator over the names of the fields. #[inline] pub fn get_field_names(&self) -> impl Iterator<Item = &'static str> { self.mono_layout.field_names() } /// Gets a `Vec` with the names of the fields. #[inline] pub fn get_field_names_vec(&self) -> Vec<&'static str> { self.mono_layout.field_names().collect() } /// Gets the name of the `ith` field, returning `None` if there is no `ith` field. #[inline] pub fn get_field_name(&self, ith: usize) -> Option<&'static str> { self.mono_layout.get_field_name(ith) } }
use std::collections::{HashMap}; use rlua::{Lua, Table}; use {Error}; /// Tracks values to be converted to a model for use by the scripting language. pub struct ScriptTable { values: HashMap<String, ScriptValue>, } impl ScriptTable { /// Creates a new empty model. pub fn new() -> Self { ScriptTable { values: HashMap::new(), } } pub(crate) fn to_lua_table<'l>(&self, lua: &'l Lua) -> Result<Table<'l>, Error> { let model_table = lua.create_table()?; for (key, value) in &self.values { match *value { ScriptValue::Bool(value) => model_table.set(key.as_str(), value)?, ScriptValue::String(ref value) => model_table.set(key.as_str(), value.as_str())?, } } Ok(model_table) } /// Sets the field with given key in the model to the given value. pub fn set<V: Into<ScriptValue>>(&mut self, key: &str, value: V) { self.values.insert(key.into(), value.into()); } } /// A generic value stored in the model. pub enum ScriptValue { Bool(bool), String(String), } impl From<bool> for ScriptValue { fn from(value: bool) -> Self { ScriptValue::Bool(value) } } impl From<String> for ScriptValue { fn from(value: String) -> Self { ScriptValue::String(value) } }
#[doc = "Reader of register CFGR"] pub type R = crate::R<u32, super::CFGR>; #[doc = "Writer for register CFGR"] pub type W = crate::W<u32, super::CFGR>; #[doc = "Register CFGR `reset()`'s with value 0"] impl crate::ResetValue for super::CFGR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "ADC group injected contexts queue disable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum JQDIS_A { #[doc = "0: Injected Queue enabled"] ENABLED = 0, #[doc = "1: Injected Queue disabled"] DISABLED = 1, } impl From<JQDIS_A> for bool { #[inline(always)] fn from(variant: JQDIS_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `JQDIS`"] pub type JQDIS_R = crate::R<bool, JQDIS_A>; impl JQDIS_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> JQDIS_A { match self.bits { false => JQDIS_A::ENABLED, true => JQDIS_A::DISABLED, } } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == JQDIS_A::ENABLED } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == JQDIS_A::DISABLED } } #[doc = "Write proxy for field `JQDIS`"] pub struct JQDIS_W<'a> { w: &'a mut W, } impl<'a> JQDIS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: JQDIS_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Injected Queue enabled"] #[inline(always)] pub fn enabled(self) -> &'a mut W { self.variant(JQDIS_A::ENABLED) } #[doc = "Injected Queue disabled"] #[inline(always)] pub fn disabled(self) -> &'a mut W { self.variant(JQDIS_A::DISABLED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: 
bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } #[doc = "Reader of field `AWD1CH`"] pub type AWD1CH_R = crate::R<u8, u8>; #[doc = "Write proxy for field `AWD1CH`"] pub struct AWD1CH_W<'a> { w: &'a mut W, } impl<'a> AWD1CH_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 26)) | (((value as u32) & 0x1f) << 26); self.w } } #[doc = "ADC group injected automatic trigger mode\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum JAUTO_A { #[doc = "0: Automatic injected group conversion disabled"] DISABLED = 0, #[doc = "1: Automatic injected group conversion enabled"] ENABLED = 1, } impl From<JAUTO_A> for bool { #[inline(always)] fn from(variant: JAUTO_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `JAUTO`"] pub type JAUTO_R = crate::R<bool, JAUTO_A>; impl JAUTO_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> JAUTO_A { match self.bits { false => JAUTO_A::DISABLED, true => JAUTO_A::ENABLED, } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == JAUTO_A::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == JAUTO_A::ENABLED } } #[doc = "Write proxy for field `JAUTO`"] pub struct JAUTO_W<'a> { w: &'a mut W, } impl<'a> JAUTO_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: JAUTO_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Automatic injected group conversion disabled"] #[inline(always)] pub fn disabled(self) -> &'a mut W { self.variant(JAUTO_A::DISABLED) } #[doc = "Automatic injected group conversion enabled"] #[inline(always)] pub fn enabled(self) -> &'a mut W { self.variant(JAUTO_A::ENABLED) } #[doc = r"Sets the field bit"] 
#[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25); self.w } } #[doc = "ADC analog watchdog 1 enable on scope ADC group injected\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum JAWD1EN_A { #[doc = "0: Analog watchdog 1 disabled on injected channels"] DISABLED = 0, #[doc = "1: Analog watchdog 1 enabled on injected channels"] ENABLED = 1, } impl From<JAWD1EN_A> for bool { #[inline(always)] fn from(variant: JAWD1EN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `JAWD1EN`"] pub type JAWD1EN_R = crate::R<bool, JAWD1EN_A>; impl JAWD1EN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> JAWD1EN_A { match self.bits { false => JAWD1EN_A::DISABLED, true => JAWD1EN_A::ENABLED, } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == JAWD1EN_A::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == JAWD1EN_A::ENABLED } } #[doc = "Write proxy for field `JAWD1EN`"] pub struct JAWD1EN_W<'a> { w: &'a mut W, } impl<'a> JAWD1EN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: JAWD1EN_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Analog watchdog 1 disabled on injected channels"] #[inline(always)] pub fn disabled(self) -> &'a mut W { self.variant(JAWD1EN_A::DISABLED) } #[doc = "Analog watchdog 1 enabled on injected channels"] #[inline(always)] pub fn enabled(self) -> &'a mut W { self.variant(JAWD1EN_A::ENABLED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { 
self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24); self.w } } #[doc = "ADC analog watchdog 1 enable on scope ADC group regular\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AWD1EN_A { #[doc = "0: Analog watchdog 1 disabled on regular channels"] DISABLED = 0, #[doc = "1: Analog watchdog 1 enabled on regular channels"] ENABLED = 1, } impl From<AWD1EN_A> for bool { #[inline(always)] fn from(variant: AWD1EN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `AWD1EN`"] pub type AWD1EN_R = crate::R<bool, AWD1EN_A>; impl AWD1EN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> AWD1EN_A { match self.bits { false => AWD1EN_A::DISABLED, true => AWD1EN_A::ENABLED, } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == AWD1EN_A::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == AWD1EN_A::ENABLED } } #[doc = "Write proxy for field `AWD1EN`"] pub struct AWD1EN_W<'a> { w: &'a mut W, } impl<'a> AWD1EN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: AWD1EN_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Analog watchdog 1 disabled on regular channels"] #[inline(always)] pub fn disabled(self) -> &'a mut W { self.variant(AWD1EN_A::DISABLED) } #[doc = "Analog watchdog 1 enabled on regular channels"] #[inline(always)] pub fn enabled(self) -> &'a mut W { self.variant(AWD1EN_A::ENABLED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn 
clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23); self.w } } #[doc = "ADC analog watchdog 1 monitoring a single channel or all channels\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AWD1SGL_A { #[doc = "0: Analog watchdog 1 enabled on all channels"] ALL = 0, #[doc = "1: Analog watchdog 1 enabled on single channel selected in AWD1CH"] SINGLE = 1, } impl From<AWD1SGL_A> for bool { #[inline(always)] fn from(variant: AWD1SGL_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `AWD1SGL`"] pub type AWD1SGL_R = crate::R<bool, AWD1SGL_A>; impl AWD1SGL_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> AWD1SGL_A { match self.bits { false => AWD1SGL_A::ALL, true => AWD1SGL_A::SINGLE, } } #[doc = "Checks if the value of the field is `ALL`"] #[inline(always)] pub fn is_all(&self) -> bool { *self == AWD1SGL_A::ALL } #[doc = "Checks if the value of the field is `SINGLE`"] #[inline(always)] pub fn is_single(&self) -> bool { *self == AWD1SGL_A::SINGLE } } #[doc = "Write proxy for field `AWD1SGL`"] pub struct AWD1SGL_W<'a> { w: &'a mut W, } impl<'a> AWD1SGL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: AWD1SGL_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Analog watchdog 1 enabled on all channels"] #[inline(always)] pub fn all(self) -> &'a mut W { self.variant(AWD1SGL_A::ALL) } #[doc = "Analog watchdog 1 enabled on single channel selected in AWD1CH"] #[inline(always)] pub fn single(self) -> &'a mut W { self.variant(AWD1SGL_A::SINGLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits 
to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22); self.w } } #[doc = "ADC group injected contexts queue mode\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum JQM_A { #[doc = "0: JSQR Mode 0: Queue maintains the last written configuration into JSQR"] MODE0 = 0, #[doc = "1: JSQR Mode 1: An empty queue disables software and hardware triggers of the injected sequence"] MODE1 = 1, } impl From<JQM_A> for bool { #[inline(always)] fn from(variant: JQM_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `JQM`"] pub type JQM_R = crate::R<bool, JQM_A>; impl JQM_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> JQM_A { match self.bits { false => JQM_A::MODE0, true => JQM_A::MODE1, } } #[doc = "Checks if the value of the field is `MODE0`"] #[inline(always)] pub fn is_mode0(&self) -> bool { *self == JQM_A::MODE0 } #[doc = "Checks if the value of the field is `MODE1`"] #[inline(always)] pub fn is_mode1(&self) -> bool { *self == JQM_A::MODE1 } } #[doc = "Write proxy for field `JQM`"] pub struct JQM_W<'a> { w: &'a mut W, } impl<'a> JQM_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: JQM_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "JSQR Mode 0: Queue maintains the last written configuration into JSQR"] #[inline(always)] pub fn mode0(self) -> &'a mut W { self.variant(JQM_A::MODE0) } #[doc = "JSQR Mode 1: An empty queue disables software and hardware triggers of the injected sequence"] #[inline(always)] pub fn mode1(self) -> &'a mut W { self.variant(JQM_A::MODE1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, 
value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21); self.w } } #[doc = "ADC group injected sequencer discontinuous mode\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum JDISCEN_A { #[doc = "0: Discontinuous mode on injected channels disabled"] DISABLED = 0, #[doc = "1: Discontinuous mode on injected channels enabled"] ENABLED = 1, } impl From<JDISCEN_A> for bool { #[inline(always)] fn from(variant: JDISCEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `JDISCEN`"] pub type JDISCEN_R = crate::R<bool, JDISCEN_A>; impl JDISCEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> JDISCEN_A { match self.bits { false => JDISCEN_A::DISABLED, true => JDISCEN_A::ENABLED, } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == JDISCEN_A::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == JDISCEN_A::ENABLED } } #[doc = "Write proxy for field `JDISCEN`"] pub struct JDISCEN_W<'a> { w: &'a mut W, } impl<'a> JDISCEN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: JDISCEN_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Discontinuous mode on injected channels disabled"] #[inline(always)] pub fn disabled(self) -> &'a mut W { self.variant(JDISCEN_A::DISABLED) } #[doc = "Discontinuous mode on injected channels enabled"] #[inline(always)] pub fn enabled(self) -> &'a mut W { self.variant(JDISCEN_A::ENABLED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 
20)) | (((value as u32) & 0x01) << 20); self.w } } #[doc = "Reader of field `DISCNUM`"] pub type DISCNUM_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DISCNUM`"] pub struct DISCNUM_W<'a> { w: &'a mut W, } impl<'a> DISCNUM_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 17)) | (((value as u32) & 0x07) << 17); self.w } } #[doc = "ADC group regular sequencer discontinuous mode\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum DISCEN_A { #[doc = "0: Discontinuous mode on regular channels disabled"] DISABLED = 0, #[doc = "1: Discontinuous mode on regular channels enabled"] ENABLED = 1, } impl From<DISCEN_A> for bool { #[inline(always)] fn from(variant: DISCEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `DISCEN`"] pub type DISCEN_R = crate::R<bool, DISCEN_A>; impl DISCEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DISCEN_A { match self.bits { false => DISCEN_A::DISABLED, true => DISCEN_A::ENABLED, } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == DISCEN_A::DISABLED } #[doc = "Checks if the value of the field is `ENABLED`"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == DISCEN_A::ENABLED } } #[doc = "Write proxy for field `DISCEN`"] pub struct DISCEN_W<'a> { w: &'a mut W, } impl<'a> DISCEN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: DISCEN_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Discontinuous mode on regular channels disabled"] #[inline(always)] pub fn disabled(self) -> &'a mut W { self.variant(DISCEN_A::DISABLED) } #[doc = "Discontinuous mode on regular channels enabled"] #[inline(always)] pub fn enabled(self) -> &'a mut W { self.variant(DISCEN_A::ENABLED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn 
set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "ADC low power auto wait\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AUTDLY_A { #[doc = "0: Auto delayed conversion mode off"] OFF = 0, #[doc = "1: Auto delayed conversion mode on"] ON = 1, } impl From<AUTDLY_A> for bool { #[inline(always)] fn from(variant: AUTDLY_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `AUTDLY`"] pub type AUTDLY_R = crate::R<bool, AUTDLY_A>; impl AUTDLY_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> AUTDLY_A { match self.bits { false => AUTDLY_A::OFF, true => AUTDLY_A::ON, } } #[doc = "Checks if the value of the field is `OFF`"] #[inline(always)] pub fn is_off(&self) -> bool { *self == AUTDLY_A::OFF } #[doc = "Checks if the value of the field is `ON`"] #[inline(always)] pub fn is_on(&self) -> bool { *self == AUTDLY_A::ON } } #[doc = "Write proxy for field `AUTDLY`"] pub struct AUTDLY_W<'a> { w: &'a mut W, } impl<'a> AUTDLY_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: AUTDLY_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Auto delayed conversion mode off"] #[inline(always)] pub fn off(self) -> &'a mut W { self.variant(AUTDLY_A::OFF) } #[doc = "Auto delayed conversion mode on"] #[inline(always)] pub fn on(self) -> &'a mut W { self.variant(AUTDLY_A::ON) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> 
&'a mut W { self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14); self.w } } #[doc = "ADC group regular continuous conversion mode\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CONT_A { #[doc = "0: Single conversion mode"] SINGLE = 0, #[doc = "1: Continuous conversion mode"] CONTINUOUS = 1, } impl From<CONT_A> for bool { #[inline(always)] fn from(variant: CONT_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `CONT`"] pub type CONT_R = crate::R<bool, CONT_A>; impl CONT_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> CONT_A { match self.bits { false => CONT_A::SINGLE, true => CONT_A::CONTINUOUS, } } #[doc = "Checks if the value of the field is `SINGLE`"] #[inline(always)] pub fn is_single(&self) -> bool { *self == CONT_A::SINGLE } #[doc = "Checks if the value of the field is `CONTINUOUS`"] #[inline(always)] pub fn is_continuous(&self) -> bool { *self == CONT_A::CONTINUOUS } } #[doc = "Write proxy for field `CONT`"] pub struct CONT_W<'a> { w: &'a mut W, } impl<'a> CONT_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: CONT_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Single conversion mode"] #[inline(always)] pub fn single(self) -> &'a mut W { self.variant(CONT_A::SINGLE) } #[doc = "Continuous conversion mode"] #[inline(always)] pub fn continuous(self) -> &'a mut W { self.variant(CONT_A::CONTINUOUS) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13); self.w } } #[doc = "ADC group regular overrun configuration\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, 
PartialEq)] pub enum OVRMOD_A { #[doc = "0: Preserve DR register when an overrun is detected"] PRESERVE = 0, #[doc = "1: Overwrite DR register when an overrun is detected"] OVERWRITE = 1, } impl From<OVRMOD_A> for bool { #[inline(always)] fn from(variant: OVRMOD_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `OVRMOD`"] pub type OVRMOD_R = crate::R<bool, OVRMOD_A>; impl OVRMOD_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> OVRMOD_A { match self.bits { false => OVRMOD_A::PRESERVE, true => OVRMOD_A::OVERWRITE, } } #[doc = "Checks if the value of the field is `PRESERVE`"] #[inline(always)] pub fn is_preserve(&self) -> bool { *self == OVRMOD_A::PRESERVE } #[doc = "Checks if the value of the field is `OVERWRITE`"] #[inline(always)] pub fn is_overwrite(&self) -> bool { *self == OVRMOD_A::OVERWRITE } } #[doc = "Write proxy for field `OVRMOD`"] pub struct OVRMOD_W<'a> { w: &'a mut W, } impl<'a> OVRMOD_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: OVRMOD_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Preserve DR register when an overrun is detected"] #[inline(always)] pub fn preserve(self) -> &'a mut W { self.variant(OVRMOD_A::PRESERVE) } #[doc = "Overwrite DR register when an overrun is detected"] #[inline(always)] pub fn overwrite(self) -> &'a mut W { self.variant(OVRMOD_A::OVERWRITE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12); self.w } } #[doc = "ADC group regular external trigger polarity\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum EXTEN_A { #[doc = "0: Trigger detection 
disabled"] DISABLED = 0, #[doc = "1: Trigger detection on the rising edge"] RISINGEDGE = 1, #[doc = "2: Trigger detection on the falling edge"] FALLINGEDGE = 2, #[doc = "3: Trigger detection on both the rising and falling edges"] BOTHEDGES = 3, } impl From<EXTEN_A> for u8 { #[inline(always)] fn from(variant: EXTEN_A) -> Self { variant as _ } } #[doc = "Reader of field `EXTEN`"] pub type EXTEN_R = crate::R<u8, EXTEN_A>; impl EXTEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> EXTEN_A { match self.bits { 0 => EXTEN_A::DISABLED, 1 => EXTEN_A::RISINGEDGE, 2 => EXTEN_A::FALLINGEDGE, 3 => EXTEN_A::BOTHEDGES, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `DISABLED`"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == EXTEN_A::DISABLED } #[doc = "Checks if the value of the field is `RISINGEDGE`"] #[inline(always)] pub fn is_rising_edge(&self) -> bool { *self == EXTEN_A::RISINGEDGE } #[doc = "Checks if the value of the field is `FALLINGEDGE`"] #[inline(always)] pub fn is_falling_edge(&self) -> bool { *self == EXTEN_A::FALLINGEDGE } #[doc = "Checks if the value of the field is `BOTHEDGES`"] #[inline(always)] pub fn is_both_edges(&self) -> bool { *self == EXTEN_A::BOTHEDGES } } #[doc = "Write proxy for field `EXTEN`"] pub struct EXTEN_W<'a> { w: &'a mut W, } impl<'a> EXTEN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: EXTEN_A) -> &'a mut W { { self.bits(variant.into()) } } #[doc = "Trigger detection disabled"] #[inline(always)] pub fn disabled(self) -> &'a mut W { self.variant(EXTEN_A::DISABLED) } #[doc = "Trigger detection on the rising edge"] #[inline(always)] pub fn rising_edge(self) -> &'a mut W { self.variant(EXTEN_A::RISINGEDGE) } #[doc = "Trigger detection on the falling edge"] #[inline(always)] pub fn falling_edge(self) -> &'a mut W { self.variant(EXTEN_A::FALLINGEDGE) } #[doc = "Trigger detection on both the rising and falling edges"] 
#[inline(always)] pub fn both_edges(self) -> &'a mut W { self.variant(EXTEN_A::BOTHEDGES) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u32) & 0x03) << 10); self.w } } #[doc = "ADC group regular external trigger source\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum EXTSEL_A { #[doc = "5: Timer 4 CC4 event"] TIM4_CC4 = 5, #[doc = "7: Timer 8 TRGO event"] TIM8_TRGO = 7, #[doc = "8: Timer 8 TRGO2 event"] TIM8_TRGO2 = 8, #[doc = "12: Timer 4 TRGO event"] TIM4_TRGO = 12, #[doc = "16: HRTIM1_ADCTRG1 event"] HRTIM1_ADCTRG1 = 16, #[doc = "17: HRTIM1_ADCTRG3 event"] HRTIM1_ADCTRG3 = 17, #[doc = "18: LPTIM1_OUT event"] LPTIM1_OUT = 18, #[doc = "19: LPTIM2_OUT event"] LPTIM2_OUT = 19, #[doc = "20: LPTIM3_OUT event"] LPTIM3_OUT = 20, #[doc = "0: Timer 1 CC1 event"] TIM1_CC1 = 0, #[doc = "1: Timer 1 CC2 event"] TIM1_CC2 = 1, #[doc = "2: Timer 1 CC3 event"] TIM1_CC3 = 2, #[doc = "3: Timer 2 CC2 event"] TIM2_CC2 = 3, #[doc = "4: Timer 3 TRGO event"] TIM3_TRGO = 4, #[doc = "6: EXTI line 11"] EXTI11 = 6, #[doc = "9: Timer 1 TRGO event"] TIM1_TRGO = 9, #[doc = "10: Timer 1 TRGO2 event"] TIM1_TRGO2 = 10, #[doc = "11: Timer 2 TRGO event"] TIM2_TRGO = 11, #[doc = "13: Timer 6 TRGO event"] TIM6_TRGO = 13, #[doc = "14: Timer 15 TRGO event"] TIM15_TRGO = 14, #[doc = "15: Timer 3 CC4 event"] TIM3_CC4 = 15, } impl From<EXTSEL_A> for u8 { #[inline(always)] fn from(variant: EXTSEL_A) -> Self { variant as _ } } #[doc = "Reader of field `EXTSEL`"] pub type EXTSEL_R = crate::R<u8, EXTSEL_A>; impl EXTSEL_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, EXTSEL_A> { use crate::Variant::*; match self.bits { 5 => Val(EXTSEL_A::TIM4_CC4), 7 => Val(EXTSEL_A::TIM8_TRGO), 8 => Val(EXTSEL_A::TIM8_TRGO2), 12 => Val(EXTSEL_A::TIM4_TRGO), 16 => Val(EXTSEL_A::HRTIM1_ADCTRG1), 17 => 
Val(EXTSEL_A::HRTIM1_ADCTRG3), 18 => Val(EXTSEL_A::LPTIM1_OUT), 19 => Val(EXTSEL_A::LPTIM2_OUT), 20 => Val(EXTSEL_A::LPTIM3_OUT), 0 => Val(EXTSEL_A::TIM1_CC1), 1 => Val(EXTSEL_A::TIM1_CC2), 2 => Val(EXTSEL_A::TIM1_CC3), 3 => Val(EXTSEL_A::TIM2_CC2), 4 => Val(EXTSEL_A::TIM3_TRGO), 6 => Val(EXTSEL_A::EXTI11), 9 => Val(EXTSEL_A::TIM1_TRGO), 10 => Val(EXTSEL_A::TIM1_TRGO2), 11 => Val(EXTSEL_A::TIM2_TRGO), 13 => Val(EXTSEL_A::TIM6_TRGO), 14 => Val(EXTSEL_A::TIM15_TRGO), 15 => Val(EXTSEL_A::TIM3_CC4), i => Res(i), } } #[doc = "Checks if the value of the field is `TIM4_CC4`"] #[inline(always)] pub fn is_tim4_cc4(&self) -> bool { *self == EXTSEL_A::TIM4_CC4 } #[doc = "Checks if the value of the field is `TIM8_TRGO`"] #[inline(always)] pub fn is_tim8_trgo(&self) -> bool { *self == EXTSEL_A::TIM8_TRGO } #[doc = "Checks if the value of the field is `TIM8_TRGO2`"] #[inline(always)] pub fn is_tim8_trgo2(&self) -> bool { *self == EXTSEL_A::TIM8_TRGO2 } #[doc = "Checks if the value of the field is `TIM4_TRGO`"] #[inline(always)] pub fn is_tim4_trgo(&self) -> bool { *self == EXTSEL_A::TIM4_TRGO } #[doc = "Checks if the value of the field is `HRTIM1_ADCTRG1`"] #[inline(always)] pub fn is_hrtim1_adctrg1(&self) -> bool { *self == EXTSEL_A::HRTIM1_ADCTRG1 } #[doc = "Checks if the value of the field is `HRTIM1_ADCTRG3`"] #[inline(always)] pub fn is_hrtim1_adctrg3(&self) -> bool { *self == EXTSEL_A::HRTIM1_ADCTRG3 } #[doc = "Checks if the value of the field is `LPTIM1_OUT`"] #[inline(always)] pub fn is_lptim1_out(&self) -> bool { *self == EXTSEL_A::LPTIM1_OUT } #[doc = "Checks if the value of the field is `LPTIM2_OUT`"] #[inline(always)] pub fn is_lptim2_out(&self) -> bool { *self == EXTSEL_A::LPTIM2_OUT } #[doc = "Checks if the value of the field is `LPTIM3_OUT`"] #[inline(always)] pub fn is_lptim3_out(&self) -> bool { *self == EXTSEL_A::LPTIM3_OUT } #[doc = "Checks if the value of the field is `TIM1_CC1`"] #[inline(always)] pub fn is_tim1_cc1(&self) -> bool { *self == 
EXTSEL_A::TIM1_CC1 } #[doc = "Checks if the value of the field is `TIM1_CC2`"] #[inline(always)] pub fn is_tim1_cc2(&self) -> bool { *self == EXTSEL_A::TIM1_CC2 } #[doc = "Checks if the value of the field is `TIM1_CC3`"] #[inline(always)] pub fn is_tim1_cc3(&self) -> bool { *self == EXTSEL_A::TIM1_CC3 } #[doc = "Checks if the value of the field is `TIM2_CC2`"] #[inline(always)] pub fn is_tim2_cc2(&self) -> bool { *self == EXTSEL_A::TIM2_CC2 } #[doc = "Checks if the value of the field is `TIM3_TRGO`"] #[inline(always)] pub fn is_tim3_trgo(&self) -> bool { *self == EXTSEL_A::TIM3_TRGO } #[doc = "Checks if the value of the field is `EXTI11`"] #[inline(always)] pub fn is_exti11(&self) -> bool { *self == EXTSEL_A::EXTI11 } #[doc = "Checks if the value of the field is `TIM1_TRGO`"] #[inline(always)] pub fn is_tim1_trgo(&self) -> bool { *self == EXTSEL_A::TIM1_TRGO } #[doc = "Checks if the value of the field is `TIM1_TRGO2`"] #[inline(always)] pub fn is_tim1_trgo2(&self) -> bool { *self == EXTSEL_A::TIM1_TRGO2 } #[doc = "Checks if the value of the field is `TIM2_TRGO`"] #[inline(always)] pub fn is_tim2_trgo(&self) -> bool { *self == EXTSEL_A::TIM2_TRGO } #[doc = "Checks if the value of the field is `TIM6_TRGO`"] #[inline(always)] pub fn is_tim6_trgo(&self) -> bool { *self == EXTSEL_A::TIM6_TRGO } #[doc = "Checks if the value of the field is `TIM15_TRGO`"] #[inline(always)] pub fn is_tim15_trgo(&self) -> bool { *self == EXTSEL_A::TIM15_TRGO } #[doc = "Checks if the value of the field is `TIM3_CC4`"] #[inline(always)] pub fn is_tim3_cc4(&self) -> bool { *self == EXTSEL_A::TIM3_CC4 } } #[doc = "Write proxy for field `EXTSEL`"] pub struct EXTSEL_W<'a> { w: &'a mut W, } impl<'a> EXTSEL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: EXTSEL_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "Timer 4 CC4 event"] #[inline(always)] pub fn tim4_cc4(self) -> &'a mut W { self.variant(EXTSEL_A::TIM4_CC4) } #[doc = "Timer 
8 TRGO event"] #[inline(always)] pub fn tim8_trgo(self) -> &'a mut W { self.variant(EXTSEL_A::TIM8_TRGO) } #[doc = "Timer 8 TRGO2 event"] #[inline(always)] pub fn tim8_trgo2(self) -> &'a mut W { self.variant(EXTSEL_A::TIM8_TRGO2) } #[doc = "Timer 4 TRGO event"] #[inline(always)] pub fn tim4_trgo(self) -> &'a mut W { self.variant(EXTSEL_A::TIM4_TRGO) } #[doc = "HRTIM1_ADCTRG1 event"] #[inline(always)] pub fn hrtim1_adctrg1(self) -> &'a mut W { self.variant(EXTSEL_A::HRTIM1_ADCTRG1) } #[doc = "HRTIM1_ADCTRG3 event"] #[inline(always)] pub fn hrtim1_adctrg3(self) -> &'a mut W { self.variant(EXTSEL_A::HRTIM1_ADCTRG3) } #[doc = "LPTIM1_OUT event"] #[inline(always)] pub fn lptim1_out(self) -> &'a mut W { self.variant(EXTSEL_A::LPTIM1_OUT) } #[doc = "LPTIM2_OUT event"] #[inline(always)] pub fn lptim2_out(self) -> &'a mut W { self.variant(EXTSEL_A::LPTIM2_OUT) } #[doc = "LPTIM3_OUT event"] #[inline(always)] pub fn lptim3_out(self) -> &'a mut W { self.variant(EXTSEL_A::LPTIM3_OUT) } #[doc = "Timer 1 CC1 event"] #[inline(always)] pub fn tim1_cc1(self) -> &'a mut W { self.variant(EXTSEL_A::TIM1_CC1) } #[doc = "Timer 1 CC2 event"] #[inline(always)] pub fn tim1_cc2(self) -> &'a mut W { self.variant(EXTSEL_A::TIM1_CC2) } #[doc = "Timer 1 CC3 event"] #[inline(always)] pub fn tim1_cc3(self) -> &'a mut W { self.variant(EXTSEL_A::TIM1_CC3) } #[doc = "Timer 2 CC2 event"] #[inline(always)] pub fn tim2_cc2(self) -> &'a mut W { self.variant(EXTSEL_A::TIM2_CC2) } #[doc = "Timer 3 TRGO event"] #[inline(always)] pub fn tim3_trgo(self) -> &'a mut W { self.variant(EXTSEL_A::TIM3_TRGO) } #[doc = "EXTI line 11"] #[inline(always)] pub fn exti11(self) -> &'a mut W { self.variant(EXTSEL_A::EXTI11) } #[doc = "Timer 1 TRGO event"] #[inline(always)] pub fn tim1_trgo(self) -> &'a mut W { self.variant(EXTSEL_A::TIM1_TRGO) } #[doc = "Timer 1 TRGO2 event"] #[inline(always)] pub fn tim1_trgo2(self) -> &'a mut W { self.variant(EXTSEL_A::TIM1_TRGO2) } #[doc = "Timer 2 TRGO event"] #[inline(always)] pub fn 
tim2_trgo(self) -> &'a mut W { self.variant(EXTSEL_A::TIM2_TRGO) } #[doc = "Timer 6 TRGO event"] #[inline(always)] pub fn tim6_trgo(self) -> &'a mut W { self.variant(EXTSEL_A::TIM6_TRGO) } #[doc = "Timer 15 TRGO event"] #[inline(always)] pub fn tim15_trgo(self) -> &'a mut W { self.variant(EXTSEL_A::TIM15_TRGO) } #[doc = "Timer 3 CC4 event"] #[inline(always)] pub fn tim3_cc4(self) -> &'a mut W { self.variant(EXTSEL_A::TIM3_CC4) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 5)) | (((value as u32) & 0x1f) << 5); self.w } } #[doc = "ADC data resolution\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum RES_A { #[doc = "0: 16-bit resolution"] SIXTEENBIT = 0, #[doc = "1: 14-bit resolution"] FOURTEENBIT = 1, #[doc = "2: 12-bit resolution"] TWELVEBIT = 2, #[doc = "3: 10-bit resolution"] TENBIT = 3, #[doc = "4: 8-bit resolution"] EIGHTBIT = 4, } impl From<RES_A> for u8 { #[inline(always)] fn from(variant: RES_A) -> Self { variant as _ } } #[doc = "Reader of field `RES`"] pub type RES_R = crate::R<u8, RES_A>; impl RES_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, RES_A> { use crate::Variant::*; match self.bits { 0 => Val(RES_A::SIXTEENBIT), 1 => Val(RES_A::FOURTEENBIT), 2 => Val(RES_A::TWELVEBIT), 3 => Val(RES_A::TENBIT), 4 => Val(RES_A::EIGHTBIT), i => Res(i), } } #[doc = "Checks if the value of the field is `SIXTEENBIT`"] #[inline(always)] pub fn is_sixteen_bit(&self) -> bool { *self == RES_A::SIXTEENBIT } #[doc = "Checks if the value of the field is `FOURTEENBIT`"] #[inline(always)] pub fn is_fourteen_bit(&self) -> bool { *self == RES_A::FOURTEENBIT } #[doc = "Checks if the value of the field is `TWELVEBIT`"] #[inline(always)] pub fn is_twelve_bit(&self) -> bool { *self == RES_A::TWELVEBIT } #[doc = "Checks if the value of the field is `TENBIT`"] #[inline(always)] pub fn 
is_ten_bit(&self) -> bool { *self == RES_A::TENBIT } #[doc = "Checks if the value of the field is `EIGHTBIT`"] #[inline(always)] pub fn is_eight_bit(&self) -> bool { *self == RES_A::EIGHTBIT } } #[doc = "Write proxy for field `RES`"] pub struct RES_W<'a> { w: &'a mut W, } impl<'a> RES_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RES_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "16-bit resolution"] #[inline(always)] pub fn sixteen_bit(self) -> &'a mut W { self.variant(RES_A::SIXTEENBIT) } #[doc = "14-bit resolution"] #[inline(always)] pub fn fourteen_bit(self) -> &'a mut W { self.variant(RES_A::FOURTEENBIT) } #[doc = "12-bit resolution"] #[inline(always)] pub fn twelve_bit(self) -> &'a mut W { self.variant(RES_A::TWELVEBIT) } #[doc = "10-bit resolution"] #[inline(always)] pub fn ten_bit(self) -> &'a mut W { self.variant(RES_A::TENBIT) } #[doc = "8-bit resolution"] #[inline(always)] pub fn eight_bit(self) -> &'a mut W { self.variant(RES_A::EIGHTBIT) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 2)) | (((value as u32) & 0x07) << 2); self.w } } #[doc = "ADC DMA transfer enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum DMNGT_A { #[doc = "0: Store output data in DR only"] DR = 0, #[doc = "1: DMA One Shot Mode selected"] DMA_ONESHOT = 1, #[doc = "2: DFSDM mode selected"] DFSDM = 2, #[doc = "3: DMA Circular Mode selected"] DMA_CIRCULAR = 3, } impl From<DMNGT_A> for u8 { #[inline(always)] fn from(variant: DMNGT_A) -> Self { variant as _ } } #[doc = "Reader of field `DMNGT`"] pub type DMNGT_R = crate::R<u8, DMNGT_A>; impl DMNGT_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DMNGT_A { match self.bits { 0 => DMNGT_A::DR, 1 => DMNGT_A::DMA_ONESHOT, 2 => DMNGT_A::DFSDM, 3 => DMNGT_A::DMA_CIRCULAR, _ => unreachable!(), } 
} #[doc = "Checks if the value of the field is `DR`"] #[inline(always)] pub fn is_dr(&self) -> bool { *self == DMNGT_A::DR } #[doc = "Checks if the value of the field is `DMA_ONESHOT`"] #[inline(always)] pub fn is_dma_one_shot(&self) -> bool { *self == DMNGT_A::DMA_ONESHOT } #[doc = "Checks if the value of the field is `DFSDM`"] #[inline(always)] pub fn is_dfsdm(&self) -> bool { *self == DMNGT_A::DFSDM } #[doc = "Checks if the value of the field is `DMA_CIRCULAR`"] #[inline(always)] pub fn is_dma_circular(&self) -> bool { *self == DMNGT_A::DMA_CIRCULAR } } #[doc = "Write proxy for field `DMNGT`"] pub struct DMNGT_W<'a> { w: &'a mut W, } impl<'a> DMNGT_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: DMNGT_A) -> &'a mut W { { self.bits(variant.into()) } } #[doc = "Store output data in DR only"] #[inline(always)] pub fn dr(self) -> &'a mut W { self.variant(DMNGT_A::DR) } #[doc = "DMA One Shot Mode selected"] #[inline(always)] pub fn dma_one_shot(self) -> &'a mut W { self.variant(DMNGT_A::DMA_ONESHOT) } #[doc = "DFSDM mode selected"] #[inline(always)] pub fn dfsdm(self) -> &'a mut W { self.variant(DMNGT_A::DFSDM) } #[doc = "DMA Circular Mode selected"] #[inline(always)] pub fn dma_circular(self) -> &'a mut W { self.variant(DMNGT_A::DMA_CIRCULAR) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03); self.w } } impl R { #[doc = "Bit 31 - ADC group injected contexts queue disable"] #[inline(always)] pub fn jqdis(&self) -> JQDIS_R { JQDIS_R::new(((self.bits >> 31) & 0x01) != 0) } #[doc = "Bits 26:30 - ADC analog watchdog 1 monitored channel selection"] #[inline(always)] pub fn awd1ch(&self) -> AWD1CH_R { AWD1CH_R::new(((self.bits >> 26) & 0x1f) as u8) } #[doc = "Bit 25 - ADC group injected automatic trigger mode"] #[inline(always)] pub fn jauto(&self) -> JAUTO_R { JAUTO_R::new(((self.bits >> 25) & 0x01) != 0) } 
#[doc = "Bit 24 - ADC analog watchdog 1 enable on scope ADC group injected"] #[inline(always)] pub fn jawd1en(&self) -> JAWD1EN_R { JAWD1EN_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 23 - ADC analog watchdog 1 enable on scope ADC group regular"] #[inline(always)] pub fn awd1en(&self) -> AWD1EN_R { AWD1EN_R::new(((self.bits >> 23) & 0x01) != 0) } #[doc = "Bit 22 - ADC analog watchdog 1 monitoring a single channel or all channels"] #[inline(always)] pub fn awd1sgl(&self) -> AWD1SGL_R { AWD1SGL_R::new(((self.bits >> 22) & 0x01) != 0) } #[doc = "Bit 21 - ADC group injected contexts queue mode"] #[inline(always)] pub fn jqm(&self) -> JQM_R { JQM_R::new(((self.bits >> 21) & 0x01) != 0) } #[doc = "Bit 20 - ADC group injected sequencer discontinuous mode"] #[inline(always)] pub fn jdiscen(&self) -> JDISCEN_R { JDISCEN_R::new(((self.bits >> 20) & 0x01) != 0) } #[doc = "Bits 17:19 - ADC group regular sequencer discontinuous number of ranks"] #[inline(always)] pub fn discnum(&self) -> DISCNUM_R { DISCNUM_R::new(((self.bits >> 17) & 0x07) as u8) } #[doc = "Bit 16 - ADC group regular sequencer discontinuous mode"] #[inline(always)] pub fn discen(&self) -> DISCEN_R { DISCEN_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 14 - ADC low power auto wait"] #[inline(always)] pub fn autdly(&self) -> AUTDLY_R { AUTDLY_R::new(((self.bits >> 14) & 0x01) != 0) } #[doc = "Bit 13 - ADC group regular continuous conversion mode"] #[inline(always)] pub fn cont(&self) -> CONT_R { CONT_R::new(((self.bits >> 13) & 0x01) != 0) } #[doc = "Bit 12 - ADC group regular overrun configuration"] #[inline(always)] pub fn ovrmod(&self) -> OVRMOD_R { OVRMOD_R::new(((self.bits >> 12) & 0x01) != 0) } #[doc = "Bits 10:11 - ADC group regular external trigger polarity"] #[inline(always)] pub fn exten(&self) -> EXTEN_R { EXTEN_R::new(((self.bits >> 10) & 0x03) as u8) } #[doc = "Bits 5:9 - ADC group regular external trigger source"] #[inline(always)] pub fn extsel(&self) -> EXTSEL_R { 
EXTSEL_R::new(((self.bits >> 5) & 0x1f) as u8) } #[doc = "Bits 2:4 - ADC data resolution"] #[inline(always)] pub fn res(&self) -> RES_R { RES_R::new(((self.bits >> 2) & 0x07) as u8) } #[doc = "Bits 0:1 - ADC DMA transfer enable"] #[inline(always)] pub fn dmngt(&self) -> DMNGT_R { DMNGT_R::new((self.bits & 0x03) as u8) } } impl W { #[doc = "Bit 31 - ADC group injected contexts queue disable"] #[inline(always)] pub fn jqdis(&mut self) -> JQDIS_W { JQDIS_W { w: self } } #[doc = "Bits 26:30 - ADC analog watchdog 1 monitored channel selection"] #[inline(always)] pub fn awd1ch(&mut self) -> AWD1CH_W { AWD1CH_W { w: self } } #[doc = "Bit 25 - ADC group injected automatic trigger mode"] #[inline(always)] pub fn jauto(&mut self) -> JAUTO_W { JAUTO_W { w: self } } #[doc = "Bit 24 - ADC analog watchdog 1 enable on scope ADC group injected"] #[inline(always)] pub fn jawd1en(&mut self) -> JAWD1EN_W { JAWD1EN_W { w: self } } #[doc = "Bit 23 - ADC analog watchdog 1 enable on scope ADC group regular"] #[inline(always)] pub fn awd1en(&mut self) -> AWD1EN_W { AWD1EN_W { w: self } } #[doc = "Bit 22 - ADC analog watchdog 1 monitoring a single channel or all channels"] #[inline(always)] pub fn awd1sgl(&mut self) -> AWD1SGL_W { AWD1SGL_W { w: self } } #[doc = "Bit 21 - ADC group injected contexts queue mode"] #[inline(always)] pub fn jqm(&mut self) -> JQM_W { JQM_W { w: self } } #[doc = "Bit 20 - ADC group injected sequencer discontinuous mode"] #[inline(always)] pub fn jdiscen(&mut self) -> JDISCEN_W { JDISCEN_W { w: self } } #[doc = "Bits 17:19 - ADC group regular sequencer discontinuous number of ranks"] #[inline(always)] pub fn discnum(&mut self) -> DISCNUM_W { DISCNUM_W { w: self } } #[doc = "Bit 16 - ADC group regular sequencer discontinuous mode"] #[inline(always)] pub fn discen(&mut self) -> DISCEN_W { DISCEN_W { w: self } } #[doc = "Bit 14 - ADC low power auto wait"] #[inline(always)] pub fn autdly(&mut self) -> AUTDLY_W { AUTDLY_W { w: self } } #[doc = "Bit 13 - ADC group 
regular continuous conversion mode"] #[inline(always)] pub fn cont(&mut self) -> CONT_W { CONT_W { w: self } } #[doc = "Bit 12 - ADC group regular overrun configuration"] #[inline(always)] pub fn ovrmod(&mut self) -> OVRMOD_W { OVRMOD_W { w: self } } #[doc = "Bits 10:11 - ADC group regular external trigger polarity"] #[inline(always)] pub fn exten(&mut self) -> EXTEN_W { EXTEN_W { w: self } } #[doc = "Bits 5:9 - ADC group regular external trigger source"] #[inline(always)] pub fn extsel(&mut self) -> EXTSEL_W { EXTSEL_W { w: self } } #[doc = "Bits 2:4 - ADC data resolution"] #[inline(always)] pub fn res(&mut self) -> RES_W { RES_W { w: self } } #[doc = "Bits 0:1 - ADC DMA transfer enable"] #[inline(always)] pub fn dmngt(&mut self) -> DMNGT_W { DMNGT_W { w: self } } }
use crate::{SMResult, AST};
use sm_algorithm::prime_sum_i;

/// Stub for the `floor` operation; panics with `unimplemented!` for every input.
pub fn floor(expr: &AST) -> SMResult<AST> {
    let _ = expr;
    unimplemented!()
}

/// Stub for the `ceiling` operation; panics with `unimplemented!` for every input.
pub fn ceiling(expr: &AST) -> SMResult<AST> {
    let _ = expr;
    unimplemented!()
}

/// Stub for the `round` operation; panics with `unimplemented!` for every input.
pub fn round(expr: &AST) -> SMResult<AST> {
    let _ = expr;
    unimplemented!()
}

/// Stub for the `integer_part` operation; panics with `unimplemented!` for every input.
pub fn integer_part(expr: &AST) -> SMResult<AST> {
    let _ = expr;
    unimplemented!()
}

/// Stub for the `chop` operation; panics with `unimplemented!` for every input.
pub fn chop(expr: &AST) -> SMResult<AST> {
    let _ = expr;
    unimplemented!()
}

/// Stub for the `factor_integer` operation; panics with `unimplemented!` for every input.
pub fn factor_integer(expr: &AST) -> SMResult<AST> {
    let _ = expr;
    unimplemented!()
}

/// Computes the sum-of-primes operation.
///
/// - `AST::Integer(n)`: delegates to `sm_algorithm::prime_sum_i`, propagating its error.
/// - `AST::Decimal(n)`: returned as a clone, unchanged.
/// - any other variant: panics with `unimplemented!`.
pub fn prime_sum(expr: &AST) -> SMResult<AST> {
    match expr {
        AST::Integer(value) => {
            let total = prime_sum_i(value)?;
            Ok(AST::Integer(total))
        }
        AST::Decimal(value) => Ok(AST::Decimal(value.clone())),
        _ => unimplemented!(),
    }
}
mod player;

use rendering::{Color, Renderer};

/// Game state: a single brightness value cycling from 0.0 to 1.0.
pub struct Game {
    // Current glow intensity in [0.0, 1.0); advanced by `update`.
    glow: f64,
}

impl Game {
    /// Creates a game with the glow starting at zero.
    pub fn new() -> Self {
        Self { glow: 0.0 }
    }

    /// Advances the glow by `dt` (interpreted as milliseconds — dt/1000 per
    /// tick), wrapping back into [0.0, 1.0).
    pub fn update(&mut self, dt: f64) {
        self.glow = (self.glow + dt / 1000.0) % 1.0;
    }

    /// Clears the screen to a gray whose intensity equals the current glow.
    pub fn render(&self, renderer: &Renderer) {
        let level = self.glow;
        renderer.clear(Color {
            r: level,
            g: level,
            b: level,
        });
    }
}
use argh::FromArgs;
#[macro_use]
extern crate lalrpop_util;
#[macro_use]
extern crate lazy_static;

pub mod errors;
pub mod flat;
pub mod flatten;
pub mod ir;
lalrpop_mod!(pub grammar);
pub mod lexer;
pub mod parser;
pub mod raw;
pub mod source_file;
pub mod span;
pub mod token;
pub mod typeshape;

use flat::{add_library, resolve_library, validate_latest_library};
use flatten::flatten_files;
use parser::parse_files;
use raw::validate_files;
use std::fs;

// NOTE: the doc comments on `Args` double as `argh`'s --help text, so they are
// left exactly as written.
#[derive(FromArgs)]
/// The FIDL compiler, rust edition
struct Args {
    #[argh(option)]
    /// a comma separated list of fidl files corresponding to a single library
    files: Vec<String>,
    #[argh(option)]
    /// path to write JSON IR output
    json: Option<String>,
}

// Entry point: parse CLI args, compile the listed libraries, then either write
// the JSON IR to the requested path or print a success message. All collected
// compile errors are printed on failure.
fn main() {
    let args: Args = argh::from_env();
    match compile(args.files) {
        Ok(ir) => match args.json {
            // Pretty-printed JSON IR written to the output path; serialization
            // failure or write failure aborts with a panic.
            Some(path) => fs::write(path, serde_json::to_string_pretty(&ir).unwrap())
                .expect("couldn't write to out file"),
            None => println!("no errors found!"),
        },
        Err(errs) => errs.print_errors(),
    };
}

// Compiles a sequence of libraries in order. Each element of `files` is itself
// a comma-separated list of filenames making up ONE library; earlier libraries
// are available for resolution by later ones via `libs`.
//
// Returns the IR of the accumulated libraries, or the full error context.
// Errors from validation passes accumulate in `errors` and are only checked at
// the end; parse failures and resolution failures abort immediately.
fn compile(files: Vec<String>) -> Result<ir::Library, errors::ErrorCx> {
    let mut libs = flat::Libraries::default();
    let mut errors = errors::ErrorCx::default();
    let mut srcs = source_file::FileMap::new();
    for lib_files in files {
        // Split one library's comma-separated file list into filenames.
        let filenames: Vec<String> = lib_files.split(',').map(str::to_string).collect();
        let raw_asts = parse_files(&mut srcs, &mut errors, filenames);
        // No ASTs at all means parsing failed outright; bail with the errors.
        if raw_asts.is_empty() {
            return Err(errors);
        }
        validate_files(&srcs, &mut errors, &raw_asts);
        let unresolved_lib = flatten_files(&srcs, &mut errors, raw_asts);
        // Name resolution can fail hard; fold its errors in and abort.
        let resolved_lib = match resolve_library(&srcs, unresolved_lib, &libs) {
            Ok(lib) => lib,
            Err(errs) => {
                errors.extend(errs);
                return Err(errors);
            }
        };
        add_library(&mut errors, &mut libs, resolved_lib);
        // Post-add validation runs against the library just registered.
        validate_latest_library(&srcs, &mut errors, &libs);
    }
    if errors.is_empty() {
        Ok(libs.to_ir())
    } else {
        Err(errors)
    }
}
csci114_20171030code1.Savings csci114_20171030code1.BankAccount csci114_20171030code1.Csci114_20171030code1 csci114_20171030code1.StudentSavings
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::sync::Arc;

use common_exception::ErrorCode;
use common_exception::Result;
use common_meta_app::principal::UserSetting;
use common_meta_kvapi::kvapi;
use common_meta_kvapi::kvapi::UpsertKVReq;
use common_meta_types::IntoSeqV;
use common_meta_types::MatchSeq;
use common_meta_types::MatchSeqExt;
use common_meta_types::MetaError;
use common_meta_types::Operation;
use common_meta_types::SeqV;

use crate::setting::SettingApi;

// Root key prefix under which all per-tenant settings live in the meta store.
static USER_SETTING_API_KEY_PREFIX: &str = "__fd_settings";

// Meta-KV-backed implementation of `SettingApi`. Each setting is stored as a
// JSON-serialized `UserSetting` under the key
// `__fd_settings/<tenant>/<setting name>`.
pub struct SettingMgr {
    kv_api: Arc<dyn kvapi::KVApi<Error = MetaError>>,
    // Precomputed "__fd_settings/<tenant>" key prefix for this tenant.
    setting_prefix: String,
}

impl SettingMgr {
    // Builds a manager scoped to `tenant`; never actually fails (the Result
    // wrapper mirrors sibling manager constructors).
    #[allow(dead_code)]
    pub fn create(kv_api: Arc<dyn kvapi::KVApi<Error = MetaError>>, tenant: &str) -> Result<Self> {
        Ok(SettingMgr {
            kv_api,
            setting_prefix: format!("{}/{}", USER_SETTING_API_KEY_PREFIX, tenant),
        })
    }
}

#[async_trait::async_trait]
impl SettingApi for SettingMgr {
    // Creates or overwrites a setting, returning the sequence number of the
    // stored record (whether freshly added or already present).
    async fn set_setting(&self, setting: UserSetting) -> Result<u64> {
        // Upsert.
        // MatchSeq::GE(0) matches any existing seq, so this writes
        // unconditionally.
        let seq = MatchSeq::GE(0);
        let val = Operation::Update(serde_json::to_vec(&setting)?);
        let key = format!("{}/{}", self.setting_prefix, setting.name);
        let upsert = self
            .kv_api
            .upsert_kv(UpsertKVReq::new(&key, seq, val, None));
        // `added_or_else` splits "newly added" from "already existed"; both
        // arms just surface the resulting seq.
        let res = upsert.await?.added_or_else(|v| v);
        match res {
            Ok(added) => Ok(added.seq),
            Err(existing) => Ok(existing.seq),
        }
    }

    // Lists every setting stored under this tenant's prefix, deserializing
    // each value from JSON. A single corrupt value fails the whole call.
    async fn get_settings(&self) -> Result<Vec<UserSetting>> {
        let values = self.kv_api.prefix_list_kv(&self.setting_prefix).await?;
        let mut settings = Vec::with_capacity(values.len());
        for (_, value) in values {
            let setting = serde_json::from_slice::<UserSetting>(&value.data)?;
            settings.push(setting);
        }
        Ok(settings)
    }

    // Fetches one setting by name, enforcing the caller-supplied sequence
    // predicate `seq` against the stored record.
    async fn get_setting(&self, name: &str, seq: MatchSeq) -> Result<SeqV<UserSetting>> {
        let key = format!("{}/{}", self.setting_prefix, name);
        let kv_api = self.kv_api.clone();
        let get_kv = async move { kv_api.get_kv(&key).await };
        let res = get_kv.await?;
        let seq_value = res
            .ok_or_else(|| ErrorCode::UnknownVariable(format!("Unknown setting {}", name)))?;
        // NOTE(review): a seq mismatch is reported as "Unknown setting", the
        // same error as a missing key — the real mismatch cause is dropped.
        // Looks intentional (callers only distinguish found/not-found) but
        // worth confirming.
        match seq.match_seq(&seq_value) {
            Ok(_) => Ok(seq_value.into_seqv()?),
            Err(_) => Err(ErrorCode::UnknownVariable(format!(
                "Unknown setting {}",
                name
            ))),
        }
    }

    // Deletes one setting by name subject to the sequence predicate `seq`.
    // Success requires that a previous value existed and the delete removed it;
    // anything else (missing key, seq mismatch) maps to UnknownVariable.
    async fn drop_setting(&self, name: &str, seq: MatchSeq) -> Result<()> {
        let key = format!("{}/{}", self.setting_prefix, name);
        let kv_api = self.kv_api.clone();
        let upsert_kv = async move {
            kv_api
                .upsert_kv(UpsertKVReq::new(&key, seq, Operation::Delete, None))
                .await
        };
        let res = upsert_kv.await?;
        // prev=Some + result=None is the signature of an actual deletion.
        if res.prev.is_some() && res.result.is_none() {
            Ok(())
        } else {
            Err(ErrorCode::UnknownVariable(format!(
                "Unknown setting {}",
                name
            )))
        }
    }
}
use std::io::Read;

/// Counts the black tiles after flipping one hex tile per input line
/// (Advent of Code 2020 day 24 part 1).
///
/// Each line is a sequence of moves on a hex grid encoded without separators:
/// `e`, `w`, `ne`, `nw`, `se`, `sw`. The walk is folded into axial coordinates
/// where e=(-1,+1), w=(+1,-1), ne=(0,+1), sw=(0,-1), se=(-1,0), nw=(+1,0);
/// an `n`/`s` is remembered as a pending prefix until the following `e`/`w`
/// completes the move. The tile at the final coordinate of each line is
/// toggled; the return value is how many tiles end up black (toggled an odd
/// number of times).
fn solve(input: &str) -> usize {
    let mut black = std::collections::HashSet::new();
    for line in input.lines() {
        let (x, y, _) = line
            .chars()
            .fold((0i64, 0i64, None), |(x, y, previous), character| {
                match character {
                    // 'n'/'s' only set the pending prefix; no movement yet.
                    'n' => (x, y, Some('n')),
                    's' => (x, y, Some('s')),
                    'e' => match previous {
                        Some('n') => (x, y + 1, None),
                        Some('s') => (x - 1, y, None),
                        Some(_) => (x, y, None),
                        None => (x - 1, y + 1, None),
                    },
                    'w' => match previous {
                        Some('n') => (x + 1, y, None),
                        Some('s') => (x, y - 1, None),
                        Some(_) => (x, y, None),
                        None => (x + 1, y - 1, None),
                    },
                    // Any other character is ignored, preserving the prefix.
                    _ => (x, y, previous),
                }
            });
        // Toggle with a single lookup: insert returns false when the tile was
        // already black, in which case we flip it back to white.
        if !black.insert((x, y)) {
            black.remove(&(x, y));
        }
    }
    black.len()
}

/// Reads the puzzle input from stdin and prints the black-tile count.
fn main() {
    let mut input = String::new();
    std::io::stdin().read_to_string(&mut input).unwrap();
    println!("{}", solve(&input));
}
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use alloc::sync::Arc; use alloc::string::String; use alloc::string::ToString; use core::fmt; use super::super::fs::host::hostinodeop::*; use super::super::qlib::common::*; use super::super::qlib::linux_def::*; use super::super::task::*; use super::super::qlib::addr::*; //use super::super::task::*; use super::*; use super::super::qlib::range::*; use super::super::qlib::mem::areaset::*; use super::mm::*; use super::arch::*; // map32Start/End are the bounds to which MAP_32BIT mappings are constrained, // and are equivalent to Linux's MAP32_BASE and MAP32_MAX respectively. pub const MAP32_START: u64 = 0x40000000; pub const MAP32_END: u64 = 0x80000000; #[derive(Clone, Default, Debug)] pub struct FindAvailableOpts { // These fields are equivalent to those in MMapOpts, except that: // // - Addr must be page-aligned. // // - Unmap allows existing guard pages in the returned range. pub Addr: u64, pub Fixed: bool, pub Unmap: bool, pub Map32Bit: bool, pub Kernel: bool, } impl MemoryManager { pub fn FindLowestAvailableLocked(&self, length: u64, alignment: u64, bounds: &Range) -> Result<u64> { let mapping = self.mapping.lock(); let mut gap = mapping.vmas.LowerBoundGap(bounds.Start()); while gap.Ok() && gap.Range().Start() < bounds.End() { let gr = gap.Range().Intersect(bounds); if gr.Len() > length { // Can we shift up to match the alignment? 
let offset = gr.Start() % alignment; if offset != 0 { if gr.Len() >= length + alignment - offset { return Ok(gr.Start() + (alignment - offset)) } } // Either aligned perfectly, or can't align it. return Ok(gr.Start()) } let tmp = gap.NextGap(); gap = tmp; } return Err(Error::SysError(SysErr::ENOMEM)); } pub fn FindHighestAvailableLocked(&self, length: u64, alignment: u64, bounds: &Range) -> Result<u64> { let mapping = self.mapping.lock(); let mut gap = mapping.vmas.UpperBoundGap(bounds.End()); while gap.Ok() && gap.Range().End() > bounds.Start() { let gr = gap.Range().Intersect(bounds); if gr.Len() > length { // Can we shift up to match the alignment? let start = gr.End() - length; let offset = gr.Start() % alignment; if offset != 0 { if gr.Start() >= start - offset { return Ok(start - offset) } } // Either aligned perfectly, or can't align it. return Ok(start) } let tmp = gap.NextGap(); gap = tmp; } return Err(Error::SysError(SysErr::ENOMEM)); } // getVMAsLocked ensures that vmas exist for all addresses in ar, and support // access of type (at, ignorePermissions). It returns: // // - An iterator to the vma containing ar.Start. If no vma contains ar.Start, // the iterator is unspecified. // // - An iterator to the gap after the last vma containing an address in ar. If // vmas exist for no addresses in ar, the iterator is to a gap that begins // before ar.Start. // // - An error that is non-nil if vmas exist for only a subset of ar. // // Preconditions: mm.mappingMu must be locked for reading; it may be // temporarily unlocked. ar.Length() != 0. 
// Walks the vma set covering `r`, verifying that every address is mapped and
// that each vma grants the access `at` (or the max permissions, when
// `ignorePermissions` is set). Returns EFAULT on an unmapped hole, EPERM on a
// permission failure; see the getVMAsLocked contract documented above.
pub fn GetVMAsLocked(&self, r: &Range, at: &AccessType, ignorePermissions: bool) -> (AreaSeg<VMA>, AreaGap<VMA>, Result<()>) {
    let mapping = self.mapping.lock();
    let (mut vbegin, mut vgap) = mapping.vmas.Find(r.Start());
    if !vbegin.Ok() {
        vbegin = vgap.NextSeg()
    } else {
        vgap = vbegin.PrevGap();
    }

    let mut addr = r.Start();
    let mut vseg = vbegin.clone();
    while vseg.Ok() {
        // Loop invariants: vgap = vseg.PrevGap(); addr < vseg.End().
        if addr < vseg.Range().Start() {
            // Hole between the previous vma and this one: subset of r unmapped.
            return (vbegin, vgap, Err(Error::SysError(SysErr::EFAULT)));
        }

        {
            let vseglock = vseg.lock();
            let vma = vseglock.Value();
            let mut perms = vma.effectivePerms;
            if ignorePermissions {
                perms = vma.maxPerms;
            }

            if !perms.SupersetOf(at) {
                return (vbegin, vgap, Err(Error::SysError(SysErr::EPERM)));
            }
        }

        addr = vseg.Range().End();
        vgap = vseg.NextGap();

        if addr >= r.End() {
            return (vbegin, vgap, Ok(()))
        }

        vseg = vseg.NextSeg();
    }

    // Ran out of vmas before ar.End.
    return (vbegin, vgap, Err(Error::SysError(SysErr::EFAULT)));
}

// Picks a start address for a new mapping of `length` bytes, honoring the
// Fixed/Unmap/Map32Bit/Kernel options. Tries the caller's suggested address
// first, then falls back to a bottom-up or top-down scan per the mm layout.
pub fn FindAvailableLocked(&self, length: u64, opts: &mut FindAvailableOpts) -> Result<u64> {
    if opts.Fixed {
        // MAP_FIXED overrides MAP_32BIT: the exact address wins.
        opts.Map32Bit = false;
    }

    let mut allowedRange = if opts.Kernel {
        Range::New(0, !0)
    } else {
        self.ApplicationAddrRange()
    };

    if opts.Map32Bit {
        allowedRange = allowedRange.Intersect(&Range::New(MAP32_START, MAP32_END - MAP32_START));
    }

    // Does the provided suggestion work?
    match Addr(opts.Addr).ToRange(length) {
        Ok(r) => {
            if allowedRange.IsSupersetOf(&r) {
                if opts.Unmap {
                    // Caller will unmap whatever is there, so any in-range
                    // suggestion is acceptable.
                    return Ok(r.Start());
                }

                let vgap = self.mapping.lock().vmas.FindGap(r.Start());
                if vgap.Ok() && vgap.AvailableRange().IsSupersetOf(&r) {
                    return Ok(r.Start())
                }
            }
        }
        Err(_) => ()
    }

    // Fixed mappings accept only the requested address.
    if opts.Fixed {
        return Err(Error::SysError(SysErr::ENOMEM))
    }

    // Prefer hugepage alignment if a hugepage or more is requested.
    let mut alignment = MemoryDef::PAGE_SIZE;
    if length > MemoryDef::HUGE_PAGE_SIZE {
        alignment = MemoryDef::HUGE_PAGE_SIZE;
    }

    if opts.Map32Bit {
        return self.FindLowestAvailableLocked(length, alignment, &allowedRange);
    }

    let layout = *self.layout.lock();
    if layout.DefaultDirection == MMAP_BOTTOM_UP {
        return self.FindLowestAvailableLocked(length, alignment, &Range::New(layout.BottomUpBase, layout.MaxAddr - layout.BottomUpBase));
    }

    return self.FindHighestAvailableLocked(length, alignment, &Range::New(layout.MinAddr, layout.TopDownBase - layout.MinAddr));
}

// Allocates an address range, removes any overwritten mappings (MAP_FIXED),
// registers the mappable (if any), and inserts the new VMA into the set.
// Returns the inserted segment and the address range it occupies.
pub fn CreateVMAlocked(&self, _task: &Task, opts: &MMapOpts) -> Result<(AreaSeg<VMA>, Range)> {
    if opts.MaxPerms != opts.MaxPerms.Effective() {
        panic!("Non-effective MaxPerms {:?} cannot be enforced", opts.MaxPerms);
    }

    // Find a useable range.
    let mut findopts = FindAvailableOpts {
        Addr: opts.Addr,
        Fixed: opts.Fixed,
        Unmap: opts.Unmap,
        Map32Bit: opts.Map32Bit,
        Kernel: opts.Kernel,
    };

    let addr = self.FindAvailableLocked(opts.Length, &mut findopts)?;

    let ar = Range::New(addr, opts.Length);

    // todo: Check against RLIMIT_AS.
    /*let mut newUsageAS = self.usageAS + opts.Length;
    if opts.Unmap {
        newUsageAS -= self.vmas.SpanRange(&ar);
    }*/

    // Remove overwritten mappings. This ordering is consistent with Linux:
    // compare Linux's mm/mmap.c:mmap_region() => do_munmap(),
    // file->f_op->mmap().
    if opts.Unmap {
        self.RemoveVMAsLocked(&ar)?;
    }

    let mut mapping = self.mapping.lock();
    let gap = mapping.vmas.FindGap(ar.Start());

    if opts.Mappable.is_some() {
        let mappable = opts.Mappable.clone().unwrap();
        // Writable iff the mapping is shared and max permissions allow writes
        // (i.e. Linux's VM_SHARED-with-write case).
        mappable.AddMapping(self, &ar, opts.Offset, !opts.Private && opts.MaxPerms.Write())?;
    }

    let vma = VMA {
        mappable: opts.Mappable.clone(),
        offset: opts.Offset,
        fixed: opts.Fixed,
        realPerms: opts.Perms,
        effectivePerms: opts.Perms.Effective(),
        maxPerms: opts.MaxPerms,
        private: opts.Private,
        growsDown: opts.GrowsDown,
        dontfork: false,
        mlockMode: opts.MLockMode,
        kernel: opts.Kernel,
        hint: opts.Hint.to_string(),
        id: opts.Mapping.clone(),
        numaPolicy: 0,
        numaNodemask: 0,
    };

    mapping.usageAS += opts.Length;

    let vseg = mapping.vmas.Insert(&gap, &ar, vma);
    let nextvseg = vseg.NextSeg();
    assert!(vseg.Range().End() <= nextvseg.Range().Start(), "vseg end < vseg.next.start");
    return Ok((vseg, ar))
}

//find free seg with enough len
pub fn FindAvailableSeg(&self, _task: &Task, offset: u64, len: u64) -> Result<u64> {
    let ml = self.MappingLock();
    let _ml = ml.write();

    // Purely advisory address: never fixed, never unmapping, full address space.
    let mut findopts = FindAvailableOpts {
        Addr: offset,
        Fixed: false,
        Unmap: false,
        Map32Bit: false,
        Kernel: false,
    };

    let addr = self.FindAvailableLocked(len, &mut findopts)?;
    return Ok(addr);
}
}

// A single virtual memory area: one contiguous mapped range with uniform
// permissions and backing.
#[derive(Clone, Default)]
pub struct VMA {
    pub mappable: Option<HostInodeOp>,
    pub offset: u64, //file offset when the mappable is not null, phyaddr for other
    pub fixed: bool,

    // realPerms are the memory permissions on this vma, as defined by the
    // application.
    pub realPerms: AccessType,

    // effectivePerms are the memory permissions on this vma which are
    // actually used to control access.
    //
    // Invariant: effectivePerms == realPerms.Effective().
    pub effectivePerms: AccessType,

    // maxPerms limits the set of permissions that may ever apply to this
    // memory
    pub maxPerms: AccessType,

    // private is true if this is a MAP_PRIVATE mapping, such that writes to
    // the mapping are propagated to a copy.
    pub private: bool,

    // growsDown is true if the mapping may be automatically extended downward
    // under certain conditions. If growsDown is true, mappable must be nil.
    //
    // There is currently no corresponding growsUp flag; in Linux, the only
    // architectures that can have VM_GROWSUP mappings are ia64, parisc, and
    // metag, none of which we currently support.
    pub growsDown: bool,

    // dontfork is the MADV_DONTFORK setting for this vma configured by madvise().
    pub dontfork: bool,

    pub mlockMode: MLockMode,
    pub kernel: bool,
    pub hint: String,
    pub id: Option<Arc<Mapping>>,

    // numaPolicy is the NUMA policy for this vma set by mbind().
    pub numaPolicy: i32,

    // numaNodemask is the NUMA nodemask for this vma set by mbind().
    pub numaNodemask: u64,
}

impl fmt::Debug for VMA {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("VMA")
            .field("offset", &self.offset)
            .field("realPerms", &self.realPerms)
            .field("effectivePerms", &self.effectivePerms)
            .field("maxPerms", &self.maxPerms)
            .field("private", &self.private)
            .field("growsDown", &self.growsDown)
            .field("kernel", &self.kernel)
            .field("hint", &self.hint)
            .finish()
    }
}

impl VMA {
    // Deep copy of the vma. Note: numaPolicy and numaNodemask are reset to 0
    // rather than copied from self (matches Merge()'s use for merged vmas).
    pub fn Copy(&self) -> Self {
        let copy = VMA {
            mappable: self.mappable.clone(),
            offset: self.offset,
            fixed: self.fixed,
            realPerms: self.realPerms,
            effectivePerms: self.effectivePerms,
            maxPerms: self.maxPerms,
            private: self.private,
            growsDown: self.growsDown,
            dontfork: self.dontfork,
            mlockMode: self.mlockMode,
            kernel: self.kernel,
            hint: self.hint.to_string(),
            id: self.id.clone(),
            numaPolicy: 0,
            numaNodemask: 0,
        };

        return copy
    }

    // canWriteMappableLocked returns true if it is possible for vma.mappable to be
    // written to via this vma, i.e. if it is possible that
    // vma.mappable.Translate(at.Write=true) may be called as a result of this vma.
    // This includes via I/O with usermem.IOOpts.IgnorePermissions = true, such as
    // PTRACE_POKEDATA.
    //
    // canWriteMappableLocked is equivalent to Linux's VM_SHARED.
    pub fn CanWriteMappableLocked(&self) -> bool {
        !self.private && self.maxPerms.Write()
    }
}

impl AreaSeg<VMA> {
    //virtual address to mappable offset
    pub fn MappableOffsetAt(&self, vaddr: u64) -> u64 {
        let entry = self.lock();
        let vma = entry.Value();
        let vstart = entry.range.Start();
        return vma.offset + (vaddr - vstart);
    }

    //virtual address range to mappable range
    pub fn MappableRangeOf(&self, r: &Range) -> Range {
        return Range::New(self.MappableOffsetAt(r.Start()), r.Len())
    }

    pub fn MappableRange(&self) -> Range {
        return self.MappableRangeOf(&self.Range())
    }

    //mappable offset to virtual address
    pub fn AddrRangeAt(&self, offset: u64) -> u64 {
        let entry = self.lock();
        let vma = entry.Value();
        let vstart = entry.range.Start();
        return vstart + (offset - vma.offset);
    }

    //mappable range to virtual range
    pub fn AddrRangeOf(&self, r: &Range) -> Range {
        let start = self.AddrRangeAt(r.Start());
        return Range::New(start, r.Len())
    }

    //find first area which range.end is large than addr
    pub fn SeekNextLowerBound(&self, addr: u64) -> AreaSeg<VMA> {
        let mut seg = self.clone();
        while seg.Ok() && addr >= seg.Range().End() {
            seg = seg.NextSeg();
        }

        return seg;
    }
}

// Guard region reserved below a growsDown vma (stack guard).
pub const GUARD_BYTES: u64 = 256 * MemoryDef::PAGE_SIZE;

impl AreaGap<VMA> {
    // availableRange returns the subset of vgap.Range() in which new vmas may be
    // created without MMapOpts.Unmap == true.
    pub fn AvailableRange(&self) -> Range {
        let r = self.Range();
        let next = self.NextSeg();

        //no next
        if !next.Ok() || !next.lock().Value().growsDown {
            return r
        }

        // Exclude guard pages.
        if r.Len() < GUARD_BYTES {
            // Gap is entirely inside the guard region: nothing available.
            return Range::New(r.Start(), 0);
        }

        return Range::New(r.Start(), r.Len() - GUARD_BYTES)
    }
}

pub fn MinKey() -> u64 {
    return 0;
}

pub fn MaxKey() -> u64 {
    return !0;
}

impl AreaValue for VMA {
    // Two adjacent vmas merge only when every attribute matches and, for
    // file-backed vmas, the file offsets are contiguous.
    fn Merge(&self, r1: &Range, _r2: &Range, vma2: &VMA) -> Option<VMA> {
        let vma1 = self;
        if vma1.mappable.is_some() && vma1.offset + r1.Len() != vma2.offset {
            return None;
        }

        if vma1.mappable != vma2.mappable
            || vma1.realPerms != vma2.realPerms
            || vma1.maxPerms != vma2.maxPerms
            || vma1.effectivePerms != vma2.effectivePerms
            || vma1.private != vma2.private
            || vma1.growsDown != vma2.growsDown
            || vma1.dontfork != vma2.dontfork
            || vma1.mlockMode != vma2.mlockMode
            || vma1.kernel != vma2.kernel
            || vma1.numaPolicy != vma2.numaPolicy
            || vma1.numaNodemask != vma2.numaNodemask
            || vma1.hint != vma2.hint {
            return None;
        }

        return Some(vma1.Copy())
    }

    // Splitting a vma at `split` advances the second half's file offset by the
    // length of the first half.
    fn Split(&self, r: &Range, split: u64) -> (VMA, VMA) {
        let v = self;
        let mut v2 = v.clone();
        v2.offset += split - r.Start();
        return (v.clone(), v2)
    }
}
// Implements Eq.
#[derive(Eq, PartialEq)]
struct A(i32);

// PartialEq is required in order to implement PartialOrd.
#[derive(PartialEq, PartialOrd)]
struct B(f32);

// Implement Clone in order to implement Copy.
#[derive(Copy, Clone)]
struct C;

#[derive(Clone)]
struct D;

#[derive(Debug)]
struct E;

#[derive(Default)]
struct F;

// Import the thread library.
use std::thread;
// Import the Rc library. (for single-threaded use)
use std::rc::Rc;
// Import the Arc library. (for multi-threaded use)
use std::sync::{Arc, Mutex};
use std::sync::mpsc;

/**
 * Main function: a tour of basic Rust features
 * (strings, tuples, arrays, enums, Vec, Box, match,
 * iterators, macros, derives, generics, Drop, threads, channels).
 */
fn main() {
    println!("Hello, world!");

    // String examples
    let s1: String = String::from("Hello, world!");
    let s2: &str = &s1;
    let s3: String = s2.to_string();

    // Tuple example
    let mut t = (1, "2");
    t.0 = 2;
    t.1 = "3";

    // Array examples
    let mut a: [i32; 3] = [0, 1, 2];
    let b: [i32; 3] = [0; 3];
    a[1] = b[1];
    a[2] = b[2];
    print!("{:?}", &a[1..3]);

    // User-defined type example
    struct Person {
        name: String,
        age: u32,
    }

    // Initialize a Person value.
    let p = Person {
        name: String::from("John"),
        age: 8,
    };

    // Enum example
    enum Event {
        Quit,
        KeyDown(u8),
        MouseDown { x: i32, y: i32 },
    }

    // Prepare some enum values.
    let e1 = Event::Quit;
    let e2 = Event::MouseDown { x: 10, y: 10 };

    // Examples of frequently used library types below

    // Option example (represents presence or absence of data);
    // this local declaration shadows std's Option for illustration.
    pub enum Option<T> {
        None,
        Some(T)
    }

    // Vec examples
    let v1 = vec![1, 2, 3, 4, 5];
    // Initialize with five zeros.
    let v2 = vec![0; 5];
    print!("{}", v1[0]);
    // Print every element.
    for element in &v1 {
        print!("{}", element);
    }

    // Box example
    let byte_array = [b'h', b'e', b'l', b'l', b'o'];
    // Call the print function
    print(Box::new(byte_array));

    // match examples below
    enum Color {
        Red,
        Blue,
        Green,
    }

    let c = Color::Red;

    // match statement
    match c {
        Color::Red => println!("Red"),
        Color::Blue => println!("Blue"),
        Color::Green => println!("Green"),
    }

    // Iterator example
    // Prepare an Iter value.
    let it = Iter {
        current: 0,
        max: 10,
    };

    // Loop over the iterator.
    for num in it {
        println!("{}", num);
    }

    // Macros that access resources outside the program
    println!("definded in file: {}", file!());
    println!("definded on line: {}", line!());
    println!("is test: {}", cfg!(unix));
    println!("CARGO_HOME: {}", env!("CARGO_HOME"));

    // Assertion macros (this panics unless run with cargo run --release,
    // because debug_assert!(false) fires in debug builds.)
    assert!(true);
    debug_assert!(false);
    assert_eq!(1, 1);
    debug_assert_eq!(1, 1);
    assert_ne!(1, 0);
    debug_assert_ne!(1, 0);

    // A supports equality comparison
    println!("{:?}", A(0) == A(1));
    // B supports ordering comparison
    println!("{:?}", B(1.0) > B(0.0));
    // C is copied rather than moved
    let c0 = c;
    let _c1 = c0;
    // let _c2 = c0;
    // D can be cloned
    let d0 = D;
    let _d1 = d0.clone();
    // E can be debug-printed
    println!("{:?}", E);
    // F supports Default
    let _f = F::default();

    // Generics examples
    let t1 = make_tuple(1, 2);
    let t2 = make_tuple("Hello", "World!");
    let t3 = make_tuple(vec![1, 2, 3], vec![4, 5, 6]);
    let t4 = make_tuple(3, "years old ");

    // Define a Droppable value inside a block.
    {
        let d = Droppable;
    }
    println!("The Droppable should be released at the end of block.");

    // Threading examples below

    // Prepare a Vec for the join handles.
    let mut handles = Vec::new();
    // Prepare the shared data vector.
    let mut data = Arc::new(Mutex::new(vec![1; 10]));
    // Loop 10 times.
    for x in 0..10 {
        let data_ref = data.clone();
        // Spawn 10 threads.
        handles.push(thread::spawn(move || {
            // Use lock to get a mutable reference to data (for mutual exclusion)
            let mut data = data_ref.lock().unwrap();
            data[x] += 1;
        }));
    }

    // Wait for each thread to finish
    for handle in handles {
        let _ = handle.join();
    }
    dbg!(data);

    // Message passing
    // Prepare the variables.
    let mut handles = Vec::new();
    let mut data = vec![1; 10];
    let mut snd_channels = Vec::new();
    let mut rcv_channels = Vec::new();

    // Loop 10 times.
    for _ in 0..10 {
        // Channel for main -> each thread communication
        let (snd_tx, snd_rx) = mpsc::channel();
        // Channel for each thread -> main communication
        let (rcv_tx, rcv_rx) = mpsc::channel();

        snd_channels.push(snd_tx);
        // FIX: keep the Receiver on the main side; the Sender (rcv_tx) is
        // moved into the thread below. The original pushed rcv_tx here, which
        // is a use-after-move and leaves main calling recv() on a Sender.
        rcv_channels.push(rcv_rx);

        // Spawn 10 threads.
        handles.push(thread::spawn(move || {
            // Receive a value, increment it, and send it back.
            let mut data = snd_rx.recv().unwrap();
            data += 1;
            let _ = rcv_tx.send(data);
        }));
    }

    // Send each thread its element of data.
    for x in 0..10 {
        let _ = snd_channels[x].send(data[x]);
    }

    // Store the result from each thread back into data.
    for x in 0..10 {
        data[x] = rcv_channels[x].recv().unwrap();
    }

    // Wait for each thread to finish
    for handle in handles {
        let _ = handle.join();
    }
    dbg!(data);
}

struct Iter {
    current: usize,
    max: usize,
}

struct Droppable;

/**
 * print function
 */
fn print (s: Box<[u8]>) {
    println!("{:?}", s);
}

/**
 * Function demonstrating generics
 */
fn make_tuple<T, S> (t:T, s:S) -> (T, S) {
    (t, s)
}

/**
 * Apply the Iterator trait.
 */
impl Iterator for Iter {
    // Associate the item type produced by the iterator.
    type Item = usize;

    // Implement the next() method.
    fn next(&mut self) -> Option<usize> {
        self.current += 1;
        // Yield values until max is reached.
        if self.current - 1 < self.max {
            Some(self.current - 1)
        } else {
            None
        }
    }
}

/**
 * Apply the Drop trait.
 */
impl Drop for Droppable {
    fn drop(&mut self) {
        println!("Resource will be released!");
    }
}

/*
// Result example (represents the outcome of an operation)
fn result() {
    pub enum Result<i32, String> {
        Ok(i32),
        Err(String),
    }

    let result: Result<i32, String> = Ok(200);

    // Pattern-matching example
    match result {
        Ok(code) => print!("code: {}", code),
        Err(err) => print!("err: {}", err),
    };

    if let Ok(code) = result {
        print!("code: {}", code);
    };

    // unwrap_or() example
    print!("code: {}", result.unwrap_or(-1));
    let result: Result<i32, String> = Err("error".to_string());
    print!("code: {}", result.unwrap_or(-1));

    let result: Result<i32, String> = Ok(200);
    let next_result = result.and_then(func);
    let result: Result<i32, String> = Err("error".to_string());
    let next_result = result.and_then(func);
}

// and_then() example
fn func (code: i32) -> Result<i32, String> {
    print!("code: {}", code);
    Ok(100);
}
*/
use super::{
    PrimaryTableIndex, ScopeName, SecondaryTableName, Table, TableName,
};
use crate::AccountName;
use core::marker::PhantomData;

/// Handle identifying one secondary index of a contract table.
///
/// `K` is the secondary key type and `T` the table's row type; neither is
/// stored (see `_data`), they only parameterize the index at the type level.
#[derive(Copy, Clone, Debug)]
pub struct SecondaryTableIndex<K, T>
where
    T: Table,
{
    /// Account the index is looked up under (presumably the contract account
    /// owning the table — confirm against callers).
    pub code: AccountName,
    /// Scope within `code` that the table rows live in.
    pub scope: ScopeName,
    /// Name of the secondary table, combining the base table name with the
    /// index number (see `SecondaryTableName::new` in `new` below).
    pub table: SecondaryTableName,
    /// Zero-sized marker tying `K` and `T` to this index without storing them.
    _data: PhantomData<(K, T)>,
}

impl<K, T> SecondaryTableIndex<K, T>
where
    T: Table,
{
    /// Builds an index handle for index number `index` of table `name`,
    /// converting `code`, `scope` and `name` from anything convertible into
    /// the corresponding name types.
    #[inline]
    pub fn new<C, S, N>(code: C, scope: S, name: N, index: usize) -> Self
    where
        C: Into<AccountName>,
        S: Into<ScopeName>,
        N: Into<TableName>,
    {
        Self {
            code: code.into(),
            scope: scope.into(),
            table: SecondaryTableName::new(name.into(), index),
            _data: PhantomData,
        }
    }

    /// Returns the primary-table index for the same `code`/`scope` pair.
    #[must_use]
    pub fn primary_index(&self) -> PrimaryTableIndex<T> {
        PrimaryTableIndex::new(self.code, self.scope)
    }
}
/// Returns the vertex that appears most often across all edges (the center of
/// a star graph, where the center touches every edge). Returns 0 when `edges`
/// is empty.
pub fn find_center(edges: Vec<Vec<i32>>) -> i32 {
    use std::collections::HashMap;

    // Tally how many edges each vertex participates in.
    let mut degree: HashMap<i32, i32> = HashMap::new();
    for node in edges.into_iter().flatten() {
        *degree.entry(node).or_default() += 1;
    }

    // The center is the vertex with the highest degree; 0 if there were none.
    degree
        .into_iter()
        .max_by_key(|&(_, hits)| hits)
        .map(|(node, _)| node)
        .unwrap_or(0)
}
// Terminal Mastermind: guess a 4-peg color code in ncurses.
// Left panel: color palette (selector1). Right panel: guess board (selector2).
extern crate ncurses;
extern crate rand;

use ncurses::*;
use std::*;

// Extra color slot beyond ncurses' 8 standard colors, used for "right color,
// wrong place" feedback pegs.
pub const COLOR_GRAY: i16 = 8;

fn main() {
    // Secret answer: four random colors drawn from color pairs 1..=6.
    let ans: [i16; 4] = [
        (rand::random::<u16>() % 6 + 1) as i16,
        (rand::random::<u16>() % 6 + 1) as i16,
        (rand::random::<u16>() % 6 + 1) as i16,
        (rand::random::<u16>() % 6 + 1) as i16,
    ];
    // Index 0 unused so color-pair numbers map directly to names.
    let colors: [&'static str; 7] = ["", "Red", "Green", "Yellow", "Blue", "Magenta", "Cyan"];
    // TODO options
    let num_attempts: u8 = 10;

    // window setup
    initscr();
    if has_colors() == false {
        endwin();
        println!("Your terminal does not support color.");
        process::exit(1);
    }
    start_color();
    init_pair(COLOR_BLACK, COLOR_BLACK, COLOR_BLACK);
    init_pair(COLOR_RED, COLOR_RED, COLOR_BLACK);
    init_pair(COLOR_GREEN, COLOR_GREEN, COLOR_BLACK);
    init_pair(COLOR_YELLOW, COLOR_YELLOW, COLOR_BLACK);
    init_pair(COLOR_BLUE, COLOR_BLUE, COLOR_BLACK);
    init_pair(COLOR_MAGENTA, COLOR_MAGENTA, COLOR_BLACK);
    init_pair(COLOR_CYAN, COLOR_CYAN, COLOR_BLACK);
    init_pair(COLOR_WHITE, COLOR_WHITE, COLOR_WHITE);
    init_color(COLOR_GRAY, 200, 200, 200);
    init_pair(COLOR_GRAY, COLOR_GRAY, COLOR_BLACK);
    cbreak();
    noecho();
    keypad(stdscr(), true);
    bkgd(COLOR_PAIR(COLOR_BLACK));

    // game setup
    // game[0] is the color palette; game[1..=num_attempts] are the guess rows.
    let mut game: vec::Vec<vec::Vec<i16>> = vec::Vec::new();
    game.push(vec![
        COLOR_RED,
        COLOR_GREEN,
        COLOR_YELLOW,
        COLOR_BLUE,
        COLOR_MAGENTA,
        COLOR_CYAN,
    ]);
    /* [i16; 4]; num_attempts]; */
    for _ in 0..num_attempts as usize {
        game.push(vec![0i16; 4]);
    }

    // Draw the palette column; selector1 = [row, col] of the palette cursor.
    let mut selector1: [i32; 2] = [4; 2];
    for i in 1..7 {
        print_color_block(selector1[0], selector1[1], i);
        selector1[0] += 3;
    }
    selector1[0] = 4;
    selector1[1] = 7;
    mvaddch(selector1[0], selector1[1], '*' as chtype);

    // Vertical divider between palette and board.
    for i in 0..23 {
        mvaddch(i, 10, ACS_VLINE());
    }

    // Draw the empty guess board; selector2 = [row, col] of the board cursor.
    let mut selector2: [i32; 2] = [2, 14];
    for i in (selector2[0]..selector2[0] + 2 * num_attempts as i32).step_by(2) {
        for j in selector2[1]..selector2[1] + 16 {
            if (j - selector2[1]) % 4 != 0 {
                mvaddch(i, j, '_' as chtype);
            }
        }
    }
    selector2[0] = 3;
    selector2[1] = 16;
    mvaddch(selector2[0], selector2[1], '*' as chtype);

    // Park the hardware cursor out of the way.
    wmove(stdscr(), 23, 79);

    let mut attempt: u8 = 1;

    // game loop
    loop {
        let inp: i32 = getch();

        // clear message
        wmove(stdscr(), 23, 0);
        clrtoeol();

        // handle input
        if inp == 'q' as i32 {
            break;
        }
        #[allow(unreachable_code)]
        {
            match inp {
                259 => {
                    // arrow up: move palette cursor, wrapping 4..=19 in steps of 3
                    mvaddch(selector1[0], selector1[1], ' ' as chtype);
                    selector1[0] -= 3;
                    if selector1[0] < 4 {
                        selector1[0] = 19;
                    }
                    mvaddch(selector1[0], selector1[1], '*' as chtype);
                },
                258 => {
                    // arrow down
                    mvaddch(selector1[0], selector1[1], ' ' as chtype);
                    selector1[0] += 3;
                    if selector1[0] > 19 {
                        selector1[0] = 4;
                    }
                    mvaddch(selector1[0], selector1[1], '*' as chtype);
                },
                32 => {
                    // space (select)
                    // translate selector1 to color
                    let idx1 = (selector1[0] - 4) / 3;
                    // TODO debug mode
                    // printMsg("selected color idx %d", idx1);
                    let color = game[0 as usize][idx1 as usize];
                    // translate selector2 to board slot
                    let idx2 = (selector2[1] - 16) / 4;
                    game[attempt as usize][idx2 as usize] = color;
                    // put color on board
                    print_color_block(selector2[0] - 1, selector2[1] - 1, color);
                    // fallthrough to arrow right
                    move_right(&mut selector2);
                },
                261 => {
                    // arrow right
                    move_right(&mut selector2);
                },
                260 => {
                    // arrow left: wrap the board cursor within columns 16..=28
                    mvaddch(selector2[0], selector2[1], ' ' as chtype);
                    selector2[1] -= 4;
                    if selector2[1] < 16 {
                        selector2[1] = 28;
                    }
                    mvaddch(selector2[0], selector2[1], '*' as chtype);
                },
                10 => {
                    // enter (submit)
                    if game[attempt as usize][0] == 0
                        || game[attempt as usize][1] == 0
                        || game[attempt as usize][2] == 0
                        || game[attempt as usize][3] == 0 {
                        print_msg("incomplete attempt");
                        continue;
                    }
                    // result[i]: 2 = right color+place, 1 = right color only.
                    let mut result: [u8; 4] = [0; 4];
                    let mut used_ans: [bool; 4] = [false; 4];
                    // calculate result
                    // first set used_ans based on exact match
                    for i in 0..4 {
                        let color = game[attempt as usize][i];
                        if color == ans[i] as i16 {
                            used_ans[i] = true;
                            result[i] = 2;
                        }
                    }
                    // then set wrong-location match
                    for i in 0..4 {
                        let color = game[attempt as usize][i];
                        for a in 0..4 {
                            if used_ans[a as usize] == false && color == ans[a as usize] && result[i] == 0 {
                                // don't reuse ans[x] or overwrite result[x]
                                // NOTE(review): `continue` only skips to the next `a`;
                                // `break` may have been intended, but the result[i] == 0
                                // guard already prevents a double match, so behavior is
                                // the same — confirm before changing.
                                used_ans[a as usize] = true;
                                result[i] = 1;
                                continue;
                            }
                        }
                    }
                    // sort result for obfuscation
                    result.sort();
                    result.reverse();

                    // show result
                    wmove(stdscr(), selector2[0] - 1 as i32, 33);
                    for i in 0..4 {
                        match result[i] {
                            1 => {
                                // color right but not location
                                addch(ACS_CKBOARD() | COLOR_PAIR(COLOR_GRAY));
                            },
                            2 => {
                                // color and location right
                                addch(ACS_CKBOARD() | COLOR_PAIR(COLOR_WHITE));
                            },
                            _ => {}
                        }
                    }

                    // check win/lose
                    if result[0] == 2 && result[1] == 2 && result[2] == 2 && result[3] == 2 {
                        print_msg("win");
                        getch();
                        endwin();
                        return;
                    } else if attempt >= num_attempts {
                        // lose, out of attempts
                        print_msg(&(format!("lose, answer was {} {} {} {}",
                            colors[ans[0] as usize],
                            colors[ans[1] as usize],
                            colors[ans[2] as usize],
                            colors[ans[3] as usize])));
                        getch();
                        endwin();
                        return;
                    } else {
                        // advance to the next guess row
                        attempt += 1;
                        mvaddch(selector2[0], selector2[1], ' ' as chtype);
                        selector2[0] += 2;
                        mvaddch(selector2[0], selector2[1], '*' as chtype);
                    }
                },
                _ => {}
            }
            wmove(stdscr(), 23, 79);
            refresh();
        }
    }

    // cleanup
    endwin();
}

// Draws a 3-cell wide solid block at (y, x) in the given color pair.
fn print_color_block(y: i32, x: i32, color_pair: i16) {
    wmove(stdscr(), y, x);
    addch(ACS_CKBOARD() | COLOR_PAIR(color_pair));
    addch(ACS_CKBOARD() | COLOR_PAIR(color_pair));
    addch(ACS_CKBOARD() | COLOR_PAIR(color_pair));
}

// Writes a status message on the bottom line (row 23).
fn print_msg(msg: &str) {
    wmove(stdscr(), 23, 0);
    wprintw(stdscr(), msg);
}

// Moves the board cursor one slot to the right, wrapping within columns 16..=28.
fn move_right(selector2: &mut [i32; 2]) {
    mvaddch(selector2[0], selector2[1], ' ' as chtype);
    selector2[1] += 4;
    if selector2[1] > 28 {
        selector2[1] = 16;
    }
    mvaddch(selector2[0], selector2[1], '*' as chtype);
}
use core::fmt;

/// Errors raised by off-chain execution.
pub enum OffchainError {
    /// Rendered as "No account available".
    NoAccountAvailable,
    /// Carries a static message which is rendered verbatim.
    Other(&'static str),
    /// Rendered as "Too early to send unsigned transaction".
    TooEarlyToSendUnsignedTransaction,
}

impl fmt::Debug for OffchainError {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Resolve each variant to its static message, then emit it once.
        let msg = match self {
            Self::NoAccountAvailable => "No account available",
            Self::Other(err) => err,
            Self::TooEarlyToSendUnsignedTransaction => "Too early to send unsigned transaction",
        };
        f.write_str(msg)
    }
}

impl fmt::Display for OffchainError {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Display is defined to match Debug exactly.
        fmt::Debug::fmt(self, f)
    }
}

#[cfg(feature = "std")]
impl std::error::Error for OffchainError {}
pub(crate) use _operator::make_module;

/// Python `operator` module: thin wrappers exposing the VM's operator
/// protocols as callables, plus attrgetter/itemgetter/methodcaller.
#[pymodule]
mod _operator {
    use crate::common::cmp;
    use crate::{
        builtins::{PyInt, PyIntRef, PyStr, PyStrRef, PyTupleRef, PyTypeRef},
        function::Either,
        function::{ArgBytesLike, FuncArgs, KwArgs, OptionalArg},
        identifier,
        protocol::PyIter,
        recursion::ReprGuard,
        types::{Callable, Constructor, PyComparisonOp, Representable},
        AsObject, Py, PyObjectRef, PyPayload, PyRef, PyResult, VirtualMachine,
    };

    // Rich-comparison wrappers: each delegates to rich_compare with the
    // corresponding comparison op.
    #[pyfunction]
    fn lt(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        a.rich_compare(b, PyComparisonOp::Lt, vm)
    }

    #[pyfunction]
    fn le(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        a.rich_compare(b, PyComparisonOp::Le, vm)
    }

    #[pyfunction]
    fn gt(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        a.rich_compare(b, PyComparisonOp::Gt, vm)
    }

    #[pyfunction]
    fn ge(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        a.rich_compare(b, PyComparisonOp::Ge, vm)
    }

    #[pyfunction]
    fn eq(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        a.rich_compare(b, PyComparisonOp::Eq, vm)
    }

    #[pyfunction]
    fn ne(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        a.rich_compare(b, PyComparisonOp::Ne, vm)
    }

    #[pyfunction]
    fn not_(a: PyObjectRef, vm: &VirtualMachine) -> PyResult<bool> {
        a.try_to_bool(vm).map(|r| !r)
    }

    #[pyfunction]
    fn truth(a: PyObjectRef, vm: &VirtualMachine) -> PyResult<bool> {
        a.try_to_bool(vm)
    }

    #[pyfunction]
    fn is_(a: PyObjectRef, b: PyObjectRef) -> PyResult<bool> {
        Ok(a.is(&b))
    }

    #[pyfunction]
    fn is_not(a: PyObjectRef, b: PyObjectRef) -> PyResult<bool> {
        Ok(!a.is(&b))
    }

    // Arithmetic/bitwise wrappers: each delegates to the corresponding VM
    // number-protocol method.
    #[pyfunction]
    fn abs(a: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._abs(&a)
    }

    #[pyfunction]
    fn add(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._add(&a, &b)
    }

    #[pyfunction]
    fn and_(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._and(&a, &b)
    }

    #[pyfunction]
    fn floordiv(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._floordiv(&a, &b)
    }

    // Note: Keep track of issue17567. Will need changes in order to strictly match behavior of
    // a.__index__ as raised in the issue. Currently, we accept int subclasses.
    #[pyfunction]
    fn index(a: PyObjectRef, vm: &VirtualMachine) -> PyResult<PyIntRef> {
        a.try_index(vm)
    }

    #[pyfunction]
    fn invert(pos: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._invert(&pos)
    }

    #[pyfunction]
    fn lshift(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._lshift(&a, &b)
    }

    // `mod` is a Rust keyword, hence the explicit Python-side name.
    #[pyfunction(name = "mod")]
    fn mod_(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._mod(&a, &b)
    }

    #[pyfunction]
    fn mul(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._mul(&a, &b)
    }

    #[pyfunction]
    fn matmul(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._matmul(&a, &b)
    }

    #[pyfunction]
    fn neg(pos: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._neg(&pos)
    }

    #[pyfunction]
    fn or_(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._or(&a, &b)
    }

    #[pyfunction]
    fn pos(obj: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._pos(&obj)
    }

    #[pyfunction]
    fn pow(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        // Two-argument pow: modulus is None.
        vm._pow(&a, &b, vm.ctx.none.as_object())
    }

    #[pyfunction]
    fn rshift(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._rshift(&a, &b)
    }

    #[pyfunction]
    fn sub(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._sub(&a, &b)
    }

    #[pyfunction]
    fn truediv(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._truediv(&a, &b)
    }

    #[pyfunction]
    fn xor(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._xor(&a, &b)
    }

    // Sequence based operators

    #[pyfunction]
    fn concat(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        // Best attempt at checking that a is sequence-like.
        if !a.class().has_attr(identifier!(vm, __getitem__))
            || a.fast_isinstance(vm.ctx.types.dict_type)
        {
            return Err(
                vm.new_type_error(format!("{} object can't be concatenated", a.class().name()))
            );
        }
        vm._add(&a, &b)
    }

    #[pyfunction]
    fn contains(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._contains(&a, b)
    }

    #[pyfunction(name = "countOf")]
    fn count_of(a: PyIter, b: PyObjectRef, vm: &VirtualMachine) -> PyResult<usize> {
        let mut count: usize = 0;
        for element in a.iter_without_hint::<PyObjectRef>(vm)? {
            let element = element?;
            // Identity short-circuits the (possibly expensive) equality check.
            if element.is(&b) || vm.bool_eq(&b, &element)? {
                count += 1;
            }
        }
        Ok(count)
    }

    #[pyfunction]
    fn delitem(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult<()> {
        a.del_item(&*b, vm)
    }

    #[pyfunction]
    fn getitem(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        a.get_item(&*b, vm)
    }

    #[pyfunction(name = "indexOf")]
    fn index_of(a: PyIter, b: PyObjectRef, vm: &VirtualMachine) -> PyResult<usize> {
        for (index, element) in a.iter_without_hint::<PyObjectRef>(vm)?.enumerate() {
            let element = element?;
            if element.is(&b) || vm.bool_eq(&b, &element)? {
                return Ok(index);
            }
        }
        Err(vm.new_value_error("sequence.index(x): x not in sequence".to_owned()))
    }

    #[pyfunction]
    fn setitem(
        a: PyObjectRef,
        b: PyObjectRef,
        c: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult<()> {
        a.set_item(&*b, c, vm)
    }

    #[pyfunction]
    fn length_hint(obj: PyObjectRef, default: OptionalArg, vm: &VirtualMachine) -> PyResult<usize> {
        // The default must itself be an int; falls back to 0 when omitted.
        let default: usize = default
            .map(|v| {
                if !v.fast_isinstance(vm.ctx.types.int_type) {
                    return Err(vm.new_type_error(format!(
                        "'{}' type cannot be interpreted as an integer",
                        v.class().name()
                    )));
                }
                v.payload::<PyInt>().unwrap().try_to_primitive(vm)
            })
            .unwrap_or(Ok(0))?;
        obj.length_hint(default, vm)
    }

    // Inplace Operators

    #[pyfunction]
    fn iadd(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._iadd(&a, &b)
    }

    #[pyfunction]
    fn iand(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._iand(&a, &b)
    }

    #[pyfunction]
    fn iconcat(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        // Best attempt at checking that a is sequence-like.
        if !a.class().has_attr(identifier!(vm, __getitem__))
            || a.fast_isinstance(vm.ctx.types.dict_type)
        {
            return Err(
                vm.new_type_error(format!("{} object can't be concatenated", a.class().name()))
            );
        }
        vm._iadd(&a, &b)
    }

    #[pyfunction]
    fn ifloordiv(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._ifloordiv(&a, &b)
    }

    #[pyfunction]
    fn ilshift(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._ilshift(&a, &b)
    }

    #[pyfunction]
    fn imod(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._imod(&a, &b)
    }

    #[pyfunction]
    fn imul(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._imul(&a, &b)
    }

    #[pyfunction]
    fn imatmul(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._imatmul(&a, &b)
    }

    #[pyfunction]
    fn ior(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._ior(&a, &b)
    }

    #[pyfunction]
    fn ipow(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._ipow(&a, &b, vm.ctx.none.as_object())
    }

    #[pyfunction]
    fn irshift(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._irshift(&a, &b)
    }

    #[pyfunction]
    fn isub(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._isub(&a, &b)
    }

    #[pyfunction]
    fn itruediv(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._itruediv(&a, &b)
    }

    #[pyfunction]
    fn ixor(a: PyObjectRef, b: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        vm._ixor(&a, &b)
    }

    // Constant-time comparison for hmac.compare_digest: both operands must be
    // ASCII strings or both bytes-like.
    #[pyfunction]
    fn _compare_digest(
        a: Either<PyStrRef, ArgBytesLike>,
        b: Either<PyStrRef, ArgBytesLike>,
        vm: &VirtualMachine,
    ) -> PyResult<bool> {
        let res = match (a, b) {
            (Either::A(a), Either::A(b)) => {
                if !a.as_str().is_ascii() || !b.as_str().is_ascii() {
                    return Err(vm.new_type_error(
                        "comparing strings with non-ASCII characters is not supported".to_owned(),
                    ));
                }
                cmp::timing_safe_cmp(a.as_str().as_bytes(), b.as_str().as_bytes())
            }
            (Either::B(a), Either::B(b)) => {
                a.with_ref(|a| b.with_ref(|b| cmp::timing_safe_cmp(a, b)))
            }
            _ => {
                return Err(vm.new_type_error(
                    "unsupported operand types(s) or combination of types".to_owned(),
                ))
            }
        };
        Ok(res)
    }

    /// attrgetter(attr, ...) --> attrgetter object
    ///
    /// Return a callable object that fetches the given attribute(s) from its operand.
    /// After f = attrgetter('name'), the call f(r) returns r.name.
    /// After g = attrgetter('name', 'date'), the call g(r) returns (r.name, r.date).
    /// After h = attrgetter('name.first', 'name.last'), the call h(r) returns
    /// (r.name.first, r.name.last).
    #[pyattr]
    #[pyclass(name = "attrgetter")]
    #[derive(Debug, PyPayload)]
    struct PyAttrGetter {
        attrs: Vec<PyStrRef>,
    }

    #[pyclass(with(Callable, Constructor, Representable))]
    impl PyAttrGetter {
        #[pymethod(magic)]
        fn reduce(zelf: PyRef<Self>, vm: &VirtualMachine) -> PyResult<(PyTypeRef, PyTupleRef)> {
            let attrs = vm
                .ctx
                .new_tuple(zelf.attrs.iter().map(|v| v.clone().into()).collect());
            Ok((zelf.class().to_owned(), attrs))
        }

        // Go through dotted parts of string and call getattr on whatever is returned.
        fn get_single_attr(
            obj: PyObjectRef,
            attr: &Py<PyStr>,
            vm: &VirtualMachine,
        ) -> PyResult<PyObjectRef> {
            let attr_str = attr.as_str();
            let parts = attr_str.split('.').collect::<Vec<_>>();
            if parts.len() == 1 {
                return obj.get_attr(attr, vm);
            }
            let mut obj = obj;
            for part in parts {
                obj = obj.get_attr(&vm.ctx.new_str(part), vm)?;
            }
            Ok(obj)
        }
    }

    impl Constructor for PyAttrGetter {
        type Args = FuncArgs;

        fn py_new(cls: PyTypeRef, args: Self::Args, vm: &VirtualMachine) -> PyResult {
            let nattr = args.args.len();
            // Check we get no keyword and at least one positional.
            if !args.kwargs.is_empty() {
                return Err(vm.new_type_error("attrgetter() takes no keyword arguments".to_owned()));
            }
            if nattr == 0 {
                return Err(vm.new_type_error("attrgetter expected 1 argument, got 0.".to_owned()));
            }
            let mut attrs = Vec::with_capacity(nattr);
            for o in args.args {
                if let Ok(r) = o.try_into_value(vm) {
                    attrs.push(r);
                } else {
                    return Err(vm.new_type_error("attribute name must be a string".to_owned()));
                }
            }
            PyAttrGetter { attrs }
                .into_ref_with_type(vm, cls)
                .map(Into::into)
        }
    }

    impl Callable for PyAttrGetter {
        type Args = PyObjectRef;
        fn call(zelf: &Py<Self>, obj: Self::Args, vm: &VirtualMachine) -> PyResult {
            // Handle case where we only have one attribute.
            if zelf.attrs.len() == 1 {
                return Self::get_single_attr(obj, &zelf.attrs[0], vm);
            }
            // Build tuple and call get_single on each element in attrs.
            let mut results = Vec::with_capacity(zelf.attrs.len());
            for o in &zelf.attrs {
                results.push(Self::get_single_attr(obj.clone(), o, vm)?);
            }
            Ok(vm.ctx.new_tuple(results).into())
        }
    }

    impl Representable for PyAttrGetter {
        #[inline]
        fn repr_str(zelf: &Py<Self>, vm: &VirtualMachine) -> PyResult<String> {
            // ReprGuard breaks repr recursion cycles with "...".
            let fmt = if let Some(_guard) = ReprGuard::enter(vm, zelf.as_object()) {
                let mut parts = Vec::with_capacity(zelf.attrs.len());
                for part in &zelf.attrs {
                    parts.push(part.as_object().repr(vm)?.as_str().to_owned());
                }
                parts.join(", ")
            } else {
                "...".to_owned()
            };
            Ok(format!("operator.attrgetter({fmt})"))
        }
    }

    /// itemgetter(item, ...) --> itemgetter object
    ///
    /// Return a callable object that fetches the given item(s) from its operand.
    /// After f = itemgetter(2), the call f(r) returns r[2].
    /// After g = itemgetter(2, 5, 3), the call g(r) returns (r[2], r[5], r[3])
    #[pyattr]
    #[pyclass(name = "itemgetter")]
    #[derive(Debug, PyPayload)]
    struct PyItemGetter {
        items: Vec<PyObjectRef>,
    }

    #[pyclass(with(Callable, Constructor, Representable))]
    impl PyItemGetter {
        #[pymethod(magic)]
        fn reduce(zelf: PyRef<Self>, vm: &VirtualMachine) -> PyObjectRef {
            let items = vm.ctx.new_tuple(zelf.items.to_vec());
            vm.new_pyobj((zelf.class().to_owned(), items))
        }
    }

    impl Constructor for PyItemGetter {
        type Args = FuncArgs;

        fn py_new(cls: PyTypeRef, args: Self::Args, vm: &VirtualMachine) -> PyResult {
            // Check we get no keyword and at least one positional.
            if !args.kwargs.is_empty() {
                return Err(vm.new_type_error("itemgetter() takes no keyword arguments".to_owned()));
            }
            if args.args.is_empty() {
                return Err(vm.new_type_error("itemgetter expected 1 argument, got 0.".to_owned()));
            }
            PyItemGetter { items: args.args }
                .into_ref_with_type(vm, cls)
                .map(Into::into)
        }
    }

    impl Callable for PyItemGetter {
        type Args = PyObjectRef;
        fn call(zelf: &Py<Self>, obj: Self::Args, vm: &VirtualMachine) -> PyResult {
            // Handle case where we only have one attribute.
            if zelf.items.len() == 1 {
                return obj.get_item(&*zelf.items[0], vm);
            }
            // Build tuple and call get_single on each element in attrs.
            let mut results = Vec::with_capacity(zelf.items.len());
            for item in &zelf.items {
                results.push(obj.get_item(&**item, vm)?);
            }
            Ok(vm.ctx.new_tuple(results).into())
        }
    }

    impl Representable for PyItemGetter {
        #[inline]
        fn repr_str(zelf: &Py<Self>, vm: &VirtualMachine) -> PyResult<String> {
            let fmt = if let Some(_guard) = ReprGuard::enter(vm, zelf.as_object()) {
                let mut items = Vec::with_capacity(zelf.items.len());
                for item in &zelf.items {
                    items.push(item.repr(vm)?.as_str().to_owned());
                }
                items.join(", ")
            } else {
                "...".to_owned()
            };
            Ok(format!("operator.itemgetter({fmt})"))
        }
    }

    /// methodcaller(name, ...) --> methodcaller object
    ///
    /// Return a callable object that calls the given method on its operand.
    /// After f = methodcaller('name'), the call f(r) returns r.name().
    /// After g = methodcaller('name', 'date', foo=1), the call g(r) returns
    /// r.name('date', foo=1).
    #[pyattr]
    #[pyclass(name = "methodcaller")]
    #[derive(Debug, PyPayload)]
    struct PyMethodCaller {
        name: PyStrRef,
        args: FuncArgs,
    }

    #[pyclass(with(Callable, Constructor, Representable))]
    impl PyMethodCaller {
        #[pymethod(magic)]
        fn reduce(zelf: PyRef<Self>, vm: &VirtualMachine) -> PyResult<PyTupleRef> {
            // With no kwargs, return (type(obj), (name, *args)) tuple.
            if zelf.args.kwargs.is_empty() {
                let mut pargs = vec![zelf.name.as_object().to_owned()];
                pargs.append(&mut zelf.args.args.clone());
                Ok(vm.new_tuple((zelf.class().to_owned(), vm.ctx.new_tuple(pargs))))
            } else {
                // If we have kwargs, create a partial function that contains them and pass back that
                // along with the args.
                let partial = vm.import("functools", None, 0)?.get_attr("partial", vm)?;
                let args = FuncArgs::new(
                    vec![zelf.class().to_owned().into(), zelf.name.clone().into()],
                    KwArgs::new(zelf.args.kwargs.clone()),
                );
                let callable = partial.call(args, vm)?;
                Ok(vm.new_tuple((callable, vm.ctx.new_tuple(zelf.args.args.clone()))))
            }
        }
    }

    impl Constructor for PyMethodCaller {
        type Args = (PyObjectRef, FuncArgs);

        fn py_new(cls: PyTypeRef, (name, args): Self::Args, vm: &VirtualMachine) -> PyResult {
            if let Ok(name) = name.try_into_value(vm) {
                PyMethodCaller { name, args }
                    .into_ref_with_type(vm, cls)
                    .map(Into::into)
            } else {
                Err(vm.new_type_error("method name must be a string".to_owned()))
            }
        }
    }

    impl Callable for PyMethodCaller {
        type Args = PyObjectRef;

        #[inline]
        fn call(zelf: &Py<Self>, obj: Self::Args, vm: &VirtualMachine) -> PyResult {
            vm.call_method(&obj, zelf.name.as_str(), zelf.args.clone())
        }
    }

    impl Representable for PyMethodCaller {
        #[inline]
        fn repr_str(zelf: &Py<Self>, vm: &VirtualMachine) -> PyResult<String> {
            let fmt = if let Some(_guard) = ReprGuard::enter(vm, zelf.as_object()) {
                let args = &zelf.args.args;
                let kwargs = &zelf.args.kwargs;
                let mut fmt = vec![zelf.name.as_object().repr(vm)?.as_str().to_owned()];
                if !args.is_empty() {
                    let mut parts = Vec::with_capacity(args.len());
                    for v in args {
                        parts.push(v.repr(vm)?.as_str().to_owned());
                    }
                    fmt.push(parts.join(", "));
                }
                // build name=value pairs from KwArgs.
                if !kwargs.is_empty() {
                    let mut parts = Vec::with_capacity(kwargs.len());
                    for (key, value) in kwargs {
                        let value_repr = value.repr(vm)?;
                        parts.push(format!("{key}={value_repr}"));
                    }
                    fmt.push(parts.join(", "));
                }
                fmt.join(", ")
            } else {
                "...".to_owned()
            };
            Ok(format!("operator.methodcaller({fmt})"))
        }
    }
}
// build.rs extern crate vergen; use vergen::*; fn main() { let mut flags = Flags::all(); flags.toggle(NOW); vergen(flags); }
// auto generated, do not modify. // created: Wed Jan 20 00:44:03 2016 // src-file: /QtQml/qqmlfileselector.h // dst-file: /src/qml/qqmlfileselector.rs // // header block begin => #![feature(libc)] #![feature(core)] #![feature(collections)] extern crate libc; use self::libc::*; // <= header block end // main block begin => // <= main block end // use block begin => use super::super::core::qobject::QObject; // 771 use std::ops::Deref; use super::super::core::qstringlist::QStringList; // 771 use super::qqmlengine::QQmlEngine; // 773 use super::super::core::qfileselector::QFileSelector; // 771 // <= use block end // ext block begin => // #[link(name = "Qt5Core")] // #[link(name = "Qt5Gui")] // #[link(name = "Qt5Widgets")] // #[link(name = "QtInline")] extern { fn QQmlFileSelector_Class_Size() -> c_int; // proto: void QQmlFileSelector::setExtraSelectors(QStringList & strings); fn _ZN16QQmlFileSelector17setExtraSelectorsER11QStringList(qthis: u64 /* *mut c_void*/, arg0: *mut c_void); // proto: void QQmlFileSelector::QQmlFileSelector(QQmlEngine * engine, QObject * parent); fn _ZN16QQmlFileSelectorC2EP10QQmlEngineP7QObject(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: *mut c_void); // proto: static QQmlFileSelector * QQmlFileSelector::get(QQmlEngine * ); fn _ZN16QQmlFileSelector3getEP10QQmlEngine(arg0: *mut c_void) -> *mut c_void; // proto: void QQmlFileSelector::setSelector(QFileSelector * selector); fn _ZN16QQmlFileSelector11setSelectorEP13QFileSelector(qthis: u64 /* *mut c_void*/, arg0: *mut c_void); // proto: const QMetaObject * QQmlFileSelector::metaObject(); fn _ZNK16QQmlFileSelector10metaObjectEv(qthis: u64 /* *mut c_void*/); // proto: void QQmlFileSelector::QQmlFileSelector(const QQmlFileSelector & ); fn _ZN16QQmlFileSelectorC2ERKS_(qthis: u64 /* *mut c_void*/, arg0: *mut c_void); // proto: void QQmlFileSelector::setExtraSelectors(const QStringList & strings); fn _ZN16QQmlFileSelector17setExtraSelectorsERK11QStringList(qthis: u64 /* *mut c_void*/, arg0: 
*mut c_void); // proto: void QQmlFileSelector::~QQmlFileSelector(); fn _ZN16QQmlFileSelectorD2Ev(qthis: u64 /* *mut c_void*/); } // <= ext block end // body block begin => // class sizeof(QQmlFileSelector)=1 #[derive(Default)] pub struct QQmlFileSelector { qbase: QObject, pub qclsinst: u64 /* *mut c_void*/, } impl /*struct*/ QQmlFileSelector { pub fn inheritFrom(qthis: u64 /* *mut c_void*/) -> QQmlFileSelector { return QQmlFileSelector{qbase: QObject::inheritFrom(qthis), qclsinst: qthis, ..Default::default()}; } } impl Deref for QQmlFileSelector { type Target = QObject; fn deref(&self) -> &QObject { return & self.qbase; } } impl AsRef<QObject> for QQmlFileSelector { fn as_ref(& self) -> & QObject { return & self.qbase; } } // proto: void QQmlFileSelector::setExtraSelectors(QStringList & strings); impl /*struct*/ QQmlFileSelector { pub fn setExtraSelectors<RetType, T: QQmlFileSelector_setExtraSelectors<RetType>>(& self, overload_args: T) -> RetType { return overload_args.setExtraSelectors(self); // return 1; } } pub trait QQmlFileSelector_setExtraSelectors<RetType> { fn setExtraSelectors(self , rsthis: & QQmlFileSelector) -> RetType; } // proto: void QQmlFileSelector::setExtraSelectors(QStringList & strings); impl<'a> /*trait*/ QQmlFileSelector_setExtraSelectors<()> for (&'a QStringList) { fn setExtraSelectors(self , rsthis: & QQmlFileSelector) -> () { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN16QQmlFileSelector17setExtraSelectorsER11QStringList()}; let arg0 = self.qclsinst as *mut c_void; unsafe {_ZN16QQmlFileSelector17setExtraSelectorsER11QStringList(rsthis.qclsinst, arg0)}; // return 1; } } // proto: void QQmlFileSelector::QQmlFileSelector(QQmlEngine * engine, QObject * parent); impl /*struct*/ QQmlFileSelector { pub fn new<T: QQmlFileSelector_new>(value: T) -> QQmlFileSelector { let rsthis = value.new(); return rsthis; // return 1; } } pub trait QQmlFileSelector_new { fn new(self) -> QQmlFileSelector; } // proto: void 
QQmlFileSelector::QQmlFileSelector(QQmlEngine * engine, QObject * parent); impl<'a> /*trait*/ QQmlFileSelector_new for (&'a QQmlEngine, &'a QObject) { fn new(self) -> QQmlFileSelector { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN16QQmlFileSelectorC2EP10QQmlEngineP7QObject()}; let ctysz: c_int = unsafe{QQmlFileSelector_Class_Size()}; let qthis_ph: u64 = unsafe{calloc(1, ctysz as usize)} as u64; let arg0 = self.0.qclsinst as *mut c_void; let arg1 = self.1.qclsinst as *mut c_void; unsafe {_ZN16QQmlFileSelectorC2EP10QQmlEngineP7QObject(qthis_ph, arg0, arg1)}; let qthis: u64 = qthis_ph; let rsthis = QQmlFileSelector{qbase: QObject::inheritFrom(qthis), qclsinst: qthis, ..Default::default()}; return rsthis; // return 1; } } // proto: static QQmlFileSelector * QQmlFileSelector::get(QQmlEngine * ); impl /*struct*/ QQmlFileSelector { pub fn get_s<RetType, T: QQmlFileSelector_get_s<RetType>>( overload_args: T) -> RetType { return overload_args.get_s(); // return 1; } } pub trait QQmlFileSelector_get_s<RetType> { fn get_s(self ) -> RetType; } // proto: static QQmlFileSelector * QQmlFileSelector::get(QQmlEngine * ); impl<'a> /*trait*/ QQmlFileSelector_get_s<QQmlFileSelector> for (&'a QQmlEngine) { fn get_s(self ) -> QQmlFileSelector { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN16QQmlFileSelector3getEP10QQmlEngine()}; let arg0 = self.qclsinst as *mut c_void; let mut ret = unsafe {_ZN16QQmlFileSelector3getEP10QQmlEngine(arg0)}; let mut ret1 = QQmlFileSelector::inheritFrom(ret as u64); return ret1; // return 1; } } // proto: void QQmlFileSelector::setSelector(QFileSelector * selector); impl /*struct*/ QQmlFileSelector { pub fn setSelector<RetType, T: QQmlFileSelector_setSelector<RetType>>(& self, overload_args: T) -> RetType { return overload_args.setSelector(self); // return 1; } } pub trait QQmlFileSelector_setSelector<RetType> { fn setSelector(self , rsthis: & QQmlFileSelector) -> RetType; } // proto: void 
QQmlFileSelector::setSelector(QFileSelector * selector); impl<'a> /*trait*/ QQmlFileSelector_setSelector<()> for (&'a QFileSelector) { fn setSelector(self , rsthis: & QQmlFileSelector) -> () { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN16QQmlFileSelector11setSelectorEP13QFileSelector()}; let arg0 = self.qclsinst as *mut c_void; unsafe {_ZN16QQmlFileSelector11setSelectorEP13QFileSelector(rsthis.qclsinst, arg0)}; // return 1; } } // proto: const QMetaObject * QQmlFileSelector::metaObject(); impl /*struct*/ QQmlFileSelector { pub fn metaObject<RetType, T: QQmlFileSelector_metaObject<RetType>>(& self, overload_args: T) -> RetType { return overload_args.metaObject(self); // return 1; } } pub trait QQmlFileSelector_metaObject<RetType> { fn metaObject(self , rsthis: & QQmlFileSelector) -> RetType; } // proto: const QMetaObject * QQmlFileSelector::metaObject(); impl<'a> /*trait*/ QQmlFileSelector_metaObject<()> for () { fn metaObject(self , rsthis: & QQmlFileSelector) -> () { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK16QQmlFileSelector10metaObjectEv()}; unsafe {_ZNK16QQmlFileSelector10metaObjectEv(rsthis.qclsinst)}; // return 1; } } // proto: void QQmlFileSelector::QQmlFileSelector(const QQmlFileSelector & ); impl<'a> /*trait*/ QQmlFileSelector_new for (&'a QQmlFileSelector) { fn new(self) -> QQmlFileSelector { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN16QQmlFileSelectorC2ERKS_()}; let ctysz: c_int = unsafe{QQmlFileSelector_Class_Size()}; let qthis_ph: u64 = unsafe{calloc(1, ctysz as usize)} as u64; let arg0 = self.qclsinst as *mut c_void; unsafe {_ZN16QQmlFileSelectorC2ERKS_(qthis_ph, arg0)}; let qthis: u64 = qthis_ph; let rsthis = QQmlFileSelector{qbase: QObject::inheritFrom(qthis), qclsinst: qthis, ..Default::default()}; return rsthis; // return 1; } } // proto: void QQmlFileSelector::~QQmlFileSelector(); impl /*struct*/ QQmlFileSelector { pub fn free<RetType, T: QQmlFileSelector_free<RetType>>(& self, 
overload_args: T) -> RetType { return overload_args.free(self); // return 1; } } pub trait QQmlFileSelector_free<RetType> { fn free(self , rsthis: & QQmlFileSelector) -> RetType; } // proto: void QQmlFileSelector::~QQmlFileSelector(); impl<'a> /*trait*/ QQmlFileSelector_free<()> for () { fn free(self , rsthis: & QQmlFileSelector) -> () { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN16QQmlFileSelectorD2Ev()}; unsafe {_ZN16QQmlFileSelectorD2Ev(rsthis.qclsinst)}; // return 1; } } // <= body block end
// use human_panic::setup_panic; use anyhow::Result; use cli::Action::*; use structopt::StructOpt; mod actions; mod cli; mod file_handler; mod template; fn main() -> Result<()> { // setup_panic!(); let args = cli::Cli::from_args(); match args.action { New => actions::when_new(args.name.unwrap())?, Build => actions::when_build()?, Run => actions::when_run()?, Clean => actions::when_clean()?, Test => actions::when_test()?, // unimplemented Add => actions::when_add()?, // unimplemented Query => actions::when_query()?, Fmt => actions::when_fmt()?, } /* match args.action { New => { println!("args: new\nMake files and directories...\n"); println!( "./main/main.cpp:\n{}", template::main_cpp(&args.name.clone().unwrap()) ); println!( "./main/BUILD:\n{}", template::build_main(args.name.clone().unwrap()) ); println!("./lib/BUILD:\n{}", template::build_lib()); println!("./WORKSPACE:\n{}", template::workspace()); println!("./.gitignore:\n{}", template::git_ignore()); println!("./config:\n{}", template::config(args.name.unwrap())); } Build => { println!("args: build"); println!("READ config"); println!("$ bazel build //main:{}", "filename"); } Run => { println!("args: run"); println!("READ config"); println!("$ bazel run //main:{}", "filename"); } Test => { println!("args: test"); println!("READ config"); println!("TEST"); } Add => { // For this, dentaku has to know more about Bazel println!("args: add"); if let Some(name) = args.lib_name { println!("add statement to BUILD"); println!("cc_library(\n name = \"{}\",\n srcs = [\"{}.cpp\"]\n hdrs = [\"{}\".h],\n)]", name, name, name); println!("add statement to cc_binary in BUILD"); println!(" deps = [\":{}.h\",],", name); } else { // happend? 
println!("add statement to cc_binary in BUILD"); println!("") } println!("add sentence to cc_library in BUILD:"); println!(" hdrs = [\"filename.h\"] in cc_binary"); } Query => { println!("args: query"); println!("READ config"); println!( "$ bazel query --nohost_deps --noimplicit_deps 'deps(//main:{})' --output graph", "name" ); } } // */ Ok(()) }
#![allow(non_snake_case)]
#![cfg(test)]

use std::ops::Add;

// Exploratory tests about Rust struct syntax: instantiation, unsized `str`
// fields, lifetime parameters on reference fields, and binding mutability.
// (Typos in the original test names — "requirec", "wjen", "instatntiate",
// "Owing" — are fixed; stray `;` after item definitions and dead-binding
// warnings cleaned up. No behavior change: these tests only need to compile
// and run to completion.)

#[test]
fn Empty_struct_requires_braces_when_creating_instance() {
    // A braced struct with no fields still needs `{}` at the
    // construction site.
    struct Empty {}
    let _x = Empty {};
}

// TODO: How to instantiate this?
#[test]
fn Struct_can_have_str_field_but_then_cannot_be_instantiated() {
    // `str` is unsized, so the type can be declared but a value can never
    // be built directly.
    struct MyBox {
        name: str,
    }
    // let myBox = MyBox {
    //     name: "hello".to_string(),
    // };
}

#[test]
fn Creating_instance_with_str_name_property() {
    // Reference fields force a lifetime parameter on the struct.
    struct Person<'LNames> {
        firstName: &'LNames str,
        lastName: &'LNames str,
    }
    let _gossda = Person {
        firstName: "Marcin?",
        lastName: "Gozdera",
    };
}

#[test]
fn Owning_struct_immutably() {
    struct Person<'LNames> {
        firstName: &'LNames str,
        lastName: &'LNames str,
    }
    // An immutable binding rejects both field assignment and rebinding
    // (the commented lines would not compile).
    let _gossda = Person {
        firstName: "Marcin?",
        lastName: "Gozdera",
    };
    //gossda.firstName = "Pan Gossda";
    //gossda = Person {firstName: "Boxer", lastName: "Bokserski"};
}

#[test]
fn Owning_struct_mutably() {
    struct Person<'LNames> {
        firstName: &'LNames str,
        lastName: &'LNames str,
    }
    // A `mut` binding allows both field mutation and whole-value rebinding.
    let mut gossda = Person {
        firstName: "Marcin?",
        lastName: "Gozdera",
    };
    gossda.firstName = "Pan Gossda";
    gossda = Person {
        firstName: "Boxer",
        lastName: "Bokserski",
    };
    // Touch the final value so the last assignment is not flagged as dead.
    let _ = gossda.firstName;
}
#[doc = "Writer for register C7IFCR"] pub type W = crate::W<u32, super::C7IFCR>; #[doc = "Register C7IFCR `reset()`'s with value 0"] impl crate::ResetValue for super::C7IFCR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `CTEIF7`"] pub struct CTEIF7_W<'a> { w: &'a mut W, } impl<'a> CTEIF7_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Write proxy for field `CCTCIF7`"] pub struct CCTCIF7_W<'a> { w: &'a mut W, } impl<'a> CCTCIF7_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `CBRTIF7`"] pub struct CBRTIF7_W<'a> { w: &'a mut W, } impl<'a> CBRTIF7_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `CBTIF7`"] pub struct CBTIF7_W<'a> { w: &'a mut W, } impl<'a> CBTIF7_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the 
field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `CLTCIF7`"] pub struct CLTCIF7_W<'a> { w: &'a mut W, } impl<'a> CLTCIF7_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } impl W { #[doc = "Bit 0 - Channel x clear transfer error interrupt flag Writing a 1 into this bit clears TEIFx in the MDMA_ISRy register"] #[inline(always)] pub fn cteif7(&mut self) -> CTEIF7_W { CTEIF7_W { w: self } } #[doc = "Bit 1 - Clear Channel transfer complete interrupt flag for channel x Writing a 1 into this bit clears CTCIFx in the MDMA_ISRy register"] #[inline(always)] pub fn cctcif7(&mut self) -> CCTCIF7_W { CCTCIF7_W { w: self } } #[doc = "Bit 2 - Channel x clear block repeat transfer complete interrupt flag Writing a 1 into this bit clears BRTIFx in the MDMA_ISRy register"] #[inline(always)] pub fn cbrtif7(&mut self) -> CBRTIF7_W { CBRTIF7_W { w: self } } #[doc = "Bit 3 - Channel x Clear block transfer complete interrupt flag Writing a 1 into this bit clears BTIFx in the MDMA_ISRy register"] #[inline(always)] pub fn cbtif7(&mut self) -> CBTIF7_W { CBTIF7_W { w: self } } #[doc = "Bit 4 - CLear buffer Transfer Complete Interrupt Flag for channel x Writing a 1 into this bit clears TCIFx in the MDMA_ISRy register"] #[inline(always)] pub fn cltcif7(&mut self) -> CLTCIF7_W { CLTCIF7_W { w: self } } }
use nom::character::complete::{multispace0, space1};
use nom::combinator::opt;
use nom::sequence::{terminated, tuple};

use crate::assembler::label_parsers::label_declaration_parser;
use crate::assembler::opcode_parsers::opcode_parser;
use crate::assembler::operand_parsers::{integer_operand_parser, register_parser};
use crate::assembler::Token;
use crate::instructions::Opcode;

// What a parsed line resolves to: a real VM opcode or an assembler directive.
#[derive(Debug, PartialEq)]
pub enum Action {
    Opcode(Token),
    Directive(Token),
}

// One fully parsed assembly line: an optional label, the action, and up to
// three operands (unused slots stay `None`).
#[derive(Debug, PartialEq)]
pub struct AssemblerInstruction {
    pub label: Option<Token>,
    pub action: Action,
    pub operand1: Option<Token>,
    pub operand2: Option<Token>,
    pub operand3: Option<Token>,
}

impl AssemblerInstruction {
    // Serializes the instruction to its byte encoding: one opcode byte
    // followed by the encoded operands in slot order.
    //
    // Panics when the action is not an opcode, or when the operand slots
    // are populated out of order (e.g. operand2 set while operand1 is None).
    pub fn to_bytes(&self) -> Vec<u8> {
        let mut results = vec![];
        match &self.action {
            Action::Opcode(Token::Op(code)) => results.push(u8::from(code)),
            _ => panic!("Non-opcode found in opcode field"),
        };
        // TODO: make it nicer
        // Accept only prefixes of (op1, op2, op3) being filled.
        match (&self.operand1, &self.operand2, &self.operand3) {
            (Some(op1), Some(op2), Some(op3)) => {
                AssemblerInstruction::extract_operand(&op1, &mut results);
                AssemblerInstruction::extract_operand(&op2, &mut results);
                AssemblerInstruction::extract_operand(&op3, &mut results);
            }
            (Some(op1), Some(op2), None) => {
                AssemblerInstruction::extract_operand(&op1, &mut results);
                AssemblerInstruction::extract_operand(&op2, &mut results);
            }
            (Some(op1), None, None) => {
                AssemblerInstruction::extract_operand(&op1, &mut results);
            }
            (None, None, None) => (),
            _ => panic!("malformed AssemblerInstruction"),
        }
        results
    }

    // TODO: add From<Token> for u8
    // Appends the byte encoding of one operand token: a register is a single
    // byte, an integer operand is two bytes with the high byte pushed first
    // (big-endian u16, truncated from the stored value).
    fn extract_operand(t: &Token, results: &mut Vec<u8>) {
        match t {
            Token::Register(reg_num) => {
                results.push(*reg_num);
            }
            Token::IntegerOperand(value) => {
                let converted = *value as u16;
                let byte1 = converted;        // low byte (after truncation below)
                let byte2 = converted >> 8;   // high byte
                results.push(byte2 as u8);
                results.push(byte1 as u8);
            }
            _ => {
                panic!("Opcode found in operand field");
            }
        };
    }
}

// Per-shape operand parsers. Each takes the partially-built instruction
// (opcode/label already set) plus the remaining input, fills the operand
// slots, and returns the rest of the input.

// <$REGISTER> <#VALUE>
// LOAD $0 #100
fn args_reg_value(
    mut asm_instruction: AssemblerInstruction,
    input: &str,
) -> nom::IResult<&str, AssemblerInstruction> {
    let parser = tuple((
        terminated(register_parser, space1),
        terminated(integer_operand_parser, multispace0),
    ));
    let (input, (register, operand)) = parser(input)?;
    asm_instruction.operand1.replace(register);
    asm_instruction.operand2.replace(operand);
    Ok((input, asm_instruction))
}

// <$REGISTER> <$REGISTER> <$REGISTER>
// ADD $0 $1 $2\n
fn args_reg_reg_reg(
    mut asm_instruction: AssemblerInstruction,
    input: &str,
) -> nom::IResult<&str, AssemblerInstruction> {
    let parser = tuple((
        terminated(register_parser, space1),
        terminated(register_parser, space1),
        terminated(register_parser, multispace0),
    ));
    let (input, (reg1, reg2, reg3)) = parser(input)?;
    asm_instruction.operand1.replace(reg1);
    asm_instruction.operand2.replace(reg2);
    asm_instruction.operand3.replace(reg3);
    Ok((input, asm_instruction))
}

// <$REGISTER> <$REGISTER>
// EQ $0 $1\n
fn args_reg_reg(
    mut asm_instruction: AssemblerInstruction,
    input: &str,
) -> nom::IResult<&str, AssemblerInstruction> {
    let parser = tuple((
        terminated(register_parser, space1),
        terminated(register_parser, multispace0),
    ));
    let (input, (reg1, reg2)) = parser(input)?;
    asm_instruction.operand1.replace(reg1);
    asm_instruction.operand2.replace(reg2);
    Ok((input, asm_instruction))
}

// <OPCODE> <$REGISTER>
// JMP $0 \n
fn args_reg(
    mut asm_instruction: AssemblerInstruction,
    input: &str,
) -> nom::IResult<&str, AssemblerInstruction> {
    let parser = terminated(register_parser, multispace0);
    let (input, reg1) = parser(input)?;
    asm_instruction.operand1.replace(reg1);
    Ok((input, asm_instruction))
}

// // <OPCODE>
// // HLT
// No operands: pass the instruction through untouched.
fn args_none(
    asm_instruction: AssemblerInstruction,
    input: &str,
) -> nom::IResult<&str, AssemblerInstruction> {
    Ok((input, asm_instruction))
}

// Parses one full assembly line: `[label:] OPCODE [operands...]`.
// The opcode determines which `args_*` parser consumes the operands,
// so a malformed operand list for a known opcode is a parse error.
pub fn instruction(input: &str) -> nom::IResult<&str, AssemblerInstruction> {
    let (input, label) = terminated(opt(label_declaration_parser), multispace0)(input)?;
    let (input, opcode) = terminated(opcode_parser, multispace0)(input)?;
    // Dispatch: each opcode knows its operand shape.
    let parser = match &opcode {
        Token::Op(code) => match code {
            Opcode::HLT => args_none,
            Opcode::LOAD => args_reg_value,
            Opcode::ADD => args_reg_reg_reg,
            Opcode::SUB => args_reg_reg_reg,
            Opcode::MUL => args_reg_reg_reg,
            Opcode::DIV => args_reg_reg_reg,
            Opcode::JMP => args_reg,
            Opcode::JMPF => args_reg,
            Opcode::JMPB => args_reg,
            Opcode::JEQ => args_reg,
            Opcode::EQ => args_reg_reg,
            Opcode::NEQ => args_reg_reg,
            Opcode::GT => args_reg_reg,
            Opcode::LT => args_reg_reg,
            Opcode::GTQ => args_reg_reg,
            Opcode::LTQ => args_reg_reg,
            Opcode::IGL(_) => args_none,
        },
        _ => panic!("non Opcode output from opcode parser"),
    };
    let asm_instruction = AssemblerInstruction {
        label,
        action: Action::Opcode(opcode),
        operand1: None,
        operand2: None,
        operand3: None,
    };
    parser(asm_instruction, input)
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::instructions::Opcode;

    #[test]
    fn test_parse_instruction_reg_value() {
        let result = instruction("load $0 #100\n");
        assert_eq!(
            result,
            Ok((
                "",
                AssemblerInstruction {
                    label: None,
                    action: Action::Opcode(Token::Op(Opcode::LOAD)),
                    operand1: Some(Token::Register(0)),
                    operand2: Some(Token::IntegerOperand(100)),
                    operand3: None
                }
            ))
        );
    }

    #[test]
    fn test_parse_instruction_reg_reg_reg() {
        let result = instruction("add: ADD $0 $1 $2\n");
        assert_eq!(
            result,
            Ok((
                "",
                AssemblerInstruction {
                    label: Some(Token::LabelDeclaration("add".to_string())),
                    action: Action::Opcode(Token::Op(Opcode::ADD)),
                    operand1: Some(Token::Register(0)),
                    operand2: Some(Token::Register(1)),
                    operand3: Some(Token::Register(2)),
                }
            ))
        );
    }

    #[test]
    fn test_parse_instruction_no_args() {
        let result = instruction("HLT\n");
        assert_eq!(
            result,
            Ok((
                "",
                AssemblerInstruction {
                    label: None,
                    action: Action::Opcode(Token::Op(Opcode::HLT)),
                    operand1: None,
                    operand2: None,
                    operand3: None,
                }
            ))
        );
    }

    #[test]
    fn test_parse_instruction_reg_value_upper_case() {
        let result = instruction("LOAD $0 #100\n");
        assert_eq!(
            result,
            Ok((
                "",
                AssemblerInstruction {
                    label: None,
                    action: Action::Opcode(Token::Op(Opcode::LOAD)),
                    operand1: Some(Token::Register(0)),
                    operand2: Some(Token::IntegerOperand(100)),
                    operand3: None
                }
            ))
        );
    }

    // TODO: fix using per-opcode parser dispatch
    // as opcode knows args format
    #[test]
    fn test_error_parse_instruction_reg_value_upper_case() {
        let result = instruction("LOAD $0 100\n");
        assert!(result.is_err())
    }

    #[test]
    fn test_parse_instruction_reg() {
        let result = instruction("jump: JMP $9");
        assert_eq!(
            result,
            Ok((
                "",
                AssemblerInstruction {
                    label: Some(Token::LabelDeclaration("jump".to_string())),
                    action: Action::Opcode(Token::Op(Opcode::JMP)),
                    operand1: Some(Token::Register(9)),
                    operand2: None,
                    operand3: None
                }
            ))
        );
    }
}
pub mod texture_register; pub mod graphics;
use ::regex::{escape, Regex}; lazy_static!{ static ref INVALID_MULTILEVEL: Regex = Regex::new("(?:[^/]#|#(?:.+))").unwrap(); static ref INVALID_SINGLELEVEL: Regex = Regex::new(r"(?:[^/]\x2B|\x2B[^/])").unwrap(); } pub struct TopicFilter { matcher: Regex, original: String } impl TopicFilter { pub fn from_string(s: &str) -> Result<TopicFilter> { // See if topic is legal if INVALID_SINGLELEVEL.is_match(s) || INVALID_MULTILEVEL.is_match(s) { bail!(ErrorKind::InvalidTopicFilter); } if s.is_empty() { bail!(ErrorKind::InvalidTopicFilter); } let mut collect: Vec<String> = Vec::new(); for tok in s.split("/") { if tok.contains("+") { collect.push(String::from("[^/]+")); } else if tok.contains("#") { collect.push(String::from("?.*")); } else { collect.push(escape(tok)) } } let match_expr = format!("^{}$", collect.join("/")); Ok(TopicFilter { original: String::from(s), matcher: Regex::new(&match_expr).chain_err(|| ErrorKind::InvalidTopicFilter)? }) } pub fn match_topic(&self, topic: &str) -> bool { self.matcher.is_match(topic) } } #[cfg(test)] mod tests { use super::*; #[test] fn invalid_filter() { let topic = "this/is/#/invalid"; let topic2 = "invalid/filter#"; let res = TopicFilter::from_string(topic); assert!(res.is_err()); let res2 = TopicFilter::from_string(topic2); assert!(res2.is_err()); } #[test] fn simple_filter() { let topic = "this/is/a/filter"; let res = TopicFilter::from_string(topic); assert!(res.is_ok()); let filter = res.unwrap(); assert!(filter.match_topic(topic)); assert!(!filter.match_topic("this/is/wrong")); assert!(!filter.match_topic("/this/is/a/filter")); } #[test] fn single_level_filter() { let filter_str = "this/is/+/level"; let res = TopicFilter::from_string(filter_str); assert!(res.is_ok()); let filter = res.unwrap(); assert!(filter.match_topic("this/is/single/level")); assert!(!filter.match_topic("this/is/not/valid/level")); } #[test] fn complex_single_level_filter() { let filter_str = "+/multi/+/+"; let res = TopicFilter::from_string(filter_str); 
assert!(res.is_ok()); let filter = res.unwrap(); assert!(filter.match_topic("anything/multi/foo/bar")); assert!(!filter.match_topic("not/multi/valid")); } }
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// ignore-cloudabi no processes
// ignore-emscripten no processes

use std::io::ErrorKind;
use std::process::Command;

// Spawning a binary that does not exist must surface `ErrorKind::NotFound`.
fn main() {
    let spawn_result = Command::new("nonexistent").spawn();
    let error = spawn_result.unwrap_err();
    assert_eq!(error.kind(), ErrorKind::NotFound);
}
// Copyright 2023 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::sync::Arc; use chrono::TimeZone; use chrono::Utc; use common_expression as ex; use common_expression::types::NumberDataType; use common_meta_app::schema as mt; use common_meta_app::storage::StorageParams; use maplit::btreemap; use crate::common; // These bytes are built when a new version in introduced, // and are kept for backward compatibility test. // // ************************************************************* // * These messages should never be updated, * // * only be added when a new version is added, * // * or be removed when an old version is no longer supported. 
* // ************************************************************* // // The message bytes are built from the output of `test_build_pb_buf()` #[test] fn test_decode_v24_table_meta() -> anyhow::Result<()> { let bytes = vec![ 10, 169, 5, 10, 51, 10, 8, 110, 117, 108, 108, 97, 98, 108, 101, 18, 5, 97, 32, 43, 32, 51, 26, 26, 178, 2, 17, 154, 2, 8, 42, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 23, 10, 4, 98, 111, 111, 108, 26, 9, 138, 2, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 31, 10, 4, 105, 110, 116, 56, 26, 17, 154, 2, 8, 42, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 32, 10, 5, 105, 110, 116, 49, 54, 26, 17, 154, 2, 8, 50, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 32, 10, 5, 105, 110, 116, 51, 50, 26, 17, 154, 2, 8, 58, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 32, 10, 5, 105, 110, 116, 54, 52, 26, 17, 154, 2, 8, 66, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 32, 10, 5, 117, 105, 110, 116, 56, 26, 17, 154, 2, 8, 10, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 33, 10, 6, 117, 105, 110, 116, 49, 54, 26, 17, 154, 2, 8, 18, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 33, 10, 6, 117, 105, 110, 116, 51, 50, 26, 17, 154, 2, 8, 26, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 33, 10, 6, 117, 105, 110, 116, 54, 52, 26, 17, 154, 2, 8, 34, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 34, 10, 7, 102, 108, 111, 97, 116, 51, 50, 26, 17, 154, 2, 8, 74, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 34, 10, 7, 102, 108, 111, 97, 116, 54, 52, 26, 17, 154, 2, 8, 82, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 23, 10, 4, 100, 97, 116, 101, 26, 9, 170, 2, 0, 160, 6, 
24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 28, 10, 9, 116, 105, 109, 101, 115, 116, 97, 109, 112, 26, 9, 162, 2, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 25, 10, 6, 115, 116, 114, 105, 110, 103, 26, 9, 146, 2, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 63, 10, 6, 115, 116, 114, 117, 99, 116, 26, 47, 202, 2, 38, 10, 3, 102, 111, 111, 10, 3, 98, 97, 114, 18, 9, 138, 2, 0, 160, 6, 24, 168, 6, 24, 18, 9, 146, 2, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 33, 10, 5, 97, 114, 114, 97, 121, 26, 18, 186, 2, 9, 138, 2, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 31, 10, 3, 109, 97, 112, 26, 18, 194, 2, 9, 138, 2, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 10, 26, 10, 7, 118, 97, 114, 105, 97, 110, 116, 26, 9, 210, 2, 0, 160, 6, 24, 168, 6, 24, 160, 6, 24, 168, 6, 24, 18, 6, 10, 1, 97, 18, 1, 98, 160, 6, 24, 168, 6, 24, 34, 10, 40, 97, 32, 43, 32, 50, 44, 32, 98, 41, 42, 10, 10, 3, 120, 121, 122, 18, 3, 102, 111, 111, 50, 2, 52, 52, 58, 10, 10, 3, 97, 98, 99, 18, 3, 100, 101, 102, 64, 0, 74, 10, 40, 97, 32, 43, 32, 50, 44, 32, 98, 41, 82, 23, 110, 101, 118, 101, 114, 45, 103, 111, 110, 110, 97, 45, 103, 105, 118, 101, 45, 121, 111, 117, 45, 117, 112, 162, 1, 23, 50, 48, 49, 52, 45, 49, 49, 45, 50, 56, 32, 49, 50, 58, 48, 48, 58, 48, 57, 32, 85, 84, 67, 170, 1, 23, 50, 48, 49, 52, 45, 49, 49, 45, 50, 57, 32, 49, 50, 58, 48, 48, 58, 49, 48, 32, 85, 84, 67, 178, 1, 13, 116, 97, 98, 108, 101, 95, 99, 111, 109, 109, 101, 110, 116, 186, 1, 6, 160, 6, 24, 168, 6, 24, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 202, 1, 1, 99, 210, 1, 15, 18, 13, 10, 5, 95, 100, 97, 116, 97, 160, 
6, 24, 168, 6, 24, 218, 1, 5, 108, 117, 108, 117, 95, 160, 6, 24, 168, 6, 24, ]; let want = || mt::TableMeta { schema: Arc::new(ex::TableSchema::new_from( vec![ ex::TableField::new( "nullable", ex::TableDataType::Nullable(Box::new(ex::TableDataType::Number( NumberDataType::Int8, ))), ) .with_default_expr(Some("a + 3".to_string())), ex::TableField::new("bool", ex::TableDataType::Boolean), ex::TableField::new("int8", ex::TableDataType::Number(NumberDataType::Int8)), ex::TableField::new("int16", ex::TableDataType::Number(NumberDataType::Int16)), ex::TableField::new("int32", ex::TableDataType::Number(NumberDataType::Int32)), ex::TableField::new("int64", ex::TableDataType::Number(NumberDataType::Int64)), ex::TableField::new("uint8", ex::TableDataType::Number(NumberDataType::UInt8)), ex::TableField::new("uint16", ex::TableDataType::Number(NumberDataType::UInt16)), ex::TableField::new("uint32", ex::TableDataType::Number(NumberDataType::UInt32)), ex::TableField::new("uint64", ex::TableDataType::Number(NumberDataType::UInt64)), ex::TableField::new( "float32", ex::TableDataType::Number(NumberDataType::Float32), ), ex::TableField::new( "float64", ex::TableDataType::Number(NumberDataType::Float64), ), ex::TableField::new("date", ex::TableDataType::Date), ex::TableField::new("timestamp", ex::TableDataType::Timestamp), ex::TableField::new("string", ex::TableDataType::String), ex::TableField::new("struct", ex::TableDataType::Tuple { fields_name: vec![s("foo"), s("bar")], fields_type: vec![ex::TableDataType::Boolean, ex::TableDataType::String], }), ex::TableField::new( "array", ex::TableDataType::Array(Box::new(ex::TableDataType::Boolean)), ), ex::TableField::new( "map", ex::TableDataType::Map(Box::new(ex::TableDataType::Boolean)), ), ex::TableField::new("variant", ex::TableDataType::Variant), ], btreemap! {s("a") => s("b")}, )), catalog: "never-gonna-give-you-up".to_string(), engine: "44".to_string(), engine_options: btreemap! 
{s("abc") => s("def")}, storage_params: Some(StorageParams::default()), part_prefix: "lulu_".to_string(), options: btreemap! {s("xyz") => s("foo")}, default_cluster_key: Some("(a + 2, b)".to_string()), cluster_keys: vec!["(a + 2, b)".to_string()], default_cluster_key_id: Some(0), created_on: Utc.with_ymd_and_hms(2014, 11, 28, 12, 0, 9).unwrap(), updated_on: Utc.with_ymd_and_hms(2014, 11, 29, 12, 0, 10).unwrap(), comment: s("table_comment"), field_comments: vec!["c".to_string(); 21], drop_on: None, statistics: Default::default(), }; common::test_pb_from_to(func_name!(), want())?; common::test_load_old(func_name!(), bytes.as_slice(), 24, want()) } fn s(ss: impl ToString) -> String { ss.to_string() }
use crate::weierstrass::*; use crate::weierstrass::curve::{CurvePoint, WeierstrassCurve}; use crate::representation::{ElementRepr}; use crate::field::*; use crate::public_interface; use crate::public_interface::{ApiError}; use crate::traits::{FieldElement}; use crate::traits::ZeroAndOne; use crate::square_root::*; macro_rules! expand_for_modulus_limbs { ($modulus_limbs: expr, $implementation: tt, $argument: expr, $func: tt) => { match $modulus_limbs { 4 => { $implementation::<U256Repr>::$func(&$argument) }, 5 => { $implementation::<U320Repr>::$func(&$argument) }, 6 => { $implementation::<U384Repr>::$func(&$argument) }, 7 => { $implementation::<U448Repr>::$func(&$argument) }, 8 => { $implementation::<U512Repr>::$func(&$argument) }, 9 => { $implementation::<U576Repr>::$func(&$argument) }, 10 => { $implementation::<U640Repr>::$func(&$argument) }, 11 => { $implementation::<U704Repr>::$func(&$argument) }, 12 => { $implementation::<U768Repr>::$func($argument) }, 13 => { $implementation::<U832Repr>::$func(&$argument) }, 14 => { $implementation::<U896Repr>::$func(&$argument) }, 15 => { $implementation::<U960Repr>::$func(&$argument) }, 16 => { $implementation::<U1024Repr>::$func($argument) }, default => { $implementation::<U1024Repr>::$func($argument) } } } } trait Fuzzer { fn fuzz(bytes: &[u8]) -> Result<(), ApiError>; } // version of Sqrt that return Result instead of Option pub struct Fuzz<FE: ElementRepr> { _marker_fe: std::marker::PhantomData<FE>, } impl<FE: ElementRepr> Fuzzer for Fuzz<FE> { fn fuzz(bytes: &[u8]) -> Result<(), ApiError> { let data = &bytes[1..]; // First Curve (G1) let (_, modulus_len_1, mut modulus, rest) = public_interface::decode_fp::parse_base_field_from_encoding::<FE>(&data)?; modulus.as_mut()[0] = 3; // This is now 3 mod 4 (3&3 == 3) let field = field_from_modulus::<FE>(&modulus).map_err(|_| { ApiError::InputError("Failed to create prime field from modulus".to_owned()) })?; let (a, b, rest) = 
public_interface::decode_g1::parse_ab_in_base_field_from_encoding(&rest, 1, &field)?; let (_order_len, order, rest) = public_interface::decode_g1::parse_group_order_from_encoding(rest)?; let fp_params = CurveOverFpParameters::new(&field); let curve_g1 = WeierstrassCurve::new(&order.as_ref(), a, b, &fp_params).map_err(|_| { ApiError::InputError("Curve shape is not supported".to_owned()) })?; // Second curve (G2) let (_, modulus_len_2, mut modulus, rest) = public_interface::decode_fp::parse_base_field_from_encoding::<FE>(&data)?; modulus.as_mut()[0] = 3; // This is now 3 mod 4 (3&3 == 3) let field = field_from_modulus::<FE>(&modulus).map_err(|_| { ApiError::InputError("Failed to create prime field from modulus".to_owned()) })?; let (extension_2, rest) = public_interface::decode_g2::create_fp2_extension(rest, &modulus, modulus_len_2, &field, false)?; let (a, b, rest) = public_interface::decode_g2::parse_ab_in_fp2_from_encoding(&rest, modulus_len_2, &extension_2)?; let (_order_len, order, rest) = public_interface::decode_g1::parse_group_order_from_encoding(rest)?; let fp2_params = CurveOverFp2Parameters::new(&extension_2); let curve_g2 = WeierstrassCurve::new(&order.as_ref(), a, b, &fp2_params).map_err(|_| { ApiError::InputError("Curve shape is not supported".to_owned()) })?; // First X on G1 (x1_1) let (x1_1, rest) = public_interface::decode_fp::decode_fp(rest, modulus_len_1, curve_g1.params.params())?; // Second X on G1 (x1_2) let (x1_2, rest) = public_interface::decode_fp::decode_fp(rest, modulus_len_1, curve_g1.params.params())?; // First X on G2 (x2_1) let (x2_1, rest) = public_interface::decode_fp::decode_fp2(rest, modulus_len_2, curve_g2.params.params())?; // Second X on G2 (x2_2) let (x2_2, rest) = public_interface::decode_fp::decode_fp2(rest, modulus_len_2, curve_g2.params.params())?; // Multipliers (m_1, m_2) let (m_1, rest) = public_interface::decode_fp::decode_fp(rest, modulus_len_1, curve_g1.params.params())?; let (m_2, _) = 
public_interface::decode_fp::decode_fp(rest, modulus_len_1, curve_g1.params.params())?; // Check Linearity in G1 f(ar1+br2)=af(r1)+bf(r2) let mut x_lhs = x1_1.clone(); x_lhs.mul_assign(&m_1); let mut x_m = x1_2.clone(); x_m.mul_assign(&m_2); x_lhs.add_assign(&x_m); let mut lhs = curve_g1.b.clone(); let mut ax = x_lhs.clone(); ax.mul_assign(&curve_g1.a); lhs.add_assign(&ax); let mut x_3 = x_lhs.clone(); x_3.square(); x_3.mul_assign(&x_lhs); lhs.add_assign(&x_3); lhs = sqrt_result(lhs.clone())?; let mut g_r = curve_g1.b.clone(); let mut ax = x1_1.clone(); ax.mul_assign(&curve_g1.a); g_r.add_assign(&ax); let mut x_3 = x1_1.clone(); x_3.square(); x_3.mul_assign(&x1_1); g_r.add_assign(&x_3); g_r = sqrt_result(g_r.clone())?; g_r.mul_assign(&m_1); let mut g_r2 = curve_g1.b.clone(); let mut ax = x1_2.clone(); ax.mul_assign(&curve_g1.a); g_r.add_assign(&ax); let mut x_3 = x1_2.clone(); x_3.square(); x_3.mul_assign(&x1_2); g_r2.add_assign(&x_3); g_r2 = sqrt_result(g_r2.clone())?; g_r2.mul_assign(&m_2); let mut rhs = g_r2.clone(); rhs.mul_assign(&g_r); assert_eq!(rhs, lhs); // Multipliers (m_1, m_2) let (m_1, rest) = public_interface::decode_fp::decode_fp2(rest, modulus_len_2, curve_g2.params.params())?; let (m_2, _) = public_interface::decode_fp::decode_fp2(rest, modulus_len_2, curve_g2.params.params())?; // Check Linearity in G2 let mut x_lhs = x2_1.clone(); x_lhs.mul_assign(&m_1); let mut x_m = x2_2.clone(); x_m.mul_assign(&m_2); x_lhs.add_assign(&x_m); let mut lhs = curve_g2.b.clone(); let mut ax_g2 = x_lhs.clone(); ax_g2.mul_assign(&curve_g2.a); lhs.add_assign(&ax_g2); let mut x_3 = x_lhs.clone(); x_3.square(); x_3.mul_assign(&x_lhs); lhs.add_assign(&x_3); lhs = sqrt_for_three_mod_four_ext2_result(&lhs.clone())?; let mut g_r = curve_g2.b.clone(); let mut ax = x2_1.clone(); ax.mul_assign(&curve_g2.a); g_r.add_assign(&ax); let mut x_3 = x2_1.clone(); x_3.square(); x_3.mul_assign(&x2_1); g_r.add_assign(&x_3); g_r = sqrt_for_three_mod_four_ext2_result(&g_r.clone())?; 
g_r.mul_assign(&m_1); let mut g_r2 = curve_g2.b.clone(); let mut ax = x2_2.clone(); ax.mul_assign(&curve_g2.a); g_r.add_assign(&ax); let mut x_3 = x2_2.clone(); x_3.square(); x_3.mul_assign(&x2_2); g_r2.add_assign(&x_3); g_r2 = sqrt_for_three_mod_four_ext2_result(&g_r2.clone())?; g_r2.mul_assign(&m_2); let mut rhs = g_r2.clone(); rhs.mul_assign(&g_r); assert_eq!(rhs, lhs); Ok(()) } } pub struct FuzzG1Api; impl Fuzzer for FuzzG1Api { fn fuzz(bytes: &[u8]) -> Result<(), ApiError> { let (_, modulus, _) = public_interface::decode_utils::parse_modulus_and_length(&bytes[1..])?; let modulus_limbs = public_interface::decode_utils::num_limbs_for_modulus(&modulus)?; expand_for_modulus_limbs!(modulus_limbs, Fuzz, bytes, fuzz); Ok(()) } } pub fn run(bytes: &[u8]) -> Result<(), ApiError> { FuzzG1Api::fuzz(bytes) }
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use std::sync::Arc;

use clap::ArgMatches;
use log::*;
use serde_json;
use tantivy::schema::Schema;
use tantivy::Index;

use crate::client::client::create_client;
use crate::server::server::IndexServer;
use crate::util::log::set_logger;

/// CLI entry point for `serve`: reads host/port/cluster arguments, opens or
/// creates the tantivy index under DATA_DIRECTORY, and starts the server.
pub fn run_serve_cli(matches: &ArgMatches) -> Result<(), String> {
    set_logger();

    // NOTE(review): the unwraps assume clap declares these args as required
    // with validated formats — confirm against the CLI definition.
    let host = matches.value_of("HOST").unwrap();
    let port = matches.value_of("PORT").unwrap().parse::<u16>().unwrap();
    let id = matches.value_of("ID").unwrap().parse::<u64>().unwrap();

    // Peers arrive as `id=addr` pairs; build a client per peer.
    // FIX: was `.map(closure).count()` — an iterator consumed purely for its
    // side effects; a plain `for` loop states the intent.
    let mut peers = HashMap::new();
    if let Some(peers_vec) = matches.values_of("PEERS") {
        for s in peers_vec {
            let mut parts = s.split('=');
            let id = parts.next().unwrap().parse::<u64>().unwrap();
            let addr = parts.next().unwrap();
            peers.insert(id, create_client(addr));
        }
    }

    let leader_id = matches
        .value_of("LEADER_ID")
        .unwrap()
        .parse::<u64>()
        .unwrap();
    let data_directory = matches.value_of("DATA_DIRECTORY").unwrap();
    let schema_file = matches.value_of("SCHEMA_FILE").unwrap();
    let unique_key_field_name = matches.value_of("UNIQUE_KEY_FIELD_NAME").unwrap();

    // Best-effort directory creation; failures surface when the index is
    // opened/created below.
    let data_directory_path = Path::new(data_directory);
    fs::create_dir_all(&data_directory_path).unwrap_or_default();
    let raft_path = data_directory_path.join(Path::new("raft"));
    fs::create_dir_all(&raft_path).unwrap_or_default();

    // Open the existing index, or create one from the JSON schema file.
    let index_path = data_directory_path.join(Path::new("index"));
    let index = if index_path.exists() {
        Index::open_in_dir(index_path.to_str().unwrap()).unwrap()
    } else {
        let schema_content = fs::read_to_string(schema_file).unwrap();
        let schema: Schema =
            serde_json::from_str(&schema_content).expect("error while reading json");
        fs::create_dir_all(&index_path).unwrap_or_default();
        Index::create_in_dir(index_path.to_str().unwrap(), schema).unwrap()
    };

    info!("starting a server...");
    debug!("host: {}", host);
    debug!("port: {}", port);
    debug!("data_directory: {}", data_directory);
    debug!("schema_file: {}", schema_file);
    debug!("id: {}", id);

    IndexServer::start_server(
        id,
        host,
        port,
        leader_id,
        peers,
        Arc::new(index),
        unique_key_field_name,
    );

    Ok(())
}
// The MIT License (MIT) // // Copyright (c) 2015 FaultyRAM // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. use std::clone::Clone; use std::io::{Read, Seek}; use super::{ZByte, ZMachine}; /// Provides a `ZMachine` with information on feature support and other /// configuration. #[derive(Debug)] pub struct ZMachineConfig { // Flags 1 (V3) status_line_available: bool, // V3 screen_split_available: bool, // V3 default_font_is_var_pitch: bool, // V3 // Flags 1 (V4+) colors_available: bool, // V5+ pictures_available: bool, // V6 bold_available: bool, // V4+ italic_available: bool, // V4+ fixed_style_available: bool, // V4+ sound_available: bool, // V6 timed_kb_available: bool, // V4+ // Flags 2 undo_available: bool, // V5+ mouse_available: bool, // V5+ menus_available: bool, // V6 default_bg_color: ZByte, // V5+ default_fg_color: ZByte, // V5+ switch_fallback_colors: bool, } impl ZMachineConfig { /// Constructs a new `ZMachineConfig` instance with default values. 
/// /// The default background and foreground colors are set to the color codes /// for black and white, respectively; all other values are set to `false`. pub fn new() -> ZMachineConfig { ZMachineConfig { status_line_available: false, screen_split_available: false, default_font_is_var_pitch: false, colors_available: false, pictures_available: false, bold_available: false, italic_available: false, fixed_style_available: false, sound_available: false, timed_kb_available: false, undo_available: false, mouse_available: false, menus_available: false, default_bg_color: 2, default_fg_color: 9, switch_fallback_colors: false, } } /// Creates a new `ZMachine` instance using the current option values. /// /// # Failures /// This method fails if `buffer` contains something that is not a Z-Code /// file. /// /// # Panics /// This method panics in the event of an I/O error (i.e. attempting to /// seek or read from `buffer` results in failure). pub fn build_zmachine<T>( &self, buffer: &mut T, max_frames: usize, max_elements: usize ) -> Result<ZMachine, String> where T: Read + Seek { ZMachine::new(self, buffer, max_frames, max_elements) } /// Sets the option for the availability of the status line. (V3 only) /// /// If this option is `true`, the `ZMachine` is able to produce a status /// line. Otherwise, no status line is displayed. pub fn v3_status_line(&mut self, value: bool) -> &mut ZMachineConfig { self.status_line_available = value; self } /// Sets the option for the availability of screen splitting. (V3 only) /// /// If this option is `true`, the `ZMachine` is able to split the screen. /// Otherwise, the screen cannot be split (and it is illegal for games to /// attempt such in this situation). pub fn v3_screen_split(&mut self, value: bool) -> &mut ZMachineConfig { self.screen_split_available = value; self } /// Sets the option for the default font style. (V3 only) /// /// If this option is `true`, the default font style is variable-pitch. /// Otherwise, it is fixed-pitch. 
pub fn v3_default_font_is_var_pitch(&mut self, value: bool) -> &mut ZMachineConfig { self.default_font_is_var_pitch = value; self } /// Sets the option for the availability of colors. (V5+) /// /// If this option is `true`, the `ZMachine` is able to display colors /// besides black and white. Otherwise, black and white are the only colors /// supported. pub fn v5_colors(&mut self, value: bool) -> &mut ZMachineConfig { self.colors_available = value; self } /// Sets the option for the availability of pictures. (V6 only) /// /// If this option is `true`, the `ZMachine` is able to display pictures. /// Otherwise, pictures cannot be displayed. pub fn v6_pictures(&mut self, value: bool) -> &mut ZMachineConfig { self.pictures_available = value; self } /// Sets the option for the availability of boldface. (V4+) /// /// If this option is `true`, the `ZMachine` is able to display text in /// boldface. Otherwise, text cannot be displayed in boldface. pub fn v4_bold(&mut self, value: bool) -> &mut ZMachineConfig { self.bold_available = value; self } /// Sets the option for the availability of italics. (V4+) /// /// If this option is `true`, the `ZMachine` is able to display text in /// italics. Otherwise, text cannot be displayed in italics. pub fn v4_italic(&mut self, value: bool) -> &mut ZMachineConfig { self.italic_available = value; self } /// Sets the option for the availability of the fixed-space font style. /// (V4+) /// /// If this option is `true`, the `ZMachine` is able to display text using /// the fixed-space font. Otherwise, the fixed-space style cannot be used. pub fn v4_fixed_font_style(&mut self, value: bool) -> &mut ZMachineConfig { self.fixed_style_available = value; self } /// Sets the option for the availability of sound. (V6 only) /// /// If this option is `true`, the `ZMachine` is able to play sound effects. /// Otherwise, sound effects cannot be played. 
pub fn v6_sound(&mut self, value: bool) -> &mut ZMachineConfig { self.sound_available = value; self } /// Sets the option for the availability of timed keyboard input. (V4+) /// /// If this option is `true`, the `ZMachine` supports timed keyboard input. /// Otherwise, timed keyboard input is not available. pub fn v4_timed_kb_input(&mut self, value: bool) -> &mut ZMachineConfig { self.timed_kb_available = value; self } /// Sets the option for the availability of UNDO opcodes. (V5+) /// /// If this option is `true`, the game is allowed to use the UNDO opcodes /// for undo support. Otherwise, it is illegal to use the UNDO opcodes. pub fn v5_undo(&mut self, value: bool) -> &mut ZMachineConfig { self.undo_available = value; self } /// Sets the option for the availability of mouse features. (V5+) /// /// If this option is `true`, the `ZMachine` allows access to the mouse /// (assuming a mouse-like device is accessible). Otherwise, the mouse is /// not available. pub fn v5_mouse(&mut self, value: bool) -> &mut ZMachineConfig { self.mouse_available = value; self } /// Sets the option for the availability of menus. (V6 only) /// /// If this option is `true`, the `ZMachine` provides support for menus. /// Otherwise, menus are not available. pub fn v6_menus(&mut self, value: bool) -> &mut ZMachineConfig { self.menus_available = value; self } /// Sets the option for the default background color. (V5+) /// /// If the `ZMachine` is configured for color support, the default /// background color is configured according to the given index. Otherwise, /// the default background color is black. pub fn v5_default_bg_color(&mut self, value: ZByte) -> &mut ZMachineConfig { self.default_bg_color = value; self } /// Sets the option for the default foreground color. (V5+) /// /// If the `ZMachine` is configured for color support, the default /// foreground color is configured according to the given index. Otherwise, /// the default foreground color is white. 
pub fn v5_default_fg_color(&mut self, value: ZByte) -> &mut ZMachineConfig { self.default_fg_color = value; self } /// Sets the option for switching the fallback colors. /// /// If this option is `true`, and the `ZMachine` is *not* configured for /// color support, the fallback colors are switched: the default background /// and foreground colors will be white and black, respectively, instead of /// black and white. pub fn v1_switch_fallback_colors(&mut self, value: bool) -> &mut ZMachineConfig { self.switch_fallback_colors = value; self } } impl Clone for ZMachineConfig { fn clone(&self) -> ZMachineConfig { ZMachineConfig { status_line_available: self.status_line_available, screen_split_available: self.screen_split_available, default_font_is_var_pitch: self.default_font_is_var_pitch, colors_available: self.colors_available, pictures_available: self.pictures_available, bold_available: self.bold_available, italic_available: self.italic_available, fixed_style_available: self.fixed_style_available, sound_available: self.sound_available, timed_kb_available: self.timed_kb_available, undo_available: self.undo_available, mouse_available: self.mouse_available, menus_available: self.menus_available, default_bg_color: self.default_bg_color, default_fg_color: self.default_fg_color, switch_fallback_colors: self.switch_fallback_colors, } } }
use proconio::input; use proconio::marker::Usize1; use std::collections::VecDeque; fn main() { input! { n: usize, m: usize, a: [u64; n], edges: [(Usize1, Usize1); m], }; let mut g = vec![vec![]; n]; for (u, v) in edges { g[u].push(v); g[v].push(u); } let mut cost = vec![0; n]; for i in 0..n { for &j in &g[i] { cost[i] += a[j]; } } if cost.iter().all(|&c| c == 0) { println!("0"); return; } let mut ng = 0; let mut ok = a.iter().sum::<u64>() + 1; while ok - ng > 1 { let mid = (ok + ng) / 2; let mut cost = cost.clone(); let mut seen = vec![false; n]; let mut heap = VecDeque::new(); for i in 0..n { if cost[i] <= mid { seen[i] = true; heap.push_back(i); cost[i] = 0; // ?? } } let mut remove_count = 0; while let Some(i) = heap.pop_front() { remove_count += 1; for &j in &g[i] { if seen[j] { continue; } assert!(cost[j] >= a[i]); cost[j] -= a[i]; if cost[j] <= mid { seen[j] = true; heap.push_back(j); cost[j] = 0; } } } if remove_count == n { ok = mid; } else { ng = mid; } } println!("{}", ok); }
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ignore-cloudabi no processes // ignore-emscripten no processes use std::env; use std::io::prelude::*; use std::io; use std::process::{Command, Stdio}; fn main() { let args: Vec<String> = env::args().collect(); if args.len() > 1 && args[1] == "child" { return child() } test(); } fn child() { writeln!(&mut io::stdout(), "foo").unwrap(); writeln!(&mut io::stderr(), "bar").unwrap(); let mut stdin = io::stdin(); let mut s = String::new(); stdin.lock().read_line(&mut s).unwrap(); assert_eq!(s.len(), 0); } fn test() { let args: Vec<String> = env::args().collect(); let mut p = Command::new(&args[0]).arg("child") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn().unwrap(); assert!(p.wait().unwrap().success()); }
extern crate glfw; use glfw::{Key, Action}; use cgmath::{Vector3, vec3}; pub struct Camera { position: Vector3<f32>, pitch: f32, yaw: f32, roll: f32, } impl Camera { pub fn new() -> Camera { Camera { position: vec3(0.0, 0.0, 0.0), pitch: 0.0, yaw: 0.0, roll: 0.0, } } pub fn move_camera(&mut self, window: &glfw::Window) { if window.get_key(Key::W) == Action::Press { self.position.z -= 0.02; } if window.get_key(Key::D) == Action::Press { self.position.x += 0.02; } if window.get_key(Key::A) == Action::Press { self.position.x -= 0.02; } } pub fn get_position(&self) -> &Vector3<f32> { &self.position } pub fn get_pitch(&self) -> &f32 { &self.pitch } pub fn get_yaw(&self) -> &f32 { &self.yaw } pub fn get_roll(&self) -> &f32 { &self.roll } }
use rand; use rand::RngCore; use std::ffi::OsString; use std::path::{Path, PathBuf}; use std::{io, iter}; use error::IoResultExt; fn tmpname(prefix: &str, suffix: &str, rand_len: usize) -> OsString { let mut buf = String::with_capacity(prefix.len() + suffix.len() + rand_len); buf.push_str(prefix); buf.extend(iter::repeat('X').take(rand_len)); buf.push_str(suffix); // Randomize. unsafe { // We guarantee utf8. let bytes = &mut buf.as_mut_vec()[prefix.len()..prefix.len() + rand_len]; rand::thread_rng().fill_bytes(bytes); for byte in bytes.iter_mut() { *byte = match *byte % 62 { v @ 0...9 => (v + b'0'), v @ 10...35 => (v - 10 + b'a'), v @ 36...61 => (v - 36 + b'A'), _ => unreachable!(), } } } OsString::from(buf) } pub fn create_helper<F, R>( base: &Path, prefix: &str, suffix: &str, random_len: usize, f: F, ) -> io::Result<R> where F: Fn(PathBuf) -> io::Result<R>, { let num_retries = if random_len != 0 { ::NUM_RETRIES } else { 1 }; for _ in 0..num_retries { let path = base.join(tmpname(prefix, suffix, random_len)); return match f(path) { Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => continue, res => res, }; } Err(io::Error::new( io::ErrorKind::AlreadyExists, "too many temporary files exist")) .with_err_path(|| base) }
use std::sync::mpsc::{Receiver, Sender};

use serial::core::BaudRate::Baud115200;
use serial::unix::TTYPort;

mod port_handle;

use crate::probe::port_handle::Handle;

#[cfg(test)]
mod tests;

// Arduino-style pin identifiers. Discriminants are the numeric pin indices
// used on the wire: digital pins D0-D13 map to 0-13, analog pins A0-A5 to
// 14-19.
#[allow(unused)]
pub enum Pin {
    D0 = 0,
    D1 = 1,
    D2 = 2,
    D3 = 3,
    D4 = 4,
    D5 = 5,
    D6 = 6,
    D7 = 7,
    D8 = 8,
    D9 = 9,
    D10 = 10,
    D11 = 11,
    D12 = 12,
    D13 = 13,
    A0 = 14,
    A1 = 15,
    A2 = 16,
    A3 = 17,
    A4 = 18,
    A5 = 19,
}

// Sampling mode for a single pin.
#[allow(unused)]
#[derive(Clone,Copy)]
pub enum PinMode {
    Off = 0,
    Digital = 1,
    Analog = 2,
}

impl PinMode {
    // Wire-format character for this mode, as used in the pin-configuration
    // string ('o'/'d'/'a').
    fn as_char(&self) -> char {
        match self {
            PinMode::Off => 'o',
            PinMode::Digital => 'd',
            PinMode::Analog => 'a',
        }
    }
}

// Mode assignment for all 20 pins (indexed by the `Pin` discriminant).
pub struct PinConfig {
    pins: [PinMode; 20],
}

#[allow(unused)]
impl PinConfig {
    // All pins start out `Off`.
    pub fn new() -> PinConfig {
        PinConfig { pins: [PinMode::Off; 20] }
    }

    pub fn pin_mode(&mut self, pin: Pin, pin_mode: PinMode) {
        self.pins[pin as usize] = pin_mode;
    }

    pub fn get_pin_mode(&self, pin: Pin) -> PinMode {
        self.pins[pin as usize]
    }

    // Serializes the configuration to the wire format: comma-separated
    // `<index>=<mode char>` pairs, e.g. "0=o,1=d,...".
    fn as_string(&self) -> String {
        let mut response = String::with_capacity(90);
        let mut first = true;
        for (pin, mode) in self.pins.iter().enumerate() {
            if !first {
                response.push(',');
            }
            first = false;
            response.push_str(format!("{}={}", pin, mode.as_char()).as_str());
        }
        response
    }
}

// Commands accepted by the serial-port worker thread.
pub enum ProbeMessage {
    Reset,
    ConfigurePins(PinConfig),
    DumpPins,
    StartCapture,
    StopCapture,
}

// Handle to a logic probe driven over a serial TTY at 115200 baud. Commands
// go out on `command_tx`; responses come back as strings on `data_rx`.
pub struct Probe {
    command_tx: Sender<ProbeMessage>,
    data_rx: Receiver<String>,
}

impl Probe {
    // Spawns the port worker for the device at `path`.
    pub fn new(path: &'static str) -> Probe {
        let (command_tx, data_rx) = Handle::<TTYPort>::spawn(path, Baud115200);
        Probe { command_tx, data_rx }
    }

    // Resets the probe and blocks until the worker acknowledges.
    pub fn reset(&self) {
        self.command_tx.send(ProbeMessage::Reset).expect("Link broken");
        self.data_rx.recv().expect("Broken link");
    }

    // Sends a pin configuration and blocks until acknowledged.
    pub fn configure_pins(&self, pins: PinConfig) {
        self.command_tx.send(ProbeMessage::ConfigurePins(pins)).expect("Link broken");
        self.data_rx.recv().expect("Broken link");
    }

    // Returns the probe's raw pin-state dump, or an empty string if the
    // worker hung up before replying.
    pub fn dump_pins(&self) -> String {
        self.command_tx.send(ProbeMessage::DumpPins).expect("Link broken");
        match self.data_rx.recv() {
            Ok(res) => res,
            _ => String::new()
        }
    }

    // Begins capturing; fire-and-forget (no acknowledgement is awaited).
    pub fn start_capture(&self) {
        self.command_tx.send(ProbeMessage::StartCapture).expect("Link broken");
    }

    // Stops capturing and parses the buffered capture: one line per sample,
    // comma-separated u16 columns. Blank lines are mapped to a single 65535
    // sentinel row and then filtered out by the trailing `filter`.
    // NOTE(review): `from_str_radix(...).unwrap()` panics on malformed
    // columns — assumes the firmware always emits valid decimal; confirm.
    pub fn stop_capture(&self) -> Vec<Vec<u16>> {
        self.command_tx.send(ProbeMessage::StopCapture).expect("Link broken");
        let capture = self.data_rx.recv().expect("Link broken");
        let lines: Vec<&str> = capture.split("\n").collect::<Vec<&str>>();
        lines.iter().map(|line| {
            if line.len()> 0 {
                line.split(",").map(|column| {
                    u16::from_str_radix(column, 10).unwrap()
                }).collect::<Vec<u16>>()
            } else {
                vec![65535u16]
            }
        }).filter(|row|
            row.len() > 1 || row[0] != 65535
        ).collect()
    }
}
use eosio::AccountName; use structopt::StructOpt; /// Create various items, on and off the blockchain #[derive(StructOpt, Debug)] pub enum Create { /// Create a new keypair and print the public and private keys Key(CreateKey), /// Create a new account on the blockchain (assumes system contract does not /// restrict RAM usage) Account(CreateAccount), } /// Create a new keypair and print the public and private keys #[derive(StructOpt, Debug)] pub struct CreateKey { /// Generate a key using the R1 curve (iPhone), instead of the K1 curve (Bitcoin) #[structopt(long)] pub r1: bool, /// Name of file to write private/public key output to. (Must be set, unless /// "--to-console" is passed) #[structopt(short, long, required_unless = "to-console")] pub file: Option<String>, /// Print private/public keys to console. #[structopt(long)] pub to_console: bool, } /// Create a new account on the blockchain (assumes system contract does not /// restrict RAM usage) #[derive(StructOpt, Debug)] pub struct CreateAccount { /// The name of the account creating the new account pub creator: AccountName, /// The name of the new account pub name: AccountName, /// The owner public key for the new account pub owner_key: String, /// The active public key for the new account pub active_key: Option<String>, #[structopt(flatten)] pub transaction_opts: super::TransactionOpts, }
#![warn(clippy::all)] // #![deny(missing_docs)] <-- Useful if you wanna make sure to document everything // Note, external crates are found by Cargo, not by "extern crate" // And the graphics the OpenGL version to use as backend use opengl_graphics::OpenGL; // Change this to OpenGL::V2_1 if not working. const OPENGL_VERSION: glutin_window::OpenGL = OpenGL::V3_2; // Import the objects we need mod application; mod renderer; mod user_interface; mod traits; mod audio; use application::App; fn main() { // Create a new game and run it. let mut app = App::boot(OPENGL_VERSION); // Let the infinity begin! app.main_loop(); }
fn main() { let mut v1 = vec![1,2,3]; let x = v1.pop() + 10; println!("{}", x); }
use std::str::FromStr; use std::io::{stdout, stderr}; extern crate argparse; use argparse::{ArgumentParser, StoreTrue, Store, List}; #[allow(non_camel_case_types)] #[derive(Debug)] enum Command { play, record, } impl FromStr for Command { type Err = (); fn from_str(src: &str) -> Result<Command, ()> { return match src { "play" => Ok(Command::play), "record" => Ok(Command::record), _ => Err(()), }; } } fn play_command(verbose: bool, args: Vec<String>) { let mut output = "".to_string(); { let mut ap = ArgumentParser::new(); ap.set_description("Plays a sound"); ap.refer(&mut output) .add_option(&["--output"], Store, r#"Output sink to play to"#); match ap.parse(args, &mut stdout(), &mut stderr()) { Ok(()) => {} Err(x) => { std::process::exit(x); } } } println!("Verbosity: {}, Output: {}", verbose, output); } fn record_command(verbose: bool, args: Vec<String>) { let mut input = "".to_string(); { let mut ap = ArgumentParser::new(); ap.set_description("Records a sound"); ap.refer(&mut input) .add_option(&["--input"], Store, r#"Output source to record from"#); match ap.parse(args, &mut stdout(), &mut stderr()) { Ok(()) => {} Err(x) => { std::process::exit(x); } } } println!("Verbosity: {}, Input: {}", verbose, input); } fn main() { let mut verbose = false; let mut subcommand = Command::play; let mut args = vec!(); { let mut ap = ArgumentParser::new(); ap.set_description("Plays or records sound"); ap.refer(&mut verbose) .add_option(&["-v", "--verbose"], StoreTrue, "Be verbose"); ap.refer(&mut subcommand).required() .add_argument("command", Store, r#"Command to run (either "play" or "record")"#); ap.refer(&mut args) .add_argument("arguments", List, r#"Arguments for command"#); ap.stop_on_first_argument(true); ap.parse_args_or_exit(); } args.insert(0, format!("subcommand {:?}", subcommand)); match subcommand { Command::play => play_command(verbose, args), Command::record => record_command(verbose, args), } }
use crate::descriptor::{Database, EnumDescriptor, FileDescriptor, MessageDescriptor};
use crate::pecan_descriptor::google::protobuf::compiler::plugin_pb;
use id_arena::Id;
use pecan::Message;
use pecan_utils::naming;
use std::collections::HashMap;
use std::fmt::{self, Write};

/// Line-oriented code writer that prepends the current indentation prefix at
/// the start of every emitted line.
pub struct Printer<'a> {
    // Destination buffer (the generated file's contents).
    output: &'a mut Vec<u8>,
    // Current indentation prefix, grown/shrunk by `indent`/`outdent`.
    indent: Vec<u8>,
    // True when the next write begins a new line (indent must be inserted).
    at_line_start: bool,
}

impl Printer<'_> {
    /// Creates a printer that appends to `output` with no initial indent.
    fn new(output: &mut Vec<u8>) -> Printer {
        Printer {
            output,
            indent: Vec::new(),
            at_line_start: true,
        }
    }

    /// Appends `bytes` verbatim, inserting the indent prefix first if we are
    /// at the start of a line. A chunk that begins with '\n' deliberately
    /// skips the indent so blank lines stay empty.
    pub fn write_raw(&mut self, bytes: &[u8]) {
        if bytes.is_empty() {
            return;
        }
        if bytes[0] != b'\n' && self.at_line_start {
            self.output.extend_from_slice(&self.indent);
            self.at_line_start = false;
        }
        self.output.extend_from_slice(bytes);
    }

    /// Increases the indentation by one level.
    // NOTE(review): the literal below appears as a single space but `outdent`
    // removes 4 bytes — this looks like whitespace mangling in the extraction;
    // presumably the original is a 4-space literal. Verify against upstream.
    pub fn indent(&mut self) {
        self.indent.extend_from_slice(b" ");
    }

    /// Decreases the indentation by one level.
    // NOTE(review): `len() - 4` underflows (panics in debug builds) if called
    // more times than `indent` — TODO confirm all callers balance the pair.
    pub fn outdent(&mut self) {
        let new_len = self.indent.len() - 4;
        self.indent.truncate(new_len)
    }
}

impl Write for Printer<'_> {
    /// Splits `s` on '\n' and routes each piece through `write_raw` so the
    /// indent prefix is applied per line; with no indent active the string is
    /// copied through in one shot.
    fn write_str(&mut self, s: &str) -> fmt::Result {
        if self.indent.is_empty() {
            self.output.extend_from_slice(s.as_bytes());
            return Ok(());
        }
        let mut start = 0;
        for (i, b) in s.bytes().enumerate() {
            if b == b'\n' {
                // Emit up to and including the newline, then mark line start.
                self.write_raw(&s.as_bytes()[start..=i]);
                self.at_line_start = true;
                start = i + 1;
            }
        }
        // Trailing piece after the last newline (may be empty).
        self.write_raw(&s.as_bytes()[start..]);
        Ok(())
    }
}

/// Code-generation context: the descriptor database plus lookup tables from
/// fully-qualified proto names to arena ids, and the list of files the
/// compiler asked us to generate.
pub struct Context {
    pub db: Database,
    pub files: HashMap<String, Id<FileDescriptor>>,
    pub message_address: HashMap<String, Id<MessageDescriptor>>,
    pub enum_address: HashMap<String, Id<EnumDescriptor>>,
    pub to_generate: Vec<Id<FileDescriptor>>,
}

impl Context {
    /// Builds the context from a protoc CodeGeneratorRequest: registers every
    /// file, message and enum descriptor under its fully-qualified name, then
    /// records which files were explicitly requested for generation.
    ///
    /// Progress is logged to stderr (stdout carries the plugin response).
    pub fn new(mut request: plugin_pb::CodeGeneratorRequest) -> Context {
        let mut db = Database::with_capacity(request.proto_file().len());
        let mut files = HashMap::with_capacity(request.proto_file().len());
        let mut message_address = HashMap::with_capacity(request.proto_file().len());
        let mut enum_address = HashMap::with_capacity(request.proto_file().len());
        // Drain so each FileDescriptorProto is moved into the database.
        for file in request.proto_file_mut().drain(..) {
            let name = file.name().to_owned();
            let fd = FileDescriptor::new(&mut db, file);
            files.insert(name, fd);
            let f = db.files.get(fd).unwrap();
            let pkg_name = f.proto.package();
            // Index every message by its package-qualified full name.
            for msg_id in &f.messages {
                let msg = db.messages.get(*msg_id).unwrap();
                let full_name = naming::full_name(pkg_name, &msg.type_name);
                eprintln!("register {}", full_name);
                message_address.insert(full_name, *msg_id);
            }
            // Index every enum the same way.
            for enum_id in &f.enums {
                let e = db.enums.get(*enum_id).unwrap();
                let full_name = naming::full_name(pkg_name, &e.type_name);
                eprintln!("register {}", full_name);
                enum_address.insert(full_name, *enum_id);
            }
        }
        let mut to_generate = Vec::with_capacity(request.file_to_generate().len());
        for f in request.file_to_generate() {
            eprintln!("mark {} to generate", f);
            // `unwrap`: protoc guarantees every file_to_generate entry is
            // also present in proto_file, so the lookup cannot miss.
            to_generate.push(files.get(f).unwrap().clone());
        }
        Context {
            db,
            files,
            to_generate,
            message_address,
            enum_address,
        }
    }
}

/// Accumulates the plugin's CodeGeneratorResponse (one entry per emitted
/// file) and serializes it for protoc.
#[derive(Default)]
pub struct Output {
    response: plugin_pb::CodeGeneratorResponse,
}

impl Output {
    /// Registers a new output file named `file_name` and returns a `Printer`
    /// writing directly into that file's content string.
    pub fn open(&mut self, file_name: &str) -> Printer {
        let mut f = plugin_pb::CodeGeneratorResponseNestedFile::default();
        f.set_name(file_name.to_owned());
        self.response.file_mut().push(f);
        // SAFETY: `as_mut_vec` exposes the String's raw byte buffer; this is
        // sound only if every byte written through the Printer is valid
        // UTF-8 — presumably all generated source is ASCII/UTF-8. TODO(review):
        // confirm no code path writes arbitrary binary here.
        Printer::new(unsafe {
            self.response
                .file_mut()
                .last_mut()
                .unwrap()
                .content_mut()
                .as_mut_vec()
        })
    }

    /// Serializes the accumulated response to protobuf wire format.
    ///
    /// `unwrap` is acceptable here: writing into an in-memory Vec cannot
    /// fail with I/O errors under normal operation.
    pub fn to_bytes(&self) -> Vec<u8> {
        let buffer = vec![];
        let mut s = pecan::CodedOutputStream::new(buffer);
        self.response.write_to(&mut s).unwrap();
        s.into_inner().unwrap()
    }
}
#[doc = r"Register block"] #[repr(C)] pub struct RegisterBlock { #[doc = "0x00 - Timerx Control Register"] pub timbcr: TIMBCR, #[doc = "0x04 - Timerx Interrupt Status Register"] pub timbisr: TIMBISR, #[doc = "0x08 - Timerx Interrupt Clear Register"] pub timbicr: TIMBICR, #[doc = "0x0c - TIMxDIER5"] pub timbdier5: TIMBDIER5, #[doc = "0x10 - Timerx Counter Register"] pub cntr: CNTR, #[doc = "0x14 - Timerx Period Register"] pub perbr: PERBR, #[doc = "0x18 - Timerx Repetition Register"] pub repbr: REPBR, #[doc = "0x1c - Timerx Compare 1 Register"] pub cmp1br: CMP1BR, #[doc = "0x20 - Timerx Compare 1 Compound Register"] pub cmp1cbr: CMP1CBR, #[doc = "0x24 - Timerx Compare 2 Register"] pub cmp2br: CMP2BR, #[doc = "0x28 - Timerx Compare 3 Register"] pub cmp3br: CMP3BR, #[doc = "0x2c - Timerx Compare 4 Register"] pub cmp4br: CMP4BR, #[doc = "0x30 - Timerx Capture 1 Register"] pub cpt1br: CPT1BR, #[doc = "0x34 - Timerx Capture 2 Register"] pub cpt2br: CPT2BR, #[doc = "0x38 - Timerx Deadtime Register"] pub dtbr: DTBR, #[doc = "0x3c - Timerx Output1 Set Register"] pub setb1r: SETB1R, #[doc = "0x40 - Timerx Output1 Reset Register"] pub rstb1r: RSTB1R, #[doc = "0x44 - Timerx Output2 Set Register"] pub setb2r: SETB2R, #[doc = "0x48 - Timerx Output2 Reset Register"] pub rstb2r: RSTB2R, #[doc = "0x4c - Timerx External Event Filtering Register 1"] pub eefbr1: EEFBR1, #[doc = "0x50 - Timerx External Event Filtering Register 2"] pub eefbr2: EEFBR2, #[doc = "0x54 - TimerA Reset Register"] pub rstbr: RSTBR, #[doc = "0x58 - Timerx Chopper Register"] pub chpbr: CHPBR, #[doc = "0x5c - Timerx Capture 2 Control Register"] pub cpt1bcr: CPT1BCR, #[doc = "0x60 - CPT2xCR"] pub cpt2bcr: CPT2BCR, #[doc = "0x64 - Timerx Output Register"] pub outbr: OUTBR, #[doc = "0x68 - Timerx Fault Register"] pub fltbr: FLTBR, } #[doc = "Timerx Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), 
[`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [timbcr](timbcr) module"] pub type TIMBCR = crate::Reg<u32, _TIMBCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _TIMBCR; #[doc = "`read()` method returns [timbcr::R](timbcr::R) reader structure"] impl crate::Readable for TIMBCR {} #[doc = "`write(|w| ..)` method takes [timbcr::W](timbcr::W) writer structure"] impl crate::Writable for TIMBCR {} #[doc = "Timerx Control Register"] pub mod timbcr; #[doc = "Timerx Interrupt Status Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [timbisr](timbisr) module"] pub type TIMBISR = crate::Reg<u32, _TIMBISR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _TIMBISR; #[doc = "`read()` method returns [timbisr::R](timbisr::R) reader structure"] impl crate::Readable for TIMBISR {} #[doc = "Timerx Interrupt Status Register"] pub mod timbisr; #[doc = "Timerx Interrupt Clear Register\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [timbicr](timbicr) module"] pub type TIMBICR = crate::Reg<u32, _TIMBICR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _TIMBICR; #[doc = "`write(|w| ..)` method takes [timbicr::W](timbicr::W) writer structure"] impl crate::Writable for TIMBICR {} #[doc = "Timerx Interrupt Clear Register"] pub mod timbicr; #[doc = "TIMxDIER5\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [timbdier5](timbdier5) module"] pub type TIMBDIER5 = crate::Reg<u32, _TIMBDIER5>; #[allow(missing_docs)] #[doc(hidden)] pub struct _TIMBDIER5; #[doc = "`read()` method returns [timbdier5::R](timbdier5::R) reader structure"] impl crate::Readable for TIMBDIER5 {} #[doc = "`write(|w| ..)` method takes [timbdier5::W](timbdier5::W) writer structure"] impl crate::Writable for TIMBDIER5 {} #[doc = "TIMxDIER5"] pub mod timbdier5; #[doc = "Timerx Counter Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cntr](cntr) module"] pub type CNTR = crate::Reg<u32, _CNTR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CNTR; #[doc = "`read()` method returns [cntr::R](cntr::R) reader structure"] impl crate::Readable for CNTR {} #[doc = "`write(|w| ..)` method takes [cntr::W](cntr::W) writer structure"] impl crate::Writable for CNTR {} #[doc = "Timerx Counter Register"] pub mod cntr; #[doc = "Timerx Period Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [perbr](perbr) module"] pub type PERBR = crate::Reg<u32, _PERBR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _PERBR; #[doc = "`read()` method returns [perbr::R](perbr::R) reader structure"] impl crate::Readable for PERBR {} #[doc = "`write(|w| ..)` method takes [perbr::W](perbr::W) writer structure"] impl crate::Writable for PERBR {} #[doc = "Timerx Period Register"] pub mod perbr; #[doc = "Timerx Repetition Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [repbr](repbr) module"] pub type REPBR = crate::Reg<u32, _REPBR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _REPBR; #[doc = "`read()` method returns [repbr::R](repbr::R) reader structure"] impl crate::Readable for REPBR {} #[doc = "`write(|w| ..)` method takes [repbr::W](repbr::W) writer structure"] impl crate::Writable for REPBR {} #[doc = "Timerx Repetition Register"] pub mod repbr; #[doc = "Timerx Compare 1 Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cmp1br](cmp1br) module"] pub type CMP1BR = crate::Reg<u32, _CMP1BR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CMP1BR; #[doc = "`read()` method returns [cmp1br::R](cmp1br::R) reader structure"] impl crate::Readable for CMP1BR {} #[doc = "`write(|w| ..)` method takes [cmp1br::W](cmp1br::W) writer structure"] impl crate::Writable for CMP1BR {} #[doc = "Timerx Compare 1 Register"] pub mod cmp1br; #[doc = "Timerx Compare 1 Compound Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cmp1cbr](cmp1cbr) module"] pub type CMP1CBR = crate::Reg<u32, _CMP1CBR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CMP1CBR; #[doc = "`read()` method returns [cmp1cbr::R](cmp1cbr::R) reader structure"] impl crate::Readable for CMP1CBR {} #[doc = "`write(|w| ..)` method takes [cmp1cbr::W](cmp1cbr::W) writer structure"] impl crate::Writable for CMP1CBR {} #[doc = "Timerx Compare 1 Compound Register"] pub mod cmp1cbr; #[doc = "Timerx Compare 2 Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cmp2br](cmp2br) module"] pub type CMP2BR = crate::Reg<u32, _CMP2BR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CMP2BR; #[doc = "`read()` method returns [cmp2br::R](cmp2br::R) reader structure"] impl crate::Readable for CMP2BR {} #[doc = "`write(|w| ..)` method takes [cmp2br::W](cmp2br::W) writer structure"] impl crate::Writable for CMP2BR {} #[doc = "Timerx Compare 2 Register"] pub mod cmp2br; #[doc = "Timerx Compare 3 Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cmp3br](cmp3br) module"] pub type CMP3BR = crate::Reg<u32, _CMP3BR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CMP3BR; #[doc = "`read()` method returns [cmp3br::R](cmp3br::R) reader structure"] impl crate::Readable for CMP3BR {} #[doc = "`write(|w| ..)` method takes [cmp3br::W](cmp3br::W) writer structure"] impl crate::Writable for CMP3BR {} #[doc = "Timerx Compare 3 Register"] pub mod cmp3br; #[doc = "Timerx Compare 4 Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cmp4br](cmp4br) module"] pub type CMP4BR = crate::Reg<u32, _CMP4BR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CMP4BR; #[doc = "`read()` method returns [cmp4br::R](cmp4br::R) reader structure"] impl crate::Readable for CMP4BR {} #[doc = "`write(|w| ..)` method takes [cmp4br::W](cmp4br::W) writer structure"] impl crate::Writable for CMP4BR {} #[doc = "Timerx Compare 4 Register"] pub mod cmp4br; #[doc = "Timerx Capture 1 Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cpt1br](cpt1br) module"] pub type CPT1BR = crate::Reg<u32, _CPT1BR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CPT1BR; #[doc = "`read()` method returns [cpt1br::R](cpt1br::R) reader structure"] impl crate::Readable for CPT1BR {} #[doc = "Timerx Capture 1 Register"] pub mod cpt1br; #[doc = "Timerx Capture 2 Register\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cpt2br](cpt2br) module"] pub type CPT2BR = crate::Reg<u32, _CPT2BR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CPT2BR; #[doc = "`read()` method returns [cpt2br::R](cpt2br::R) reader structure"] impl crate::Readable for CPT2BR {} #[doc = "Timerx Capture 2 Register"] pub mod cpt2br; #[doc = "Timerx Deadtime Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dtbr](dtbr) module"] pub type DTBR = crate::Reg<u32, _DTBR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _DTBR; #[doc = "`read()` method returns [dtbr::R](dtbr::R) reader structure"] impl crate::Readable for DTBR {} #[doc = "`write(|w| ..)` method takes [dtbr::W](dtbr::W) writer structure"] impl crate::Writable for DTBR {} #[doc = "Timerx Deadtime Register"] pub mod dtbr; #[doc = "Timerx Output1 Set Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [setb1r](setb1r) module"] pub type SETB1R = crate::Reg<u32, _SETB1R>; #[allow(missing_docs)] #[doc(hidden)] pub struct _SETB1R; #[doc = "`read()` method returns [setb1r::R](setb1r::R) reader structure"] impl crate::Readable for SETB1R {} #[doc = "`write(|w| ..)` method takes [setb1r::W](setb1r::W) writer structure"] impl crate::Writable for SETB1R {} #[doc = "Timerx Output1 Set Register"] pub mod setb1r; #[doc = "Timerx Output1 Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rstb1r](rstb1r) module"] pub type RSTB1R = crate::Reg<u32, _RSTB1R>; #[allow(missing_docs)] #[doc(hidden)] pub struct _RSTB1R; #[doc = "`read()` method returns [rstb1r::R](rstb1r::R) reader structure"] impl crate::Readable for RSTB1R {} #[doc = "`write(|w| ..)` method takes [rstb1r::W](rstb1r::W) writer structure"] impl crate::Writable for RSTB1R {} #[doc = "Timerx Output1 Reset Register"] pub mod rstb1r; #[doc = "Timerx Output2 Set Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [setb2r](setb2r) module"] pub type SETB2R = crate::Reg<u32, _SETB2R>; #[allow(missing_docs)] #[doc(hidden)] pub struct _SETB2R; #[doc = "`read()` method returns [setb2r::R](setb2r::R) reader structure"] impl crate::Readable for SETB2R {} #[doc = "`write(|w| ..)` method takes [setb2r::W](setb2r::W) writer structure"] impl crate::Writable for SETB2R {} #[doc = "Timerx Output2 Set Register"] pub mod setb2r; #[doc = "Timerx Output2 Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rstb2r](rstb2r) module"] pub type RSTB2R = crate::Reg<u32, _RSTB2R>; #[allow(missing_docs)] #[doc(hidden)] pub struct _RSTB2R; #[doc = "`read()` method returns [rstb2r::R](rstb2r::R) reader structure"] impl crate::Readable for RSTB2R {} #[doc = "`write(|w| ..)` method takes [rstb2r::W](rstb2r::W) writer structure"] impl crate::Writable for RSTB2R {} #[doc = "Timerx Output2 Reset Register"] pub mod rstb2r; #[doc = "Timerx External Event Filtering Register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [eefbr1](eefbr1) module"] pub type EEFBR1 = crate::Reg<u32, _EEFBR1>; #[allow(missing_docs)] #[doc(hidden)] pub struct _EEFBR1; #[doc = "`read()` method returns [eefbr1::R](eefbr1::R) reader structure"] impl crate::Readable for EEFBR1 {} #[doc = "`write(|w| ..)` method takes [eefbr1::W](eefbr1::W) writer structure"] impl crate::Writable for EEFBR1 {} #[doc = "Timerx External Event Filtering Register 1"] pub mod eefbr1; #[doc = "Timerx External Event Filtering Register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [eefbr2](eefbr2) module"] pub type EEFBR2 = crate::Reg<u32, _EEFBR2>; #[allow(missing_docs)] #[doc(hidden)] pub struct _EEFBR2; #[doc = "`read()` method returns [eefbr2::R](eefbr2::R) reader structure"] impl crate::Readable for EEFBR2 {} #[doc = "`write(|w| ..)` method takes [eefbr2::W](eefbr2::W) writer structure"] impl crate::Writable for EEFBR2 {} #[doc = "Timerx External Event Filtering Register 2"] pub mod eefbr2; #[doc = "TimerA Reset Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rstbr](rstbr) module"] pub type RSTBR = crate::Reg<u32, _RSTBR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _RSTBR; #[doc = "`read()` method returns [rstbr::R](rstbr::R) reader structure"] impl crate::Readable for RSTBR {} #[doc = "`write(|w| ..)` method takes [rstbr::W](rstbr::W) writer structure"] impl crate::Writable for RSTBR {} #[doc = "TimerA Reset Register"] pub mod rstbr; #[doc = "Timerx Chopper Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [chpbr](chpbr) module"] pub type CHPBR = crate::Reg<u32, _CHPBR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CHPBR; #[doc = "`read()` method returns [chpbr::R](chpbr::R) reader structure"] impl crate::Readable for CHPBR {} #[doc = "`write(|w| ..)` method takes [chpbr::W](chpbr::W) writer structure"] impl crate::Writable for CHPBR {} #[doc = "Timerx Chopper Register"] pub mod chpbr; #[doc = "Timerx Capture 2 Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cpt1bcr](cpt1bcr) module"] pub type CPT1BCR = crate::Reg<u32, _CPT1BCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CPT1BCR; #[doc = "`read()` method returns [cpt1bcr::R](cpt1bcr::R) reader structure"] impl crate::Readable for CPT1BCR {} #[doc = "`write(|w| ..)` method takes [cpt1bcr::W](cpt1bcr::W) writer structure"] impl crate::Writable for CPT1BCR {} #[doc = "Timerx Capture 2 Control Register"] pub mod cpt1bcr; #[doc = "CPT2xCR\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cpt2bcr](cpt2bcr) module"] pub type CPT2BCR = crate::Reg<u32, _CPT2BCR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _CPT2BCR; #[doc = "`read()` method returns [cpt2bcr::R](cpt2bcr::R) reader structure"] impl crate::Readable for CPT2BCR {} #[doc = "`write(|w| ..)` method takes [cpt2bcr::W](cpt2bcr::W) writer structure"] impl crate::Writable for CPT2BCR {} #[doc = "CPT2xCR"] pub mod cpt2bcr; #[doc = "Timerx Output Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [outbr](outbr) module"] pub type OUTBR = crate::Reg<u32, _OUTBR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _OUTBR; #[doc = "`read()` method returns [outbr::R](outbr::R) reader structure"] impl crate::Readable for OUTBR {} #[doc = "`write(|w| ..)` method takes [outbr::W](outbr::W) writer structure"] impl crate::Writable for OUTBR {} #[doc = "Timerx Output Register"] pub mod outbr; #[doc = "Timerx Fault Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [fltbr](fltbr) module"] pub type FLTBR = crate::Reg<u32, _FLTBR>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FLTBR; #[doc = "`read()` method returns [fltbr::R](fltbr::R) reader structure"] impl crate::Readable for FLTBR {} #[doc = "`write(|w| ..)` method takes [fltbr::W](fltbr::W) writer structure"] impl crate::Writable for FLTBR {} #[doc = "Timerx Fault Register"] pub mod fltbr;
use crate::alsa::{ControlElementInterface, Result};
use crate::libc as c;
use alsa_sys as alsa;
use std::ffi::CStr;
use std::mem;
use std::ptr;

/// A control associated with a device.
///
/// See [Control::open].
pub struct Control {
    // Thread tag: the handle must only be used from the thread that opened it.
    tag: ste::Tag,
    // Owned, non-null ALSA CTL handle; closed in Drop.
    pub(super) handle: ptr::NonNull<alsa::snd_ctl_t>,
}

impl Control {
    /// Opens a CTL.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use audio_device::alsa;
    /// use std::ffi::CString;
    ///
    /// # fn main() -> anyhow::Result<()> {
    /// let name = CString::new("hw:0")?;
    /// let pcm = alsa::Control::open(&name)?;
    /// # Ok(()) }
    /// ```
    pub fn open(name: &CStr) -> Result<Self> {
        unsafe {
            let mut handle = mem::MaybeUninit::uninit();
            // SAFETY: on success (checked by errno!) snd_ctl_open writes an
            // initialized, non-null handle pointer into `handle`, so
            // assume_init + new_unchecked are sound.
            errno!(alsa::snd_ctl_open(handle.as_mut_ptr(), name.as_ptr(), 0,))?;
            Ok(Self {
                tag: ste::Tag::current_thread(),
                handle: ptr::NonNull::new_unchecked(handle.assume_init()),
            })
        }
    }

    /// Get the identifier (name) of the CTL handle.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use audio_device::alsa;
    /// use std::ffi::CString;
    ///
    /// # fn main() -> anyhow::Result<()> {
    /// let name = CString::new("hw:0")?;
    /// let pcm = alsa::Control::open(&name)?;
    /// println!("control: {}", pcm.name().to_str()?);
    /// # Ok(()) }
    /// ```
    pub fn name(&self) -> &CStr {
        self.tag.ensure_on_thread();
        // SAFETY: `handle` is a live CTL handle owned by self; the returned
        // C string is borrowed with self's lifetime, matching the signature.
        unsafe { CStr::from_ptr(alsa::snd_ctl_name(self.handle.as_ptr())) }
    }

    /// Get a list of element identifiers.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use audio_device::alsa;
    /// use std::ffi::CString;
    ///
    /// # fn main() -> anyhow::Result<()> {
    /// let name = CString::new("hw:0")?;
    /// let control = alsa::Control::open(&name)?;
    /// let element_list = control.element_list()?;
    ///
    /// if let Some(element) = element_list.get(0) {
    ///     println!("name: {}", element.name().to_str()?);
    ///     println!("interface: {}", element.interface());
    /// }
    /// # Ok(()) }
    /// ```
    pub fn element_list(&self) -> Result<ControlElementList> {
        self.tag.ensure_on_thread();
        unsafe {
            let mut handle = mem::MaybeUninit::uninit();
            // SAFETY: on success the malloc call initializes `handle` with a
            // non-null list structure.
            errno!(alsa::snd_ctl_elem_list_malloc(handle.as_mut_ptr()))?;
            let handle = ptr::NonNull::new_unchecked(handle.assume_init());
            // Wrap in the RAII list *before* any fallible call so the
            // allocated structure is freed by Drop on every error path.
            let mut list = ControlElementList {
                handle,
                space: false,
                count: 0,
                used: 0,
            };
            // First pass: query the list without entry space to learn the
            // total element count.
            errno!(alsa::snd_ctl_elem_list(
                self.handle.as_ptr(),
                list.handle.as_mut()
            ))?;
            let count = alsa::snd_ctl_elem_list_get_count(list.handle.as_mut());
            list.count = count;
            // Allocate space for `count` entries; `space` gates the matching
            // free_space call in Drop.
            errno!(alsa::snd_ctl_elem_list_alloc_space(
                list.handle.as_mut(),
                count
            ))?;
            list.space = true;
            // Second pass: fill the now-allocated entries.
            errno!(alsa::snd_ctl_elem_list(
                self.handle.as_ptr(),
                list.handle.as_mut()
            ))?;
            list.used = alsa::snd_ctl_elem_list_get_used(list.handle.as_ref());
            Ok(list)
        }
    }
}

// Safety: [Control] is tagged with the thread its created it and is ensured not to
// leave it.
unsafe impl Send for Control {}

impl Drop for Control {
    fn drop(&mut self) {
        // SAFETY: `handle` is a valid CTL handle owned exclusively by self;
        // it is closed exactly once here.
        unsafe { alsa::snd_ctl_close(self.handle.as_ptr()) };
    }
}

/// Reference to a control element from a [ControlElementList].
///
/// Fetched with [ControlElementList::get].
pub struct ControlElement<'a> {
    // Borrow of the owning list's handle keeps the entries alive.
    handle: &'a ptr::NonNull<alsa::snd_ctl_elem_list_t>,
    // Entry index; guaranteed < used by the constructors.
    index: c::c_uint,
}

impl ControlElement<'_> {
    /// Get interface part of CTL element identifier for an entry of a CTL
    /// element identifiers list.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use audio_device::alsa;
    /// use std::ffi::CString;
    ///
    /// # fn main() -> anyhow::Result<()> {
    /// let name = CString::new("hw:0")?;
    /// let control = alsa::Control::open(&name)?;
    /// let element_list = control.element_list()?;
    ///
    /// if let Some(element) = element_list.get(0) {
    ///     println!("interface: {}", element.interface());
    /// }
    /// # Ok(()) }
    /// ```
    pub fn interface(&self) -> ControlElementInterface {
        // SAFETY: `index < used` by construction, so the entry access is
        // in bounds for the filled list.
        unsafe {
            let interface =
                alsa::snd_ctl_elem_list_get_interface(self.handle.as_ref(), self.index);
            ControlElementInterface::from_value(interface).expect("bad control element interface")
        }
    }

    /// Get the name part of the CTL element identifier for this entry.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use audio_device::alsa;
    /// use std::ffi::CString;
    ///
    /// # fn main() -> anyhow::Result<()> {
    /// let name = CString::new("hw:0")?;
    /// let control = alsa::Control::open(&name)?;
    /// let element_list = control.element_list()?;
    ///
    /// if let Some(element) = element_list.get(0) {
    ///     println!("name: {}", element.name().to_str()?);
    /// }
    /// # Ok(()) }
    /// ```
    pub fn name(&self) -> &CStr {
        // SAFETY: in-bounds entry (index < used); the C string borrows from
        // the list, which outlives self via the `'a` borrow.
        unsafe {
            CStr::from_ptr(alsa::snd_ctl_elem_list_get_name(
                self.handle.as_ref(),
                self.index,
            ))
        }
    }

    /// Get the index part of the CTL element identifier for this entry.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use audio_device::alsa;
    /// use std::ffi::CString;
    ///
    /// # fn main() -> anyhow::Result<()> {
    /// let name = CString::new("hw:0")?;
    /// let control = alsa::Control::open(&name)?;
    /// let element_list = control.element_list()?;
    ///
    /// if let Some(element) = element_list.get(0) {
    ///     println!("index: {}", element.index());
    /// }
    /// # Ok(()) }
    /// ```
    pub fn index(&self) -> c::c_uint {
        // SAFETY: in-bounds entry access, as above.
        unsafe { alsa::snd_ctl_elem_list_get_index(self.handle.as_ref(), self.index) }
    }
}

/// A list of control elements.
pub struct ControlElementList {
    // Owned list structure; freed in Drop.
    handle: ptr::NonNull<alsa::snd_ctl_elem_list_t>,
    // True once alloc_space succeeded, so Drop knows to free the entry space.
    space: bool,
    // Total element count reported by the device.
    count: c::c_uint,
    // Number of entries actually filled into the list.
    used: c::c_uint,
}

impl ControlElementList {
    /// Get number of used entries in CTL element identifiers list.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use audio_device::alsa;
    /// use std::ffi::CString;
    ///
    /// # fn main() -> anyhow::Result<()> {
    /// let name = CString::new("hw:0")?;
    /// let control = alsa::Control::open(&name)?;
    /// let element_list = control.element_list()?;
    /// dbg!(element_list.used());
    /// # Ok(()) }
    /// ```
    pub fn used(&self) -> c::c_uint {
        // SAFETY: `handle` is a valid, owned list structure.
        unsafe { alsa::snd_ctl_elem_list_get_used(self.handle.as_ref()) }
    }

    /// Get total count of elements present in CTL device.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use audio_device::alsa;
    /// use std::ffi::CString;
    ///
    /// # fn main() -> anyhow::Result<()> {
    /// let name = CString::new("hw:0")?;
    /// let control = alsa::Control::open(&name)?;
    /// let element_list = control.element_list()?;
    /// dbg!(element_list.count());
    /// # Ok(()) }
    /// ```
    pub fn count(&self) -> c::c_uint {
        self.count
    }

    /// Get the control element at the given index.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use audio_device::alsa;
    /// use std::ffi::CString;
    ///
    /// # fn main() -> anyhow::Result<()> {
    /// let name = CString::new("hw:0")?;
    /// let control = alsa::Control::open(&name)?;
    /// let element_list = control.element_list()?;
    ///
    /// if let Some(element) = element_list.get(0) {
    ///     println!("{}", element.name().to_str()?);
    /// }
    /// # Ok(()) }
    /// ```
    pub fn get(&self, index: c::c_uint) -> Option<ControlElement<'_>> {
        // Bounds check against the number of *filled* entries; this is what
        // makes the unsafe entry accesses in ControlElement sound.
        if index >= self.used {
            return None;
        }
        Some(ControlElement {
            handle: &self.handle,
            index,
        })
    }

    /// Construct an iterator over the list of control elements.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use audio_device::alsa;
    /// use std::ffi::CString;
    ///
    /// # fn main() -> anyhow::Result<()> {
    /// let name = CString::new("hw:0")?;
    /// let control = alsa::Control::open(&name)?;
    /// let element_list = control.element_list()?;
    ///
    /// for element in element_list.iter() {
    ///     println!("{}", element.name().to_str()?);
    /// }
    /// # Ok(()) }
    /// ```
    pub fn iter(&self) -> ControlElementListIter<'_> {
        ControlElementListIter {
            handle: &self.handle,
            index: 0,
            used: self.used,
        }
    }
}

impl Drop for ControlElementList {
    fn drop(&mut self) {
        // SAFETY: `handle` is owned; the entry space is freed only when it
        // was actually allocated (`space`), then the structure itself.
        unsafe {
            if self.space {
                let _ = alsa::snd_ctl_elem_list_free_space(self.handle.as_ptr());
            }
            let _ = alsa::snd_ctl_elem_list_free(self.handle.as_ptr());
        };
    }
}

/// An iterator over available control elements.
///
/// See [ControlElementList::iter].
pub struct ControlElementListIter<'a> {
    // Borrow of the owning list keeps entries valid for yielded elements.
    handle: &'a ptr::NonNull<alsa::snd_ctl_elem_list_t>,
    // Next index to yield.
    index: c::c_uint,
    // Upper bound (number of filled entries), fixed at iterator creation.
    used: c::c_uint,
}

impl<'a> Iterator for ControlElementListIter<'a> {
    type Item = ControlElement<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.used {
            return None;
        }
        let index = self.index;
        self.index += 1;
        // Indices yielded are always < used, preserving the bounds invariant
        // that ControlElement's unsafe accessors rely on.
        Some(ControlElement {
            handle: self.handle,
            index,
        })
    }
}
pub mod register;
pub mod cpu;
pub mod ppu;
pub mod apu;
pub mod rom;
pub mod memory;
pub mod mapper;
pub mod button;
pub mod joypad;
pub mod input;
pub mod audio;
pub mod display;
pub mod default_input;
pub mod default_audio;
pub mod default_display;

use cpu::Cpu;
use rom::Rom;
use button::Button;
use input::Input;
use display::Display;
use audio::Audio;

/// NES emulator.
///
/// The `Nes` struct is a thin facade over [`cpu::Cpu`], which owns the rest
/// of the machine (PPU, APU, input); every method below delegates to it.
///
/// ```ignore
/// use std::fs::File;
/// use std::io::Read;
/// use std::time::Duration;
/// use nes_rust::Nes;
/// use nes_rust::rom::Rom;
/// use nes_rust::default_input::DefaultInput;
/// use nes_rust::default_audio::DefaultAudio;
/// use nes_rust::default_display::DefaultDisplay;
///
/// let input = Box::new(DefaultInput::new());
/// let display = Box::new(DefaultDisplay::new());
/// let audio = Box::new(DefaultAudio::new());
/// let mut nes = Nes::new(input, display, audio);
///
/// // Load and set Rom from rom image binary
/// // (here `args` is assumed to hold the CLI arguments)
/// let filename = &args[1];
/// let mut file = File::open(filename)?;
/// let mut contents = vec![];
/// file.read_to_end(&mut contents)?;
/// let rom = Rom::new(contents);
/// nes.set_rom(rom);
///
/// // Go!
/// nes.bootup();
/// let mut rgba_pixels = [0; 256 * 240 * 4];
/// loop {
///     nes.step_frame();
///     nes.copy_pixels(rgba_pixels);
///     // Render rgba_pixels
///     // @TODO: Audio buffer sample code is T.B.D.
///     // Adjust sleep time for your platform
///     std::thread::sleep(Duration::from_millis(1));
/// }
/// ```
pub struct Nes {
    // The CPU owns the whole console; `Nes` only forwards calls to it.
    cpu: Cpu
}

impl Nes {
    /// Creates a new `Nes`.
    /// You need to pass [`input::Input`](./input/trait.Input.html),
    /// [`display::Display`](./display/trait.Display.html), and
    /// [`audio::Audio`](./audio/trait.Audio.html) traits for your platform
    /// specific Input/Output.
    ///
    /// # Arguments
    /// * `input` For pad input
    /// * `display` For screen output
    /// * `audio` For audio output
    pub fn new(input: Box<dyn Input>, display: Box<dyn Display>, audio: Box<dyn Audio>) -> Self {
        Nes {
            cpu: Cpu::new(
                input,
                display,
                audio
            )
        }
    }

    /// Sets up NES rom
    ///
    /// # Arguments
    /// * `rom`
    pub fn set_rom(&mut self, rom: Rom) {
        self.cpu.set_rom(rom);
    }

    /// Boots up
    pub fn bootup(&mut self) {
        self.cpu.bootup();
    }

    /// Resets
    pub fn reset(&mut self) {
        self.cpu.reset();
    }

    /// Executes a CPU cycle
    pub fn step(&mut self) {
        self.cpu.step();
    }

    /// Executes a PPU (screen refresh) frame
    pub fn step_frame(&mut self) {
        self.cpu.step_frame();
    }

    /// Copies RGBA pixels of screen to passed pixels.
    /// The length and result should be specific to `display` passed via the constructor.
    ///
    /// # Arguments
    /// * `pixels`
    pub fn copy_pixels(&self, pixels: &mut [u8]) {
        self.cpu.get_ppu().get_display().copy_to_rgba_pixels(pixels);
    }

    /// Copies audio buffer to passed buffer.
    /// The length and result should be specific to `audio` passed via the constructor.
    ///
    /// # Arguments
    /// * `buffer`
    pub fn copy_sample_buffer(&mut self, buffer: &mut [f32]) {
        self.cpu.get_mut_apu().get_mut_audio().copy_sample_buffer(buffer);
    }

    /// Presses a pad button
    ///
    /// # Arguments
    /// * `button`
    pub fn press_button(&mut self, button: Button) {
        self.cpu.get_mut_input().press(button);
    }

    /// Releases a pad button
    ///
    /// # Arguments
    /// * `button`
    pub fn release_button(&mut self, button: Button) {
        self.cpu.get_mut_input().release(button);
    }

    /// Checks if NES console is powered on
    pub fn is_power_on(&self) -> bool {
        self.cpu.is_power_on()
    }
}
use ipfs::{Ipfs, IpfsTypes};
use serde::Serialize;
use std::borrow::Cow;
use std::error::Error as StdError;
use std::fmt;

pub mod option_parsing;
mod stream;
pub use stream::StreamResponse;
mod body;
pub use body::{try_only_named_multipart, OnlyMultipartFailure};
mod timeout;
pub use timeout::MaybeTimeoutExt;
mod serdesupport;
pub use serdesupport::StringSerialized;

/// The common responses apparently returned by the go-ipfs HTTP api on errors.
/// See also: https://github.com/ferristseng/rust-ipfs-api/blob/master/ipfs-api/src/response/error.rs
///
/// Serialized with PascalCase field names ("Message", "Code", "Type") to
/// match the go-ipfs wire format.
#[derive(Debug, Serialize)]
#[serde(rename_all = "PascalCase")]
pub struct MessageResponse {
    // Human-readable error message; `Cow` so static strings avoid allocation.
    message: Cow<'static, str>,
    // Numeric error code copied from the go-ipfs responses (no spec found).
    code: usize,
    // Raw identifier `type`; escaped because `type` is a Rust keyword.
    r#type: MessageKind,
}

impl MessageResponse {
    // Renders this response as a warp JSON reply body.
    fn to_json_reply(&self) -> warp::reply::Json {
        warp::reply::json(self)
    }
}

/// The `MessageResponse` has this field, unsure if it can be anything other than "error".
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum MessageKind {
    Error,
}

impl MessageKind {
    // FIXME: haven't found a spec for these codes yet
    pub fn with_code(self, code: usize) -> MessageResponseBuilder {
        MessageResponseBuilder(self, code)
    }
}

/// Combining `MessageKind` and `code` using `MessageKind::with_code` returns a
/// `MessageResponseBuilder` which will only need a message to become `MessageResponse`.
#[derive(Debug, Clone)] pub struct MessageResponseBuilder(MessageKind, usize); impl Default for MessageResponseBuilder { fn default() -> Self { MessageResponseBuilder(MessageKind::Error, 0) } } impl MessageResponseBuilder { pub fn with_message<S: Into<Cow<'static, str>>>(self, message: S) -> MessageResponse { let Self(kind, code) = self; MessageResponse { message: message.into(), code, r#type: kind, } } } /// Clones the handle to the filters pub fn with_ipfs<T: IpfsTypes>( ipfs: &Ipfs<T>, ) -> impl warp::Filter<Extract = (Ipfs<T>,), Error = std::convert::Infallible> + Clone { use warp::Filter; let ipfs = ipfs.clone(); warp::any().map(move || ipfs.clone()) } /// Special rejection from `pubsub/pub` #[derive(Debug)] pub(crate) struct NonUtf8Topic; impl warp::reject::Reject for NonUtf8Topic {} /// Used by `pubsub/pub` #[derive(Debug)] pub(crate) struct RequiredArgumentMissing(pub(crate) &'static str); impl warp::reject::Reject for RequiredArgumentMissing {} #[derive(Debug)] pub(crate) struct InvalidMultipartFormData; impl warp::reject::Reject for InvalidMultipartFormData {} /// Marker for `warp` specific rejections when something is unimplemented #[derive(Debug)] pub(crate) struct NotImplemented; impl warp::reject::Reject for NotImplemented {} /// PeerId parsing error, details from `libp2p::identity::ParseError` are lost. #[derive(Debug)] pub(crate) struct InvalidPeerId; impl warp::reject::Reject for InvalidPeerId {} /// Default placeholder for ipfs::Error but once we get more typed errors we could start making /// them more readable, if needed. // TODO: needs to be considered if this is even needed.. 
#[derive(Debug)]
pub(crate) struct StringError(Cow<'static, str>);
impl warp::reject::Reject for StringError {}

impl<D: std::fmt::Display> From<D> for StringError {
    fn from(d: D) -> Self {
        Self(d.to_string().into())
    }
}

impl StringError {
    // Allowing this as dead since it hopefully doesn't stay unused for long
    #[allow(dead_code)]
    pub fn new(cow: Cow<'static, str>) -> Self {
        StringError(cow)
    }
}

/// Common rejection handling strategy for ipfs http api compatible error responses
///
/// Converts our typed rejections into go-ipfs-shaped JSON error bodies with
/// appropriate HTTP statuses. The `if`/`else if` chain order is significant:
/// see the note on the not-found branch below.
pub async fn recover_as_message_response(
    err: warp::reject::Rejection,
) -> Result<impl warp::Reply, warp::Rejection> {
    use warp::http::StatusCode;
    use warp::reject::{InvalidQuery, LengthRequired, MethodNotAllowed};

    // Boxed because the branches produce different concrete Reply types
    // (JSON bodies vs. a plain &str body in the 404 branch).
    let resp: Box<dyn warp::Reply>;
    let status;

    if err.find::<NotImplemented>().is_some() {
        resp = Box::new(
            MessageKind::Error
                .with_code(0)
                .with_message("Not implemented")
                .to_json_reply(),
        );
        status = StatusCode::NOT_IMPLEMENTED;
    } else if let Some(e) = err.find::<RequiredArgumentMissing>() {
        resp = Box::new(
            MessageKind::Error
                .with_code(0)
                .with_message(format!("required argument {:?} missing", e.0))
                .to_json_reply(),
        );
        status = StatusCode::BAD_REQUEST;
    } else if err.find::<NonUtf8Topic>().is_some() {
        resp = Box::new(
            MessageKind::Error
                .with_code(0)
                .with_message("non utf8 topic")
                .to_json_reply(),
        );
        status = StatusCode::BAD_REQUEST;
    } else if let Some(e) = err.find::<InvalidQuery>() {
        // invalidquery contains box<std::error::Error + Sync + Static>
        resp = Box::new(
            MessageKind::Error
                .with_code(0)
                .with_message(e.to_string())
                .to_json_reply(),
        );
        status = StatusCode::BAD_REQUEST;
    } else if let Some(StringError(msg)) = err.find::<StringError>() {
        resp = Box::new(
            MessageKind::Error
                .with_code(0)
                .with_message(msg.to_owned())
                .to_json_reply(),
        );
        status = StatusCode::INTERNAL_SERVER_ERROR;
    } else if err.find::<InvalidPeerId>().is_some() {
        resp = Box::new(
            MessageKind::Error
                .with_code(0)
                .with_message("invalid peer id")
                .to_json_reply(),
        );
        status = StatusCode::BAD_REQUEST;
    } else if err.is_not_found() || err.find::<MethodNotAllowed>().is_some() {
        // strangely this here needs to match last, since the methodnotallowed can come after
        // InvalidQuery as well.
        // go-ipfs sends back a "404 Not Found" with body "404 page not found"
        resp = Box::new("404 page not found");
        status = StatusCode::NOT_FOUND;
    } else if err.find::<LengthRequired>().is_some() {
        resp = Box::new(
            MessageKind::Error
                .with_code(0)
                .with_message("Missing header: content-length")
                .to_json_reply(),
        );
        status = StatusCode::BAD_REQUEST;
    } else {
        // FIXME: use log
        warn!("unhandled rejection: {:?}", err);
        resp = Box::new(
            MessageKind::Error
                .with_code(0)
                .with_message("UNHANDLED REJECTION")
                .to_json_reply(),
        );
        status = StatusCode::INTERNAL_SERVER_ERROR;
    }

    Ok(warp::reply::with_status(resp, status))
}

/// Empty struct implementing std::error::Error, which we can use to mark the serde_json::Error as
/// "handled" (by logging).
#[derive(Debug)]
pub struct HandledErr;

impl StdError for HandledErr {}

impl fmt::Display for HandledErr {
    // Intentionally writes nothing: the error carries no message of its own.
    fn fmt(&self, _fmt: &mut fmt::Formatter) -> fmt::Result {
        Ok(())
    }
}
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use core::mem; use super::super::asm::*; use super::super::qlib::common::*; use super::super::Kernel::HostSpace; use super::timer::*; #[repr(C)] #[derive(Debug, Default, Copy, Clone)] pub struct VdsoParams { pub seq_count: u64, pub monotonicReady: u64, pub monotonicBaseCycles: i64, pub monotonicBaseRef: i64, pub monotonicFrequency: u64, pub realtimeReady: u64, pub realtimeBaseCycles: i64, pub realtimeBaseRef: i64, pub realtimeFrequency: u64, } impl VdsoParams { pub fn ClockRealTime(&self) -> Result<i64> { let mut ready; let mut baseRef; let mut baseCycle; let mut frequency; let mut now; loop { let seq = self.ReadSeqBegin(); ready = self.realtimeReady; baseRef = self.realtimeBaseRef; baseCycle = self.realtimeBaseCycles; frequency = self.realtimeFrequency; now = Rdtsc(); if self.ReadSeqReady(seq) { break; }; }; if ready == 0 { return HostSpace::KernelGetTime(REALTIME); } let delta = if now < baseCycle { 0 } else { now - baseCycle }; let nowNs = baseRef + CyclesToNs(frequency, delta); return Ok(nowNs) } pub fn ClockMonotonicTime(&self) -> Result<i64> { let mut ready; let mut baseRef; let mut baseCycle; let mut frequency; let mut now; loop { let seq = self.ReadSeqBegin(); ready = self.monotonicReady; baseRef = self.monotonicBaseRef; baseCycle = self.monotonicBaseCycles; frequency = self.monotonicFrequency; now = Rdtsc(); if self.ReadSeqReady(seq) { 
break; }; }; if ready == 0 { return HostSpace::KernelGetTime(MONOTONIC); } let delta = if now < baseCycle { 0 } else { now - baseCycle }; let nowNs = baseRef + CyclesToNs(frequency, delta); return Ok(nowNs) } fn ReadSeqBegin(&self) -> u64 { let seq = self.seq_count; ReadBarrier(); return seq & !1; } //read success: return true, need retry: return false fn ReadSeqReady(&self, seq: u64) -> bool { ReadBarrier(); return self.seq_count == seq; } } pub struct VDSOParamPage { //pub vdsoParams: &'static mut VdsoParams, pub vdsoParams: &'static mut VdsoParams, // seq is the current sequence count written to the page. // // A write is in progress if bit 1 of the counter is set. pub seq: u64, pub paramPageAddr: u64, } impl Default for VDSOParamPage { fn default() -> VDSOParamPage { return unsafe { VDSOParamPage { vdsoParams: &mut *(0 as * mut VdsoParams), //vdsoParams: VdsoParams::default(), seq: 0, paramPageAddr: 0, } } } } impl VDSOParamPage { pub fn SetParamPageAddr(&mut self, paramPageAddr: u64) { unsafe { self.vdsoParams = &mut *(paramPageAddr as * mut VdsoParams); } self.paramPageAddr = paramPageAddr; } pub fn GetParamPageAddr(&self) -> u64 { return self.paramPageAddr } fn IncrementSeq(&mut self) -> Result<()> { let next = self.seq + 1; let mut old: u64 = next; mem::swap(&mut old, &mut self.vdsoParams.seq_count); if old != self.seq { return Err(Error::Common(format!("unexpected VDSOParamPage seq value: got {} expected {}. 
Application may hang or get incorrect time from the VDSO.", old, self.seq))); } self.seq = next; return Ok(()) } pub fn Write(&mut self, para: &VdsoParams) -> Result<()> { let next = self.seq + 1; if next % 2 != 1 { //let str = format!("Out-of-order sequence count: {}", self.seq); panic!("Out-of-order sequence count"); } self.IncrementSeq()?; self.vdsoParams.monotonicReady = para.monotonicReady; self.vdsoParams.monotonicBaseCycles = para.monotonicBaseCycles; self.vdsoParams.monotonicBaseRef = para.monotonicBaseRef; self.vdsoParams.monotonicFrequency = para.monotonicFrequency; self.vdsoParams.realtimeReady = para.realtimeReady; self.vdsoParams.realtimeBaseCycles = para.realtimeBaseCycles; self.vdsoParams.realtimeBaseRef = para.realtimeBaseRef; self.vdsoParams.realtimeFrequency = para.realtimeFrequency; return self.IncrementSeq(); } } const NS_PER_SEC: i128 = 1000_000_000; fn CyclesToNs(freq: u64, cycles: i64) -> i64 { let mult = NS_PER_SEC << 32 / freq as i128; return ((cycles as i128 * mult) >> 32) as i64; }
/* * @lc app=leetcode.cn id=811 lang=rust * * [811] 子域名访问计数 */ // @lc code=start impl Solution { pub fn subdomain_visits(cpdomains: Vec<String>) -> Vec<String> { fn split<'a>(content: &'a str, spliter: &str) -> Vec<&'a str> { content.split(spliter).collect() } fn string_to_static_str(s: String) -> &'static str { Box::leak(s.into_boxed_str()) } let mut map: std::collections::HashMap<&str, i32> = std::collections::HashMap::new(); for i in 0..cpdomains.len() { let str_arr: Vec<&str> = split(&cpdomains[i], " "); let times = str_arr[0].parse::<i32>().unwrap(); let domains_string = str_arr[1]; let domains = split(domains_string, "."); let max_len = domains.len(); for k in 0..max_len { let sub_domain = &domains[k..max_len].to_owned(); let domain = sub_domain.join("."); let domain_str = string_to_static_str(domain); let stat: &mut i32 = map.entry(domain_str).or_insert(0); *stat += times; } } let mut ret = vec![]; for (key, value) in &map { ret.push(format!("{} {}", value, key)) } ret } } // @lc code=end
// Copyright (c) 2021 Quark Container Authors / 2018 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use super::super::qlib::platform::defs_impl::*; #[derive(Default)] pub struct DefaultPlatform {} impl DefaultPlatform { // SupportsAddressSpaceIO implements platform.Platform.SupportsAddressSpaceIO. pub fn SupportsAddressSpaceIO(&self) -> bool { return false; } // MapUnit implements platform.Platform.MapUnit. pub fn MapUint(&self) -> u64 { // We greedily creates PTEs in MapFile, so extremely large mappings can // be expensive. Not _that_ expensive since we allow super pages, but // even though can get out of hand if you're creating multi-terabyte // mappings. For this reason, we limit mappings to an arbitrary 16MB. return 16 << 20 } // MinUserAddress returns the lowest available address. pub fn MinUserAddress(&self) -> u64 { return MIN_USER_ADDR; } pub fn MaxUserAddress(&self) -> u64 { return MAX_USER_ADDR; } }
use self::io::*; use self::game::*; use self::player::*; mod game; mod game_test; mod io; mod io_test; mod player; mod player_test; mod board; mod board_test; mod rules; mod rules_test; mod minimax; mod minimax_test; fn main() { let io = ConsoleIo::new(); io.print("Hello! Welcome to Tic Tac Toe.\n".to_string()); let mut game_config = 3; loop { io.print("Please choose one of the following options:\n".to_string()); let response = io.prompt(concat!( "1) You vs. the game (you go first)\n", "2) The game vs. you (the game goes first)\n", "3) You vs. a friend\n").to_string()); let choice = response.trim().parse(); if choice.is_some() && [1, 2, 3].contains(&choice.unwrap()) { game_config = choice.unwrap(); break; } } let winner = match game_config { 1 => Game::new(HumanPlayer::new(io.clone(), 1), CpuPlayer::new(2)).play(), 2 => Game::new(CpuPlayer::new(1), HumanPlayer::new(io.clone(), 2)).play(), 3 | _ => Game::new(HumanPlayer::new(io.clone(), 1), HumanPlayer::new(io.clone(), 2)).play() }; match winner { 1 => io.print("X wins!\n".to_string()), 2 => io.print("O wins!\n".to_string()), _ => io.print("Draw.\n".to_string()) } }
// auto generated, do not modify. // created: Mon Feb 22 23:57:02 2016 // src-file: /QtCore/qabstractproxymodel.h // dst-file: /src/core/qabstractproxymodel.rs // // header block begin => #![feature(libc)] #![feature(core)] #![feature(collections)] extern crate libc; use self::libc::*; // <= header block end // main block begin => // <= main block end // use block begin => use super::qabstractitemmodel::*; // 773 use std::ops::Deref; use super::qstringlist::*; // 773 use super::qmimedata::*; // 773 use super::qitemselectionmodel::*; // 773 use super::qvariant::*; // 773 use super::qsize::*; // 773 use super::qobjectdefs::*; // 773 // use super::qmap::*; // 775 use super::qobject::*; // 773 // <= use block end // ext block begin => // #[link(name = "Qt5Core")] // #[link(name = "Qt5Gui")] // #[link(name = "Qt5Widgets")] // #[link(name = "QtInline")] extern { fn QAbstractProxyModel_Class_Size() -> c_int; // proto: QStringList QAbstractProxyModel::mimeTypes(); fn C_ZNK19QAbstractProxyModel9mimeTypesEv(qthis: u64 /* *mut c_void*/) -> *mut c_void; // proto: void QAbstractProxyModel::revert(); fn C_ZN19QAbstractProxyModel6revertEv(qthis: u64 /* *mut c_void*/); // proto: QItemSelection QAbstractProxyModel::mapSelectionToSource(const QItemSelection & selection); fn C_ZNK19QAbstractProxyModel20mapSelectionToSourceERK14QItemSelection(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void; // proto: QItemSelection QAbstractProxyModel::mapSelectionFromSource(const QItemSelection & selection); fn C_ZNK19QAbstractProxyModel22mapSelectionFromSourceERK14QItemSelection(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void; // proto: QVariant QAbstractProxyModel::data(const QModelIndex & proxyIndex, int role); fn C_ZNK19QAbstractProxyModel4dataERK11QModelIndexi(qthis: u64 /* *mut c_void*/, arg0: *mut c_void, arg1: c_int) -> *mut c_void; // proto: bool QAbstractProxyModel::submit(); fn C_ZN19QAbstractProxyModel6submitEv(qthis: u64 /* *mut c_void*/) -> c_char; // 
proto: QSize QAbstractProxyModel::span(const QModelIndex & index); fn C_ZNK19QAbstractProxyModel4spanERK11QModelIndex(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void; // proto: bool QAbstractProxyModel::canFetchMore(const QModelIndex & parent); fn C_ZNK19QAbstractProxyModel12canFetchMoreERK11QModelIndex(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> c_char; // proto: const QMetaObject * QAbstractProxyModel::metaObject(); fn C_ZNK19QAbstractProxyModel10metaObjectEv(qthis: u64 /* *mut c_void*/) -> *mut c_void; // proto: QModelIndex QAbstractProxyModel::mapToSource(const QModelIndex & proxyIndex); fn C_ZNK19QAbstractProxyModel11mapToSourceERK11QModelIndex(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void; // proto: QModelIndex QAbstractProxyModel::mapFromSource(const QModelIndex & sourceIndex); fn C_ZNK19QAbstractProxyModel13mapFromSourceERK11QModelIndex(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void; // proto: QMap<int, QVariant> QAbstractProxyModel::itemData(const QModelIndex & index); fn C_ZNK19QAbstractProxyModel8itemDataERK11QModelIndex(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void; // proto: QModelIndex QAbstractProxyModel::buddy(const QModelIndex & index); fn C_ZNK19QAbstractProxyModel5buddyERK11QModelIndex(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> *mut c_void; // proto: void QAbstractProxyModel::setSourceModel(QAbstractItemModel * sourceModel); fn C_ZN19QAbstractProxyModel14setSourceModelEP18QAbstractItemModel(qthis: u64 /* *mut c_void*/, arg0: *mut c_void); // proto: QModelIndex QAbstractProxyModel::sibling(int row, int column, const QModelIndex & idx); fn C_ZNK19QAbstractProxyModel7siblingEiiRK11QModelIndex(qthis: u64 /* *mut c_void*/, arg0: c_int, arg1: c_int, arg2: *mut c_void) -> *mut c_void; // proto: bool QAbstractProxyModel::setData(const QModelIndex & index, const QVariant & value, int role); fn C_ZN19QAbstractProxyModel7setDataERK11QModelIndexRK8QVarianti(qthis: u64 /* *mut 
c_void*/, arg0: *mut c_void, arg1: *mut c_void, arg2: c_int) -> c_char; // proto: void QAbstractProxyModel::fetchMore(const QModelIndex & parent); fn C_ZN19QAbstractProxyModel9fetchMoreERK11QModelIndex(qthis: u64 /* *mut c_void*/, arg0: *mut c_void); // proto: void QAbstractProxyModel::~QAbstractProxyModel(); fn C_ZN19QAbstractProxyModelD2Ev(qthis: u64 /* *mut c_void*/); // proto: void QAbstractProxyModel::QAbstractProxyModel(QObject * parent); fn C_ZN19QAbstractProxyModelC2EP7QObject(arg0: *mut c_void) -> u64; // proto: bool QAbstractProxyModel::hasChildren(const QModelIndex & parent); fn C_ZNK19QAbstractProxyModel11hasChildrenERK11QModelIndex(qthis: u64 /* *mut c_void*/, arg0: *mut c_void) -> c_char; // proto: QAbstractItemModel * QAbstractProxyModel::sourceModel(); fn C_ZNK19QAbstractProxyModel11sourceModelEv(qthis: u64 /* *mut c_void*/) -> *mut c_void; } // <= ext block end // body block begin => // class sizeof(QAbstractProxyModel)=1 #[derive(Default)] pub struct QAbstractProxyModel { qbase: QAbstractItemModel, pub qclsinst: u64 /* *mut c_void*/, pub _sourceModelChanged: QAbstractProxyModel_sourceModelChanged_signal, } impl /*struct*/ QAbstractProxyModel { pub fn inheritFrom(qthis: u64 /* *mut c_void*/) -> QAbstractProxyModel { return QAbstractProxyModel{qbase: QAbstractItemModel::inheritFrom(qthis), qclsinst: qthis, ..Default::default()}; } } impl Deref for QAbstractProxyModel { type Target = QAbstractItemModel; fn deref(&self) -> &QAbstractItemModel { return & self.qbase; } } impl AsRef<QAbstractItemModel> for QAbstractProxyModel { fn as_ref(& self) -> & QAbstractItemModel { return & self.qbase; } } // proto: QStringList QAbstractProxyModel::mimeTypes(); impl /*struct*/ QAbstractProxyModel { pub fn mimeTypes<RetType, T: QAbstractProxyModel_mimeTypes<RetType>>(& self, overload_args: T) -> RetType { return overload_args.mimeTypes(self); // return 1; } } pub trait QAbstractProxyModel_mimeTypes<RetType> { fn mimeTypes(self , rsthis: & QAbstractProxyModel) -> 
RetType; } // proto: QStringList QAbstractProxyModel::mimeTypes(); impl<'a> /*trait*/ QAbstractProxyModel_mimeTypes<QStringList> for () { fn mimeTypes(self , rsthis: & QAbstractProxyModel) -> QStringList { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel9mimeTypesEv()}; let mut ret = unsafe {C_ZNK19QAbstractProxyModel9mimeTypesEv(rsthis.qclsinst)}; let mut ret1 = QStringList::inheritFrom(ret as u64); return ret1; // return 1; } } // proto: void QAbstractProxyModel::revert(); impl /*struct*/ QAbstractProxyModel { pub fn revert<RetType, T: QAbstractProxyModel_revert<RetType>>(& self, overload_args: T) -> RetType { return overload_args.revert(self); // return 1; } } pub trait QAbstractProxyModel_revert<RetType> { fn revert(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: void QAbstractProxyModel::revert(); impl<'a> /*trait*/ QAbstractProxyModel_revert<()> for () { fn revert(self , rsthis: & QAbstractProxyModel) -> () { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN19QAbstractProxyModel6revertEv()}; unsafe {C_ZN19QAbstractProxyModel6revertEv(rsthis.qclsinst)}; // return 1; } } // proto: QItemSelection QAbstractProxyModel::mapSelectionToSource(const QItemSelection & selection); impl /*struct*/ QAbstractProxyModel { pub fn mapSelectionToSource<RetType, T: QAbstractProxyModel_mapSelectionToSource<RetType>>(& self, overload_args: T) -> RetType { return overload_args.mapSelectionToSource(self); // return 1; } } pub trait QAbstractProxyModel_mapSelectionToSource<RetType> { fn mapSelectionToSource(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: QItemSelection QAbstractProxyModel::mapSelectionToSource(const QItemSelection & selection); impl<'a> /*trait*/ QAbstractProxyModel_mapSelectionToSource<QItemSelection> for (&'a QItemSelection) { fn mapSelectionToSource(self , rsthis: & QAbstractProxyModel) -> QItemSelection { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // 
unsafe{_ZNK19QAbstractProxyModel20mapSelectionToSourceERK14QItemSelection()}; let arg0 = self.qclsinst as *mut c_void; let mut ret = unsafe {C_ZNK19QAbstractProxyModel20mapSelectionToSourceERK14QItemSelection(rsthis.qclsinst, arg0)}; let mut ret1 = QItemSelection::inheritFrom(ret as u64); return ret1; // return 1; } } // proto: QItemSelection QAbstractProxyModel::mapSelectionFromSource(const QItemSelection & selection); impl /*struct*/ QAbstractProxyModel { pub fn mapSelectionFromSource<RetType, T: QAbstractProxyModel_mapSelectionFromSource<RetType>>(& self, overload_args: T) -> RetType { return overload_args.mapSelectionFromSource(self); // return 1; } } pub trait QAbstractProxyModel_mapSelectionFromSource<RetType> { fn mapSelectionFromSource(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: QItemSelection QAbstractProxyModel::mapSelectionFromSource(const QItemSelection & selection); impl<'a> /*trait*/ QAbstractProxyModel_mapSelectionFromSource<QItemSelection> for (&'a QItemSelection) { fn mapSelectionFromSource(self , rsthis: & QAbstractProxyModel) -> QItemSelection { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel22mapSelectionFromSourceERK14QItemSelection()}; let arg0 = self.qclsinst as *mut c_void; let mut ret = unsafe {C_ZNK19QAbstractProxyModel22mapSelectionFromSourceERK14QItemSelection(rsthis.qclsinst, arg0)}; let mut ret1 = QItemSelection::inheritFrom(ret as u64); return ret1; // return 1; } } // proto: QVariant QAbstractProxyModel::data(const QModelIndex & proxyIndex, int role); impl /*struct*/ QAbstractProxyModel { pub fn data<RetType, T: QAbstractProxyModel_data<RetType>>(& self, overload_args: T) -> RetType { return overload_args.data(self); // return 1; } } pub trait QAbstractProxyModel_data<RetType> { fn data(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: QVariant QAbstractProxyModel::data(const QModelIndex & proxyIndex, int role); impl<'a> /*trait*/ QAbstractProxyModel_data<QVariant> 
for (&'a QModelIndex, Option<i32>) { fn data(self , rsthis: & QAbstractProxyModel) -> QVariant { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel4dataERK11QModelIndexi()}; let arg0 = self.0.qclsinst as *mut c_void; let arg1 = (if self.1.is_none() {0 as i32} else {self.1.unwrap()}) as c_int; let mut ret = unsafe {C_ZNK19QAbstractProxyModel4dataERK11QModelIndexi(rsthis.qclsinst, arg0, arg1)}; let mut ret1 = QVariant::inheritFrom(ret as u64); return ret1; // return 1; } } // proto: bool QAbstractProxyModel::submit(); impl /*struct*/ QAbstractProxyModel { pub fn submit<RetType, T: QAbstractProxyModel_submit<RetType>>(& self, overload_args: T) -> RetType { return overload_args.submit(self); // return 1; } } pub trait QAbstractProxyModel_submit<RetType> { fn submit(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: bool QAbstractProxyModel::submit(); impl<'a> /*trait*/ QAbstractProxyModel_submit<i8> for () { fn submit(self , rsthis: & QAbstractProxyModel) -> i8 { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN19QAbstractProxyModel6submitEv()}; let mut ret = unsafe {C_ZN19QAbstractProxyModel6submitEv(rsthis.qclsinst)}; return ret as i8; // 1 // return 1; } } // proto: QSize QAbstractProxyModel::span(const QModelIndex & index); impl /*struct*/ QAbstractProxyModel { pub fn span<RetType, T: QAbstractProxyModel_span<RetType>>(& self, overload_args: T) -> RetType { return overload_args.span(self); // return 1; } } pub trait QAbstractProxyModel_span<RetType> { fn span(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: QSize QAbstractProxyModel::span(const QModelIndex & index); impl<'a> /*trait*/ QAbstractProxyModel_span<QSize> for (&'a QModelIndex) { fn span(self , rsthis: & QAbstractProxyModel) -> QSize { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel4spanERK11QModelIndex()}; let arg0 = self.qclsinst as *mut c_void; let mut ret = unsafe 
{C_ZNK19QAbstractProxyModel4spanERK11QModelIndex(rsthis.qclsinst, arg0)}; let mut ret1 = QSize::inheritFrom(ret as u64); return ret1; // return 1; } } // proto: bool QAbstractProxyModel::canFetchMore(const QModelIndex & parent); impl /*struct*/ QAbstractProxyModel { pub fn canFetchMore<RetType, T: QAbstractProxyModel_canFetchMore<RetType>>(& self, overload_args: T) -> RetType { return overload_args.canFetchMore(self); // return 1; } } pub trait QAbstractProxyModel_canFetchMore<RetType> { fn canFetchMore(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: bool QAbstractProxyModel::canFetchMore(const QModelIndex & parent); impl<'a> /*trait*/ QAbstractProxyModel_canFetchMore<i8> for (&'a QModelIndex) { fn canFetchMore(self , rsthis: & QAbstractProxyModel) -> i8 { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel12canFetchMoreERK11QModelIndex()}; let arg0 = self.qclsinst as *mut c_void; let mut ret = unsafe {C_ZNK19QAbstractProxyModel12canFetchMoreERK11QModelIndex(rsthis.qclsinst, arg0)}; return ret as i8; // 1 // return 1; } } // proto: const QMetaObject * QAbstractProxyModel::metaObject(); impl /*struct*/ QAbstractProxyModel { pub fn metaObject<RetType, T: QAbstractProxyModel_metaObject<RetType>>(& self, overload_args: T) -> RetType { return overload_args.metaObject(self); // return 1; } } pub trait QAbstractProxyModel_metaObject<RetType> { fn metaObject(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: const QMetaObject * QAbstractProxyModel::metaObject(); impl<'a> /*trait*/ QAbstractProxyModel_metaObject<QMetaObject> for () { fn metaObject(self , rsthis: & QAbstractProxyModel) -> QMetaObject { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel10metaObjectEv()}; let mut ret = unsafe {C_ZNK19QAbstractProxyModel10metaObjectEv(rsthis.qclsinst)}; let mut ret1 = QMetaObject::inheritFrom(ret as u64); return ret1; // return 1; } } // proto: QModelIndex 
QAbstractProxyModel::mapToSource(const QModelIndex & proxyIndex); impl /*struct*/ QAbstractProxyModel { pub fn mapToSource<RetType, T: QAbstractProxyModel_mapToSource<RetType>>(& self, overload_args: T) -> RetType { return overload_args.mapToSource(self); // return 1; } } pub trait QAbstractProxyModel_mapToSource<RetType> { fn mapToSource(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: QModelIndex QAbstractProxyModel::mapToSource(const QModelIndex & proxyIndex); impl<'a> /*trait*/ QAbstractProxyModel_mapToSource<QModelIndex> for (&'a QModelIndex) { fn mapToSource(self , rsthis: & QAbstractProxyModel) -> QModelIndex { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel11mapToSourceERK11QModelIndex()}; let arg0 = self.qclsinst as *mut c_void; let mut ret = unsafe {C_ZNK19QAbstractProxyModel11mapToSourceERK11QModelIndex(rsthis.qclsinst, arg0)}; let mut ret1 = QModelIndex::inheritFrom(ret as u64); return ret1; // return 1; } } // proto: QModelIndex QAbstractProxyModel::mapFromSource(const QModelIndex & sourceIndex); impl /*struct*/ QAbstractProxyModel { pub fn mapFromSource<RetType, T: QAbstractProxyModel_mapFromSource<RetType>>(& self, overload_args: T) -> RetType { return overload_args.mapFromSource(self); // return 1; } } pub trait QAbstractProxyModel_mapFromSource<RetType> { fn mapFromSource(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: QModelIndex QAbstractProxyModel::mapFromSource(const QModelIndex & sourceIndex); impl<'a> /*trait*/ QAbstractProxyModel_mapFromSource<QModelIndex> for (&'a QModelIndex) { fn mapFromSource(self , rsthis: & QAbstractProxyModel) -> QModelIndex { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel13mapFromSourceERK11QModelIndex()}; let arg0 = self.qclsinst as *mut c_void; let mut ret = unsafe {C_ZNK19QAbstractProxyModel13mapFromSourceERK11QModelIndex(rsthis.qclsinst, arg0)}; let mut ret1 = QModelIndex::inheritFrom(ret as u64); return 
ret1; // return 1; } } // proto: QMap<int, QVariant> QAbstractProxyModel::itemData(const QModelIndex & index); impl /*struct*/ QAbstractProxyModel { pub fn itemData<RetType, T: QAbstractProxyModel_itemData<RetType>>(& self, overload_args: T) -> RetType { return overload_args.itemData(self); // return 1; } } pub trait QAbstractProxyModel_itemData<RetType> { fn itemData(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: QMap<int, QVariant> QAbstractProxyModel::itemData(const QModelIndex & index); impl<'a> /*trait*/ QAbstractProxyModel_itemData<u64> for (&'a QModelIndex) { fn itemData(self , rsthis: & QAbstractProxyModel) -> u64 { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel8itemDataERK11QModelIndex()}; let arg0 = self.qclsinst as *mut c_void; let mut ret = unsafe {C_ZNK19QAbstractProxyModel8itemDataERK11QModelIndex(rsthis.qclsinst, arg0)}; return ret as u64; // 5 // return 1; } } // proto: QModelIndex QAbstractProxyModel::buddy(const QModelIndex & index); impl /*struct*/ QAbstractProxyModel { pub fn buddy<RetType, T: QAbstractProxyModel_buddy<RetType>>(& self, overload_args: T) -> RetType { return overload_args.buddy(self); // return 1; } } pub trait QAbstractProxyModel_buddy<RetType> { fn buddy(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: QModelIndex QAbstractProxyModel::buddy(const QModelIndex & index); impl<'a> /*trait*/ QAbstractProxyModel_buddy<QModelIndex> for (&'a QModelIndex) { fn buddy(self , rsthis: & QAbstractProxyModel) -> QModelIndex { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel5buddyERK11QModelIndex()}; let arg0 = self.qclsinst as *mut c_void; let mut ret = unsafe {C_ZNK19QAbstractProxyModel5buddyERK11QModelIndex(rsthis.qclsinst, arg0)}; let mut ret1 = QModelIndex::inheritFrom(ret as u64); return ret1; // return 1; } } // proto: void QAbstractProxyModel::setSourceModel(QAbstractItemModel * sourceModel); impl /*struct*/ QAbstractProxyModel { 
pub fn setSourceModel<RetType, T: QAbstractProxyModel_setSourceModel<RetType>>(& self, overload_args: T) -> RetType { return overload_args.setSourceModel(self); // return 1; } } pub trait QAbstractProxyModel_setSourceModel<RetType> { fn setSourceModel(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: void QAbstractProxyModel::setSourceModel(QAbstractItemModel * sourceModel); impl<'a> /*trait*/ QAbstractProxyModel_setSourceModel<()> for (&'a QAbstractItemModel) { fn setSourceModel(self , rsthis: & QAbstractProxyModel) -> () { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN19QAbstractProxyModel14setSourceModelEP18QAbstractItemModel()}; let arg0 = self.qclsinst as *mut c_void; unsafe {C_ZN19QAbstractProxyModel14setSourceModelEP18QAbstractItemModel(rsthis.qclsinst, arg0)}; // return 1; } } // proto: QModelIndex QAbstractProxyModel::sibling(int row, int column, const QModelIndex & idx); impl /*struct*/ QAbstractProxyModel { pub fn sibling<RetType, T: QAbstractProxyModel_sibling<RetType>>(& self, overload_args: T) -> RetType { return overload_args.sibling(self); // return 1; } } pub trait QAbstractProxyModel_sibling<RetType> { fn sibling(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: QModelIndex QAbstractProxyModel::sibling(int row, int column, const QModelIndex & idx); impl<'a> /*trait*/ QAbstractProxyModel_sibling<QModelIndex> for (i32, i32, &'a QModelIndex) { fn sibling(self , rsthis: & QAbstractProxyModel) -> QModelIndex { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel7siblingEiiRK11QModelIndex()}; let arg0 = self.0 as c_int; let arg1 = self.1 as c_int; let arg2 = self.2.qclsinst as *mut c_void; let mut ret = unsafe {C_ZNK19QAbstractProxyModel7siblingEiiRK11QModelIndex(rsthis.qclsinst, arg0, arg1, arg2)}; let mut ret1 = QModelIndex::inheritFrom(ret as u64); return ret1; // return 1; } } // proto: bool QAbstractProxyModel::setData(const QModelIndex & index, const QVariant & value, int 
role); impl /*struct*/ QAbstractProxyModel { pub fn setData<RetType, T: QAbstractProxyModel_setData<RetType>>(& self, overload_args: T) -> RetType { return overload_args.setData(self); // return 1; } } pub trait QAbstractProxyModel_setData<RetType> { fn setData(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: bool QAbstractProxyModel::setData(const QModelIndex & index, const QVariant & value, int role); impl<'a> /*trait*/ QAbstractProxyModel_setData<i8> for (&'a QModelIndex, &'a QVariant, Option<i32>) { fn setData(self , rsthis: & QAbstractProxyModel) -> i8 { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN19QAbstractProxyModel7setDataERK11QModelIndexRK8QVarianti()}; let arg0 = self.0.qclsinst as *mut c_void; let arg1 = self.1.qclsinst as *mut c_void; let arg2 = (if self.2.is_none() {0 as i32} else {self.2.unwrap()}) as c_int; let mut ret = unsafe {C_ZN19QAbstractProxyModel7setDataERK11QModelIndexRK8QVarianti(rsthis.qclsinst, arg0, arg1, arg2)}; return ret as i8; // 1 // return 1; } } // proto: void QAbstractProxyModel::fetchMore(const QModelIndex & parent); impl /*struct*/ QAbstractProxyModel { pub fn fetchMore<RetType, T: QAbstractProxyModel_fetchMore<RetType>>(& self, overload_args: T) -> RetType { return overload_args.fetchMore(self); // return 1; } } pub trait QAbstractProxyModel_fetchMore<RetType> { fn fetchMore(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: void QAbstractProxyModel::fetchMore(const QModelIndex & parent); impl<'a> /*trait*/ QAbstractProxyModel_fetchMore<()> for (&'a QModelIndex) { fn fetchMore(self , rsthis: & QAbstractProxyModel) -> () { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN19QAbstractProxyModel9fetchMoreERK11QModelIndex()}; let arg0 = self.qclsinst as *mut c_void; unsafe {C_ZN19QAbstractProxyModel9fetchMoreERK11QModelIndex(rsthis.qclsinst, arg0)}; // return 1; } } // proto: void QAbstractProxyModel::~QAbstractProxyModel(); impl /*struct*/ QAbstractProxyModel { pub fn 
free<RetType, T: QAbstractProxyModel_free<RetType>>(& self, overload_args: T) -> RetType { return overload_args.free(self); // return 1; } } pub trait QAbstractProxyModel_free<RetType> { fn free(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: void QAbstractProxyModel::~QAbstractProxyModel(); impl<'a> /*trait*/ QAbstractProxyModel_free<()> for () { fn free(self , rsthis: & QAbstractProxyModel) -> () { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN19QAbstractProxyModelD2Ev()}; unsafe {C_ZN19QAbstractProxyModelD2Ev(rsthis.qclsinst)}; // return 1; } } // proto: void QAbstractProxyModel::QAbstractProxyModel(QObject * parent); impl /*struct*/ QAbstractProxyModel { pub fn new<T: QAbstractProxyModel_new>(value: T) -> QAbstractProxyModel { let rsthis = value.new(); return rsthis; // return 1; } } pub trait QAbstractProxyModel_new { fn new(self) -> QAbstractProxyModel; } // proto: void QAbstractProxyModel::QAbstractProxyModel(QObject * parent); impl<'a> /*trait*/ QAbstractProxyModel_new for (Option<&'a QObject>) { fn new(self) -> QAbstractProxyModel { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZN19QAbstractProxyModelC2EP7QObject()}; let ctysz: c_int = unsafe{QAbstractProxyModel_Class_Size()}; let qthis_ph: u64 = unsafe{calloc(1, ctysz as usize)} as u64; let arg0 = (if self.is_none() {0} else {self.unwrap().qclsinst}) as *mut c_void; let qthis: u64 = unsafe {C_ZN19QAbstractProxyModelC2EP7QObject(arg0)}; let rsthis = QAbstractProxyModel{qbase: QAbstractItemModel::inheritFrom(qthis), qclsinst: qthis, ..Default::default()}; return rsthis; // return 1; } } // proto: bool QAbstractProxyModel::hasChildren(const QModelIndex & parent); impl /*struct*/ QAbstractProxyModel { pub fn hasChildren<RetType, T: QAbstractProxyModel_hasChildren<RetType>>(& self, overload_args: T) -> RetType { return overload_args.hasChildren(self); // return 1; } } pub trait QAbstractProxyModel_hasChildren<RetType> { fn hasChildren(self , rsthis: & 
QAbstractProxyModel) -> RetType; } // proto: bool QAbstractProxyModel::hasChildren(const QModelIndex & parent); impl<'a> /*trait*/ QAbstractProxyModel_hasChildren<i8> for (Option<&'a QModelIndex>) { fn hasChildren(self , rsthis: & QAbstractProxyModel) -> i8 { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel11hasChildrenERK11QModelIndex()}; let arg0 = (if self.is_none() {QModelIndex::new(()).qclsinst} else {self.unwrap().qclsinst}) as *mut c_void; let mut ret = unsafe {C_ZNK19QAbstractProxyModel11hasChildrenERK11QModelIndex(rsthis.qclsinst, arg0)}; return ret as i8; // 1 // return 1; } } // proto: QAbstractItemModel * QAbstractProxyModel::sourceModel(); impl /*struct*/ QAbstractProxyModel { pub fn sourceModel<RetType, T: QAbstractProxyModel_sourceModel<RetType>>(& self, overload_args: T) -> RetType { return overload_args.sourceModel(self); // return 1; } } pub trait QAbstractProxyModel_sourceModel<RetType> { fn sourceModel(self , rsthis: & QAbstractProxyModel) -> RetType; } // proto: QAbstractItemModel * QAbstractProxyModel::sourceModel(); impl<'a> /*trait*/ QAbstractProxyModel_sourceModel<QAbstractItemModel> for () { fn sourceModel(self , rsthis: & QAbstractProxyModel) -> QAbstractItemModel { // let qthis: *mut c_void = unsafe{calloc(1, 32)}; // unsafe{_ZNK19QAbstractProxyModel11sourceModelEv()}; let mut ret = unsafe {C_ZNK19QAbstractProxyModel11sourceModelEv(rsthis.qclsinst)}; let mut ret1 = QAbstractItemModel::inheritFrom(ret as u64); return ret1; // return 1; } } #[derive(Default)] // for QAbstractProxyModel_sourceModelChanged pub struct QAbstractProxyModel_sourceModelChanged_signal{poi:u64} impl /* struct */ QAbstractProxyModel { pub fn sourceModelChanged(&self) -> QAbstractProxyModel_sourceModelChanged_signal { return QAbstractProxyModel_sourceModelChanged_signal{poi:self.qclsinst}; } } impl /* struct */ QAbstractProxyModel_sourceModelChanged_signal { pub fn connect<T: 
QAbstractProxyModel_sourceModelChanged_signal_connect>(self, overload_args: T) { overload_args.connect(self); } } pub trait QAbstractProxyModel_sourceModelChanged_signal_connect { fn connect(self, sigthis: QAbstractProxyModel_sourceModelChanged_signal); } // <= body block end
#![forbid(unsafe_code)]

pub mod borsh_state;
pub mod borsh_utils;
pub mod error;
pub mod instruction;
pub mod processor;
pub mod state;

use crate::error::Error;
use borsh_state::InitBorshState;
use solana_program::{ account_info::AccountInfo, program_error::ProgramError, program_pack::IsInitialized };

// expected struct `solana_program::pubkey::Pubkey`, found struct `state::PublicKey`rustc(E0308)
use crate::state::PublicKey;
use state::{ImpermenantLossStopLoss};

#[cfg(not(feature = "no-entrypoint"))]
pub mod entrypoint;

/// Read the resolved `min_change_factor` from an `ImpermenantLossStopLoss`
/// account.
///
/// Deserializes the account data via `load_initialized` and returns the
/// stored factor.
// NOTE(review): `Error::OwnerMismatch` looks like the wrong variant for an
// uninitialized account, and `load_initialized` presumably already rejects
// uninitialized state — confirm against `error.rs` / `borsh_state.rs`.
pub fn read_min_change_factor(
    impermenant_loss_stop_loss_info: &AccountInfo,
) -> Result<u64, ProgramError> {
    let impermenant_loss_stop_loss =
        ImpermenantLossStopLoss::load_initialized(&impermenant_loss_stop_loss_info)?;
    if !impermenant_loss_stop_loss.is_initialized() {
        return Err(Error::OwnerMismatch)?;
    }
    Ok(impermenant_loss_stop_loss.min_change_factor)
}

/// Read the `token_a` public key from an `ImpermenantLossStopLoss` account.
/// (The original doc comment here was copy-pasted from
/// `read_min_change_factor`.)
pub fn read_token_a(
    impermenant_loss_stop_loss_info: &AccountInfo,
) -> Result<PublicKey, ProgramError> {
    let impermenant_loss_stop_loss =
        ImpermenantLossStopLoss::load_initialized(&impermenant_loss_stop_loss_info)?;
    if !impermenant_loss_stop_loss.is_initialized() {
        return Err(Error::UnknownError)?;
    }
    Ok(impermenant_loss_stop_loss.token_a)
}

// Re-export the current solana SDK types for downstream users building with a
// different SDK version.
pub use solana_program;
use std::collections::HashMap;

/// Depth-first traversal that prints each reachable node exactly once.
///
/// `visited` is threaded through by mutable reference so that a node marked
/// visited inside one subtree is also seen as visited from its siblings.
/// The original version cloned the whole map for every recursive call, which
/// both allocated O(n) per call and — on graphs where two parents share a
/// child — let the shared child be printed more than once.
fn dfs<'a>(
    visited: &mut HashMap<&'a str, u8>,
    tree: &'a HashMap<&'a str, Vec<&'a str>>,
    node: &'a str,
) {
    if !visited.contains_key(node) {
        println!("{}", node);
        visited.insert(node, 1);
        // `get` instead of `&tree[node]`: a child listed in the adjacency
        // lists but missing from the map no longer panics.
        if let Some(children) = tree.get(node) {
            for &child in children {
                dfs(visited, tree, child);
            }
        }
    }
}

fn main() {
    let mut tree: HashMap<&str, Vec<&str>> = HashMap::new();
    tree.insert("A", vec!["B", "C"]);
    tree.insert("B", vec!["D", "E"]);
    tree.insert("C", vec![]);
    tree.insert("D", vec![]);
    tree.insert("E", vec![]);
    dfs(&mut HashMap::new(), &tree, "A");
}
/// Demonstrates the three `self` receiver flavours on a unit struct.
struct Greeter;

impl Greeter {
    /// Shared-reference receiver: prints the greeting.
    fn call(&self) {
        println!("Hello rust");
    }

    /// Mutable-reference receiver: delegates to `call` (a `&mut` borrow
    /// reborrows as `&` automatically).
    fn call_mut(&mut self) {
        self.call();
    }

    /// By-value receiver: consumes the instance before greeting.
    fn call_once(self) {
        self.call();
    }
}

fn main() {
    // Three greetings total: call, call_mut (which greets via call), call.
    let mut greeter = Greeter;
    greeter.call();
    greeter.call_mut();
    greeter.call();
}
extern crate futures;
extern crate futures_test;
extern crate futures_watch;

use futures::{Stream};
use futures_test::Harness;
use futures_watch::*;

/// End-to-end check of one `Watch`/`Store` pair: a stored value wakes the
/// watcher, and dropping the store marks the watch as final while keeping
/// the last value readable.
#[test]
fn smoke() {
    let (mut watch, mut store) = Watch::new("one");

    // Check the value
    assert_eq!(*watch.borrow(), "one");
    assert!(!watch.is_final());

    Harness::poll_fn(|| watch.poll()).with(|harness| {
        // Nothing stored yet, so the stream is pending.
        assert!(!harness.poll().unwrap().is_ready());

        // Change the value. `store` returns the previous value.
        assert_eq!(store.store("two").unwrap(), "one");

        // The watch was notified
        assert!(harness.poll().unwrap().is_ready());
    });

    assert!(!watch.is_final());
    assert_eq!(*watch.borrow(), "two");

    Harness::poll_fn(|| watch.poll()).with(|harness| {
        assert!(!harness.poll().unwrap().is_ready());

        // Dropping `store` notifies watches
        drop(store);

        // The watch was notified
        assert!(harness.poll().unwrap().is_ready());
    });

    // With the store gone the watch is final and retains the last value.
    assert!(watch.is_final());
    assert_eq!(*watch.borrow(), "two");
}

/// A cloned watch observes the same store: both clones are notified and both
/// see the new value.
#[test]
fn multiple_watches() {
    let (mut watch1, mut store) = Watch::new("one");
    let mut watch2 = watch1.clone();

    {
        let mut h1 = Harness::poll_fn(|| watch1.poll());
        let mut h2 = Harness::poll_fn(|| watch2.poll());

        assert!(!h1.poll().unwrap().is_ready());

        // Change the value.
        assert_eq!(store.store("two").unwrap(), "one");

        // The watch was notified
        assert!(h1.poll().unwrap().is_ready());
        // NOTE(review): `h2` is asserted ready without a prior pending poll —
        // presumably a change is visible to a watch that never polled before;
        // confirm against futures-watch semantics.
        assert!(h2.poll().unwrap().is_ready());
    }

    assert_eq!(*watch1.borrow(), "two");
    assert_eq!(*watch2.borrow(), "two");
}
extern crate git2;

mod lib;

use futures::executor::block_on;
use lib::merge_all_hooks;

/// Entry point: drives the async hook-merging routine to completion on the
/// current thread.
fn main() {
    let merge = merge_all_hooks();
    block_on(merge);
}
use std::{collections::HashSet, fs};

const FILENAME: &str = "inputs/inputday5";

/// Decodes one boarding-pass line ("FBFBBFF" row part + "RLR" column part)
/// into its seat id (`row * 8 + column`).
///
/// Row and column are simply the upper 7 and lower 3 bits of one 10-bit
/// number, so the whole line can be read as binary with 'B'/'R' = 1 and
/// 'F'/'L' = 0 — equivalent to the original halving-offset walk.
fn seat_id(line: &str) -> i32 {
    line.chars()
        .fold(0, |id, c| id * 2 + i32::from(c == 'B' || c == 'R'))
}

/// Solves Advent of Code 2020 day 5 from `FILENAME`:
/// part 1 prints the highest seat id; part 2 prints the missing seat id whose
/// two neighbours are both present.
pub fn solve() {
    let input = fs::read_to_string(FILENAME).expect("Failed to read file");
    let seats: Vec<i32> = input.lines().map(seat_id).collect();

    let max: i32 = *seats.iter().max().expect("input contains no seats");
    println!("Answer to day 5 part 1 is {}", max);

    let seats_part2: HashSet<i32> = seats.into_iter().collect();
    for i in 8..max {
        // Bug fix: the original tested `contains(&(i - 1))` twice, so every
        // absent id with just a predecessor was reported. Our seat must have
        // BOTH neighbours occupied.
        if seats_part2.contains(&(i - 1))
            && seats_part2.contains(&(i + 1))
            && !seats_part2.contains(&i)
        {
            println!("Answer to day 5 part 2 is {}", i);
        }
    }
}
use input_i_scanner::InputIScanner;
use join::Join;

/// Competitive-programming solution (reads `s` and `k` from stdin, writes the
/// answer to stdout).
///
/// Greedy: each non-'a' character is wrapped forward to 'a' (cost
/// `'z' - c + 1` increments) while the budget `k` allows; any leftover budget
/// is applied, modulo 26, to the last character.
fn main() {
    let stdin = std::io::stdin();
    let mut _i_i = InputIScanner::from(stdin.lock());

    // Local scanner macro: single values, tuples, and `n`-repeated reads
    // collected into Vecs.
    macro_rules! scan {
        (($($t: ty),+)) => {
            ($(scan!($t)),+)
        };
        ($t: ty) => {
            _i_i.scan::<$t>() as $t
        };
        (($($t: ty),+); $n: expr) => {
            std::iter::repeat_with(|| scan!(($($t),+))).take($n).collect::<Vec<_>>()
        };
        ($t: ty; $n: expr) => {
            std::iter::repeat_with(|| scan!($t)).take($n).collect::<Vec<_>>()
        };
    }

    let s = scan!(String);
    let k = scan!(usize);
    let mut s: Vec<char> = s.chars().collect();
    let n = s.len();
    let mut k = k;
    for i in 0..n {
        if s[i] == 'a' {
            continue;
        }
        // Cost to wrap this character forward past 'z' to 'a'.
        let j = 'z' as usize - s[i] as usize + 1;
        if j <= k {
            s[i] = 'a';
            k -= j;
        }
    }
    // Spend the remaining budget on the last character; 26 steps is a no-op.
    k %= 26;
    let last = s[n - 1];
    // NOTE(review): assumes the remaining shift never pushes the last
    // character past 'z' — true when it ended up 'a' (k < 26); confirm for
    // the case where the last character could not be converted.
    assert!(last as u8 + k as u8 <= 'z' as u8);
    s[n - 1] = (last as u8 + k as u8) as char;
    println!("{}", s.iter().join(""));
}
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // exec-env:RUST_POISON_ON_FREE=1 // Test argument patterns where we create refs to the inside of // boxes. Make sure that we don't free the box as we match the // pattern. #![feature(box_patterns)] #![feature(box_syntax)] fn getaddr(box ref x: Box<usize>) -> *const usize { let addr: *const usize = &*x; addr } fn checkval(box ref x: Box<usize>) -> usize { *x } pub fn main() { let obj: Box<_> = box 1; let objptr: *const usize = &*obj; let xptr = getaddr(obj); assert_eq!(objptr, xptr); let obj = box 22; assert_eq!(checkval(obj), 22); }
// Machine-generated Windows Runtime bindings for the `Windows.Devices.Lights`
// namespace (windows-rs style): device handles are opaque interface pointers,
// and the enum-like types are `#[repr(transparent)]` i32/u32 newtypes with
// associated constants.
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]

#[cfg(feature = "Devices_Lights_Effects")]
pub mod Effects;

#[link(name = "windows")]
extern "system" {}

// Opaque WinRT interface pointers.
pub type Lamp = *mut ::core::ffi::c_void;
pub type LampArray = *mut ::core::ffi::c_void;

// Kind of device a `LampArray` represents; transparent i32 so any ABI value
// round-trips even if the OS adds new variants.
#[repr(transparent)]
pub struct LampArrayKind(pub i32);
impl LampArrayKind {
    pub const Undefined: Self = Self(0i32);
    pub const Keyboard: Self = Self(1i32);
    pub const Mouse: Self = Self(2i32);
    pub const GameController: Self = Self(3i32);
    pub const Peripheral: Self = Self(4i32);
    pub const Scene: Self = Self(5i32);
    pub const Notification: Self = Self(6i32);
    pub const Chassis: Self = Self(7i32);
    pub const Wearable: Self = Self(8i32);
    pub const Furniture: Self = Self(9i32);
    pub const Art: Self = Self(10i32);
}
// The generator spells Copy/Clone out instead of deriving them.
impl ::core::marker::Copy for LampArrayKind {}
impl ::core::clone::Clone for LampArrayKind {
    fn clone(&self) -> Self {
        *self
    }
}

pub type LampAvailabilityChangedEventArgs = *mut ::core::ffi::c_void;
pub type LampInfo = *mut ::core::ffi::c_void;

// Bit flags describing what a lamp is used for; values are powers of two so
// they can be OR-ed together.
#[repr(transparent)]
pub struct LampPurposes(pub u32);
impl LampPurposes {
    pub const Undefined: Self = Self(0u32);
    pub const Control: Self = Self(1u32);
    pub const Accent: Self = Self(2u32);
    pub const Branding: Self = Self(4u32);
    pub const Status: Self = Self(8u32);
    pub const Illumination: Self = Self(16u32);
    pub const Presentation: Self = Self(32u32);
}
impl ::core::marker::Copy for LampPurposes {}
impl ::core::clone::Clone for LampPurposes {
    fn clone(&self) -> Self {
        *self
    }
}
use crate::style::Style;
use crate::test_util::expect_debug;
use crate::{AsciiCanvas, AsciiView};

// NOTE(review): the expected-output raw strings below appear to have had
// their internal newlines/indentation collapsed by whatever flattened this
// file; they are reproduced verbatim here and should be checked against the
// original fixtures.

/// Draws a box outline on a canvas that already has enough rows.
#[test]
fn draw_box() {
    let mut canvas = AsciiCanvas::new(5, 10);
    {
        let view: &mut dyn AsciiView = &mut canvas;
        view.draw_vertical_line(2..5, 2);
        view.draw_vertical_line(2..5, 7);
        view.draw_horizontal_line(2, 2..8);
        view.draw_horizontal_line(4, 2..8);
    }
    expect_debug(
        &canvas.to_strings(),
        r#" [ "", "", " ┌────┐", " │ │", " └────┘", ] "#
        .trim(),
    );
}

/// Same drawing on a canvas created with zero rows: the canvas must grow to
/// fit, producing the same output as `draw_box`.
#[test]
fn grow_box() {
    let mut canvas = AsciiCanvas::new(0, 10);
    {
        let view: &mut dyn AsciiView = &mut canvas;
        view.draw_vertical_line(2..5, 2);
        view.draw_vertical_line(2..5, 7);
        view.draw_horizontal_line(2, 2..8);
        view.draw_horizontal_line(4, 2..8);
    }
    expect_debug(
        &canvas.to_strings(),
        r#" [ "", "", " ┌────┐", " │ │", " └────┘", ] "#
        .trim(),
    );
}

/// Draws through a shifted view: all coordinates are offset by (1, 2), and
/// text written into the box lands at the shifted position.
#[test]
fn shift() {
    let mut canvas = AsciiCanvas::new(0, 10);
    {
        let canvas: &mut dyn AsciiView = &mut canvas;
        let view: &mut dyn AsciiView = &mut canvas.shift(1, 2);
        view.draw_vertical_line(2..5, 2);
        view.draw_vertical_line(2..5, 7);
        view.draw_horizontal_line(2, 2..8);
        view.draw_horizontal_line(4, 2..8);
        view.write_chars(3, 3, "Hi!".chars(), Style::new());
    }
    expect_debug(
        &canvas.to_strings(),
        r#" [ "", "", "", " ┌────┐", " │Hi! │", " └────┘", ] "#
        .trim(),
    );
}
use proconio::{input, marker::Chars};

/// Returns true when `x` and `y` are "similar": equal, or one of the
/// easily-confused pairs 1/l and 0/o (in either order).
fn similar(x: char, y: char) -> bool {
    x == y || matches!((x, y), ('1', 'l') | ('l', '1') | ('0', 'o') | ('o', '0'))
}

/// Reads `n`, `s`, `t` and prints "Yes" when the two strings are
/// character-wise similar over the first `n` positions, "No" otherwise.
fn main() {
    input! {
        n: usize,
        s: Chars,
        t: Chars,
    };
    // Iterator form replaces the index loop and its `== false` comparison;
    // `take(n)` keeps the original behavior of only checking the first `n`
    // positions.
    let ok = s.iter().take(n).zip(t.iter()).all(|(&a, &b)| similar(a, b));
    println!("{}", if ok { "Yes" } else { "No" });
}
use crate::auth; use crate::handlers::types::*; use crate::Pool; use actix_web::{web, Error, HttpResponse}; use actix_web_httpauth::extractors::bearer::BearerAuth; use crate::controllers::user_chat_controller::*; use crate::diesel::QueryDsl; use crate::diesel::RunQueryDsl; use crate::helpers::socket::push_user_message; use crate::model::{ChatList, NewChatList, NewUserChat, User, UserChat}; use crate::schema::unread_user_chat::dsl::user_id as unread_chat_user_id; use crate::schema::unread_user_chat::dsl::*; use crate::schema::user_chat::dsl::*; use crate::schema::users::dsl::*; use tokio::task; use diesel::dsl::insert_into; use diesel::prelude::*; pub async fn send_message( db: web::Data<Pool>, token: BearerAuth, other_user_id: web::Path<IdPathInfo>, item: web::Json<ChatMessage>, ) -> Result<HttpResponse, Error> { match auth::validate_token(&token.token().to_string()) { Ok(res) => { if res == true { let conn = db.get().unwrap(); let decoded_token = auth::decode_token(&token.token().to_string()); let user = users .find(decoded_token.parse::<i32>().unwrap()) .first::<User>(&conn); match user { Ok(user) => { let new_chat = NewUserChat { user_id: &user.id, reciever: &other_user_id.id, chat: &item.chat, created_at: chrono::Local::now().naive_local(), }; let socket_channel = format!("user-chat-{}-{}", &user.id, other_user_id.id); let response = insert_into(user_chat).values(&new_chat).get_result(&conn); match response { Ok(response) => { let socket_message = UserMessage { message: response, user: user.clone(), }; push_user_message( &socket_channel, &"user_chat_created".to_string(), &socket_message, ) .await; //spawn task in new thread task::spawn(async move { //check if user exist in chat list let list = unread_user_chat .filter(unread_chat_user_id.eq(other_user_id.id)) .filter(other.eq(&user.id)) .first::<ChatList>(&conn); match list { Ok(_list_item) => { //update item for both users diesel::update( unread_user_chat .filter( unread_chat_user_id.eq(other_user_id.id), ) 
.filter(other.eq(&user.id)), ) .set(updated_at.eq(chrono::Local::now().naive_local())) .execute(&conn) .unwrap(); diesel::update( unread_user_chat .filter(unread_chat_user_id.eq(&user.id)) .filter(other.eq(other_user_id.id)), ) .set(updated_at.eq(chrono::Local::now().naive_local())) .execute(&conn) .unwrap(); } Err(diesel::result::Error::NotFound) => { //create new chat list for both users if none exist let user1 = NewChatList { user_id: &user.id, other: &other_user_id.id, updated_at: chrono::Local::now().naive_local(), }; let user2 = NewChatList { user_id: &user.id, other: &other_user_id.id, updated_at: chrono::Local::now().naive_local(), }; insert_into(unread_user_chat) .values(&user1) .execute(&conn) .unwrap(); insert_into(unread_user_chat) .values(&user2) .execute(&conn) .unwrap(); } _ => { println!("encountered an error") } } }); Ok(HttpResponse::Ok().json(Response::new( true, "message sent successfully".to_string(), ))) } _ => Ok(HttpResponse::Ok() .json(ResponseError::new(false, "error adding chat".to_string()))), } } _ => Ok(HttpResponse::Ok().json(ResponseError::new( false, "error getting user details".to_string(), ))), } } else { Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))) } } Err(_) => Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))), } } pub async fn update_message( db: web::Data<Pool>, token: BearerAuth, other_user_id: web::Path<MultiIdPathInfo>, item: web::Json<ChatMessage>, ) -> Result<HttpResponse, Error> { match auth::validate_token(&token.token().to_string()) { Ok(res) => { if res == true { let conn = db.get().unwrap(); let decoded_token = auth::decode_token(&token.token().to_string()); let user = users .find(decoded_token.parse::<i32>().unwrap()) .first::<User>(&conn); match user { Ok(user) => { let updated_chat = diesel::update(user_chat.find(other_user_id.user_id)) .set(chat.eq(&item.chat)) .execute(&conn); match updated_chat { Ok(_updated_chat) => { let message = user_chat 
.find(other_user_id.chat_id) .first::<UserChat>(&conn) .unwrap(); let socket_channel = format!("user-chat-{}-{}", &user.id, other_user_id.user_id); let socket_message = UserMessage { message: message, user: user, }; push_user_message( &socket_channel, &"user_chat_update".to_string(), &socket_message, ) .await; Ok(HttpResponse::Ok().json(Response::new( true, "message updated successfully".to_string(), ))) } _ => Ok(HttpResponse::Ok() .json(Response::new(false, "error updating chat".to_string()))), } } _ => Ok(HttpResponse::Ok().json(ResponseError::new( false, "error getting user details".to_string(), ))), } } else { Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))) } } Err(_) => Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))), } } pub async fn get_all_message( db: web::Data<Pool>, auth: BearerAuth, other_user_id: web::Path<IdPathInfo>, item: web::Query<PaginateQuery>, ) -> Result<HttpResponse, Error> { match auth::validate_token(&auth.token().to_string()) { Ok(res) => { if res == true { Ok(web::block(move || { get_all_message_db(db, auth.token().to_string(), other_user_id, item) }) .await .map(|response| HttpResponse::Ok().json(response)) .map_err(|_| { HttpResponse::Ok().json(Response::new(false, "Error getting chat".to_string())) })?) 
} else { Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))) } } Err(_) => Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))), } } pub async fn delete_message( db: web::Data<Pool>, auth: BearerAuth, chat_id: web::Path<IdPathInfo>, ) -> Result<HttpResponse, Error> { match auth::validate_token(&auth.token().to_string()) { Ok(res) => { if res == true { Ok( web::block(move || delete_message_db(db, auth.token().to_string(), chat_id)) .await .map(|response| HttpResponse::Ok().json(response)) .map_err(|_| { HttpResponse::Ok() .json(Response::new(false, "Error deleting chat".to_string())) })?, ) } else { Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))) } } Err(_) => Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))), } } pub async fn get_chat_list(db: web::Data<Pool>, auth: BearerAuth) -> Result<HttpResponse, Error> { match auth::validate_token(&auth.token().to_string()) { Ok(res) => { if res == true { Ok( web::block(move || get_chat_list_db(db, auth.token().to_string())) .await .map(|response| HttpResponse::Ok().json(response)) .map_err(|_| { HttpResponse::Ok() .json(Response::new(false, "Error getting list".to_string())) })?, ) } else { Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))) } } Err(_) => Ok(HttpResponse::Ok().json(ResponseError::new(false, "jwt error".to_string()))), } }
// svd2rust-generated read-only accessors for the FDCAN TT cycle time and
// count register (FDCAN_TTCTC).

#[doc = "Reader of register FDCAN_TTCTC"]
pub type R = crate::R<u32, super::FDCAN_TTCTC>;
#[doc = "Reader of field `CT`"]
pub type CT_R = crate::R<u16, u16>;
#[doc = "Reader of field `CC`"]
pub type CC_R = crate::R<u8, u8>;
impl R {
    #[doc = "Bits 0:15 - Cycle Time"]
    #[inline(always)]
    pub fn ct(&self) -> CT_R {
        // Low 16 bits of the register value.
        CT_R::new((self.bits & 0xffff) as u16)
    }
    #[doc = "Bits 16:21 - Cycle Count"]
    #[inline(always)]
    pub fn cc(&self) -> CC_R {
        // Bits 16..=21 (6-bit field).
        CC_R::new(((self.bits >> 16) & 0x3f) as u8)
    }
}
/*
Copyright (c) 2015, 2016 Saurav Sachidanand

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/

#![allow(non_snake_case)]

extern crate astro;
use astro::*;

/// Geocentric-parallax quantities (rho sin phi', rho cos phi') for an
/// observer at geographic latitude 33°21'22" and height 1706 m, checked to
/// six decimal places.
#[test]
fn rho_sin_cos_phi() {
    let (rho_sin_phi, rho_cos_phi) = planet::earth::rho_sin_cos_phi (
        angle::deg_frm_dms(33, 21, 22.0).to_radians(),
        1706.0
    );
    assert_eq!(
        (util::round_upto_digits(rho_sin_phi, 6), util::round_upto_digits(rho_cos_phi, 6)),
        (0.546861, 0.836339)
    );
}

/// Geodesic distance Paris <-> Washington via both the accurate and the
/// approximate formula (expected values in kilometres).
#[test]
fn geodesic_dist() {
    let paris = coords::GeographPoint {
        long: angle::deg_frm_dms(-2, 20, 14.0).to_radians(),
        lat : angle::deg_frm_dms(48, 50, 11.0).to_radians()
    };
    let washington = coords::GeographPoint {
        long: angle::deg_frm_dms(77, 3, 56.0).to_radians(),
        lat : angle::deg_frm_dms(38, 55, 17.0).to_radians()
    };

    let distance = planet::earth::geodesic_dist(&paris, &washington);
    assert_eq!(util::round_upto_digits(distance, 2), 6181.63);

    let approx_distance = planet::earth::approx_geodesic_dist (
        &paris,
        &washington
    );
    assert_eq!(util::round_upto_digits(approx_distance, 0), 6166.0);
}

/// Quantities at latitude 42°: radius of the parallel, linear rotation
/// velocity, and radius of curvature of the meridian.
#[test]
fn radii() {
    let lat = 42_f64.to_radians();

    let Rp = planet::earth::rad_of_parll_lat(lat);
    assert_eq!(
        util::round_upto_digits(Rp, 0),
        util::round_upto_digits(4747.001, 0)
    );

    let lin_vel = planet::earth::linear_velocity_at_lat(lat);
    assert_eq!(util::round_upto_digits(lin_vel, 5), 0.34616);

    let Rm = planet::earth::rad_curv_of_meridian(lat);
    assert_eq!(
        util::round_upto_digits(Rm, 2),
        util::round_upto_digits(6364.033, 2)
    );
}
//! Time Filter //! //! The time filter can be used to restrict log events to only a certain portion of the day. //! //! # Example //! //! ```toml //! # Restrict log events between 22:00:00 UTC and 23:59:59 UTC to only Error. //! [filter.time] //! start = 22:00:00 //! # Optional (Defaults shown) //! end = 23:59:59 //! level = "Warn" //! offset = 0 //! exclude = true //! on_match = "Accept" //! on_mismatch = "Deny" //! ``` use Filter; use chrono::{Duration, NaiveTime, UTC}; use config::filter::MatchAction; use config::filter::MatchAction::*; use log::{LogLevelFilter, LogRecord}; #[cfg_attr(test, derive(PartialEq))] #[derive(Clone, Debug)] /// Time Filter struct pub struct TimeFilter { /// Optional level of messages to be filtered. Anything at or below this level will be filtered /// out if within the specified time. The default is `Warn` meaning any messages that are /// higher than `Warn` will be logged regardless of the time. level: Option<LogLevelFilter>, /// A time in HH:mm:ss format. start: NaiveTime, /// A time in HH:mm:ss format. Specifying an end time less than the start time will result in /// no log entries being written. If no end time is specified, defaults to 23:59:59. end: Option<NaiveTime>, /// The timezone offset from UTC to use when comparing `start` and `end` to the event timestamp. /// The default is 0 meaning all times are compared in UTC. offset: Option<i64>, /// Flips the check. If false, if the event falls between start and end it will be included, /// rather than exclude. The default is true. exclude: Option<bool>, /// Action to take when the filter matches. May be `Accept`, `Deny` or `Neutral`. The default /// value is `Neutral`. on_match: Option<MatchAction>, /// Action to take when the filter does not match. May be `Accept`, `Deny` or `Neutral`. The /// default value is `Deny`. on_mismatch: Option<MatchAction>, } impl TimeFilter { /// Create a new ThresholdFilter with the given minimum level. 
pub fn new(start: NaiveTime) -> TimeFilter { TimeFilter { level: None, start: start, end: None, offset: None, exclude: None, on_match: None, on_mismatch: None, } } /// Set the minimum level to be filtered. pub fn level(mut self, level: Option<LogLevelFilter>) -> TimeFilter { self.level = level; self } /// Set the end NaiveTime to filter. pub fn end(mut self, end: Option<NaiveTime>) -> TimeFilter { self.end = end; self } /// Set the timezone offset from UTC. pub fn offset(mut self, offset: Option<i64>) -> TimeFilter { self.offset = offset; self } /// Set the timezone offset from UTC. pub fn exclude(mut self, exclude: Option<bool>) -> TimeFilter { self.exclude = exclude; self } /// Set the on match MatchAction. Default is `Neutral`. pub fn on_match(mut self, action: Option<MatchAction>) -> TimeFilter { self.on_match = action; self } /// Set the on mis-match MatchAction. Default is `Deny`. pub fn on_mismatch(mut self, action: Option<MatchAction>) -> TimeFilter { self.on_mismatch = action; self } } impl Filter for TimeFilter { fn filter(&self, record: &LogRecord) -> MatchAction { // now as UTC let now = UTC::now(); // Get the offset from UTC from the config. let offset = match self.offset { Some(o) => o, None => 0, }; // Get the exclude flag. Defaults to true. Exclude false means include when between start // and end. let exclude = match self.exclude { Some(e) => e, None => true, }; // Get the minimum level to filter. let level = match self.level { Some(l) => l, None => LogLevelFilter::Warn, }; // Setup the NaiveTime for the end parameter. let naive_end = match self.end { Some(e) => e, None => NaiveTime::from_hms(23, 59, 59), }; // The start datetime with offset. let start_dt = match now.date().and_time(self.start) { Some(s) => s, None => now, }; // The end datetime with offset. let end_dt = match now.date().and_time(naive_end) { Some(e) => e, None => now, }; // The start datetime in UTC. 
let start_dt_utc = start_dt - Duration::hours(offset); // The end datetime in UTC. let end_dt_utc = end_dt - Duration::hours(offset); // now is not between start and end let mut matched = !((start_dt_utc <= now) && (now <= end_dt_utc)); // If we are including, rather than excluding, flip the match. if !exclude { matched = !matched } // Check for a match, or if the record level is less that the configured minimum level. if matched || record.level() < level { // Return on_match result. match self.on_match { Some(ref m) => m.clone(), None => Neutral, } } else { // Return on_mismatch result. match self.on_mismatch { Some(ref m) => m.clone(), None => Deny, } } } } #[cfg(feature = "rustc-serialize")] mod rs { use config::rs::{read_llf_opt, read_naivetime, read_naivetime_opt}; use rustc_serialize::{Decodable, Decoder}; use super::*; impl Decodable for TimeFilter { fn decode<D: Decoder>(d: &mut D) -> Result<TimeFilter, D::Error> { d.read_struct("TimeFilter", 6, |d| { let level = try!(d.read_struct_field("level", 1, |d| d.read_option(read_llf_opt))); let start = try!(d.read_struct_field("start", 2, read_naivetime)); let end = try!(d.read_struct_field("end", 3, |d| d.read_option(read_naivetime_opt))); let offset = try!(d.read_struct_field("offset", 4, |d| Decodable::decode(d))); let exclude = try!(d.read_struct_field("exclude", 5, |d| Decodable::decode(d))); let on_match = try!(d.read_struct_field("on_match", 6, |d| Decodable::decode(d))); let on_mismatch = try!(d.read_struct_field("on_mismatch", 7, |d| Decodable::decode(d))); let tf = TimeFilter::new(start) .level(level) .end(end) .offset(offset) .exclude(exclude) .on_match(on_match) .on_mismatch(on_mismatch); Ok(tf) }) } } } #[cfg(feature = "serde")] mod serde { use config::serde::{LogLevelFilterField, NaiveTimeField}; use config::filter::serde::MatchActionField; use super::*; use serde::{Deserialize, Deserializer}; use serde::de::{MapVisitor, Visitor}; enum TimeFilterField { Level, Start, End, Offset, Exclude, OnMatch, 
OnMismatch, } impl Deserialize for TimeFilterField { fn deserialize<D>(deserializer: &mut D) -> Result<TimeFilterField, D::Error> where D: Deserializer { struct TimeFilterFieldVisitor; impl Visitor for TimeFilterFieldVisitor { type Value = TimeFilterField; fn visit_str<E>(&mut self, value: &str) -> Result<TimeFilterField, E> where E: ::serde::de::Error { match value { "level" => Ok(TimeFilterField::Level), "start" => Ok(TimeFilterField::Start), "end" => Ok(TimeFilterField::End), "offset" => Ok(TimeFilterField::Offset), "exclude" => Ok(TimeFilterField::Exclude), "on_match" => Ok(TimeFilterField::OnMatch), "on_mismatch" => Ok(TimeFilterField::OnMismatch), _ => Err(::serde::de::Error::syntax("Unexpected field!")), } } } deserializer.visit(TimeFilterFieldVisitor) } } impl Deserialize for TimeFilter { fn deserialize<D>(deserializer: &mut D) -> Result<TimeFilter, D::Error> where D: Deserializer { static FIELDS: &'static [&'static str] = &["level", "start", "end", "offset", "exclude", "on_match", "on_mismatch"]; deserializer.visit_struct("TimeFilter", FIELDS, TimeFilterVisitor) } } struct TimeFilterVisitor; impl Visitor for TimeFilterVisitor { type Value = TimeFilter; fn visit_map<V>(&mut self, mut visitor: V) -> Result<TimeFilter, V::Error> where V: MapVisitor { let mut level: Option<LogLevelFilterField> = None; let mut start: Option<NaiveTimeField> = None; let mut end: Option<NaiveTimeField> = None; let mut offset = None; let mut exclude = None; let mut on_match: Option<MatchActionField> = None; let mut on_mismatch: Option<MatchActionField> = None; loop { match try!(visitor.visit_key()) { Some(TimeFilterField::Level) => { level = Some(try!(visitor.visit_value())); } Some(TimeFilterField::Start) => { start = Some(try!(visitor.visit_value())); } Some(TimeFilterField::End) => { end = Some(try!(visitor.visit_value())); } Some(TimeFilterField::Offset) => { offset = Some(try!(visitor.visit_value())); } Some(TimeFilterField::Exclude) => { exclude = 
Some(try!(visitor.visit_value())); } Some(TimeFilterField::OnMatch) => { on_match = Some(try!(visitor.visit_value())); } Some(TimeFilterField::OnMismatch) => { on_mismatch = Some(try!(visitor.visit_value())); } None => { break; } } } let start_nt = match start { Some(s) => s.naive_time(), None => return visitor.missing_field("start"), }; let lvl = match level { Some(l) => Some(l.level()), None => None, }; let end_nt = match end { Some(e) => Some(e.naive_time()), None => None, }; let omma = match on_match { Some(om) => Some(om.match_action()), None => None, }; let ommma = match on_mismatch { Some(omm) => Some(omm.match_action()), None => None, }; try!(visitor.end()); let tf = TimeFilter::new(start_nt) .level(lvl) .end(end_nt) .offset(offset) .exclude(exclude) .on_match(omma) .on_mismatch(ommma); Ok(tf) } } } #[cfg(test)] mod test { use decode; use super::*; const BASE_CONFIG: &'static str = r#" start = "07:00:00" "#; const ALL_CONFIG: &'static str = r#" start = "07:00:00" end = "20:00:00" offset = -5 exclude = false level = "Debug" on_match = "Accept" on_mismatch = "Neutral" "#; static VALIDS: &'static [&'static str] = &[BASE_CONFIG, ALL_CONFIG]; const INVALID_CONFIG_0: &'static str = r#""#; const INVALID_CONFIG_1: &'static str = r#" start = "NOt A HH:MM:SS" "#; const INVALID_CONFIG_2: &'static str = r#" start = "07:00:00" end = "NOt A HH:MM:SS" "#; const INVALID_CONFIG_3: &'static str = r#" start = "07:00:00" offset = "not a number" "#; const INVALID_CONFIG_4: &'static str = r#" start = "07:00:00" exclude = "not a bool" "#; const INVALID_CONFIG_5: &'static str = r#" start = "07:00:00" on_match = 1 "#; const INVALID_CONFIG_6: &'static str = r#" start = "07:00:00" on_mismatch = 1 "#; const INVALID_CONFIG_7: &'static str = r#" invalid = "invalid field" "#; static INVALIDS: &'static [&'static str] = &[INVALID_CONFIG_0, INVALID_CONFIG_1, INVALID_CONFIG_2, INVALID_CONFIG_3, INVALID_CONFIG_4, INVALID_CONFIG_5, INVALID_CONFIG_6, INVALID_CONFIG_7]; #[test] fn 
test_valid_configs() { let mut results = Vec::new(); for valid in VALIDS { match decode::<TimeFilter>(valid) { Ok(_) => results.push(true), Err(_) => assert!(false), }; } assert!(results.iter().all(|x| *x)); } #[test] fn test_invalid_configs() { let mut results = Vec::new(); for invalid in INVALIDS { match decode::<TimeFilter>(invalid) { Ok(_) => assert!(false), Err(_) => results.push(true), }; } assert!(results.iter().all(|x| *x)); } }
//! Generated accessors for the `FMPRE0` register (presumably "Flash Memory
//! Protection Read Enable 0" — confirm against the device SVD). This follows
//! the svd2rust code-generation pattern.

#[doc = "Reader of register FMPRE0"]
pub type R = crate::R<u32, super::FMPRE0>;
#[doc = "Writer for register FMPRE0"]
pub type W = crate::W<u32, super::FMPRE0>;
#[doc = "Register FMPRE0 `reset()`'s with value 0"]
impl crate::ResetValue for super::FMPRE0 {
    type Type = u32;
    /// The register resets to all-zero bits.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// No individual bit-fields are exposed for this register, so the
// reader/writer impls are intentionally empty.
impl R {}
impl W {}
use regex::{Captures, Match, Regex}; #[derive(Debug, PartialEq)] pub enum MatchType { Normal(String), Group(String), } #[derive(Debug, PartialEq)] pub struct MatchItem<'a> { pub text: &'a str, pub mtype: MatchType, } #[derive(Debug)] pub struct MatchSet<'a> { pub full_text: &'a str, pub re: &'a Regex, // pub items: Vec<MatchType>, } impl<'a> MatchSet<'a> { pub fn from(full_text: &'a str, re: &'a Regex) -> Self { MatchSet { full_text, re } } pub fn raw_line(&self) -> String { self.full_text.to_string() } pub fn to_csv_row(&self) -> String { self.to_strings().join(",") } pub fn to_strings(&self) -> Vec<String> { let mut items = Vec::new(); let captures = self.re.captures(self.full_text).unwrap(); match captures.len() { 0..=1 => items.push(MatchType::Normal(self.full_text.to_string())), _ => { let mut previous_end = 0; for i in 1..captures.len() { if let Some(mat) = captures.get(i) { if mat.start() != previous_end { items.push(MatchType::Normal( self.full_text[previous_end..mat.start()].to_string(), )); } items.push(MatchType::Group( self.full_text[mat.start()..mat.end()].to_string(), )); previous_end = mat.end(); } } if previous_end != self.full_text.len() { items.push(MatchType::Normal( self.full_text[previous_end..].to_string(), )) } } } let res: Vec<String> = items .iter() .filter(|mt| match mt { MatchType::Group(_) => true, _ => false, }) .map(|mt| match mt { MatchType::Group(s) => s.to_string(), _ => "".to_string(), }) .collect(); res } pub fn to_matchtypes(&self) -> Vec<MatchType> { let mut items = Vec::new(); let captures = self.re.captures(self.full_text).unwrap(); match captures.len() { 0..=1 => items.push(MatchType::Normal(self.full_text.to_string())), _ => { let mut previous_end = 0; for i in 1..captures.len() { if let Some(mat) = captures.get(i) { if mat.start() != previous_end { items.push(MatchType::Normal( self.full_text[previous_end..mat.start()].to_string(), )); } items.push(MatchType::Group( self.full_text[mat.start()..mat.end()].to_string(), )); 
previous_end = mat.end(); } } if previous_end != self.full_text.len() { items.push(MatchType::Normal( self.full_text[previous_end..].to_string(), )) } } } items } pub fn to_tsv_row(&self) -> String { self.to_strings().join("\t") } } pub fn filter_matches<'a>(contents: &'a [String], re: &Regex) -> Vec<&'a str> { contents .iter() .map(String::as_str) .filter(|s| re.is_match(s)) .collect() } pub fn into_matchsets<'a>(text_lines: &[&'a str], re: &'a Regex) -> Vec<MatchSet<'a>> { let result: Vec<MatchSet> = text_lines.iter().map(|s| MatchSet::from(&s, &re)).collect(); result } #[cfg(test)] #[allow(non_snake_case)] mod tests { use super::*; macro_rules! matchtype { ($style:ident $string:expr) => { MatchType::$style($string.to_string()) }; } macro_rules! test_matchset_from { ($($func_name:ident: $value:expr,)*) => { $( #[test] fn $func_name() { // Given let (re, content, items) = $value; let re = Regex::new(re).unwrap(); // let expected = MatchSet { // full_text: content, // re: &re // }; // When let actual: Vec<MatchType> = MatchSet::from(content, &re).to_matchtypes(); // Then assert_eq!(items, actual) } )* }; } test_matchset_from! 
{ into_match_set_basetest : (r".+(hello).+(world)", "lala hello bleble world", vec![ matchtype!(Normal "lala "), matchtype!(Group "hello"), matchtype!(Normal " bleble "), matchtype!(Group "world"), ]), givenNoCaptureGroups_thenFullTextAsSingleElement : (r".*", "lala hello ", vec![matchtype!(Normal "lala hello ")]), givenEmptyPattern_thenReturnFullTextAsSingleElement : (r"", "lala ", vec![matchtype!(Normal "lala ")]), givenPartialMatch_thenReturnRemainingSubstringsAsNormal : (r".*(lala)", "1337 lala hey ho!", vec![ matchtype!(Normal "1337 "), matchtype!(Group "lala"), matchtype!(Normal " hey ho!"), ]), givenNonCapturingGroup_thenReturnNormal : (r"(?:lala )(bleble)", "lala bleble", vec![ matchtype!(Normal "lala "), matchtype!(Group "bleble"), ]), given0or1MatchReturnsNone_thenDoNotReturnIt : (r"(lala)?(bleble)", "bleble", vec![ matchtype!(Group "bleble"), ]), } macro_rules! test_print_options { ($($func_name:ident: $test_name:ident: $values:expr,)*) => { $( #[test] fn $test_name() { // Given let (expected, full_text, re) = $values; let match_set = MatchSet{full_text, re: &Regex::new(re).unwrap()}; assert_eq!(expected, &match_set.$func_name()) } )* } } test_print_options! { to_csv_row : return_comma_separated_row : ("remain,remain also", "drop remain remain also", r"\w+ (\w+) (\w+ \w+)"), to_tsv_row : return_tab_separated_row : ("remain\tremain also", "drop remain remain also", r"\w+ (\w+) (\w+ \w+)"), } }
use peg;

/// Build script: compile the SPARQL PEG grammar into Rust source at build time.
fn main() {
    let grammar_path = "src/sparql/sparql_grammar.rustpeg";
    peg::cargo_build(grammar_path);
}
use std::collections::HashMap; use std::fmt; use std::io; use std::io::Read; use std::ops::Index; use std::string::{self, ToString}; use byteorder::{BigEndian, ReadBytesExt}; use flate2::read::{GzDecoder, ZlibDecoder}; use rustc_serialize; use rustc_serialize::hex::ToHex; use self::DecoderError::*; /// Represents a NBT value #[derive(Clone, PartialEq)] pub enum Nbt { Byte(i8), Short(i16), Int(i32), Long(i64), Float(f32), Double(f64), ByteArray(Vec<u8>), IntArray(Vec<i32>), String(String), List(List), Compound(Compound), } impl fmt::Debug for Nbt { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Nbt::Byte(x) => write!(f, "{}b", x), Nbt::Short(x) => write!(f, "{}s", x), Nbt::Int(x) => write!(f, "{}i", x), Nbt::Long(x) => write!(f, "{}L", x), Nbt::Float(x) => write!(f, "{:.1}f", x), Nbt::Double(x) => write!(f, "{:.1}", x), Nbt::ByteArray(ref x) => write!(f, "b<{}>", x[..].to_hex()), Nbt::IntArray(ref x) => write!(f, "{:?}", *x), Nbt::String(ref x) => write!(f, "\"{}\"", *x), Nbt::List(ref x) => write!(f, "{:?}", *x), Nbt::Compound(ref x) => write!(f, "{:?}", *x), } } } impl fmt::Display for Nbt { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Nbt::Byte(x) => write!(f, "{}", x), Nbt::Short(x) => write!(f, "{}", x), Nbt::Int(x) => write!(f, "{}", x), Nbt::Long(x) => write!(f, "{}", x), Nbt::Float(x) => write!(f, "{:.1}", x), Nbt::Double(x) => write!(f, "{:.1}", x), Nbt::ByteArray(ref x) => write!(f, "<{}>", x[..].to_hex()), Nbt::IntArray(ref x) => write!(f, "{:?}", *x), Nbt::String(ref x) => write!(f, "\"{}\"", *x), Nbt::List(ref x) => write!(f, "{:?}", *x), Nbt::Compound(ref x) => write!(f, "{:?}", *x), } } } /// An ordered list of NBT values. 
#[derive(Clone, PartialEq, Debug)] pub enum List { Byte(Vec<i8>), Short(Vec<i16>), Int(Vec<i32>), Long(Vec<i64>), Float(Vec<f32>), Double(Vec<f64>), ByteArray(Vec<Vec<u8>>), IntArray(Vec<Vec<i32>>), String(Vec<String>), List(Vec<List>), Compound(Vec<Compound>), } /// An unordered list of named NBT values. pub type Compound = HashMap<String, Nbt>; impl Nbt { pub fn from_reader<R: Read>(r: R) -> NbtReaderResult<Nbt> { Ok(NbtReader::new(r).tag()?.unwrap().0) } pub fn from_gzip(data: &[u8]) -> NbtReaderResult<Nbt> { let reader = GzDecoder::new(data); Nbt::from_reader(reader) } pub fn from_zlib(data: &[u8]) -> NbtReaderResult<Nbt> { let reader = ZlibDecoder::new(data); Nbt::from_reader(reader) } pub fn as_byte(&self) -> Option<i8> { match *self { Nbt::Byte(b) => Some(b), _ => None, } } pub fn into_compound(self) -> Result<Compound, Nbt> { match self { Nbt::Compound(c) => Ok(c), x => Err(x), } } pub fn into_compound_list(self) -> Result<Vec<Compound>, Nbt> { match self { Nbt::List(List::Compound(c)) => Ok(c), x => Err(x), } } pub fn as_bytearray(&self) -> Option<&[u8]> { match *self { Nbt::ByteArray(ref b) => Some(&b[..]), _ => None, } } pub fn into_bytearray(self) -> Result<Vec<u8>, Nbt> { match self { Nbt::ByteArray(b) => Ok(b), x => Err(x), } } pub fn as_float_list(&self) -> Option<&[f32]> { match *self { Nbt::List(List::Float(ref f)) => Some(&f[..]), _ => None, } } pub fn as_double_list(&self) -> Option<&[f64]> { match *self { Nbt::List(List::Double(ref d)) => Some(&d[..]), _ => None, } } } impl<'a> Index<&'a str> for Nbt { type Output = Nbt; fn index<'b>(&'b self, s: &'a str) -> &'b Nbt { match *self { Nbt::Compound(ref c) => c.get(s).unwrap(), _ => panic!("cannot index non-compound Nbt ({:?}) with '{}'", self, s), } } } const TAG_END: i8 = 0; const TAG_BYTE: i8 = 1; const TAG_SHORT: i8 = 2; const TAG_INT: i8 = 3; const TAG_LONG: i8 = 4; const TAG_FLOAT: i8 = 5; const TAG_DOUBLE: i8 = 6; const TAG_BYTE_ARRAY: i8 = 7; const TAG_STRING: i8 = 8; const TAG_LIST: i8 = 9; 
const TAG_COMPOUND: i8 = 10; const TAG_INT_ARRAY: i8 = 11; pub type NbtReaderResult<T> = Result<T, NbtReaderError>; #[derive(Debug)] pub enum NbtReaderError { Io(io::Error), Utf8(string::FromUtf8Error), } impl From<io::Error> for NbtReaderError { fn from(err: io::Error) -> NbtReaderError { NbtReaderError::Io(err) } } impl From<string::FromUtf8Error> for NbtReaderError { fn from(err: string::FromUtf8Error) -> NbtReaderError { NbtReaderError::Utf8(err) } } pub struct NbtReader<R> { reader: R, } impl<R: Read> NbtReader<R> { pub fn new(reader: R) -> NbtReader<R> { NbtReader { reader } } fn i8(&mut self) -> NbtReaderResult<i8> { self.reader.read_i8().map_err(NbtReaderError::from) } fn i16(&mut self) -> NbtReaderResult<i16> { self.reader .read_i16::<BigEndian>() .map_err(NbtReaderError::from) } fn i32(&mut self) -> NbtReaderResult<i32> { self.reader .read_i32::<BigEndian>() .map_err(NbtReaderError::from) } fn i64(&mut self) -> NbtReaderResult<i64> { self.reader .read_i64::<BigEndian>() .map_err(NbtReaderError::from) } fn f32(&mut self) -> NbtReaderResult<f32> { self.reader .read_f32::<BigEndian>() .map_err(NbtReaderError::from) } fn f64(&mut self) -> NbtReaderResult<f64> { self.reader .read_f64::<BigEndian>() .map_err(NbtReaderError::from) } fn string(&mut self) -> NbtReaderResult<String> { let len = self.reader.read_u16::<BigEndian>()? as usize; let mut v = Vec::with_capacity(len); for _ in 0..len { let mut c = [0]; self.reader.read_exact(&mut c)?; v.push(c[0]) } String::from_utf8(v).map_err(NbtReaderError::from) } fn array_u8(&mut self) -> NbtReaderResult<Vec<u8>> { let len = self.i32()? as usize; let mut v = Vec::with_capacity(len); for _ in 0..len { let mut c = [0]; self.reader.read_exact(&mut c)?; v.push(c[0]) } Ok(v) } fn array<T, F>(&mut self, mut read: F) -> NbtReaderResult<Vec<T>> where F: FnMut(&mut NbtReader<R>) -> NbtReaderResult<T>, { let len = self.i32()? as usize; let mut v = Vec::with_capacity(len); for _ in 0..len { v.push(read(self)?) 
} Ok(v) } fn compound(&mut self) -> NbtReaderResult<Compound> { let mut map = HashMap::new(); while let Some((v, name)) = self.tag()? { map.insert(name, v); } Ok(map) } fn list(&mut self) -> NbtReaderResult<List> { match self.i8()? { TAG_END => { assert_eq!(self.i32()?, 0); Ok(List::Compound(Vec::new())) } TAG_BYTE => self.array(|r| r.i8()).map(List::Byte), TAG_SHORT => self.array(|r| r.i16()).map(List::Short), TAG_INT => self.array(|r| r.i32()).map(List::Int), TAG_LONG => self.array(|r| r.i64()).map(List::Long), TAG_FLOAT => self.array(|r| r.f32()).map(List::Float), TAG_DOUBLE => self.array(|r| r.f64()).map(List::Double), TAG_BYTE_ARRAY => self.array(|r| r.array_u8()).map(List::ByteArray), TAG_INT_ARRAY => self.array(|r| r.array(|r| r.i32())).map(List::IntArray), TAG_STRING => self.array(|r| r.string()).map(List::String), TAG_LIST => self.array(|r| r.list()).map(List::List), TAG_COMPOUND => self.array(|r| r.compound()).map(List::Compound), tag_type => panic!("Unexpected tag type {}", tag_type), } } pub fn tag(&mut self) -> NbtReaderResult<Option<(Nbt, String)>> { Ok(match self.i8()? { TAG_END => None, tag_type => { let name = self.string()?; Some(( match tag_type { TAG_BYTE => self.i8().map(Nbt::Byte), TAG_SHORT => self.i16().map(Nbt::Short), TAG_INT => self.i32().map(Nbt::Int), TAG_LONG => self.i64().map(Nbt::Long), TAG_FLOAT => self.f32().map(Nbt::Float), TAG_DOUBLE => self.f64().map(Nbt::Double), TAG_BYTE_ARRAY => self.array_u8().map(Nbt::ByteArray), TAG_INT_ARRAY => self.array(|r| r.i32()).map(Nbt::IntArray), TAG_STRING => self.string().map(Nbt::String), TAG_LIST => self.list().map(Nbt::List), TAG_COMPOUND => self.compound().map(Nbt::Compound), tag_type => panic!("Unexpected tag type {}", tag_type), }?, name, )) } }) } } /// A structure to decode NBT to values in rust. 
pub struct Decoder { stack: Vec<DecodeResult<Nbt>>, } #[derive(Clone, PartialEq, Eq, Debug)] pub enum DecoderError { ExpectedError(String, String), MissingFieldError(String), UnknownVariantError(String), ApplicationError(String), } pub type DecodeResult<T> = Result<T, DecoderError>; impl Decoder { /// Creates a new decoder instance for decoding the specified NBT value. pub fn new(nbt: Nbt) -> Decoder { Decoder { stack: vec![Ok(nbt)], } } fn pop(&mut self) -> DecodeResult<Nbt> { self.stack.pop().unwrap() } fn push(&mut self, nbt: Nbt) { self.stack.push(Ok(nbt)) } fn push_all<T, F>(&mut self, list: Vec<T>, f: F) -> usize where F: FnMut(T) -> Nbt, { let len = list.len(); self.stack .extend(list.into_iter().rev().map(f).map(Ok::<Nbt, DecoderError>)); len } } // impl Decodable for Nbt { // fn decode<D: serialize::Decoder>(d: &mut D) -> Result<Self, serialize::Decoder::Error> { // d.pop() // } // } macro_rules! expect( ($s:expr, $t:path) => ({ match $s.pop() { Ok($t(v)) => Ok(v), Ok(other) => { Err(ExpectedError(stringify!($t).to_string(), other.to_string())) } Err(e) => Err(e) } }); ($s:expr, $t:ident as $to:ty) => (expect!($s, $t).map(|x| x as $to)) ); impl rustc_serialize::Decoder for Decoder { type Error = DecoderError; fn read_nil(&mut self) -> DecodeResult<()> { Err(ExpectedError("()".to_string(), self.pop()?.to_string())) } fn read_usize(&mut self) -> DecodeResult<usize> { Ok(self.read_isize()? as usize) } fn read_u64(&mut self) -> DecodeResult<u64> { expect!(self, Nbt::Long).map(|x| x as u64) } fn read_u32(&mut self) -> DecodeResult<u32> { expect!(self, Nbt::Int).map(|x| x as u32) } fn read_u16(&mut self) -> DecodeResult<u16> { expect!(self, Nbt::Short).map(|x| x as u16) } fn read_u8(&mut self) -> DecodeResult<u8> { expect!(self, Nbt::Byte).map(|x| x as u8) } fn read_isize(&mut self) -> DecodeResult<isize> { match self.pop()? 
{ Nbt::Byte(x) => Ok(x as isize), Nbt::Short(x) => Ok(x as isize), Nbt::Int(x) => Ok(x as isize), Nbt::Long(x) => Ok(x as isize), other => Err(ExpectedError("isize".to_string(), other.to_string())), } } fn read_i64(&mut self) -> DecodeResult<i64> { expect!(self, Nbt::Long) } fn read_i32(&mut self) -> DecodeResult<i32> { expect!(self, Nbt::Int) } fn read_i16(&mut self) -> DecodeResult<i16> { expect!(self, Nbt::Short) } fn read_i8(&mut self) -> DecodeResult<i8> { expect!(self, Nbt::Byte) } fn read_bool(&mut self) -> DecodeResult<bool> { Ok(self.read_u8()? != 0) } fn read_f64(&mut self) -> DecodeResult<f64> { expect!(self, Nbt::Double) } fn read_f32(&mut self) -> DecodeResult<f32> { expect!(self, Nbt::Float) } fn read_char(&mut self) -> DecodeResult<char> { let s = self.read_str()?; { let mut it = s.chars(); if let (Some(c), None) = (it.next(), it.next()) { return Ok(c); } } Err(ExpectedError("single character string".to_string(), s)) } fn read_str(&mut self) -> DecodeResult<String> { expect!(self, Nbt::String) } fn read_enum<T, F>(&mut self, _name: &str, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { f(self) } fn read_enum_variant<T, F>(&mut self, names: &[&str], mut f: F) -> DecodeResult<T> where F: FnMut(&mut Self, usize) -> DecodeResult<T>, { let name = match self.pop()? 
{ Nbt::String(s) => s, Nbt::Compound(mut o) => { let name = match o.remove("variant") { Some(Nbt::String(s)) => s, Some(val) => return Err(ExpectedError("String".to_string(), val.to_string())), None => return Err(MissingFieldError("variant".to_string())), }; match o.remove("fields") { Some(v) => { self.push(v); self.read_seq(|_, _| Ok(()))?; } None => return Err(MissingFieldError("fields".to_string())), } name } nbt => { return Err(ExpectedError( "String or Compound".to_string(), nbt.to_string(), )) } }; let idx = match names.iter().position(|n| n == &name) { Some(idx) => idx, None => return Err(UnknownVariantError(name)), }; f(self, idx) } fn read_enum_variant_arg<T, F>(&mut self, _idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { f(self) } fn read_enum_struct_variant<T, F>(&mut self, names: &[&str], f: F) -> DecodeResult<T> where F: FnMut(&mut Self, usize) -> DecodeResult<T>, { self.read_enum_variant(names, f) } fn read_enum_struct_variant_field<T, F>( &mut self, _name: &str, idx: usize, f: F, ) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { self.read_enum_variant_arg(idx, f) } fn read_struct<T, F>(&mut self, _name: &str, _len: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { let value = f(self)?; let _ = self.pop(); Ok(value) } fn read_struct_field<T, F>(&mut self, name: &str, _idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { let mut obj = expect!(self, Nbt::Compound)?; let value = match obj.remove(name) { None => return Err(MissingFieldError(name.to_string())), Some(v) => { self.stack.push(Ok(v)); f(self)? 
} }; self.push(Nbt::Compound(obj)); Ok(value) } fn read_tuple<T, F>(&mut self, tuple_len: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { self.read_seq(move |d, len| { if len == tuple_len { f(d) } else { Err(ExpectedError( format!("Tuple{}", tuple_len), format!("Tuple{}", len), )) } }) } fn read_tuple_arg<T, F>(&mut self, idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { self.read_seq_elt(idx, f) } fn read_tuple_struct<T, F>(&mut self, _name: &str, len: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { self.read_tuple(len, f) } fn read_tuple_struct_arg<T, F>(&mut self, idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { self.read_tuple_arg(idx, f) } fn read_option<T, F>(&mut self, mut f: F) -> DecodeResult<T> where F: FnMut(&mut Self, bool) -> DecodeResult<T>, { match self.pop() { Ok(value) => { self.push(value); f(self, true) } Err(MissingFieldError(_)) => f(self, false), Err(e) => Err(e), } } fn read_seq<T, F>(&mut self, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self, usize) -> DecodeResult<T>, { let len = match expect!(self, Nbt::List)? 
{ List::Byte(list) => self.push_all(list, Nbt::Byte), List::Short(list) => self.push_all(list, Nbt::Short), List::Int(list) => self.push_all(list, Nbt::Int), List::Long(list) => self.push_all(list, Nbt::Long), List::Float(list) => self.push_all(list, Nbt::Float), List::Double(list) => self.push_all(list, Nbt::Double), List::ByteArray(list) => self.push_all(list, Nbt::ByteArray), List::IntArray(list) => self.push_all(list, Nbt::IntArray), List::String(list) => self.push_all(list, Nbt::String), List::List(list) => self.push_all(list, Nbt::List), List::Compound(list) => self.push_all(list, Nbt::Compound), }; f(self, len) } fn read_seq_elt<T, F>(&mut self, _idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { f(self) } fn read_map<T, F>(&mut self, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self, usize) -> DecodeResult<T>, { let obj = expect!(self, Nbt::Compound)?; let len = obj.len(); for (key, value) in obj.into_iter() { self.push(value); self.push(Nbt::String(key)); } f(self, len) } fn read_map_elt_key<T, F>(&mut self, _idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { f(self) } fn read_map_elt_val<T, F>(&mut self, _idx: usize, f: F) -> DecodeResult<T> where F: FnOnce(&mut Self) -> DecodeResult<T>, { f(self) } fn error(&mut self, err: &str) -> DecoderError { ApplicationError(err.to_string()) } }
// Public submodules exposed by this crate/module.
pub mod aftermath;
pub mod tree;
/// Merges the two adjacent sorted runs `list[start_idx..start_second_idx]`
/// and `list[start_second_idx..=stop_idx]` and returns the full vector with
/// that span sorted.
///
/// BUG FIX: the original indexed with the fixed `start_second_idx` instead of
/// the advancing `second` cursor, had its exhaustion checks inverted/misplaced,
/// and returned only the merged segment — so the sort never produced correct
/// output. This is the standard two-cursor merge.
fn merge(
    mut list: Vec<i32>,
    start_idx: usize,
    start_second_idx: usize,
    stop_idx: usize,
) -> Vec<i32> {
    // Collect the merged run into a scratch buffer, then copy it back in place.
    let mut merged = Vec::with_capacity(stop_idx - start_idx + 1);
    let (mut left, mut right) = (start_idx, start_second_idx);

    while left < start_second_idx && right <= stop_idx {
        if list[left] <= list[right] {
            merged.push(list[left]);
            left += 1;
        } else {
            merged.push(list[right]);
            right += 1;
        }
    }
    // Exactly one of these leftover ranges is non-empty; drain it.
    merged.extend_from_slice(&list[left..start_second_idx]);
    merged.extend_from_slice(&list[right..stop_idx + 1]);

    list[start_idx..stop_idx + 1].copy_from_slice(&merged);
    list
}

/// Sorts `list` in ascending order with a top-down merge sort and returns the
/// sorted vector. `list_length` must equal `list.len()` (kept as a parameter
/// for interface compatibility).
pub fn merge_sort(list: Vec<i32>, list_length: usize) -> Vec<i32> {
    // 0- or 1-element lists are already sorted; this guard also prevents the
    // `list_length - 1` usize underflow the original hit on empty input.
    if list_length <= 1 {
        return list;
    }
    merge_sort_recur(list, 0, list_length - 1)
}

/// Recursively sorts `list[start_idx..=stop_idx]`.
fn merge_sort_recur(mut list: Vec<i32>, start_idx: usize, stop_idx: usize) -> Vec<i32> {
    if start_idx >= stop_idx {
        return list;
    }
    let mid = start_idx + (stop_idx - start_idx) / 2;
    // BUG FIX: the original cloned the list into both recursive calls and
    // discarded their results, so the halves were never actually sorted.
    list = merge_sort_recur(list, start_idx, mid);
    list = merge_sort_recur(list, mid + 1, stop_idx);
    merge(list, start_idx, mid + 1, stop_idx)
}
pub struct Solution;

impl Solution {
    /// LeetCode 39: Combination Sum.
    ///
    /// Returns every distinct combination of `candidates` (each candidate may
    /// be reused any number of times) whose elements sum to `target`.
    pub fn combination_sum(candidates: Vec<i32>, target: i32) -> Vec<Vec<i32>> {
        // Enumerates all combinations drawn from `pool` that sum to `remaining`.
        fn search(pool: &[i32], remaining: i32) -> Vec<Vec<i32>> {
            match pool {
                // No candidates left: one empty combination iff the target is met.
                [] => {
                    if remaining == 0 {
                        vec![vec![]]
                    } else {
                        vec![]
                    }
                }
                // A single candidate can only work as an exact multiple.
                [single] => {
                    if remaining % single == 0 {
                        vec![vec![*single; (remaining / single) as usize]]
                    } else {
                        vec![]
                    }
                }
                // General case: combinations that never use the last candidate,
                // followed by combinations that use it at least once.
                [rest @ .., last] => {
                    let mut found = search(rest, remaining);
                    if remaining >= *last {
                        for mut combo in search(pool, remaining - last) {
                            combo.push(*last);
                            found.push(combo);
                        }
                    }
                    found
                }
            }
        }

        search(&candidates, target)
    }
}

#[test]
fn test0039() {
    assert_eq!(
        Solution::combination_sum(vec![2, 3, 6, 7], 7),
        vec![vec![2, 2, 3], vec![7]]
    );
    assert_eq!(
        Solution::combination_sum(vec![2, 3, 5], 8),
        vec![vec![2, 2, 2, 2], vec![2, 3, 3], vec![3, 5]]
    );
}
#![warn(clippy::all, clippy::pedantic, clippy::cargo)]
#![allow(clippy::multiple_crate_versions)]
#![forbid(unsafe_code)]

use clap::{crate_authors, crate_description, crate_version, App, Arg};
use color_eyre::eyre::{ContextCompat, Result, WrapErr};

use crate::config::Config;
use crate::state::State;
use prompt::select;

mod app_config;
mod cargo;
mod changelog;
mod command;
mod config;
mod conventional_commits;
mod git;
mod issues;
mod package_json;
mod prompt;
mod pyproject;
mod semver;
mod state;
mod step;
mod workflow;

/// Entry point for the Dobby CLI: load `dobby.toml`, pick a workflow (either
/// the optional positional CLI argument or an interactive select), and run it.
fn main() -> Result<()> {
    // Install color_eyre's error/panic reporting before anything can fail.
    color_eyre::install().expect("Could not set up error handling with color_eyre");

    let matches = App::new("Dobby")
        .version(crate_version!())
        .author(crate_authors!())
        .about(crate_description!())
        .arg(
            Arg::with_name("WORKFLOW")
                .help("Name a workflow to bypass the interactive select and just run it.")
                .index(1),
        )
        .get_matches();
    // When present, this bypasses the interactive menu below.
    let preselected_workflow = matches.value_of("WORKFLOW");

    let Config {
        workflows,
        jira,
        github,
    } = Config::load("dobby.toml").wrap_err("Could not load config file at dobby.toml")?;

    // Resolve the workflow to run: look it up by name, or prompt the user.
    let workflow = match preselected_workflow {
        None => select(workflows, "Select a workflow")?,
        Some(name) => workflows
            .into_iter()
            .find(|w| w.name == name)
            .wrap_err_with(|| format!("No workflow named {}", name))?,
    };

    let state = State::new(jira, github);
    workflow::run_workflow(workflow, state)
}
//! Re-exports commonly-used APIs that can be imported at once. pub use crate::interrupt::without_interrupts;
use core::fmt; use core::hash::{Hash, Hasher}; use firefly_diagnostics::{SourceSpan, Span, Spanned}; use firefly_intern::Ident; use crate::FunctionName; /// Represents a deprecated function or module #[derive(Debug, Copy, Clone, Spanned)] pub enum Deprecation { Module { #[span] span: SourceSpan, flag: DeprecatedFlag, }, Function { #[span] span: SourceSpan, function: Span<FunctionName>, flag: DeprecatedFlag, }, } impl PartialEq for Deprecation { fn eq(&self, other: &Self) -> bool { match (self, other) { (Self::Module { .. }, Self::Module { .. }) => true, // We ignore the flag because it used only for display, // the function/arity determines equality ( Self::Function { function: ref x1, .. }, Self::Function { function: ref y1, .. }, ) => x1 == y1, _ => false, } } } impl Eq for Deprecation {} impl Hash for Deprecation { fn hash<H: Hasher>(&self, state: &mut H) { let discriminant = std::mem::discriminant(self); discriminant.hash(state); match self { Self::Module { flag, .. } => flag.hash(state), Self::Function { ref function, flag, .. } => { flag.hash(state); function.hash(state) } } } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum DeprecatedFlag { Eventually, NextVersion, NextMajorRelease, Description(Ident), } impl fmt::Display for DeprecatedFlag { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Self::Eventually => write!(f, "eventually"), Self::NextVersion => write!(f, "in the next version"), Self::NextMajorRelease => write!(f, "in the next major release"), Self::Description(descr) => write!(f, "{}", descr.name), } } }
//! Server logic for driving the library via commands. use std::cell::RefCell; use std::collections::{HashMap, HashSet}; use std::hash::Hash; use std::rc::Rc; use std::time::{Duration, Instant}; use timely::communication::Allocate; use timely::dataflow::operators::capture::event::link::EventLink; use timely::dataflow::operators::UnorderedInput; use timely::dataflow::{ProbeHandle, Scope}; use timely::logging::{BatchLogger, TimelyEvent}; use timely::progress::Timestamp; use timely::worker::Worker; use differential_dataflow::collection::{AsCollection, Collection}; use differential_dataflow::lattice::Lattice; use differential_dataflow::logging::DifferentialEvent; use differential_dataflow::operators::Threshold; use differential_dataflow::ExchangeData; use crate::domain::{AsSingletonDomain, Domain}; use crate::logging::DeclarativeEvent; use crate::operators::LastWriteWins; use crate::scheduling::Scheduler; use crate::sinks::Sink; use crate::sources::{Source, Sourceable, SourcingContext}; use crate::Rule; use crate::{ implement, implement_neu, AttributeConfig, IndexDirection, InputSemantics, ShutdownHandle, }; use crate::{AsAid, Datom, Error, Rewind, Time, Value}; /// Server configuration. #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Configuration { /// Automatic domain tick interval. pub tick: Option<Duration>, /// Do clients have to call AdvanceDomain explicitely? pub manual_advance: bool, /// Should logging streams be created? pub enable_logging: bool, /// Should queries use the optimizer during implementation? pub enable_optimizer: bool, } impl Default for Configuration { fn default() -> Self { Configuration { tick: None, manual_advance: false, enable_logging: false, enable_optimizer: false, } } } #[cfg(feature = "getopts")] impl Configuration { /// Returns a `getopts::Options` struct describing all available /// configuration options. 
pub fn options() -> getopts::Options { let mut opts = getopts::Options::new(); opts.optopt( "", "tick", "advance domain at a regular interval", "SECONDS", ); opts.optflag( "", "manual-advance", "forces clients to call AdvanceDomain explicitely", ); opts.optflag("", "enable-logging", "enable log event sources"); opts.optflag("", "enable-optimizer", "enable WCO queries"); opts.optflag("", "enable-meta", "enable queries on the query graph"); opts } /// Parses configuration options from the provided arguments. pub fn from_args<I: Iterator<Item = String>>(args: I) -> Result<Self, String> { let default: Self = Default::default(); let opts = Self::options(); let matches = opts.parse(args)?; let tick: Option<Duration> = matches .opt_str("tick") .map(|x| Duration::from_secs(x.parse().expect("failed to parse tick duration"))); Self { tick, manual_advance: matches.opt_present("manual-advance"), enable_logging: matches.opt_present("enable-logging"), enable_optimizer: matches.opt_present("enable-optimizer"), } } } /// Transaction ids. pub type TxId = u64; /// A request expressing interest in receiving results published under /// the specified name. #[derive(Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Debug, Serialize, Deserialize)] pub struct Interest { /// The name of a previously registered dataflow. pub name: String, /// Granularity at which to send results. None indicates no delay. pub granularity: Option<Time>, /// An optional sink configuration. pub sink: Option<Sink>, /// Whether or not to log events from this dataflow. pub disable_logging: Option<bool>, } impl std::convert::From<&Interest> for crate::sinks::SinkingContext { fn from(interest: &Interest) -> Self { Self { name: interest.name.clone(), granularity: interest.granularity.clone(), } } } /// A request with the intent of synthesising one or more new rules /// and optionally publishing one or more of them. 
#[derive(Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Debug, Serialize, Deserialize)]
pub struct Register<A: AsAid> {
    /// A list of rules to synthesise in order.
    pub rules: Vec<Rule<A>>,
    /// The names of rules that should be published.
    pub publish: Vec<A>,
}

/// A request with the intent of creating a new named, globally
/// available input that can be transacted upon.
#[derive(Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Debug, Serialize, Deserialize)]
pub struct CreateAttribute {
    /// A globally unique name under which to publish data sent via
    /// this input.
    pub name: String,
    /// Semantics enforced on this attribute by 3DF.
    pub config: AttributeConfig,
}

/// Possible request types.
// NOTE(review): the `From<&'static str>` bound presumably lets built-in
// attribute names be written as string literals — confirm at call sites.
#[derive(Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Debug, Serialize, Deserialize)]
pub enum Request<A: AsAid + From<&'static str>> {
    /// Sends inputs via one or more registered handles.
    Transact(Vec<Datom<A>>),
    /// Expresses interest in an entire attribute.
    Subscribe(String),
    /// Derives new attributes under a new namespace.
    #[cfg(feature = "graphql")]
    Derive(String, String),
    /// Expresses interest in a named relation.
    Interest(Interest),
    /// Expresses that the interest in a named relation has
    /// stopped. Once all interested clients have sent this, the
    /// dataflow can be cleaned up.
    Uninterest(String),
    /// Registers one or more named relations.
    Register(Register<A>),
    /// A request with the intent of attaching to an external data
    /// source that publishes one or more attributes and relations.
    RegisterSource(Source<A>),
    /// Creates a named input handle that can be `Transact`ed upon.
    CreateAttribute(CreateAttribute),
    /// Advances the specified domain to the specified time.
    AdvanceDomain(Option<String>, Time),
    /// Requests a domain advance to whatever epoch the server
    /// determines is *now*. Used by clients to enforce a minimum
    /// granularity of responses, if inputs happen only infrequently.
    Tick,
    /// Closes a named input handle.
    CloseInput(String),
    /// Client has disconnected.
    Disconnect,
    /// Requests any setup logic that needs to be executed
    /// deterministically across all workers.
    Setup,
    /// Requests a heartbeat containing status information.
    Status,
    /// Requests orderly shutdown of the system.
    Shutdown,
}

/// Server context maintaining globally registered arrangements and
/// input handles.
pub struct Server<A, T, Token>
where
    A: AsAid,
    T: Timestamp + Lattice,
    Token: Hash + Eq + Copy,
{
    /// Server configuration.
    pub config: Configuration,
    /// A timer started at the initiation of the timely computation
    /// (copied from worker).
    pub t0: Instant,
    /// Internal domain in server time.
    pub internal: Domain<A, T>,
    /// Mapping from query names to interested client tokens.
    pub interests: HashMap<A, HashSet<Token>>,
    // Mapping from query names to their shutdown handles. This is
    // separate from internal shutdown handles on domains, because
    // user queries might be one-off and not result in a new domain
    // being created.
    shutdown_handles: HashMap<A, ShutdownHandle>,
    /// Probe keeping track of overall dataflow progress.
    pub probe: ProbeHandle<T>,
    /// Scheduler managing deferred operator activations.
    pub scheduler: Rc<RefCell<Scheduler<T>>>,
    // Link to replayable Timely logging events.
    timely_events: Option<Rc<EventLink<Duration, (Duration, usize, TimelyEvent)>>>,
    // Link to replayable Differential logging events.
    differential_events: Option<Rc<EventLink<Duration, (Duration, usize, DifferentialEvent)>>>,
}

impl<A, T, Token> Server<A, T, Token>
where
    A: AsAid + ExchangeData + From<&'static str>,
    T: Timestamp + Lattice + Default + Rewind,
    Token: Hash + Eq + Copy,
{
    /// Creates a new server state from a configuration.
    fn new(config: Configuration) -> Self` is public below.
    pub fn new(config: Configuration) -> Self {
        // Anchor all duration measurements at "now".
        Server::new_at(config, Instant::now())
    }

    /// Creates a new server state from a configuration with an
    /// additionally specified beginning of the computation: an
    /// instant in relation to which all durations will be measured.
    pub fn new_at(config: Configuration, t0: Instant) -> Self {
        // Event links are created eagerly so that logging sources can
        // attach to them later (see `enable_logging`).
        let timely_events = Some(Rc::new(EventLink::new()));
        let differential_events = Some(Rc::new(EventLink::new()));

        let probe = ProbeHandle::new();
        Server {
            config,
            t0,
            internal: Domain::new(Default::default()),
            interests: HashMap::new(),
            shutdown_handles: HashMap::new(),
            // The scheduler observes a clone of the same probe used to
            // track overall progress.
            scheduler: Rc::new(RefCell::new(Scheduler::from(probe.clone()))),
            probe,
            timely_events,
            differential_events,
        }
    }

    /// Returns commands to install built-in plans.
    pub fn builtins() -> Vec<Request<A>> {
        // Currently none; the commented-out requests document the
        // intended built-in pattern attributes.
        vec![
            // Request::CreateAttribute(CreateAttribute {
            //     name: "df.pattern/e".to_string(),
            //     semantics: InputSemantics::Raw,
            // }),
            // Request::CreateAttribute(CreateAttribute {
            //     name: "df.pattern/a".to_string(),
            //     semantics: InputSemantics::Raw,
            // }),
            // Request::CreateAttribute(CreateAttribute {
            //     name: "df.pattern/v".to_string(),
            //     semantics: InputSemantics::Raw,
            // }),
        ]
    }

    /// Drops all shutdown handles associated with the specified
    /// query, resulting in its dataflow getting cleaned up.
    fn shutdown_query(&mut self, name: &A) {
        info!("Shutting down {}", name);
        // Dropping the handle is what tears the dataflow down.
        self.shutdown_handles.remove(name);
    }

    /// Handles a Transact request.
    pub fn transact(
        &mut self,
        tx_data: Vec<Datom<A>>,
        owner: usize,
        worker_index: usize,
    ) -> Result<(), Error> {
        // only the owner should actually introduce new inputs
        if owner == worker_index {
            self.internal.transact(tx_data)
        } else {
            Ok(())
        }
    }

    /// Handles an Interest request.
    pub fn interest<S: Scope<Timestamp = T>>(
        &mut self,
        name: A,
        scope: &mut S,
    ) -> Result<Collection<S, Vec<Value>, isize>, Error> {
        // The optimizer toggles between two implementation strategies.
        let (mut rel_map, shutdown_handle) = if self.config.enable_optimizer {
            implement_neu(scope, &mut self.internal, name.clone())?
        } else {
            implement(scope, &mut self.internal, name.clone())?
        };

        match rel_map.remove(&name) {
            None => Err(Error::fault(format!(
                "Relation of interest ({}) wasn't actually implemented.",
                name
            ))),
            Some(relation) => {
                // Retain the shutdown handle so the dataflow lives until
                // the last client uninterests.
                self.shutdown_handles.insert(name, shutdown_handle);
                Ok(relation)
            }
        }
    }

    /// Handles a Register request.
    // NOTE(review): the `publish` field is currently ignored here — confirm
    // whether publication is handled elsewhere or intentionally deferred.
    pub fn register(&mut self, req: Register<A>) -> Result<(), Error> {
        let Register { rules, .. } = req;

        for rule in rules.into_iter() {
            if self.internal.rules.contains_key(&rule.name) {
                // @TODO panic if hashes don't match
                // panic!("Attempted to re-register a named relation");
                continue;
            } else {
                self.internal.rules.insert(rule.name.clone(), rule);
            }
        }

        Ok(())
    }

    /// Handles a CreateAttribute request.
    pub fn create_attribute<X, S>(
        &mut self,
        scope: &mut S,
        name: X,
        config: AttributeConfig,
    ) -> Result<(), Error>
    where
        X: Into<A>,
        S: Scope<Timestamp = T>,
        S::Timestamp: std::convert::Into<crate::timestamp::Time>,
    {
        let ((handle, cap), pairs) =
            scope.new_unordered_input::<((Value, Value), S::Timestamp, isize)>();

        // Apply the configured input semantics to the raw pair stream.
        let tuples = match config.input_semantics {
            InputSemantics::Raw => pairs.as_collection(),
            InputSemantics::LastWriteWins => pairs.as_collection().last_write_wins(),
            // Ensure that redundant (e,v) pairs don't cause
            // misleading proposals during joining.
            InputSemantics::Distinct => pairs.as_collection().distinct(),
        };

        let mut scoped_domain = ((handle, cap), tuples).as_singleton_domain(name.into());

        if let Some(slack) = config.trace_slack {
            scoped_domain = scoped_domain.with_slack(slack.into());
        }

        // LastWriteWins is a special case, because count, propose,
        // and validate are all essentially the same.
        if config.input_semantics != InputSemantics::LastWriteWins {
            scoped_domain = scoped_domain.with_query_support(config.query_support);
        }

        if config.index_direction == IndexDirection::Both {
            scoped_domain = scoped_domain.with_reverse_indices();
        }

        // Merge the new singleton domain into the server's domain.
        self.internal += scoped_domain.into();

        Ok(())
    }

    /// Returns a fresh sourcing context, useful for installing 3DF
    /// compatible sources manually.
    pub fn make_sourcing_context(&self) -> SourcingContext<T> {
        SourcingContext {
            t0: self.t0,
            // Weak reference avoids a reference cycle with the scheduler.
            scheduler: Rc::downgrade(&self.scheduler),
            domain_probe: self.internal.domain_probe().clone(),
            // NOTE(review): unwrap assumes the event links created in
            // `new_at` were never cleared — confirm (the commented-out code
            // below used to set them to None).
            timely_events: self.timely_events.clone().unwrap(),
            differential_events: self.differential_events.clone().unwrap(),
        }
    }

    /// Handles a RegisterSource request.
    pub fn register_source<S>(
        &mut self,
        source: Box<dyn Sourceable<A, S>>,
        scope: &mut S,
    ) -> Result<(), Error>
    where
        S: Scope<Timestamp = T>,
        S::Timestamp: std::convert::Into<crate::timestamp::Time>,
    {
        // use timely::logging::Logger;
        // let timely_logger = scope.log_register().remove("timely");
        // let differential_logger = scope.log_register().remove("differential/arrange");

        let context = self.make_sourcing_context();

        // self.timely_events = None;
        // self.differential_events = None;

        let mut attribute_streams = source.source(scope, context);

        // Each sourced attribute becomes its own singleton domain, then is
        // merged into the server's internal domain (mirrors create_attribute).
        for (aid, config, pairs) in attribute_streams.drain(..) {
            let pairs = match config.input_semantics {
                InputSemantics::Raw => pairs.as_collection(),
                InputSemantics::LastWriteWins => pairs.as_collection().last_write_wins(),
                // Ensure that redundant (e,v) pairs don't cause
                // misleading proposals during joining.
                InputSemantics::Distinct => pairs.as_collection().distinct(),
            };

            let mut scoped_domain = pairs.as_singleton_domain(aid);

            if let Some(slack) = config.trace_slack {
                scoped_domain = scoped_domain.with_slack(slack.into());
            }

            // LastWriteWins is a special case, because count, propose,
            // and validate are all essentially the same.
            if config.input_semantics != InputSemantics::LastWriteWins {
                scoped_domain = scoped_domain.with_query_support(config.query_support);
            }

            if config.index_direction == IndexDirection::Both {
                scoped_domain = scoped_domain.with_reverse_indices();
            }

            self.internal += scoped_domain.into();
        }

        // if let Some(logger) = timely_logger {
        //     if let Ok(logger) = logger.downcast::<Logger<TimelyEvent>>() {
        //         scope
        //             .log_register()
        //             .insert_logger::<TimelyEvent>("timely", *logger);
        //     }
        // }

        // if let Some(logger) = differential_logger {
        //     if let Ok(logger) = logger.downcast::<Logger<DifferentialEvent>>() {
        //         scope
        //             .log_register()
        //             .insert_logger::<DifferentialEvent>("differential/arrange", *logger);
        //     }
        // }

        Ok(())
    }

    /// Handles an AdvanceDomain request.
    pub fn advance_domain(&mut self, name: Option<String>, next: T) -> Result<(), Error> {
        match name {
            // Only the anonymous (internal) domain can be advanced today.
            None => self.internal.advance_epoch(next),
            Some(_) => Err(Error::unsupported("Named domains are not yet supported.")),
        }
    }

    /// Handles an Uninterest request, possibly cleaning up dataflows
    /// that are no longer interesting to any client.
    pub fn uninterest(&mut self, client: Token, name: &A) -> Result<(), Error> {
        // All workers keep track of every client's interests, s.t. they
        // know when to clean up unused dataflows.
        if let Some(entry) = self.interests.get_mut(name) {
            entry.remove(&client);

            // Last interested client gone -> tear the dataflow down.
            if entry.is_empty() {
                self.shutdown_query(name);
                self.interests.remove(name);
            }
        }

        Ok(())
    }

    /// Cleans up all bookkeeping state for the specified client.
    pub fn disconnect_client(&mut self, client: Token) -> Result<(), Error> {
        // Snapshot the keys first: `uninterest` may remove entries from
        // `self.interests` while we iterate.
        let names: Vec<A> = self.interests.keys().cloned().collect();

        for query_name in names.iter() {
            self.uninterest(client, query_name)?
        }

        Ok(())
    }

    /// Returns true iff the probe is behind any input handle. Mostly
    /// used as a convenience method during testing. Using this within
    /// `step_while` is not safe in general and might lead to stalls.
    pub fn is_any_outdated(&self) -> bool {
        self.probe
            .with_frontier(|out_frontier| self.internal.dominates(out_frontier))
    }

    /// Helper for registering, publishing, and indicating interest in
    /// a single, named query. Used for testing.
    pub fn test_single<S: Scope<Timestamp = T>>(
        &mut self,
        scope: &mut S,
        rule: Rule<A>,
    ) -> Collection<S, Vec<Value>, isize> {
        let interest_name = rule.name.clone();
        let publish_name = rule.name.clone();

        self.register(Register {
            rules: vec![rule],
            publish: vec![publish_name],
        })
        .unwrap();

        // Panic on failure: this helper is test-only by design.
        match self.interest(interest_name, scope) {
            Err(error) => panic!("{:?}", error),
            Ok(relation) => relation.probe_with(&mut self.probe),
        }
    }
}

impl<A, Token> Server<A, Duration, Token>
where
    A: AsAid + ExchangeData + From<&'static str>,
    Token: Hash + Eq + Copy,
{
    /// Registers loggers for use in the various logging sources.
    // NOTE(review): unwraps assume the event links from `new_at` are still
    // present — confirm callers never invoke this after they were cleared.
    pub fn enable_logging<Al: Allocate>(&self, worker: &mut Worker<Al>) -> Result<(), Error> {
        // Forward timely's own event stream into our replayable link.
        let mut timely_logger = BatchLogger::new(self.timely_events.clone().unwrap());
        worker
            .log_register()
            .insert::<TimelyEvent, _>("timely", move |time, data| {
                timely_logger.publish_batch(time, data)
            });

        let mut differential_logger = BatchLogger::new(self.differential_events.clone().unwrap());
        worker
            .log_register()
            .insert::<DifferentialEvent, _>("differential/arrange", move |time, data| {
                differential_logger.publish_batch(time, data)
            });

        Ok(())
    }

    /// Unregisters loggers.
    pub fn shutdown_logging<Al: Allocate>(&self, worker: &mut Worker<Al>) -> Result<(), Error> {
        // Replace each logger with a no-op closure rather than removing it.
        worker
            .log_register()
            .insert::<TimelyEvent, _>("timely", move |_time, _data| {});

        worker
            .log_register()
            .insert::<DifferentialEvent, _>("differential/arrange", move |_time, _data| {});

        worker
            .log_register()
            .insert::<DeclarativeEvent, _>("declarative", move |_time, _data| {});

        Ok(())
    }
}
use crate::raw::Table;
use crossbeam_epoch::Atomic;
use parking_lot::Mutex;

/// Entry in a bin.
///
/// Will _generally_ be `Node`. Any entry that is not first in the bin, will be a `Node`.
#[derive(Debug)]
pub(crate) enum BinEntry<K, V> {
    Node(Node<K, V>),
    // Marker left in a bin during a resize; lookups must follow it to the
    // next table.
    Moved,
}

// NOTE(review): manual Send/Sync impls — soundness relies on the bounds
// below requiring Node<K, V> / Table<K, V> themselves to be Send/Sync.
// Confirm the `Moved` variant carries no non-Send payload.
unsafe impl<K, V> Send for BinEntry<K, V>
where
    K: Send,
    V: Send,
    Node<K, V>: Send,
    Table<K, V>: Send,
{
}

unsafe impl<K, V> Sync for BinEntry<K, V>
where
    K: Sync,
    V: Sync,
    Node<K, V>: Sync,
    Table<K, V>: Sync,
{
}

impl<K, V> BinEntry<K, V> {
    /// Returns the inner `Node` if this entry is a `Node`, else `None`.
    pub(crate) fn as_node(&self) -> Option<&Node<K, V>> {
        if let BinEntry::Node(ref n) = *self {
            Some(n)
        } else {
            None
        }
    }
}

/// Key-value entry.
#[derive(Debug)]
pub(crate) struct Node<K, V> {
    // Cached hash of `key`, so chains can be walked without re-hashing.
    pub(crate) hash: u64,
    pub(crate) key: K,
    // Value and next pointer are atomics: they may be swapped concurrently.
    pub(crate) value: Atomic<V>,
    pub(crate) next: Atomic<BinEntry<K, V>>,
    // Per-node lock guarding structural bin mutations.
    pub(crate) lock: Mutex<()>,
}

#[cfg(test)]
mod tests {
    use super::*;
    use crossbeam_epoch::Owned;
    use std::sync::atomic::Ordering;

    // Test helper: a detached node with the given hash/key/value.
    fn new_node(hash: u64, key: usize, value: usize) -> Node<usize, usize> {
        Node {
            hash,
            key,
            value: Atomic::new(value),
            next: Atomic::null(),
            lock: Mutex::new(()),
        }
    }

    #[test]
    fn find_node_no_match() {
        let guard = &crossbeam_epoch::pin();
        let node2 = new_node(4, 5, 6);
        let entry2 = BinEntry::Node(node2);
        let node1 = new_node(1, 2, 3);
        node1.next.store(Owned::new(entry2), Ordering::SeqCst);
        let entry1 = Owned::new(BinEntry::Node(node1)).into_shared(guard);
        let mut tab = Table::from(vec![Atomic::from(entry1)]);
        // safety: we have not yet dropped entry1
        assert!(tab.find(unsafe { entry1.deref() }, 1, &0, guard).is_null());
        tab.drop_bins();
    }

    #[test]
    fn find_node_single_match() {
        let guard = &crossbeam_epoch::pin();
        let entry = Owned::new(BinEntry::Node(new_node(1, 2, 3))).into_shared(guard);
        let mut tab = Table::from(vec![Atomic::from(entry)]);
        assert_eq!(
            // safety: we have not yet dropped entry
            unsafe { tab.find(entry.deref(), 1, &2, guard).deref() }
                .as_node()
                .unwrap()
                .key,
            2
        );
        tab.drop_bins();
    }

    #[test]
    fn find_node_multi_match() {
        let guard
            = &crossbeam_epoch::pin();
        let node2 = new_node(4, 5, 6);
        let entry2 = BinEntry::Node(node2);
        let node1 = new_node(1, 2, 3);
        // Chain node2 behind node1, then look up the second link.
        node1.next.store(Owned::new(entry2), Ordering::SeqCst);
        let entry1 = Owned::new(BinEntry::Node(node1)).into_shared(guard);
        let mut tab = Table::from(vec![Atomic::from(entry1)]);
        assert_eq!(
            // safety: we have not yet dropped entry1
            unsafe { tab.find(entry1.deref(), 4, &5, guard).deref() }
                .as_node()
                .unwrap()
                .key,
            5
        );
        tab.drop_bins();
    }

    #[test]
    fn find_moved_empty_bins_no_match() {
        let guard = &crossbeam_epoch::pin();
        let mut table = Table::<usize, usize>::new(1);
        // A `Moved` entry forwards lookups to table2, which is empty here.
        let mut table2 = Owned::new(Table::new(1)).into_shared(guard);
        let entry = table.get_moved(table2, guard);
        table.store_bin(0, entry);
        assert!(table.find(&BinEntry::Moved, 1, &2, guard).is_null());
        table.drop_bins();
        // safety: table2 is still valid and not accessed by different threads
        unsafe { table2.deref_mut() }.drop_bins();
        unsafe { guard.defer_destroy(table2) };
    }

    #[test]
    fn find_moved_no_bins_no_match() {
        let guard = &crossbeam_epoch::pin();
        let mut table = Table::<usize, usize>::new(1);
        // The forwarded-to table has zero bins.
        let mut table2 = Owned::new(Table::new(0)).into_shared(guard);
        let entry = table.get_moved(table2, guard);
        table.store_bin(0, entry);
        assert!(table.find(&BinEntry::Moved, 1, &2, guard).is_null());
        table.drop_bins();
        // safety: table2 is still valid and not accessed by different threads
        unsafe { table2.deref_mut() }.drop_bins();
        unsafe { guard.defer_destroy(table2) };
    }

    #[test]
    fn find_moved_null_bin_no_match() {
        let guard = &crossbeam_epoch::pin();
        let mut table = Table::<usize, usize>::new(1);
        let mut table2 = Owned::new(Table::new(2)).into_shared(guard);
        // Populate bin 0 of table2; the lookup below targets the (null) bin
        // for hash 0, so it must miss.
        unsafe { table2.deref() }.store_bin(0, Owned::new(BinEntry::Node(new_node(1, 2, 3))));
        let entry = table.get_moved(table2, guard);
        table.store_bin(0, entry);
        assert!(table.find(&BinEntry::Moved, 0, &1, guard).is_null());
        table.drop_bins();
        // safety: table2 is still valid and not accessed by different threads
        unsafe { table2.deref_mut()
        }.drop_bins();
        unsafe { guard.defer_destroy(table2) };
    }

    #[test]
    fn find_moved_match() {
        let guard = &crossbeam_epoch::pin();
        let mut table = Table::<usize, usize>::new(1);
        let mut table2 = Owned::new(Table::new(1)).into_shared(guard);
        // safety: table2 is still valid
        // The forwarded-to table holds the node we expect to find.
        unsafe { table2.deref() }.store_bin(0, Owned::new(BinEntry::Node(new_node(1, 2, 3))));
        let entry = table.get_moved(table2, guard);
        table.store_bin(0, entry);
        assert_eq!(
            // safety: entry is still valid since the table was not dropped and the
            // entry was not removed
            unsafe { table.find(&BinEntry::Moved, 1, &2, guard).deref() }
                .as_node()
                .unwrap()
                .key,
            2
        );
        table.drop_bins();
        // safety: table2 is still valid and not accessed by different threads
        unsafe { table2.deref_mut() }.drop_bins();
        unsafe { guard.defer_destroy(table2) };
    }
}
// Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { wlan_common::mac::{AuthAlgorithmNumber, Bssid}, wlan_mlme::{ap::Ap, buffer::BufferProvider, common::mac, device::Device, error::ResultExt}, }; #[no_mangle] pub extern "C" fn ap_sta_new( device: Device, buf_provider: BufferProvider, bssid: &[u8; 6], ) -> *mut Ap { Box::into_raw(Box::new(Ap::new(device, buf_provider, Bssid(*bssid)))) } #[no_mangle] pub extern "C" fn ap_sta_send_open_auth_frame( sta: &mut Ap, client_addr: &[u8; 6], status_code: u16, ) -> i32 { sta.ctx .send_auth_frame(*client_addr, AuthAlgorithmNumber::OPEN, 2, mac::StatusCode(status_code)) .into_raw_zx_status() } #[no_mangle] pub extern "C" fn ap_sta_delete(sta: *mut Ap) { if !sta.is_null() { unsafe { Box::from_raw(sta) }; } }