text
stringlengths
8
4.13M
use super::super::{btn, text}; use crate::{ block::{self, BlockId}, idb, model, random_id::U128Id, resource::Data, JsObject, Promise, Timestamp, }; use kagura::prelude::*; use std::{collections::HashMap, rc::Rc}; use wasm_bindgen::{prelude::*, JsCast}; pub type LoadTable = Component<Props, Sub>; pub struct Props { pub common_db: Rc<web_sys::IdbDatabase>, pub table_db: Rc<web_sys::IdbDatabase>, pub block_field: block::Field, } pub struct State { block_field: block::Field, common_db: Rc<web_sys::IdbDatabase>, table_db: Rc<web_sys::IdbDatabase>, tables: Vec<(U128Id, Table)>, selecting_table: Option<U128Id>, cmd_queue: model::CmdQueue<Msg, Sub>, } pub struct Table { name: String, timestamp: Timestamp, resources: Vec<U128Id>, } pub enum LoadMode { Open, Clone, } pub enum Msg { Close, SetTables(Vec<(U128Id, Table)>), SelectTable(U128Id), LoadSelectingTable(LoadMode), LoadTable( LoadMode, U128Id, HashMap<U128Id, JsValue>, HashMap<U128Id, Data>, ), OpenTable( U128Id, HashMap<BlockId, block::FieldBlock>, HashMap<U128Id, Data>, ), } pub enum Sub { Close, Open( BlockId, HashMap<BlockId, block::FieldBlock>, HashMap<U128Id, Data>, ), Clone(U128Id), } impl Table { fn from_jsvalue(val: &JsValue) -> Option<Self> { val.dyn_ref::<JsObject>().and_then(|val| { let name = val.get("name").and_then(|x| x.as_string()); let timestamp = val.get("timestamp").and_then(|x| x.as_f64()); let resources = val.get("resources").map(|x| { js_sys::Array::from(&x) .to_vec() .into_iter() .filter_map(|x| U128Id::from_jsvalue(&x)) .collect::<Vec<_>>() }); if let (Some(name), Some(timestamp), Some(resources)) = (name, timestamp, resources) { Some(Self { name, timestamp: Timestamp::from(timestamp), resources: resources, }) } else { None } }) } } pub fn new() -> LoadTable { Component::new(init, update, render) } fn init(state: Option<State>, props: Props) -> (State, Cmd<Msg, Sub>, Vec<Batch<Msg>>) { if let Some(state) = state { ( State { block_field: props.block_field, common_db: props.common_db, table_db: 
props.table_db, tables: state.tables, selecting_table: state.selecting_table, cmd_queue: state.cmd_queue, }, Cmd::none(), vec![], ) } else { let state = State { block_field: props.block_field, common_db: props.common_db, table_db: props.table_db, tables: vec![], selecting_table: None, cmd_queue: model::CmdQueue::new(), }; let common_db = Rc::clone(&state.common_db); let promise = idb::query(&common_db, "tables", idb::Query::GetAllKeys); let cmd = Cmd::task(move |resolve| { promise .and_then(move |keys| { if let Some(keys) = keys { let keys = js_sys::Array::from(&keys).to_vec(); let mut promises = vec![]; for key in keys { if let Some(table_id) = U128Id::from_jsvalue(&key) { promises.push( idb::query(&common_db, "tables", idb::Query::Get(&key)).map( |x| { x.and_then(|x| Table::from_jsvalue(&x)) .map(|x| (table_id, x)) }, ), ) } } Promise::all(promises) } else { Promise::new(|resolve| resolve(None)) } }) .then(|tables| { if let Some(tables) = tables { let tables: Vec<_> = tables.into_iter().collect(); resolve(Msg::SetTables(tables)); } }); }); (state, cmd, vec![]) } } fn update(state: &mut State, msg: Msg) -> Cmd<Msg, Sub> { match msg { Msg::Close => { state.cmd_queue.enqueue(Cmd::sub(Sub::Close)); state.cmd_queue.dequeue() } Msg::SetTables(tables) => { state.tables = tables; state.cmd_queue.dequeue() } Msg::SelectTable(table_id) => { state.selecting_table = Some(table_id); state.cmd_queue.dequeue() } Msg::LoadSelectingTable(load_mode) => { if let Some(table_id) = state.selecting_table.as_ref() { if let Some(i) = state.tables.iter().position(|(t_id, _)| t_id == table_id) { let promise = load_table( Rc::clone(&state.common_db), Rc::clone(&state.table_db), table_id.clone(), &state.tables[i].1.resources, ); let cmd = Cmd::task({ let table_id = table_id.clone(); move |resolve| { promise.then(|x| { if let Some((blocks, resources)) = x { resolve(Msg::LoadTable(load_mode, table_id, blocks, resources)); } }); } }); state.cmd_queue.enqueue(cmd); } } state.cmd_queue.dequeue() 
} Msg::LoadTable(load_mode, table_id, blocks, resources) => { let blocks = state.block_field.unpack_listed(blocks.into_iter()); let cmd = Cmd::task(move |resolve| { blocks.then(move |blocks| { if let Some(blocks) = blocks { match load_mode { LoadMode::Open => resolve(Msg::OpenTable(table_id, blocks, resources)), LoadMode::Clone => {} } } }) }); state.cmd_queue.enqueue(cmd); state.cmd_queue.dequeue() } Msg::OpenTable(table_id, blocks, resources) => { state.cmd_queue.enqueue(Cmd::sub(Sub::Open( state.block_field.block_id(table_id), blocks, resources, ))); state.cmd_queue.dequeue() } } } fn load_table( common_db: Rc<web_sys::IdbDatabase>, table_db: Rc<web_sys::IdbDatabase>, table_id: U128Id, resources: &Vec<U128Id>, ) -> Promise<(HashMap<U128Id, JsValue>, HashMap<U128Id, Data>)> { let mut promises = Promise::new(|resolve| resolve(Some(vec![]))); for r_id in resources { crate::debug::log_1(r_id.to_jsvalue()); promises = promises.and_then({ let r_id = r_id.clone(); let common_db = Rc::clone(&common_db); move |resources| match resources { Some(mut resources) => { idb::query(&common_db, "resources", idb::Query::Get(&r_id.to_jsvalue())) .and_then(|x| match x { Some(x) => Data::unpack(x), None => Promise::new(|resolve| resolve(None)), }) .map({ let r_id = r_id.clone(); move |x| { x.map(|x| { resources.push((r_id, x)); resources }) } }) } None => Promise::new(|resolve| resolve(None)), } }); } let resources = promises; idb::query(&table_db, &table_id.to_string(), idb::Query::GetAllKeys) .and_then(move |keys| { if let Some(keys) = keys { let keys = js_sys::Array::from(&keys).to_vec(); let mut promises = vec![]; for key in keys { if let Some(block_id) = U128Id::from_jsvalue(&key) { promises.push( idb::query(&table_db, &table_id.to_string(), idb::Query::Get(&key)) .map(|x| x.map(|x| (block_id, x))), ) } else if key.as_string().map(|key| key == "data").unwrap_or(false) { promises.push( idb::query( &table_db, &table_id.to_string(), idb::Query::Get(&JsValue::from("data")), ) .map({ 
let table_id = table_id.clone(); move |x| x.map(|x| (table_id, x)) }), ) } } Promise::all(promises) } else { Promise::new(|resolve| resolve(None)) } }) .and_then(move |tables| { if let Some(tables) = tables { resources.map(move |resources| resources.map(|x| (tables, x))) } else { Promise::new(|resolve| resolve(None)) } }) .map(|x| { x.map(|(tables, resources)| { ( tables.into_iter().collect::<HashMap<_, _>>(), resources.into_iter().collect::<HashMap<_, _>>(), ) }) }) } fn render(state: &State, _: Vec<Html>) -> Html { super::container( Attributes::new(), Events::new(), vec![super::frame( 12, Attributes::new(), Events::new(), vec![ super::header( Attributes::new().class("keyvalue").class("keyvalue-rev"), Events::new(), vec![ Html::div( Attributes::new().class("text-label"), Events::new(), vec![Html::text("保存済みのテーブル")], ), Html::div( Attributes::new().class("linear-h"), Events::new(), vec![btn::close( Attributes::new(), Events::new().on_click(move |_| Msg::Close), )], ), ], ), super::body( Attributes::new() .class("keyvalue") .class("keyvalue-rev") .class("keyvalue-align-stretch"), Events::new(), vec![ Html::div( Attributes::new() .class("linear-v") .class("container-a") .class("scroll-y"), Events::new(), state .tables .iter() .map(|(table_id, table)| { btn::selectable( state .selecting_table .as_ref() .map(|t_id| *table_id == *t_id) .unwrap_or(false), Attributes::new().class("pure-button-list"), Events::new().on_click({ let table_id = table_id.clone(); move |_| Msg::SelectTable(table_id) }), vec![ text::div(&table.name), text::div(format!( "最終更新日時:{}", table.timestamp.to_string() )), ], ) }) .collect(), ), Html::div( Attributes::new() .class("vkeyvalue") .class("vkeyvalue-rev") .class("container-a"), Events::new(), vec![ Html::div(Attributes::new(), Events::new(), vec![]), btn::primary( Attributes::new(), Events::new() .on_click(|_| Msg::LoadSelectingTable(LoadMode::Open)), vec![Html::text("読み込み")], ), ], ), ], ), super::footer(Attributes::new(), Events::new(), 
vec![]), ], )], ) }
use std::io; fn main() { let mut input = String::new(); println!("type in number of words"); io::stdin().read_line(&mut input ) .expect("Failed to read line"); let input: u32 = input.trim().parse() .expect("Please type a number!"); println!("You entered: {}", input); let mut v = create_random_vector(input); print_vector(&v); let mut v2 = copy_shuffle(& v); hm_tree_wrapper(&v); let mut tam = String::new(); println!("type in index of word to be changed"); io::stdin().read_line(&mut tam ) .expect("Failed to read line"); let tam: usize = tam.trim().parse() .expect("Please type a number!"); println!("You entered: {}", tam); v2[tam] = "changed".to_string(); hm_tree_wrapper(&v); hm_tree_wrapper(&v2); /* let mut v1 = vec![String::from("Bifesta"),String::from("Citron"), String::from("Collection"),String::from("Kirby")]; tamper(&mut v1, 0); print_vector(& v1); let mut v2 = copy_shuffle(& v1); print_vector(& v1); print_vector(& v2); */ } pub fn shuffle<T>(vec: &mut [T]) { // ... contents removed: it shuffles the vector in place // ... 
so needs a mutable vector } pub fn copy_shuffle<T: Clone>(vec: &Vec<T>) -> Vec<T> { let mut vec = vec.clone(); shuffle(&mut vec); vec } fn tamper(xor: &mut Vec<String>, i: usize){ let num = xor.len(); for n in 0..num { if n == i { xor[n] = "changed".to_string(); } } } fn hm_tree_wrapper(xor: &Vec<String>) { let mut temp = to_hash(xor); hm_tree(&temp); print_vector(xor); } fn to_hash(xor: &Vec<String>) -> Vec<String>{ let num = xor.len(); let mut trial: Vec<String> = Vec::new(); for n in 0..num { trial.push(primitive_hash_fn(&xor[n])); } trial } fn to_hash_con(xor: &Vec<String>) -> Vec<String>{ let num = xor.len(); let mut trial: Vec<String> = Vec::new(); let mut n = 0; while n < num { let mut temp = String::new(); temp.push_str(&xor[n]); temp.push_str(&xor[n+1]); trial.push(primitive_hash_fn(&temp)); n = n + 2; } trial } fn hm_tree(xor: &Vec<String>) { let n = xor.len(); if n > 1 { let temp = to_hash_con(xor); hm_tree(&temp); print_vector(xor); } else { print!("The root hash: "); print_vector(xor); } } fn primitive_hash_fn(input: &str) -> String { let mut buf = String::with_capacity(4); let mut holder = 0; let mut ind = 0; for c in input.chars() { let x = c.to_digit(36); ind += 1; match x { None => (), Some(temp) => holder = holder + (temp * ind), } } holder = holder % 2099; let temp = holder.to_string(); buf.push_str(&temp); while buf.len() < 4 { buf.push('z'); } buf } fn create_random_vector(input: u32) -> Vec<String> { let mut trial: Vec<String> = Vec::new(); for n in 0..input { let mut temp = String::new(); println!("type in word"); io::stdin().read_line(&mut temp ) .expect("Failed to read line"); trial.push(temp.trim().to_string()); } trial } fn print_vector(xor: &Vec<String>) { let num = xor.len(); for n in 0..num { print!("{} ",xor[n]); } println!(""); } // Struct Attempt /* use std::io; fn main() { println!("Hello, world!"); let mut input = String::new(); println!("type in number of words"); io::stdin().read_line(&mut input ) .expect("Failed to read line"); 
let input: u32 = input.trim().parse() .expect("Please type a number!"); println!("You entered: {}", input); if(input != 0){ let mut v = create_random_vector(input); print_vector(&v); hm_tree_wrapper(&v); let mut tam = String::new(); println!("type in index of word to be changed"); io::stdin().read_line(&mut tam ) .expect("Failed to read line"); let tam: usize = tam.trim().parse() .expect("Please type a number!"); println!("You entered: {}", tam); v[tam] = "changed".to_string(); hm_tree_wrapper(&v); }else{ let mut root = Node { hash: String::from("root node"), name: String::from("root node"), left_c: None, right_c: None,} ; let mut lc = Node { hash: String::from("left node"), name: String::from("left node"), left_c: None, right_c: None,} ; let mut rc = Node { hash: String::from("right node"), name: String::from("right node"), left_c: None, right_c: None,} ; root.left_c = Some(Box::new(lc)); root.right_c = Some(Box::new(rc)); printtree(&root); } //tamper(&mut v, tam); //let y = String::from("abcd"); //let x = primitive_hash_fn(&y); testing hash functions //println!("{}", x); //let mut x: Vec<String> = Vec::new(); //x.push("really".to_string()); found out without trim(), input strings would print in new line //x.push("why".to_string()); //print_vector(&x); } struct Node { hash: String, name: String, left_c: Option<Box<Node>>, right_c: Option<Box<Node>> } impl Clone for Node { //fn clone(&self) -> Node {*self} fn clone(&self) -> Node { let mut t_lc = None; match &self.left_c { Some(p)=> t_lc = Some(Box::new(*p.clone())), None => t_lc = None, } let mut r_lc = None; match &self.right_c { Some(p)=> r_lc = Some(Box::new(*p.clone())), None => r_lc = None, } let temp = Node {hash: self.hash.clone(), name: self.name.clone(), left_c: t_lc, right_c: r_lc,}; temp } } fn printtree(xor: &Node,){ //in_order let left_traverse = &xor.left_c; match left_traverse { Some(p)=> printtree(&*p), None => (), } println!("{}",xor.hash); let right_traverse = &xor.right_c; match right_traverse { 
Some(p)=> printtree(&*p), None => (), } } fn tamper(xor: &mut Vec<String>, i: usize){ let num = xor.len(); let mut temp = String::new(); println!("type in word"); io::stdin().read_line(&mut temp ) .expect("Failed to read line"); for n in 0..num { if n == i { xor[n] = temp.trim().to_string(); } } } fn hm_tree_wrapper(xor: &Vec<String>) { let mut temp = to_hash(xor); let node = hm_tree(&mut temp); printtree(&node); print_vector(xor); } fn to_hash(xor: &Vec<String>) -> Vec<Node>{ let num = xor.len(); let mut trial: Vec<Node> = Vec::new(); for n in 0..num { let mut root = Node { hash: primitive_hash_fn(&xor[n]), name: String::from(n.to_string()), left_c: None, right_c: None,} ; trial.push(root); } trial } fn to_hash_con(xor: &Vec<Node>) -> Vec<Node>{ let num = xor.len(); let mut trial: Vec<Node> = Vec::new(); if(num == 1){ trial = xor.to_vec(); }else { let mut n = 0; while n < num { let mut temp = String::new(); temp.push_str(&xor[n].hash); temp.push_str(&xor[n+1].hash); let nhash = primitive_hash_fn(&temp); let mut nn = String::new(); nn.push_str(&xor[n].name); nn.push_str(&xor[n+1].name); //let mut lc = Node { hash: xor[n].hash.clone(), //name: xor[n].name.clone(), //left_c: xor[n].left_c.copied(), //right_c: xor[n].right_c.copied(), //}; //let mut lc = Node {..xor[n]}; //let mut rc = Node {..xor[n+1]}; let mut lc = xor[n].clone(); let mut rc = xor[n+1].clone(); let mut root = Node { hash: nhash, name: String::from(nn.to_string()), left_c: Some(Box::new(lc)), right_c: Some(Box::new(rc)),} ; //try creating a copy of nodes in Xor to place in to the new node (brand new duplicate) trial.push(root); n = n + 2; if(n == num-1){ trial.push(xor[n].clone()); n = n + 1; } } trial = to_hash_con(&trial); } trial } fn hm_tree(mut xor: &Vec<Node>) -> Node{ let mut n = xor.len(); let mut temp = to_hash_con(&xor); //while n > 1 { //temp = to_hash_con(&xor); //n = xor.len(); //} let root = temp[0].clone(); //if n > 1 { //let temp = to_hash_con(xor); //hm_tree(&temp); 
//print_vector(xor); //} else { //print!("The root hash: "); //print_vector(xor); //} root } fn primitive_hash_fn(input: &str) -> String { let mut buf = String::with_capacity(4); let mut holder = 0; let mut ind = 0; for c in input.chars() { let x = c.to_digit(36); ind += 1; match x { None => (), Some(temp) => holder = holder + (temp * ind), } } holder = holder % 2099; let temp = holder.to_string(); buf.push_str(&temp); while buf.len() < 4 { buf.push('z'); } buf } fn create_random_vector(input: u32) -> Vec<String> { let mut trial: Vec<String> = Vec::new(); for n in 0..input { let mut temp = String::new(); println!("type in word"); io::stdin().read_line(&mut temp ) .expect("Failed to read line"); trial.push(temp.trim().to_string()); } trial } fn print_vector(xor: &Vec<String>) { let num = xor.len(); for n in 0..num { print!("{} ",xor[n]); } println!(""); } */
#[doc = "Register `CFGR1` reader"] pub type R = crate::R<CFGR1_SPEC>; #[doc = "Register `CFGR1` writer"] pub type W = crate::W<CFGR1_SPEC>; #[doc = "Field `MEM_MODE` reader - Memory mapping selection bits"] pub type MEM_MODE_R = crate::FieldReader; #[doc = "Field `MEM_MODE` writer - Memory mapping selection bits"] pub type MEM_MODE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>; #[doc = "Field `PA11_PA12_RMP` reader - PA11 and PA12 remapping bit."] pub type PA11_PA12_RMP_R = crate::BitReader; #[doc = "Field `PA11_PA12_RMP` writer - PA11 and PA12 remapping bit."] pub type PA11_PA12_RMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `IR_POL` reader - IR output polarity selection"] pub type IR_POL_R = crate::BitReader; #[doc = "Field `IR_POL` writer - IR output polarity selection"] pub type IR_POL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `IR_MOD` reader - IR Modulation Envelope signal selection."] pub type IR_MOD_R = crate::FieldReader; #[doc = "Field `IR_MOD` writer - IR Modulation Envelope signal selection."] pub type IR_MOD_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>; #[doc = "Field `BOOSTEN` reader - I/O analog switch voltage booster enable"] pub type BOOSTEN_R = crate::BitReader; #[doc = "Field `BOOSTEN` writer - I/O analog switch voltage booster enable"] pub type BOOSTEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `UCPD1_STROBE` reader - Strobe signal bit for UCPD1"] pub type UCPD1_STROBE_R = crate::BitReader; #[doc = "Field `UCPD1_STROBE` writer - Strobe signal bit for UCPD1"] pub type UCPD1_STROBE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `UCPD2_STROBE` reader - Strobe signal bit for UCPD2"] pub type UCPD2_STROBE_R = crate::BitReader; #[doc = "Field `UCPD2_STROBE` writer - Strobe signal bit for UCPD2"] pub type UCPD2_STROBE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `I2C_PBx_FMP` reader - Fast 
Mode Plus (FM+) driving capability activation bits"] pub type I2C_PBX_FMP_R = crate::FieldReader; #[doc = "Field `I2C_PBx_FMP` writer - Fast Mode Plus (FM+) driving capability activation bits"] pub type I2C_PBX_FMP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>; #[doc = "Field `I2C1_FMP` reader - FM+ driving capability activation for I2C1"] pub type I2C1_FMP_R = crate::BitReader; #[doc = "Field `I2C1_FMP` writer - FM+ driving capability activation for I2C1"] pub type I2C1_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `I2C2_FMP` reader - FM+ driving capability activation for I2C2"] pub type I2C2_FMP_R = crate::BitReader; #[doc = "Field `I2C2_FMP` writer - FM+ driving capability activation for I2C2"] pub type I2C2_FMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `I2C_PAx_FMP` reader - Fast Mode Plus (FM+) driving capability activation bits"] pub type I2C_PAX_FMP_R = crate::FieldReader; #[doc = "Field `I2C_PAx_FMP` writer - Fast Mode Plus (FM+) driving capability activation bits"] pub type I2C_PAX_FMP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>; impl R { #[doc = "Bits 0:1 - Memory mapping selection bits"] #[inline(always)] pub fn mem_mode(&self) -> MEM_MODE_R { MEM_MODE_R::new((self.bits & 3) as u8) } #[doc = "Bit 4 - PA11 and PA12 remapping bit."] #[inline(always)] pub fn pa11_pa12_rmp(&self) -> PA11_PA12_RMP_R { PA11_PA12_RMP_R::new(((self.bits >> 4) & 1) != 0) } #[doc = "Bit 5 - IR output polarity selection"] #[inline(always)] pub fn ir_pol(&self) -> IR_POL_R { IR_POL_R::new(((self.bits >> 5) & 1) != 0) } #[doc = "Bits 6:7 - IR Modulation Envelope signal selection."] #[inline(always)] pub fn ir_mod(&self) -> IR_MOD_R { IR_MOD_R::new(((self.bits >> 6) & 3) as u8) } #[doc = "Bit 8 - I/O analog switch voltage booster enable"] #[inline(always)] pub fn boosten(&self) -> BOOSTEN_R { BOOSTEN_R::new(((self.bits >> 8) & 1) != 0) } #[doc = "Bit 9 - Strobe signal bit for UCPD1"] 
#[inline(always)] pub fn ucpd1_strobe(&self) -> UCPD1_STROBE_R { UCPD1_STROBE_R::new(((self.bits >> 9) & 1) != 0) } #[doc = "Bit 10 - Strobe signal bit for UCPD2"] #[inline(always)] pub fn ucpd2_strobe(&self) -> UCPD2_STROBE_R { UCPD2_STROBE_R::new(((self.bits >> 10) & 1) != 0) } #[doc = "Bits 16:19 - Fast Mode Plus (FM+) driving capability activation bits"] #[inline(always)] pub fn i2c_pbx_fmp(&self) -> I2C_PBX_FMP_R { I2C_PBX_FMP_R::new(((self.bits >> 16) & 0x0f) as u8) } #[doc = "Bit 20 - FM+ driving capability activation for I2C1"] #[inline(always)] pub fn i2c1_fmp(&self) -> I2C1_FMP_R { I2C1_FMP_R::new(((self.bits >> 20) & 1) != 0) } #[doc = "Bit 21 - FM+ driving capability activation for I2C2"] #[inline(always)] pub fn i2c2_fmp(&self) -> I2C2_FMP_R { I2C2_FMP_R::new(((self.bits >> 21) & 1) != 0) } #[doc = "Bits 22:23 - Fast Mode Plus (FM+) driving capability activation bits"] #[inline(always)] pub fn i2c_pax_fmp(&self) -> I2C_PAX_FMP_R { I2C_PAX_FMP_R::new(((self.bits >> 22) & 3) as u8) } } impl W { #[doc = "Bits 0:1 - Memory mapping selection bits"] #[inline(always)] #[must_use] pub fn mem_mode(&mut self) -> MEM_MODE_W<CFGR1_SPEC, 0> { MEM_MODE_W::new(self) } #[doc = "Bit 4 - PA11 and PA12 remapping bit."] #[inline(always)] #[must_use] pub fn pa11_pa12_rmp(&mut self) -> PA11_PA12_RMP_W<CFGR1_SPEC, 4> { PA11_PA12_RMP_W::new(self) } #[doc = "Bit 5 - IR output polarity selection"] #[inline(always)] #[must_use] pub fn ir_pol(&mut self) -> IR_POL_W<CFGR1_SPEC, 5> { IR_POL_W::new(self) } #[doc = "Bits 6:7 - IR Modulation Envelope signal selection."] #[inline(always)] #[must_use] pub fn ir_mod(&mut self) -> IR_MOD_W<CFGR1_SPEC, 6> { IR_MOD_W::new(self) } #[doc = "Bit 8 - I/O analog switch voltage booster enable"] #[inline(always)] #[must_use] pub fn boosten(&mut self) -> BOOSTEN_W<CFGR1_SPEC, 8> { BOOSTEN_W::new(self) } #[doc = "Bit 9 - Strobe signal bit for UCPD1"] #[inline(always)] #[must_use] pub fn ucpd1_strobe(&mut self) -> UCPD1_STROBE_W<CFGR1_SPEC, 9> { 
UCPD1_STROBE_W::new(self) } #[doc = "Bit 10 - Strobe signal bit for UCPD2"] #[inline(always)] #[must_use] pub fn ucpd2_strobe(&mut self) -> UCPD2_STROBE_W<CFGR1_SPEC, 10> { UCPD2_STROBE_W::new(self) } #[doc = "Bits 16:19 - Fast Mode Plus (FM+) driving capability activation bits"] #[inline(always)] #[must_use] pub fn i2c_pbx_fmp(&mut self) -> I2C_PBX_FMP_W<CFGR1_SPEC, 16> { I2C_PBX_FMP_W::new(self) } #[doc = "Bit 20 - FM+ driving capability activation for I2C1"] #[inline(always)] #[must_use] pub fn i2c1_fmp(&mut self) -> I2C1_FMP_W<CFGR1_SPEC, 20> { I2C1_FMP_W::new(self) } #[doc = "Bit 21 - FM+ driving capability activation for I2C2"] #[inline(always)] #[must_use] pub fn i2c2_fmp(&mut self) -> I2C2_FMP_W<CFGR1_SPEC, 21> { I2C2_FMP_W::new(self) } #[doc = "Bits 22:23 - Fast Mode Plus (FM+) driving capability activation bits"] #[inline(always)] #[must_use] pub fn i2c_pax_fmp(&mut self) -> I2C_PAX_FMP_W<CFGR1_SPEC, 22> { I2C_PAX_FMP_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "SYSCFG configuration register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct CFGR1_SPEC; impl crate::RegisterSpec for CFGR1_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`cfgr1::R`](R) reader structure"] impl crate::Readable for CFGR1_SPEC {} #[doc = "`write(|w| ..)` method takes [`cfgr1::W`](W) writer structure"] impl crate::Writable for CFGR1_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets CFGR1 to value 0"] impl crate::Resettable for CFGR1_SPEC { const RESET_VALUE: Self::Ux = 0; }
use jsonrpc_core::{Notification, Params, Version}; use serde_json; use uuid::Uuid; use messages; #[derive(Debug, Deserialize)] pub struct Request<T> { room_id: Uuid, data: T, } #[derive(Debug, Serialize)] pub struct NotificationParams<T> { room_id: Uuid, data: T, } // Offer pub type OfferRequest = Request<OfferRequestData>; #[derive(Debug, Deserialize)] pub struct OfferRequestData { jsep: serde_json::Value, from: Uuid, to: Uuid, tracks: Vec<Track>, } #[derive(Debug, Serialize, Deserialize)] struct Track { id: Uuid, } type OfferNotification = NotificationParams<OfferNotificationData>; #[derive(Debug, Serialize)] struct OfferNotificationData { jsep: serde_json::Value, from: Uuid, tracks: Vec<Track>, } impl From<OfferRequest> for OfferNotificationData { fn from(req: OfferRequest) -> Self { OfferNotificationData { jsep: req.data.jsep, from: req.data.from, tracks: req.data.tracks, } } } // Offer // Answer pub type AnswerRequest = Request<AnswerRequestData>; #[derive(Debug, Deserialize)] pub struct AnswerRequestData { jsep: serde_json::Value, from: Uuid, to: Uuid, } type AnswerNotification = NotificationParams<AnswerNotificationData>; #[derive(Debug, Serialize)] struct AnswerNotificationData { jsep: serde_json::Value, from: Uuid, } impl From<AnswerRequest> for AnswerNotificationData { fn from(req: AnswerRequest) -> Self { AnswerNotificationData { jsep: req.data.jsep, from: req.data.from, } } } // Answer // Candidate pub type CandidateRequest = Request<CandidateRequestData>; #[derive(Debug, Deserialize)] pub struct CandidateRequestData { candidate: serde_json::Value, from: Uuid, to: Uuid, } type CandidateNotification = NotificationParams<CandidateNotificationData>; #[derive(Debug, Serialize)] struct CandidateNotificationData { candidate: serde_json::Value, from: Uuid, } impl From<CandidateRequest> for CandidateNotificationData { fn from(req: CandidateRequest) -> Self { CandidateNotificationData { candidate: req.data.candidate, from: req.data.from, } } } // Candidate 
#[derive(Debug, Serialize)] #[serde(untagged)] enum WebrtcMethod { Offer(OfferNotification), Answer(AnswerNotification), Candidate(CandidateNotification), } impl From<OfferRequest> for WebrtcMethod { fn from(req: OfferRequest) -> Self { let offer = OfferNotification { room_id: req.room_id, data: req.into(), }; WebrtcMethod::Offer(offer) } } impl From<AnswerRequest> for WebrtcMethod { fn from(req: AnswerRequest) -> Self { let answer = AnswerNotification { room_id: req.room_id, data: req.into(), }; WebrtcMethod::Answer(answer) } } impl From<CandidateRequest> for WebrtcMethod { fn from(req: CandidateRequest) -> Self { let candidate = CandidateNotification { room_id: req.room_id, data: req.into(), }; WebrtcMethod::Candidate(candidate) } } impl From<WebrtcMethod> for Option<Params> { fn from(method: WebrtcMethod) -> Self { serde_json::to_value(method) .ok() .map(|value| Params::Array(vec![value])) } } impl From<WebrtcMethod> for Notification { fn from(method: WebrtcMethod) -> Self { let method_name = match method { WebrtcMethod::Offer(_) => "webrtc.offer", WebrtcMethod::Answer(_) => "webrtc.answer", WebrtcMethod::Candidate(_) => "webrtc.candidate", }; Notification { jsonrpc: Some(Version::V2), method: method_name.to_owned(), params: method.into(), } } } impl From<OfferRequest> for Notification { fn from(req: OfferRequest) -> Self { let method = WebrtcMethod::from(req); Notification::from(method) } } impl From<AnswerRequest> for Notification { fn from(req: AnswerRequest) -> Self { let method = WebrtcMethod::from(req); Notification::from(method) } } impl From<CandidateRequest> for Notification { fn from(req: CandidateRequest) -> Self { let method = WebrtcMethod::from(req); Notification::from(method) } } impl From<OfferRequest> for messages::Method { fn from(req: OfferRequest) -> Self { messages::Method { agent_id: req.data.to, body: req.into(), } } } impl From<AnswerRequest> for messages::Method { fn from(req: AnswerRequest) -> Self { messages::Method { agent_id: 
req.data.to, body: req.into(), } } } impl From<CandidateRequest> for messages::Method { fn from(req: CandidateRequest) -> Self { messages::Method { agent_id: req.data.to, body: req.into(), } } }
#![cfg(feature = "std")] use tabled::{ builder::Builder, settings::{ object::{Rows, Segment}, Alignment, Disable, Extract, Format, Modify, Padding, }, }; use crate::matrix::Matrix; use testing_table::test_table; test_table!( extract_segment_full_test, Matrix::new(3, 3) .with(Modify::new(Segment::all()).with(Alignment::left())) .with(Modify::new(Segment::all()).with(Padding::new(3, 1, 0, 0))) .with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))) .with(Extract::segment(.., ..)), "+-------+--------------+--------------+--------------+" "| [N] | [column 0] | [column 1] | [column 2] |" "+-------+--------------+--------------+--------------+" "| [0] | [0-0] | [0-1] | [0-2] |" "+-------+--------------+--------------+--------------+" "| [1] | [1-0] | [1-1] | [1-2] |" "+-------+--------------+--------------+--------------+" "| [2] | [2-0] | [2-1] | [2-2] |" "+-------+--------------+--------------+--------------+" ); test_table!( extract_segment_skip_top_row_test, Matrix::new(3, 3) .with(Modify::new(Segment::all()).with(Alignment::left())) .with(Modify::new(Segment::all()).with(Padding::new(3, 1, 0, 0))) .with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))) .with(Extract::segment(1.., ..)), "+-------+---------+---------+---------+" "| [0] | [0-0] | [0-1] | [0-2] |" "+-------+---------+---------+---------+" "| [1] | [1-0] | [1-1] | [1-2] |" "+-------+---------+---------+---------+" "| [2] | [2-0] | [2-1] | [2-2] |" "+-------+---------+---------+---------+" ); test_table!( extract_segment_skip_column_test, Matrix::new(3, 3) .with(Modify::new(Segment::all()).with(Alignment::left())) .with(Modify::new(Segment::all()).with(Padding::new(3, 1, 0, 0))) .with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))) .with(Extract::segment(.., 1..)), "+--------------+--------------+--------------+" "| [column 0] | [column 1] | [column 2] |" "+--------------+--------------+--------------+" "| [0-0] | [0-1] | [0-2] |" 
"+--------------+--------------+--------------+" "| [1-0] | [1-1] | [1-2] |" "+--------------+--------------+--------------+" "| [2-0] | [2-1] | [2-2] |" "+--------------+--------------+--------------+" ); test_table!( extract_segment_bottom_right_square_test, Matrix::new(3, 3) .with(Modify::new(Segment::all()).with(Alignment::left())) .with(Modify::new(Segment::all()).with(Padding::new(3, 1, 0, 0))) .with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))) .with(Extract::segment(2.., 2..)), "+---------+---------+" "| [1-1] | [1-2] |" "+---------+---------+" "| [2-1] | [2-2] |" "+---------+---------+" ); test_table!( extract_segment_middle_section_test, Matrix::new(3, 3) .with(Modify::new(Segment::all()).with(Alignment::left())) .with(Modify::new(Segment::all()).with(Padding::new(3, 1, 0, 0))) .with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))) .with(Extract::segment(1..3, 1..)), "+---------+---------+---------+" "| [0-0] | [0-1] | [0-2] |" "+---------+---------+---------+" "| [1-0] | [1-1] | [1-2] |" "+---------+---------+---------+" ); test_table!( extract_segment_empty_test, Matrix::new(3, 3).with(Extract::segment(1..1, 1..1)), "" ); test_table!( extract_rows_full_test, Matrix::new(3, 3) .with(Modify::new(Segment::all()).with(Alignment::left())) .with(Modify::new(Segment::all()).with(Padding::new(3, 1, 0, 0))) .with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))) .with(Extract::rows(..)), "+-------+--------------+--------------+--------------+" "| [N] | [column 0] | [column 1] | [column 2] |" "+-------+--------------+--------------+--------------+" "| [0] | [0-0] | [0-1] | [0-2] |" "+-------+--------------+--------------+--------------+" "| [1] | [1-0] | [1-1] | [1-2] |" "+-------+--------------+--------------+--------------+" "| [2] | [2-0] | [2-1] | [2-2] |" "+-------+--------------+--------------+--------------+" ); test_table!( extract_rows_empty_test, Matrix::new(3, 
3).with(Extract::rows(0..0)), "" ); test_table!( extract_rows_partial_view_test, Matrix::new(3, 3) .with(Modify::new(Segment::all()).with(Alignment::left())) .with(Modify::new(Segment::all()).with(Padding::new(3, 1, 0, 0))) .with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))) .with(Extract::rows(0..=2)), "+-------+--------------+--------------+--------------+" "| [N] | [column 0] | [column 1] | [column 2] |" "+-------+--------------+--------------+--------------+" "| [0] | [0-0] | [0-1] | [0-2] |" "+-------+--------------+--------------+--------------+" "| [1] | [1-0] | [1-1] | [1-2] |" "+-------+--------------+--------------+--------------+" ); test_table!( extract_columns_full_test, Matrix::new(3, 3) .with(Modify::new(Segment::all()).with(Alignment::left())) .with(Modify::new(Segment::all()).with(Padding::new(3, 1, 0, 0))) .with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))) .with(Extract::columns(..)), "+-------+--------------+--------------+--------------+" "| [N] | [column 0] | [column 1] | [column 2] |" "+-------+--------------+--------------+--------------+" "| [0] | [0-0] | [0-1] | [0-2] |" "+-------+--------------+--------------+--------------+" "| [1] | [1-0] | [1-1] | [1-2] |" "+-------+--------------+--------------+--------------+" "| [2] | [2-0] | [2-1] | [2-2] |" "+-------+--------------+--------------+--------------+" ); test_table!( extract_columns_empty_test, Matrix::new(3, 3).with(Extract::columns(0..0)), "" ); test_table!( extract_columns_partial_view_test, Matrix::new(3, 3) .with(Modify::new(Segment::all()).with(Alignment::left())) .with(Modify::new(Segment::all()).with(Padding::new(3, 1, 0, 0))) .with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))) .with(Extract::columns(0..2)), "+-------+--------------+" "| [N] | [column 0] |" "+-------+--------------+" "| [0] | [0-0] |" "+-------+--------------+" "| [1] | [1-0] |" "+-------+--------------+" "| [2] | [2-0] |" 
"+-------+--------------+" ); test_table!( extract_inside_test, Matrix::new(3, 3).with(Disable::row(Rows::first())).with(Extract::segment(1..2, 1..2)), "+-----+" "| 1-0 |" "+-----+" ); test_table!( extract_left_test, Matrix::new(3, 3).with(Disable::row(Rows::first())).with(Extract::segment(.., ..1)), "+---+" "| 0 |" "+---+" "| 1 |" "+---+" "| 2 |" "+---+" ); test_table!( extract_right_test, Matrix::new(3, 3).with(Disable::row(Rows::first())).with(Extract::segment(.., 2..)), "+-----+-----+" "| 0-1 | 0-2 |" "+-----+-----+" "| 1-1 | 1-2 |" "+-----+-----+" "| 2-1 | 2-2 |" "+-----+-----+" ); test_table!( extract_top_test, Matrix::new(3, 3).with(Disable::row(Rows::first())).with(Extract::segment(..1, ..)), "+---+-----+-----+-----+" "| 0 | 0-0 | 0-1 | 0-2 |" "+---+-----+-----+-----+" ); test_table!( extract_bottom_test, Matrix::new(3, 3).with(Disable::row(Rows::first())).with(Extract::segment(2.., ..)), "+---+-----+-----+-----+" "| 2 | 2-0 | 2-1 | 2-2 |" "+---+-----+-----+-----+" ); test_table!( extract_all_test, Matrix::new(3, 3) .with(Disable::row(Rows::first())) .with(Extract::segment(3.., 3..)), "" ); test_table!( extract_empty_test, Builder::default().build().with(Extract::segment(.., ..)), "" );
// Copyright 2018-2020, Wayfair GmbH
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// KV parsing
//
// Parses a string into a map. It is possible to split based on different characters that represent
// either field or key value boundaries.
//
// A good part of the logstash functionality will be handled outside of this function and in a
// generic way in tremor script.
//
// Features (in relation to LS):
//
// | Setting                | Translation                                             | Supported |
// |------------------------|---------------------------------------------------------|-----------|
// | allow_duplicate_values | not supported, since we deal with JSON maps             | No        |
// | default_keys           | should be handled in TS (via assignment)                | TS        |
// | exclude_keys           | should be handled in TS (via delete_keys?)              | TS        |
// | field_split            | supported, array of strings                             | Yes       |
// | field_split_pattern    | not supported                                           | No        |
// | include_brackets       | should be handled in TS (via map + dissect?)
| TS | // | include_keys | should be handled in TS (via select) | TS | // | prefix | should be handled in TS (via map + string::format) | TS | // | recursive | not supported | No | // | remove_char_key | should be handled in TS (via map + re::replace) | TS | // | remove_char_value | should be handled in TS (via map + re::replace) | TS | // | source | handled in TS at call time | TS | // | target | handled in TS at return time | TS | // | tag_on_failure | handled in TS at return time | TS | // | tag_on_timeout | currently not supported | No | // | timeout_millis | currently not supported | No | // | transform_key | should be handled in TS (via map + ?) | TS | // | transform_value | should be handled in TS (via map + ?) | TS | // | trim_key | should be handled in TS (via map + ?) | TS | // | trim_value | should be handled in TS (via map + ?) | TS | // | value_split | supported, array of strings | Yes | // | value_split_pattern | not supported | No | // | whitespace | we always run in 'lenient mode' as is the default of LS | No | #![forbid(warnings)] #![recursion_limit = "1024"] #![deny( clippy::all, clippy::unwrap_used, clippy::unnecessary_unwrap, clippy::pedantic )] #![allow(clippy::must_use_candidate)] use serde::{Deserialize, Serialize}; use simd_json::prelude::*; use std::fmt; #[derive(Debug, PartialEq)] pub enum Error { InvalidPattern(usize), DoubleSeperator(String), InvalidEscape(char), UnterminatedEscape, } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::InvalidPattern(p) => write!(f, "invalid pattern at character {}", p), Self::DoubleSeperator(s) => write!(f, "The seperator '{}' is used for both key value seperation as well as pair seperation.", s), Self::InvalidEscape(s) => write!(f, "Invalid escape sequence \\'{}' is not valid.", s), Self::UnterminatedEscape => write!(f, "Unterminated escape at the end of line or of a delimiter %{{ can't be escaped"), } } } impl std::error::Error for Error {} 
#[derive(PartialEq, Debug, Clone, Serialize, Deserialize)] pub struct Pattern { field_seperators: Vec<String>, key_seperators: Vec<String>, } impl std::default::Default for Pattern { fn default() -> Self { Self { field_seperators: vec![" ".to_string()], key_seperators: vec![":".to_string()], } } } fn handle_escapes(s: &str) -> Result<String, Error> { let mut res = String::with_capacity(s.len()); let mut cs = s.chars(); while let Some(c) = cs.next() { match c { '\\' => { if let Some(c1) = cs.next() { match c1 { '\\' => res.push(c1), 'n' => res.push('\n'), 't' => res.push('\t'), 'r' => res.push('\r'), other => return Err(Error::InvalidEscape(other)), } } else { return Err(Error::UnterminatedEscape); } } c => res.push(c), } } Ok(res) } impl Pattern { /// compiles a pattern /// # Errors /// fails if the pattern is invalid pub fn compile(pattern: &str) -> Result<Self, Error> { let mut field_seperators = Vec::new(); let mut key_seperators = Vec::new(); let mut i = 0; loop { if pattern[i..].starts_with("%{key}") { i += 6; if let Some(i1) = pattern[i..].find("%{val}") { if i1 != 0 { key_seperators.push(handle_escapes(&pattern[i..i + i1])?); } i += i1 + 6; } else { return Err(Error::InvalidPattern(i)); } } else if let Some(i1) = pattern[i..].find("%{key}") { if i1 != 0 { field_seperators.push(handle_escapes(&pattern[i..i + i1])?); } i += i1; } else if pattern[i..].is_empty() { break; } else { field_seperators.push(handle_escapes(&pattern[i..])?); break; } } if field_seperators.is_empty() { field_seperators.push(" ".to_string()) } if key_seperators.is_empty() { key_seperators.push(":".to_string()) } field_seperators.sort(); key_seperators.sort(); field_seperators.dedup(); key_seperators.dedup(); for fs in &field_seperators { if key_seperators.iter().any(|ks| ks.find(fs).is_some()) { return Err(Error::DoubleSeperator(fs.to_string())); } if field_seperators .iter() .any(|fs2| fs2 != fs && fs2.contains(fs)) { return Err(Error::DoubleSeperator(fs.to_string())); } } for ks in 
&key_seperators { if field_seperators.iter().any(|fs| fs.find(ks).is_some()) { return Err(Error::DoubleSeperator(ks.to_string())); } if key_seperators .iter() .any(|ks2| ks2 != ks && ks2.contains(ks)) { return Err(Error::DoubleSeperator(ks.to_string())); } } Ok(Self { field_seperators, key_seperators, }) } /// Splits a string that represents KV pairs. /// /// * `input` - The input string /// /// Note: Fields that have on value are dropped. pub fn run<'input, V>(&self, input: &'input str) -> Option<V> where V: ValueTrait + Mutable + Builder<'input> + 'input, <V as ValueTrait>::Key: std::convert::From<&'input str>, { let mut r = V::object(); let mut empty = true; for field in multi_split(input, &self.field_seperators) { let kv: Vec<&str> = multi_split(field, &self.key_seperators); if kv.len() == 2 { empty = false; r.insert(kv[0], kv[1]).ok()?; } } if empty { None } else { Some(r) } } } fn multi_split<'input>(input: &'input str, seperators: &[String]) -> Vec<&'input str> { use std::mem; let mut i: Vec<&str> = vec![input]; let mut i1 = vec![]; let mut r: Vec<&str>; for s in seperators { i1.clear(); for e in &i { r = e.split(s.as_str()).collect(); i1.append(&mut r); } mem::swap(&mut i, &mut i1); } i } #[cfg(test)] mod test { use super::*; use simd_json::BorrowedValue; #[test] fn test_multisplit() { let seps = vec![String::from(" "), String::from(";")]; let input = "this=is;a=test for:seperators"; let i = multi_split(input, &seps); assert_eq!(i, vec!["this=is", "a=test", "for:seperators"]); } #[test] fn simple_split() { let kv = Pattern::compile("%{key}=%{val}").expect("Failed to build pattern"); let r: BorrowedValue = kv.run("this=is a=test").expect("Failed to split input"); assert_eq!(r.as_object().map(|v| v.len()).unwrap_or_default(), 2); assert_eq!(r["this"], "is"); assert_eq!(r["a"], "test"); } #[test] fn simple_split2() { let kv = Pattern::compile("&%{key}=%{val}").expect("Failed to build pattern"); let r: BorrowedValue = kv.run("this=is&a=test").expect("Failed to 
split input"); assert_eq!(r.as_object().map(|v| v.len()).unwrap_or_default(), 2); assert_eq!(r["this"], "is"); assert_eq!(r["a"], "test"); } #[test] fn newline_simple_() { let kv = Pattern::compile(r#"\n%{key}=%{val}"#).expect("Failed to build pattern"); let r: BorrowedValue = kv.run("this=is\na=test").expect("Failed to split input"); assert_eq!(r.as_object().map(|v| v.len()).unwrap_or_default(), 2); assert_eq!(r["this"], "is"); assert_eq!(r["a"], "test"); } #[test] fn simple_split3() { let kv = Pattern::compile("&").expect("Failed to build pattern"); let r: BorrowedValue = kv.run("this:is&a:test").expect("Failed to split input"); assert_eq!(r.as_object().map(|v| v.len()).unwrap_or_default(), 2); assert_eq!(r["this"], "is"); assert_eq!(r["a"], "test"); } #[test] fn simple_split4() { let kv = Pattern::compile("%{key}%{%{val}").expect("Failed to build pattern"); let r: BorrowedValue = kv.run("this%{is a%{test").expect("Failed to split input"); assert_eq!(r.as_object().map(|v| v.len()).unwrap_or_default(), 2); assert_eq!(r["this"], "is"); assert_eq!(r["a"], "test"); } #[test] fn simple_split5() { let kv = Pattern::compile("%{key}%{key}%{val}").expect("Failed to build pattern"); dbg!(&kv); let r: BorrowedValue = kv .run("this%{key}is a%{key}test") .expect("Failed to split input"); assert_eq!(r.as_object().map(|v| v.len()).unwrap_or_default(), 2); assert_eq!(r["this"], "is"); assert_eq!(r["a"], "test"); } #[test] fn invalid_pattern() { let kv = Pattern::compile("%{key} %{val} "); assert_eq!(kv.err(), Some(Error::DoubleSeperator(String::from(" ")))); let kv = Pattern::compile("%{key}=%{val}; %{key}:%{val} %{key}:%{val}"); assert_eq!(kv.err(), Some(Error::DoubleSeperator(String::from(" ")))); let kv = Pattern::compile("%{key}=%{val};%{key}:%{val} :%{key}:%{val}"); assert_eq!(kv.err(), Some(Error::DoubleSeperator(String::from(":")))); } #[test] fn one_field() { let kv = Pattern::compile("%{key}=%{val}").expect("Failed to build pattern"); let r: BorrowedValue = 
kv.run("this=is").expect("Failed to split input"); assert_eq!(r.as_object().map(|v| v.len()).unwrap_or_default(), 1); assert_eq!(r["this"], "is"); } #[test] fn no_split() { let kv = Pattern::compile("%{key}=%{val}").expect("Failed to build pattern"); let r: Option<BorrowedValue> = kv.run("this is a test"); assert!(r.is_none()); } #[test] fn different_seperatpors() { let kv = Pattern::compile("%{key}=%{val};%{key}:%{val} %{key}:%{val}") .expect("Failed to build pattern"); dbg!(&kv); let r: BorrowedValue = kv .run("this=is;a=test for:seperators") .expect("Failed to split input"); dbg!(&r); assert_eq!(r.as_object().map(|v| v.len()).unwrap_or_default(), 3); assert_eq!(r["this"], "is"); assert_eq!(r["a"], "test"); assert_eq!(r["for"], "seperators"); } #[test] fn different_seperatpors2() { let kv = Pattern::compile("%{key}=%{val}%{key}:%{val} %{key}:%{val};") .expect("Failed to build pattern"); let r: BorrowedValue = kv .run("this=is;a=test for:seperators") .expect("Failed to split input"); dbg!(&r); dbg!(&kv); assert_eq!(r.as_object().map(|v| v.len()).unwrap_or_default(), 3); assert_eq!(r["this"], "is"); assert_eq!(r["a"], "test"); assert_eq!(r["for"], "seperators"); } #[test] fn invalid_pattern2() { let kv = Pattern::compile("%{key}=%{val};%{key}:%{val} %{key}:%{val}") .expect("Failed to build pattern"); let r: BorrowedValue = kv .run("this=is;a=test for:seperators") .expect("Failed to split input"); dbg!(&r); dbg!(&kv); assert_eq!(r.as_object().map(|v| v.len()).unwrap_or_default(), 3); assert_eq!(r["this"], "is"); assert_eq!(r["a"], "test"); assert_eq!(r["for"], "seperators"); } } /* Functions: map::select(<map>, [<key>, ...]) keeps only the given keys in an object this would make include keys be part of TS */ /* 15 kv { 16 source => "full_message" 17 include_keys => ["hits", "status", "QTime"] 18 } 19 20 kv { 21 source => "full_message" 22 field_split => "&" 23 include_keys => ["qt"] 24 } 92 kv { 93 source => "naxsi_params" 94 field_split => "&" 95 include_keys => 
["ip", "server", "uri", "learning", "vers", "total_processed", "total_blocked", "block"] 96 } 142 kv { 143 source => "dispatcher_params" 144 field_split => "&" 145 include_keys => ["_controller", "_action"] 146 remove_field => "dispatcher_params" 147 } 22 kv { 23 source => "syslog_message" 24 remove_field => [ "syslog_message" ] 25 } */
// Fabriquons un struct pour les chaussures #[derive(PartialEq, Debug)] struct Shoe { size: u32, style: String, } // Une fonction à deux arguments : // un vecteur de structs (ça existe) // Une taille de chaussure // Et qui renvoie un vecteur de struct // Cette fonction prend l'ownership du vecteur fn shoes_in_my_size(shoes: Vec<Shoe>, shoe_size: u32) -> Vec<Shoe> { // On passe le vecteur dans un itérateur avec into_iter() shoes.into_iter() // 'filter' amène l'itérateur dans un nouvel itérateur qui ne contiendra que les // éléments où la fermeture renverra TRUE. // la fermeture capture le paramètre shoe_size de l'environnement .filter(|s| s.size == shoe_size) // on récupère tout ça avec collect() .collect() } // Un test pour vérifier que la fonction... fonctionne #[test] fn filters_by_size() { let vecteur_de_chaussures = vec![ Shoe { size: 10, style: String::from("sneaker") }, Shoe { size: 13, style: String::from("sandal") }, Shoe { size: 10, style: String::from("boot") }, ]; let in_my_size = shoes_in_my_size(vecteur_de_chaussures, 10); assert_eq!( in_my_size, vec![ Shoe { size: 10, style: String::from("sneaker") }, Shoe { size: 10, style: String::from("boot") }, ] ) }
mod rom; pub use crate::rom::*;
//! A simple Driver for the Waveshare 5.83" (B) v2 E-Ink Display via SPI //! //! # References //! //! - [Datasheet](https://www.waveshare.com/5.83inch-e-Paper-B.htm) //! - [Waveshare C driver](https://github.com/waveshare/e-Paper/blob/master/RaspberryPi_JetsonNano/c/lib/e-Paper/EPD_5in83b_V2.c) //! - [Waveshare Python driver](https://github.com/waveshare/e-Paper/blob/master/RaspberryPi_JetsonNano/python/lib/waveshare_epd/epd5in83b_V2.py) use embedded_hal::{ blocking::{delay::*, spi::Write}, digital::v2::{InputPin, OutputPin}, }; use crate::color::Color; use crate::interface::DisplayInterface; use crate::prelude::{TriColor, WaveshareDisplay, WaveshareThreeColorDisplay}; use crate::traits::{InternalWiAdditions, RefreshLut}; pub(crate) mod command; use self::command::Command; use crate::buffer_len; /// Full size buffer for use with the 5in83b v2 EPD #[cfg(feature = "graphics")] pub type Display5in83 = crate::graphics::Display< WIDTH, HEIGHT, false, { buffer_len(WIDTH as usize, HEIGHT as usize * 2) }, TriColor, >; /// Width of the display pub const WIDTH: u32 = 648; /// Height of the display pub const HEIGHT: u32 = 480; /// Default Background Color pub const DEFAULT_BACKGROUND_COLOR: Color = Color::White; const IS_BUSY_LOW: bool = true; const NUM_DISPLAY_BITS: u32 = WIDTH * HEIGHT / 8; /// Epd7in5 driver /// pub struct Epd5in83<SPI, CS, BUSY, DC, RST, DELAY> { /// Connection Interface interface: DisplayInterface<SPI, CS, BUSY, DC, RST, DELAY>, /// Background Color color: Color, } impl<SPI, CS, BUSY, DC, RST, DELAY> InternalWiAdditions<SPI, CS, BUSY, DC, RST, DELAY> for Epd5in83<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { fn init(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> { // Reset the device self.interface.reset(delay, 10_000, 10_000); // Start the booster self.cmd_with_data(spi, Command::BoosterSoftStart, &[0x17, 0x17, 0x1e, 0x17])?; // Set the 
power settings: VGH=20V,VGL=-20V,VDH=15V,VDL=-15V self.cmd_with_data(spi, Command::PowerSetting, &[0x07, 0x07, 0x3F, 0x3F])?; // Power on self.command(spi, Command::PowerOn)?; delay.delay_us(5000); self.wait_until_idle(spi, delay)?; // Set the panel settings: BWROTP self.cmd_with_data(spi, Command::PanelSetting, &[0x0F])?; // Set the real resolution self.send_resolution(spi)?; // Disable dual SPI self.cmd_with_data(spi, Command::DualSPI, &[0x00])?; // Set Vcom and data interval self.cmd_with_data(spi, Command::VcomAndDataIntervalSetting, &[0x11, 0x07])?; // Set S2G and G2S non-overlap periods to 12 (default) self.cmd_with_data(spi, Command::TconSetting, &[0x22])?; self.wait_until_idle(spi, delay)?; Ok(()) } } impl<SPI, CS, BUSY, DC, RST, DELAY> WaveshareThreeColorDisplay<SPI, CS, BUSY, DC, RST, DELAY> for Epd5in83<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { fn update_color_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, black: &[u8], chromatic: &[u8], ) -> Result<(), SPI::Error> { self.update_achromatic_frame(spi, delay, black)?; self.update_chromatic_frame(spi, delay, chromatic)?; Ok(()) } fn update_achromatic_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, black: &[u8], ) -> Result<(), SPI::Error> { self.wait_until_idle(spi, delay)?; self.cmd_with_data(spi, Command::DataStartTransmission1, black)?; Ok(()) } fn update_chromatic_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, chromatic: &[u8], ) -> Result<(), SPI::Error> { self.wait_until_idle(spi, delay)?; self.cmd_with_data(spi, Command::DataStartTransmission2, chromatic)?; Ok(()) } } impl<SPI, CS, BUSY, DC, RST, DELAY> WaveshareDisplay<SPI, CS, BUSY, DC, RST, DELAY> for Epd5in83<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { type DisplayColor = Color; fn new( spi: &mut SPI, cs: CS, busy: BUSY, dc: DC, rst: RST, delay: 
&mut DELAY, delay_us: Option<u32>, ) -> Result<Self, SPI::Error> { let interface = DisplayInterface::new(cs, busy, dc, rst, delay_us); let color = DEFAULT_BACKGROUND_COLOR; let mut epd = Epd5in83 { interface, color }; epd.init(spi, delay)?; Ok(epd) } fn sleep(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> { self.wait_until_idle(spi, delay)?; self.command(spi, Command::PowerOff)?; self.wait_until_idle(spi, delay)?; self.cmd_with_data(spi, Command::DeepSleep, &[0xA5])?; Ok(()) } fn wake_up(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> { self.init(spi, delay) } fn set_background_color(&mut self, color: Color) { self.color = color; } fn background_color(&self) -> &Color { &self.color } fn width(&self) -> u32 { WIDTH } fn height(&self) -> u32 { HEIGHT } fn update_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error> { self.wait_until_idle(spi, delay)?; self.update_achromatic_frame(spi, delay, buffer)?; let color = self.color.get_byte_value(); self.command(spi, Command::DataStartTransmission2)?; self.interface.data_x_times(spi, color, NUM_DISPLAY_BITS)?; Ok(()) } fn update_partial_frame( &mut self, spi: &mut SPI, delay: &mut DELAY, buffer: &[u8], x: u32, y: u32, width: u32, height: u32, ) -> Result<(), SPI::Error> { self.wait_until_idle(spi, delay)?; if buffer.len() as u32 != width / 8 * height { //TODO panic or error } let hrst_upper = (x / 8) as u8 >> 6; let hrst_lower = ((x / 8) << 3) as u8; let hred_upper = ((x + width) / 8) as u8 >> 6; let hred_lower = (((x + width) / 8) << 3) as u8 & 0b111; let vrst_upper = (y >> 8) as u8; let vrst_lower = y as u8; let vred_upper = ((y + height) >> 8) as u8; let vred_lower = (y + height) as u8; let pt_scan = 0x01; // Gates scan both inside and outside of the partial window. 
(default) self.command(spi, Command::PartialIn)?; self.command(spi, Command::PartialWindow)?; self.send_data( spi, &[ hrst_upper, hrst_lower, hred_upper, hred_lower, vrst_upper, vrst_lower, vred_upper, vred_lower, pt_scan, ], )?; self.command(spi, Command::DataStartTransmission1)?; self.send_data(spi, buffer)?; let color = TriColor::Black.get_byte_value(); //We need it black, so red channel will be rendered transparent self.command(spi, Command::DataStartTransmission2)?; self.interface .data_x_times(spi, color, width * height / 8)?; self.command(spi, Command::DisplayRefresh)?; self.wait_until_idle(spi, delay)?; self.command(spi, Command::PartialOut)?; Ok(()) } fn display_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> { self.command(spi, Command::DisplayRefresh)?; self.wait_until_idle(spi, delay)?; Ok(()) } fn update_and_display_frame( &mut self, spi: &mut SPI, buffer: &[u8], delay: &mut DELAY, ) -> Result<(), SPI::Error> { self.update_frame(spi, buffer, delay)?; self.display_frame(spi, delay)?; Ok(()) } fn clear_frame(&mut self, spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> { self.wait_until_idle(spi, delay)?; // The Waveshare controllers all implement clear using 0x33 self.command(spi, Command::DataStartTransmission1)?; self.interface.data_x_times(spi, 0xFF, NUM_DISPLAY_BITS)?; self.command(spi, Command::DataStartTransmission2)?; self.interface.data_x_times(spi, 0x00, NUM_DISPLAY_BITS)?; Ok(()) } fn set_lut( &mut self, _spi: &mut SPI, _delay: &mut DELAY, _refresh_rate: Option<RefreshLut>, ) -> Result<(), SPI::Error> { unimplemented!(); } fn wait_until_idle(&mut self, _spi: &mut SPI, delay: &mut DELAY) -> Result<(), SPI::Error> { self.interface.wait_until_idle(delay, IS_BUSY_LOW); Ok(()) } } impl<SPI, CS, BUSY, DC, RST, DELAY> Epd5in83<SPI, CS, BUSY, DC, RST, DELAY> where SPI: Write<u8>, CS: OutputPin, BUSY: InputPin, DC: OutputPin, RST: OutputPin, DELAY: DelayUs<u32>, { fn command(&mut self, spi: &mut SPI, command: 
Command) -> Result<(), SPI::Error> { self.interface.cmd(spi, command) } fn send_data(&mut self, spi: &mut SPI, data: &[u8]) -> Result<(), SPI::Error> { self.interface.data(spi, data) } fn cmd_with_data( &mut self, spi: &mut SPI, command: Command, data: &[u8], ) -> Result<(), SPI::Error> { self.interface.cmd_with_data(spi, command, data) } fn send_resolution(&mut self, spi: &mut SPI) -> Result<(), SPI::Error> { let w = self.width(); let h = self.height(); self.command(spi, Command::TconResolution)?; self.send_data(spi, &[(w >> 8) as u8])?; self.send_data(spi, &[w as u8])?; self.send_data(spi, &[(h >> 8) as u8])?; self.send_data(spi, &[h as u8]) } } #[cfg(test)] mod tests { use super::*; #[test] fn epd_size() { assert_eq!(WIDTH, 648); assert_eq!(HEIGHT, 480); assert_eq!(DEFAULT_BACKGROUND_COLOR, Color::White); } }
use crate::atoms; use rustler::{Decoder, Error, NifResult, Term}; #[derive(PartialEq, Eq, Debug)] pub enum BuilderOption { AllLanguages, AllSpokenLanguages, AllLanguagesWithArabicScript, AllLanguagesWithCyrillicScript, AllLanguagesWithDevanagariScript, AllLanguagesWithLatinScript, WithLanguages, WithoutLanguages, } impl<'a> Decoder<'a> for BuilderOption { fn decode(term: Term<'a>) -> NifResult<Self> { if atoms::all_languages() == term { Ok(Self::AllLanguages) } else if atoms::all_spoken_languages() == term { Ok(Self::AllSpokenLanguages) } else if atoms::all_languages_with_arabic_script() == term { Ok(Self::AllLanguagesWithArabicScript) } else if atoms::all_languages_with_cyrillic_script() == term { Ok(Self::AllLanguagesWithCyrillicScript) } else if atoms::all_languages_with_devanagari_script() == term { Ok(Self::AllLanguagesWithDevanagariScript) } else if atoms::all_languages_with_latin_script() == term { Ok(Self::AllLanguagesWithLatinScript) } else if atoms::with_languages() == term { Ok(Self::WithLanguages) } else if atoms::without_languages() == term { Ok(Self::WithoutLanguages) } else { Err(Error::BadArg) } } }
use std::{collections::HashMap, net::IpAddr}; mod client; mod resolver; pub use client::*; pub use resolver::*; pub type IpTable = HashMap<IpAddr, String>;
use std::collections::{HashSet, VecDeque}; use std::time::Instant; const INPUT: &str = include_str!("../input.txt"); type Deck = VecDeque<usize>; fn load_decks() -> (Deck, Deck) { let mut lines = INPUT.lines(); lines.next(); // skip Player 1 let deck1: Deck = lines .by_ref() .take_while(|line| !line.is_empty()) .map(|line| line.parse().unwrap()) .collect(); lines.next(); // skip Player 2 let deck2: Deck = lines .take_while(|line| !line.is_empty()) .map(|line| line.parse().unwrap()) .collect(); (deck1, deck2) } #[derive(Debug)] enum Player { Player1, Player2, } fn calculate_score(deck: &Deck) -> usize { deck.iter() .rev() .enumerate() .map(|(i, card)| (i + 1) * card) .sum() } fn part1() -> usize { let (mut deck1, mut deck2) = load_decks(); while !deck1.is_empty() && !deck2.is_empty() { let p1_card = deck1.pop_front().unwrap(); let p2_card = deck2.pop_front().unwrap(); if p1_card > p2_card { deck1.push_back(p1_card); deck1.push_back(p2_card); } else { deck2.push_back(p2_card); deck2.push_back(p1_card); } } if deck1.is_empty() { calculate_score(&deck2) } else { calculate_score(&deck1) } } struct Game { player1_deck: Deck, player2_deck: Deck, } impl Game { fn new(player1_deck: Deck, player2_deck: Deck) -> Self { Self { player1_deck, player2_deck, } } fn play_game(&mut self) -> Player { let mut seen_rounds = HashSet::new(); loop { if !seen_rounds.insert((self.player1_deck.clone(), self.player2_deck.clone())) { return Player::Player1; } let p1_card = self.player1_deck.pop_front().unwrap(); let p2_card = self.player2_deck.pop_front().unwrap(); let p1_cards_remaining = self.player1_deck.len(); let p2_cards_remaining = self.player2_deck.len(); let round_winner = if p1_cards_remaining >= p1_card && p2_cards_remaining >= p2_card { Game::new( self.player1_deck.iter().copied().take(p1_card).collect(), self.player2_deck.iter().copied().take(p2_card).collect(), ) .play_game() } else if p1_card < p2_card { Player::Player2 } else { Player::Player1 }; match round_winner { 
Player::Player1 => { self.player1_deck.push_back(p1_card); self.player1_deck.push_back(p2_card); if self.player2_deck.is_empty() { return Player::Player1; } } Player::Player2 => { self.player2_deck.push_back(p2_card); self.player2_deck.push_back(p1_card); if self.player1_deck.is_empty() { return Player::Player2; } } }; } } } fn part2() -> usize { let (deck1, deck2) = load_decks(); let mut game = Game::new(deck1, deck2); match game.play_game() { Player::Player1 => calculate_score(&game.player1_deck), Player::Player2 => calculate_score(&game.player2_deck), } } fn main() { let start = Instant::now(); println!("part 1: {}", part1()); println!("part 1 took {}ms", (Instant::now() - start).as_millis()); let start = Instant::now(); println!("part 2: {}", part2()); println!("part 2 took {}ms", (Instant::now() - start).as_millis()); } #[cfg(test)] mod tests { use super::*; #[test] fn test_part1() { assert_eq!(part1(), 32401); } #[test] fn test_part2() { assert_eq!(part2(), 31436); } }
use futures::prelude::*; use state_machine_future::RentToOwn; use std::io; use std::marker::PhantomData; use super::{ Connection, encode, MAX_FRAGMENT_LEN, MAX_FRAGMENTS_PER_MESSAGE, MessageFragments, Packet, PacketData, recv_packet, }; /// A future used to receive a message from a connection. pub struct Receive<T> where T: AsMut<[u8]> { pub(crate) state: StateFuture<T>, } impl<T> Future for Receive<T> where T: AsMut<[u8]> { type Item = (Connection, T, usize); type Error = io::Error; fn poll(&mut self) -> Poll<Self::Item, Self::Error> { self.state.poll().map_err(|(error, _)| error) } } #[derive(StateMachineFuture)] #[allow(dead_code)] pub(crate) enum State<T> where T: AsMut<[u8]> { #[state_machine_future(start, transitions(Acknowledging))] Reading { connection: Connection, buffer: T, }, #[state_machine_future(transitions(Ready))] Acknowledging { connection: Connection, buffer: T, message_len: usize, sequence_number: u32, }, #[state_machine_future(ready)] Ready((Connection, T, usize)), #[state_machine_future(error)] Error((io::Error, PhantomData<T>)), } impl<T> PollState<T> for State<T> where T: AsMut<[u8]> { fn poll_reading<'a>( reading: &'a mut RentToOwn<'a, Reading<T>>, ) -> Poll<AfterReading<T>, (io::Error, PhantomData<T>)> { let message_len; let sequence; loop { let reading = &mut **reading; let packet = match recv_packet( &reading.connection.socket, reading.connection.peer_address, &mut reading.connection.recv_buffer, ) { Ok(packet) => { packet } Err(error) => { if error.kind() == io::ErrorKind::WouldBlock { return Ok(Async::NotReady); } // HACK: We should be able to use `try_nb!` here, but since we need to bundle // the error with some `PhantomData` we end up having to do this manually. 
return Err((error, PhantomData)); } }; match packet.data { PacketData::Message { sequence_number, fragment, num_fragments, fragment_number, } => { // If there's only one fragment in the message, treat it as a special // case and return it directly, to avoid the overhead of stuffing it // into the fragments map. if num_fragments == 1 { // Copy the fragment data into the output buffer. let dest = &mut reading.buffer.as_mut()[.. fragment.len()]; dest.copy_from_slice(fragment); // Set the message's length to the length of the fragment. message_len = fragment.len(); sequence = sequence_number; break; } // Retrieve the map containing the fragments that we have received let message = reading.connection.fragments .entry(sequence_number) .or_insert_with(|| MessageFragments { num_fragments, received: 0, bytes_received: 0, fragments: [false; MAX_FRAGMENTS_PER_MESSAGE], }); // If the packet specifies a different number of fragments than the // first packet we received for this message, then discard it. if num_fragments != message.num_fragments { continue; } // If the fragment number is outside the valid range for this mesage, // then discard it. if fragment_number >= message.num_fragments { continue; } // If we haven't already received this fragment, insert it into the // message. if !message.fragments[fragment_number as usize] { // Copy the fragment into the corresponding part of the output // buffer. let fragment_start = fragment_number as usize * MAX_FRAGMENT_LEN; let fragment_end = fragment_start + fragment.len(); let buffer = &mut reading.buffer.as_mut()[fragment_start .. fragment_end]; buffer.copy_from_slice(fragment); // Update the tracking of which fragments we have received so far. message.fragments[fragment_number as usize] = true; message.bytes_received += fragment.len(); message.received += 1; } // Check if we have received all the fragments. If we have, then build // the message from the fragments. 
if message.received == message.num_fragments { // Verify that we have actually received all of the fragments. for received in &message.fragments[.. message.num_fragments as usize] { assert!(received, "We somehow missed a fragment of the message"); } message_len = message.bytes_received; sequence = sequence_number; break; } } PacketData::ConnectionRequest | PacketData::Challenge(..) | PacketData::ChallengeResponse(..) | PacketData::ConnectionAccepted | PacketData::Ack(..) => { continue; } } } let Reading { mut connection, buffer } = reading.take(); // Write the ack packet to the send buffer. encode( Packet { connection_id: connection.connection_id, data: PacketData::Ack(sequence), }, &mut connection.send_buffer, ).unwrap(); return Ok(Async::Ready(Acknowledging { connection, buffer, message_len, sequence_number: sequence, }.into())); } fn poll_acknowledging<'a>( ack: &'a mut RentToOwn<'a, Acknowledging<T>>, ) -> Poll<AfterAcknowledging<T>, (io::Error, PhantomData<T>)> { { let ack = &mut **ack; let send_result = ack.connection.socket.send_to( &ack.connection.send_buffer, &ack.connection.peer_address, ); let bytes_sent = match send_result { Ok(bytes) => { bytes } Err(error) => { if error.kind() == io::ErrorKind::WouldBlock { return Ok(Async::NotReady); } // HACK: We should be able to use `try_nb!` here, but since we need to bundle // the error with some `PhantomData` we end up having to do this manually. return Err((error, PhantomData)); } }; // If we send a datagram that doesn't include all the bytes in the packet, // then an error occurred. if bytes_sent != ack.connection.send_buffer.len() { return Err(( io::Error::new( io::ErrorKind::Other, "Failed to send all bytes of the fragment", ), PhantomData )); } } // Transition to the `Ready` state. let Acknowledging { connection, buffer, message_len, .. } = ack.take(); Ok(Async::Ready(Ready((connection, buffer, message_len)).into())) } }
// Copyright 2014 nerd-games.com. // // Licensed under the Apache License, Version 2.0. // See: http://www.apache.org/licenses/LICENSE-2.0 // This file may only be copied, modified and distributed according to those terms. /*#![macro_escape] use super::vector::*; use std::mem; #[macro_export] macro_rules! mat3 ( ($x:expr, $y:expr, $z:expr) => ( Mat3{x: $x, y: $y, z: $z} ); ($v00:expr, $v01:expr, $v02:expr, $v10:expr, $v11:expr, $v12:expr, $v20:expr, $v21:expr, $v22:expr) => ( Mat3{x: vec3!($v00, $v01, $v02), y: vec3!($v10, $v11, $v12), z: vec3!($v20, $v21, $v22)} ) ) #[macro_export] macro_rules! mat4 ( ($x:expr, $y:expr, $z:expr, $w:expr) => ( Mat3{x: $x, y: $y, z: $z, w: $w} ); ($v00:expr, $v01:expr, $v02:expr, $v03:expr, $v10:expr, $v11:expr, $v12:expr, $v13:expr, $v20:expr, $v21:expr, $v22:expr, $v23:expr, $v30:expr, $v31:expr, $v32:expr, $v33:expr) => ( Mat4{x: vec4!($v00, $v01, $v02, $v03), y: vec4!($v10, $v11, $v12, $v13), z: vec4!($v20, $v21, $v22, $v23), w: vec4!($v30, $v31, $v32, $v33)} ) ) pub struct Mat3<T: Add<T, T> + Sub<T, T> + Mul<T, T>> { x: Vec3<T>, y: Vec3<T>, z: Vec3<T> } pub struct Mat4<T: Add<T, T> + Sub<T, T> + Mul<T, T>> { x: Vec4<T>, y: Vec4<T>, z: Vec4<T>, w: Vec4<T> } /*impl <T: Add<T, T> + Sub<T, T> + Mul<T, T>> Index<uint, [T, ..3]> for Mat3<T> { fn index<'a>(&'a self, index: &uint) -> &'a [T, ..3] { &self.as_array()[*index] } }*/ impl <T: Add<T, T> + Sub<T, T> + Mul<T, T>> Mul<Mat3<T>, Mat3<T>> for Mat3<T> { fn mul(&self, other: &Mat3<T>) -> Mat3<T> { mat3!(self[0][0] * other[0][0] + self[1][0] * other[0][1] + self[2][0] * other[0][2], self[0][0] * other[1][0] + self[1][0] * other[1][1] + self[2][0] * other[1][2], self[0][0] * other[2][0] + self[1][0] * other[2][1] + self[2][0] * other[2][2], self[0][1] * other[0][0] + self[1][1] * other[0][1] + self[2][1] * other[0][2], self[0][1] * other[1][0] + self[1][1] * other[1][1] + self[2][1] * other[1][2], self[0][1] * other[2][0] + self[1][1] * other[2][1] + self[2][1] * other[2][2], 
self[0][2] * other[0][0] + self[1][2] * other[0][1] + self[2][2] * other[0][2], self[0][2] * other[1][0] + self[1][2] * other[1][1] + self[2][2] * other[1][2], self[0][2] * other[2][0] + self[1][2] * other[2][1] + self[2][2] * other[2][2]) } } /*impl <T: Add<T, T> + Sub<T, T> + Mul<T, T>> Mul<Vec3<T>, Vec3<T>> for Mat3<T> { fn mul(&self, other: &Vec3<T>) -> Vec3<T> { vec3!(self[0][0]*other[0] + self[1][1]*other[1] + self[2][0]*other[2], self[0][1]*other[0] + self[1][1]*other[1] + self[2][1]*other[2], self[0][2]*other[0] + self[1][2]*other[1] + self[2][2]*other[2]) } }*/ /*impl <T: Add<T, T> + Sub<T, T> + Mul<T, T>> Index<uint, [T, ..4]> for Mat4<T> { fn index<'a>(&'a self, index: &uint) -> &'a [T, ..4] { &self.as_array()[*index] } }*/ impl <T: Add<T, T> + Sub<T, T> + Mul<T, T>> Mul<Mat4<T>, Mat4<T>> for Mat4<T> { fn mul(&self, other: &Mat4<T>) -> Mat4<T> { mat4!(self[0][0] * other[0][0] + self[1][0] * other[0][1] + self[2][0] * other[0][2] + self[3][0] * other[0][3], self[0][0] * other[1][0] + self[1][0] * other[1][1] + self[2][0] * other[1][2] + self[3][0] * other[1][3], self[0][0] * other[2][0] + self[1][0] * other[2][1] + self[2][0] * other[2][2] + self[3][0] * other[2][3], self[0][0] * other[3][0] + self[1][0] * other[3][1] + self[2][0] * other[3][2] + self[3][0] * other[3][3], self[0][1] * other[0][0] + self[1][1] * other[0][1] + self[2][1] * other[0][2] + self[3][1] * other[0][3], self[0][1] * other[1][0] + self[1][1] * other[1][1] + self[2][1] * other[1][2] + self[3][1] * other[1][3], self[0][1] * other[2][0] + self[1][1] * other[2][1] + self[2][1] * other[2][2] + self[3][1] * other[2][3], self[0][1] * other[3][0] + self[1][1] * other[3][1] + self[2][1] * other[3][2] + self[3][1] * other[3][3], self[0][2] * other[0][0] + self[1][2] * other[0][1] + self[2][2] * other[0][2] + self[3][2] * other[0][3], self[0][2] * other[1][0] + self[1][2] * other[1][1] + self[2][2] * other[1][2] + self[3][2] * other[1][3], self[0][2] * other[2][0] + self[1][2] * other[2][1] + 
self[2][2] * other[2][2] + self[3][2] * other[2][3], self[0][2] * other[3][0] + self[1][2] * other[3][1] + self[2][2] * other[3][2] + self[3][2] * other[3][3], self[0][3] * other[0][0] + self[1][3] * other[0][1] + self[2][3] * other[0][2] + self[3][3] * other[0][3], self[0][3] * other[1][0] + self[1][3] * other[1][1] + self[2][3] * other[1][2] + self[3][3] * other[1][3], self[0][3] * other[2][0] + self[1][3] * other[2][1] + self[2][3] * other[2][2] + self[3][3] * other[2][3], self[0][3] * other[3][0] + self[1][3] * other[3][1] + self[2][3] * other[3][2] + self[3][3] * other[3][3]) } } /*impl <T: Add<T, T> + Sub<T, T> + Mul<T, T>> Mat4<T> { fn as_array(&self) -> [[T, ..4], ..4] { unsafe { mem::transmute(self) } } }*/ /*impl <T: Add<T, T> + Sub<T, T> + Mul<T, T>> Mul<Vec4<T>, Vec4<T>> for Mat4<T> { fn mul(&self, other: &Vec4<T>) -> Vec4<T> { vec4!(self[0][0]*other[0] + self[1][0]*other[1] + self[2][0]*other[2] + self[3][0]*other[3], self[0][1]*other[0] + self[1][1]*other[1] + self[2][1]*other[2] + self[3][1]*other[3], self[0][2]*other[0] + self[1][2]*other[1] + self[2][2]*other[2] + self[3][2]*other[3], self[0][3]*other[0] + self[1][3]*other[1] + self[2][3]*other[2] + self[3][3]*other[3]) } } */ */
//! A framework for pre and post processing machine intelligence based data extern crate image; extern crate parenchyma; pub use self::transformer::Transformer; mod transformer; mod transformers;
//! Register accessors for `DDRPHYC_PTR1`, in the svd2rust style: `R` reads the
//! whole 32-bit register, `W` writes it, and per-field proxies mask/shift the
//! individual bit fields (TDINIT0 in bits 0..=18, TDINIT1 in bits 19..=26).

#[doc = "Reader of register DDRPHYC_PTR1"]
pub type R = crate::R<u32, super::DDRPHYC_PTR1>;
#[doc = "Writer for register DDRPHYC_PTR1"]
pub type W = crate::W<u32, super::DDRPHYC_PTR1>;
#[doc = "Register DDRPHYC_PTR1 `reset()`'s with value 0x0604_111d"]
impl crate::ResetValue for super::DDRPHYC_PTR1 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Hardware reset value of the register.
        0x0604_111d
    }
}
#[doc = "Reader of field `TDINIT0`"]
pub type TDINIT0_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `TDINIT0`"]
pub struct TDINIT0_W<'a> {
    w: &'a mut W,
}
impl<'a> TDINIT0_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // TDINIT0 occupies bits 0..=18 (19 bits): clear them, then OR in the masked value.
        self.w.bits = (self.w.bits & !0x0007_ffff) | ((value as u32) & 0x0007_ffff);
        self.w
    }
}
#[doc = "Reader of field `TDINIT1`"]
pub type TDINIT1_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TDINIT1`"]
pub struct TDINIT1_W<'a> {
    w: &'a mut W,
}
impl<'a> TDINIT1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // TDINIT1 occupies bits 19..=26 (8 bits): clear them, then OR in the shifted value.
        self.w.bits = (self.w.bits & !(0xff << 19)) | (((value as u32) & 0xff) << 19);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:18 - TDINIT0"]
    #[inline(always)]
    pub fn tdinit0(&self) -> TDINIT0_R {
        TDINIT0_R::new((self.bits & 0x0007_ffff) as u32)
    }
    #[doc = "Bits 19:26 - TDINIT1"]
    #[inline(always)]
    pub fn tdinit1(&self) -> TDINIT1_R {
        TDINIT1_R::new(((self.bits >> 19) & 0xff) as u8)
    }
}
impl W {
    #[doc = "Bits 0:18 - TDINIT0"]
    #[inline(always)]
    pub fn tdinit0(&mut self) -> TDINIT0_W {
        TDINIT0_W { w: self }
    }
    #[doc = "Bits 19:26 - TDINIT1"]
    #[inline(always)]
    pub fn tdinit1(&mut self) -> TDINIT1_W {
        TDINIT1_W { w: self }
    }
}
extern crate rand; use std::{thread, time}; use rand::{thread_rng, Rng}; trait Observer { fn update(&self, generator: Box<&NumberGenerator>); } struct DigitObserver {} impl DigitObserver { fn new() -> DigitObserver { DigitObserver {} } } impl Observer for DigitObserver { fn update(&self, generator: Box<&NumberGenerator>) { println!("DigitObserver: {}", generator.get_number()); thread::sleep(time::Duration::from_millis(100)); } } struct GraphObserver {} impl GraphObserver { fn new() -> GraphObserver { GraphObserver {} } } impl Observer for GraphObserver { fn update(&self, generator: Box<&NumberGenerator>) { print!("GraphObserver:"); for _ in 0..generator.get_number() { print!("*"); } println!(""); thread::sleep(time::Duration::from_millis(100)); } } trait NumberGenerator { fn get_number(&self) -> u32; fn execute(&mut self); } struct RandomNumberGenerator { observers: Vec<Box<Observer>>, number: u32, rng: rand::ThreadRng, } impl RandomNumberGenerator { fn new() -> RandomNumberGenerator { RandomNumberGenerator { observers: Vec::new(), number: 0, rng: thread_rng(), } } fn add_observer(&mut self, observer: Box<Observer>) { self.observers.push(observer); } fn notify_observers(&self) { for observer in &self.observers { observer.update(Box::new(self)); } } } impl NumberGenerator for RandomNumberGenerator { fn get_number(&self) -> u32 { self.number } fn execute(&mut self) { for _ in 0..20 { self.number = self.rng.gen_range(0, 50); self.notify_observers(); } } } fn main() { let mut generator = Box::new(RandomNumberGenerator::new()); let observer1 = Box::new(DigitObserver::new()); let observer2 = Box::new(GraphObserver::new()); generator.add_observer(observer1); generator.add_observer(observer2); generator.execute(); }
// Copyright 2019, 2020 Wingchain // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::error::Error; use std::fmt::Debug; use futures::channel::mpsc::TrySendError; use primitives::errors::{CommonError, CommonErrorKind, Display}; #[derive(Debug, Display)] pub enum ErrorKind { #[display(fmt = "Time error")] Time, #[display(fmt = "Verify proof error: {}", _0)] VerifyProofError(String), #[display(fmt = "Data error: {}", _0)] Data(String), #[display(fmt = "TxPool error: {}", _0)] TxPool(String), #[display(fmt = "Channel error: {:?}", _0)] Channel(Box<dyn Error + Send + Sync>), #[display(fmt = "Config error: {}", _0)] Config(String), } impl Error for ErrorKind {} impl From<ErrorKind> for CommonError { fn from(error: ErrorKind) -> Self { CommonError::new(CommonErrorKind::Consensus, Box::new(error)) } } pub fn map_channel_err<T: Send + Sync + 'static>(error: TrySendError<T>) -> CommonError { ErrorKind::Channel(Box::new(error)).into() }
#[cfg(nightly)]
pub use std::intrinsics::{likely, unlikely};

/// Stand-in for the improbable branch: a no-op whose `#[cold]` attribute
/// tells the optimizer to keep call sites of it off the hot path.
#[cfg(not(nightly))]
#[inline]
#[cold]
const fn cold() {}

/// Branch-prediction hint: the caller expects `b` to be `true`.
/// Returns `b` unchanged.
#[cfg(not(nightly))]
#[inline]
pub const fn likely(b: bool) -> bool {
    match b {
        true => {}
        false => cold(),
    }
    b
}

/// Branch-prediction hint: the caller expects `b` to be `false`.
/// Returns `b` unchanged.
#[cfg(not(nightly))]
#[inline]
pub const fn unlikely(b: bool) -> bool {
    match b {
        true => cold(),
        false => {}
    }
    b
}
use cocoa::base::id;
use core_foundation::date::CFTimeInterval;

/// The `MTLDrawable` protocol defines the interface for an object that represents
/// a displayable resource that can be used as a destination for rendering commands.
pub trait MTLDrawable {
    /// Displays the displayable resource.
    ///
    /// # Discussion
    ///
    /// This method is usually invoked by the command buffer’s scheduled handler.
    ///
    /// # Availability
    ///
    /// Available in iOS 8.0 and later.
    ///
    /// # Safety
    ///
    /// `self` must be a valid Objective-C object conforming to `MTLDrawable`.
    unsafe fn present(self);

    /// Displays the displayable resource at a given host time.
    ///
    /// # Parameters
    ///
    /// `presentationTime` - Time to display the resource, in seconds.
    ///
    /// # Discussion
    ///
    /// This method is usually invoked by the command buffer’s scheduled handler.
    ///
    /// # Availability
    ///
    /// Available in iOS 8.0 and later.
    ///
    /// # Safety
    ///
    /// `self` must be a valid Objective-C object conforming to `MTLDrawable`.
    unsafe fn presentAtTime(self, presentationTime: CFTimeInterval);
}

// Implementation on raw Objective-C object pointers (`id`): each method simply
// forwards to the corresponding selector via `msg_send!`.
impl MTLDrawable for id {
    unsafe fn present(self) {
        msg_send![self, present]
    }

    unsafe fn presentAtTime(self, presentationTime: CFTimeInterval) {
        msg_send![self, presentAtTime:presentationTime]
    }
}
#[doc = r"Register block"] #[repr(C)] pub struct RegisterBlock { #[doc = "0x00 - IPCC Processor 1 control register"] pub c1cr: C1CR, #[doc = "0x04 - IPCC Processor 1 mask register"] pub c1mr: C1MR, #[doc = "0x08 - Reading this register will always return 0x0000 0000."] pub c1scr: C1SCR, #[doc = "0x0c - IPCC processor 1 to processor 2 status register"] pub c1toc2sr: C1TOC2SR, #[doc = "0x10 - IPCC Processor 2 control register"] pub c2cr: C2CR, #[doc = "0x14 - IPCC Processor 2 mask register"] pub c2mr: C2MR, #[doc = "0x18 - Reading this register will always return 0x0000 0000."] pub c2scr: C2SCR, #[doc = "0x1c - IPCC processor 2 to processor 1 status register"] pub c2toc1sr: C2TOC1SR, _reserved8: [u8; 0x03d0], #[doc = "0x3f0 - IPCC Hardware configuration register"] pub hwcfgr: HWCFGR, #[doc = "0x3f4 - IPCC IP Version register"] pub verr: VERR, #[doc = "0x3f8 - IPCC IP Identification register"] pub ipidr: IPIDR, #[doc = "0x3fc - IPCC Size ID register"] pub sidr: SIDR, } #[doc = "C1CR (rw) register accessor: IPCC Processor 1 control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c1cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`c1cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`c1cr`] module"] pub type C1CR = crate::Reg<c1cr::C1CR_SPEC>; #[doc = "IPCC Processor 1 control register"] pub mod c1cr; #[doc = "C1MR (rw) register accessor: IPCC Processor 1 mask register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c1mr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`c1mr::W`]. 
You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`c1mr`] module"] pub type C1MR = crate::Reg<c1mr::C1MR_SPEC>; #[doc = "IPCC Processor 1 mask register"] pub mod c1mr; #[doc = "C1SCR (rw) register accessor: Reading this register will always return 0x0000 0000.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c1scr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`c1scr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`c1scr`] module"] pub type C1SCR = crate::Reg<c1scr::C1SCR_SPEC>; #[doc = "Reading this register will always return 0x0000 0000."] pub mod c1scr; #[doc = "C1TOC2SR (r) register accessor: IPCC processor 1 to processor 2 status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c1toc2sr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`c1toc2sr`] module"] pub type C1TOC2SR = crate::Reg<c1toc2sr::C1TOC2SR_SPEC>; #[doc = "IPCC processor 1 to processor 2 status register"] pub mod c1toc2sr; #[doc = "C2CR (rw) register accessor: IPCC Processor 2 control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c2cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`c2cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`c2cr`] module"] pub type C2CR = crate::Reg<c2cr::C2CR_SPEC>; #[doc = "IPCC Processor 2 control register"] pub mod c2cr; #[doc = "C2MR (rw) register accessor: IPCC Processor 2 mask register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c2mr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`c2mr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`c2mr`] module"] pub type C2MR = crate::Reg<c2mr::C2MR_SPEC>; #[doc = "IPCC Processor 2 mask register"] pub mod c2mr; #[doc = "C2SCR (rw) register accessor: Reading this register will always return 0x0000 0000.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c2scr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`c2scr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`c2scr`] module"] pub type C2SCR = crate::Reg<c2scr::C2SCR_SPEC>; #[doc = "Reading this register will always return 0x0000 0000."] pub mod c2scr; #[doc = "C2TOC1SR (r) register accessor: IPCC processor 2 to processor 1 status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c2toc1sr::R`]. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`c2toc1sr`] module"] pub type C2TOC1SR = crate::Reg<c2toc1sr::C2TOC1SR_SPEC>; #[doc = "IPCC processor 2 to processor 1 status register"] pub mod c2toc1sr; #[doc = "HWCFGR (r) register accessor: IPCC Hardware configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hwcfgr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`hwcfgr`] module"] pub type HWCFGR = crate::Reg<hwcfgr::HWCFGR_SPEC>; #[doc = "IPCC Hardware configuration register"] pub mod hwcfgr; #[doc = "VERR (r) register accessor: IPCC IP Version register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`verr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`verr`] module"] pub type VERR = crate::Reg<verr::VERR_SPEC>; #[doc = "IPCC IP Version register"] pub mod verr; #[doc = "IPIDR (r) register accessor: IPCC IP Identification register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ipidr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ipidr`] module"] pub type IPIDR = crate::Reg<ipidr::IPIDR_SPEC>; #[doc = "IPCC IP Identification register"] pub mod ipidr; #[doc = "SIDR (r) register accessor: IPCC Size ID register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sidr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`sidr`] module"] pub type SIDR = crate::Reg<sidr::SIDR_SPEC>; #[doc = "IPCC Size ID register"] pub mod sidr;
use super::base::*; use super::coder::*; use super::error::*; use super::lexer::*; use super::parser::*; use super::strutil::Strutil; pub struct HPU { pub path: std::path::PathBuf, pub command: Command, pub parser: Parser, pub lexer: Lexer, pub valid_line: usize, } impl HPU { pub fn new(path: &std::path::PathBuf) -> HPU { HPU { path: path.clone(), command: Command::new(), parser: Parser::new(), lexer: Lexer::new(), valid_line: 0, } } pub fn is_comment(s: &str) -> bool { let mut iter = s.chars(); let _1: char; match iter.next() { Some(ch) => { _1 = ch; } _ => { return false; } } let _2: char; match iter.next() { Some(ch) => { _2 = ch; } _ => { return false; } } if _1 == '/' && _2 == '/' { return true; } return false; } pub fn should_skip(s: &str) -> bool { if Strutil::empty_line(s) { return true; } else if HPU::is_comment(s) { return true; } else { return false; } } pub fn command_type(s: &String) -> CommandType { if s.starts_with("@") { return CommandType::ACommand; } else if s.starts_with("(") && s.ends_with(")") { return CommandType::LCommand; } else { return CommandType::CCommand; } } pub fn second_pass<'a>( &'a mut self, num: usize, line: &'a str, ) -> Result<String, Box<HackError>> { if HPU::should_skip(&line) { return Ok("".into()); } self.lexer.set(line)?; let mut parg = ParserArg { parser: Some(&mut self.parser), tokens: Some(Box::new(self.lexer.tokens.clone())), index: Some(Box::new(0)), content: line.into(), line_num: Some(Box::new(num)), }; match Parser::parse_command(&mut parg) { Ok(arg) => { let parser = arg.parser.as_mut().unwrap(); let result = parser.result.as_ref().unwrap(); match result.t.as_ref().unwrap() { CommandType::ACommand => Coder::translate_a( parser.map.as_mut().unwrap(), parser.varmem.as_mut().unwrap(), result.ar.as_ref().unwrap(), ), CommandType::CCommand => Coder::translate_c(result.cr.as_ref().unwrap()), CommandType::LCommand => Ok("".to_owned()), } } Err(e) => Err(e), } } pub fn first_pass<'a>( &'a mut self, data: &'a (usize, 
String), ) -> Result<(), Box<HackError>> { if HPU::should_skip(&data.1) { return Ok(()); } let t = HPU::command_type(&data.1); if CommandType::LCommand == t { self.lexer.set(&data.1)?; let mut parg = ParserArg { parser: Some(&mut self.parser), tokens: Some(Box::new(self.lexer.tokens.clone())), index: Some(Box::new(0)), content: data.1.clone(), line_num: Some(Box::new(data.0)), }; match Parser::parse_command(&mut parg) { Ok(arg) => { let parser = arg.parser.as_mut().unwrap(); parser.map.as_mut().unwrap().insert( parser .result .as_ref() .unwrap() .lr .as_ref() .unwrap() .label .clone(), self.valid_line, ); } Err(e) => { return Err(e); } } } else { self.valid_line += 1; } Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_comand_detection() { assert_eq!(Strutil::empty_line("\n"), true); assert_eq!(Strutil::empty_line(""), true); assert_eq!(HPU::is_comment(""), false); assert_eq!(HPU::is_comment("/"), false); assert_eq!(HPU::is_comment("//"), true); let a = String::from("@INFINITE_LOOP"); let c = String::from("M=-1"); let l = String::from("(INFINITE_LOOP)"); assert_eq!(HPU::command_type(&a), CommandType::ACommand); assert_eq!(HPU::command_type(&c), CommandType::CCommand); assert_eq!(HPU::command_type(&l), CommandType::LCommand); } #[test] fn test_parse_command() -> Result<(), Box<HackError>> { let mut lexer = Lexer::new(); let mut input = "@R15"; lexer.set(input)?; println!("{:?}", lexer.tokens); let mut parser = Parser::new(); let mut parg = ParserArg { parser: Some(&mut parser), tokens: Some(Box::new(lexer.tokens.clone())), index: Some(Box::new(0)), line_num: Some(Box::new(0)), content: input.into(), }; match Parser::parse_command(&mut parg) { Ok(r) => { assert_eq!(**r.index.as_ref().unwrap(), 2); } Err(e) => panic!("{}", e), } input = "( LABEL )"; lexer.set(input)?; println!("{:?}", lexer.tokens); let mut parg = ParserArg { parser: Some(&mut parser), tokens: Some(Box::new(lexer.tokens.clone())), index: Some(Box::new(0)), line_num: Some(Box::new(0)), 
content: input.into(), }; match Parser::parse_command(&mut parg) { Ok(r) => { assert_eq!(**r.index.as_ref().unwrap(), 3); } Err(e) => panic!("{}", e), } input = "MD=M-1;JMP"; lexer.set(input)?; println!("{:?}", lexer.tokens); let mut parg = ParserArg { parser: Some(&mut parser), tokens: Some(Box::new(lexer.tokens.clone())), index: Some(Box::new(0)), line_num: Some(Box::new(0)), content: input.into(), }; match Parser::parse_command(&mut parg) { Ok(r) => { assert_eq!(**r.index.as_ref().unwrap(), 5); Ok(()) } Err(e) => panic!("{}", e), } } }
#[doc = "Reader of register SYSTICK_CTL"] pub type R = crate::R<u32, super::SYSTICK_CTL>; #[doc = "Writer for register SYSTICK_CTL"] pub type W = crate::W<u32, super::SYSTICK_CTL>; #[doc = "Register SYSTICK_CTL `reset()`'s with value 0x4000_0147"] impl crate::ResetValue for super::SYSTICK_CTL { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x4000_0147 } } #[doc = "Reader of field `TENMS`"] pub type TENMS_R = crate::R<u32, u32>; #[doc = "Write proxy for field `TENMS`"] pub struct TENMS_W<'a> { w: &'a mut W, } impl<'a> TENMS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0x00ff_ffff) | ((value as u32) & 0x00ff_ffff); self.w } } #[doc = "Reader of field `CLOCK_SOURCE`"] pub type CLOCK_SOURCE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `CLOCK_SOURCE`"] pub struct CLOCK_SOURCE_W<'a> { w: &'a mut W, } impl<'a> CLOCK_SOURCE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 24)) | (((value as u32) & 0x03) << 24); self.w } } #[doc = "Reader of field `SKEW`"] pub type SKEW_R = crate::R<bool, bool>; #[doc = "Write proxy for field `SKEW`"] pub struct SKEW_W<'a> { w: &'a mut W, } impl<'a> SKEW_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "Reader of field `NOREF`"] pub type NOREF_R = crate::R<bool, bool>; #[doc = "Write proxy for field `NOREF`"] pub struct NOREF_W<'a> { w: &'a mut W, } impl<'a> NOREF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) 
-> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:23 - Specifies the number of clock source cycles (minus 1) that make up 10 ms. E.g., for a 32,768 Hz reference clock, TENMS is 328 - 1 = 327."] #[inline(always)] pub fn tenms(&self) -> TENMS_R { TENMS_R::new((self.bits & 0x00ff_ffff) as u32) } #[doc = "Bits 24:25 - Specifies an external clock source: '0': The low frequency clock 'clk_lf' is selected. The precision of this clock depends on whether the low frequency clock source is a SRSS internal RC oscillator (imprecise) or a device external crystal oscillator (precise). '1': The internal main oscillator (IMO) clock 'clk_imo' is selected. The MXS40 platform uses a fixed frequency IMO clock. o '2': The external crystal oscillator (ECO) clock 'clk_eco' is selected. '3': The SRSS 'clk_timer' is selected ('clk_timer' is a divided/gated version of 'clk_hf' or 'clk_imo'). Note: If NOREF is '1', the CLOCK_SOURCE value is NOT used. Note: It is SW's responsibility to provide the correct NOREF, SKEW and TENMS field values for the selected clock source."] #[inline(always)] pub fn clock_source(&self) -> CLOCK_SOURCE_R { CLOCK_SOURCE_R::new(((self.bits >> 24) & 0x03) as u8) } #[doc = "Bit 30 - Specifies the precision of the clock source and if the TENMS field represents exactly 10 ms (clock source frequency is a multiple of 100 Hz). This affects the suitability of the SysTick timer as a SW real-time clock: '0': Precise. '1': Imprecise."] #[inline(always)] pub fn skew(&self) -> SKEW_R { SKEW_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - Specifies if an external clock source is provided: '0': An external clock source is provided. 
'1': An external clock source is NOT provided and only the CPU internal clock can be used as SysTick timer clock source."] #[inline(always)] pub fn noref(&self) -> NOREF_R { NOREF_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:23 - Specifies the number of clock source cycles (minus 1) that make up 10 ms. E.g., for a 32,768 Hz reference clock, TENMS is 328 - 1 = 327."] #[inline(always)] pub fn tenms(&mut self) -> TENMS_W { TENMS_W { w: self } } #[doc = "Bits 24:25 - Specifies an external clock source: '0': The low frequency clock 'clk_lf' is selected. The precision of this clock depends on whether the low frequency clock source is a SRSS internal RC oscillator (imprecise) or a device external crystal oscillator (precise). '1': The internal main oscillator (IMO) clock 'clk_imo' is selected. The MXS40 platform uses a fixed frequency IMO clock. o '2': The external crystal oscillator (ECO) clock 'clk_eco' is selected. '3': The SRSS 'clk_timer' is selected ('clk_timer' is a divided/gated version of 'clk_hf' or 'clk_imo'). Note: If NOREF is '1', the CLOCK_SOURCE value is NOT used. Note: It is SW's responsibility to provide the correct NOREF, SKEW and TENMS field values for the selected clock source."] #[inline(always)] pub fn clock_source(&mut self) -> CLOCK_SOURCE_W { CLOCK_SOURCE_W { w: self } } #[doc = "Bit 30 - Specifies the precision of the clock source and if the TENMS field represents exactly 10 ms (clock source frequency is a multiple of 100 Hz). This affects the suitability of the SysTick timer as a SW real-time clock: '0': Precise. '1': Imprecise."] #[inline(always)] pub fn skew(&mut self) -> SKEW_W { SKEW_W { w: self } } #[doc = "Bit 31 - Specifies if an external clock source is provided: '0': An external clock source is provided. '1': An external clock source is NOT provided and only the CPU internal clock can be used as SysTick timer clock source."] #[inline(always)] pub fn noref(&mut self) -> NOREF_W { NOREF_W { w: self } } }
use crate::service::ApiKeyService; use actix_web::{web, HttpResponse}; use drogue_cloud_service_api::{ api::ApiKeyCreationOptions, auth::user::{ authn::{AuthenticationRequest, AuthenticationResponse, Outcome}, UserInformation, }, }; use std::ops::Deref; pub struct WebData<S: ApiKeyService> { pub service: S, } impl<S: ApiKeyService> Deref for WebData<S> { type Target = S; fn deref(&self) -> &Self::Target { &self.service } } pub async fn create<S>( user: UserInformation, service: web::Data<WebData<S>>, opts: web::Query<ApiKeyCreationOptions>, ) -> Result<HttpResponse, actix_web::Error> where S: ApiKeyService + 'static, { let result = match service.create(&user, opts.0).await { Ok(key) => Ok(HttpResponse::Ok().json(key)), Err(e) => Err(e.into()), }; result } pub async fn list<S>( user: UserInformation, service: web::Data<WebData<S>>, ) -> Result<HttpResponse, actix_web::Error> where S: ApiKeyService + 'static, { let result = match service.list(&user).await { Ok(outcome) => Ok(HttpResponse::Ok().json(outcome)), Err(e) => Err(e.into()), }; result } pub async fn delete<S>( prefix: web::Path<String>, user: UserInformation, service: web::Data<WebData<S>>, ) -> Result<HttpResponse, actix_web::Error> where S: ApiKeyService + 'static, { let result = match service.delete(&user, prefix.into_inner()).await { Ok(_) => Ok(HttpResponse::NoContent().finish()), Err(e) => Err(e.into()), }; result } /// Endpoint to authenticate a user key pub async fn authenticate<S>( req: web::Json<AuthenticationRequest>, service: web::Data<WebData<S>>, ) -> Result<HttpResponse, actix_web::Error> where S: ApiKeyService + 'static, { let result = match service.authenticate(&req.user_id, &req.api_key).await { Ok(Some(details)) => Ok(HttpResponse::Ok().json(AuthenticationResponse { outcome: Outcome::Known(details), })), Ok(None) => Ok(HttpResponse::Ok().json(AuthenticationResponse { outcome: Outcome::Unknown, })), Err(e) => Err(e.into()), }; result }
use crate::hal::gpio::{p0, Output, PushPull};

// Type aliases for the GPIO pins wired to the LCD controller.
// All are port-0 pins configured as push-pull outputs.
// NOTE(review): pin assignments (P0.25/P0.18/P0.26) presumably match the
// board schematic — confirm against the hardware documentation.

/// LCD chip-select pin (P0.25).
pub type LcdCsPin = p0::P0_25<Output<PushPull>>;
/// LCD data/command select pin (P0.18).
pub type LcdDcPin = p0::P0_18<Output<PushPull>>;
/// LCD reset pin (P0.26).
pub type LcdResetPin = p0::P0_26<Output<PushPull>>;
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

// Generated data models for an asset-conversion/ingestion REST API.
// The `#[serde(rename = ...)]` attributes define the JSON wire names and
// must not be changed; field names follow Rust snake_case locally.

/// Error codes an asset conversion job can report.
/// Serialized as SCREAMING_SNAKE_CASE strings on the wire.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ConversionErrorCode {
    #[serde(rename = "UNKNOWN")]
    Unknown,
    #[serde(rename = "NO_ERROR")]
    NoError,
    #[serde(rename = "SERVICE_ERROR")]
    ServiceError,
    #[serde(rename = "INVALID_ASSET_URI")]
    InvalidAssetUri,
    #[serde(rename = "INVALID_JOB_ID")]
    InvalidJobId,
    #[serde(rename = "INVALID_GRAVITY")]
    InvalidGravity,
    #[serde(rename = "INVALID_SCALE")]
    InvalidScale,
    #[serde(rename = "ASSET_SIZE_TOO_LARGE")]
    AssetSizeTooLarge,
    #[serde(rename = "ASSET_DIMENSIONS_OUT_OF_BOUNDS")]
    AssetDimensionsOutOfBounds,
    #[serde(rename = "ZERO_FACES")]
    ZeroFaces,
    #[serde(rename = "INVALID_FACE_VERTICES")]
    InvalidFaceVertices,
    #[serde(rename = "ZERO_TRAJECTORIES_GENERATED")]
    ZeroTrajectoriesGenerated,
    #[serde(rename = "TOO_MANY_RIG_POSES")]
    TooManyRigPoses,
    #[serde(rename = "ASSET_CANNOT_BE_CONVERTED")]
    AssetCannotBeConverted,
}

/// Lifecycle state of a conversion job.
/// Variants serialize under their Rust names (no renames).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum JobStatus {
    NotStarted,
    Running,
    Succeeded,
    Failed,
    Cancelled,
}

/// A 3-component vector (x, y, z).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Vector3 {
    pub x: f32,
    pub y: f32,
    pub z: f32,
}

/// A quaternion (x, y, z, w) with an optional identity flag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Quaternion {
    pub x: f32,
    pub y: f32,
    pub z: f32,
    pub w: f32,
    // Omitted from JSON when `None`.
    #[serde(rename = "isIdentity", default, skip_serializing_if = "Option::is_none")]
    pub is_identity: Option<bool>,
}

/// A rigid-body pose: rotation plus translation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Pose {
    pub rotation: Quaternion,
    pub translation: Vector3,
}

/// A 4-component vector (x, y, z, w).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Vector4 {
    pub x: f32,
    pub y: f32,
    pub z: f32,
    pub w: f32,
}

/// Parameters supplied for an asset ingestion job.
/// `gravity` and `scale` are required; everything else is optional or may
/// be empty (empty vectors are omitted from the serialized JSON).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IngestionConfiguration {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub dimensions: Option<Vector3>,
    #[serde(rename = "boundingBoxCenter", default, skip_serializing_if = "Option::is_none")]
    pub bounding_box_center: Option<Vector3>,
    pub gravity: Vector3,
    #[serde(rename = "keyFrameIndexes", default, skip_serializing_if = "Vec::is_empty")]
    pub key_frame_indexes: Vec<i32>,
    // Presumably the ground-truth trajectory ("gt") — TODO confirm with service docs.
    #[serde(rename = "gtTrajectory", default, skip_serializing_if = "Vec::is_empty")]
    pub gt_trajectory: Vec<Pose>,
    #[serde(rename = "principalAxis", default, skip_serializing_if = "Option::is_none")]
    pub principal_axis: Option<Quaternion>,
    pub scale: f32,
    #[serde(rename = "supportingPlane", default, skip_serializing_if = "Option::is_none")]
    pub supporting_plane: Option<Vector4>,
    #[serde(rename = "testTrajectory", default, skip_serializing_if = "Vec::is_empty")]
    pub test_trajectory: Vec<Pose>,
}

/// Status and result details of an ingestion job, as returned by the service.
/// All fields are optional; absent fields are omitted from the JSON.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IngestionProperties {
    #[serde(rename = "clientErrorDetails", default, skip_serializing_if = "Option::is_none")]
    pub client_error_details: Option<String>,
    #[serde(rename = "serverErrorDetails", default, skip_serializing_if = "Option::is_none")]
    pub server_error_details: Option<String>,
    #[serde(rename = "errorCode", default, skip_serializing_if = "Option::is_none")]
    pub error_code: Option<ConversionErrorCode>,
    #[serde(rename = "jobId", default, skip_serializing_if = "Option::is_none")]
    pub job_id: Option<String>,
    #[serde(rename = "outputModelUri", default, skip_serializing_if = "Option::is_none")]
    pub output_model_uri: Option<String>,
    #[serde(rename = "jobStatus", default, skip_serializing_if = "Option::is_none")]
    pub job_status: Option<JobStatus>,
    #[serde(rename = "assetFileType", default, skip_serializing_if = "Option::is_none")]
    pub asset_file_type: Option<String>,
    #[serde(rename = "inputAssetUri", default, skip_serializing_if = "Option::is_none")]
    pub input_asset_uri: Option<String>,
    #[serde(rename = "accountId", default, skip_serializing_if = "Option::is_none")]
    pub account_id: Option<String>,
    #[serde(rename = "ingestionConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub ingestion_configuration: Option<IngestionConfiguration>,
}

/// A service error: machine-readable `code` plus human-readable `message`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}

/// Top-level error envelope wrapping an [`Error`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<Error>,
}

/// Location the client should upload the input asset to.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UploadLocation {
    #[serde(rename = "inputAssetUri")]
    pub input_asset_uri: String,
}
use commitments::CommitmentScheme;

/// Pedersen commitment scheme.
///
/// Currently a placeholder: every trait method is a stub that panics via
/// `unimplemented!()`. The intended semantics below are inferred from the
/// `CommitmentScheme` trait's signatures — confirm against the trait docs.
struct PedersenCommitment {}

impl CommitmentScheme for PedersenCommitment {
    // Presumably generates `num_elements` group generators, each returned
    // as a serialized byte vector — TODO confirm.
    fn setup(num_elements: u32) -> Vec<Vec<u8>> {
        unimplemented!();
    }

    // Presumably produces a commitment to `messages` under `generators`
    // — TODO confirm (e.g. where the blinding factor comes from).
    fn commit(generators: &[&[u8]], messages: &[&[u8]]) -> Vec<u8> {
        unimplemented!();
    }

    // Presumably verifies that `commitment` opens to `messages` with
    // opening value `secret` under `generators` — TODO confirm.
    fn open(commitment: &[u8], secret: &[u8], generators: &[&[u8]], messages: &[&[u8]]) -> bool {
        unimplemented!();
    }
}
// Benchmark for a recursive-descent arithmetic expression parser built with
// nom's legacy macro API (nom 3.x style: named!/ws!/do_parse!/fold_many0!).
// Grammar: expr -> term (('+'|'-') term)* ; term -> factor (('*'|'/') factor)* ;
// factor -> integer | '(' expr ')'. Evaluation happens during parsing via folds.
#![feature(test)]
extern crate test;
#[macro_use]
extern crate nom;

use test::Bencher;
use nom::{IResult,digit};

// Parser definition

use std::str;
use std::str::FromStr;

// We parse any expr surrounded by parens, ignoring all whitespaces around those
named!(parens<i64>, ws!(delimited!( tag!("("), expr, tag!(")") )) );

// We transform an integer string into a i64, ignoring surrounding whitespaces
// We look for a digit suite, and try to convert it.
// If either str::from_utf8 or FromStr::from_str fail,
// we fallback to the parens parser defined above
named!(factor<i64>, alt!(
    map_res!(
      map_res!(
        ws!(digit),
        str::from_utf8
      ),
      FromStr::from_str
    )
  | parens
  )
);

// We read an initial factor and for each time we find
// a * or / operator followed by another factor, we do
// the math by folding everything
named!(term <i64>, do_parse!(
    init: factor >>
    // Note: i64 division truncates toward zero, so results depend on
    // left-to-right fold order — intentional here.
    res: fold_many0!(
        pair!(alt!(tag!("*") | tag!("/")), factor),
        init,
        |acc, (op, val): (&[u8], i64)| {
            if (op[0] as char) == '*' { acc * val } else { acc / val }
        }
    ) >>
    (res)
  )
);

// Same folding pattern one precedence level up: sums/differences of terms.
named!(expr <i64>, do_parse!(
    init: term >>
    res: fold_many0!(
        pair!(alt!(tag!("+") | tag!("-")), term),
        init,
        |acc, (op, val): (&[u8], i64)| {
            if (op[0] as char) == '+' { acc + val } else { acc - val }
        }
    ) >>
    (res)
  )
);

// Parses (and thereby evaluates) a fixed expression repeatedly; the one-off
// println! outside the timed closure shows the parse result for sanity.
#[bench]
fn arithmetic(b: &mut Bencher) {
  let data = &b" 2*2 / ( 5 - 1) + 3 / 4 * (2 - 7 + 567 *12 /2) + 3*(1+2*( 45 /2))";

  println!("parsed:\n{:?}", expr(&data[..]));
  b.iter(||{
    expr(&data[..])
  });
}
// Licensed under the 2-Clause BSD license <LICENSE or
// https://opensource.org/licenses/BSD-2-Clause>. This
// file may not be copied, modified, or distributed
// except according to those terms.

//! # Burst
//!
//! Burst is a library supporting decomposing binary code
//! into instructions, while maintaining detailed information
//! about the instructions, their flags, and the operands. The
//! result is a structure rather than textual strings.
//!
//! While Burst currently only supports x86 and x86_64 code,
//! this will change in the near future and we anticipate adding
//! many additional architectures.
//!
//! ## Goals of Burst:
//!
//! * Regular releases without waiting for long periods of time.
//! * Uses fuzz testing to avoid crashes.
//! * Well tested.
//! * Fast. Few allocations and little data copying should be required.
//!
//! ## Installation
//!
//! This crate works with Cargo and is on
//! [crates.io](https://crates.io/crates/burst).
//! Add it to your `Cargo.toml` like so:
//!
//! ```toml
//! [dependencies]
//! burst = "0.0.2"
//! ```
//!
//! Then, let `rustc` know that you're going to use this crate at the
//! top of your own crate:
//!
//! ```
//! extern crate burst;
//! # fn main() {}
//! ```
//!
//! ## Contributions
//!
//! Contributions are welcome.
//!

#![warn(missing_docs)]
#![deny(trivial_numeric_casts, unstable_features, unused_import_braces, unused_qualifications)]

pub mod x86;

/// An instruction operation.
///
/// This is a description of the actual CPU operation that the
/// instruction carries out.
pub trait Operation {
    /// The mnemonic for this instruction.
    fn mnemonic(&self) -> &str;
}

/// An operand for an `Instruction`.
pub trait Operand {}

/// A decoded instruction, including an `Operation` and its
/// `Operand`s.
///
/// An instruction represents the full amount of information that
/// we have about the instruction that has been disassembled from
/// the binary opcode data.
pub trait Instruction {
    /// The type of the operation for this instruction.
    type Operation: Operation;

    /// The type of the operands for this instruction.
    type Operand: Operand;

    /// The operation carried out by this instruction.
    fn operation(&self) -> Self::Operation;

    /// The mnemonic for this instruction.
    fn mnemonic(&self) -> &str;

    /// The operands for this instruction.
    fn operands(&self) -> &[Self::Operand];

    /// How many bytes in the binary opcode data are used by this
    /// instruction.
    ///
    /// This can be used to continue disassembling at the next
    /// instruction. An invalid instruction may have a value of
    /// `0` here.
    fn length(&self) -> usize;
}
extern crate advent_of_code_2017_day_2; use advent_of_code_2017_day_2::*; #[test] fn part_1_example() { // 5 1 9 5 // 7 5 3 // 2 4 6 8 // // - The first row's largest and smallest values are 9 and 1, and their difference is 8. // - The second row's largest and smallest values are 7 and 3, and their difference is 4. // - The third row's difference is 6. // // In this example, the spreadsheet's checksum would be 8 + 4 + 6 = 18. let input = "\ 5 1 9 5 7 5 3 2 4 6 8"; println!("{}", input); assert_eq!(solve_puzzle_part_1(input), "18"); } #[test] fn part_2_example() { // 5 9 2 8 // 9 4 7 3 // 3 8 6 5 // - In the first row, the only two numbers that evenly divide are 8 and 2; the result of // this division is 4. // - In the second row, the two numbers are 9 and 3; the result is 3. // - In the third row, the result is 2. // // In this example, the sum of the results would be 4 + 3 + 2 = 9. let input = "\ 5 9 2 8 9 4 7 3 3 8 6 5"; println!("{}", input); assert_eq!(solve_puzzle_part_2(input), "9"); }
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use crate::models::*; use reqwest::StatusCode; use snafu::{ResultExt, Snafu}; pub mod operations { use crate::models::*; use reqwest::StatusCode; use snafu::{ResultExt, Snafu}; pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> { let client = &operation_config.client; let uri_str = &format!("{}/providers/Microsoft.Cache/operations", &operation_config.base_path,); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(list::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = req_builder.build().context(list::BuildRequestError)?; let rsp = client.execute(req).await.context(list::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?; let rsp_value: OperationListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?; list::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod list { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, 
DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } } pub mod get { use crate::models::*; use reqwest::StatusCode; use snafu::{ResultExt, Snafu}; pub async fn operation_status( operation_config: &crate::OperationConfig, location: &str, operation_id: &str, subscription_id: &str, ) -> std::result::Result<OperationStatus, operation_status::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/providers/Microsoft.Cache/locations/{}/operationsStatus/{}", &operation_config.base_path, subscription_id, location, operation_id ); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(operation_status::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = req_builder.build().context(operation_status::BuildRequestError)?; let rsp = client.execute(req).await.context(operation_status::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(operation_status::ResponseBytesError)?; let rsp_value: OperationStatus = serde_json::from_slice(&body).context(operation_status::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(operation_status::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(operation_status::DeserializeError { body })?; operation_status::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod operation_status { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: 
StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } } pub mod redis_enterprise { use crate::models::*; use reqwest::StatusCode; use snafu::{ResultExt, Snafu}; pub async fn get( operation_config: &crate::OperationConfig, resource_group_name: &str, cluster_name: &str, subscription_id: &str, ) -> std::result::Result<Cluster, get::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}", &operation_config.base_path, subscription_id, resource_group_name, cluster_name ); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(get::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = req_builder.build().context(get::BuildRequestError)?; let rsp = client.execute(req).await.context(get::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?; let rsp_value: Cluster = serde_json::from_slice(&body).context(get::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?; get::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod get { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] 
#[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn create( operation_config: &crate::OperationConfig, resource_group_name: &str, cluster_name: &str, parameters: &Cluster, subscription_id: &str, ) -> std::result::Result<create::Response, create::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}", &operation_config.base_path, subscription_id, resource_group_name, cluster_name ); let mut req_builder = client.put(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(create::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.json(parameters); let req = req_builder.build().context(create::BuildRequestError)?; let rsp = client.execute(req).await.context(create::ExecuteRequestError)?; match rsp.status() { StatusCode::CREATED => { let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?; let rsp_value: Cluster = serde_json::from_slice(&body).context(create::DeserializeError { body })?; Ok(create::Response::Created201(rsp_value)) } StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?; let rsp_value: Cluster = serde_json::from_slice(&body).context(create::DeserializeError { body })?; Ok(create::Response::Ok200(rsp_value)) } status_code => { let body: bytes::Bytes = 
rsp.bytes().await.context(create::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(create::DeserializeError { body })?; create::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod create { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug)] pub enum Response { Created201(Cluster), Ok200(Cluster), } #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn update( operation_config: &crate::OperationConfig, resource_group_name: &str, cluster_name: &str, parameters: &ClusterUpdate, subscription_id: &str, ) -> std::result::Result<update::Response, update::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}", &operation_config.base_path, subscription_id, resource_group_name, cluster_name ); let mut req_builder = client.patch(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(update::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.json(parameters); let req = req_builder.build().context(update::BuildRequestError)?; let rsp = client.execute(req).await.context(update::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?; let 
rsp_value: Cluster = serde_json::from_slice(&body).context(update::DeserializeError { body })?; Ok(update::Response::Ok200(rsp_value)) } StatusCode::ACCEPTED => Ok(update::Response::Accepted202), status_code => { let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(update::DeserializeError { body })?; update::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod update { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug)] pub enum Response { Ok200(Cluster), Accepted202, } #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn delete( operation_config: &crate::OperationConfig, resource_group_name: &str, cluster_name: &str, subscription_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}", &operation_config.base_path, subscription_id, resource_group_name, cluster_name ); let mut req_builder = client.delete(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(delete::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = req_builder.build().context(delete::BuildRequestError)?; let rsp = 
client.execute(req).await.context(delete::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => Ok(delete::Response::Ok200), StatusCode::ACCEPTED => Ok(delete::Response::Accepted202), StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(delete::DeserializeError { body })?; delete::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod delete { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug)] pub enum Response { Ok200, Accepted202, NoContent204, } #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn list_by_resource_group( operation_config: &crate::OperationConfig, resource_group_name: &str, subscription_id: &str, ) -> std::result::Result<ClusterList, list_by_resource_group::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise", &operation_config.base_path, subscription_id, resource_group_name ); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(list_by_resource_group::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = 
req_builder.build().context(list_by_resource_group::BuildRequestError)?; let rsp = client.execute(req).await.context(list_by_resource_group::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?; let rsp_value: ClusterList = serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?; list_by_resource_group::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod list_by_resource_group { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn list(operation_config: &crate::OperationConfig, subscription_id: &str) -> std::result::Result<ClusterList, list::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/providers/Microsoft.Cache/redisEnterprise", &operation_config.base_path, subscription_id ); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(list::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", 
&operation_config.api_version)]); let req = req_builder.build().context(list::BuildRequestError)?; let rsp = client.execute(req).await.context(list::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?; let rsp_value: ClusterList = serde_json::from_slice(&body).context(list::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?; list::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod list { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } } pub mod databases { use crate::models::*; use reqwest::StatusCode; use snafu::{ResultExt, Snafu}; pub async fn list_by_cluster( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, cluster_name: &str, ) -> std::result::Result<DatabaseList, list_by_cluster::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/databases", &operation_config.base_path, subscription_id, resource_group_name, cluster_name ); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await 
.context(list_by_cluster::GetTokenError)?;
    req_builder = req_builder.bearer_auth(token_response.token.secret());
}
// NOTE(review): this file appears to be machine-generated Azure SDK client code
// (AutoRust style); manual edits are likely to be overwritten on regeneration.
// Tail of `list_by_cluster`: send the request and map the response —
// 200 deserializes to DatabaseList, anything else to ErrorResponse wrapped
// in DefaultResponse.
req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
let req = req_builder.build().context(list_by_cluster::BuildRequestError)?;
let rsp = client.execute(req).await.context(list_by_cluster::ExecuteRequestError)?;
match rsp.status() {
    StatusCode::OK => {
        let body: bytes::Bytes = rsp.bytes().await.context(list_by_cluster::ResponseBytesError)?;
        let rsp_value: DatabaseList =
            serde_json::from_slice(&body).context(list_by_cluster::DeserializeError { body })?;
        Ok(rsp_value)
    }
    status_code => {
        let body: bytes::Bytes = rsp.bytes().await.context(list_by_cluster::ResponseBytesError)?;
        let rsp_value: ErrorResponse =
            serde_json::from_slice(&body).context(list_by_cluster::DeserializeError { body })?;
        list_by_cluster::DefaultResponse {
            status_code,
            value: rsp_value,
        }
        .fail()
    }
}
}
// Error/context-selector module for `list_by_cluster` (snafu-generated selectors).
pub mod list_by_cluster {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
        BuildRequestError { source: reqwest::Error, },
        ExecuteRequestError { source: reqwest::Error, },
        ResponseBytesError { source: reqwest::Error, },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
        GetTokenError { source: azure_core::errors::AzureError, },
    }
}
// GET a single RedisEnterprise database. 200 => Database; any other status
// is surfaced as get::Error::DefaultResponse carrying the parsed ErrorResponse.
pub async fn get(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    cluster_name: &str,
    database_name: &str,
    subscription_id: &str,
) -> std::result::Result<Database, get::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/databases/{}",
        &operation_config.base_path, subscription_id, resource_group_name, cluster_name, database_name
    );
    let mut req_builder = client.get(uri_str);
    // Bearer-token auth is optional: only applied when a credential is configured.
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(get::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(get::BuildRequestError)?;
    let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
            let rsp_value: Database =
                serde_json::from_slice(&body).context(get::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
            let rsp_value: ErrorResponse =
                serde_json::from_slice(&body).context(get::DeserializeError { body })?;
            get::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
pub mod get {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
        BuildRequestError { source: reqwest::Error, },
        ExecuteRequestError { source: reqwest::Error, },
        ResponseBytesError { source: reqwest::Error, },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
        GetTokenError { source: azure_core::errors::AzureError, },
    }
}
// PUT (create) a database. Distinguishes 200 (Ok200) from 201 (Created201),
// both carrying the resulting Database body.
pub async fn create(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    cluster_name: &str,
    database_name: &str,
    parameters: &Database,
    subscription_id: &str,
) -> std::result::Result<create::Response, create::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/databases/{}",
        &operation_config.base_path, subscription_id, resource_group_name, cluster_name, database_name
    );
    let mut req_builder = client.put(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(create::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    // Request body: the full Database payload, serialized as JSON.
    req_builder = req_builder.json(parameters);
    let req = req_builder.build().context(create::BuildRequestError)?;
    let rsp = client.execute(req).await.context(create::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?;
            let rsp_value: Database =
                serde_json::from_slice(&body).context(create::DeserializeError { body })?;
            Ok(create::Response::Ok200(rsp_value))
        }
        StatusCode::CREATED => {
            let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?;
            let rsp_value: Database =
                serde_json::from_slice(&body).context(create::DeserializeError { body })?;
            Ok(create::Response::Created201(rsp_value))
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?;
            let rsp_value: ErrorResponse =
                serde_json::from_slice(&body).context(create::DeserializeError { body })?;
            create::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
pub mod create {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    // Success variants mirror the two success status codes of the PUT.
    #[derive(Debug)]
    pub enum Response {
        Ok200(Database),
        Created201(Database),
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
        BuildRequestError { source: reqwest::Error, },
        ExecuteRequestError { source: reqwest::Error, },
        ResponseBytesError { source: reqwest::Error, },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
        GetTokenError { source: azure_core::errors::AzureError, },
    }
}
// PATCH (update) a database. 200 carries the updated Database; 202 means the
// operation was accepted asynchronously with no body.
pub async fn update(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    cluster_name: &str,
    database_name: &str,
    parameters: &DatabaseUpdate,
    subscription_id: &str,
) -> std::result::Result<update::Response, update::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/databases/{}",
        &operation_config.base_path, subscription_id, resource_group_name, cluster_name, database_name
    );
    let mut req_builder = client.patch(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(update::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    req_builder = req_builder.json(parameters);
    let req = req_builder.build().context(update::BuildRequestError)?;
    let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
            let rsp_value: Database =
                serde_json::from_slice(&body).context(update::DeserializeError { body })?;
            Ok(update::Response::Ok200(rsp_value))
        }
        StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
            let rsp_value: ErrorResponse =
                serde_json::from_slice(&body).context(update::DeserializeError { body })?;
            update::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
pub mod update {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug)]
    pub enum Response {
        Ok200(Database),
        Accepted202,
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
        BuildRequestError { source: reqwest::Error, },
        ExecuteRequestError { source: reqwest::Error, },
        ResponseBytesError { source: reqwest::Error, },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
        GetTokenError { source: azure_core::errors::AzureError, },
    }
}
// DELETE a database. 200/202/204 are all treated as success (sync, async
// accepted, and already-gone respectively); no response body is read on success.
pub async fn delete(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    cluster_name: &str,
    database_name: &str,
    subscription_id: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/databases/{}",
        &operation_config.base_path, subscription_id, resource_group_name, cluster_name, database_name
    );
    let mut req_builder = client.delete(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(delete::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    let req = req_builder.build().context(delete::BuildRequestError)?;
    let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => Ok(delete::Response::Ok200),
        StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
            let rsp_value: ErrorResponse =
                serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
            delete::DefaultResponse {
                status_code,
                value: rsp_value,
            }
            .fail()
        }
    }
}
pub mod delete {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
        BuildRequestError { source: reqwest::Error, },
        ExecuteRequestError { source: reqwest::Error, },
        ResponseBytesError { source: reqwest::Error, },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
        GetTokenError { source: azure_core::errors::AzureError, },
    }
}
}
// Database-level actions (listKeys / regenerateKey / import / export) for a
// RedisEnterprise database; all are POSTs against the database sub-resource.
pub mod redis_enterprise_database {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    // POST .../listKeys — retrieve the access keys. Sends an explicit empty
    // body (Content-Length: 0) because POST requires a length header.
    pub async fn list_keys(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        cluster_name: &str,
        database_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<AccessKeys, list_keys::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/databases/{}/listKeys",
            &operation_config.base_path, subscription_id, resource_group_name, cluster_name, database_name
        );
        let mut req_builder = client.post(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_keys::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
        let req = req_builder.build().context(list_keys::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_keys::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_keys::ResponseBytesError)?;
                let rsp_value: AccessKeys =
                    serde_json::from_slice(&body).context(list_keys::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_keys::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(list_keys::DeserializeError { body })?;
                list_keys::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod list_keys {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
            BuildRequestError { source: reqwest::Error, },
            ExecuteRequestError { source: reqwest::Error, },
            ResponseBytesError { source: reqwest::Error, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
    // POST .../regenerateKey — 200 returns the new AccessKeys, 202 means
    // regeneration was accepted asynchronously.
    pub async fn regenerate_key(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        cluster_name: &str,
        database_name: &str,
        parameters: &RegenerateKeyParameters,
        subscription_id: &str,
    ) -> std::result::Result<regenerate_key::Response, regenerate_key::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/databases/{}/regenerateKey",
            &operation_config.base_path, subscription_id, resource_group_name, cluster_name, database_name
        );
        let mut req_builder = client.post(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(regenerate_key::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(regenerate_key::BuildRequestError)?;
        let rsp = client.execute(req).await.context(regenerate_key::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(regenerate_key::ResponseBytesError)?;
                let rsp_value: AccessKeys =
                    serde_json::from_slice(&body).context(regenerate_key::DeserializeError { body })?;
                Ok(regenerate_key::Response::Ok200(rsp_value))
            }
            StatusCode::ACCEPTED => Ok(regenerate_key::Response::Accepted202),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(regenerate_key::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(regenerate_key::DeserializeError { body })?;
                regenerate_key::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod regenerate_key {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200(AccessKeys),
            Accepted202,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
            BuildRequestError { source: reqwest::Error, },
            ExecuteRequestError { source: reqwest::Error, },
            ResponseBytesError { source: reqwest::Error, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
    // POST .../import — kick off an RDB import; success (200/202) carries no body.
    pub async fn import(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        cluster_name: &str,
        database_name: &str,
        parameters: &ImportClusterParameters,
        subscription_id: &str,
    ) -> std::result::Result<import::Response, import::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/databases/{}/import",
            &operation_config.base_path, subscription_id, resource_group_name, cluster_name, database_name
        );
        let mut req_builder = client.post(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(import::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(import::BuildRequestError)?;
        let rsp = client.execute(req).await.context(import::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(import::Response::Ok200),
            StatusCode::ACCEPTED => Ok(import::Response::Accepted202),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(import::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(import::DeserializeError { body })?;
                import::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod import {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
            BuildRequestError { source: reqwest::Error, },
            ExecuteRequestError { source: reqwest::Error, },
            ResponseBytesError { source: reqwest::Error, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
    // POST .../export — kick off an RDB export; mirrors `import` exactly.
    pub async fn export(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        cluster_name: &str,
        database_name: &str,
        parameters: &ExportClusterParameters,
        subscription_id: &str,
    ) -> std::result::Result<export::Response, export::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/databases/{}/export",
            &operation_config.base_path, subscription_id, resource_group_name, cluster_name, database_name
        );
        let mut req_builder = client.post(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(export::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(export::BuildRequestError)?;
        let rsp = client.execute(req).await.context(export::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(export::Response::Ok200),
            StatusCode::ACCEPTED => Ok(export::Response::Accepted202),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(export::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(export::DeserializeError { body })?;
                export::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod export {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Accepted202,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
            BuildRequestError { source: reqwest::Error, },
            ExecuteRequestError { source: reqwest::Error, },
            ResponseBytesError { source: reqwest::Error, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
}
// CRUD operations on private endpoint connections of a RedisEnterprise cluster.
pub mod private_endpoint_connections {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    // GET .../privateEndpointConnections — list all connections for a cluster.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        cluster_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<PrivateEndpointConnectionListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/privateEndpointConnections",
            &operation_config.base_path, subscription_id, resource_group_name, cluster_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: PrivateEndpointConnectionListResult =
                    serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                list::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
            BuildRequestError { source: reqwest::Error, },
            ExecuteRequestError { source: reqwest::Error, },
            ResponseBytesError { source: reqwest::Error, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
    // GET a single private endpoint connection by name.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        cluster_name: &str,
        private_endpoint_connection_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<PrivateEndpointConnection, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/privateEndpointConnections/{}",
            &operation_config.base_path, subscription_id, resource_group_name, cluster_name, private_endpoint_connection_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: PrivateEndpointConnection =
                    serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
            BuildRequestError { source: reqwest::Error, },
            ExecuteRequestError { source: reqwest::Error, },
            ResponseBytesError { source: reqwest::Error, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
    // PUT (create/update) a private endpoint connection.
    // NOTE(review): only 201 Created is treated as success here — a 200 OK
    // response would be reported as DefaultResponse; confirm against the
    // service spec / regenerate if the swagger has changed.
    pub async fn put(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        cluster_name: &str,
        subscription_id: &str,
        private_endpoint_connection_name: &str,
        properties: &PrivateEndpointConnection,
    ) -> std::result::Result<PrivateEndpointConnection, put::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/privateEndpointConnections/{}",
            &operation_config.base_path, subscription_id, resource_group_name, cluster_name, private_endpoint_connection_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(put::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(properties);
        let req = req_builder.build().context(put::BuildRequestError)?;
        let rsp = client.execute(req).await.context(put::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::CREATED => {
                let body: bytes::Bytes = rsp.bytes().await.context(put::ResponseBytesError)?;
                let rsp_value: PrivateEndpointConnection =
                    serde_json::from_slice(&body).context(put::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(put::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(put::DeserializeError { body })?;
                put::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod put {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
            BuildRequestError { source: reqwest::Error, },
            ExecuteRequestError { source: reqwest::Error, },
            ResponseBytesError { source: reqwest::Error, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
    // DELETE a private endpoint connection; 200 and 204 are both success.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        cluster_name: &str,
        subscription_id: &str,
        private_endpoint_connection_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/privateEndpointConnections/{}",
            &operation_config.base_path, subscription_id, resource_group_name, cluster_name, private_endpoint_connection_name
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
            BuildRequestError { source: reqwest::Error, },
            ExecuteRequestError { source: reqwest::Error, },
            ResponseBytesError { source: reqwest::Error, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
}
// Read-only listing of the private link resources exposed by a cluster.
pub mod private_link_resources {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    // GET .../privateLinkResources for a RedisEnterprise cluster.
    pub async fn list_by_redis_enterprise_cache(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        cluster_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<PrivateLinkResourceListResult, list_by_redis_enterprise_cache::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Cache/redisEnterprise/{}/privateLinkResources",
            &operation_config.base_path, subscription_id, resource_group_name, cluster_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_redis_enterprise_cache::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_by_redis_enterprise_cache::BuildRequestError)?;
        let rsp = client
            .execute(req)
            .await
            .context(list_by_redis_enterprise_cache::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_redis_enterprise_cache::ResponseBytesError)?;
                let rsp_value: PrivateLinkResourceListResult =
                    serde_json::from_slice(&body).context(list_by_redis_enterprise_cache::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_redis_enterprise_cache::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(list_by_redis_enterprise_cache::DeserializeError { body })?;
                list_by_redis_enterprise_cache::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    pub mod list_by_redis_enterprise_cache {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, },
            BuildRequestError { source: reqwest::Error, },
            ExecuteRequestError { source: reqwest::Error, },
            ResponseBytesError { source: reqwest::Error, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
}
// svd2rust-generated register accessors for FMC_CSQISR. `R` wraps a read
// snapshot of the 32-bit register; `W` collects writes via per-field proxies
// that mask-and-or the field's bit into the raw `bits` word.
// Fields (all single bits): TCF (bit 0), SCF (bit 1), SEF (bit 2),
// SUEF (bit 3), CMDTCF (bit 4).
#[doc = "Reader of register FMC_CSQISR"]
pub type R = crate::R<u32, super::FMC_CSQISR>;
#[doc = "Writer for register FMC_CSQISR"]
pub type W = crate::W<u32, super::FMC_CSQISR>;
#[doc = "Register FMC_CSQISR `reset()`'s with value 0"]
impl crate::ResetValue for super::FMC_CSQISR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `TCF`"]
pub type TCF_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TCF`"]
pub struct TCF_W<'a> {
    w: &'a mut W,
}
impl<'a> TCF_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then or in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `SCF`"]
pub type SCF_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SCF`"]
pub struct SCF_W<'a> {
    w: &'a mut W,
}
impl<'a> SCF_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 1, then or in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `SEF`"]
pub type SEF_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SEF`"]
pub struct SEF_W<'a> {
    w: &'a mut W,
}
impl<'a> SEF_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 2, then or in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `SUEF`"]
pub type SUEF_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SUEF`"]
pub struct SUEF_W<'a> {
    w: &'a mut W,
}
impl<'a> SUEF_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 3, then or in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `CMDTCF`"]
pub type CMDTCF_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMDTCF`"]
pub struct CMDTCF_W<'a> {
    w: &'a mut W,
}
impl<'a> CMDTCF_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 4, then or in the new value shifted into place.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
// Read accessors: extract each bit of the snapshotted register value.
impl R {
    #[doc = "Bit 0 - TCF"]
    #[inline(always)]
    pub fn tcf(&self) -> TCF_R {
        TCF_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - SCF"]
    #[inline(always)]
    pub fn scf(&self) -> SCF_R {
        SCF_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - SEF"]
    #[inline(always)]
    pub fn sef(&self) -> SEF_R {
        SEF_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - SUEF"]
    #[inline(always)]
    pub fn suef(&self) -> SUEF_R {
        SUEF_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - CMDTCF"]
    #[inline(always)]
    pub fn cmdtcf(&self) -> CMDTCF_R {
        CMDTCF_R::new(((self.bits >> 4) & 0x01) != 0)
    }
}
// Write accessors: each returns a proxy borrowing the writer.
impl W {
    #[doc = "Bit 0 - TCF"]
    #[inline(always)]
    pub fn tcf(&mut self) -> TCF_W {
        TCF_W { w: self }
    }
    #[doc = "Bit 1 - SCF"]
    #[inline(always)]
    pub fn scf(&mut self) -> SCF_W {
        SCF_W { w: self }
    }
    #[doc = "Bit 2 - SEF"]
    #[inline(always)]
    pub fn sef(&mut self) -> SEF_W {
        SEF_W { w: self }
    }
    #[doc = "Bit 3 - SUEF"]
    #[inline(always)]
    pub fn suef(&mut self) -> SUEF_W {
        SUEF_W { w: self }
    }
    #[doc = "Bit 4 - CMDTCF"]
    #[inline(always)]
    pub fn cmdtcf(&mut self) -> CMDTCF_W {
        CMDTCF_W { w: self }
    }
}
use std::sync::{Arc, Mutex, Condvar}; use std::process::exit; use sdl2; use sdl2::Sdl; use sdl2::event::Event; use sdl2::pixels; use sdl2::keyboard::Keycode; use sdl2::gfx::primitives::DrawRenderer; use sdl2::render::WindowCanvas; const SCREEN_WIDTH: u8 = 64; const SCREEN_HEIGHT: u8 = 32; const OUTPUT_WIDTH: u32 = 256; const OUTPUT_HEIGHT: u32 = 128; pub struct Graphics { context: Sdl, canvas: WindowCanvas, screen: [bool; 64 * 32], pub keys: [bool; 16], // Key pressed states } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum DrawResult { Collision, Success, } impl Graphics { // Construct a new Graphics struct. // Initializes sdl2 and defines an sdl context. pub fn new() -> Graphics { let sdl_context = sdl2::init().unwrap(); let video_subsys = sdl_context.video().unwrap(); let window = video_subsys.window("CHIP8", OUTPUT_WIDTH as u32, OUTPUT_HEIGHT as u32) .position_centered() .opengl() .build() .unwrap(); let mut canvas = window.into_canvas().build().unwrap(); canvas.set_draw_color(pixels::Color::RGB(0, 0, 0)); canvas.clear(); canvas.present(); Graphics { context: sdl_context, canvas: canvas, screen: [false; 64 * 32], keys: [false; 16], } } // Map an SDL Keycode enum to a CHIP8 key, if any. fn key_ind(&self, keycode: Keycode) -> Option<usize> { match keycode { Keycode::Num1 => Some(0x1), Keycode::Num2 => Some(0x2), Keycode::Num3 => Some(0x3), Keycode::Num4 => Some(0xC), Keycode::Q => Some(0x4), Keycode::W => Some(0x5), Keycode::E => Some(0x6), Keycode::R => Some(0xD), Keycode::A => Some(0x7), Keycode::S => Some(0x8), Keycode::D => Some(0x9), Keycode::F => Some(0xE), Keycode::Z => Some(0xA), Keycode::X => Some(0x0), Keycode::C => Some(0xB), Keycode::V => Some(0xF), _ => None, } } // Draw a CHIP8 sprite from a slice to (x, y). // If a collision occurs, return Collision. Otherwise, return Success. 
pub fn draw_sprite<'a>(&mut self, x: u8, y: u8, slice: &'a [u8]) -> DrawResult { let l = slice.len(); let mut collision = false; for i in 0..l { for j in 0..8 { let scy = (y as usize + i) % (SCREEN_HEIGHT as usize); let scx = (x as usize + j) % (SCREEN_WIDTH as usize); let scindex = scy * (SCREEN_WIDTH as usize) + scx; let set = (slice[i] >> (7 - j)) & 1; let set_bool = if set == 1 { true } else { false }; if self.screen[scindex] && set_bool { collision = true; } self.screen[scindex] ^= set_bool; } } for i in 0..(SCREEN_WIDTH as usize * SCREEN_HEIGHT as usize) { let cx = (i % (SCREEN_WIDTH as usize)) as i16; let cy = (i / (SCREEN_WIDTH as usize)) as i16; let mut color = pixels::Color::RGB(0, 0, 0); if self.screen[i] { color = pixels::Color::RGB(255, 255, 255); } for j in (cx*4)..(cx*4 + 4) { for k in (cy*4)..(cy*4 + 4) { self.canvas.pixel(j, k as i16, color); } } } self.canvas.present(); if collision { DrawResult::Collision } else { DrawResult::Success } } // Clear the canvas. pub fn clear(&mut self) { for i in 0..self.screen.len() { self.screen[i] = false; } self.canvas.set_draw_color(pixels::Color::RGB(0, 0, 0)); self.canvas.clear(); self.canvas.present(); } // Process all queued key events. pub fn draw_events(&mut self) { let mut events = self.context.event_pump().unwrap(); for event in events.poll_iter() { match event { Event::Quit {..} => exit(0), Event::KeyDown {keycode: Some(keycode), ..} => { if keycode == Keycode::Escape { exit(0); } if let Some(ind) = self.key_ind(keycode) { self.keys[ind as usize] = true; } }, Event::KeyUp {keycode: Some(keycode), ..} => { if let Some(ind) = self.key_ind(keycode) { self.keys[ind as usize] = false; } }, _ => {}, } } } pub fn beep(&mut self) { return; } }
pub fn is_leap_year(year: u64) -> bool { let l4 = year % 4 == 0; let nl100 = year % 100 != 0; let l400 = year % 400 == 0; match (l4, nl100, l400) { (true, true, _) => true, (true, false, true) => true, _ => false } }
extern crate greed_ecs; extern crate serde; extern crate serde_json; #[cfg_attr(test,allow(unused_variables,unused_imports,dead_code))] use greed_ecs::game::cardsmeta; #[cfg_attr(test,allow(unused_variables,unused_imports,dead_code))] #[test] fn card_meta() { let data = cardsmeta::locale_card_meta(cardsmeta::LocalEnum::English); println!("data {:?}", data); assert_eq!(30, 30); }
use std::fmt; use nom::{ IResult, combinator::map, branch::alt, bytes::complete::tag }; #[derive(Debug, PartialEq, Clone)] pub enum SdpTransport { Udp, Tcp, RtpAvp, RtpSavp } pub fn parse_transport(input: &[u8]) -> IResult<&[u8], SdpTransport> { alt(( map(tag("UDP"), |_| SdpTransport::Udp), map(tag("TCP"), |_| SdpTransport::Tcp), map(tag("RTP/AVP"), |_| SdpTransport::RtpAvp), map(tag("RTP/SAVP"), |_| SdpTransport::RtpSavp) ))(input) } impl fmt::Display for SdpTransport { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { SdpTransport::Udp => write!(f, "UDP"), SdpTransport::Tcp => write!(f, "TCP"), SdpTransport::RtpAvp => write!(f, "RTP/AVP"), SdpTransport::RtpSavp => write!(f, "RTP/SAVP") } } }
// Unit tests for the SWFJC (square-well freely-jointed chain) single-chain
// model defined in the parent module (`use super::*`).
//
// `mod base`: `SWFJC::init` must store number_of_links / link_length /
// hinge_mass / well_width unchanged, individually and all together.
//
// `mod legendre`: consistency checks between the isotensional ensemble and the
// Legendre-transformed isometric ensemble. Each test draws random parameters,
// computes a quantity via `model.isotensional.*`, recomputes it via
// `model.isometric.legendre.*`, and requires the absolute OR relative residual
// to be within the `Parameters` tolerances (some relative tests widen the
// tolerance by 3e1). The `(8 π² m b² kT / h²).ln()` terms added to several
// residuals presumably cancel a constant reference free-energy offset between
// the two ensembles — NOTE(review): confirm against the model's derivation.
#![cfg(test)] use super::*; use std::f64::consts::PI; use crate::physics:: { BOLTZMANN_CONSTANT, PLANCK_CONSTANT }; use crate::physics::single_chain::test::Parameters; mod base { use super::*; use rand::Rng; #[test] fn init() { let parameters = Parameters::default(); let _ = SWFJC::init(parameters.number_of_links_minimum, parameters.link_length_reference, parameters.hinge_mass_reference, parameters.well_width_reference); } #[test] fn number_of_links() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); assert_eq!(number_of_links, SWFJC::init(number_of_links, parameters.link_length_reference, parameters.hinge_mass_reference, parameters.well_width_reference).number_of_links); } } #[test] fn link_length() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); assert_eq!(link_length, SWFJC::init(parameters.number_of_links_minimum, link_length, parameters.hinge_mass_reference, parameters.well_width_reference).link_length); } } #[test] fn hinge_mass() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); assert_eq!(hinge_mass, SWFJC::init(parameters.number_of_links_minimum, parameters.link_length_reference, hinge_mass, parameters.well_width_reference).hinge_mass); } } #[test] fn well_width() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); assert_eq!(well_width,
SWFJC::init(parameters.number_of_links_minimum, parameters.link_length_reference, parameters.hinge_mass_reference, well_width).well_width); } } #[test] fn all_parameters() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass, well_width); assert_eq!(number_of_links, model.number_of_links); assert_eq!(link_length, model.link_length); assert_eq!(hinge_mass, model.hinge_mass); assert_eq!(well_width, model.well_width); } } } mod legendre { use super::*; use rand::Rng; #[test] fn force() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass, well_width); let nondimensional_force = parameters.nondimensional_force_reference + parameters.nondimensional_force_scale*(0.5 - rng.gen::<f64>()); let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>()); let force = nondimensional_force*BOLTZMANN_CONSTANT*temperature/link_length; let
end_to_end_length = model.isotensional.end_to_end_length(&force, &temperature); let force_out = model.isometric.legendre.force(&end_to_end_length, &temperature); let residual_abs = &force - &force_out; let residual_rel = &residual_abs/&force; assert!(residual_abs.abs() <= parameters.abs_tol || residual_rel.abs() <= parameters.rel_tol); } } #[test] fn nondimensional_force() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass, well_width); let nondimensional_force = parameters.nondimensional_force_reference + parameters.nondimensional_force_scale*(0.5 - rng.gen::<f64>()); let nondimensional_end_to_end_length_per_link= model.isotensional.nondimensional_end_to_end_length_per_link(&nondimensional_force); let nondimensional_force_out = model.isometric.legendre.nondimensional_force(&nondimensional_end_to_end_length_per_link); let residual_abs = &nondimensional_force - &nondimensional_force_out; let residual_rel = &residual_abs/&nondimensional_force; assert!(residual_abs.abs() <= parameters.abs_tol || residual_rel.abs() <= parameters.rel_tol); } } #[test] fn helmholtz_free_energy() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let
hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass, well_width); let nondimensional_force = parameters.nondimensional_force_reference + parameters.nondimensional_force_scale*(0.5 - rng.gen::<f64>()); let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>()); let force = nondimensional_force*BOLTZMANN_CONSTANT*temperature/link_length; let end_to_end_length = model.isotensional.end_to_end_length(&force, &temperature); let helmholtz_free_energy_legendre = model.isotensional.gibbs_free_energy(&force, &temperature) + force*end_to_end_length; let helmholtz_free_energy_legendre_out = model.isometric.legendre.helmholtz_free_energy(&end_to_end_length, &temperature); let residual_abs = &helmholtz_free_energy_legendre - &helmholtz_free_energy_legendre_out + BOLTZMANN_CONSTANT*temperature*(8.0*PI.powi(2)*hinge_mass*link_length.powi(2)*BOLTZMANN_CONSTANT*temperature/PLANCK_CONSTANT.powi(2)).ln(); let residual_rel = &residual_abs/&helmholtz_free_energy_legendre; assert!(residual_abs.abs() <= parameters.abs_tol || residual_rel.abs() <= parameters.rel_tol); } } #[test] fn helmholtz_free_energy_per_link() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass,
well_width); let nondimensional_force = parameters.nondimensional_force_reference + parameters.nondimensional_force_scale*(0.5 - rng.gen::<f64>()); let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>()); let force = nondimensional_force*BOLTZMANN_CONSTANT*temperature/link_length; let end_to_end_length = model.isotensional.end_to_end_length(&force, &temperature); let end_to_end_length_per_link = model.isotensional.end_to_end_length_per_link(&force, &temperature); let helmholtz_free_energy_per_link_legendre = model.isotensional.gibbs_free_energy_per_link(&force, &temperature) + force*end_to_end_length_per_link; let helmholtz_free_energy_per_link_legendre_out = model.isometric.legendre.helmholtz_free_energy_per_link(&end_to_end_length, &temperature); let residual_abs = &helmholtz_free_energy_per_link_legendre - &helmholtz_free_energy_per_link_legendre_out + BOLTZMANN_CONSTANT*temperature*(8.0*PI.powi(2)*hinge_mass*link_length.powi(2)*BOLTZMANN_CONSTANT*temperature/PLANCK_CONSTANT.powi(2)).ln()/(number_of_links as f64); let residual_rel = &residual_abs/&helmholtz_free_energy_per_link_legendre; assert!(residual_abs.abs() <= parameters.abs_tol || residual_rel.abs() <= parameters.rel_tol); } } #[test] fn relative_helmholtz_free_energy() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass, well_width); let nondimensional_force = parameters.nondimensional_force_reference +
parameters.nondimensional_force_scale*(0.5 - rng.gen::<f64>()); let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>()); let force = nondimensional_force*BOLTZMANN_CONSTANT*temperature/link_length; let end_to_end_length = model.isotensional.end_to_end_length(&force, &temperature); let relative_helmholtz_free_energy_legendre = model.isotensional.relative_gibbs_free_energy(&force, &temperature) + force*end_to_end_length; let relative_helmholtz_free_energy_legendre_out = model.isometric.legendre.relative_helmholtz_free_energy(&end_to_end_length, &temperature); let residual_abs = &relative_helmholtz_free_energy_legendre - &relative_helmholtz_free_energy_legendre_out; let residual_rel = &residual_abs/&relative_helmholtz_free_energy_legendre; assert!(residual_abs.abs() <= 3e1 * parameters.abs_tol || residual_rel.abs() <= 3e1 * parameters.rel_tol); } } #[test] fn relative_helmholtz_free_energy_per_link() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass, well_width); let nondimensional_force = parameters.nondimensional_force_reference + parameters.nondimensional_force_scale*(0.5 - rng.gen::<f64>()); let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>()); let force = nondimensional_force*BOLTZMANN_CONSTANT*temperature/link_length; let end_to_end_length = model.isotensional.end_to_end_length(&force, &temperature); let
end_to_end_length_per_link = model.isotensional.end_to_end_length_per_link(&force, &temperature); let relative_helmholtz_free_energy_per_link_legendre = model.isotensional.relative_gibbs_free_energy_per_link(&force, &temperature) + force*end_to_end_length_per_link; let relative_helmholtz_free_energy_per_link_legendre_out = model.isometric.legendre.relative_helmholtz_free_energy_per_link(&end_to_end_length, &temperature); let residual_abs = &relative_helmholtz_free_energy_per_link_legendre - &relative_helmholtz_free_energy_per_link_legendre_out; let residual_rel = &residual_abs/&relative_helmholtz_free_energy_per_link_legendre; assert!(residual_abs.abs() <= 3e1 * parameters.abs_tol || residual_rel.abs() <= 3e1 * parameters.rel_tol); } } #[test] fn nondimensional_helmholtz_free_energy() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass, well_width); let nondimensional_force = parameters.nondimensional_force_reference + parameters.nondimensional_force_scale*(0.5 - rng.gen::<f64>()); let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>()); let nondimensional_end_to_end_length = model.isotensional.nondimensional_end_to_end_length(&nondimensional_force); let nondimensional_end_to_end_length_per_link = model.isotensional.nondimensional_end_to_end_length_per_link(&nondimensional_force); let nondimensional_helmholtz_free_energy_legendre =
model.isotensional.nondimensional_gibbs_free_energy(&nondimensional_force, &temperature) + nondimensional_force*nondimensional_end_to_end_length; let nondimensional_helmholtz_free_energy_legendre_out = model.isometric.legendre.nondimensional_helmholtz_free_energy(&nondimensional_end_to_end_length_per_link, &temperature); let residual_abs = &nondimensional_helmholtz_free_energy_legendre - &nondimensional_helmholtz_free_energy_legendre_out + (8.0*PI.powi(2)*hinge_mass*link_length.powi(2)*BOLTZMANN_CONSTANT*temperature/PLANCK_CONSTANT.powi(2)).ln(); let residual_rel = &residual_abs/&nondimensional_helmholtz_free_energy_legendre; assert!(residual_abs.abs() <= parameters.abs_tol || residual_rel.abs() <= parameters.rel_tol); } } #[test] fn nondimensional_helmholtz_free_energy_per_link() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass, well_width); let nondimensional_force = parameters.nondimensional_force_reference + parameters.nondimensional_force_scale*(0.5 - rng.gen::<f64>()); let temperature = parameters.temperature_reference + parameters.temperature_scale*(0.5 - rng.gen::<f64>()); let nondimensional_end_to_end_length_per_link = model.isotensional.nondimensional_end_to_end_length_per_link(&nondimensional_force); let nondimensional_helmholtz_free_energy_per_link_legendre = model.isotensional.nondimensional_gibbs_free_energy_per_link(&nondimensional_force, &temperature) +
nondimensional_force*nondimensional_end_to_end_length_per_link; let nondimensional_helmholtz_free_energy_per_link_legendre_out = model.isometric.legendre.nondimensional_helmholtz_free_energy_per_link(&nondimensional_end_to_end_length_per_link, &temperature); let residual_abs = &nondimensional_helmholtz_free_energy_per_link_legendre - &nondimensional_helmholtz_free_energy_per_link_legendre_out + (8.0*PI.powi(2)*hinge_mass*link_length.powi(2)*BOLTZMANN_CONSTANT*temperature/PLANCK_CONSTANT.powi(2)).ln()/(number_of_links as f64); let residual_rel = &residual_abs/&nondimensional_helmholtz_free_energy_per_link_legendre; assert!(residual_abs.abs() <= parameters.abs_tol || residual_rel.abs() <= parameters.rel_tol); } } #[test] fn nondimensional_relative_helmholtz_free_energy() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass, well_width); let nondimensional_force = parameters.nondimensional_force_reference + parameters.nondimensional_force_scale*(0.5 - rng.gen::<f64>()); let nondimensional_end_to_end_length = model.isotensional.nondimensional_end_to_end_length(&nondimensional_force); let nondimensional_end_to_end_length_per_link = model.isotensional.nondimensional_end_to_end_length_per_link(&nondimensional_force); let nondimensional_relative_helmholtz_free_energy_legendre = model.isotensional.nondimensional_relative_gibbs_free_energy(&nondimensional_force) + nondimensional_force*nondimensional_end_to_end_length; let
nondimensional_relative_helmholtz_free_energy_legendre_out = model.isometric.legendre.nondimensional_relative_helmholtz_free_energy(&nondimensional_end_to_end_length_per_link); let residual_abs = &nondimensional_relative_helmholtz_free_energy_legendre - &nondimensional_relative_helmholtz_free_energy_legendre_out; let residual_rel = &residual_abs/&nondimensional_relative_helmholtz_free_energy_legendre; assert!(residual_abs.abs() <= 3e1 * parameters.abs_tol || residual_rel.abs() <= 3e1 * parameters.rel_tol); } } #[test] fn nondimensional_relative_helmholtz_free_energy_per_link() { let mut rng = rand::thread_rng(); let parameters = Parameters::default(); for _ in 0..parameters.number_of_loops { let number_of_links: u8 = rng.gen_range(parameters.number_of_links_minimum..parameters.number_of_links_maximum); let link_length = parameters.link_length_reference + parameters.link_length_scale*(0.5 - rng.gen::<f64>()); let hinge_mass = parameters.hinge_mass_reference + parameters.hinge_mass_scale*(0.5 - rng.gen::<f64>()); let well_width = parameters.well_width_reference + parameters.well_width_scale*(0.5 - rng.gen::<f64>()); let model = SWFJC::init(number_of_links, link_length, hinge_mass, well_width); let nondimensional_force = parameters.nondimensional_force_reference + parameters.nondimensional_force_scale*(0.5 - rng.gen::<f64>()); let nondimensional_end_to_end_length_per_link = model.isotensional.nondimensional_end_to_end_length_per_link(&nondimensional_force); let nondimensional_relative_helmholtz_free_energy_per_link_legendre = model.isotensional.nondimensional_relative_gibbs_free_energy_per_link(&nondimensional_force) + nondimensional_force*nondimensional_end_to_end_length_per_link; let nondimensional_relative_helmholtz_free_energy_per_link_legendre_out = model.isometric.legendre.nondimensional_relative_helmholtz_free_energy_per_link(&nondimensional_end_to_end_length_per_link); let residual_abs = &nondimensional_relative_helmholtz_free_energy_per_link_legendre -
&nondimensional_relative_helmholtz_free_energy_per_link_legendre_out; let residual_rel = &residual_abs/&nondimensional_relative_helmholtz_free_energy_per_link_legendre; assert!(residual_abs.abs() <= 3e1 * parameters.abs_tol || residual_rel.abs() <= 3e1 * parameters.rel_tol); } } }
pub mod platform_traits; pub mod foc_types; pub mod foc_controller; mod pid_controller; mod dq_transform; mod modulator; mod foc_math_utils;
#![recursion_limit = "1024"] use anyhow::{Context, Result}; use human_panic::setup_panic; use boilrs::{generate, install, list, new, uninstall, utils}; fn main() -> Result<()> { setup_panic!(); // * Creating cli app let cli = utils::terminal::init_term().get_matches(); match cli.subcommand() { ("generate", Some(args)) => generate::generate(&args).context(format!( "Failed to generate the template using `args`: {:?}", args ))?, ("new", Some(args)) => new::new(&args).context(format!( "Failed to create new config using `args`: {:?}", args ))?, ("install", Some(args)) => install::install(args).context(format!( "Failed to install the template using `args`: {:?}", args ))?, ("uninstall", Some(args)) => uninstall::uninstall(args).context(format!( "Failed to uninstall the template using `args`: {:?}", args ))?, ("list", Some(args)) => list::list(args).context(format!( "Failed to list the templates using `args`: {:?}", args ))?, // ("download", Some(_args)) => unimplemented!(), // ("update", Some(_args)) => unimplemented!(), _ => (), } Ok(()) }
//! Generic helper types. use std::error::Error; use std::fmt::{self, Debug, Display, Formatter}; use std::fs::File; use std::io::Read; use std::path::PathBuf; use chrono::DateTime; use chrono::FixedOffset; use chrono::Local; use serde_json; /// Used for representing generic String errors as IronErrors. #[derive(Debug)] pub struct StringError(pub String); impl Error for StringError {} impl Display for StringError { fn fmt(&self, f: &mut Formatter) -> fmt::Result { Debug::fmt(self, f) } } /// The different file types supported. #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub enum FileType { File, Url, Text, } impl FileType { /// Parses a file type from a String form. pub fn from_str(name: &str) -> Option<FileType> { match name { "file" => Some(FileType::File), "url" => Some(FileType::Url), "text" => Some(FileType::Text), _ => None, } } } /// Implements a RFC2822 serializer for serde (compatible with the original PHP implementation) mod metadata_rfc2822 { use chrono::{DateTime, FixedOffset}; use serde::{self, Deserialize, Deserializer, Serializer}; pub fn serialize<S>(date: &DateTime<FixedOffset>, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let s = date.to_rfc2822(); serializer.serialize_str(&s) } pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<FixedOffset>, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; DateTime::parse_from_rfc2822(&s).map_err(serde::de::Error::custom) } } /// The main metadata store for files. #[derive(Serialize, Deserialize)] pub struct FileMetadata { /// Date when this file was uploaded. #[serde(with = "metadata_rfc2822")] pub date: DateTime<FixedOffset>, // Mon, 11 Dec 2017 10:28:36 +0000" /// What kind of file this is. #[serde(rename = "type")] pub file_type: FileType, /// Specifies a URL to redirect to. /// If this is a URL filetype, this is required. pub url: Option<String>, /// The filename in which this exists on the filesystem. 
/// If this is a file filetype, this is required. pub filename: Option<String>, /// The original filename the user specified. /// If this is a file filetype, this is required. pub actual_filename: Option<String>, } impl FileMetadata { /// Parses some FileMetadata from a specified <root>/<name>.info.json file. pub fn from_path(root: &str, name: &str) -> Result<FileMetadata, String> { let mut path = PathBuf::from(root); path.push(name.to_string() + ".info.json"); if !path.exists() { return Err(format!("File {} doesn't exist!", name)); } let mut meta_file = match File::open(&path) { Ok(file) => file, Err(e) => return Err(format!("File {} couldn't be opened: {:?}", name, e)), }; let mut meta_string = String::new(); match meta_file.read_to_string(&mut meta_string) { Ok(_) => (), Err(e) => return Err(format!("File {} couldn't be read: {:?}", name, e)), } match serde_json::from_str(&meta_string) { Ok(meta) => Ok(meta), Err(e) => Err(format!("File {} couldn't be parsed: {:?}", name, e)), } } /// Creates a new FileMetadata for a generic file upload. pub fn new_from_file(filename: String, actual_filename: String) -> FileMetadata { FileMetadata { date: Local::now().with_timezone(&FixedOffset::east(0)), file_type: FileType::File, filename: Some(filename), actual_filename: Some(actual_filename), url: None, } } /// Creates a new FileMetadata for a text file upload. pub fn new_from_text(filename: String, actual_filename: String) -> FileMetadata { FileMetadata { date: Local::now().with_timezone(&FixedOffset::east(0)), file_type: FileType::Text, filename: Some(filename), actual_filename: Some(actual_filename), url: None, } } /// Creates a new FileMetadata for a url upload. pub fn new_from_url(url: String) -> FileMetadata { FileMetadata { date: Local::now().with_timezone(&FixedOffset::east(0)), file_type: FileType::Url, filename: None, actual_filename: None, url: Some(url), } } }
//! ESP32 bare-metal example: echoes received UART bytes and blinks GPIO0.
//!
//! Runs without std or a main harness (`no_std`/`no_main`); `#[entry]` from
//! the HAL provides the reset entry point.

#![no_std]
#![no_main]

use core::{fmt::Write, panic::PanicInfo};

use esp32_hal::{
    clock_control::{sleep, ClockControl, XTAL_FREQUENCY_AUTO},
    dport::Split,
    dprintln,
    prelude::*,
    serial::{config::Config, Pins, Serial},
    target,
    timer::Timer,
};

// Blink rate of the LED on GPIO0; the loop below sleeps half a period
// per on/off phase.
const BLINK_HZ: Hertz = Hertz(2);

#[entry]
fn main() -> ! {
    let dp = target::Peripherals::take().expect("Failed to obtain Peripherals");

    let (_, dport_clock_control) = dp.DPORT.split();

    // Configure the clock subsystem; XTAL frequency is auto-detected.
    let clkcntrl = ClockControl::new(
        dp.RTCCNTL,
        dp.APB_CTRL,
        dport_clock_control,
        XTAL_FREQUENCY_AUTO,
    )
    .unwrap();

    // Freeze the clock config and disable all three watchdogs (RTC + both
    // timer groups) so they don't reset the chip while we busy-loop.
    let (clkcntrl_config, mut watchdog) = clkcntrl.freeze().unwrap();
    watchdog.disable();

    let (_, _, _, mut watchdog0) = Timer::new(dp.TIMG0, clkcntrl_config);
    let (_, _, _, mut watchdog1) = Timer::new(dp.TIMG1, clkcntrl_config);
    watchdog0.disable();
    watchdog1.disable();

    let pins = dp.GPIO.split();
    let mut blinky = pins.gpio0.into_push_pull_output();

    // Use UART1 as example: will cause dprintln statements not to be printed
    let mut serial: Serial<_, _, _> = Serial::new(
        dp.UART1,
        Pins {
            tx: pins.gpio1,
            rx: pins.gpio3,
            cts: None,
            rts: None,
        },
        Config {
            // default configuration is 19200 baud, 8 data bits, 1 stop bit & no parity (8N1)
            baudrate: 115200.Hz(),
            ..Config::default()
        },
        clkcntrl_config,
    )
    .unwrap();

    let (mut tx, mut rx) = serial.split();

    writeln!(tx, "\n\nESP32 Started\n\n").unwrap();

    // line will not be printed as using UART1
    dprintln!("UART0\n");

    loop {
        // Report and drain everything currently in the RX FIFO; printable
        // bytes are echoed as chars, everything else as '?', always with hex.
        writeln!(tx, "Characters received: {:?}", rx.count()).unwrap();
        while let Ok(x) = rx.read() {
            write!(tx, "{} ({:#x}) ", if x >= 32 { x as char } else { '?' }, x).unwrap()
        }
        writeln!(tx, "").unwrap();

        // One full blink cycle per loop iteration.
        blinky.set_high().unwrap();
        sleep((Hertz(1_000_000) / BLINK_HZ).us());
        blinky.set_low().unwrap();
        sleep((Hertz(1_000_000) / BLINK_HZ).us());
    }
}

/// Panic handler: dumps the panic info over the debug UART and halts.
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
    dprintln!("\n\n*** {:?}", info);
    loop {}
}
//! Crate root: public module declarations.

pub mod dijkstra;
pub mod dir;
pub mod grid;
pub mod intcode;
pub mod io;
pub mod permutation;
// FFI layer over the native `tqapi` C library (market-data API + trade API).
//
// Every struct below mirrors a C struct byte-for-byte; `#[repr(C, packed)]`
// matches the packed layout the C side uses, so field order, types and
// packing must not change. Raw pointers are owned by the C side; the `to_rs`
// methods deep-copy them into owned Rust types. Results returned by the
// extern functions must be released through the matching `..._free_...`
// function — presumably the `_data` field is the C allocation handle; verify
// against the C headers.

extern crate libc;

use std::ffi::CStr;
use std::os::raw::c_char;

use super::dapi::MarketQuote;
use super::dapi::Bar;
use super::dapi::DailyBar;
use super::tapi::{Balance, AccountInfo, Order, Trade, Position, EntrustAction, OrderStatus, Side};

/// Copies a NUL-terminated C string into an owned `String`.
/// A NULL pointer maps to the empty string; invalid UTF-8 is replaced lossily.
pub fn c_str_to_string(s : *const c_char) -> String {
    unsafe {
        if !s.is_null() {
            CStr::from_ptr(s).to_string_lossy().into_owned()
        } else {
            String::from("")
        }
    }
}

/// Raw market snapshot as delivered by the C API (level-5 quote).
#[repr(C, packed)]
pub struct CMarketQuote{
    code : *const c_char,            // security code (C string)
    date : u32, time : u32, recv_time : u64, trading_day: u32,
    open : f64, high : f64, low : f64, close : f64, last : f64,
    high_limit : f64, low_limit : f64, pre_close : f64,
    volume : i64, turnover : f64,
    ask1 : f64, ask2 : f64, ask3 : f64, ask4 : f64, ask5 : f64,
    bid1 : f64, bid2 : f64, bid3 : f64, bid4 : f64, bid5 : f64,
    ask_vol1 : i64, ask_vol2 : i64, ask_vol3 : i64, ask_vol4 : i64, ask_vol5 : i64,
    bid_vol1 : i64, bid_vol2 : i64, bid_vol3 : i64, bid_vol4 : i64, bid_vol5 : i64,
    settle : f64, pre_settle : f64,
    oi : i64, pre_oi : i64
}

impl CMarketQuote {
    /// Deep-copies this raw quote into an owned `MarketQuote`.
    pub fn to_rs(&self) -> MarketQuote {
        // SAFETY: `code` must point at a valid NUL-terminated string kept
        // alive by the C side for the duration of this call.
        unsafe {
            MarketQuote {
                code : CStr::from_ptr(self.code).to_string_lossy().into_owned(),
                date : self.date,
                time : self.time,
                recv_time : self.recv_time,
                trading_day: self.trading_day,
                open : self.open, high : self.high, low : self.low,close : self.close,
                last : self.last,
                high_limit : self.high_limit, low_limit : self.low_limit,
                pre_close : self.pre_close,
                volume : self.volume, turnover : self.turnover,
                ask1 : self.ask1, ask2 : self.ask2, ask3 : self.ask3, ask4 : self.ask4, ask5 : self.ask5,
                ask_vol1 : self.ask_vol1, ask_vol2 : self.ask_vol2, ask_vol3 : self.ask_vol3, ask_vol4 : self.ask_vol4, ask_vol5 : self.ask_vol5,
                bid1 : self.bid1, bid2 : self.bid2, bid3 : self.bid3, bid4 : self.bid4, bid5 : self.bid5,
                bid_vol1 : self.bid_vol1, bid_vol2 : self.bid_vol2, bid_vol3 : self.bid_vol3, bid_vol4 : self.bid_vol4, bid_vol5 : self.bid_vol5,
                settle : self.settle, pre_settle : self.pre_settle,
                oi : self.oi, pre_oi : self.pre_oi
            }
        }
    }
}

/// Raw intraday bar (candle) from the C API.
#[repr(C, packed)]
pub struct CBar {
    code : *const c_char,
    date : u32, time : u32, trading_day: u32,
    open : f64, high : f64, low : f64, close : f64,
    volume : i64, turnover : f64,
    oi : i64
}

impl CBar {
    /// Deep-copies this raw bar into an owned `Bar`.
    pub fn to_rs(&self) -> Bar{
        // SAFETY: `code` must be a valid NUL-terminated C string.
        unsafe {
            Bar {
                code: CStr::from_ptr(self.code).to_string_lossy().into_owned(),
                date : self.date,
                time : self.time,
                trading_day: self.trading_day,
                open : self.open, high : self.high, low : self.low, close : self.close,
                volume : self.volume, turnover : self.turnover,
                oi : self.oi
            }
        }
    }
}

/// Raw daily bar from the C API (includes settlement and adjust factor).
#[repr(C, packed)]
pub struct CDailyBar {
    code : *const c_char,
    trading_day: u32,
    open : f64, high : f64, low : f64, close : f64,
    volume : i64, turnover : f64,
    oi : i64,
    settle : f64, pre_close : f64, pre_settle : f64,
    af : f64
}

impl CDailyBar {
    /// Deep-copies this raw daily bar into an owned `DailyBar`.
    pub fn to_rs(&self) -> DailyBar{
        // SAFETY: `code` must be a valid NUL-terminated C string.
        unsafe {
            DailyBar {
                code: CStr::from_ptr(self.code).to_string_lossy().into_owned(),
                trading_day: self.trading_day,
                open : self.open, high : self.high, low : self.low, close : self.close,
                volume : self.volume, turnover : self.turnover,
                oi : self.oi,
                settle : self.settle, pre_close : self.pre_close, pre_settle : self.pre_settle,
                af : self.af
            }
        }
    }
}

/// Result of `tqapi_dapi_get_ticks`; free via `tqapi_dapi_free_get_ticks_result`.
#[repr(C, packed)]
pub struct GetTicksResult {
    _data : *mut libc::c_void,       // opaque handle owned by the C side
    pub ticks : *mut CMarketQuote,   // array of `ticks_length` quotes
    pub ticks_length : i32,
    pub element_size : i32,          // per-element stride reported by C
    pub msg : *const c_char,         // error message when `ticks` is NULL
}

/// Result of `tqapi_dapi_get_bars`; free via `tqapi_dapi_free_get_bars_result`.
#[repr(C, packed)]
pub struct GetBarsResult {
    _data : *mut libc::c_void,
    pub ticks : *mut CBar,
    pub ticks_length : i32,
    pub element_size : i32,
    pub msg : *const c_char,
}

/// Result of `tqapi_dapi_get_dailybars`; free via `tqapi_dapi_free_get_dailybars_result`.
#[repr(C, packed)]
pub struct GetDailyBarResult {
    _data : *mut libc::c_void,
    pub ticks : *mut CDailyBar,
    pub ticks_length : i32,
    pub element_size : i32,
    pub msg : *const c_char,
}

/// Result of `tqapi_dapi_get_quote`; free via `tqapi_dapi_free_get_quote_result`.
#[repr(C, packed)]
pub struct GetQuoteResult {
    _data : *mut libc::c_void,
    pub quote : *mut CMarketQuote,
    pub msg : *const c_char,
}

/// Result of `tqapi_dapi_subscribe`; free via `tqapi_dapi_free_subscribe_result`.
#[repr(C, packed)]
pub struct SubscribeResult {
    _data : *mut libc::c_void,
    pub codes : *const c_char,       // comma-separated codes now subscribed
    pub msg : *const c_char,
}

/// Result of `tqapi_dapi_unsubscribe`; free via `tqapi_dapi_free_unsubscribe_result`.
#[repr(C, packed)]
pub struct UnSubscribeResult {
    _data : *mut libc::c_void,
    pub codes : *const c_char,
    pub msg : *const c_char,
}

/// C-side callback table for market-data pushes; `obj` is passed back
/// verbatim as the first argument of each callback.
#[repr(C, packed)]
pub struct CDataApiCallback {
    pub obj : *mut libc::c_void,
    pub on_quote : extern "C" fn(obj : *mut libc::c_void, quote : *mut CMarketQuote ),
    pub on_bar   : extern "C" fn(obj : *mut libc::c_void, cycle : *mut c_char, bar : *mut CBar )
}

/// Opaque handle to the C market-data API (never constructed in Rust).
pub enum CDataApi { }

#[link(name = "tqapi-static", kind = "static")]
extern "C" {
    pub fn tqapi_create_data_api(addr : *const c_char) -> *mut CDataApi;
    pub fn tqapi_free_data_api  (dapi : *mut CDataApi);

    pub fn tqapi_dapi_get_ticks                (dapi : *mut CDataApi, code : *const c_char, trading_day : u32, number: i32) -> *mut GetTicksResult;
    pub fn tqapi_dapi_free_get_ticks_result    (dapi : *mut CDataApi, result : *mut GetTicksResult);
    pub fn tqapi_dapi_get_bars                 (dapi : *mut CDataApi, code : *const c_char, cycle : *const c_char, trading_day: u32, align: i32, number: i32) -> *mut GetBarsResult;
    pub fn tqapi_dapi_free_get_bars_result     (dapi : *mut CDataApi, result : *mut GetBarsResult);
    pub fn tqapi_dapi_get_dailybars            (dapi : *mut CDataApi, code : *const c_char, price_adj : *const c_char, align: i32, number: i32) -> *mut GetDailyBarResult;
    pub fn tqapi_dapi_free_get_dailybars_result(dapi : *mut CDataApi, result : *mut GetDailyBarResult);
    pub fn tqapi_dapi_get_quote                (dapi : *mut CDataApi, code : *const c_char) -> *mut GetQuoteResult;
    pub fn tqapi_dapi_free_get_quote_result    (dapi : *mut CDataApi, result : *mut GetQuoteResult);
    pub fn tqapi_dapi_subscribe                (dapi: *mut CDataApi, codes : *const c_char) -> *mut SubscribeResult;
    pub fn tqapi_dapi_free_subscribe_result    (dapi: *mut CDataApi, result : *mut SubscribeResult);
    pub fn tqapi_dapi_unsubscribe              (dapi: *mut CDataApi, codes : *const c_char) -> *mut UnSubscribeResult;
    pub fn tqapi_dapi_free_unsubscribe_result  (dapi: *mut CDataApi, result : *mut UnSubscribeResult);
    pub fn tqapi_dapi_set_callback             (dapi: *mut CDataApi, callback : *mut CDataApiCallback) -> * mut CDataApiCallback;
}

// TradeApi

/// Raw account status record from the trade API.
#[repr(C, packed)]
pub struct CAccountInfo {
    pub account_id   : *const c_char,   // account id
    pub broker       : *const c_char,   // broker name, e.g. China Merchants Securities
    pub account      : *const c_char,   // trading account number
    pub status       : *const c_char,   // connection status: Disconnected, Connected, Connecting
    pub msg          : *const c_char,   // status message, e.g. reason for a login failure
    pub account_type : *const c_char,   // account type, e.g. stock, ctp
}

impl CAccountInfo {
    /// Deep-copies this record into an owned `AccountInfo`.
    pub fn to_rs(&self) -> AccountInfo {
        AccountInfo{
            account_id   : c_str_to_string(self.account_id),
            broker       : c_str_to_string(self.broker),
            account      : c_str_to_string(self.account),
            status       : c_str_to_string(self.status),
            msg          : c_str_to_string(self.msg),
            account_type : c_str_to_string(self.account_type)
        }
    }
}

/// Raw fund balance record from the trade API.
#[repr(C, packed)]
pub struct CBalance {
    pub account_id     : *const c_char,
    pub fund_account   : *const c_char,
    pub init_balance   : f64,
    pub enable_balance : f64,
    pub margin         : f64,
    pub float_pnl      : f64,
    pub close_pnl      : f64,
}

impl CBalance {
    /// Deep-copies this record into an owned `Balance`.
    pub fn to_rs(&self) -> Balance {
        Balance{
            account_id     : c_str_to_string(self.account_id),
            fund_account   : c_str_to_string(self.fund_account),
            init_balance   : self.init_balance,
            enable_balance : self.enable_balance,
            margin         : self.margin,
            float_pnl      : self.float_pnl,
            close_pnl      : self.close_pnl,
        }
    }
}

/// Raw order record from the trade API.
#[repr(C, packed)]
pub struct COrder {
    pub account_id     : *const c_char,  // account id
    pub code           : *const c_char,  // security code
    pub name           : *const c_char,  // security name
    pub entrust_no     : *const c_char,  // entrust (order) number
    pub entrust_action : *const c_char,  // entrust action
    pub entrust_price  : f64,            // order price
    pub entrust_size   : i64,            // order size, in shares
    pub entrust_date   : u32,            // order date
    pub entrust_time   : u32,            // order time
    pub fill_price     : f64,            // average fill price
    pub fill_size      : i64,            // filled size
    pub status         : *const c_char,  // order status; values: OrderStatus
    pub status_msg     : *const c_char,  // status message
    pub order_id       : i32             // user-defined order id
}

impl COrder {
    /// Deep-copies this record into an owned `Order`, parsing the
    /// action/status strings into their enum forms.
    pub fn to_rs(&self) -> Order {
        // SAFETY: the string fields must be valid NUL-terminated C strings.
        // NOTE(review): `to_str().unwrap()` panics on non-UTF-8 action/status
        // strings, unlike `c_str_to_string` which is lossy — confirm intended.
        unsafe {
            Order {
                account_id     : c_str_to_string( self.account_id ) ,
                code           : c_str_to_string( self.code ) ,
                name           : c_str_to_string( self.name ) ,
                entrust_no     : c_str_to_string( self.entrust_no ) ,
                entrust_action : EntrustAction::from(CStr::from_ptr(self.entrust_action).to_str().unwrap()),
                entrust_price  : self.entrust_price ,
                entrust_size   : self.entrust_size ,
                entrust_date   : self.entrust_date ,
                entrust_time   : self.entrust_time ,
                fill_price     : self.fill_price ,
                fill_size      : self.fill_size ,
                status         : OrderStatus::from(CStr::from_ptr(self.status).to_str().unwrap()),
                status_msg     : c_str_to_string( self.status_msg ) ,
                order_id       : self.order_id ,
            }
        }
    }
}

/// Raw trade (fill) record from the trade API.
#[repr(C, packed)]
pub struct CTrade {
    pub account_id     : *const c_char,  // account id
    pub code           : *const c_char,  // security code
    pub name           : *const c_char,  // security name
    pub entrust_no     : *const c_char,  // entrust (order) number
    pub entrust_action : *const c_char,  // entrust action
    pub fill_no        : *const c_char,  // fill number
    pub fill_size      : i64,            // fill size
    pub fill_price     : f64,            // fill price
    pub fill_date      : u32,            // fill date
    pub fill_time      : u32,            // fill time
    pub order_id       : i32,            // user-defined order id
}

impl CTrade {
    /// Deep-copies this record into an owned `Trade`.
    pub fn to_rs(&self) -> Trade {
        // SAFETY: string fields must be valid NUL-terminated C strings.
        unsafe {
            Trade {
                account_id     : c_str_to_string(self.account_id) ,
                code           : c_str_to_string(self.code ) ,
                name           : c_str_to_string(self.name ) ,
                entrust_no     : c_str_to_string(self.entrust_no) ,
                entrust_action : EntrustAction::from(CStr::from_ptr(self.entrust_action).to_str().unwrap()) ,
                fill_no        : c_str_to_string(self.fill_no) ,
                fill_size      : self.fill_size ,
                fill_price     : self.fill_price ,
                fill_date      : self.fill_date ,
                fill_time      : self.fill_time ,
                order_id       : self.order_id ,
            }
        }
    }
}

/// Raw position record from the trade API.
#[repr(C, packed)]
pub struct CPosition {
    pub account_id   : *const c_char,  // account id
    pub code         : *const c_char,  // security code
    pub name         : *const c_char,  // security name
    pub current_size : i64,            // current position
    pub enable_size  : i64,            // available (tradable) position
    pub init_size    : i64,            // initial position
    pub today_size   : i64,            // today's position
    pub frozen_size  : i64,            // frozen position
    pub side         : *const c_char,  // position side; stocks are Long only, futures Long or Short
    pub cost         : f64,            // cost
    pub cost_price   : f64,            // cost price
    pub last_price   : f64,            // latest price
    pub float_pnl    : f64,            // unrealized (floating) P&L
    pub close_pnl    : f64,            // realized (close) P&L
    pub margin       : f64,            // margin
    pub commission   : f64,            // commission
}

impl CPosition {
    /// Deep-copies this record into an owned `Position`.
    pub fn to_rs(&self) -> Position {
        // SAFETY: string fields must be valid NUL-terminated C strings.
        unsafe {
            Position {
                account_id   : c_str_to_string(self.account_id) ,
                code         : c_str_to_string(self.code ) ,
                name         : c_str_to_string(self.name ) ,
                current_size : self.current_size ,
                enable_size  : self.enable_size ,
                init_size    : self.init_size ,
                today_size   : self.today_size ,
                frozen_size  : self.frozen_size ,
                side         : Side::from(CStr::from_ptr(self.side).to_str().unwrap()),
                cost         : self.cost ,
                cost_price   : self.cost_price ,
                last_price   : self.last_price ,
                float_pnl    : self.float_pnl ,
                close_pnl    : self.close_pnl ,
                margin       : self.margin ,
                commission   : self.commission ,
            }
        }
    }
}

/// Identifier pair returned by `place_order` and accepted by `cancel_order`.
#[repr(C, packed)]
pub struct COrderId {
    pub entrust_no : *const c_char,  // broker-assigned entrust number
    pub order_id   : i32             // user-defined id
}

/// C-side callback table for trade pushes; `obj` is passed back verbatim.
#[repr(C, packed)]
pub struct CTradeApiCallback {
    pub obj : *mut libc::c_void,
    pub on_trade          : extern "C" fn(obj : *mut libc::c_void, trade : *mut CTrade ),
    pub on_order          : extern "C" fn(obj : *mut libc::c_void, order : *mut COrder ),
    pub on_account_status : extern "C" fn(obj : *mut libc::c_void, status: *mut CAccountInfo ),
}

/// Order parameters passed to `tqapi_tapi_place_order`.
#[repr(C, packed)]
pub struct CNewOrder {
    pub action     : *const c_char,
    pub code       : *const c_char,
    pub size       : i64,
    pub price      : f64,
    pub order_type : *const c_char,
    pub order_id   : i32
}

/// Result of `place_order`; free via `tqapi_tapi_free_place_order_result`.
#[repr(C, packed)]
pub struct CPlaceOrderResult {
    pub _data    : *mut libc::c_void,
    pub order_id : *mut COrderId,
    pub msg      : *const c_char,
}

/// Result of `query_positions`; free via `tqapi_tapi_free_query_positions_result`.
#[repr(C, packed)]
pub struct CQueryPositionsResult {
    pub _data        : *mut libc::c_void,
    pub array        : *mut CPosition,
    pub array_size   : i32,
    pub element_size : i32,
    pub msg          : *const c_char,
}

/// Result of `query_trades`; free via `tqapi_tapi_free_query_trades_result`.
#[repr(C, packed)]
pub struct CQueryTradesResult {
    pub _data        : *mut libc::c_void,
    pub array        : *mut CTrade,
    pub array_size   : i32,
    pub element_size : i32,
    pub msg          : *const c_char,
}

/// Result of `query_orders`; free via `tqapi_tapi_free_query_orders_result`.
#[repr(C, packed)]
pub struct CQueryOrdersResult {
    pub _data        : *mut libc::c_void,
    pub array        : *mut COrder,
    pub array_size   : i32,
    pub element_size : i32,
    pub msg          : *const c_char,
}

/// Result of `query_balance`; free via `tqapi_tapi_free_query_balance_result`.
#[repr(C, packed)]
pub struct CQueryBalanceResult {
    pub _data   : *mut libc::c_void,
    pub balance : *mut CBalance,
    pub msg     : *const c_char,
}

/// Result of `query_accounts`; free via `tqapi_tapi_free_query_accounts_result`.
#[repr(C, packed)]
pub struct CQueryAccountsResult {
    pub _data        : *mut libc::c_void,
    pub array        : *mut CAccountInfo,
    pub array_size   : i32,
    pub element_size : i32,
    pub msg          : *const c_char,
}

/// Result of `cancel_order`; free via `tqapi_tapi_free_cancel_order_result`.
#[repr(C, packed)]
pub struct CCancelOrderResult {
    pub _data   : *mut libc::c_void,
    pub success : i32, // bool
    pub msg     : *const c_char,
}

/// Result of the generic `query` call; free via `tqapi_tapi_free_query_result`.
#[repr(C, packed)]
pub struct CQueryResult {
    pub _data : *mut libc::c_void,
    pub text  : *const c_char,
    pub msg   : *const c_char,
}

/// Opaque handle to the C trade API (never constructed in Rust).
pub enum CTradeApi { }

//#[link(name = "tqapi")]//, kind = "static")]
extern "C" {
    pub fn tqapi_create_trade_api(addr : *const c_char) -> *mut CTradeApi;
    pub fn tqapi_free_trade_api  (dapi : *mut CTradeApi);

    pub fn tqapi_tapi_place_order     (tapi : *mut CTradeApi, account_id : *const c_char, order : *mut CNewOrder) -> *mut CPlaceOrderResult;
    pub fn tqapi_tapi_cancel_order    (tapi : *mut CTradeApi, account_id : *const c_char, code : *const c_char, oid : *mut COrderId) -> *mut CCancelOrderResult;
    pub fn tqapi_tapi_query_balance   (tapi : *mut CTradeApi, account_id : *const c_char) ->*mut CQueryBalanceResult;
    pub fn tqapi_tapi_query_positions (tapi : *mut CTradeApi, account_id : *const c_char, codes: *const c_char) -> *mut CQueryPositionsResult;
    pub fn tqapi_tapi_query_orders    (tapi : *mut CTradeApi, account_id : *const c_char, codes: *const c_char) -> *mut CQueryOrdersResult;
    pub fn tqapi_tapi_query_trades    (tapi : *mut CTradeApi, account_id : *const c_char, codes: *const c_char) -> *mut CQueryTradesResult;
    pub fn tqapi_tapi_query           (tapi : *mut CTradeApi, account_id : *const c_char, command: *const c_char, params: *const c_char) -> *mut CQueryResult;
    pub fn tqapi_tapi_query_accounts  (tapi : *mut CTradeApi) -> *mut CQueryAccountsResult;

    pub fn tqapi_tapi_free_place_order_result     (tapi : *mut CTradeApi, result : *mut CPlaceOrderResult);
    pub fn tqapi_tapi_free_cancel_order_result    (tapi : *mut CTradeApi, result : *mut CCancelOrderResult);
    pub fn tqapi_tapi_free_query_accounts_result  (tapi : *mut CTradeApi, result : *mut CQueryAccountsResult);
    pub fn tqapi_tapi_free_query_balance_result   (tapi : *mut CTradeApi, result : *mut CQueryBalanceResult);
    pub fn tqapi_tapi_free_query_positions_result (tapi : *mut CTradeApi, result : *mut CQueryPositionsResult);
    pub fn tqapi_tapi_free_query_orders_result    (tapi : *mut CTradeApi, result : *mut CQueryOrdersResult);
    pub fn tqapi_tapi_free_query_trades_result    (tapi : *mut CTradeApi, result : *mut CQueryTradesResult);
    pub fn tqapi_tapi_free_query_result           (tapi : *mut CTradeApi, result : *mut CQueryResult);
    pub fn tqapi_tapi_set_callback                (tapi : *mut CTradeApi, callback : *mut CTradeApiCallback) -> * mut CTradeApiCallback;
}
//! Software rasterizer: renders a triangle mesh into a `Picture` with a
//! z-buffer, Phong-style shading, an optional ground grid and a size hint.

use crate::aabb::*;
use crate::mesh::*;
use crate::picture::*;
use crate::zbuffer::*;
use std::f32::consts::PI;
use std::time::{Duration, Instant};

/// All user-tweakable rendering parameters.
#[derive(Debug)]
pub struct RenderOptions {
    pub view_pos: Vec3,         // camera position (world space)
    pub light_pos: Vec3,        // point light position (world space)
    pub light_color: Vec3,
    pub ambient_color: Vec3,
    pub model_color: Vec3,      // base color multiplied into the shading
    pub grid_color: Vec3,
    pub background_color: Vec4,
    pub zoom: f32,              // orthographic zoom factor
    pub grid_visible: bool,
    pub draw_size_hint: bool,   // draw "WxHxD" banner at the bottom
}

impl Default for RenderOptions {
    fn default() -> Self {
        Self {
            view_pos: Vec3::new(-1.0, 1.0, -1.0).normalize(),
            light_pos: Vec3::new(-1.0, 0.5, -0.5),
            light_color: Vec3::new(0.6, 0.6, 0.6),
            ambient_color: Vec3::new(0.4, 0.4, 0.4),
            model_color: Vec3::new(0.0, 0.45, 1.0),
            grid_color: Vec3::new(0.1, 0.1, 0.1),
            background_color: Vec4::new(1.0, 1.0, 1.0, 1.0),
            grid_visible: true,
            zoom: 1.0,
            draw_size_hint: true,
        }
    }
}

/// CPU rasterizer for a fixed output resolution.
#[derive(Debug)]
pub struct RasterBackend {
    pub render_options: RenderOptions,
    width: u32,
    height: u32,
    aspect_ratio: f32,
}

impl RasterBackend {
    /// Creates a backend rendering at `width` x `height` pixels.
    pub fn new(width: u32, height: u32) -> Self {
        Self {
            render_options: RenderOptions::default(),
            width,
            height,
            aspect_ratio: width as f32 / height as f32,
        }
    }

    /// Builds the combined orthographic-projection * look-at-view matrix
    /// for the given zoom.
    fn view_projection(&self, zoom: f32) -> Mat4 {
        // calculate view projection matrix
        let proj = glm::ortho(
            zoom * 0.5 * self.aspect_ratio,
            -zoom * 0.5 * self.aspect_ratio,
            -zoom * 0.5,
            zoom * 0.5,
            0.0,
            1.0,
        );
        let view = glm::look_at(
            &self.render_options.view_pos,
            &Vec3::new(0.0, 0.0, 0.0),
            &Vec3::new(0.0, 0.0, -1.0),
        );
        proj * view
    }

    /// Returns the mesh's AABB and the uniform scale that makes the mesh
    /// fill the canvas at zoom 1.0.
    pub fn fit_mesh_scale(&self, mesh: impl IntoIterator<Item = Triangle> + Copy) -> (AABB, f32) {
        let aabb = AABB::from_iterable(mesh);
        let vp = self.view_projection(1.0);

        // scale the model such that is fills the entire canvas
        (aabb, scale_for_unitsize(&vp, &aabb))
    }

    /// Rasterizes `mesh` (pre-scaled by `model_scale`, with bounding box
    /// `aabb`) into a new `Picture`. If `timeout` is given, rendering is
    /// aborted once exceeded and the partially drawn picture is returned.
    pub fn render(
        &self,
        mesh: impl IntoIterator<Item = Triangle> + Copy,
        model_scale: f32,
        aabb: &AABB,
        timeout: Option<Duration>,
    ) -> Picture {
        let start_time = Instant::now();
        let mut pic = Picture::new(self.width, self.height);
        let mut zbuf = ZBuffer::new(self.width, self.height);
        let mut scaled_aabb = *aabb;

        pic.fill(&(&self.render_options.background_color).into());

        let vp = self.view_projection(self.render_options.zoom);

        // calculate transforms taking the new model scale into account
        let model = Mat4::identity()
            .append_translation(&-aabb.center())
            .append_scaling(model_scale);
        let mvp = vp * model;

        // let the AABB match the transformed model
        scaled_aabb.apply_transform(&model);

        // eye normal pointing towards the camera in world space
        let eye_normal = self.render_options.view_pos.normalize();

        // grid in x and y direction
        if self.render_options.grid_visible {
            draw_grid(
                &mut pic,
                &vp,
                scaled_aabb.lower.z,
                &self.render_options.grid_color,
                aabb.size(),
                model_scale,
            );
            draw_grid(
                &mut pic,
                &(vp * glm::rotation(PI / 2.0, &Vec3::new(0.0, 0.0, 1.0))),
                scaled_aabb.lower.z,
                &self.render_options.grid_color,
                aabb.size(),
                model_scale,
            );
        }

        for t in mesh {
            // timed out?
            if let Some(timeout) = timeout {
                let dt = Instant::now() - start_time;

                if dt > timeout {
                    // abort
                    println!("... timeout!");
                    return pic;
                }
            }

            let normal = -t.normal;

            // backface culling
            if glm::dot(&eye_normal, &normal) < 0.0 {
                continue;
            }

            let v = &t.vertices;

            // vertices in clip space (mvp) and world space (model only)
            let v0 = matmul(&mvp, &v[0]);
            let v1 = matmul(&mvp, &v[1]);
            let v2 = matmul(&mvp, &v[2]);

            let v0m = matmul(&model, &v[0]);
            let v1m = matmul(&model, &v[1]);
            let v2m = matmul(&model, &v[2]);

            // triangle bounding box
            let min_x = v0.x.min(v1.x).min(v2.x);
            let min_y = v0.y.min(v1.y).min(v2.y);
            let max_x = v0.x.max(v1.x).max(v2.x);
            let max_y = v0.y.max(v1.y).max(v2.y);

            // triangle bounding box in screen space
            // NOTE(review): the maxima are clamped to pic.width()/pic.height()
            // but the loops below are *inclusive* (..=), so x/y can reach the
            // full dimension; presumably Picture/ZBuffer tolerate or clamp
            // out-of-range coordinates — confirm.
            let smin_x = 0.max(((min_x + 1.0) / 2.0 * pic.width() as f32) as u32);
            let smin_y = 0.max(((min_y + 1.0) / 2.0 * pic.height() as f32) as u32);
            let smax_x = 0.max(pic.width().min(((max_x + 1.0) / 2.0 * pic.width() as f32) as u32));
            let smax_y = 0.max(pic.height().min(((max_y + 1.0) / 2.0 * pic.height() as f32) as u32));

            for y in smin_y..=smax_y {
                for x in smin_x..=smax_x {
                    // normalized screen coordinates [-1,1]
                    let nx = 2.0 * ((x as f32 / pic.width() as f32) - 0.5);
                    let ny = 2.0 * ((y as f32 / pic.height() as f32) - 0.5);

                    let p = Vec2::new(nx, ny);
                    let p0 = v0.xy();
                    let p1 = v1.xy();
                    let p2 = v2.xy();

                    // point-in-triangle test via signed edge functions
                    let inside = edge_fn(&p, &p0, &p1) <= 0.0
                        && edge_fn(&p, &p1, &p2) <= 0.0
                        && edge_fn(&p, &p2, &p0) <= 0.0;

                    if inside {
                        // calculate barycentric coordinates
                        let area = edge_fn(&p0, &p1, &p2);
                        let w0 = edge_fn(&p1, &p2, &p) / area;
                        let w1 = edge_fn(&p2, &p0, &p) / area;
                        let w2 = edge_fn(&p0, &p1, &p) / area;

                        // fragment position in screen space
                        let frag_pos = Vec3::new(
                            w0 * v0.x + w1 * v1.x + w2 * v2.x,
                            w0 * v0.y + w1 * v1.y + w2 * v2.y,
                            w0 * v0.z + w1 * v1.z + w2 * v2.z,
                        );

                        // fragment position in world space
                        let fp = Vec3::new(
                            w0 * v0m.x + w1 * v1m.x + w2 * v2m.x,
                            w0 * v0m.y + w1 * v1m.y + w2 * v2m.y,
                            w0 * v0m.z + w1 * v1m.z + w2 * v2m.z,
                        );

                        //let fp = matmul(&mvp_inv, &frag_pos);

                        // depth test; only shade the nearest fragment
                        if zbuf.test_and_set(x, y, frag_pos.z) {
                            // calculate lightning
                            let light_normal = (self.render_options.light_pos - fp).normalize(); // normal frag pos to light (world space)
                            let view_normal = (self.render_options.view_pos - fp).normalize(); // normal frag pos to view (world space)
                            let reflect_dir = glm::reflect_vec(&-light_normal, &normal);

                            // diffuse
                            let diff_color = glm::dot(&normal, &light_normal).max(0.0)
                                * self.render_options.light_color
                                * 1.0;

                            // specular
                            let spec_color = (glm::dot(&view_normal, &reflect_dir).powf(16.0)
                                * 0.7)
                                * self.render_options.light_color;

                            // merge
                            let mut color =
                                self.render_options.ambient_color + diff_color + spec_color;

                            color.x *= self.render_options.model_color.x;
                            color.y *= self.render_options.model_color.y;
                            color.z *= self.render_options.model_color.z;

                            pic.set(x, y, &(color.x, color.y, color.z, 1.0).into());
                        }
                    }
                }
            }
        }

        if self.render_options.draw_size_hint {
            // dark banner at the bottom with the model dimensions "WxHxD"
            let margin = 3;
            let text_to_height_ratio = 16;

            let text = format!(
                "{}x{}x{}",
                aabb.size().x as i32,
                aabb.size().y as i32,
                aabb.size().z as i32
            );

            let text_size = pic.height() / text_to_height_ratio;

            pic.fill_rect(
                0,
                pic.height() as i32 - (text_size + margin * 2) as i32,
                pic.width() as i32,
                pic.height() as i32,
                &"333333FF".into(),
            );
            pic.stroke_string(
                margin,
                pic.height() - text_size - margin,
                &text,
                text_size as f32,
                &"FFFFFFFF".into(),
            );
        }

        pic
    }
}

/// Signed edge function: >0 when `c` is on one side of segment a->b,
/// <0 on the other; doubles as twice the triangle area in `render`.
fn edge_fn(a: &Vec2, b: &Vec2, c: &Vec2) -> f32 {
    (c.x - a.x) * (b.y - a.y) - (c.y - a.y) * (b.x - a.x)
}

/// Scale factor that makes the projected AABB span [-1,1] in its larger
/// screen dimension (only x/y extents are considered).
fn scale_for_unitsize(mvp: &Mat4, aabb: &AABB) -> f32 {
    // project all eight AABB corners and take their 2D extent
    let edges = [
        matmul(&mvp, &Vec3::new(aabb.lower.x, aabb.lower.y, aabb.lower.z)),
        matmul(&mvp, &Vec3::new(aabb.upper.x, aabb.lower.y, aabb.lower.z)),
        matmul(&mvp, &Vec3::new(aabb.lower.x, aabb.upper.y, aabb.lower.z)),
        matmul(&mvp, &Vec3::new(aabb.upper.x, aabb.upper.y, aabb.lower.z)),
        matmul(&mvp, &Vec3::new(aabb.lower.x, aabb.lower.y, aabb.upper.z)),
        matmul(&mvp, &Vec3::new(aabb.upper.x, aabb.lower.y, aabb.upper.z)),
        matmul(&mvp, &Vec3::new(aabb.lower.x, aabb.upper.y, aabb.upper.z)),
        matmul(&mvp, &Vec3::new(aabb.upper.x, aabb.upper.y, aabb.upper.z)),
    ];

    let mut min = Vec3::new(f32::MAX, f32::MAX, f32::MAX);
    let mut max = Vec3::new(f32::MIN, f32::MIN, f32::MIN);

    for e in &edges {
        min.x = min.x.min(e.x);
        min.y = min.y.min(e.y);
        max.x = max.x.max(e.x);
        max.y = max.y.max(e.y);
    }

    1.0 / ((f32::abs(max.x - min.x)).max(f32::abs(max.y - min.y)) / 2.0)
}

/// Draws grid lines parallel to the y axis at `z`; callers rotate `vp` by
/// 90 degrees to get the perpendicular set.
fn draw_grid(pic: &mut Picture, vp: &Mat4, z: f32, color: &Vec3, model_size: Vec3, scale: f32) {
    // draw grid
    let max_xy = model_size.x.max(model_size.y);
    let grid_color = (color.x, color.y, color.z, 1.0).into();
    let grid_size = 10.0; // mm
    // NOTE(review): `(max_xy * scale) / scale` reduces to `max_xy`; likewise
    // `scale as f32` where `scale: f32` is a no-op cast — possibly leftovers
    // from an earlier unit change, confirm before simplifying.
    let grid_count = ((max_xy * scale) / scale / grid_size + 1.0) as i32;
    let grid_spacing = grid_size * scale as f32;
    let ox = grid_count as f32 * grid_spacing / 2.0;

    for x in 0..=grid_count {
        // grid line end points in world space, centered around the origin
        let p0 = Vec3::new(
            grid_spacing * x as f32 - ox,
            grid_count as f32 * grid_spacing * 0.5,
            z,
        );
        let p1 = Vec3::new(p0.x, -grid_count as f32 * grid_spacing * 0.5, z);

        // to screen space
        let sp0 = matmul(&vp, &p0).xy();
        let sp1 = matmul(&vp, &p1).xy();

        pic.thick_line(
            ((sp0.x + 1.0) / 2.0 * pic.width() as f32) as i32,
            ((sp0.y + 1.0) / 2.0 * pic.height() as f32) as i32,
            ((sp1.x + 1.0) / 2.0 * pic.width() as f32) as i32,
            ((sp1.y + 1.0) / 2.0 * pic.height() as f32) as i32,
            &grid_color,
            1.0,
        );
    }
}
// Copyright 2019 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Utilities to check the status of all the outputs we have stored in
//! the wallet storage and update them.

use std::collections::{HashMap, HashSet};
use uuid::Uuid;

use crate::error::{Error, ErrorKind};
use crate::grin_core::consensus::reward;
use crate::grin_core::core::{Output, TxKernel};
use crate::grin_core::global;
use crate::grin_core::libtx::proof::ProofBuilder;
use crate::grin_core::libtx::reward;
use crate::grin_keychain::{Identifier, Keychain, SwitchCommitmentType};
use crate::grin_util as util;
use crate::grin_util::secp::key::SecretKey;
use crate::grin_util::secp::pedersen;
use crate::internal::keys;
use crate::types::{
	NodeClient, OutputData, OutputStatus, TxLogEntry, TxLogEntryType, WalletBackend, WalletInfo,
};
use crate::{BlockFees, CbData, OutputCommitMapping};

/// Retrieve all of the outputs (doesn't attempt to update from node)
///
/// Optionally filters by transaction (`tx`), hides spent outputs unless
/// `show_spent`, restricts to `parent_key_id`, and paginates the result.
/// Unconfirmed outputs belonging to cancelled transactions are skipped.
pub fn retrieve_outputs<'a, T: ?Sized, C, K>(
	wallet: &mut T,
	keychain_mask: Option<&SecretKey>,
	show_spent: bool,
	tx: Option<&TxLogEntry>,
	parent_key_id: &Identifier,
	pagination_start: Option<u32>,
	pagination_len: Option<u32>,
) -> Result<Vec<OutputCommitMapping>, Error>
where
	T: WalletBackend<'a, C, K>,
	C: NodeClient + 'a,
	K: Keychain + 'a,
{
	// just read the wallet here, no need for a write lock
	let mut outputs = wallet
		.iter()
		.filter(|out| show_spent || out.status != OutputStatus::Spent)
		.collect::<Vec<_>>();

	// only include outputs with a given tx_id if provided
	if let Some(tx) = tx {
		// Match by commitment when the tx has any; otherwise fall back to
		// matching the tx log id.
		let mut tx_commits: HashSet<String> = HashSet::new();
		tx_commits.extend(tx.input_commits.iter().map(|c| util::to_hex(&c.0)));
		tx_commits.extend(tx.output_commits.iter().map(|c| util::to_hex(&c.0)));

		outputs = outputs
			.into_iter()
			.filter(|out| {
				if tx_commits.is_empty() {
					out.tx_log_entry == Some(tx.id)
				} else {
					// "?" is a sentinel that can never match a hex commit
					tx_commits.contains(&out.commit.clone().unwrap_or(String::from("?")))
				}
			})
			.collect::<Vec<_>>();
	}

	outputs = outputs
		.iter()
		.filter(|o| o.root_key_id == *parent_key_id)
		.cloned()
		.collect();

	outputs.sort_by_key(|out| out.n_child);
	let keychain = wallet.keychain(keychain_mask)?;

	// Key: tx_log id; Value: true if active, false if cancelled
	let tx_log_is_active: HashMap<u32, bool> = wallet
		.tx_log_iter()
		.filter(|tx_log| tx_log.parent_key_id == *parent_key_id)
		.map(|tx_log| (tx_log.id, !tx_log.is_cancelled()))
		.collect();

	let mut res: Vec<OutputCommitMapping> = Vec::new();

	for out in outputs {
		// Filtering out Unconfirmed from cancelled (not active) transactions
		if out.status == OutputStatus::Unconfirmed
			&& !tx_log_is_active
				.get(&out.tx_log_entry.clone().unwrap_or(std::u32::MAX))
				.unwrap_or(&true)
		{
			continue;
		}

		// Use the stored commit when available, otherwise recompute it
		// from the keychain.
		let commit = match out.commit.clone() {
			Some(c) => pedersen::Commitment::from_vec(util::from_hex(&c).map_err(|e| {
				ErrorKind::GenericError(format!("Unable to parse HEX commit {}, {}", c, e))
			})?),
			None => keychain
				// TODO: proper support for different switch commitment schemes
				.commit(out.value, &out.key_id, SwitchCommitmentType::Regular)?,
		};
		res.push(OutputCommitMapping {
			output: out,
			commit,
		});
	}

	// Apply pagination last, after all filtering and sorting.
	if pagination_len.is_some() || pagination_start.is_some() {
		let pag_len = pagination_len.unwrap_or(res.len() as u32);
		let pagination_start = pagination_start.unwrap_or(0);
		let mut pag_vec = Vec::new();

		let mut pre_count = 0;
		let mut count = 0;
		for n in res {
			if pre_count >= pagination_start {
				pag_vec.push(n);
				count = count + 1;
				if count == pag_len {
					break;
				}
			}
			pre_count = pre_count + 1;
		}
		Ok(pag_vec)
	} else {
		Ok(res)
	}
}

/// Retrieve all of the transaction entries, or a particular entry
/// if `parent_key_id` is set, only return entries from that key
///
/// Supports filtering by id, slate id and "outstanding only" (unconfirmed
/// sent/received); unconfirmed coinbase entries are always hidden. Results
/// are sorted by creation timestamp and optionally paginated.
pub fn retrieve_txs<'a, T: ?Sized, C, K>(
	wallet: &mut T,
	_keychain_mask: Option<&SecretKey>,
	tx_id: Option<u32>,
	tx_slate_id: Option<Uuid>,
	parent_key_id: Option<&Identifier>,
	outstanding_only: bool,
	pagination_start: Option<u32>,
	pagination_len: Option<u32>,
) -> Result<Vec<TxLogEntry>, Error>
where
	T: WalletBackend<'a, C, K>,
	C: NodeClient + 'a,
	K: Keychain + 'a,
{
	let mut txs: Vec<TxLogEntry> = wallet
		.tx_log_iter()
		.filter(|tx_entry| {
			// Each f_* flag is true when the entry passes that filter.
			let f_pk = match parent_key_id {
				Some(k) => tx_entry.parent_key_id == *k,
				None => true,
			};
			let f_tx_id = match tx_id {
				Some(i) => tx_entry.id == i,
				None => true,
			};
			let f_txs = match tx_slate_id {
				Some(t) => tx_entry.tx_slate_id == Some(t),
				None => true,
			};
			let f_outstanding = match outstanding_only {
				true => {
					!tx_entry.confirmed
						&& (tx_entry.tx_type == TxLogEntryType::TxReceived
							|| tx_entry.tx_type == TxLogEntryType::TxSent)
				}
				false => true,
			};
			// Miners don't like the fact that a coinbase tx can be
			// unconfirmed, so we hide unconfirmed coinbase entries from the
			// REST API and the UI.
			let non_confirmed_coinbase =
				!tx_entry.confirmed && (tx_entry.tx_type == TxLogEntryType::ConfirmedCoinbase);

			f_pk && f_tx_id && f_txs && f_outstanding && !non_confirmed_coinbase
		})
		.collect();

	txs.sort_by_key(|tx| tx.creation_ts);

	if pagination_start.is_some() || pagination_len.is_some() {
		let pag_len = pagination_len.unwrap_or(txs.len() as u32);
		let mut pag_txs: Vec<TxLogEntry> = Vec::new();

		let mut pre_count = 0;
		let mut count = 0;

		let pagination_start = pagination_start.unwrap_or(0);

		for tx in txs {
			if pre_count >= pagination_start {
				pag_txs.push(tx);
				count = count + 1;
				if count == pag_len {
					break;
				}
			}
			pre_count = pre_count + 1;
		}
		Ok(pag_txs)
	} else {
		Ok(txs)
	}
}

/// Cancel transaction and associated outputs
///
/// Locked outputs are returned to Unspent; the tx log entry is re-saved
/// with the matching *Cancelled type. Outputs are deliberately never
/// deleted so they stay mapped to their cancelled transactions.
pub fn cancel_tx_and_outputs<'a, T: ?Sized, C, K>(
	wallet: &mut T,
	keychain_mask: Option<&SecretKey>,
	tx: TxLogEntry,
	outputs: Vec<OutputData>,
	parent_key_id: &Identifier,
) -> Result<(), Error>
where
	T: WalletBackend<'a, C, K>,
	C: NodeClient + 'a,
	K: Keychain + 'a,
{
	let mut batch = wallet.batch(keychain_mask)?;

	for mut o in outputs {
		// unlock locked outputs
		//if o.status == OutputStatus::Unconfirmed { WMC don't delete outputs, we want to keep them mapped to cancelled trasactions
		//	batch.delete(&o.key_id, &o.mmr_index)?;
		//}
		if o.status == OutputStatus::Locked {
			o.status = OutputStatus::Unspent;
			batch.save(o)?;
		}
	}
	let mut tx = tx;
	if tx.tx_type == TxLogEntryType::TxSent {
		tx.tx_type = TxLogEntryType::TxSentCancelled;
	}
	if tx.tx_type == TxLogEntryType::TxReceived {
		tx.tx_type = TxLogEntryType::TxReceivedCancelled;
	}
	batch.save_tx_log_entry(tx, parent_key_id)?;
	batch.commit()?;
	Ok(())
}

/// Retrieve summary info about the wallet
/// caller should refresh first if desired
///
/// Buckets every output of `parent_key_id` into unspent / immature /
/// awaiting confirmation / awaiting finalization / locked totals, relative
/// to the wallet's last confirmed height.
pub fn retrieve_info<'a, T: ?Sized, C, K>(
	wallet: &mut T,
	parent_key_id: &Identifier,
	minimum_confirmations: u64,
) -> Result<WalletInfo, Error>
where
	T: WalletBackend<'a, C, K>,
	C: NodeClient + 'a,
	K: Keychain + 'a,
{
	let current_height = wallet.last_confirmed_height()?;
	// NOTE(review): leftover debug print to stdout — consider a debug!/trace!
	// log macro instead.
	println!("updater: the current_height is {}", current_height);
	let outputs = wallet
		.iter()
		.filter(|out| out.root_key_id == *parent_key_id);

	// Key: tx_log id; Value: true if active, false if cancelled
	let tx_log_cancellation_status: HashMap<u32, bool> = wallet
		.tx_log_iter()
		.filter(|tx_log| tx_log.parent_key_id == *parent_key_id)
		.map(|tx_log| (tx_log.id, !tx_log.is_cancelled()))
		.collect();

	let mut unspent_total = 0;
	let mut immature_total = 0;
	let mut awaiting_finalization_total = 0;
	let mut unconfirmed_total = 0;
	let mut locked_total = 0;

	for out in outputs {
		match out.status {
			OutputStatus::Unspent => {
				if out.is_coinbase && out.lock_height > current_height {
					// Coinbase still under its maturity lock.
					immature_total += out.value;
				} else if out.num_confirmations(current_height) < minimum_confirmations {
					// Treat anything less than minimum confirmations as "unconfirmed".
					unconfirmed_total += out.value;
				} else {
					unspent_total += out.value;
				}
			}
			OutputStatus::Unconfirmed => {
				// We ignore unconfirmed coinbase outputs completely.
				// Outputs of cancelled transactions are skipped as well.
				if let Some(tx_log_id) = out.tx_log_entry {
					if !tx_log_cancellation_status.get(&tx_log_id).unwrap_or(&true) {
						continue;
					}
				}

				if !out.is_coinbase {
					if minimum_confirmations == 0 {
						unconfirmed_total += out.value;
					} else {
						awaiting_finalization_total += out.value;
					}
				}
			}
			OutputStatus::Locked => {
				locked_total += out.value;
			}
			OutputStatus::Spent => {}
		}
	}

	Ok(WalletInfo {
		last_confirmed_height: current_height,
		minimum_confirmations,
		total: unspent_total + unconfirmed_total + immature_total,
		amount_awaiting_finalization: awaiting_finalization_total,
		amount_awaiting_confirmation: unconfirmed_total,
		amount_immature: immature_total,
		amount_locked: locked_total,
		amount_currently_spendable: unspent_total,
	})
}

/// Build a coinbase output and insert into wallet
///
/// Thin wrapper around `receive_coinbase` that packages the result as `CbData`.
pub fn build_coinbase<'a, T: ?Sized, C, K>(
	wallet: &mut T,
	keychain_mask: Option<&SecretKey>,
	block_fees: &BlockFees,
	test_mode: bool,
) -> Result<CbData, Error>
where
	T: WalletBackend<'a, C, K>,
	C: NodeClient + 'a,
	K: Keychain + 'a,
{
	let (out, kern, block_fees) = receive_coinbase(wallet, keychain_mask, block_fees, test_mode)?;

	Ok(CbData {
		output: out,
		kernel: kern,
		key_id: block_fees.key_id,
	})
}

//TODO: Split up the output creation and the wallet insertion
/// Build a coinbase output and the corresponding kernel
pub fn receive_coinbase<'a, T: ?Sized, C, K>(
	wallet: &mut T,
	keychain_mask: Option<&SecretKey>,
	block_fees: &BlockFees,
	test_mode: bool,
) -> Result<(Output, TxKernel, BlockFees), Error>
where
	T: WalletBackend<'a, C, K>,
	C: NodeClient + 'a,
	K: Keychain + 'a,
{
	let height = block_fees.height;
	let lock_height = height + global::coinbase_maturity();
	let key_id = block_fees.key_id();
	let parent_key_id = wallet.parent_key_id();

	// Reuse the key id from block_fees when it still exists in the wallet,
	// otherwise derive the next available one.
	let key_id = match key_id {
		Some(key_id) => match keys::retrieve_existing_key(wallet, key_id, None) {
			Ok(k) => k.0,
			Err(_) => keys::next_available_key(wallet, keychain_mask)?,
		},
		None => keys::next_available_key(wallet, keychain_mask)?,
	};

	{
		// Now acquire the wallet lock and write the new
output. let amount = reward(block_fees.fees, height); let commit = wallet.calc_commit_for_cache(keychain_mask, amount, &key_id)?; let mut batch = wallet.batch(keychain_mask)?; batch.save(OutputData { root_key_id: parent_key_id, key_id: key_id.clone(), n_child: key_id.to_path().last_path_index(), mmr_index: None, commit: commit, value: amount, status: OutputStatus::Unconfirmed, height: height, lock_height: lock_height, is_coinbase: true, tx_log_entry: None, })?; batch.commit()?; } debug!( "receive_coinbase: built candidate output - {:?}, {}", key_id.clone(), key_id, ); let mut block_fees = block_fees.clone(); block_fees.key_id = Some(key_id.clone()); debug!("receive_coinbase: {:?}", block_fees); let keychain = wallet.keychain(keychain_mask)?; let (out, kern) = reward::output( &keychain, &ProofBuilder::new(&keychain), &key_id, block_fees.fees, test_mode, height, )?; Ok((out, kern, block_fees)) }
use chrono::{DateTime, Utc};
use serde::{self, Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;

/// Top-level deserialization target for PHP opcache status JSON
/// (presumably the output of `opcache_get_status()` — TODO confirm against the producer).
#[derive(Serialize, Deserialize, Debug)]
pub(crate) struct Opcache {
    pub opcache_enabled: bool,
    pub cache_full: bool,
    pub restart_pending: bool,
    pub restart_in_progress: bool,
    pub memory_usage: MemoryUsage,
    pub interned_strings_usage: InternedStringsUsage,
    pub opcache_statistics: OpcacheStatistics,
    // Keyed by each cached script's full path.
    pub scripts: HashMap<PathBuf, Scripts>,
}

/// Byte counts for the opcache shared memory segment.
#[derive(Serialize, Deserialize, Debug)]
pub struct MemoryUsage {
    pub used_memory: isize,
    pub free_memory: isize,
    pub wasted_memory: isize,
    pub current_wasted_percentage: f64,
}

/// Usage of the interned-strings buffer.
#[derive(Serialize, Deserialize, Debug)]
pub struct InternedStringsUsage {
    pub buffer_size: isize,
    pub used_memory: isize,
    pub free_memory: isize,
    pub number_of_strings: isize,
}

/// Aggregate hit/miss/restart counters reported by opcache.
#[derive(Serialize, Deserialize, Debug)]
pub struct OpcacheStatistics {
    pub num_cached_scripts: isize,
    pub num_cached_keys: isize,
    pub max_cached_keys: isize,
    pub hits: isize,
    pub start_time: isize,
    pub last_restart_time: isize,
    pub oom_restarts: isize,
    pub hash_restarts: isize,
    pub manual_restarts: isize,
    pub misses: isize,
    pub blacklist_misses: isize,
    pub blacklist_miss_ratio: f64,
    pub opcache_hit_rate: f64,
}

/// Per-script cache entry.
#[derive(Serialize, Deserialize, Debug)]
pub struct Scripts {
    pub full_path: PathBuf,
    pub hits: isize,
    pub memory_consumption: f64,
    // `last_used` arrives as a PHP-formatted date string, e.g. "Mon Jan  1 00:00:00 2024".
    #[serde(with = "php_date_format")]
    pub last_used: DateTime<Utc>,
    pub last_used_timestamp: isize,
    pub timestamp: isize,
}

/// Custom serde (de)serializer for PHP's `date()`-style timestamp strings.
mod php_date_format {
    use chrono::{DateTime, TimeZone, Utc};
    use serde::{self, Deserialize, Deserializer, Serializer};

    // strftime pattern matching PHP's "Day Mon  d HH:MM:SS YYYY" output
    // (%e is day-of-month with leading space).
    const FORMAT: &str = "%a %b %e %T %Y";

    /// Serializes the date back into the same PHP-style string format.
    pub fn serialize<S>(date: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let s = format!("{}", date.format(FORMAT));
        serializer.serialize_str(&s)
    }

    /// Parses a PHP-style date string, assuming it represents UTC.
    // NOTE(review): `Utc.datetime_from_str` is deprecated in chrono >= 0.4.23
    // in favour of `NaiveDateTime::parse_from_str(..).and_utc()` — worth
    // migrating if the chrono dependency is bumped.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Utc.datetime_from_str(&s, FORMAT)
            .map_err(serde::de::Error::custom)
    }
}
use crate::errors::ResT; use crate::rest::pmvd; use crate::hbs::reg_templates; use durations::MILLISECOND as MS; use crossbeam_channel::{Receiver, TryRecvError}; use reqwest::Client; use handlebars::Handlebars; use std::thread; pub struct Work { pub iter_id: usize, pub line_id: usize, pub text: String, } pub enum Message { Work(Work), Quit, } pub struct Worker { pub thread_id: usize, /// The receive side of our mpmc queue. pub rx: Receiver<Message>, } impl Worker { pub fn run(self) -> ResT<()> { // mut self let client = Client::new(); let mut hbs: Handlebars = Handlebars::new(); reg_templates(& mut hbs)?; while let Some(work) = self.get_work()? { // mut work self.process_request(&client, &hbs, &work.text)?; } Ok(()) } fn process_request(&self, client: &Client, hbs: &Handlebars, text: &str) -> ResT<()> { pmvd(&client, &hbs, "bn1", text)?; Ok(()) } fn get_work(&self) -> ResT<Option<Work>> { // &mut self loop { match self.rx.try_recv() { Ok(Message::Work(work)) => { println!("thread: {} iter: {} line: {}", self.thread_id, work.iter_id, work.line_id); return Ok(Some(work)); } Ok(Message::Quit) => { println!("thread: {} quit", self.thread_id); return Ok(None); } Err(TryRecvError::Empty) => { thread::sleep(100*MS); } Err(TryRecvError::Disconnected) => { return Ok(None); } } } } }
use crate::{YarnBuildpack, YarnBuildpackError};
use heroku_nodejs_utils::inv::Release;
use libcnb::build::BuildContext;
use libcnb::data::buildpack::StackId;
use libcnb::data::layer_content_metadata::LayerTypes;
use libcnb::layer::{ExistingLayerStrategy, Layer, LayerData, LayerResult, LayerResultBuilder};
use libcnb::Buildpack;
use libherokubuildpack::download::{download_file, DownloadError};
use libherokubuildpack::fs::move_directory_contents;
use libherokubuildpack::log::log_info;
use libherokubuildpack::tar::decompress_tarball;
use serde::{Deserialize, Serialize};
use std::fs;
use std::os::unix::fs::PermissionsExt;
use std::path::Path;
use tempfile::NamedTempFile;
use thiserror::Error;

/// A layer that downloads and installs the yarn cli
pub(crate) struct CliLayer {
    // Release (version + download URL) selected from the inventory.
    pub release: Release,
}

/// Cache key for the layer: the layer is reused only when all three
/// fields match the previous build (see `existing_layer_strategy`).
#[derive(Deserialize, Serialize, Clone, PartialEq, Eq)]
pub(crate) struct CliLayerMetadata {
    layer_version: String,
    yarn_version: String,
    stack_id: StackId,
}

/// Failure modes of installing the yarn CLI, one per install step.
#[derive(Error, Debug)]
pub(crate) enum CliLayerError {
    #[error("Couldn't create tempfile for yarn CLI: {0}")]
    TempFile(std::io::Error),
    #[error("Couldn't download yarn CLI: {0}")]
    Download(DownloadError),
    #[error("Couldn't decompress yarn CLI: {0}")]
    Untar(std::io::Error),
    #[error("Couldn't move yarn CLI to the target location: {0}")]
    Installation(std::io::Error),
    #[error("Couldn't set CLI permissions: {0}")]
    Permissions(std::io::Error),
}

// Bump to invalidate previously cached layers when the install logic changes.
const LAYER_VERSION: &str = "1";

impl Layer for CliLayer {
    type Buildpack = YarnBuildpack;
    type Metadata = CliLayerMetadata;

    /// Yarn is needed at build time and at launch, and is worth caching.
    fn types(&self) -> LayerTypes {
        LayerTypes {
            build: true,
            launch: true,
            cache: true,
        }
    }

    /// Downloads the release tarball, extracts it into the layer, flattens
    /// the archive's top-level directory into the layer root, and marks the
    /// `yarn` binary executable.
    fn create(
        &self,
        context: &BuildContext<Self::Buildpack>,
        layer_path: &Path,
    ) -> Result<LayerResult<Self::Metadata>, YarnBuildpackError> {
        let yarn_tgz = NamedTempFile::new().map_err(CliLayerError::TempFile)?;

        log_info(format!("Downloading yarn {}", self.release.version));
        download_file(&self.release.url, yarn_tgz.path()).map_err(CliLayerError::Download)?;

        log_info(format!("Extracting yarn {}", self.release.version));
        decompress_tarball(&mut yarn_tgz.into_file(), layer_path).map_err(CliLayerError::Untar)?;

        log_info(format!("Installing yarn {}", self.release.version));

        // Yarn tarballs historically unpack either to `package/` (berry) or
        // `yarn-v<version>/` (classic); handle both layouts.
        let dist_name = if layer_path.join("package").exists() {
            "package".to_string()
        } else {
            format!("yarn-v{}", self.release.version)
        };

        move_directory_contents(layer_path.join(dist_name), layer_path)
            .map_err(CliLayerError::Installation)?;

        fs::set_permissions(
            layer_path.join("bin").join("yarn"),
            fs::Permissions::from_mode(0o755),
        )
        .map_err(CliLayerError::Permissions)?;

        LayerResultBuilder::new(CliLayerMetadata::current(self, context)).build()
    }

    /// Reuses the cached layer only when the metadata (yarn version, stack,
    /// layer version) is unchanged; otherwise rebuilds from scratch.
    fn existing_layer_strategy(
        &self,
        context: &BuildContext<Self::Buildpack>,
        layer_data: &LayerData<Self::Metadata>,
    ) -> Result<ExistingLayerStrategy, <Self::Buildpack as Buildpack>::Error> {
        if layer_data.content_metadata.metadata == CliLayerMetadata::current(self, context) {
            log_info(format!("Reusing yarn {}", self.release.version));
            Ok(ExistingLayerStrategy::Keep)
        } else {
            Ok(ExistingLayerStrategy::Recreate)
        }
    }
}

impl CliLayerMetadata {
    /// Metadata describing the layer this build would produce.
    fn current(layer: &CliLayer, context: &BuildContext<YarnBuildpack>) -> Self {
        CliLayerMetadata {
            yarn_version: layer.release.version.to_string(),
            stack_id: context.stack_id.clone(),
            layer_version: String::from(LAYER_VERSION),
        }
    }
}
pub fn call() { println!("Call function."); }
use libc::c_char; use librespot::metadata::Artist; use metadata::SpMetadata; #[allow(non_camel_case_types)] pub type sp_artist = SpMetadata<Artist>; #[no_mangle] pub unsafe extern "C" fn sp_artist_is_loaded(c_artist: *mut sp_artist) -> bool { let artist = &*c_artist; artist.is_loaded() } #[no_mangle] pub unsafe extern "C" fn sp_artist_name(c_artist: *mut sp_artist) -> *const c_char { let artist = &mut *c_artist; let name = artist.get() .map(|metadata| &metadata.name as &str) .unwrap_or(""); artist.intern(name).as_ptr() }
use actix::Handler; pub type Message = crate::pb::teddy::Message; impl actix::Message for Message { type Result = (); } impl Handler<Message> for super::session::WsChatSession { type Result = (); fn handle(&mut self, msg: Message, ctx: &mut Self::Context) { ctx.binary(msg.body); } }
/// Decodes a raw Stream into Packets pub mod decode; /// Encodes Packets into a raw Stream pub mod encode; /// Describes a stream of semi-parsed data from a connection pub mod stream; /// Describes pieces of information from the network pub mod packet; pub use self::stream::Stream;
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
// NOTE(review): this file is machine-generated (AutoRust) from the Azure
// Import/Export REST API specification — regenerate rather than hand-edit.
use serde::{Deserialize, Serialize};

/// Standard Azure error envelope returned on failed requests.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<error_response::Error>,
}
pub mod error_response {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Error {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub code: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub message: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub target: Option<String>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub details: Vec<serde_json::Value>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub innererror: Option<serde_json::Value>,
    }
}

/// Request body for creating/replacing a job.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PutJobParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<JobDetails>,
}

/// Full property bag of an import/export job.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JobDetails {
    #[serde(rename = "storageAccountId", default, skip_serializing_if = "Option::is_none")]
    pub storage_account_id: Option<String>,
    #[serde(rename = "jobType", default, skip_serializing_if = "Option::is_none")]
    pub job_type: Option<String>,
    #[serde(rename = "returnAddress", default, skip_serializing_if = "Option::is_none")]
    pub return_address: Option<ReturnAddress>,
    #[serde(rename = "returnShipping", default, skip_serializing_if = "Option::is_none")]
    pub return_shipping: Option<ReturnShipping>,
    #[serde(rename = "shippingInformation", default, skip_serializing_if = "Option::is_none")]
    pub shipping_information: Option<ShippingInformation>,
    #[serde(rename = "deliveryPackage", default, skip_serializing_if = "Option::is_none")]
    pub delivery_package: Option<DeliveryPackageInformation>,
    #[serde(rename = "returnPackage", default, skip_serializing_if = "Option::is_none")]
    pub return_package: Option<PackageInfomation>,
    #[serde(rename = "diagnosticsPath", default, skip_serializing_if = "Option::is_none")]
    pub diagnostics_path: Option<String>,
    #[serde(rename = "logLevel", default, skip_serializing_if = "Option::is_none")]
    pub log_level: Option<String>,
    #[serde(rename = "backupDriveManifest", default, skip_serializing_if = "Option::is_none")]
    pub backup_drive_manifest: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<String>,
    #[serde(rename = "cancelRequested", default, skip_serializing_if = "Option::is_none")]
    pub cancel_requested: Option<bool>,
    #[serde(rename = "percentComplete", default, skip_serializing_if = "Option::is_none")]
    pub percent_complete: Option<i32>,
    #[serde(rename = "incompleteBlobListUri", default, skip_serializing_if = "Option::is_none")]
    pub incomplete_blob_list_uri: Option<String>,
    #[serde(rename = "driveList", default, skip_serializing_if = "Vec::is_empty")]
    pub drive_list: Vec<DriveStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub export: Option<Export>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "encryptionKey", default, skip_serializing_if = "Option::is_none")]
    pub encryption_key: Option<EncryptionKeyDetails>,
}

/// Key-encryption-key (KEK) configuration for the job.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EncryptionKeyDetails {
    #[serde(rename = "kekType", default, skip_serializing_if = "Option::is_none")]
    pub kek_type: Option<encryption_key_details::KekType>,
    #[serde(rename = "kekUrl", default, skip_serializing_if = "Option::is_none")]
    pub kek_url: Option<String>,
    #[serde(rename = "kekVaultResourceID", default, skip_serializing_if = "Option::is_none")]
    pub kek_vault_resource_id: Option<String>,
}
pub mod encryption_key_details {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum KekType {
        MicrosoftManaged,
        CustomerManaged,
    }
}

/// Managed-identity information attached to a resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IdentityDetails {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<identity_details::Type>,
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
    pub principal_id: Option<String>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
}
pub mod identity_details {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        None,
        SystemAssigned,
        UserAssigned,
    }
}

/// Request body for patching an existing job.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateJobParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<update_job_parameters::Properties>,
}
pub mod update_job_parameters {
    use super::*;
    // Subset of JobDetails that is mutable after creation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "cancelRequested", default, skip_serializing_if = "Option::is_none")]
        pub cancel_requested: Option<bool>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub state: Option<String>,
        #[serde(rename = "returnAddress", default, skip_serializing_if = "Option::is_none")]
        pub return_address: Option<ReturnAddress>,
        #[serde(rename = "returnShipping", default, skip_serializing_if = "Option::is_none")]
        pub return_shipping: Option<ReturnShipping>,
        #[serde(rename = "deliveryPackage", default, skip_serializing_if = "Option::is_none")]
        pub delivery_package: Option<DeliveryPackageInformation>,
        #[serde(rename = "logLevel", default, skip_serializing_if = "Option::is_none")]
        pub log_level: Option<String>,
        #[serde(rename = "backupDriveManifest", default, skip_serializing_if = "Option::is_none")]
        pub backup_drive_manifest: Option<bool>,
        #[serde(rename = "driveList", default, skip_serializing_if = "Vec::is_empty")]
        pub drive_list: Vec<DriveStatus>,
    }
}

/// One page of job listings, with a continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ListJobsResponse {
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<JobResponse>,
}

/// A job resource as returned by the service (ARM envelope + properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct JobResponse {
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<JobDetails>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<IdentityDetails>,
}

/// A REST operation exposed by the provider (for the operations listing).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    pub name: String,
    pub display: operation::Display,
}
pub mod operation {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}

/// A shipping location (Azure datacenter) drives can be sent to.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Location {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<location::Properties>,
}
pub mod location {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "recipientName", default, skip_serializing_if = "Option::is_none")]
        pub recipient_name: Option<String>,
        #[serde(rename = "streetAddress1", default, skip_serializing_if = "Option::is_none")]
        pub street_address1: Option<String>,
        #[serde(rename = "streetAddress2", default, skip_serializing_if = "Option::is_none")]
        pub street_address2: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub city: Option<String>,
        #[serde(rename = "stateOrProvince", default, skip_serializing_if = "Option::is_none")]
        pub state_or_province: Option<String>,
        #[serde(rename = "postalCode", default, skip_serializing_if = "Option::is_none")]
        pub postal_code: Option<String>,
        #[serde(rename = "countryOrRegion", default, skip_serializing_if = "Option::is_none")]
        pub country_or_region: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub phone: Option<String>,
        #[serde(rename = "additionalShippingInformation", default, skip_serializing_if = "Option::is_none")]
        pub additional_shipping_information: Option<String>,
        #[serde(rename = "supportedCarriers", default, skip_serializing_if = "Vec::is_empty")]
        pub supported_carriers: Vec<String>,
        #[serde(rename = "alternateLocations", default, skip_serializing_if = "Vec::is_empty")]
        pub alternate_locations: Vec<String>,
    }
}

/// Address drives are shipped back to (required fields are mandatory in the API).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReturnAddress {
    #[serde(rename = "recipientName")]
    pub recipient_name: String,
    #[serde(rename = "streetAddress1")]
    pub street_address1: String,
    #[serde(rename = "streetAddress2", default, skip_serializing_if = "Option::is_none")]
    pub street_address2: Option<String>,
    pub city: String,
    #[serde(rename = "stateOrProvince", default, skip_serializing_if = "Option::is_none")]
    pub state_or_province: Option<String>,
    #[serde(rename = "postalCode")]
    pub postal_code: String,
    #[serde(rename = "countryOrRegion")]
    pub country_or_region: String,
    pub phone: String,
    pub email: String,
}

/// Carrier account used for return shipment.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ReturnShipping {
    #[serde(rename = "carrierName")]
    pub carrier_name: String,
    #[serde(rename = "carrierAccountNumber")]
    pub carrier_account_number: String,
}

/// Datacenter shipping address for inbound drives.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ShippingInformation {
    #[serde(rename = "recipientName", default, skip_serializing_if = "Option::is_none")]
    pub recipient_name: Option<String>,
    #[serde(rename = "streetAddress1", default, skip_serializing_if = "Option::is_none")]
    pub street_address1: Option<String>,
    #[serde(rename = "streetAddress2", default, skip_serializing_if = "Option::is_none")]
    pub street_address2: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub city: Option<String>,
    #[serde(rename = "stateOrProvince", default, skip_serializing_if = "Option::is_none")]
    pub state_or_province: Option<String>,
    #[serde(rename = "postalCode", default, skip_serializing_if = "Option::is_none")]
    pub postal_code: Option<String>,
    #[serde(rename = "countryOrRegion", default, skip_serializing_if = "Option::is_none")]
    pub country_or_region: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub phone: Option<String>,
    #[serde(rename = "additionalInformation", default, skip_serializing_if = "Option::is_none")]
    pub additional_information: Option<String>,
}

// NOTE(review): "Infomation" typo comes from the upstream API spec; the
// name is part of the public interface and must not be corrected here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PackageInfomation {
    #[serde(rename = "carrierName")]
    pub carrier_name: String,
    #[serde(rename = "trackingNumber")]
    pub tracking_number: String,
    #[serde(rename = "driveCount")]
    pub drive_count: i32,
    #[serde(rename = "shipDate")]
    pub ship_date: String,
}

/// Tracking details for the inbound (delivery) package.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DeliveryPackageInformation {
    #[serde(rename = "carrierName")]
    pub carrier_name: String,
    #[serde(rename = "trackingNumber")]
    pub tracking_number: String,
    #[serde(rename = "driveCount", default, skip_serializing_if = "Option::is_none")]
    pub drive_count: Option<i64>,
    #[serde(rename = "shipDate", default, skip_serializing_if = "Option::is_none")]
    pub ship_date: Option<String>,
}

/// Per-drive state and transfer progress.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DriveStatus {
    #[serde(rename = "driveId", default, skip_serializing_if = "Option::is_none")]
    pub drive_id: Option<String>,
    #[serde(rename = "bitLockerKey", default, skip_serializing_if = "Option::is_none")]
    pub bit_locker_key: Option<String>,
    #[serde(rename = "manifestFile", default, skip_serializing_if = "Option::is_none")]
    pub manifest_file: Option<String>,
    #[serde(rename = "manifestHash", default, skip_serializing_if = "Option::is_none")]
    pub manifest_hash: Option<String>,
    #[serde(rename = "driveHeaderHash", default, skip_serializing_if = "Option::is_none")]
    pub drive_header_hash: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub state: Option<drive_status::State>,
    #[serde(rename = "copyStatus", default, skip_serializing_if = "Option::is_none")]
    pub copy_status: Option<String>,
    #[serde(rename = "percentComplete", default, skip_serializing_if = "Option::is_none")]
    pub percent_complete: Option<i64>,
    #[serde(rename = "verboseLogUri", default, skip_serializing_if = "Option::is_none")]
    pub verbose_log_uri: Option<String>,
    #[serde(rename = "errorLogUri", default, skip_serializing_if = "Option::is_none")]
    pub error_log_uri: Option<String>,
    #[serde(rename = "manifestUri", default, skip_serializing_if = "Option::is_none")]
    pub manifest_uri: Option<String>,
    #[serde(rename = "bytesSucceeded", default, skip_serializing_if = "Option::is_none")]
    pub bytes_succeeded: Option<i64>,
}
pub mod drive_status {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum State {
        Specified,
        Received,
        NeverReceived,
        Transferring,
        Completed,
        CompletedMoreInfo,
        ShippedBack,
    }
}

/// Export-job blob selection: explicit list or path to a blob containing it.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Export {
    #[serde(rename = "blobList", default, skip_serializing_if = "Option::is_none")]
    pub blob_list: Option<export::BlobList>,
    #[serde(rename = "blobListBlobPath", default, skip_serializing_if = "Option::is_none")]
    pub blob_list_blob_path: Option<String>,
}
pub mod export {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct BlobList {
        #[serde(rename = "blobPath", default, skip_serializing_if = "Vec::is_empty")]
        pub blob_path: Vec<String>,
        #[serde(rename = "blobPathPrefix", default, skip_serializing_if = "Vec::is_empty")]
        pub blob_path_prefix: Vec<String>,
    }
}

/// List of supported shipping locations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LocationsResponse {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Location>,
}

/// BitLocker keys for all drives of a job.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GetBitLockerKeysResponse {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<DriveBitLockerKey>,
}

/// A single drive's BitLocker key.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DriveBitLockerKey {
    #[serde(rename = "bitLockerKey", default, skip_serializing_if = "Option::is_none")]
    pub bit_locker_key: Option<String>,
    #[serde(rename = "driveId", default, skip_serializing_if = "Option::is_none")]
    pub drive_id: Option<String>,
}

/// List of provider operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ListOperationsResponse {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
}

/// Standard ARM systemData (creation/modification audit fields).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SystemData {
    #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")]
    pub created_by: Option<String>,
    #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")]
    pub created_by_type: Option<system_data::CreatedByType>,
    #[serde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")]
    pub created_at: Option<String>,
    #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by: Option<String>,
    #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by_type: Option<system_data::LastModifiedByType>,
    #[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_at: Option<String>,
}
pub mod system_data {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CreatedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum LastModifiedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
    }
}
pub const EXT_INSTANCE: &str = "instance"; pub const EXT_APPLICATION: &str = "application"; pub const EXT_DEVICE: &str = "device";
use std::process::{Command, Output};

use config::Config;
use report::scan_report::ScanReport;
use CONFIG;

#[cfg(target_os = "windows")]
pub const IKE_SCAN_BIN: &'static str = "ike-scan.exe";
#[cfg(not(target_os = "windows"))]
pub const IKE_SCAN_BIN: &'static str = "./ike-scan";

const SUCCESS_MSG: &'static str = "[✓] Reached endpoint";
const FAILURE_MSG: &'static str = "[!] Unable to reach endpoint";

/// Runs ike-scan twice against the configured endpoint — once with the
/// configured source port and once with NAT-T — recording reachability
/// results in `report`. Progress and results are printed to stdout.
pub fn run(report: &mut ScanReport) {
    if let Ok(output) = exec_ike_scan(&CONFIG) {
        if CONFIG.verbose {
            println!("ike-scan executed. Processing result...");
        }
        match analyze_output(&output) {
            Ok(msg) => {
                println!("{}", msg);
                report.endpoint_reachable()
            }
            Err(msg) => println!("{}", msg),
        }
    } else {
        println!("ike-scan failed");
    }

    if let Ok(output) = exec_ike_scan(&Config::with_nat_t()) {
        if CONFIG.verbose {
            println!("ike-scan executed with NAT-T. Processing result...");
        }
        match analyze_output(&output) {
            Ok(msg) => {
                println!("{} using NAT-T", msg);
                report.endpoint_reachable_using_nat_t()
            }
            Err(msg) => println!("{} using NAT-T", msg),
        }
    } else {
        println!("ike-scan with NAT-T failed");
    }
}

/// Invokes the ike-scan binary for `conf` and returns its captured output.
///
/// Returns `Err` with a diagnostic message if the process could not be
/// spawned. (Previously this `expect`ed — panicking despite the `Result`
/// signature, and eagerly formatting the panic message on every call;
/// callers already handle the `Err` arm by printing a failure line.)
fn exec_ike_scan(conf: &Config) -> Result<Output, String> {
    let mut cmd = Command::new(IKE_SCAN_BIN);
    if conf.use_nat_t {
        cmd.arg("--nat-t");
    } else {
        cmd.arg("--sport").arg(&conf.source_port.to_string());
    }
    cmd.arg(&conf.vpn_endpoint_ip);
    cmd.output().map_err(|e| {
        format!(
            "Failed to run {} against endpoint {}: {}",
            IKE_SCAN_BIN, &conf.vpn_endpoint_ip, e
        )
    })
}

/// Classifies a completed ike-scan run: `Ok(SUCCESS_MSG)` when the endpoint
/// answered ("1 returned notify" in stdout), `Err(FAILURE_MSG)` otherwise.
///
/// # Panics
/// Panics (deliberately aborting the analysis) when ike-scan itself exited
/// with a non-zero status, after dumping its exit code and stderr.
fn analyze_output(output: &Output) -> Result<String, String> {
    if !output.status.success() {
        println!("ike-scan was a failure - exit code {:?}", output.status.code().expect("Cannot get exit code of ike-scan execution (process must have been killed by signal)"));
        println!(
            "ike-scan stderr output: {:?}",
            String::from_utf8_lossy(&output.stderr)
        );
        panic!("Aborting analysis due to failed ike-scan run");
    }
    let string_to_analyze = String::from_utf8_lossy(&output.stdout);
    // "1 returned notify" in ike-scan's stdout means the endpoint responded.
    let ret = if string_to_analyze.contains("1 returned notify") {
        Ok(SUCCESS_MSG.to_owned())
    } else {
        Err(FAILURE_MSG.to_owned())
    };
    if CONFIG.verbose {
        println!("Response was:\n{:?}", &string_to_analyze);
    }
    ret
}
// Watches a local directory tree and mirrors changes (uploads, mkdirs,
// deletions) to a remote host over SSH/SCP. File uploads are debounced for
// 3 seconds through a channel before being transferred.
extern crate notify;
use notify::{Watcher, RecursiveMode, RawEvent, raw_watcher};
//use std::collections::HashMap;
//use std::iter::Map;
//use std::sync::{Mutex,Arc};
//use std::error::Error;
use std::sync::mpsc::channel;
use std::io::prelude::*;
use std::net::{TcpStream};
use ssh2::Session;
use std::io::stdin;
use std::path::Path;
use std::path::PathBuf;
use std::fs::File;
use indicatif::{ProgressBar, ProgressStyle};
use chrono::prelude::*;
use std::{thread};

// A pending upload/removal operation:
struct DirOp {
    // local path of the changed file
    l: PathBuf,
    // remote path it maps to
    r: PathBuf,
    // timestamp (milliseconds) when the event was observed, used for debouncing
    t: i64,
}

fn main() {
    // Authenticate once up-front; the returned credentials are reused for every
    // subsequent SSH/SCP connection.
    let (addr, username, password) = ssh_auth();
    let (tx, rx) = channel();
    //let mut watcher = watcher(tx, Duration::from_secs(10)).unwrap();
    let mut watcher = raw_watcher(tx).unwrap();
    // Prompt for the local directory to watch and the remote directory to
    // mirror into. (Prompts are intentionally left in the original language.)
    println!("(本地)notify dir:");
    let mut notify_dir = String::new();
    let mut upload_dir: String = String::new();
    stdin().read_line(&mut notify_dir).unwrap();
    println!("(服务器)upload dir:");
    stdin().read_line(&mut upload_dir).unwrap();
    watcher.watch(notify_dir.trim(), RecursiveMode::Recursive).unwrap();
    let loca_path_buf: PathBuf = PathBuf::from(notify_dir.trim());
    // Second channel: watcher thread -> main-loop debouncer.
    let (f_tx, f_rx) = channel();
    let f_send = f_tx.clone();
    // Clone credentials for the watcher thread (it issues mkdir/rm commands).
    let f_addr = addr.clone();
    let f_username = username.clone();
    let f_password = password.clone();
    // Watcher thread: translate filesystem events into SSH commands or queued
    // upload operations.
    thread::spawn(move || {
        loop {
            match rx.recv() {
                Ok(RawEvent { path: Some(path), op: Ok(op), cookie }) => {
                    //let fname = path.as_path().file_name().unwrap().to_os_string();
                    let lit = path.ancestors();
                    // Map the local path onto the remote upload directory.
                    let remote_file: PathBuf = dir_filter(&upload_dir.trim().to_string(), lit, &loca_path_buf);
                    // let ff =format!("{}", format!("{:?}",fname)
                    //     .trim_start_matches(|c| c=='"' || c=='\'')
                    //     .trim_end_matches(|c| c=='"' || c=='\''));
                    if op.contains(notify::op::WRITE) || op.contains(notify::op::CREATE) {
                        if path.is_dir() {
                            // New/changed directory: create it remotely.
                            let cmd = format!("mkdir -p {}", remote_file.display().to_string());
                            ssh_cmd(&f_addr, &f_username, &f_password, &cmd);
                        } else if path.is_file() {
                            let location_now = Local::now();
                            let mills = location_now.timestamp_millis(); // milliseconds
                            // Queue the file for debounced upload instead of
                            // uploading immediately.
                            let wdir = DirOp { l: path.to_path_buf(), r: remote_file.to_path_buf(), t: mills };
                            f_send.send(wdir).unwrap();
                            //ssh_upload(&addr, &username, &password, remote_file.to_path_buf(), &path.as_path().to_path_buf());
                        } else {
                            // Neither file nor dir (e.g. already gone): fall
                            // back to creating the remote directory.
                            let cmd = format!("mkdir -p {}", remote_file.display().to_string());
                            ssh_cmd(&f_addr, &f_username, &f_password, &cmd);
                        }
                    } else if op.contains(notify::op::RENAME) || op.contains(notify::op::REMOVE) {
                        if !path.exists() {
                            // Local path vanished: delete the remote mirror.
                            let cmd = format!("rm -rf {}", remote_file.display().to_string());
                            ssh_cmd(&f_addr, &f_username, &f_password, &cmd);
                        } else if path.is_file() {
                            // Rename target still exists as a file: re-upload it.
                            let location_now = Local::now();
                            let mills = location_now.timestamp_millis(); // milliseconds
                            let wdir = DirOp { l: path.to_path_buf(), r: remote_file.to_path_buf(), t: mills };
                            f_send.send(wdir).unwrap();
                            //ssh_upload(&addr, &username, &password, remote_file.to_path_buf(), &path.as_path().to_path_buf());
                        } else {
                            let cmd = format!("mkdir -p {}", remote_file.display().to_string());
                            ssh_cmd(&f_addr, &f_username, &f_password, &cmd);
                        }
                    }
                    println!("{:?} {:?} ({:?})", op, path, cookie)
                },
                Ok(event) => println!("{:?}", event),
                Err(e) => println!("watch error: {:?}", e),
            }
        }
    });
    // let wmaps=Arc::new(Mutex::new(HashMap::new()));
    // let cmu_map=Arc::clone(&wmaps);
    // //let ff_send = f_tx.clone();
    // thread::spawn(move || {
    //     loop{
    //         (*cmu_map).lock().unwrap().iter().for_each( |d: (&String, &DirOp)| {
    //             let wdir=DirOp{ l: d.1.l.clone(), r: d.1.r.clone(), t: d.1.t };
    //             f_tx.send(wdir).unwrap();
    //             (*cmu_map).lock().unwrap().remove(d.0);
    //         });
    //     }
    // });
    // Debounce loop: only upload an operation once it is at least 3 s old;
    // younger operations are re-queued.
    // NOTE(review): try_recv + empty Err arm makes this a busy-wait that spins
    // a CPU core; consider recv_timeout — confirm before changing behavior.
    loop {
        match f_rx.try_recv() {
            Ok(dop) => {
                let location_now = Local::now();
                if 3000 <= (location_now.timestamp_millis() - dop.t) {
                    ssh_upload(&addr, &username, &password, dop.r, &dop.l);
                } else {
                    // Not old enough yet: push it back onto the queue.
                    f_tx.send(dop).unwrap();
                    // let mut mu_map=wmaps.lock().unwrap();
                    // mu_map.insert(dop.l.display().to_string(), dop);
                }
            },
            Err(_) => {}
        }
    }
}

///
/// Run a single shell command on the remote host over SSH and print its output.
///
fn ssh_cmd(addr: &str, username: &str, password: &str, cmd: &str) {
    // A fresh TCP + SSH session is established per command.
    let tcp = TcpStream::connect(addr).expect("Couldn't connect to the server...");
    let mut sess = Session::new().unwrap();
    sess.set_tcp_stream(tcp);
    sess.handshake().unwrap();
    sess.userauth_password(&username.trim(), &password.trim()).unwrap();
    assert!(sess.authenticated());
    match sess.channel_session() {
        Ok(mut channel) => {
            channel.exec(cmd).unwrap();
            let mut s = String::new();
            channel.read_to_string(&mut s).unwrap();
            println!("cmd: {} status: {}", cmd, s);
            channel.wait_close().unwrap();
            channel.exit_status().unwrap();
        },
        Err(error) => println!("ssh_cmd error:{}", error),
    };
}

///
/// Prompt for host address, user name and password, verify them by opening an
/// SSH session (running `free -m` as a smoke test), and return the trimmed
/// credentials as `(addr, username, password)`.
///
// NOTE(review): the lifetime parameter `'a` is unused — confirm it can be
// dropped in a follow-up.
fn ssh_auth<'a>() -> (String, String, String) {
    let mut add = String::new();
    let mut username = String::new();
    let mut password = String::new();
    println!("add host:");
    stdin().read_line(&mut add).unwrap();
    println!("user name:");
    stdin().read_line(&mut username).unwrap();
    // Password is read in plain text from stdin (no masking).
    println!("password:");
    stdin().read_line(&mut password).unwrap();
    // let mut pwd=[0];
    // while pwd[0]!=10 {
    //     print!("\x1b[H\x1b[k");
    //     if let Ok(_) = stdin().lock().read(&mut pwd[..]) {
    //         //println!("{:?}",pwd);
    //         //if pwd[0]!=10{
    //         let ch = std::str::from_utf8(&pwd).unwrap();
    //         for c in ch.chars().into_iter(){
    //             password.push(c);
    //         }
    //     }else{
    //         // println!("enter Error!");
    //         break;
    //     }
    //     //print!("\x1b[2J");
    // }
    //stdin().read_line(&mut password).unwrap();
    println!("ssh {}@{} {}", username.trim(), add.trim(), password.trim());
    let tcp = TcpStream::connect(add.trim()).expect("Couldn't connect to the server...");
    let mut sess = Session::new().unwrap();
    sess.set_tcp_stream(tcp);
    sess.handshake().unwrap();
    sess.userauth_password(&username.trim(), &password.trim()).unwrap();
    assert!(sess.authenticated());
    match sess.channel_session() {
        Ok(mut channel) => {
            // Smoke test: run a harmless command and echo its output.
            channel.exec("free -m").unwrap();
            let mut s = String::new();
            channel.read_to_string(&mut s).unwrap();
            println!("{}", s);
            channel.wait_close().unwrap();
            println!("{}", channel.exit_status().unwrap());
        },
        Err(error) => println!("ssh_auth error:{}", error),
    };
    (add.trim().to_string(), username.trim().to_string(), password.trim().to_string())
}

///
/// Upload a local file to the remote server via SCP, showing a progress bar.
///
fn ssh_upload(addr: &str, username: &str, password: &str, remote_path: PathBuf, loca_file: &PathBuf) {
    let tcp = TcpStream::connect(addr).expect("Couldn't connect to the server...");
    let mut sess = Session::new().unwrap();
    sess.set_tcp_stream(tcp);
    sess.handshake().unwrap();
    sess.userauth_password(username, password).unwrap();
    let mut file_in = File::open(loca_file.as_path().display().to_string()).unwrap();
    let len = file_in.metadata().unwrap().len();
    //file_in.stream_len().unwrap();
    let pb = ProgressBar::new(len);
    // Remote file is created mode 0644 with the exact local length.
    match sess.scp_send(Path::new(&remote_path.display().to_string()), 0o644, len, None) {
        Ok(mut remote_file) => {
            let mut buffer = [0u8; 4096];
            let mut up_size: u64 = 0;
            pb.set_style(ProgressStyle::default_bar()
                .template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes_per_sec}|{bytes}/{total_bytes} ({eta})")
                .progress_chars("#>-"));
            //println!("upload file form {} to {}:{}",loca_file.display().to_string(),addr,remote_path.display().to_string());
            // NOTE(review): this loop treats any short read as EOF and assumes
            // `write` writes the whole buffer — both hold for regular local
            // files in practice but are not guaranteed by the I/O contract.
            loop {
                let nbytes = file_in.read(&mut buffer).unwrap();
                remote_file.write(&buffer[..nbytes]).unwrap();
                up_size += std::cmp::min(nbytes as u64, len);
                pb.set_position(up_size);
                if nbytes < buffer.len() {
                    remote_file.send_eof().unwrap();
                    remote_file.wait_eof().unwrap();
                    break;
                }
            }
            pb.finish_with_message("ok");
            remote_file.close().unwrap();
            remote_file.wait_close().unwrap();
            println!("upload file: {} size:{} Ok!", loca_file.as_path().display().to_string(), len);
        },
        Err(error) => println!("ssh_upload error:{}", error),
    };
}

///
/// Build the remote path for a changed local path: keep the path components
/// below the watched root and re-root them under `root`.
///
fn dir_filter(root: &String, lit: std::path::Ancestors, loca_path_buf: &PathBuf) -> PathBuf {
    let mut remote_file = PathBuf::new();
    // Push the remote root directory first.
    remote_file.push(Path::new(&root));
    let mut tmp_p = PathBuf::new();
    // Then filter each ancestor level, keeping only components that are not
    // part of the watched local root. Ancestors iterate deepest-first, so the
    // kept components end up in reverse order in `tmp_p`.
    for i in lit {
        if !loca_path_buf.starts_with(i) {
            tmp_p.push(i.file_name().to_owned().unwrap());
        }
    }
    // Walking `tmp_p`'s ancestors restores the original top-down order.
    for i in tmp_p.ancestors() {
        if i.file_name() != None {
            remote_file.push(i.file_name().unwrap());
        }
    }
    remote_file
}

// Exercises the same ancestor-filtering logic as `dir_filter` on fixed paths.
#[test]
fn test_path() {
    let path: PathBuf = PathBuf::from("/tmp/user");
    let path1 = PathBuf::from("/tmp/user/tmp/file1");
    let it = path1.ancestors();
    let mut new_path = PathBuf::new();
    new_path.push("/Users");
    let mut tmp_p = PathBuf::new();
    for i in it {
        //let fname = i.file_name();
        // println!("{:?} {:?}",path.starts_with(i),fname);
        if !path.starts_with(i) {
            tmp_p.push(i.file_name().unwrap());
        }
    }
    for i in tmp_p.ancestors() {
        if i.file_name() != None {
            println!("{:?}", i.file_name().to_owned().unwrap());
            new_path.push(i.file_name().to_owned().unwrap());
        }
    }
    println!("-- {}", new_path.display());
}

// Prints the 256-color ANSI palette; purely visual, no assertions.
#[test]
fn test_ansi() {
    print!("\x1b[2J");
    for i in 0..256 {
        if i > 0 && i % 16 == 0 {
            print!("\n");
        }
        print!("\x1b[38;5;{}m【{}】", i, i);
    }
    print!("\x1b[32m完成\x1b[0m\n");
}

// NOTE(review): `use std::collections::HashMap` is commented out at the top of
// this file, so this test will not compile as-is — confirm and restore the
// import if the test is meant to run.
#[test]
fn test_map() {
    let mut wmaps = HashMap::new();
    wmaps.insert("one", 1);
    wmaps.insert("tow", 2);
    wmaps.insert("one", 0);
    wmaps.remove("k");
    println!("{:?}", wmaps);
}
use std::cell::RefCell; use std::collections::{HashMap, VecDeque}; use std::fs::File; use std::io::Write; use std::time::{Duration, SystemTime}; use log::debug; use log::error; type Ptr = u32; type Size = u32; const HISTORY_CAPACITY: usize = 100_000; thread_local! { static STATE: RefCell<HeapProfilerState> = RefCell::new(HeapProfilerState::new()); } struct HeapProfilerState { memory: HashMap<Ptr, Size>, started: SystemTime, heap_history: VecDeque<(i32, Duration)>, } impl HeapProfilerState { pub fn new() -> Self { Self { memory: HashMap::new(), started: SystemTime::now(), heap_history: VecDeque::new(), } } fn history_push(&mut self, change: i32) { if self.heap_history.len() == HISTORY_CAPACITY { // if HISTRY_CAPACITY > 0 this should be safe self.heap_history.pop_front().unwrap(); } match self.started.elapsed() { Ok(duration) => self.heap_history.push_back((change, duration)), Err(e) => error!("profiler_history_push: {}", e), } } // Write heap profile history to a file. Format: // // #time/sec heap/byte // 0.1 10 // 0.2 15 // ... 
pub fn write_dat(&self, fd: &mut File) -> std::io::Result<()> { let mut graph = Vec::new(); let mut heap_size: u64 = 0; writeln!(&mut graph, "#time/sec heap/byte")?; // write initial entry for plotting writeln!(&mut graph, "0 0").unwrap(); self.heap_history.iter().for_each(|(heap, duration)| { if *heap < 0 { // TODO check for overflow heap_size -= (-*heap) as u64; } else { heap_size += *heap as u64; } writeln!(&mut graph, "{} {}", duration.as_secs_f64(), heap_size).unwrap(); }); fd.write_all(&graph) } } #[export_name = "aligned_alloc_profiler"] pub extern "C" fn aligned_alloc_profiler(_self: u32, size: Size, ptr: Ptr) { malloc_profiler(size, ptr); } #[export_name = "malloc_profiler"] pub extern "C" fn malloc_profiler(size: Size, ptr: Ptr) { debug!("heap_profiler: malloc({}) -> {}", size, ptr); STATE.with(|state| { let mut state = state.borrow_mut(); state.memory.insert(ptr, size); state.history_push(size as i32); }); } #[export_name = "calloc_profiler"] pub extern "C" fn calloc_profiler(len: Size, elem_size: Size, ptr: Ptr) { debug!("heap_profiler: calloc({},{}) -> {}", len, elem_size, ptr); STATE.with(|state| { let mut state = state.borrow_mut(); let size = len * elem_size; state.memory.insert(ptr, size); state.history_push(size as i32); }); } #[export_name = "realloc_profiler"] pub extern "C" fn realloc_profiler(old_ptr: Ptr, size: Size, new_ptr: Ptr) { debug!( "heap_profiler: realloc({},{}) -> {}", old_ptr, size, new_ptr ); STATE.with(|state| { let mut state = state.borrow_mut(); // realloc spec: if ptr is null then the call is equivalent to malloc(size) if old_ptr == 0 { state.memory.insert(new_ptr, size); state.history_push(size as i32); } else { match state.memory.remove(&old_ptr) { Some(removed_size) => { state.memory.insert(new_ptr, size); let size_delta = size as i32 - removed_size as i32; state.history_push(size_delta); } None => error!( "heap_profiler: can't reallocate, pointer {} doesn't exist", old_ptr ), }; } }); } #[export_name = "free_profiler"] pub 
extern "C" fn free_profiler(ptr: Ptr) { debug!("heap_profiler: free({})", ptr); STATE.with(|state| { let mut state = state.borrow_mut(); // free spec: if ptr is null no action is performed if ptr != 0 { match state.memory.remove(&ptr) { Some(size) => { state.history_push(-(size as i32)); } None => error!("heap_profiler: can't free, pointer {} doesn't exist", ptr), }; } }); }
use std::collections::{HashMap, HashSet}; use std::fs::File; use std::io::prelude::*; use std::iter::Iterator; #[inline] fn update_row( player: i32, marble: i32, row: &mut Vec<i32>, pos: i32, scores: &mut HashMap<i32, i32>, ) -> i32 { let n = row.len() as i32; let npos; if marble % 23 != 0 { npos = ((pos + 1) % n + 1) % (n + 1); row.insert(npos as usize, marble); } else { let to_del = if (pos - 7) > 0 { pos - 7 } else { n + pos - 7 }; let del_value = row[to_del as usize]; row.remove(to_del as usize); npos = to_del % n; let player_score = scores.entry(player).or_insert(0); *player_score += del_value + marble; } npos } fn play(players: i32, turns: i32) -> HashMap<i32, i32> { let mut scores: HashMap<i32, i32> = HashMap::new(); let mut current_row: Vec<i32> = vec![0]; let mut current_pos = 0; for marble in 1..=turns { let player = marble % players; current_pos = update_row(player, marble, &mut current_row, current_pos, &mut scores); } scores } fn max_score(players: i32, turns: i32) -> Option<i32> { let scores = play(players, turns); scores.values().max().map(|&n| n) } pub fn day9(input: &str) { // let mut file = File::open(input).expect("Failed to open input file"); // let mut contents = String::new(); // file.read_to_string(&mut contents) // .expect("Failed to read file"); println!("Part 1: {:?}", max_score(470, 72170)); println!("Part 2: {:?}", max_score(470, 72170 * 100)); } #[test] fn test_max_score() { assert_eq!(max_score(9, 25), Some(32)); assert_eq!(max_score(10, 1618), Some(8317)); assert_eq!(max_score(13, 7999), Some(146373)); assert_eq!(max_score(17, 1104), Some(2764)); assert_eq!(max_score(21, 6111), Some(54718)); assert_eq!(max_score(30, 5807), Some(37305)); }
use crate::{ models::{Restaurant, Pin, User, NoIdPin}, mapper, logic, errors::* }; pub fn index_included_by(community_id: i32) -> Result<Vec<Restaurant>, ErrCode>{ mapper::restaurant::find_all_pined_by(&community_id) } pub fn add(current: User, no_id_pin: NoIdPin) -> Result<Pin, ErrCode>{ logic::privilege::check_joined(&current, &no_id_pin.community_id)?; mapper::pin::create(&no_id_pin) } pub fn search(lat: f64, lng: f64, name: Option<String>) -> Result<Vec<Restaurant>, ErrCode>{ let no_id_restaurants = logic::vendor::gnavi::search(lat, lng, name)?; let mut restaurants: Vec<Restaurant> = Vec::new(); for no_id_restaurant in &no_id_restaurants { let restaurant = mapper::restaurant::insert_or_update(&no_id_restaurant)?; restaurants.push(restaurant); } Ok(restaurants) } pub fn find(rid: i32) -> Result<Restaurant, ErrCode>{ mapper::restaurant::find(&rid) }
use core::{convert::AsMut, default::Default, ptr::copy_nonoverlapping};

use necsim_core_bond::{ClosedUnitF64, NonNegativeF64, PositiveF64};

use crate::{
    cogs::Habitat,
    intrinsics::{cos, floor, ln, sin, sqrt},
    landscape::IndexedLocation,
};

/// Core seedable random number generator trait: implementors only need to
/// provide `from_seed` and `sample_u64`; everything else is derived.
#[allow(clippy::module_name_repetitions)]
pub trait RngCore: crate::cogs::Backup + Sized + Clone + core::fmt::Debug {
    /// Fixed-size seed type (a mutable byte buffer with a default value).
    type Seed: AsMut<[u8]> + Default + Sized;

    /// Construct the generator from a full-width seed.
    #[must_use]
    fn from_seed(seed: Self::Seed) -> Self;

    /// Expand a single `u64` into a full `Self::Seed` using a PCG32 stream.
    #[must_use]
    fn seed_from_u64(mut state: u64) -> Self {
        // Implementation from:
        // https://docs.rs/rand/0.7.3/rand/trait.SeedableRng.html#method.seed_from_u64

        // We use PCG32 to generate a u32 sequence, and copy to the seed
        const MUL: u64 = 6_364_136_223_846_793_005_u64;
        const INC: u64 = 11_634_580_027_462_260_723_u64;

        let mut seed = Self::Seed::default();
        // Fill the seed four bytes at a time (last chunk may be shorter).
        for chunk in seed.as_mut().chunks_mut(4) {
            // We advance the state first (to get away from the input value,
            // in case it has low Hamming Weight).
            state = state.wrapping_mul(MUL).wrapping_add(INC);

            // Use PCG output function with to_le to generate x:
            #[allow(clippy::cast_possible_truncation)]
            let xorshifted = (((state >> 18) ^ state) >> 27) as u32;
            #[allow(clippy::cast_possible_truncation)]
            let rot = (state >> 59) as u32;
            let x = xorshifted.rotate_right(rot).to_le();

            // SAFETY: `x` is 4 bytes and `chunk.len() <= 4` (chunks_mut(4)),
            // so the copy stays in bounds of both the source and destination.
            unsafe {
                let p = (&x as *const u32).cast::<u8>();
                copy_nonoverlapping(p, chunk.as_mut_ptr(), chunk.len());
            }
        }

        Self::from_seed(seed)
    }

    /// Draw the next raw 64-bit sample from the generator.
    #[must_use]
    fn sample_u64(&mut self) -> u64;
}

/// Derived distribution samplers, available on every [`RngCore`] via the
/// blanket impl below.
#[allow(clippy::inline_always, clippy::inline_fn_without_body)]
#[allow(clippy::module_name_repetitions)]
#[contract_trait]
pub trait RngSampler: RngCore {
    /// Sample a uniform f64 in the unit interval from the top 53 bits of a
    /// raw `u64` sample.
    #[must_use]
    #[inline]
    fn sample_uniform(&mut self) -> ClosedUnitF64 {
        // http://prng.di.unimi.it -> Generating uniform doubles in the unit interval
        #[allow(clippy::cast_precision_loss)]
        let u01 =
            ((self.sample_u64() >> 11) as f64) * f64::from_bits(0x3CA0_0000_0000_0000_u64); // 0x1.0p-53

        // SAFETY: u01 = (x >> 11) * 2^-53 lies in [0, 1), within ClosedUnitF64.
        unsafe { ClosedUnitF64::new_unchecked(u01) }
    }

    /// Sample a uniform index in `0..length`.
    #[must_use]
    #[inline]
    #[debug_ensures(ret < length, "samples U(0, length - 1)")]
    fn sample_index(&mut self, length: usize) -> usize {
        // attributes on expressions are experimental
        // see https://github.com/rust-lang/rust/issues/15701
        #[allow(
            clippy::cast_precision_loss,
            clippy::cast_possible_truncation,
            clippy::cast_sign_loss
        )]
        let index = floor(self.sample_uniform().get() * (length as f64)) as usize;
        index
    }

    /// Sample a uniform index in `0..length` (u32 variant).
    #[must_use]
    #[inline]
    #[debug_ensures(ret < length, "samples U(0, length - 1)")]
    fn sample_index_u32(&mut self, length: u32) -> u32 {
        // attributes on expressions are experimental
        // see https://github.com/rust-lang/rust/issues/15701
        #[allow(
            clippy::cast_precision_loss,
            clippy::cast_possible_truncation,
            clippy::cast_sign_loss
        )]
        let index = floor(self.sample_uniform().get() * f64::from(length)) as u32;
        index
    }

    /// Sample Exp(lambda) by inversion: `-ln(u) / lambda`.
    // NOTE(review): a uniform sample of exactly 0.0 yields +inf here; confirm
    // NonNegativeF64 tolerates infinity.
    #[must_use]
    #[inline]
    fn sample_exponential(&mut self, lambda: PositiveF64) -> NonNegativeF64 {
        let exp = -ln(self.sample_uniform().get()) / lambda.get();

        // SAFETY: -ln(u) >= 0 for u in [0, 1] and lambda > 0, so exp >= 0.
        unsafe { NonNegativeF64::new_unchecked(exp) }
    }

    /// Sample a Bernoulli event that succeeds with the given probability.
    #[must_use]
    #[inline]
    fn sample_event(&mut self, probability: ClosedUnitF64) -> bool {
        self.sample_uniform().get() < probability.get()
    }

    /// Sample two independent N(0, 1) values via the Box-Muller transform.
    #[must_use]
    #[inline]
    fn sample_2d_standard_normal(&mut self) -> (f64, f64) {
        // Basic Box-Muller transform
        let u0 = self.sample_uniform();
        let u1 = self.sample_uniform();

        let r = sqrt(-2.0_f64 * ln(u0.get()));
        // The sign of theta is irrelevant: the angle is uniform either way.
        let theta = -core::f64::consts::TAU * u1.get();

        (r * sin(theta), r * cos(theta))
    }

    /// Sample two independent N(mu, sigma^2) values by scaling and shifting
    /// a standard-normal pair.
    #[must_use]
    #[inline]
    fn sample_2d_normal(&mut self, mu: f64, sigma: NonNegativeF64) -> (f64, f64) {
        let (z0, z1) = self.sample_2d_standard_normal();

        (z0 * sigma.get() + mu, z1 * sigma.get() + mu)
    }
}

impl<R: RngCore> RngSampler for R {}

/// An RNG whose stream can be (re-)primed from a (location, time) index pair,
/// e.g. for reproducible per-event sampling.
#[allow(clippy::module_name_repetitions)]
pub trait PrimeableRng: RngCore {
    /// Re-prime the generator from the given location and time indices.
    fn prime_with(&mut self, location_index: u64, time_index: u64);
}

/// Convenience layer over [`PrimeableRng`] that derives the location index
/// from a habitat's injective indexed-location mapping.
#[allow(clippy::module_name_repetitions)]
pub trait HabitatPrimeableRng<H: Habitat>: PrimeableRng {
    /// Prime using the habitat's injective u64 encoding of `indexed_location`.
    #[inline]
    fn prime_with_habitat(
        &mut self,
        habitat: &H,
        indexed_location: &IndexedLocation,
        time_index: u64,
    ) {
        self.prime_with(
            habitat.map_indexed_location_to_u64_injective(indexed_location),
            time_index,
        );
    }
}

impl<R: PrimeableRng, H: Habitat> HabitatPrimeableRng<H> for R {}

/// An RNG that can be split into independent child generators / streams.
#[allow(clippy::module_name_repetitions)]
pub trait SplittableRng: RngCore {
    /// Consume this generator and produce two independent generators.
    fn split(self) -> (Self, Self);

    /// Consume this generator and produce the generator for stream `stream`.
    fn split_to_stream(self, stream: u64) -> Self;
}
use crate::parse::{ParseError, Token}; use crate::turtle::Turtle; pub mod basic; pub mod flow_control; pub mod pen; /// Represents a command pub trait Command { /// Run the command fn run(&self, turtle: &mut Turtle); /// Parse the command, gets given an iterator which represents the stream of tokens /// The parser should only `take()` the number of tokens it needs, since the stream will contain tokens of other commands fn parse<'a>( tokens: &mut impl ::std::iter::Iterator<Item = Token<'a>>, ) -> Result<Box<Self>, ParseError<'a>> where Self: Sized; /// Convert the `Command` to the logo code representation of it fn to_code(&self) -> String; }
// This file is part of Bit.Country.
// Copyright (C) 2020-2021 Bit.Country.
// SPDX-License-Identifier: Apache-2.0

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#![cfg(test)]

use super::*;
use crate as continuum;
use auction_manager::{Auction, AuctionHandler, AuctionInfo, Change, OnNewBidResult};
use frame_support::pallet_prelude::{GenesisBuild, Hooks, MaybeSerializeDeserialize};
use frame_support::sp_runtime::traits::AtLeast32Bit;
use frame_support::{construct_runtime, ord_parameter_types, parameter_types, weights::Weight};
use frame_system::{EnsureRoot, EnsureSignedBy};
use primitives::{Amount, AssetId, CurrencyId, FungibleTokenId};
use sp_core::H256;
use sp_runtime::{testing::Header, traits::IdentityLookup, ModuleId};

parameter_types! {
    pub const BlockHashCount: u32 = 256;
    pub const MaximumBlockWeight: u32 = 1024;
    pub const MaximumBlockLength: u32 = 2 * 1024;
}

// Configure a mock runtime to test the pallet.
pub type AccountId = u128;
pub type AuctionId = u64;
pub type Balance = u64;
pub type BitCountryId = u64;
pub type BlockNumber = u64;

// Well-known test accounts and ids.
pub const ALICE: AccountId = 1;
pub const BOB: AccountId = 2;
pub const CHARLIE: AccountId = 3;
pub const CLASS_ID: u32 = 0;
pub const COLLECTION_ID: u64 = 0;
pub const ALICE_COUNTRY_ID: BitCountryId = 1;
pub const BOB_COUNTRY_ID: BitCountryId = 2;

ord_parameter_types! {
    pub const One: AccountId = ALICE;
}

impl frame_system::Config for Runtime {
    type Origin = Origin;
    type Index = u64;
    type BlockNumber = BlockNumber;
    type Call = Call;
    type Hash = H256;
    type Hashing = ::sp_runtime::traits::BlakeTwo256;
    type AccountId = AccountId;
    type Lookup = IdentityLookup<Self::AccountId>;
    type Header = Header;
    type Event = Event;
    type BlockHashCount = BlockHashCount;
    type BlockWeights = ();
    type BlockLength = ();
    type Version = ();
    type PalletInfo = PalletInfo;
    type AccountData = pallet_balances::AccountData<Balance>;
    type OnNewAccount = ();
    type OnKilledAccount = ();
    type DbWeight = ();
    type BaseCallFilter = ();
    type SystemWeightInfo = ();
    type SS58Prefix = ();
}

parameter_types! {
    pub const ExistentialDeposit: u64 = 1;
}

impl pallet_balances::Config for Runtime {
    type Balance = Balance;
    type Event = Event;
    type DustRemoval = ();
    type ExistentialDeposit = ExistentialDeposit;
    type AccountStore = System;
    type MaxLocks = ();
    type WeightInfo = ();
}

/// No-op auction backend for the mock runtime.
///
/// Every stub previously returned `None`, which does not type-check for
/// `DispatchResult`, `Result<u64, _>`, `bool`, or `()` returns; each stub now
/// returns a neutral, type-correct value instead.
pub struct MockAuctionManager;

impl Auction<AccountId, BlockNumber> for MockAuctionManager {
    type Balance = Balance;

    fn auction_info(_id: u64) -> Option<AuctionInfo<u128, Self::Balance, u64>> {
        None
    }

    fn update_auction(_id: u64, _info: AuctionInfo<u128, Self::Balance, u64>) -> DispatchResult {
        Ok(())
    }

    fn new_auction(
        _recipient: u128,
        _initial_amount: Self::Balance,
        _start: u64,
        _end: Option<u64>,
    ) -> Result<u64, DispatchError> {
        // Dummy auction id; the continuum tests never inspect it.
        Ok(0)
    }

    fn create_auction(
        _auction_type: AuctionType,
        _item_id: ItemId,
        _end: Option<u64>,
        _recipient: u128,
        _initial_amount: Self::Balance,
        _start: u64,
        _listing_level: ListingLevel,
    ) -> Result<u64, DispatchError> {
        // Dummy auction id; the continuum tests never inspect it.
        Ok(0)
    }

    fn remove_auction(_id: u64, _item_id: ItemId) {}

    fn auction_bid_handler(
        _now: u64,
        _id: u64,
        _new_bid: (u128, Self::Balance),
        _last_bid: Option<(u128, Self::Balance)>,
    ) -> DispatchResult {
        Ok(())
    }

    fn local_auction_bid_handler(
        _now: u64,
        _id: u64,
        _new_bid: (u128, Self::Balance),
        _last_bid: Option<(u128, Self::Balance)>,
        _social_currency_id: FungibleTokenId,
    ) -> DispatchResult {
        Ok(())
    }

    fn check_item_in_auction(_asset_id: AssetId) -> bool {
        false
    }
}

parameter_types! {
    pub const ContinuumTreasuryModuleId: ModuleId = ModuleId(*b"bit/ctmu");
    pub const AuctionTimeToClose: u32 = 10; //Default 100800 Blocks
    pub const SessionDuration: BlockNumber = 10; //Default 43200 Blocks
    pub const SpotAuctionChillingDuration: BlockNumber = 10; //Default 43200 Blocks
}

/// Country-info stub: only ownership checks have real behavior (ALICE owns
/// country 1, BOB owns country 2); everything else is a neutral default.
pub struct BitCountryInfoSource {}

impl BitCountryTrait<AccountId> for BitCountryInfoSource {
    fn check_ownership(who: &AccountId, country_id: &BitCountryId) -> bool {
        match *who {
            ALICE => *country_id == ALICE_COUNTRY_ID,
            BOB => *country_id == BOB_COUNTRY_ID,
            _ => false,
        }
    }

    fn get_bitcountry(_bitcountry_id: u64) -> Option<BitCountryStruct<u128>> {
        None
    }

    fn get_bitcountry_token(_bitcountry_id: u64) -> Option<FungibleTokenId> {
        None
    }

    fn update_bitcountry_token(
        _bitcountry_id: u64,
        _currency_id: FungibleTokenId,
    ) -> Result<(), DispatchError> {
        Ok(())
    }
}

impl Config for Runtime {
    type Event = Event;
    type SessionDuration = SessionDuration;
    type SpotAuctionChillingDuration = SpotAuctionChillingDuration;
    type EmergencyOrigin = EnsureSignedBy<One, AccountId>;
    type AuctionHandler = MockAuctionManager;
    type AuctionDuration = SpotAuctionChillingDuration;
    type ContinuumTreasury = ContinuumTreasuryModuleId;
    type Currency = Balances;
    type BitCountryInfoSource = BitCountryInfoSource;
}

pub type ContinuumModule = Pallet<Runtime>;

type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Runtime>;
type Block = frame_system::mocking::MockBlock<Runtime>;

construct_runtime!(
    pub enum Runtime where
        Block = Block,
        NodeBlock = Block,
        UncheckedExtrinsic = UncheckedExtrinsic
    {
        System: frame_system::{Module, Call, Config, Storage, Event<T>},
        Balances: pallet_balances::{Module, Call, Storage, Config<T>, Event<T>},
        Continuum: continuum::{Module, Call ,Storage, Event<T>},
    }
);

/// Builds `TestExternalities` with funded ALICE/BOB balances and a seeded
/// continuum genesis config.
pub struct ExtBuilder;

impl Default for ExtBuilder {
    fn default() -> Self {
        ExtBuilder
    }
}

impl ExtBuilder {
    pub fn build(self) -> sp_io::TestExternalities {
        self.build_with_block_number(1)
    }

    pub fn build_with_block_number(self, block_number: u64) -> sp_io::TestExternalities {
        let mut t = frame_system::GenesisConfig::default()
            .build_storage::<Runtime>()
            .unwrap();

        pallet_balances::GenesisConfig::<Runtime> {
            balances: vec![(ALICE, 100000), (BOB, 500)],
        }
        .assimilate_storage(&mut t)
        .unwrap();

        continuum::GenesisConfig::<Runtime> {
            initial_active_session: 0,
            initial_auction_rate: 5,
            initial_max_bound: (-100, 100),
            spot_price: 10000,
        }
        // fixed: extra parentheses around `&mut t` removed
        .assimilate_storage(&mut t)
        .unwrap();

        let mut ext = sp_io::TestExternalities::new(t);
        ext.execute_with(|| System::set_block_number(block_number));
        ext
    }
}

/// The most recently deposited runtime event; panics when none exists.
pub fn last_event() -> Event {
    frame_system::Pallet::<Runtime>::events()
        .pop()
        .expect("Event expected")
        .event
}

/// Advance one block and run the pallet's `on_initialize` hook.
fn next_block() {
    System::set_block_number(System::block_number() + 1);
    ContinuumModule::on_initialize(System::block_number());
}

/// Advance block-by-block (running hooks) until block `n`.
pub fn run_to_block(n: u64) {
    while System::block_number() < n {
        next_block();
    }
}
// NOTE(review): svd2rust-generated accessors for the TAMP interrupt enable
// register (IER). Do not edit by hand — regenerate from the device SVD.
#[doc = "Register `IER` reader"]
pub type R = crate::R<IER_SPEC>;
#[doc = "Register `IER` writer"]
pub type W = crate::W<IER_SPEC>;
// External tamper interrupt enable bits (TAMP1..TAMP8).
#[doc = "Field `TAMP1IE` reader - TAMP1IE"]
pub type TAMP1IE_R = crate::BitReader;
#[doc = "Field `TAMP1IE` writer - TAMP1IE"]
pub type TAMP1IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TAMP2IE` reader - TAMP2IE"]
pub type TAMP2IE_R = crate::BitReader;
#[doc = "Field `TAMP2IE` writer - TAMP2IE"]
pub type TAMP2IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TAMP3IE` reader - TAMP3IE"]
pub type TAMP3IE_R = crate::BitReader;
#[doc = "Field `TAMP3IE` writer - TAMP3IE"]
pub type TAMP3IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TAMP4IE` reader - TAMP4IE"]
pub type TAMP4IE_R = crate::BitReader;
#[doc = "Field `TAMP4IE` writer - TAMP4IE"]
pub type TAMP4IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TAMP5IE` reader - TAMP5IE"]
pub type TAMP5IE_R = crate::BitReader;
#[doc = "Field `TAMP5IE` writer - TAMP5IE"]
pub type TAMP5IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TAMP6IE` reader - TAMP6IE"]
pub type TAMP6IE_R = crate::BitReader;
#[doc = "Field `TAMP6IE` writer - TAMP6IE"]
pub type TAMP6IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TAMP7IE` reader - TAMP7IE"]
pub type TAMP7IE_R = crate::BitReader;
#[doc = "Field `TAMP7IE` writer - TAMP7IE"]
pub type TAMP7IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TAMP8IE` reader - TAMP8IE"]
pub type TAMP8IE_R = crate::BitReader;
#[doc = "Field `TAMP8IE` writer - TAMP8IE"]
pub type TAMP8IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Internal tamper interrupt enable bits (ITAMP1/2/3/5/8 — gaps match the SVD).
#[doc = "Field `ITAMP1IE` reader - ITAMP1IE"]
pub type ITAMP1IE_R = crate::BitReader;
#[doc = "Field `ITAMP1IE` writer - ITAMP1IE"]
pub type ITAMP1IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ITAMP2IE` reader - ITAMP2IE"]
pub type ITAMP2IE_R = crate::BitReader;
#[doc = "Field `ITAMP2IE` writer - ITAMP2IE"]
pub type ITAMP2IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ITAMP3IE` reader - ITAMP3IE"]
pub type ITAMP3IE_R = crate::BitReader;
#[doc = "Field `ITAMP3IE` writer - ITAMP3IE"]
pub type ITAMP3IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ITAMP5IE` reader - ITAMP5IE"]
pub type ITAMP5IE_R = crate::BitReader;
#[doc = "Field `ITAMP5IE` writer - ITAMP5IE"]
pub type ITAMP5IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ITAMP8IE` reader - ITAMP8IE"]
pub type ITAMP8IE_R = crate::BitReader;
#[doc = "Field `ITAMP8IE` writer - ITAMP8IE"]
pub type ITAMP8IE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 0 - TAMP1IE"]
    #[inline(always)]
    pub fn tamp1ie(&self) -> TAMP1IE_R {
        TAMP1IE_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - TAMP2IE"]
    #[inline(always)]
    pub fn tamp2ie(&self) -> TAMP2IE_R {
        TAMP2IE_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - TAMP3IE"]
    #[inline(always)]
    pub fn tamp3ie(&self) -> TAMP3IE_R {
        TAMP3IE_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - TAMP4IE"]
    #[inline(always)]
    pub fn tamp4ie(&self) -> TAMP4IE_R {
        TAMP4IE_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - TAMP5IE"]
    #[inline(always)]
    pub fn tamp5ie(&self) -> TAMP5IE_R {
        TAMP5IE_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - TAMP6IE"]
    #[inline(always)]
    pub fn tamp6ie(&self) -> TAMP6IE_R {
        TAMP6IE_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - TAMP7IE"]
    #[inline(always)]
    pub fn tamp7ie(&self) -> TAMP7IE_R {
        TAMP7IE_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - TAMP8IE"]
    #[inline(always)]
    pub fn tamp8ie(&self) -> TAMP8IE_R {
        TAMP8IE_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 16 - ITAMP1IE"]
    #[inline(always)]
    pub fn itamp1ie(&self) -> ITAMP1IE_R {
        ITAMP1IE_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - ITAMP2IE"]
    #[inline(always)]
    pub fn itamp2ie(&self) -> ITAMP2IE_R {
        ITAMP2IE_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - ITAMP3IE"]
    #[inline(always)]
    pub fn itamp3ie(&self) -> ITAMP3IE_R {
        ITAMP3IE_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 20 - ITAMP5IE"]
    #[inline(always)]
    pub fn itamp5ie(&self) -> ITAMP5IE_R {
        ITAMP5IE_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 23 - ITAMP8IE"]
    #[inline(always)]
    pub fn itamp8ie(&self) -> ITAMP8IE_R {
        ITAMP8IE_R::new(((self.bits >> 23) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - TAMP1IE"]
    #[inline(always)]
    #[must_use]
    pub fn tamp1ie(&mut self) -> TAMP1IE_W<IER_SPEC, 0> {
        TAMP1IE_W::new(self)
    }
    #[doc = "Bit 1 - TAMP2IE"]
    #[inline(always)]
    #[must_use]
    pub fn tamp2ie(&mut self) -> TAMP2IE_W<IER_SPEC, 1> {
        TAMP2IE_W::new(self)
    }
    #[doc = "Bit 2 - TAMP3IE"]
    #[inline(always)]
    #[must_use]
    pub fn tamp3ie(&mut self) -> TAMP3IE_W<IER_SPEC, 2> {
        TAMP3IE_W::new(self)
    }
    #[doc = "Bit 3 - TAMP4IE"]
    #[inline(always)]
    #[must_use]
    pub fn tamp4ie(&mut self) -> TAMP4IE_W<IER_SPEC, 3> {
        TAMP4IE_W::new(self)
    }
    #[doc = "Bit 4 - TAMP5IE"]
    #[inline(always)]
    #[must_use]
    pub fn tamp5ie(&mut self) -> TAMP5IE_W<IER_SPEC, 4> {
        TAMP5IE_W::new(self)
    }
    #[doc = "Bit 5 - TAMP6IE"]
    #[inline(always)]
    #[must_use]
    pub fn tamp6ie(&mut self) -> TAMP6IE_W<IER_SPEC, 5> {
        TAMP6IE_W::new(self)
    }
    #[doc = "Bit 6 - TAMP7IE"]
    #[inline(always)]
    #[must_use]
    pub fn tamp7ie(&mut self) -> TAMP7IE_W<IER_SPEC, 6> {
        TAMP7IE_W::new(self)
    }
    #[doc = "Bit 7 - TAMP8IE"]
    #[inline(always)]
    #[must_use]
    pub fn tamp8ie(&mut self) -> TAMP8IE_W<IER_SPEC, 7> {
        TAMP8IE_W::new(self)
    }
    #[doc = "Bit 16 - ITAMP1IE"]
    #[inline(always)]
    #[must_use]
    pub fn itamp1ie(&mut self) -> ITAMP1IE_W<IER_SPEC, 16> {
        ITAMP1IE_W::new(self)
    }
    #[doc = "Bit 17 - ITAMP2IE"]
    #[inline(always)]
    #[must_use]
    pub fn itamp2ie(&mut self) -> ITAMP2IE_W<IER_SPEC, 17> {
        ITAMP2IE_W::new(self)
    }
    #[doc = "Bit 18 - ITAMP3IE"]
    #[inline(always)]
    #[must_use]
    pub fn itamp3ie(&mut self) -> ITAMP3IE_W<IER_SPEC, 18> {
        ITAMP3IE_W::new(self)
    }
    #[doc = "Bit 20 - ITAMP5IE"]
    #[inline(always)]
    #[must_use]
    pub fn itamp5ie(&mut self) -> ITAMP5IE_W<IER_SPEC, 20> {
        ITAMP5IE_W::new(self)
    }
    #[doc = "Bit 23 - ITAMP8IE"]
    #[inline(always)]
    #[must_use]
    pub fn itamp8ie(&mut self) -> ITAMP8IE_W<IER_SPEC, 23> {
        ITAMP8IE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "TAMP interrupt enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ier::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ier::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct IER_SPEC;
impl crate::RegisterSpec for IER_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ier::R`](R) reader structure"]
impl crate::Readable for IER_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ier::W`](W) writer structure"]
impl crate::Writable for IER_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets IER to value 0"]
impl crate::Resettable for IER_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use std::str;
#[allow(unused_imports)]
use nom::*;

use ast::Ast;
use datatype::Datatype;

// Parses a double-quoted string and yields its contents as an owned `String`.
//
// NOTE(review): `take_until!("\"")` stops at the *first* `"` byte, so escaped
// quotes (`\"`) inside the literal are not supported by this parser — confirm
// whether escape handling is intended elsewhere.
named!(string<String>,
    do_parse!(
        str: map_res!(
            delimited!(
                tag!("\""),
                take_until!("\""),
                tag!("\"")
            ),
            // The captured bytes are validated as UTF-8 here; invalid UTF-8 fails the parse.
            str::from_utf8
        ) >>
        (str.to_string())
    )
);

// Parses a quoted string (surrounding whitespace consumed by `ws!`) into a
// string-literal AST node.
named!(pub string_literal<Ast>,
    do_parse!(
        str: ws!(string) >>
        (Ast::Literal (Datatype::String(str)))
    )
);

#[test]
fn parse_string_test() {
    let input_string = "\"Hello World\"";
    let (_, value) = match string(input_string.as_bytes()) {
        IResult::Done(r, v) => (r, v),
        IResult::Error(e) => panic!("{:?}", e),
        _ => panic!(),
    };
    assert_eq!("Hello World".to_string(), value)
}

#[test]
fn parse_string_literal_test() {
    // Leading/trailing whitespace is consumed by `ws!` in `string_literal`.
    let input_string = " \"Hello World\" ";
    let (_, value) = match string_literal(input_string.as_bytes()) {
        IResult::Done(r, v) => (r, v),
        IResult::Error(e) => panic!("{:?}", e),
        _ => panic!(),
    };
    assert_eq!(Ast::Literal ( Datatype::String("Hello World".to_string())), value)
}

#[test]
fn parse_string_literal_with_escape_sequences() {
    // NOTE(review): the Rust literal "\n" is an actual newline byte, not the
    // two characters `\` `n`, so this test shows control characters pass
    // through verbatim — it does not exercise escape-sequence *parsing*.
    let input_string = "\"\n \n\"";
    let (_, value) = match string_literal(input_string.as_bytes()) {
        IResult::Done(r, v) => (r, v),
        IResult::Error(e) => panic!("{:?}", e),
        _ => panic!(),
    };
    assert_eq!(Ast::Literal(Datatype::String("\n \n".to_string())), value)
}

#[test]
fn parse_string_literal_with_escape_sequences_2() {
    // Same caveat as above: a single literal newline byte inside the quotes.
    let input_string = "\"\n\"";
    let (_, value) = match string_literal(input_string.as_bytes()) {
        IResult::Done(r, v) => (r, v),
        IResult::Error(e) => panic!("{:?}", e),
        _ => panic!(),
    };
    assert_eq!(Ast::Literal(Datatype::String("\n".to_string())), value)
}
use xor::xor; fn main() { let original_string = "1c0111001f010100061a024b53535009181c"; let xor_string = "686974207468652062756c6c277320657965"; // let actual = "746865206b696420646f6e277420706c6179"; xor(original_string, xor_string); }
use crate::lib::environment::Environment; use crate::lib::error::DfxResult; use crate::lib::identity::identity_manager::IdentityManager; use clap::Clap; use slog::info; /// Removes an existing identity. #[derive(Clap)] pub struct RemoveOpts { /// The identity to remove. identity: String, } pub fn exec(env: &dyn Environment, opts: RemoveOpts) -> DfxResult { let name = opts.identity.as_str(); let log = env.get_logger(); info!(log, r#"Removing identity "{}"."#, name); IdentityManager::new(env)?.remove(name)?; info!(log, r#"Removed identity "{}"."#, name); Ok(()) }
// Crate root: compiler configuration, external crate declarations, module
// layout, and the public re-exports that form this crate's API surface.
#![crate_type= "lib"]
// Nightly-era plugin/derive machinery used by serde_macros at the time this was written.
#![feature(custom_derive, custom_attribute, plugin)]
#![plugin(serde_macros)]
// Optional clippy lints, enabled via the "lints" cargo feature.
#![cfg_attr(feature = "lints", plugin(clippy))]
#![cfg_attr(feature = "lints", allow(explicit_iter_loop))]
#![cfg_attr(feature = "lints", allow(should_implement_trait))]
#![cfg_attr(feature = "lints", deny(warnings))]
#![deny(trivial_casts, trivial_numeric_casts, unsafe_code, unused_import_braces)]

extern crate serde;
extern crate serde_json;
extern crate hyper;
extern crate regex;

// Internal module containing the DoManager implementation (re-exported below).
mod domanager;
// Macros must be declared before the modules that use them.
#[macro_use]
mod macros;
pub mod request;
pub mod response;

// Public API re-exports.
pub use domanager::DoManager;
pub use request::RequestBuilder;
pub use request::DoRequest;

#[cfg(test)]
mod tests {}
use coi::{Container, Inject};
use rocket::{
    http::Status,
    outcome::IntoOutcome as _,
    request::{FromRequest, Outcome},
    Request, State,
};
use std::{marker::PhantomData, sync::Arc};

pub use coi_rocket_derive::inject;

// Maps a marker type `K` (generated by the derive macro) to the string key used
// to resolve `T` from the coi container.
#[doc(hidden)]
pub trait ContainerKey<T>
where
    T: Inject + ?Sized,
{
    const KEY: &'static str;
}

// Request-guard wrapper carrying the resolved dependency `T`; `K` only selects
// the container key and is never stored (hence `PhantomData`).
#[doc(hidden)]
pub struct Injected<T, K>(pub T, pub PhantomData<K>);

impl<T, K> Injected<T, K> {
    #[doc(hidden)]
    pub fn new(injected: T) -> Self {
        Self(injected, PhantomData)
    }
}

// Newtype so the request-local cache entry has a distinct type.
struct ScopedContainer(Container);

/// Errors produced while resolving an injected dependency.
#[derive(Debug)]
pub enum Error {
    /// Resolution failed inside the coi container.
    Coi(coi::Error),
    /// The managed `Container` was not registered with rocket, or scoping failed.
    MissingContainer,
}

// For every request that needs a container, create a scoped container that lives
// for the duration of that request.
impl<'a, 'r> FromRequest<'a, 'r> for &'a ScopedContainer {
    type Error = Error;
    fn from_request(req: &'a Request<'r>) -> Outcome<&'a ScopedContainer, Error> {
        // `local_cache` runs the closure at most once per request, so all
        // injected params within one request share the same scoped container.
        req.local_cache(|| {
            let container = req.guard::<State<Container>>().succeeded()?;
            Some(ScopedContainer(container.scoped()))
        })
        .as_ref()
        .into_outcome((Status::InternalServerError, Error::MissingContainer))
    }
}

// For every injected param, just use the locally cached scoped container.
impl<'a, 'r, T, K> FromRequest<'a, 'r> for Injected<Arc<T>, K>
where
    T: Inject + ?Sized,
    K: ContainerKey<T>,
{
    type Error = Error;
    fn from_request(req: &'a Request<'r>) -> Outcome<Injected<Arc<T>, K>, Error> {
        // Propagate failure/forward outcomes from the container guard unchanged.
        let container = match req.guard::<&ScopedContainer>() {
            Outcome::Success(container) => container,
            Outcome::Failure(f) => return Outcome::Failure(f),
            Outcome::Forward(f) => return Outcome::Forward(f),
        };
        container
            .0
            .resolve::<T>(<K as ContainerKey<T>>::KEY)
            .map(Injected::new)
            .map_err(Error::Coi)
            .into_outcome(Status::InternalServerError)
    }
}
use bio::io::fasta;
use std::collections::HashMap;
use std::io;

use crate::read_correction::CorrectionResults;

/// This is a wrapper for the bio::io::fasta::Writer that forces reads to be written in a specified order.
/// Reads that are not ready to be written are stored until the reads before it are written.
/// # Examples
/// ```rust
/// use std::fs;
/// use tempfile::{Builder, NamedTempFile};
/// use fmlrc::ordered_fasta_writer::OrderedFastaWriter;
/// use fmlrc::read_correction::CorrectionResults;
///
/// // File setup
/// let file: NamedTempFile = Builder::new().prefix("out_fasta_").suffix(".fa").tempfile().unwrap();
/// let temp_filename: String = file.path().to_str().unwrap().to_string();
/// {
///     let mut fasta_writer = OrderedFastaWriter::new(&file);
///
///     //write some corrections out of order
///     let corr_result_a = CorrectionResults {
///         read_index: 0,
///         label: "a".to_string(),
///         original_seq: "GCTA".to_string(),
///         corrected_seq: "CCTA".to_string(),
///         avg_before: 0.0,
///         avg_after: 0.0
///     };
///     let corr_result_b = CorrectionResults {
///         read_index: 1,
///         label: "b".to_string(),
///         original_seq: "ACGT".to_string(),
///         corrected_seq: "ACCT".to_string(),
///         avg_before: 0.0,
///         avg_after: 0.0
///     };
///     fasta_writer.write_correction(corr_result_b).unwrap();
///     fasta_writer.write_correction(corr_result_a).unwrap();
///     //call flush manually if the file writer stays in scope
///     //fasta_writer.flush().unwrap();
/// }
/// //check that the results are in order
/// let file_string = fs::read_to_string(temp_filename).unwrap();
/// let expected = ">a\nCCTA\n>b\nACCT\n";
/// assert_eq!(expected, file_string);
/// ```
pub struct OrderedFastaWriter<W: io::Write> {
    /// the actual fasta writer
    writer: fasta::Writer<W>,
    /// contains results we aren't ready to write yet, keyed by read index
    map_store: HashMap<u64, CorrectionResults>,
    /// the index for the next read to write
    current_index: u64
}

impl <W: io::Write> OrderedFastaWriter<W> {
    /// Creates an `OrderedFastaWriter` that is wrapping a file buffer.
    /// # Arguments
    /// `writer` - a buffer implementing `std::io::Write`
    pub fn new(writer: W) -> Self {
        OrderedFastaWriter {
            writer: fasta::Writer::new(writer),
            map_store: HashMap::<u64, CorrectionResults>::new(),
            current_index: 0
        }
    }

    /// Writes a correction to the file or buffers it if not ready to write.
    /// # Arguments
    /// `correction` - a read correction from fmlrc containing the read index, label, and corrected sequence
    /// # Errors
    /// Returns an error if the read index was already written (index below the
    /// next expected one) or if the same index was already buffered.
    pub fn write_correction(&mut self, correction: CorrectionResults) -> io::Result<()> {
        // An index below current_index was already flushed; accepting it again
        // would break the required output order.
        if correction.read_index < self.current_index {
            return Err(io::Error::new(io::ErrorKind::Other, "Read index is smaller than next expected index"));
        }
        // `insert` returning Some means a duplicate submission for this index.
        match self.map_store.insert(correction.read_index, correction) {
            None => {},
            Some(_) => {
                return Err(io::Error::new(io::ErrorKind::Other, "Read index was already present in the map_store"));
            }
        };
        // Write out any run of consecutive indices that is now complete.
        self.drain_map_store()
    }

    // Writes buffered corrections in index order, stopping at the first gap
    // (i.e. when the next expected read has not been submitted yet).
    fn drain_map_store(&mut self) -> io::Result<()> {
        while !self.map_store.is_empty() {
            match self.map_store.remove(&self.current_index) {
                Some(correction) => {
                    let record: fasta::Record = fasta::Record::with_attrs(&correction.label, None, correction.corrected_seq.as_bytes());
                    match self.writer.write_record(&record) {
                        Ok(()) => {},
                        Err(e) => return Err(e)
                    };
                    self.current_index += 1;
                },
                None => {
                    // Gap found; later reads stay buffered until it is filled.
                    break;
                }
            };
        }
        Ok(())
    }

    /// Flushes the buffer, call before trying to read anything.
    /// # Errors
    /// Returns any error raised while draining buffered corrections or
    /// flushing the underlying writer.
    pub fn flush(&mut self) -> io::Result<()> {
        match self.drain_map_store() {
            Ok(()) => {},
            Err(e) => return Err(e)
        };
        self.writer.flush()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::{Builder, NamedTempFile};

    #[test]
    fn test_fasta_writing() {
        let file: NamedTempFile = Builder::new().prefix("out_fasta_").suffix(".fa").tempfile().unwrap();
        let temp_filename: String = file.path().to_str().unwrap().to_string();
        {
            let mut ofw = OrderedFastaWriter::new(&file);
            let correction_a = CorrectionResults {
                read_index: 0,
                label: "a".to_string(),
                original_seq: "GCTA".to_string(),
                corrected_seq: "CCTA".to_string(),
                avg_before: 0.0,
                avg_after: 0.0
            };
            let correction_b = CorrectionResults {
                read_index: 1,
                label: "b".to_string(),
                original_seq: "ACGT".to_string(),
                corrected_seq: "ACCT".to_string(),
                avg_before: 0.0,
                avg_after: 0.0
            };
            let correction_c = CorrectionResults {
                read_index: 2,
                label: "c".to_string(),
                original_seq: "ACGT".to_string(),
                corrected_seq: "GGTT".to_string(),
                avg_before: 0.0,
                avg_after: 0.0
            };
            //submit them out of order, b -> c -> a
            ofw.write_correction(correction_b).unwrap();
            ofw.write_correction(correction_c).unwrap();
            ofw.write_correction(correction_a).unwrap();
        }
        let file_string = fs::read_to_string(temp_filename).unwrap();
        let expected = ">a\nCCTA\n>b\nACCT\n>c\nGGTT\n";
        assert_eq!(expected, file_string);
    }
}
#![allow(dead_code)]
use std::{
    any::{Any, TypeId},
    fmt,
    mem::ManuallyDrop,
    slice,
    sync::Arc,
};

use crate::traits::*;
use crate::value::*;
use crate::VecCopy;

// Marker trait for any type that can be stored in a `VecDyn`.
pub trait Elem: Any + DropBytes {}
impl<T> Elem for T where T: Any + DropBytes {}

/// This container is a WIP, not to be used in production.
//
// A type-erased vector: elements live as raw bytes inside a `VecCopy` and are
// cloned/compared/dropped through the function pointers held in `vtable`.
#[derive(Hash)]
pub struct VecDyn<V> {
    // ManuallyDrop because element destructors are invoked explicitly (via the
    // stored drop fn) in this type's `Drop` impl, not by `VecCopy`.
    data: ManuallyDrop<VecCopy>,
    // Shared (drop fn, user vtable) pair describing the erased element type.
    vtable: Arc<(DropFn, V)>,
}

impl<V> Drop for VecDyn<V> {
    fn drop(&mut self) {
        // Run the erased destructor on each element's byte chunk.
        unsafe {
            for elem_bytes in self.data.byte_chunks_mut() {
                self.vtable.drop_fn().0(elem_bytes);
            }
        }
    }
}

impl<V: HasClone> Clone for VecDyn<V> {
    fn clone(&self) -> Self {
        // Byte-copy the storage, then fix up each element with the vtable's
        // clone-from function so non-trivial `Clone` types are cloned properly.
        let data_clone = |bytes: &[u8]| {
            let mut new_data = bytes.to_vec();
            self.data
                .byte_chunks()
                .zip(new_data.chunks_exact_mut(self.data.element_size()))
                .for_each(|(src, dst)| unsafe { self.vtable.1.clone_from_fn()(dst, src) });
            new_data
        };
        VecDyn {
            data: ManuallyDrop::new(self.data.clone_with(data_clone)),
            vtable: Arc::clone(&self.vtable),
        }
    }
}

impl<V: HasPartialEq> PartialEq for VecDyn<V> {
    // NOTE(review): compares element-wise via untyped value refs; two buffers
    // of different lengths compare equal on the shared prefix — confirm intent.
    fn eq(&self, other: &Self) -> bool {
        self.iter()
            .zip(other.iter())
            .all(|(this, that)| this == that)
    }
}

impl<V: HasDebug> fmt::Debug for VecDyn<V> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

impl<V> VecDyn<V> {
    /// Retrieve the associated virtual function table.
    pub fn vtable(&self) -> &V {
        &self.vtable.1
    }

    /// Construct an empty vector with a specific pointed-to element type.
    #[inline]
    pub fn with_type<T: Elem>() -> Self
    where
        V: VTable<T>,
    {
        VecDyn {
            // This is safe because we are handling the additional processing needed
            // by `Clone` types in this container.
            data: ManuallyDrop::new(unsafe { VecCopy::with_type_non_copy::<T>() }),
            vtable: Arc::new((DropFn(T::drop_bytes), V::build_vtable())),
        }
    }

    /// Construct a vector with the same type as the given vector without copying its data.
    #[inline]
    pub fn with_type_from(other: &VecDyn<V>) -> Self {
        VecDyn {
            data: ManuallyDrop::new(VecCopy::with_type_from(&other.data)),
            vtable: Arc::clone(&other.vtable),
        }
    }

    /// Construct an empty vector with a capacity for a given number of typed pointed-to elements.
    #[inline]
    pub fn with_capacity<T: Elem>(n: usize) -> Self
    where
        V: VTable<T>,
    {
        VecDyn {
            // This is safe because we are handling the additional processing needed
            // by `Clone` types in this container.
            data: ManuallyDrop::new(unsafe { VecCopy::with_capacity_non_copy::<T>(n) }),
            vtable: Arc::new((DropFn(T::drop_bytes), V::build_vtable())),
        }
    }

    /// Construct a `VecDyn` from a given `Vec` reusing the space already allocated by the given
    /// vector.
    pub fn from_vec<T: Elem>(vec: Vec<T>) -> Self
    where
        V: VTable<T>,
    {
        VecDyn {
            // This is safe because we are handling the additional processing needed
            // by `Clone` types in this container.
            data: ManuallyDrop::new(unsafe { VecCopy::from_vec_non_copy(vec) }),
            vtable: Arc::new((DropFn(T::drop_bytes), V::build_vtable())),
        }
    }

    /// Clear the data buffer without destroying its type information.
    #[inline]
    pub fn clear(&mut self) {
        // Drop all elements manually.
        unsafe {
            for bytes in self.data.byte_chunks_mut() {
                self.vtable.drop_fn().0(bytes);
            }
        }
        self.data.data.clear();
    }

    /// Add an element to this buffer.
    ///
    /// If the type of the given element coincides with the type stored by this buffer,
    /// then the modified buffer is returned via a mutable reference. Otherwise, `None` is
    /// returned.
    #[inline]
    pub fn push<T: Elem>(&mut self, element: T) -> Option<&mut Self> {
        if let Some(_) = self.data.push(element) {
            Some(self)
        } else {
            None
        }
    }

    /// Check if the current buffer contains elements of the specified type. Returns `Some(self)`
    /// if the type matches and `None` otherwise.
    #[inline]
    pub fn check<T: Elem>(self) -> Option<Self> {
        if let Some(_) = self.data.check_ref::<T>() {
            Some(self)
        } else {
            None
        }
    }

    /// Check if the current buffer contains elements of the specified type. Returns `None` if the
    /// check fails, otherwise a reference to self is returned.
    #[inline]
    pub fn check_ref<T: Elem>(&self) -> Option<&Self> {
        self.data.check_ref::<T>().map(|_| self)
    }

    /// Check if the current buffer contains elements of the specified type. Same as `check_ref`
    /// but consumes and produces a mut reference to self.
    #[inline]
    pub fn check_mut<'a, T: Elem>(&'a mut self) -> Option<&'a mut Self> {
        if let Some(_) = self.data.check_mut::<T>() {
            Some(self)
        } else {
            None
        }
    }

    /*
     * Accessors
     */

    /// Get the `TypeId` of data stored within this buffer.
    #[inline]
    pub fn element_type_id(&self) -> TypeId {
        self.data.element_type_id()
    }

    /// Get the number of elements stored in this buffer.
    #[inline]
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// Check if there are any elements stored in this buffer.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }

    /// Get the byte capacity of this buffer.
    #[inline]
    pub fn byte_capacity(&self) -> usize {
        self.data.byte_capacity()
    }

    /// Get the size of the element type in bytes.
    #[inline]
    pub fn element_size(&self) -> usize {
        self.data.element_size()
    }

    /// Return an iterator to a slice representing typed data.
    ///
    /// Returns `None` if the given type `T` doesn't match the internal.
    #[inline]
    pub fn iter_as<'a, T: Elem>(&'a self) -> Option<slice::Iter<T>> {
        self.data.iter::<T>()
    }

    /// Return an iterator to a mutable slice representing typed data.
    ///
    /// Returns `None` if the given type `T` doesn't match the internal.
    #[inline]
    pub fn iter_mut_as<'a, T: Elem>(&'a mut self) -> Option<slice::IterMut<T>> {
        self.data.iter_mut::<T>()
    }

    /// An alternative to using the `Into` trait.
    ///
    /// This function helps the compiler determine the type `T` automatically.
    #[inline]
    pub fn into_vec<T: Elem>(self) -> Option<Vec<T>> {
        // This is safe because self.data will not be used after this call, and the resulting
        // Vec<T> will drop all elements correctly.
        unsafe {
            // Inhibit the Drop for self.
            let mut no_drop = ManuallyDrop::new(self);
            // Extract the value from data and turn it into a `Vec` which will handle the drop
            // correctly.
            ManuallyDrop::take(&mut no_drop.data).into_vec()
        }
    }

    /// Convert this buffer into a typed slice.
    /// Returns `None` if the given type `T` doesn't match the internal.
    #[inline]
    pub fn as_slice<T: Elem>(&self) -> Option<&[T]> {
        self.data.as_slice()
    }

    /// Convert this buffer into a typed mutable slice.
    /// Returns `None` if the given type `T` doesn't match the internal.
    #[inline]
    pub fn as_mut_slice<T: Elem>(&mut self) -> Option<&mut [T]> {
        self.data.as_mut_slice()
    }

    /// Get a `const` reference to the `i`'th element of the buffer.
    #[inline]
    pub fn get_ref_as<T: Elem>(&self, i: usize) -> Option<&T> {
        self.data.get_ref::<T>(i)
    }

    /// Get a mutable reference to the `i`'th element of the buffer.
    #[inline]
    pub fn get_mut_as<T: Elem>(&mut self, i: usize) -> Option<&mut T> {
        self.data.get_mut::<T>(i)
    }

    /// Move bytes to this buffer.
    ///
    /// The given buffer must have the same underlying type as `self`.
    #[inline]
    pub fn append(&mut self, buf: &mut VecDyn<V>) -> Option<&mut Self> {
        // It is sufficient to move the bytes, no clones or drops are necessary here.
        if let Some(_) = self.data.append(&mut buf.data) {
            Some(self)
        } else {
            None
        }
    }

    /// Rotates the slice in-place such that the first `mid` elements of the slice move to the end
    /// while the last `self.len() - mid` elements move to the front.
    ///
    /// After calling `rotate_left`, the element previously at index `mid` will become the
    /// first element in the slice.
    #[inline]
    pub fn rotate_left(&mut self, mid: usize) {
        self.data.rotate_left(mid)
    }

    /// Rotates the slice in-place such that the first `self.len() - k` elements of the slice move
    /// to the end while the last `k` elements move to the front.
    ///
    /// After calling `rotate_right`, the element previously at index `k` will become the
    /// first element in the slice.
    #[inline]
    pub fn rotate_right(&mut self, k: usize) {
        self.data.rotate_right(k)
    }

    /*
     * Value API. This allows users to manipulate contained data without knowing the element type.
     */

    /// Push a value onto this buffer.
    ///
    /// If the type of the given value coincides with the type stored by this buffer,
    /// then the modified buffer is returned via a mutable reference. Otherwise, `None` is
    /// returned.
    #[inline]
    pub fn push_value(&mut self, value: BoxValue<V>) -> Option<&mut Self> {
        if self.element_type_id() == value.value_type_id() {
            // Prevent the value from being dropped at the end of this scope since it will be later
            // dropped by this container.
            let value = ManuallyDrop::new(value);
            self.data.data.extend_from_slice(&value.bytes);
            Some(self)
        } else {
            None
        }
    }

    /// Push a clone of the referenced value to this buffer.
    ///
    /// If the type of the given value coincides with the type stored by this buffer,
    /// then the modified buffer is returned via a mutable reference. Otherwise, `None` is
    /// returned.
    ///
    /// This is more efficient than `push_value` since it avoids an extra allocation, however it
    /// requires the contained value to be `Clone`.
    #[inline]
    pub fn push_cloned(&mut self, value: ValueRef<V>) -> Option<&mut Self>
    where V: HasClone
    {
        if self.element_type_id() == value.value_type_id() {
            // Zero-fill new space first, then clone into it via the vtable.
            let orig_len = self.data.data.len();
            self.data.data.resize(orig_len + value.bytes.len(), 0u8);
            // This does not leak because the copied bytes are guaranteed to be dropped.
            unsafe {
                self.vtable.1.clone_into_raw_fn()(value.bytes, &mut self.data.data[orig_len..]);
            }
            Some(self)
        } else {
            None
        }
    }

    /// Get a reference to a value stored in this container at index `i`.
    #[inline]
    pub fn get(&self, i: usize) -> ValueRef<V> {
        debug_assert!(i < self.len());
        // This call is safe since our buffer guarantees that the given bytes have the
        // corresponding TypeId.
        unsafe {
            ValueRef::from_raw_parts(self.data.get_bytes(i), self.element_type_id(), &self.vtable)
        }
    }

    /// Return an iterator over untyped value references stored in this buffer.
    ///
    /// In contrast to `iter`, this function defers downcasting on a per element basis.
    /// As a result, this type of iteration is typically less efficient if a typed value is
    /// needed for each element.
    // NOTE(review): this doc says "in contrast to `iter`" on `iter` itself —
    // presumably it means `iter_as`; confirm and fix upstream.
    #[inline]
    pub fn iter<'a>(&'a self) -> impl Iterator<Item = ValueRef<'a, V>> + 'a {
        let &Self {
            ref data,
            ref vtable,
        } = self;
        let VecCopy {
            data,
            element_size,
            element_type_id,
        } = &**data;
        data.chunks_exact(*element_size)
            .map(move |bytes| unsafe { ValueRef::from_raw_parts(bytes, *element_type_id, vtable) })
    }

    /// Get a mutable reference to a value stored in this container at index `i`.
    #[inline]
    pub fn get_mut<'a>(&'a mut self, i: usize) -> ValueMut<'a, V> {
        debug_assert!(i < self.len());
        let Self { data, vtable } = self;
        let type_id = data.element_type_id();
        // Safety is guaranteed here by the value API.
        unsafe { ValueMut::from_raw_parts(data.get_bytes_mut(i), type_id, vtable) }
    }

    /// Return an iterator over mutable untyped value references stored in this buffer.
    ///
    /// In contrast to `iter_mut`, this function defers downcasting on a per element basis. As a
    /// result, this type of iteration is typically less efficient if a typed value is needed
    /// for each element.
    // NOTE(review): same self-referential doc as `iter` — presumably means `iter_mut_as`.
    #[inline]
    pub fn iter_mut<'a>(&'a mut self) -> impl Iterator<Item = ValueMut<'a, V>> + 'a {
        let &mut Self {
            ref mut data,
            ref vtable,
        } = self;
        let VecCopy {
            data,
            element_size,
            element_type_id,
        } = &mut **data;
        data.chunks_exact_mut(*element_size)
            .map(move |bytes| unsafe { ValueMut::from_raw_parts(bytes, *element_type_id, vtable) })
    }
}

// Additional functionality of VecDyns that implement Clone.
impl<V: HasClone> VecDyn<V> {
    /// Construct a typed `DataBuffer` with a given size and filled with the specified default
    /// value.
    // NOTE(review): "DataBuffer" looks like a stale name for `VecDyn` — confirm.
    #[inline]
    pub fn with_size<T: Elem + Clone>(n: usize, def: T) -> Self
    where
        V: VTable<T>,
    {
        VecDyn {
            // This is safe because we are handling the additional processing needed
            // by `Clone` types in this container.
            data: ManuallyDrop::new(unsafe { VecCopy::from_vec_non_copy(vec![def; n]) }),
            vtable: Arc::new((DropFn(T::drop_bytes), V::build_vtable())),
        }
    }

    /// Construct a buffer from a given slice by cloning the data.
    #[inline]
    pub fn from_slice<T: Elem + Clone>(slice: &[T]) -> Self
    where
        V: VTable<T>,
    {
        VecDyn {
            // This is safe because we are handling the additional processing needed
            // by `Clone` types in this container.
            data: ManuallyDrop::new(unsafe { VecCopy::from_slice_non_copy::<T>(slice) }),
            vtable: Arc::new((DropFn(T::drop_bytes), V::build_vtable())),
        }
    }

    /// Resizes the buffer in-place to store `new_len` elements and returns an optional
    /// mutable reference to `Self`.
    ///
    /// If `value` does not correspond to the underlying element type, then `None` is returned and the
    /// buffer is left unchanged.
    ///
    /// This function has the similar properties to `Vec::resize`.
    #[inline]
    pub fn resize<T: Elem + Clone>(&mut self, new_len: usize, value: T) -> Option<&mut Self> {
        self.check_ref::<T>()?;
        let size_t = std::mem::size_of::<T>();
        if new_len >= self.len() {
            // Growing: reserve once, then push clones of `value`.
            let diff = new_len - self.len();
            self.data.reserve_bytes(diff * size_t);
            for _ in 0..diff {
                self.data.push(value.clone());
            }
        } else {
            // Drop trailing elements manually.
            unsafe {
                for bytes in self.data.byte_chunks_mut().skip(new_len) {
                    self.vtable.drop_fn().0(bytes);
                }
            }
            // Truncate data
            self.data.data.resize(new_len * size_t, 0);
        }
        Some(self)
    }

    /// Fill the current buffer with clones of the given value.
    ///
    /// The size of the buffer is left unchanged. If the given type doesn't match the
    /// internal type, `None` is returned, otherwise a mutable reference to the modified buffer is
    /// returned.
    #[inline]
    pub fn fill<T: Elem + Clone>(&mut self, def: T) -> Option<&mut Self> {
        for v in self.iter_mut_as::<T>()? {
            *v = def.clone();
        }
        Some(self)
    }

    /// Append cloned items from this buffer to a given `Vec`.
    ///
    /// Return the mutable reference `Some(vec)` if type matched the internal type and
    /// `None` otherwise.
    #[inline]
    pub fn append_cloned_to_vec<'a, T: Elem + Clone>(
        &self,
        vec: &'a mut Vec<T>,
    ) -> Option<&'a mut Vec<T>> {
        let slice = self.as_slice()?;
        // Only allocate once we have confirmed that the given `T` matches to avoid unnecessary
        // overhead.
        vec.reserve(self.len());
        vec.extend_from_slice(slice);
        Some(vec)
    }

    /// Clones contents of `self` into the given `Vec`.
    #[inline]
    pub fn clone_into_vec<T: Elem + Clone>(&self) -> Option<Vec<T>> {
        let mut vec = Vec::new();
        match self.append_cloned_to_vec(&mut vec) {
            Some(_) => Some(vec),
            None => None,
        }
    }
}

/// Convert a `Vec` to a buffer.
impl<T: Elem, V: VTable<T>> From<Vec<T>> for VecDyn<V> {
    #[inline]
    fn from(vec: Vec<T>) -> VecDyn<V> {
        VecDyn::from_vec(vec)
    }
}

/// Convert a slice to a `VecDyn`.
impl<'a, T, V> From<&'a [T]> for VecDyn<V>
where
    T: Elem + Clone,
    V: VTable<T> + HasClone,
{
    #[inline]
    fn from(slice: &'a [T]) -> VecDyn<V> {
        VecDyn::from_slice(slice)
    }
}

/// Convert a buffer to a `Vec` with an option to fail.
impl<T: Elem, V: VTable<T>> Into<Option<Vec<T>>> for VecDyn<V> {
    #[inline]
    fn into(self) -> Option<Vec<T>> {
        self.into_vec()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use dyn_derive::dyn_trait;
    use rand::prelude::*;
    use std::mem::size_of;
    use std::rc::Rc;

    // Combined vtable used by all tests: generates `AllTraitVTable` for types
    // that are Clone + Eq + Hash + Debug.
    #[dyn_trait(suffix = "VTable", dyn_crate_name = "crate")]
    pub trait AllTrait: Clone + PartialEq + Eq + std::hash::Hash + std::fmt::Debug {}
    impl<T> AllTrait for T where T: Clone + PartialEq + Eq + std::hash::Hash + std::fmt::Debug {}

    type VecDynAll = VecDyn<AllTraitVTable>;

    #[inline]
    fn compute(x: i64, y: i64, z: i64) -> [i64; 3] {
        [x - 2 * y + z * 2, y - 2 * z + x * 2, z - 2 * x + y * 2]
    }

    // Deterministic "random" buffer: fixed seed, so runs are reproducible.
    #[inline]
    fn make_random_vec_dyn(n: usize) -> VecDynAll {
        let mut rng: StdRng = SeedableRng::from_seed([3; 32]);
        let between = rand::distributions::Uniform::from(1i64..5);
        let vec: Vec<_> = (0..n).map(move |_| [between.sample(&mut rng); 3]).collect();
        vec.into()
    }

    // Exercises the untyped mutable iterator with a per-element downcast.
    #[inline]
    fn vec_dyn_compute<V>(v: &mut VecDyn<V>) {
        for a in v.iter_mut() {
            let a = a.downcast::<[i64; 3]>().unwrap();
            let res = compute(a[0], a[1], a[2]);
            a[0] = res[0];
            a[1] = res[1];
            a[2] = res[2];
        }
    }

    #[test]
    fn downcast_value_mut() {
        let mut v: VecDynAll = make_random_vec_dyn(9_000);
        vec_dyn_compute(&mut v);
    }

    #[test]
    fn clone_from_test() {
        use std::collections::HashSet;
        use std::rc::Rc;
        // Let's create a collection of `Rc`s.
        let vec_rc: Vec<_> = vec![1, 23, 2, 42, 23, 1, 13534653]
            .into_iter()
            .map(Rc::new)
            .collect();
        let buf = VecDynAll::from(vec_rc.clone()); // Clone into VecDyn
        // Construct a hashset of unique values from the VecDyn.
        let mut hashset: HashSet<BoxValue<AllTraitVTable>> = HashSet::new();
        for rc_ref in buf.iter().take(4) {
            assert!(hashset.insert(rc_ref.clone_value()));
        }
        // Values 23 and 1 repeat, so re-inserting them must fail.
        assert!(!hashset.insert(Value::new(Rc::clone(&vec_rc[4]))));
        assert!(!hashset.insert(Value::new(Rc::clone(&vec_rc[5]))));
        assert_eq!(hashset.len(), 4);
        assert!(hashset.contains(&Value::new(Rc::new(1))));
        assert!(hashset.contains(&Value::new(Rc::new(23))));
        assert!(hashset.contains(&Value::new(Rc::new(2))));
        assert!(hashset.contains(&Value::new(Rc::new(42))));
        assert!(!hashset.contains(&Value::new(Rc::new(13534653))));
    }

    #[test]
    fn iter() {
        use std::rc::Rc;
        let vec: Vec<_> = vec![1, 23, 2, 42, 11].into_iter().map(Rc::new).collect();
        {
            let buf = VecDynAll::from(vec.clone()); // Convert into buffer
            let orig = Rc::new(100);
            let mut rc = Rc::clone(&orig);
            assert_eq!(Rc::strong_count(&rc), 2);
            // `clone_from` should drop the previous contents of `rc`,
            // eventually detaching it from `orig`.
            for val in buf.iter() {
                ValueMut::new(&mut rc).clone_from(val);
            }
            assert_eq!(Rc::strong_count(&orig), 1);
            assert_eq!(Rc::strong_count(&rc), 3);
            assert_eq!(Rc::strong_count(&vec[4]), 3);
            assert!(vec.iter().take(4).all(|x| Rc::strong_count(x) == 2));
            assert_eq!(rc, Rc::new(11));
        }
        // Dropping the buffer must release all its element clones.
        assert!(vec.iter().all(|x| Rc::strong_count(x) == 1));
    }

    /// Test various ways to create a `VecDyn`.
    #[test]
    fn initialization_test() {
        // Empty typed buffer.
        let a = VecDynAll::with_type::<Rc<u8>>();
        assert_eq!(a.len(), 0);
        assert_eq!(a.element_type_id(), TypeId::of::<Rc<u8>>());
        assert_eq!(a.byte_capacity(), 0); // Ensure nothing is allocated.

        // Empty buffer typed by the given type id.
        let b = VecDynAll::with_type_from(&a);
        assert_eq!(b.len(), 0);
        assert_eq!(b.element_type_id(), TypeId::of::<Rc<u8>>());
        assert_eq!(a.byte_capacity(), 0); // Ensure nothing is allocated.

        // Empty typed buffer with a given capacity.
        let a = VecDynAll::with_capacity::<Rc<u8>>(4);
        assert_eq!(a.len(), 0);
        assert_eq!(a.byte_capacity(), 4 * size_of::<Rc<u8>>());
        assert_eq!(a.element_type_id(), TypeId::of::<Rc<u8>>());
    }

    /// Test resizing a buffer.
    #[test]
    fn resize() {
        let mut a = VecDynAll::with_type::<Rc<u8>>();

        // Increase the size of a.
        a.resize(3, Rc::new(1u8))
            .expect("Failed to resize VecDyn up by 3 elements");

        assert_eq!(a.len(), 3);
        for i in 0..3 {
            assert_eq!(a.get_ref_as::<Rc<u8>>(i).unwrap(), &Rc::new(1));
        }

        // Truncate a.
        a.resize(2, Rc::new(1u8))
            .expect("Failed to resize VecDyn down to 2 elements");

        assert_eq!(a.len(), 2);
        for i in 0..2 {
            assert_eq!(a.get_ref_as::<Rc<u8>>(i).unwrap(), &Rc::new(1));
        }
    }

    #[test]
    fn data_integrity_u8_test() {
        let vec: Vec<Rc<u8>> = vec![1u8, 3, 4, 1, 2].into_iter().map(Rc::new).collect();
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        let nu_vec: Vec<Rc<u8>> = buf.clone_into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);

        let vec: Vec<Rc<u8>> = vec![1u8, 3, 4, 1, 2, 52, 1, 3, 41, 23, 2]
            .into_iter()
            .map(Rc::new)
            .collect();
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        let nu_vec: Vec<Rc<u8>> = buf.clone_into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);
    }

    #[test]
    fn data_integrity_i16_test() {
        let vec: Vec<Rc<i16>> = vec![1i16, -3, 1002, -231, 32]
            .into_iter()
            .map(Rc::new)
            .collect();
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        let nu_vec: Vec<Rc<i16>> = buf.clone_into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);

        let vec: Vec<Rc<i16>> = vec![1i16, -3, 1002, -231, 32, 42, -123, 4]
            .into_iter()
            .map(Rc::new)
            .collect();
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        let nu_vec: Vec<Rc<i16>> = buf.clone_into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);
    }

    #[test]
    fn data_integrity_i32_test() {
        let vec: Vec<Rc<i32>> = vec![1i32, -3, 1002, -231, 32]
            .into_iter()
            .map(Rc::new)
            .collect();
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        let nu_vec: Vec<Rc<i32>> = buf.into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);

        let vec: Vec<Rc<i32>> = vec![1i32, -3, 1002, -231, 32, 42, -123]
            .into_iter()
            .map(Rc::new)
            .collect();
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        let nu_vec: Vec<Rc<i32>> = buf.into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);
    }

    #[derive(Clone, Debug, PartialEq, Eq, Hash)]
    struct Foo {
        a: u8,
        b: i64,
    }

    #[test]
    fn from_empty_vec_test() {
        let vec: Vec<Rc<u32>> = Vec::new();
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        let nu_vec: Vec<Rc<u32>> = buf.into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);

        let vec: Vec<Rc<String>> = Vec::new();
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        let nu_vec: Vec<Rc<String>> = buf.into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);

        let vec: Vec<Rc<Foo>> = Vec::new();
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        let nu_vec: Vec<Rc<Foo>> = buf.into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);
    }

    #[test]
    fn from_struct_test() {
        let f1 = Foo { a: 3, b: -32 };
        let f2 = Foo {
            a: 33,
            b: -3342432412,
        };
        let vec: Vec<Rc<Foo>> = vec![Rc::new(f1.clone()), Rc::new(f2.clone())];
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        assert_eq!(Rc::new(f1), buf.get_ref_as::<Rc<Foo>>(0).unwrap().clone());
        assert_eq!(Rc::new(f2), buf.get_ref_as::<Rc<Foo>>(1).unwrap().clone());
        let nu_vec: Vec<Rc<Foo>> = buf.into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);
    }

    #[test]
    fn from_strings_test() {
        let vec: Vec<Rc<String>> = vec![
            String::from("hi"),
            String::from("hello"),
            String::from("goodbye"),
            String::from("bye"),
            String::from("supercalifragilisticexpialidocious"),
            String::from("42"),
        ]
        .into_iter()
        .map(Rc::new)
        .collect();
        let buf = VecDynAll::from(vec.clone()); // Convert into buffer
        assert_eq!(
            &Rc::new("hi".to_string()),
            buf.get_ref_as::<Rc<String>>(0).unwrap()
        );
        assert_eq!(
            &Rc::new("hello".to_string()),
            buf.get_ref_as::<Rc<String>>(1).unwrap()
        );
        assert_eq!(
            &Rc::new("goodbye".to_string()),
            buf.get_ref_as::<Rc<String>>(2).unwrap()
        );
        let nu_vec: Vec<Rc<String>> = buf.into_vec().unwrap(); // Convert back into vec
        assert_eq!(vec, nu_vec);
    }

    #[test]
    fn iter_test() {
        let vec_u8: Vec<Rc<u8>> = vec![1u8, 3, 4, 1, 2, 4, 128, 32]
            .into_iter()
            .map(Rc::new)
            .collect();
        let buf = VecDynAll::from(vec_u8.clone()); // Convert into buffer
        for (i, val) in buf.iter_as::<Rc<u8>>().unwrap().enumerate() {
            assert_eq!(val, &vec_u8[i]);
        }
    }

    #[test]
    fn large_sizes_clone() {
        for i in 100000..100010 {
            let vec: Vec<Rc<u8>> = vec![32u8; i].into_iter().map(Rc::new).collect();
            let buf = VecDynAll::from(vec.clone()); // Convert into buffer
            let nu_vec: Vec<Rc<u8>> = buf.into_vec().unwrap(); // Convert back into vec
            assert_eq!(vec, nu_vec);
        }
    }

    /// This test checks that an error is returned whenever the user tries to access data with the
    /// wrong type data.
    #[test]
    fn wrong_type_test() {
        let vec: Vec<Rc<u8>> = vec![1, 23, 2, 42, 11].into_iter().map(Rc::new).collect();
        let mut buf = VecDynAll::from(vec.clone()); // Convert into buffer
        assert_eq!(vec, buf.clone_into_vec::<Rc<u8>>().unwrap());

        assert!(buf.clone_into_vec::<Rc<f64>>().is_none());
        assert!(buf.as_slice::<Rc<f64>>().is_none());
        assert!(buf.iter_as::<Rc<[u8; 3]>>().is_none());
        assert!(buf.get_ref_as::<Rc<i32>>(1).is_none());
        assert!(buf.get_mut_as::<Rc<i32>>(2).is_none());
    }

    /// Test pushing values and bytes to a buffer.
    #[test]
    fn push_test() {
        let mut vec_u8: Vec<Rc<u8>> = vec![1u8, 23, 2].into_iter().map(Rc::new).collect();
        let mut buf = VecDynAll::from(vec_u8.clone()); // Convert into buffer
        for (i, val) in buf.iter_as::<Rc<u8>>().unwrap().enumerate() {
            assert_eq!(val, &vec_u8[i]);
        }

        vec_u8.push(Rc::new(42u8));
        buf.push(Rc::new(42u8)).unwrap(); // must provide explicit type

        for (i, val) in buf.iter_as::<Rc<u8>>().unwrap().enumerate() {
            assert_eq!(val, &vec_u8[i]);
        }

        vec_u8.push(Rc::new(11u8));
        buf.push(Rc::new(11u8)).unwrap();

        for (i, val) in buf.iter_as::<Rc<u8>>().unwrap().enumerate() {
            assert_eq!(val, &vec_u8[i]);
        }
    }

    /// Test appending to a buffer from another buffer.
    #[test]
    fn append_test() {
        let mut buf = VecDynAll::with_type::<Rc<u8>>(); // Create an empty buffer.

        let data: Vec<Rc<u8>> = vec![1, 23, 2, 42, 11].into_iter().map(Rc::new).collect();

        // Append an ordinary vector of data.
        let mut other_buf = VecDynAll::from_vec(data.clone());
        buf.append(&mut other_buf);

        assert!(other_buf.is_empty());

        for (i, val) in buf.iter_as::<Rc<u8>>().unwrap().enumerate() {
            assert_eq!(val, &data[i]);
        }
    }
}
/* Copyright 2019-2023 Didier Plaindoux Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use std::iter::Iterator; use std::marker::PhantomData; use crate::stream::end_line::EndLine; use crate::stream::position::Position; use crate::stream::stream::Len; use crate::stream::stream::Stream; #[derive(Clone)] pub struct IteratorStream<E, I, P>(I, P, PhantomData<E>) where I: Iterator<Item = E>, E: EndLine, P: Position; impl<'a, E, I> IteratorStream<E, I, (usize, usize, usize)> where I: Iterator<Item = E>, E: EndLine, { pub fn new(s: I) -> Self { IteratorStream(s, <(usize, usize, usize)>::new(), PhantomData) } } impl<'a, E, I, P> IteratorStream<E, I, P> where I: Iterator<Item = E>, E: EndLine, P: Position, { pub fn new_with_position(s: I, p: P) -> Self { IteratorStream(s, p, PhantomData) } } impl<E, I, P> Stream for IteratorStream<E, I, P> where I: Iterator<Item = E> + Clone, E: EndLine + Clone, P: Position + Clone, { type Item = E; type Pos = P; fn position(&self) -> Self::Pos { self.1.clone() } fn next(&self) -> (Option<Self::Item>, Self) { let mut this = self.clone(); // Mutability required by the Iterator::next call (below) let option = this.0.next(); if option.is_some() { ( option.clone(), IteratorStream( this.0, this.1.step(option.unwrap().is_end_line()), PhantomData, ), ) } else { (option, IteratorStream(this.0, this.1, PhantomData)) } } } impl<E, I, P> Len for IteratorStream<E, I, P> where I: Iterator<Item = E> + Clone, E: EndLine, P: Position, { fn len(&self) -> usize { self.0.clone().count() } }
/// Header of a DNS message (RFC 1035 §4.1.1).
pub struct DnsMessageHeader {
    /// Query identifier, echoed back in the matching response.
    id: u16,
    /// QR bit: true for a query, false for a response.
    query: bool,
    opcode: Opcode,
    /// Authoritative-answer flag.
    aa: bool,
    /// Truncation flag.
    tc: bool,
    /// Recursion desired.
    rd: bool,
    /// Recursion available.
    ra: bool,
    // FIX: a comma was missing after this field, making the struct fail to parse.
    rcode: DnsResponseCode,
    /// Entry counts for the question/answer/authority/additional sections.
    qdcount: u16,
    ancount: u16,
    nscount: u16,
    arcount: u16,
}

/// A single question entry: name labels plus query type and class.
struct DnsMessageQuestion {
    /// Domain name as a sequence of raw labels (no length-prefix encoding here).
    qname: Vec<Vec<u8>>,
    qtype: DnsQueryType,
    qclass: DnsQueryClass,
}

/// A full query message: header plus its question/body entries.
struct DnsMessageQuery {
    header: DnsMessageHeader,
    body: Vec<DnsMessageQueryBody>,
}
use anyhow::{Context, Result};
use chrono::prelude::*;
use std::fs::File;
use std::io::BufRead;

mod gpx;

/// Prints usage to stderr and exits with status 1; never returns.
fn usage() -> ! {
    eprintln!("usage: {} file.slopes", std::env::args().next().unwrap());
    eprintln!("converts a Slopes export file into GPX on stdout");
    std::process::exit(1);
}

/// Reads the `.slopes` archive named on the command line, parses its
/// `GPS.csv` member, and writes a GPX track to stdout.
fn main() -> Result<()> {
    // First positional argument is the archive path; -h/--help prints usage.
    let path = match std::env::args().nth(1) {
        None => usage(),
        Some(path) => {
            if path == "--help" || path == "-h" {
                usage();
            }
            path
        }
    };

    // A .slopes file is a zip archive.
    let file = File::open(path).context("failed to open file")?;
    let mut z = zip::ZipArchive::new(file).context("failed to read zip file")?;
    // TODO: Metadata.xml
    let gps_file = z.by_name("GPS.csv").context("failed to get GPS.csv from archive")?;

    let mut points = vec![];
    for line in std::io::BufReader::new(gps_file).lines() {
        let line = line.context("read error")?;
        let mut fields = line.split(',');
        // Pulls the next CSV field and parses it as f64, attaching the field
        // name to any error. Fields must be consumed in file order.
        macro_rules! parse {
            ($name:expr) => {
                fields.next()
                    .ok_or_else(|| anyhow::anyhow!(concat!("missing field ", $name)))?
                    .parse::<f64>()
                    .context(concat!("invalid ", $name))?
            }
        }
        let utc_seconds = parse!("timestamp");
        let lat = parse!("latitude");
        let lon = parse!("longitude");
        let ele = parse!("elevation");
        let course = parse!("course");
        let speed = parse!("speed");
        let _dunno1 = fields.next(); // horizontal accuracy in meters?
        let _dunno2 = fields.next(); // vertical accuracy in meters?
        points.push(gpx::Point {
            // Split the float timestamp into whole seconds plus nanoseconds
            // (fract / 1e-9 == fract * 1e9). Offset 0 == UTC.
            time: FixedOffset::west(0).timestamp(
                utc_seconds.floor() as i64,
                (utc_seconds.fract() / 1e-9) as u32),
            lat,
            lon,
            ele,
            course,
            speed,
        });
    }

    // Single track segment containing every parsed point.
    gpx::write_gpx(
        std::io::stdout(),
        &[&points[..]])?;

    Ok(())
}
use crate::validate;
use std::io;

/// This [`Error`] represents anything that can go wrong with this library.
///
/// The `#[from]` attributes generate `From` impls, so `?` can convert the
/// underlying validation, IO, and git2 errors automatically.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// Metadata validation error
    #[error("Validation Error")]
    Validation(#[from] validate::Error),
    /// filesystem IO error
    #[error("IO Error")]
    Io(#[from] io::Error),
    /// libgit2 error
    #[error("Git Error")]
    Git(#[from] git2::Error),
}

/// The result type for fallible functions in this library.
pub type Result<T> = std::result::Result<T, Error>;
use chrono::Utc;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use drogue_cloud_database_common::models::{app::Application, diff::diff_paths};
use serde_json::json;

/// Benchmarks `diff_paths` on two identical applications ("all equal" case).
fn criterion_benchmark(c: &mut Criterion) {
    let now = Utc::now();
    let data = json!({
        "spec": {
            "core": {
                "disabled": false,
            },
            "credentials": {
                "credentials": [
                    { "pass": "password"},
                    { "username": {"username": "foo", "password": "pwd" }}
                ],
            },
        },
        "status": {
            "trustAnchors": [
                { "anchor": { "certificate": "", }}
            ]
        },
    });

    // Produces a fresh but identical Application for each side of the diff,
    // cloning the JSON payload exactly as the benchmark intends to measure.
    let make_app = || Application {
        id: "id1".to_string(),
        labels: Default::default(),
        annotations: Default::default(),
        creation_timestamp: now,
        resource_version: "12345678".to_string(),
        generation: 0,
        data: data.clone(),
    };

    c.bench_function("all equal", |b| {
        b.iter(|| {
            black_box(diff_paths(&black_box(make_app()), &black_box(make_app())));
        })
    });
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
//! Contains all elements except [`greater_elements`].

use super::*;

mod babel_call;
mod clock;
mod comment;
mod comment_block;
mod diary_sexp;
mod example_block;
mod export_block;
mod fixed_width;
mod horizontal_rule;
mod keyword;
mod latex_environment;
mod node_property;
mod paragraph;
mod planning;
mod src_block;

pub use self::babel_call::BabelCall;
pub use self::clock::{Clock, ClockStatus};
pub use self::comment::Comment;
pub use self::comment_block::CommentBlock;
pub use self::diary_sexp::DiarySexp;
pub use self::example_block::ExampleBlock;
pub use self::export_block::ExportBlock;
pub use self::fixed_width::FixedWidth;
pub use self::horizontal_rule::HorizontalRule;
pub use self::keyword::{Keyword, KeywordValueSetOfObjects};
pub use self::latex_environment::LatexEnvironment;
pub use self::node_property::NodeProperty;
pub use self::paragraph::Paragraph;
pub use self::planning::Planning;
pub use self::src_block::SrcBlock;

/// Contains the flags of an [`ExampleBlock`] or [`SrcBlock`].
///
/// Can contain the following flags:
///
/// - `+n AMOUNT`: continued number lines, will continue the numbering of the previous numbered
///   snippet. `AMOUNT` will be added to the last line of the previous block to determine the
///   number of the first line.
/// - `-n AMOUNT`: new number lines (`AMOUNT` is the start line number of the block)
/// - `-i`: preserve indent
/// - `-r`: removes the labels when exporting. References will use line numbers.
/// - `-k`: don't use labels
/// - `-l "FMT"`: label format (if the default format conflicts with the language you are
///   using)
///
/// `AMOUNT` is an optional positive number.
///
/// `FMT` can contain everything except `"` and newlines.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct BlockFlags {
    /// `None` means the block is not numbered at all.
    pub number_lines: Option<NumberLinesFlag>,
    /// Default: false
    pub preserve_indent: bool,
    /// Default: true
    ///
    /// If true, code-references should use labels instead of line numbers.
    pub retain_labels: bool,
    /// Custom label format (`-l "FMT"`), if one was given.
    pub label_fmt: Option<String>,
}

/// Flag of [`BlockFlags`] that defines if line numbering is continued or starts fresh (and
/// optionally from where).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum NumberLinesFlag {
    Continued(Option<u64>),
    New(Option<u64>),
}
fn md5(input: &[u8]) -> [u8; 16] { #![allow(non_snake_case)] let s: [u32; 64] = [ 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, ]; let K: [u32; 64] = [ 0xd76aa478, 0xe8c7b756, 0x242070db, 0xc1bdceee, 0xf57c0faf, 0x4787c62a, 0xa8304613, 0xfd469501, 0x698098d8, 0x8b44f7af, 0xffff5bb1, 0x895cd7be, 0x6b901122, 0xfd987193, 0xa679438e, 0x49b40821, 0xf61e2562, 0xc040b340, 0x265e5a51, 0xe9b6c7aa, 0xd62f105d, 0x02441453, 0xd8a1e681, 0xe7d3fbc8, 0x21e1cde6, 0xc33707d6, 0xf4d50d87, 0x455a14ed, 0xa9e3e905, 0xfcefa3f8, 0x676f02d9, 0x8d2a4c8a, 0xfffa3942, 0x8771f681, 0x6d9d6122, 0xfde5380c, 0xa4beea44, 0x4bdecfa9, 0xf6bb4b60, 0xbebfbc70, 0x289b7ec6, 0xeaa127fa, 0xd4ef3085, 0x04881d05, 0xd9d4d039, 0xe6db99e5, 0x1fa27cf8, 0xc4ac5665, 0xf4292244, 0x432aff97, 0xab9423a7, 0xfc93a039, 0x655b59c3, 0x8f0ccc92, 0xffeff47d, 0x85845dd1, 0x6fa87e4f, 0xfe2ce6e0, 0xa3014314, 0x4e0811a1, 0xf7537e82, 0xbd3af235, 0x2ad7d2bb, 0xeb86d391, ]; let mut a0: u32 = 0x67452301; let mut b0: u32 = 0xefcdab89; let mut c0: u32 = 0x98badcfe; let mut d0: u32 = 0x10325476; let orig_len = input.len(); let mut input = input.to_vec(); input.push(0x80); while input.len() % 64 != 56 { input.push(0x00); } let orig_len_bits = ((orig_len as u128 * 8) % 2u128.pow(64)) as u64; input.extend(orig_len_bits.to_le_bytes()); for chunk in input.chunks(512 / 8) { let mut M: [u32; 16] = [0; 16]; for (i, word) in chunk.chunks(32 / 8).enumerate() { let word = u32::from_le_bytes([word[0], word[1], word[2], word[3]]); M[i] = word; } let mut A = a0; let mut B = b0; let mut C = c0; let mut D = d0; for i in 0..=63 { let mut F; let g; match i { 0..=15 => { F = (B & C) | ((!B) & D); g = i; } 16..=31 => { F = (D & B) | ((!D) & C); g = (5usize.wrapping_mul(i).wrapping_add(1)) % 16; } 32..=47 => { F = B ^ C ^ D; g = 
(3usize.wrapping_mul(i).wrapping_add(5)) % 16; } 48..=63 => { F = C ^ (B | !D); g = (7usize.wrapping_mul(i)) % 16; } _ => unreachable!(), } F = F.wrapping_add(A).wrapping_add(K[i]).wrapping_add(M[g]); A = D; D = C; C = B; B = B.wrapping_add(F.rotate_left(s[i])); } a0 = a0.wrapping_add(A); b0 = b0.wrapping_add(B); c0 = c0.wrapping_add(C); d0 = d0.wrapping_add(D); } let a0 = a0.to_le_bytes(); let b0 = b0.to_le_bytes(); let c0 = c0.to_le_bytes(); let d0 = d0.to_le_bytes(); let mut digest: [u8; 16] = [0; 16]; for i in 0..16 { match i { 0..=3 => digest[i] = a0[i % 4], 4..=7 => digest[i] = b0[i % 4], 8..=11 => digest[i] = c0[i % 4], 12..=15 => digest[i] = d0[i % 4], _ => unreachable!(), } } digest } fn md5_hex(input: &[u8]) -> String { md5(input) .into_iter() .map(|byte| format!("{:02x}", byte)) .collect::<String>() //.into_bytes() } fn mine(key: &str, zeroes: usize) -> u32 { let zeroes = "0".repeat(zeroes); for n in 1.. { let bytes = format!("{}{}", key, n).into_bytes(); let hash = md5_hex(&bytes); if hash.starts_with(&zeroes) { return n; } } unreachable!() } fn main() { let input = "ckczppom"; part1(&input); part2(&input); } fn part1(input: &str) { println!("{}", mine(input, 5)); } fn part2(input: &str) { println!("{}", mine(input, 6)); }
// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::context::Context; use diem_api_types::views; use anyhow::{Error, Result}; use warp::{reject, Rejection, Reply}; pub async fn index(context: Context) -> Result<impl Reply, Rejection> { let ledger_info = context.get_latest_ledger_info().map_err(internal_error)?; let chain_id = context.chain_id().id(); let ledger_version = ledger_info.ledger_info().version(); let ledger_timestamp = ledger_info.ledger_info().timestamp_usecs(); let info = views::LedgerInfo { chain_id, ledger_version, ledger_timestamp, }; Ok(warp::reply::json(&info)) } fn internal_error(err: Error) -> Rejection { reject::custom(views::InternalError::from(err)) }
// Copyright (c) 2017 King's College London // created by the Software Development Team <http://soft-dev.org/> // // The Universal Permissive License (UPL), Version 1.0 // // Subject to the condition set forth below, permission is hereby granted to any person obtaining a // copy of this software, associated documentation and/or data (collectively the "Software"), free // of charge and under any and all copyright rights in the Software, and any and all patent rights // owned or freely licensable by each licensor hereunder covering either (i) the unmodified // Software as contributed to or provided by such licensor, or (ii) the Larger Works (as defined // below), to deal in both // // (a) the Software, and // (b) any piece of software and/or hardware listed in the lrgrwrks.txt file // if one is included with the Software (each a "Larger Work" to which the Software is contributed // by such licensors), // // without restriction, including without limitation the rights to copy, create derivative works // of, display, perform, and distribute the Software and make, use, sell, offer for sale, import, // export, have made, and have sold the Software and the Larger Work(s), and to sublicense the // foregoing rights on either these or other terms. // // This license is subject to the following condition: The above copyright notice and either this // complete permission notice or at a minimum a reference to the UPL must be included in all copies // or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING // BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
#![feature(test)]
#![feature(try_from)]

extern crate cactus;
extern crate cfgrammar;
#[macro_use]
extern crate indexmap;
extern crate lrlex;
extern crate lrtable;
extern crate num_traits;
extern crate rmp_serde as rmps;
extern crate serde;
extern crate test;
extern crate typename;
extern crate vob;

// Internal modules; only `parser` is part of the public module tree.
mod astar;
mod builder;
mod cpctplus;
pub mod parser;
pub use parser::{Node, parse_rcvry, ParseError, ParseRepair, RecoveryKind};
mod mf;
pub use builder::{process_file, process_file_in_src, reconstitute};

/// A convenience macro for including statically compiled `.y` files. A file `src/x.y` which is
/// statically compiled by lrpar can then be used in a crate with `lrpar_mod!(x)`.
///
/// Expands to an `include!` of the `$n.rs` file that the build script wrote
/// into `OUT_DIR`.
#[macro_export]
macro_rules! lrpar_mod {
    ($n:ident) => {
        include!(concat!(env!("OUT_DIR"), "/", stringify!($n), ".rs"));
    };
}

// Re-exported for generated code; not part of the supported public API.
#[doc(hidden)]
pub use cfgrammar::NTIdx;
#[cfg(test)]
mod test_delete_from_db {
    use diar::{
        command::CommandError,
        commands::delete::delete_from_db,
        domain::{model::Favorite, repository::IRepository},
    };

    use crate::infrastructure::inmemory::repository::Repository;

    /// Deleting a stored favorite removes it from the repository.
    #[test]
    fn delete() {
        let favorite = Favorite::new("name1", "/");
        let repository = &Repository::new(vec![favorite.clone()]);

        delete_from_db(repository, favorite.name()).unwrap();

        assert!(repository.get(&favorite.name()).unwrap().is_none())
    }

    /// Deleting a key that was never stored reports `GivenKeyNotFound`.
    #[test]
    fn not_found_error() {
        let favorite = Favorite::new("name1", "/");
        let repository = &Repository::new(Vec::new());

        let result = delete_from_db(repository, favorite.name());
        let message = result.err().unwrap().to_string();

        assert_eq!(message, CommandError::GivenKeyNotFound.to_string())
    }
}
use actix_web::web; use actix_web::{HttpResponse}; use handlebars::Handlebars; use super::global; // Macro documentation can be found in the actix_web_codegen crate #[get("/")] pub async fn index(hb: web::Data<Handlebars<'_>>) -> HttpResponse { let data = json!({ "title": global::get_config_name(), "description": "", "keywords": "", "headInjects": "", "content": "", "summary": "", "sider": "", "config": "{}", "timestamp": "", }); let body = hb.render("index", &data).unwrap(); HttpResponse::Ok().body(body) } #[get("/{user}/{data}")] pub async fn user( hb: web::Data<Handlebars<'_>>, info: web::Path<(String, String)>, ) -> HttpResponse { let data = json!({ "user": info.0, "data": info.1 }); let body = hb.render("user", &data).unwrap(); HttpResponse::Ok().body(body) }
/// A simple student record used by the demo in `main`.
#[derive(Debug)]
struct Student {
    name: String,  // display name
    number: u32,   // student number (not printed by `main`)
}

fn main() {
    let student = Student {
        name: "Mehran".to_string(),
        number: 12,
    };
    println!("{:#?}", student.name);
}
/// Something that can produce an "interesting" number.
pub trait Thing {
    fn do_something_interesting(&self) -> i32;
}

pub struct A;

impl Thing for A {
    fn do_something_interesting(&self) -> i32 {
        1
    }
}

pub struct B;

impl Thing for B {
    fn do_something_interesting(&self) -> i32 {
        2
    }
}

/// Calls `do_something_interesting` on every element, preserving order.
///
/// FIX: bare trait objects (`Box<Thing>`) are deprecated (rejected in the 2021
/// edition) — now spelled `Box<dyn Thing>`. The parameter is also relaxed from
/// `&Vec<…>` to a slice; `&Vec<Box<dyn Thing>>` still coerces here, so existing
/// callers are unaffected.
pub fn do_something_to_everyone(everyone: &[Box<dyn Thing>]) -> Vec<i32> {
    everyone
        .iter()
        .map(|thing| thing.do_something_interesting())
        .collect()
}

fn main() {
    let everyone: Vec<Box<dyn Thing>> =
        vec![Box::new(A), Box::new(B), Box::new(B), Box::new(A)];
    let res = do_something_to_everyone(&everyone);
    println!("{:#?}", res);
}
#[doc = "Register `CCR` reader"] pub type R = crate::R<CCR_SPEC>; #[doc = "Register `CCR` writer"] pub type W = crate::W<CCR_SPEC>; #[doc = "Field `DUAL` reader - Dual ADC mode selection"] pub type DUAL_R = crate::FieldReader; #[doc = "Field `DUAL` writer - Dual ADC mode selection"] pub type DUAL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 5, O>; #[doc = "Field `DELAY` reader - Delay between 2 sampling phases"] pub type DELAY_R = crate::FieldReader; #[doc = "Field `DELAY` writer - Delay between 2 sampling phases"] pub type DELAY_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>; #[doc = "Field `DMACFG` reader - DMA configuration (for dual ADC mode)"] pub type DMACFG_R = crate::BitReader; #[doc = "Field `DMACFG` writer - DMA configuration (for dual ADC mode)"] pub type DMACFG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `MDMA` reader - Direct memory access mode for dual ADC mode"] pub type MDMA_R = crate::FieldReader; #[doc = "Field `MDMA` writer - Direct memory access mode for dual ADC mode"] pub type MDMA_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>; #[doc = "Field `CKMODE` reader - ADC clock mode"] pub type CKMODE_R = crate::FieldReader; #[doc = "Field `CKMODE` writer - ADC clock mode"] pub type CKMODE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>; #[doc = "Field `PRESC` reader - ADC prescaler"] pub type PRESC_R = crate::FieldReader; #[doc = "Field `PRESC` writer - ADC prescaler"] pub type PRESC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>; #[doc = "Field `VREFEN` reader - Vrefint enable"] pub type VREFEN_R = crate::BitReader; #[doc = "Field `VREFEN` writer - Vrefint enable"] pub type VREFEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `CH17SEL` reader - CH17 selection (temperature)"] pub type CH17SEL_R = crate::BitReader; #[doc = "Field `CH17SEL` writer - CH17 selection (temperature)"] pub type CH17SEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, 
REG, O>; #[doc = "Field `CH18SEL` reader - CH18 selection (Vbat)"] pub type CH18SEL_R = crate::BitReader; #[doc = "Field `CH18SEL` writer - CH18 selection (Vbat)"] pub type CH18SEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; impl R { #[doc = "Bits 0:4 - Dual ADC mode selection"] #[inline(always)] pub fn dual(&self) -> DUAL_R { DUAL_R::new((self.bits & 0x1f) as u8) } #[doc = "Bits 8:11 - Delay between 2 sampling phases"] #[inline(always)] pub fn delay(&self) -> DELAY_R { DELAY_R::new(((self.bits >> 8) & 0x0f) as u8) } #[doc = "Bit 13 - DMA configuration (for dual ADC mode)"] #[inline(always)] pub fn dmacfg(&self) -> DMACFG_R { DMACFG_R::new(((self.bits >> 13) & 1) != 0) } #[doc = "Bits 14:15 - Direct memory access mode for dual ADC mode"] #[inline(always)] pub fn mdma(&self) -> MDMA_R { MDMA_R::new(((self.bits >> 14) & 3) as u8) } #[doc = "Bits 16:17 - ADC clock mode"] #[inline(always)] pub fn ckmode(&self) -> CKMODE_R { CKMODE_R::new(((self.bits >> 16) & 3) as u8) } #[doc = "Bits 18:21 - ADC prescaler"] #[inline(always)] pub fn presc(&self) -> PRESC_R { PRESC_R::new(((self.bits >> 18) & 0x0f) as u8) } #[doc = "Bit 22 - Vrefint enable"] #[inline(always)] pub fn vrefen(&self) -> VREFEN_R { VREFEN_R::new(((self.bits >> 22) & 1) != 0) } #[doc = "Bit 23 - CH17 selection (temperature)"] #[inline(always)] pub fn ch17sel(&self) -> CH17SEL_R { CH17SEL_R::new(((self.bits >> 23) & 1) != 0) } #[doc = "Bit 24 - CH18 selection (Vbat)"] #[inline(always)] pub fn ch18sel(&self) -> CH18SEL_R { CH18SEL_R::new(((self.bits >> 24) & 1) != 0) } } impl W { #[doc = "Bits 0:4 - Dual ADC mode selection"] #[inline(always)] #[must_use] pub fn dual(&mut self) -> DUAL_W<CCR_SPEC, 0> { DUAL_W::new(self) } #[doc = "Bits 8:11 - Delay between 2 sampling phases"] #[inline(always)] #[must_use] pub fn delay(&mut self) -> DELAY_W<CCR_SPEC, 8> { DELAY_W::new(self) } #[doc = "Bit 13 - DMA configuration (for dual ADC mode)"] #[inline(always)] #[must_use] pub fn dmacfg(&mut self) -> 
DMACFG_W<CCR_SPEC, 13> { DMACFG_W::new(self) } #[doc = "Bits 14:15 - Direct memory access mode for dual ADC mode"] #[inline(always)] #[must_use] pub fn mdma(&mut self) -> MDMA_W<CCR_SPEC, 14> { MDMA_W::new(self) } #[doc = "Bits 16:17 - ADC clock mode"] #[inline(always)] #[must_use] pub fn ckmode(&mut self) -> CKMODE_W<CCR_SPEC, 16> { CKMODE_W::new(self) } #[doc = "Bits 18:21 - ADC prescaler"] #[inline(always)] #[must_use] pub fn presc(&mut self) -> PRESC_W<CCR_SPEC, 18> { PRESC_W::new(self) } #[doc = "Bit 22 - Vrefint enable"] #[inline(always)] #[must_use] pub fn vrefen(&mut self) -> VREFEN_W<CCR_SPEC, 22> { VREFEN_W::new(self) } #[doc = "Bit 23 - CH17 selection (temperature)"] #[inline(always)] #[must_use] pub fn ch17sel(&mut self) -> CH17SEL_W<CCR_SPEC, 23> { CH17SEL_W::new(self) } #[doc = "Bit 24 - CH18 selection (Vbat)"] #[inline(always)] #[must_use] pub fn ch18sel(&mut self) -> CH18SEL_W<CCR_SPEC, 24> { CH18SEL_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "ADC common control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct CCR_SPEC; impl crate::RegisterSpec for CCR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`ccr::R`](R) reader structure"] impl crate::Readable for CCR_SPEC {} #[doc = "`write(|w| ..)` method takes [`ccr::W`](W) writer structure"] impl crate::Writable for CCR_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets CCR to value 0"] impl crate::Resettable for CCR_SPEC { const RESET_VALUE: Self::Ux = 0; }
// Shared definitions, always compiled.
pub mod common;

// The remaining modules are opt-in via Cargo features so that consumers
// only build the roles they actually need.
#[cfg(feature = "client")]
pub mod client;

#[cfg(feature = "server")]
pub mod server;

#[cfg(feature = "node")]
pub mod node;
use chrono::{TimeZone, Utc};
use cloudevents::{EventBuilder, EventBuilderV10};
use etherparse::{InternetSlice::*, ReadError, SlicedPacket, TransportSlice::*};
use pcap::{Capture, Packet};
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
use std::net::IpAddr;
use uuid::Uuid;

/// Transport protocol of a captured packet (serialized into the event body).
#[derive(Serialize, Deserialize)]
enum IpProtocolKind {
    UNKNOWN,
    UDP,
    TCP,
}

/// IP version of a captured packet.
#[derive(Serialize, Deserialize)]
enum IpAddrKind {
    UNKNOWN,
    V4,
    V6,
}

/// Flattened per-packet metadata published as the cloud-event payload.
#[derive(Serialize, Deserialize)]
struct PacketEventData {
    source_ip: IpAddr,
    dest_ip: IpAddr,
    version: IpAddrKind,
    /// Original (on-the-wire) packet length in bytes, from the pcap header.
    length: u64,
    source_port: u16,
    dest_port: u16,
    protocol: IpProtocolKind,
}

impl<'a> TryFrom<Packet<'a>> for PacketEventData {
    type Error = ReadError;

    /// Parses addressing and transport metadata out of a raw ethernet frame.
    ///
    /// Fields the packet does not carry (e.g. ports on a non-TCP/UDP payload)
    /// keep their `Default` placeholder values.
    fn try_from(packet: Packet) -> Result<Self, Self::Error> {
        // `Packet` derefs to `&[u8]`, so the slice parser can read it directly.
        let sliced = SlicedPacket::from_ethernet(&packet)?;

        let mut packet_data = PacketEventData::default();
        packet_data.length = packet.header.len as u64;

        match sliced.ip {
            Some(Ipv4(header)) => {
                packet_data.source_ip = IpAddr::V4(header.source_addr());
                packet_data.dest_ip = IpAddr::V4(header.destination_addr());
                packet_data.version = IpAddrKind::V4;
            }
            Some(Ipv6(header, _)) => {
                packet_data.source_ip = IpAddr::V6(header.source_addr());
                packet_data.dest_ip = IpAddr::V6(header.destination_addr());
                packet_data.version = IpAddrKind::V6;
            }
            None => {}
        }

        match sliced.transport {
            Some(Udp(header)) => {
                packet_data.protocol = IpProtocolKind::UDP;
                packet_data.source_port = header.source_port();
                packet_data.dest_port = header.destination_port();
            }
            Some(Tcp(header)) => {
                packet_data.protocol = IpProtocolKind::TCP;
                packet_data.source_port = header.source_port();
                packet_data.dest_port = header.destination_port();
            }
            None => {}
        }

        Ok(packet_data)
    }
}

impl Default for PacketEventData {
    fn default() -> PacketEventData {
        // The unspecified IPv4 address (0.0.0.0) stands in until real
        // addresses are parsed out of the frame.
        let unspecified = IpAddr::V4(std::net::Ipv4Addr::UNSPECIFIED);
        PacketEventData {
            source_ip: unspecified,
            dest_ip: unspecified,
            version: IpAddrKind::UNKNOWN,
            length: 0,
            source_port: 0,
            dest_port: 0,
            protocol: IpProtocolKind::UNKNOWN,
        }
    }
}

/// Captures packets from the wireless interface and prints one cloud event
/// per packet to stdout.
fn main() {
    let mut cap = Capture::from_device("wlo1")
        .unwrap()
        .promisc(true)
        .snaplen(5000)
        .open()
        .unwrap();

    while let Ok(packet) = cap.next() {
        // FIX: `Utc.timestamp` takes (seconds, *nanoseconds*), but pcap's
        // `tv_usec` is in microseconds — scale by 1000 so event timestamps
        // carry the correct sub-second component.
        let dt = Utc.timestamp(
            packet.header.ts.tv_sec,
            packet.header.ts.tv_usec as u32 * 1000,
        );
        let packet_data = PacketEventData::try_from(packet).unwrap();
        let event = EventBuilderV10::new()
            .id(&Uuid::new_v4().to_hyphenated().to_string())
            .ty("net-events.packet")
            .source("com.github.mattdevy.net-events")
            .time(dt)
            .data(
                "application/json",
                serde_json::to_string(&packet_data).unwrap(),
            )
            .build()
            .unwrap();
        println!("{}", event.to_string());
    }
}
//! An SDC point record. //! //! At this point, we're keeping it simple and only handling 5.0. use error::Error; use result::Result; /// An SDC point. #[derive(Clone, Copy, Debug, Default)] #[allow(missing_docs)] pub struct Point { pub time: f64, pub range: f32, pub theta: f32, pub x: f32, pub y: f32, pub z: f32, pub amplitude: u16, pub width: u16, pub target_type: TargetType, pub target: u8, pub num_target: u8, pub rg_index: u16, pub facet_number: u8, pub high_channel: bool, pub class_id: Option<u8>, pub rho: Option<f32>, pub reflectance: Option<i16>, } impl Point { /// Creates a new, default point. /// /// # Examples /// /// ``` /// use sdc::point::Point; /// let point = Point::new(); /// ``` pub fn new() -> Point { Default::default() } /// Returns the channel description byte from this point. /// /// # Examples /// /// ``` /// use sdc::point::Point; /// let point = Point::new(); /// let byte = point.channel_desc_byte(); /// ``` pub fn channel_desc_byte(&self) -> u8 { let mut byte = self.facet_number & 0x3; if self.high_channel { byte |= 0b01000000; } byte } } /// How the point was derived from a waveform. #[derive(Clone, Copy, Debug, PartialEq)] #[allow(missing_docs)] pub enum TargetType { CenterOfGravity, Parabola, Gaussian, Peak, } impl TargetType { /// Returns this target type as a `u8`. /// /// # Examples /// /// ``` /// use sdc::point::TargetType; /// assert_eq!(0, TargetType::CenterOfGravity.as_u8()); /// assert_eq!(1, TargetType::Parabola.as_u8()); /// assert_eq!(2, TargetType::Gaussian.as_u8()); /// ``` pub fn as_u8(&self) -> u8 { match *self { TargetType::CenterOfGravity => 0, TargetType::Parabola => 1, TargetType::Gaussian => 2, TargetType::Peak => 3, } } /// Returns the target type for this `u8`. 
/// /// # Examples /// /// ``` /// use sdc::point::TargetType; /// assert_eq!(TargetType::Peak, TargetType::from_u8(3).unwrap()); /// assert!(TargetType::from_u8(10).is_err()); /// ``` pub fn from_u8(n: u8) -> Result<TargetType> { match n { 0 => Ok(TargetType::CenterOfGravity), 1 => Ok(TargetType::Parabola), 2 => Ok(TargetType::Gaussian), 3 => Ok(TargetType::Peak), _ => Err(Error::InvalidTargetType(n)), } } } impl Default for TargetType { fn default() -> TargetType { TargetType::Peak } }
#[doc = "Reader of register CH1_DBG_CTDREQ"] pub type R = crate::R<u32, super::CH1_DBG_CTDREQ>; #[doc = "Reader of field `CH1_DBG_CTDREQ`"] pub type CH1_DBG_CTDREQ_R = crate::R<u8, u8>; impl R { #[doc = "Bits 0:5"] #[inline(always)] pub fn ch1_dbg_ctdreq(&self) -> CH1_DBG_CTDREQ_R { CH1_DBG_CTDREQ_R::new((self.bits & 0x3f) as u8) } }
use std::time::{SystemTime, UNIX_EPOCH}; use std::collections::VecDeque; use rustc_serialize::*; #[derive(Debug, Clone)] pub enum BuildState { Good, Bad } #[derive(Debug, Clone)] pub struct ServiceStatus { time : u64, status : BuildState } impl Encodable for ServiceStatus { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_struct("ServiceStatus", 2, |s| { try!(s.emit_struct_field("time", 0, |s| { s.emit_u64(self.time) })); try!(s.emit_struct_field("status", 1, |s| { match self.status { BuildState::Good => s.emit_str("GOOD"), BuildState::Bad => s.emit_str("BAD") } })); Ok(()) }) } } impl Decodable for ServiceStatus { fn decode<D: Decoder>(d: &mut D) -> Result<ServiceStatus, D::Error> { d.read_struct("ServiceStatus", 4, |d| { Ok(ServiceStatus { time : try!(d.read_struct_field("time", 0, |d| { Decodable::decode(d) })), status : try!(d.read_struct_field("status", 1, |d| { let status : Result<String, _> = Decodable::decode(d); status.and_then(|status| match status.as_ref() { "GOOD" => Ok(BuildState::Good), "BAD" => Ok(BuildState::Bad), _ => Err(d.error("invalid build state")) }) })), }) }) } } #[derive(RustcEncodable, RustcDecodable, Debug, Clone)] pub struct ServiceHistory { pub secret : String, history : VecDeque<ServiceStatus> } impl ServiceHistory { pub fn new(secret : String) -> ServiceHistory { ServiceHistory { secret : secret, history : VecDeque::new() } } pub fn add(&mut self, state: BuildState) { let now = SystemTime::now(); let seconds_since = now.duration_since(UNIX_EPOCH).unwrap().as_secs(); self.history.push_front(ServiceStatus{time : seconds_since, status : state}) } pub fn get_last_n_elements(&mut self, n: i32) -> Vec<&ServiceStatus> { self.history.iter().take(n as usize).collect() } } #[derive(RustcEncodable, RustcDecodable, Debug, Clone)] pub struct ServicesHandler { services : Vec<ServiceHistory> } impl ServicesHandler { pub fn new() -> ServicesHandler { ServicesHandler { services: Vec::new() } } pub fn load_from_file() -> 
ServicesHandler { let path = ::std::path::Path::new("db/history.toml"); if path.exists() == false { println!("History not found, creating"); return ServicesHandler::new(); } ::config::load_toml(&path).unwrap() } pub fn save(&self) { use std::io::Write; let encoded = ::toml::encode_str(self); let mut f = ::std::fs::File::create("db/history.toml").unwrap(); f.write_all(encoded.as_bytes()).unwrap(); } pub fn add_history(&mut self, secret : &str, status : &str) { let status = match status.as_ref() { "good" => BuildState::Good, "bad" => BuildState::Bad, _ => panic!("invalid request (not \"good\"/\"bad\")") }; for service_history in self.services.iter_mut() { if service_history.secret == secret { service_history.add(status); println!("{:?}", "finded"); return; } } println!("creating new"); let mut history = ServiceHistory::new(secret.to_string()); history.add(status); self.services.push(history); } pub fn get_last_history(&mut self, secret: &String, n: i32) -> String { for service_history in self.services.iter_mut() { if &service_history.secret == secret { return json::encode(&service_history.get_last_n_elements(n)).unwrap(); } } return String::new(); } }
#![allow(non_camel_case_types)] #![allow(non_snake_case)] #![allow(non_upper_case_globals)] #![allow(unused)] #![no_std] pub use bindings::*; pub mod bindings; use core::convert::Infallible; // implement digital traits from embedded_hal use embedded_hal::digital::v2::{InputPin, OutputPin, ToggleableOutputPin}; pub struct Pin { pin: u8, } impl Pin { pub fn new(port: gpio_port, pin: u8) -> Self { Pin { pin: unsafe { pin_new(port, pin) }, } } pub fn from(pin: u8) -> Self { Pin { pin: pin } } pub fn into_output(self) -> Self { unsafe { pin_into_output(self.pin); } self } pub fn into_input(self) -> Self { unsafe { pin_into_input(self.pin); } self } pub fn into_pull_down_input(self) -> Self { unsafe { pin_into_pull_down_input(self.pin); } self } pub fn into_pull_up_input(self) -> Self { unsafe { pin_into_pull_up_input(self.pin); } self } } impl OutputPin for Pin { type Error = Infallible; fn set_low(&mut self) -> Result<(), Self::Error> { unsafe { pin_set_low(self.pin); } Ok(()) } fn set_high(&mut self) -> Result<(), Self::Error> { unsafe { pin_set_high(self.pin); } Ok(()) } } impl InputPin for Pin { type Error = Infallible; fn is_low(&self) -> Result<bool, Self::Error> { Ok(unsafe { pin_is_low(self.pin) }) } fn is_high(&self) -> Result<bool, Self::Error> { Ok(unsafe { pin_is_high(self.pin) }) } } impl ToggleableOutputPin for Pin { type Error = Infallible; fn toggle(&mut self) -> Result<(), Self::Error> { unsafe { pin_toggle(self.pin); } Ok(()) } } // implement delay traits from embedded_hal // Note: <u32> unimplemented because atmel start hal uses uin16_t for delay time use embedded_hal::blocking::delay::{DelayMs, DelayUs}; pub struct Delay; // empty struct impl DelayMs<u16> for Delay { fn delay_ms(&mut self, ms: u16) { unsafe { delay_ms(ms); } } } impl DelayMs<u8> for Delay { fn delay_ms(&mut self, ms: u8) { self.delay_ms(ms as u16); } } impl DelayUs<u16> for Delay { fn delay_us(&mut self, us: u16) { unsafe { delay_us(us); } } } impl DelayUs<u8> for Delay { fn 
delay_us(&mut self, us: u8) { self.delay_us(us as u16); } } // implement blocking spi traits from embedded_hal use embedded_hal::blocking::spi::Write; pub struct Spi { io: *mut bindings::io_descriptor, } impl Spi { pub fn new(module: &mut bindings::spi_m_sync_descriptor) -> Self { let module = module as *mut bindings::spi_m_sync_descriptor; let mut io = core::ptr::null_mut() as *mut bindings::io_descriptor; unsafe { bindings::spi_m_sync_get_io_descriptor( module, &mut io as *mut *mut bindings::io_descriptor, ); bindings::spi_m_sync_enable(module); } Spi { io: io } } } impl Write<u8> for Spi { type Error = Infallible; fn write(&mut self, buffer: &[u8]) -> Result<(), Self::Error> { unsafe { bindings::io_write(self.io, buffer.as_ptr(), buffer.len() as u16); } Ok(()) } }
use std::os::raw::c_char;

use crate::util;

pub mod dataframe;
pub mod manipulation;
pub mod sum;
pub mod count;
pub mod mean;
pub mod variance;

/// Returns the JSON bootstrap specification for this FFI module: one entry
/// per transformation constructor, listing its C-level argument types and
/// return type. Language bindings are generated from this spec via
/// `util::bootstrap`.
///
/// NOTE(review): this spec is maintained by hand and must be kept in sync
/// with the `make_*` constructors exported elsewhere in the crate — verify
/// on any change.
#[no_mangle]
pub extern "C" fn opendp_trans__bootstrap() -> *const c_char {
    let spec = r#"{
    "functions": [
        {
            "name": "make_identity",
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_split_lines",
            "args": [
                ["const char *", "selector"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_parse_series",
            "args": [
                ["const char *", "selector"],
                ["bool", "impute"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_split_records",
            "args": [
                ["const char *", "selector"],
                ["const char *", "separator"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_create_dataframe",
            "args": [
                ["const char *", "selector"],
                ["FfiObject *", "col_names"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_split_dataframe",
            "args": [
                ["const char *", "selector"],
                ["const char *", "separator"],
                ["FfiObject *", "col_names"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_parse_column",
            "args": [
                ["const char *", "selector"],
                ["void *", "key"],
                ["bool", "impute"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_select_column",
            "args": [
                ["const char *", "selector"],
                ["void *", "key"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_clamp_vec",
            "args": [
                ["const char *", "selector"],
                ["void *", "lower"],
                ["void *", "upper"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_clamp_scalar",
            "args": [
                ["const char *", "selector"],
                ["void *", "lower"],
                ["void *", "upper"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_cast_vec",
            "args": [
                ["const char *", "selector"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_bounded_covariance",
            "args": [
                ["const char *", "selector"],
                ["FfiObject *", "lower"],
                ["FfiObject *", "upper"],
                ["unsigned int", "length"],
                ["unsigned int", "ddof"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_bounded_mean",
            "args": [
                ["const char *", "selector"],
                ["void *", "lower"],
                ["void *", "upper"],
                ["unsigned int", "length"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_bounded_sum",
            "args": [
                ["const char *", "selector"],
                ["void *", "lower"],
                ["void *", "upper"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_bounded_sum_n",
            "args": [
                ["const char *", "selector"],
                ["void *", "lower"],
                ["void *", "upper"],
                ["unsigned int", "n"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_bounded_variance",
            "args": [
                ["const char *", "selector"],
                ["void *", "lower"],
                ["void *", "upper"],
                ["unsigned int", "length"],
                ["unsigned int", "ddof"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_count",
            "args": [
                ["const char *", "selector"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_count_by",
            "args": [
                ["const char *", "selector"],
                ["unsigned int", "n"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        },
        {
            "name": "make_count_by_categories",
            "args": [
                ["const char *", "selector"],
                ["FfiObject *", "categories"]
            ],
            "ret": "FfiResult<FfiTransformation *>"
        }
    ]
}"#;
    util::bootstrap(spec)
}
use std::fs::File; use std::io::ErrorKind; use std::io::prelude::*; fn main() { let fp = File::open("hello.txt"); let mut f = match fp { Ok(file) => file, Err(error) => match error.kind() { ErrorKind::NotFound => match File::create("hello.txt") { Ok(fc) => fc, Err(e) => panic!("Tried to create file but there was a problem: {:?}", e), }, other_error => panic!("There was a problem opening the file: {:?}", other_error), }, }; f.write_all(b"Hello, world!"); f.sync_all(); }
use crate::config::Config; use slog::{o, Drain, Logger}; use std::sync::Mutex; pub fn create_logger(config: &Config) -> Logger { if config.log_json { slog::Logger::root( slog_async::Async::new( Mutex::new(slog_json::Json::default(std::io::stderr())).map(slog::Fuse), ) .build() .fuse(), o!("version" => env!("CARGO_PKG_VERSION")), ) } else { slog::Logger::root( slog_async::Async::new( slog_term::FullFormat::new(slog_term::TermDecorator::new().build()) .build() .fuse(), ) .build() .fuse(), o!("version" => env!("CARGO_PKG_VERSION")), ) } } pub trait LoggerExt: private::Sealed { fn with_scope<S: ToString>(&self, scope: S) -> Self; } impl LoggerExt for Logger { fn with_scope<S: ToString>(&self, scope: S) -> Self { self.new(o!("scope" => scope.to_string())) } } mod private { use slog::Logger; pub trait Sealed {} impl Sealed for Logger {} }
//! Module is responsible for tables underlyign grid. //! //! It might be used when implementing your own [`TableOption`] and [`CellOption`]. //! //! [`TableOption`]: crate::settings::TableOption //! [`CellOption`]: crate::settings::CellOption #[cfg(feature = "std")] mod colored_config; mod compact_multiline_config; pub mod dimension; pub mod records; pub use papergrid::color; pub use papergrid::colors; pub use papergrid::util; pub mod config { //! Module contains a list of configs for varios tables/grids. pub use papergrid::config::{ compact::CompactConfig, AlignmentHorizontal, AlignmentVertical, Border, Borders, Entity, EntityIterator, Indent, Line, Position, Sides, }; #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] pub use papergrid::config::spanned::{ EntityMap, Formatting, HorizontalLine, Offset, SpannedConfig, VerticalLine, }; #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] pub use super::colored_config::{ColorMap, ColoredConfig}; pub use super::compact_multiline_config::CompactMultilineConfig; } pub use papergrid::grid::compact::CompactGrid; #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] pub use papergrid::grid::iterable::Grid; #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] pub use papergrid::grid::peekable::PeekableGrid;
#![deny(warnings)] extern crate libc; use std::mem; use std::marker::PhantomData; use std::os::unix::io::RawFd; use libc::{c_void, size_t, ssize_t}; use libc::funcs::posix88::unistd::fork; /// A continuation, accepting an argument of type `T`. pub struct Cont<T> { fd: RawFd, phantom: PhantomData<Box<FnOnce(T)>>, } impl<T> Cont<T> where T: Copy + Send + 'static, { /// Invoke the continuation. pub fn invoke(&self, x: T) -> ! { let ptr = &x as *const T; let size = mem::size_of::<T>(); unsafe { let r = libc::write(self.fd, ptr as *const c_void, size as size_t); assert_eq!(r, size as ssize_t); libc::exit(0) } } } impl<T> Drop for Cont<T> { fn drop(&mut self) { unsafe { libc::close(self.fd); } } } /// Call with current continuation. pub fn call_cc<T, F>(f: F) -> T where T: Copy + Send + 'static, F: FnOnce(Cont<T>) -> T { unsafe { let mut fds: [RawFd; 2] = [0, 0]; let r = libc::pipe(fds.as_mut_ptr()); assert_eq!(r, 0); let pid = fork(); if pid < 0 { panic!("fork() failed"); } else if pid > 0 { // parent: call f immediately. libc::close(fds[0]); f(Cont { fd: fds[1], phantom: PhantomData, }) } else { // child: wait for the continuation to be invoked. libc::close(fds[1]); // read(2) will return 0 if the write end of the pipe is closed. // This will happen when the parent exits, or when the // corresponding Cont<T> is deleted. // // We naively assume the value can be read in a single call to // read(2). let mut buf: T = mem::uninitialized(); let ptr = &mut buf as *mut T; let size = mem::size_of::<T>(); if libc::read(fds[0], ptr as *mut c_void, size as size_t) <= 0 { libc::exit(0); } libc::close(fds[0]); buf } } }
//! This example demonstrates using the [`Concat`] [`TableOption`] to concatenate //! [`tables`](Table) together. //! //! * [`Concat`] supports appending tables vertically and horizontally. //! //! * Note how the base tables style settings take take precedence over the appended table. //! If the two tables are of unequal shape, additional blank cells are added as needed. use tabled::{ settings::{object::Segment, Alignment, Concat, Modify, Style}, Table, Tabled, }; #[derive(Debug, Tabled)] struct Weather { temperature_c: f64, wind_ms: f64, } #[derive(Debug, Tabled)] struct Location( #[tabled(rename = "latitude")] f64, #[tabled(rename = "longitude")] f64, ); fn main() { let weather_data = [ Weather { temperature_c: 1.0, wind_ms: 3.0, }, Weather { temperature_c: -20.0, wind_ms: 30.0, }, Weather { temperature_c: 40.0, wind_ms: 100.0, }, ]; let location_data = [ Location(111.111, 333.333), Location(5.111, 7282.1), Location(0.0, 0.0), Location(0.0, 0.0), ]; let location_table = Table::new(location_data); let mut weather_table = Table::new(weather_data); weather_table .with(Concat::horizontal(location_table)) .with(Style::empty()) .with(Modify::new(Segment::all()).with(Alignment::left())); println!("{weather_table}"); }
use super::matrix::model::ModelMatrix;
use super::webgl::{ProgramType, WebGlF32Vbo, WebGlI16Ibo, WebGlRenderingContext};
use crate::arena::block;
use ndarray::Array2;

/// GPU buffers for the grid lines drawn over a table, cached per table size.
pub struct TableGrid {
    // Element (line) index buffer and the number of indices it holds.
    grid_index_buffer: WebGlI16Ibo,
    grid_index_len: i32,
    // Vertex buffer with the endpoints of every grid line.
    grid_vertexis_buffer: WebGlF32Vbo,
    // The [width, height] (in whole cells) the buffers were built for.
    table_size: [u64; 2],
}

impl TableGrid {
    /// Creates a grid pre-sized for a 20x20 table; `render` rebuilds the
    /// buffers on demand when the actual table size differs.
    pub fn new(gl: &WebGlRenderingContext) -> Self {
        let table_size = [20, 20];
        let (grid_vertexis_buffer, grid_index_buffer, grid_index_len) =
            Self::create_grid_buffers(&gl, &table_size);
        Self {
            grid_index_buffer,
            grid_index_len,
            grid_vertexis_buffer,
            table_size,
        }
    }

    /// Draws the grid for `table` using the given view-projection matrix.
    ///
    /// Rebuilds the vertex/index buffers only when the table's integral
    /// size changed since the last call.
    pub fn render(
        &mut self,
        gl: &mut WebGlRenderingContext,
        vp_matrix: &Array2<f32>,
        table: &block::table::Table,
    ) {
        // The grid covers whole cells only; fractional size is floored.
        let table_size = {
            let sz = table.size();
            [sz[0].floor() as u64, sz[1].floor() as u64]
        };
        if table_size[0] != self.table_size[0] || table_size[1] != self.table_size[1] {
            let (grid_vertexis_buffer, grid_index_buffer, grid_index_len) =
                Self::create_grid_buffers(&gl, &table_size);
            self.grid_vertexis_buffer = grid_vertexis_buffer;
            self.grid_index_buffer = grid_index_buffer;
            self.grid_index_len = grid_index_len;
            self.table_size = table_size;
        }
        gl.use_program(ProgramType::TablegridProgram);
        // Depth test set to ALWAYS: grid lines pass regardless of depth.
        gl.depth_func(web_sys::WebGlRenderingContext::ALWAYS);
        gl.set_attr_vertex(&self.grid_vertexis_buffer, 3, 0);
        gl.bind_buffer(
            web_sys::WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,
            Some(&self.grid_index_buffer),
        );
        let model_matrix: Array2<f32> = ModelMatrix::new().into();
        let mvp_matrix = vp_matrix.dot(&model_matrix);
        // NOTE(review): the MVP matrix is transposed (reversed_axes) before
        // upload — presumably to match the shader's expected layout; confirm.
        gl.set_unif_translate(mvp_matrix.reversed_axes());
        gl.set_unif_point_size(1.0);
        gl.line_width(5.0);
        gl.set_unif_bg_color(&table.grid_color().to_color().to_f32array());
        gl.draw_elements_with_i32(
            web_sys::WebGlRenderingContext::LINES,
            self.grid_index_len,
            web_sys::WebGlRenderingContext::UNSIGNED_SHORT,
            0,
        );
    }

    /// Builds vertex and index buffers for a grid of `table_size` cells
    /// centered on the origin: a pair of endpoints per column boundary and
    /// per row boundary, drawn as LINES.
    fn create_grid_buffers(
        gl: &WebGlRenderingContext,
        table_size: &[u64; 2],
    ) -> (WebGlF32Vbo, WebGlI16Ibo, i32) {
        let width = table_size[0];
        let height = table_size[1];
        let x_offset = width as f32 / 2.0;
        let y_offset = height as f32 / 2.0;
        let mut grid_vertexis = vec![];
        // Vertical lines: width + 1 column boundaries, two endpoints each.
        for x in 0..(width + 1) {
            let x = x as f32 - x_offset;
            grid_vertexis.push(vec![x, -y_offset, 0.0]);
            grid_vertexis.push(vec![x, y_offset, 0.0]);
        }
        // Horizontal lines: height + 1 row boundaries, two endpoints each.
        for y in 0..(height + 1) {
            let y = y as f32 - y_offset;
            grid_vertexis.push(vec![-x_offset, y, 0.0]);
            grid_vertexis.push(vec![x_offset, y, 0.0]);
        }
        // Vertices are emitted in draw order, so indices are simply 0..n.
        let mut grid_idx = vec![];
        for idx in 0..grid_vertexis.len() {
            grid_idx.push(idx as i16);
        }
        let grid_vertexis: Vec<f32> = grid_vertexis.into_iter().flatten().collect();
        let grid_vertexis_buffer = gl.create_vbo_with_f32array(&grid_vertexis);
        let grid_index_buffer = gl.create_ibo_with_i16array(&grid_idx);
        (
            grid_vertexis_buffer,
            grid_index_buffer,
            grid_idx.len() as i32,
        )
    }
}
#![feature(test)]
use std::{cmp::Reverse, collections::HashSet};

extern crate test;

use itertools::Itertools;
use lazy_static::lazy_static;
use rand::seq::SliceRandom;
use rand::Rng;
use rand::SeedableRng;
use rayon::prelude::*;
use std::io::prelude::*;

// Number of players; teams are 4-element subsets of 0..SSS.
const SSS: u8 = 100;

lazy_static! {
    // Every possible 4-player team, enumerated once up front.
    static ref ALL_TEAMS: Vec<[u8; 4]> = {
        (0..SSS)
            .combinations(4)
            .map(|x| [x[0], x[1], x[2], x[3]])
            .collect_vec()
    };
}

// fn check() {
//     let t = include_str!("../message.txt");
//     let mut seen = HashSet::<[u8; 2]>::new();
//     for line in t.lines() {
//         let v = line
//             .split(' ')
//             .map(|x| x.parse::<u8>().unwrap())
//             .collect_vec();
//         if !add_seen(&[v[0], v[1], v[2], v[3]], &mut seen) {
//             panic!("seen {:?}", v);
//         }
//     }
// }

/// Genetic search for a maximal set of teams in which no pair of players
/// appears together twice. Runs forever, logging and saving every new best.
fn main() {
    // Best (longest) team list length seen so far.
    let mut mmm = 0;
    let mut rng = rand::thread_rng();
    // Population size and how many of the fittest survive each round.
    let pool = 100usize;
    let top_count = 25usize;
    println!("Seeding teams");
    // Create a gene pool: each member is an independently seeded random
    // greedy schedule, built in parallel.
    let mut team_collection = (0..pool as u64)
        .collect_vec()
        .into_par_iter()
        .map(|i| {
            let mut rng = rand::rngs::StdRng::seed_from_u64(i);
            create_team_list(&mut rng)
        })
        .collect::<Vec<_>>();
    println!("Generated initial teams");
    for i in 0.. {
        // Select the fittest members (longest team lists first).
        team_collection.sort_by_key(|x| Reverse(x.len()));
        let mut top_teams = team_collection.into_iter().take(top_count).collect_vec();
        top_teams.shuffle(&mut rng);
        // Breed: cross over pairs of survivors and repair the offspring,
        // refilling the pool back up to `pool` members.
        team_collection = top_teams
            .iter()
            .combinations(2)
            .take(pool - top_count)
            .collect_vec()
            .into_par_iter()
            .flat_map(|x| {
                let t1 = &x[0];
                let t2 = &x[1];
                let (nt1, nt2) = merge_teams(t1, t2);
                vec![fix_team(nt1), fix_team(nt2)]
            })
            .collect::<Vec<_>>();
        team_collection.append(&mut top_teams);
        let biggest = team_collection.iter().max_by_key(|x| x.len()).unwrap();
        println!(
            "Round: {}, round biggest: {}, overal biggest: {}",
            i,
            biggest.len(),
            mmm
        );
        // Persist every new record.
        if biggest.len() > mmm {
            mmm = biggest.len();
            print_team(i, biggest);
        }
    }
    // for i in 0.. {
    //     let team_list = create_team_list(&mut rng);
    //     if team_list.len() > mmm {
    //         mmm = team_list.len();
    //         index = i;
    //         let mut f = std::fs::File::create(format!("result-{}", i)).unwrap();
    //         f.write_all(&format!("{:?}", team_list).bytes().collect_vec()[..])
    //             .expect("could not write");
    //         println!(
    //             "{}: Length {}, max: {} at {}",
    //             i,
    //             team_list.len(),
    //             mmm,
    //             index
    //         );
    //     }
    // }
}

/// Dumps `t` to a `result-{i}` file and logs its length.
fn print_team(i: usize, t: &Vec<[u8; 4]>) {
    let mut f = std::fs::File::create(format!("result-{}", i)).unwrap();
    f.write_all(&format!("{:?}", t).bytes().collect_vec()[..])
        .expect("could not write");
    println!("{}: Length {}", i, t.len());
}

/// Greedily builds a schedule from a random shuffle of all teams: a team is
/// kept only if none of its player pairs has been used by a kept team yet.
fn create_team_list<R: Rng>(rand: &mut R) -> Vec<[u8; 4]> {
    let mut teams = ALL_TEAMS.clone();
    teams.shuffle(rand);
    // `seen` tracks every player pair already used by an accepted team.
    let mut seen = HashSet::<[u8; 2]>::new();
    let mut team_list = Vec::with_capacity(720);
    for team in teams {
        if !contains(&team, &seen) {
            add_seen(&team, &mut seen);
            team_list.push(team);
        }
    }
    team_list
}

/// Crossover: swaps the back halves of two parents, producing two children.
fn merge_teams(team1: &[[u8; 4]], team2: &[[u8; 4]]) -> (Vec<[u8; 4]>, Vec<[u8; 4]>) {
    let h1 = split_team(team1);
    let h2 = split_team(team2);
    let v1 = h1.0.into_iter()
        .chain(h2.1.into_iter())
        .cloned()
        .collect_vec();
    let v2 = h2.0.into_iter()
        .chain(h1.1.into_iter())
        .cloned()
        .collect_vec();
    (v1, v2)
}

/// Splits a team list at its midpoint.
fn split_team(team: &[[u8; 4]]) -> (&[[u8; 4]], &[[u8; 4]]) {
    let mid = team.len() / 2;
    (&team[..mid], &team[mid..])
}

/// True when any player pair of `team` is already in `set`.
fn contains(team: &[u8; 4], set: &HashSet<[u8; 2]>) -> bool {
    team.into_iter()
        .combinations(2)
        .any(|f| set.contains(&[*f[0], *f[1]]))
}

/// Inserts all player pairs of `team` into `set`; returns true when every
/// pair was new. Note: `all` short-circuits, so on a false result some of
/// the pairs may already have been inserted.
fn add_seen(team: &[u8; 4], set: &mut HashSet<[u8; 2]>) -> bool {
    team.into_iter()
        .combinations(2)
        .all(|slce| set.insert([*slce[0], *slce[1]]))
}

/// Repairs a child after crossover: removes teams touching any duplicated
/// player pair, then greedily tops the list back up from a fresh shuffle
/// of all teams.
fn fix_team(team: Vec<[u8; 4]>) -> Vec<[u8; 4]> {
    let mut seen = HashSet::new();
    // Pairs that occur in more than one team.
    let mut doubles = HashSet::new();
    for t in &team {
        if !add_seen(t, &mut seen) {
            add_seen(t, &mut doubles);
        }
    }
    // Drop every team involved in a conflicting pair.
    let mut teams = team
        .into_iter()
        .filter(|x| !contains(x, &doubles))
        .collect_vec();
    // Sanity pass: the remaining teams must be pairwise disjoint.
    let mut seen = HashSet::new();
    for t in &teams {
        if !add_seen(t, &mut seen) {
            unreachable!("doubles should have been removed");
        }
    }
    // Refill greedily with random teams that fit the current pair set.
    let mut iter_teams = ALL_TEAMS.clone();
    iter_teams.shuffle(&mut rand::thread_rng());
    for team in iter_teams {
        if !contains(&team, &seen) {
            add_seen(&team, &mut seen);
            teams.push(team);
        }
    }
    teams
}
use crate::{ controllers::{handler, routes}, error, repositories::postgres_repo::BicycleRepoPostgres, services::bicycle::BicycleService, }; use warp::{hyper::StatusCode, Filter}; #[tokio::main] pub async fn rest() { let bike_repo = BicycleRepoPostgres {}; let bike_service = BicycleService::new(bike_repo); let health_route = warp::path!("health").map(|| StatusCode::OK); // let bikes = warp::path!("bicycles"); // let bicycle_routes = bikes // .and(warp::get()) // .and(with_manager(bike_service.clone())) // .and_then(handler::find_all) // .or(bikes // .and(warp::post()) // .and(warp::body::json()) // .and(with_manager(bike_service.clone())) // .and_then(handler::create)); let bicycle_prefix = routes::path_prefix(bike_service); let list_all_bicycles = bicycle_prefix .clone() .and(routes::list()) .and_then(handler::find_all); let get_bike = bicycle_prefix .clone() .and(routes::get()) .and_then(handler::find_by_id); let create = bicycle_prefix .clone() .and(routes::create()) .and_then(handler::create); let update = bicycle_prefix .clone() .and(routes::update()) .and_then(handler::update); let delete = bicycle_prefix .and(routes::delete()) .and_then(handler::delete); let bicycles_api = list_all_bicycles .or(create) .or(get_bike) .or(update) .or(delete) .with(warp::log("bicycle_api")); let routes = health_route .or(bicycles_api) .with(warp::cors().allow_any_origin()) .recover(error::handle_rejection); warp::serve(routes).run(([127, 0, 0, 1], 8000)).await; }
use std::io::Read; use crate::instructions::{Instructions, Instruction}; use crate::decoder::{DecodeError, DecodeResult, Decoder}; use crate::types::*; type Local = (u32, Values); struct Code { locals: Vec<Local>, body: Vec<Instruction>, } pub struct CodeSection(Vec<Code>); impl CodeSection { // read locals fn locals<R: Read>(decoder: &mut Decoder<R>) -> DecodeResult<Local> { let count = decoder.varunint32()?; let valtype = decoder.valtype()?; Ok((count, valtype)) } fn func<R: Read>(decoder: &mut Decoder<R>) -> DecodeResult<Code> { let body_size = decoder.varunint32()?; let locals = decoder.vec(CodeSection::locals)?; let body = decoder.decode(CodeSection::body)?; Ok(Code { locals: locals, body: body, }) } // read code section // read instructions fn body<R: Read>(decoder: &mut Decoder<R>) -> DecodeResult<Vec<Instruction>> { Ok(decoder.decode(Instructions::decode)?) } pub fn decode<R: Read>(decoder: &mut Decoder<R>) -> DecodeResult<Self> { Ok(Self(decoder.vec(CodeSection::func)?)) } }