text
stringlengths
8
4.13M
extern crate chrono; extern crate itertools; extern crate regex; extern crate structopt; use chrono::prelude::*; use itertools::Itertools; use regex::Regex; use std::error::Error; use std::io; use std::io::prelude::*; use structopt::StructOpt; /// Get a time slice of a log #[derive(StructOpt, Debug)] #[structopt(name = "timegrep")] pub struct CLI { // A flag, true if used in the command line. Note doc comment will // be used for the help message of the flag. /// Activate debug mode #[structopt(short = "d", long = "debug")] debug: bool, // The number of occurrences of the `v/verbose` flag /// Verbose mode (-v, -vv, -vvv, etc.) #[structopt(short = "v", long = "verbose", parse(from_occurrences))] verbose: u8, // This option can be specified either `--start` or `-s value`. /// start time #[structopt(short = "s", long = "start")] start: String, // This option can be specified either `--end` or `-e value`. /// end time #[structopt(short = "f", long = "finish")] end: String, // This option can be specified either `--regexp` or `-r value`. /// regular expression to pluck out the time each line #[structopt(short = "r", long = "regexp")] regexp: String, // This option can be specified either `--format` or `-f value`. /// format of the timestamp #[structopt(short = "t", long = "time-format")] format: String, // This option can be specified either `--chunk` or `-c value`. 
/// Specify chunk size #[structopt(short = "c", long = "chunk", default_value = "8192")] chunk: usize, } pub fn run(cli: CLI) -> Result<(), Box<dyn Error>> { if cli.verbose > 0 { println!("printing logs from '{}' to '{}'", cli.start, cli.end); } let stdin = io::stdin(); let timestamp_re = Regex::new(&cli.regexp).unwrap(); let start = Utc .datetime_from_str(&cli.start, "%Y-%m-%dT%H:%M:%S") .unwrap(); let end = Utc .datetime_from_str(&cli.end, "%Y-%m-%dT%H:%M:%S") .unwrap(); for chunk in &stdin.lock().lines().chunks(cli.chunk) { let lines: Vec<String> = chunk.map(|r| r.unwrap()).collect(); process_chunk(&lines, &timestamp_re, &cli.format, &start, &end); } Ok(()) } fn process_chunk( lines: &Vec<String>, time_re: &Regex, format: &str, start: &DateTime<Utc>, end: &DateTime<Utc>, ) { for line in lines { match time_re.captures(line) { Some(caps) => { if caps.len() > 0 { let stamp = Utc.datetime_from_str(&caps[1], format).unwrap(); if stamp >= *start && stamp <= *end { println!("{}", line) } } } None => eprintln!("{}", line), } } }
use crate::css::{Unit, Value}; use crate::layout::entity::Dimensions; use crate::layout::layout_box::LayoutBox; impl<'a> LayoutBox<'a> { // widthは親コンポーネントから計算可能だが、高さは子要素の合計値に左右される pub fn layout_block(&mut self, containing_block: Dimensions) { // widthは親のコンポーネントから計算できる self.set_block_width(containing_block); // boxがどの位置にあるのかを計算する self.set_block_position(containing_block); // boxの子要素を再起的に計算する self.layout_block_children(); // heightは子要素の高さに左右されるため、子要素の描画後でないと計算できない self.set_block_height(); } pub fn set_block_width(&mut self, container_block: Dimensions) { let style = self.get_style_node(); let auto = Value::Keyword("auto".to_string()); let mut width = style.value("width").unwrap_or(auto.clone()); let zero = Value::Length(0.0, Unit::Px); let mut margin_left = style.lookup("margin-left", "margin", &zero); let mut margin_right = style.lookup("margin-right", "margin", &zero); let border_left = style.lookup("border-left-width", "border-width", &zero); let border_right = style.lookup("border-right-width", "border-width", &zero); let padding_left = style.lookup("padding-left", "padding", &zero); let padding_right = style.lookup("padding-right", "padding", &zero); let total_width: f32 = [ &margin_left, &margin_right, &border_left, &border_right, &padding_left, &padding_right, ] .iter() .map(|v| v.to_px()) .sum(); // widthがcontainerの大きさを超える場合、marginがautoに指定されているなら値を0にする if width != auto && total_width > container_block.content.width { if margin_left == auto { margin_left = Value::Length(0.0, Unit::Px); } if margin_right == auto { margin_right = Value::Length(0.0, Unit::Px); } } // containerの内容がwidthより大きくなってしまった場合の計算 let underflow = container_block.content.width - total_width; match (width == auto, margin_left == auto, margin_right == auto) { // overconstrainedの場合 (false, false, false) => { margin_left = Value::Length(margin_right.to_px() + underflow, Unit::Px); } // margin_right, margin_leftのどちらかが指定されている場合、underflowの値を入れる。 (false, false, true) => margin_right = 
Value::Length(underflow, Unit::Px), (false, true, false) => margin_left = Value::Length(underflow, Unit::Px), // widthがautoの場合は、他の値は0になる (true, _, _) => { if margin_left == auto { margin_left = Value::Length(0.0, Unit::Px); } if margin_right == auto { margin_right = Value::Length(0.0, Unit::Px); } if underflow >= 0.0 { width = Value::Length(underflow, Unit::Px); } else { width = Value::Length(0.0, Unit::Px); margin_right = Value::Length(margin_right.to_px() + underflow, Unit::Px); } } // margin-right, margin-leftの両方がautoの場合、underflowの値を半分にする (false, true, true) => { margin_left = Value::Length(underflow / 2.0, Unit::Px); margin_right = Value::Length(underflow / 2.0, Unit::Px); } } let d = &mut self.dimensions; d.content.width = width.to_px(); d.padding.left = padding_left.to_px(); d.padding.right = padding_right.to_px(); d.border.left = border_left.to_px(); d.border.right = border_right.to_px(); d.margin.left = margin_left.to_px(); d.margin.right = margin_right.to_px(); } pub fn set_block_position(&mut self, containing_block: Dimensions) { let style = self.get_style_node(); let d = &mut self.dimensions; let zero = Value::Length(0.0, Unit::Px); d.margin.top = style.lookup("margin-top", "margin", &zero).to_px(); d.margin.bottom = style.lookup("margin-bottom", "margin", &zero).to_px(); d.border.top = style.lookup("border-top", "border", &zero).to_px(); d.border.bottom = style.lookup("border-bottom", "border", &zero).to_px(); d.padding.top = style.lookup("padding-top", "padding", &zero).to_px(); d.padding.bottom = style.lookup("padding-bottom", "padding", &zero).to_px(); d.content.x = containing_block.content.x + d.margin.left + d.border.left + d.padding.left; // boxはすべてのblockの下に配置する d.content.y = containing_block.content.height + containing_block.content.y + d.margin.top + d.border.top + d.padding.top; } pub fn layout_block_children(&mut self) { let d = &mut self.dimensions; for child in &mut self.children { child.layout(*d); d.content.height = d.content.height + 
child.dimensions.margin_box().height; } } pub fn set_block_height(&mut self) { if let Some(Value::Length(h, Unit::Px)) = self.get_style_node().value("height") { self.dimensions.content.height = h; } } }
mod navbar; mod product_card; pub use navbar::nav_bar; pub use product_card::product_card;
use async_trait::async_trait;
use chrono::{DateTime, Utc};

use crate::{
    model::{Items, Result},
    ItemId, OpenItem,
};

pub mod in_memory;
pub mod postgres;

/// Boxed, thread-safe handle to any repository implementation.
pub type Repository = Box<dyn IRepository + Send + Sync + 'static>;

/// Storage interface for items, implemented by the `in_memory` and
/// `postgres` submodules.
#[async_trait]
pub trait IRepository {
    /// Fetch a page of items starting at `offset`, at most `limit` long;
    /// `show_done_items` controls whether completed items are included.
    async fn get_items(
        &self,
        offset: Option<usize>,
        limit: Option<usize>,
        show_done_items: bool,
    ) -> Result<Items>;
    /// Persist a new open item.
    async fn add_open_item(&self, item: OpenItem) -> Result<()>;
    /// Mark the item `id` as done, stamped with `now`.
    async fn complete_item(&self, id: ItemId, now: DateTime<Utc>) -> Result<()>;
    /// Revert a completed item `id` back to open.
    async fn undo_item(&self, id: ItemId) -> Result<()>;
    /// Replace the contents of item `id` with `item`.
    async fn edit_item(&self, id: ItemId, item: OpenItem) -> Result<()>;
}

// Fallback page size when the caller passes no `limit`
// (presumably consumed by the implementations — confirm).
const DEFAULT_LIMIT: usize = 200;
// Implementation of the interpreter's fixed-size `Array` custom type: the
// `Array`/`ArrayIter` data types, the operator dispatch table, and the
// indexing operators (get/set). Indices are normalized via `normalize`, and
// out-of-range accesses raise the interpreter's index error. NOTE(review):
// `op_fn` panics via `unimplemented!` for any operator not listed —
// presumably unreachable from the interpreter; confirm.
use crate::custom_types::exceptions::{index_error, value_error}; use crate::custom_types::join_values; use crate::custom_types::list::List; use crate::custom_types::range::Range; use crate::custom_var::{downcast_var, CustomVar}; use crate::int_var::{normalize, IntVar}; use crate::looping::{self, TypicalIterator}; use crate::method::{NativeMethod, StdMethod}; use crate::name::Name; use crate::operator::Operator; use crate::runtime::Runtime; use crate::std_type::Type; use crate::string_var::{MaybeString, StringVar}; use crate::variable::{FnResult, Variable}; use crate::{first, first_n}; use ascii::{AsciiChar, AsciiStr}; use num::ToPrimitive; use once_cell::sync::Lazy; use std::cell::{Cell, RefCell}; use std::cmp::min; use std::rc::Rc; #[derive(Debug)] pub struct Array { vars: RefCell<Box<[Variable]>>, } #[derive(Debug)] struct ArrayIter { current: Cell<usize>, value: Rc<Array>, } impl Array { fn new(args: Box<[Variable]>) -> Rc<Array> { Rc::new(Array { vars: RefCell::new(args), }) } fn op_fn(o: Operator) -> NativeMethod<Rc<Array>> { match o { Operator::GetAttr => Self::index, Operator::SetAttr => Self::set_index, Operator::Bool => Self::bool, Operator::Str => Self::str, Operator::Repr => Self::repr, Operator::Equals => Self::eq, Operator::In => Self::contains, Operator::GetSlice => Self::get_slice, Operator::Iter => Self::iter, Operator::IterSlice => Self::iter_slice, _ => unimplemented!("Array.{}", o.name()), } } fn index(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { let values = self.vars.borrow(); match normalize(values.len(), first(args).into()) { Result::Ok(i) => runtime.return_1(values[i].clone()), Result::Err(index) => Self::index_err(runtime, values.len(), &index), } } fn set_index(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { debug_assert_eq!(args.len(), 2); let [index, value] = first_n(args); let mut vars = self.vars.borrow_mut(); match normalize(vars.len(), IntVar::from(index)) { Result::Ok(val) =>
// (continued) completion of `set_index`; then Bool/Str/Repr conversions —
// the printed form is `Array[...]`, built by `surround` — deep equality
// (`eq` compares element-wise through the runtime, so user-defined equality
// is honored) and membership testing (`contains`).
vars[val] = value, Result::Err(val) => return Self::index_err(runtime, vars.len(), &val), } runtime.return_0() } fn bool(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { debug_assert!(args.is_empty()); runtime.return_1((!self.vars.borrow().is_empty()).into()) } fn str(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { debug_assert!(args.is_empty()); let value = self.str_value(runtime)?; runtime.return_1(value.into()) } fn str_value(&self, runtime: &mut Runtime) -> Result<StringVar, ()> { let value = join_values(&**self.vars.borrow(), |x| x.str(runtime))?; Result::Ok(Self::surround(value).into()) } fn repr(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { debug_assert!(args.is_empty()); let value = self.repr_value(runtime)?; runtime.return_1(value.into()) } fn repr_value(&self, runtime: &mut Runtime) -> Result<StringVar, ()> { let value = join_values(&**self.vars.borrow(), |x| x.repr(runtime))?; Result::Ok(Self::surround(value).into()) } fn surround(mut str: MaybeString) -> MaybeString { static ARRAY: Lazy<&AsciiStr> = Lazy::new(|| AsciiStr::from_ascii("Array[").unwrap()); str.insert_ascii_str(0, *ARRAY); str.push_ascii(AsciiChar::BracketClose); str } fn eq(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { for arg in args { if !match downcast_var::<Array>(arg) { Result::Err(_) => false, Result::Ok(other) => { let self_val = self.vars.borrow(); let other_val = other.vars.borrow(); self_val.len() == other_val.len() && Self::arr_eq(&*self_val, &*other_val, runtime)? } } { return runtime.return_1(false.into()); } } runtime.return_1(true.into()) } fn contains(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { debug_assert_eq!(args.len(), 1); let arg = first(args); for val in self.vars.borrow().iter() { if arg.clone().equals(val.clone(), runtime)?
// (continued) completion of `contains`; slicing (`get_slice` clones the
// selected elements into a fresh Array), the two iterator constructors
// (`iter`, and `iter_slice` whose start/stop/step are bounds-checked
// against usize before slicing), and element-wise `arr_eq`.
{ return runtime.return_1(true.into()); } } runtime.return_1(false.into()) } fn get_slice(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { debug_assert_eq!(args.len(), 1); let range = Range::from_slice(self.vars.borrow().len(), runtime, first(args))?; let mut raw_vec = Vec::new(); let self_val = self.vars.borrow(); for i in range.values() { raw_vec.push(self_val[i.to_usize().expect("Conversion error")].clone()); } runtime.return_1(Self::new(raw_vec.into_boxed_slice()).into()) } fn iter(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { debug_assert!(args.is_empty()); runtime.return_1(Rc::new(ArrayIter::new(self)).into()) } fn iter_slice(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { debug_assert_eq!(args.len(), 1); let range = Range::from_slice(self.vars.borrow().len(), runtime, first(args))?; let value = self.vars.borrow(); let len = value.len(); let start = match range.get_start().to_usize() { Option::Some(v) => v, Option::None => return Self::size_error(runtime, range.get_start()), }; let stop = min(range.get_stop().to_usize().unwrap_or(len), len); let step = match range.get_step().to_usize() { Option::Some(v) => v, Option::None => return Self::size_error(runtime, range.get_step()), }; let new_vec = Array::new(value[start..stop].iter().step_by(step).cloned().collect()); runtime.return_1(Rc::new(ArrayIter::new(new_vec)).into()) } fn arr_eq(first: &[Variable], second: &[Variable], runtime: &mut Runtime) -> Result<bool, ()> { for (a, b) in first.iter().zip(second.iter()) { if !a.clone().equals(b.clone(), runtime)?
// (continued) completion of `arr_eq`; the constructors (`create` fills with a
// repeated value, `from_list` copies a List, `from_iterable` drains any
// iterable), the exported class object `array_type`, the shared index/size
// error helpers, and the start of the `CustomVar` impl (attribute get/set
// are `unimplemented!` by design — Array exposes only operators).
{ return Result::Ok(false); } } Result::Ok(true) } fn create(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { debug_assert_eq!(args.len(), 2); let [len, fill] = first_n(args); let len = IntVar::from(len); let usize_len = match len.to_usize() { Option::Some(val) => val, Option::None => { return runtime.throw_quick(value_error(), "Array init too large to store") } }; runtime.return_1(Array::new(vec![fill; usize_len].into_boxed_slice()).into()) } fn from_list(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { let list = downcast_var::<List>(first(args)).expect("Expected a list"); let new = list.values().iter().cloned().collect(); runtime.return_1(Array::new(new).into()) } fn from_iterable(args: Vec<Variable>, runtime: &mut Runtime) -> FnResult { let mut result = Vec::new(); let iter = first(args).iter(runtime)?; while let Option::Some(next) = iter.next(runtime)?.take_first() { result.push(next); } runtime.return_1(Array::new(result.into_boxed_slice()).into()) } pub fn array_type() -> Type { custom_class!(Array, create, "Array", "fromList" => from_list, "fromIterable" => from_iterable) } fn index_err(runtime: &mut Runtime, len: usize, size: &IntVar) -> FnResult { runtime.throw_quick( index_error(), format!("index {} out of range for array of length {}", size, len), ) } fn size_error(runtime: &mut Runtime, size: &IntVar) -> FnResult { runtime.throw_quick( value_error(), format!( "Index {} too large (must be less than {})", size, usize::MAX ), ) } } impl CustomVar for Array { fn set(self: Rc<Self>, _name: Name, _object: Variable) { unimplemented!() } fn get_type(&self) -> Type { Self::array_type() } fn get_operator(self: Rc<Self>, op: Operator) -> Variable { let func = Array::op_fn(op); StdMethod::new_native(self, func).into() } fn get_attribute(self: Rc<Self>, _name: &str) -> Variable { unimplemented!() } fn call_op( self: Rc<Self>, operator: Operator, args: Vec<Variable>, runtime: &mut Runtime, ) -> FnResult {
// (continued) operator dispatch for the `CustomVar` impl (`call_op` and
// `call_op_or_goto` are intentionally identical), str/repr/bool/iter
// forwarding to the helpers above, and the `ArrayIter` cursor: `inner_next`
// hands out clones until `current` reaches the array length.
runtime.call_native_method(Array::op_fn(operator), self, args) } fn call_op_or_goto( self: Rc<Self>, operator: Operator, args: Vec<Variable>, runtime: &mut Runtime, ) -> FnResult { runtime.call_native_method(Array::op_fn(operator), self, args) } fn str(self: Rc<Self>, runtime: &mut Runtime) -> Result<StringVar, ()> { self.str_value(runtime) } fn repr(self: Rc<Self>, runtime: &mut Runtime) -> Result<StringVar, ()> { self.repr_value(runtime) } fn bool(self: Rc<Self>, _runtime: &mut Runtime) -> Result<bool, ()> { Result::Ok(!self.vars.borrow().is_empty()) } fn iter(self: Rc<Self>, _runtime: &mut Runtime) -> Result<looping::Iterator, ()> { Result::Ok(Rc::new(ArrayIter::new(self)).into()) } } impl ArrayIter { pub fn new(value: Rc<Array>) -> ArrayIter { ArrayIter { value, current: Cell::new(0), } } fn create(_args: Vec<Variable>, _runtime: &mut Runtime) -> FnResult { unimplemented!() } } impl TypicalIterator for ArrayIter { fn inner_next(&self) -> Option<Variable> { if self.current.get() != self.value.vars.borrow().len() { let result = self.value.vars.borrow()[self.current.get()].clone(); self.current.set(self.current.get() + 1); Option::Some(result) } else { Option::None } } fn get_type() -> Type { custom_class!(ArrayIter, create, "ArrayIter") } }
use std::thread;
use std::time::Duration;
use std::sync::mpsc;
use std::sync::Mutex;

/// Spawn a worker that counts 1..10 while the main thread counts 1..5,
/// then wait for the worker to finish.
pub fn main() {
    let handle = thread::spawn(|| {
        (1..10).for_each(|i| {
            println!("hi number {} from the spawned thread!", i);
            thread::sleep(Duration::from_millis(1));
        })
    });
    (1..5).for_each(|i| {
        println!("hi number {} from the main thread!", i);
        thread::sleep(Duration::from_millis(1));
    });
    handle.join().unwrap();
}

/// `move` transfers ownership of the vector into the spawned closure.
pub fn thread_takes_ownership_of_variable() {
    let v = vec![1, 2, 3];
    let thread_handle = thread::spawn(move || println!("Here's a vector: {:?}", v));
    thread_handle.join().unwrap();
}

// message passing between threads
/// Send a single value over an mpsc channel from a worker thread.
pub fn message_passing() {
    let (tx, rx) = mpsc::channel();
    thread::spawn(move || {
        let val = String::from("hi");
        // `send` takes `self`, so the value (and the send Result, once
        // unwrapped) is consumed here.
        tx.send(val).unwrap();
    });
    // Block until the worker's message arrives.
    let received = rx.recv().unwrap();
    println!("Got: {}", received);
}

/// Stream several values through a channel; iterating the receiver ends
/// once the sending half is dropped.
pub fn another_message_passing() {
    let (sender, receiver) = mpsc::channel();
    thread::spawn(move || {
        let words = ["hi", "from", "the", "thread"];
        for word in words.iter() {
            sender.send(String::from(*word)).unwrap();
            thread::sleep(Duration::from_secs(1));
        }
    });
    for received in receiver {
        println!("Got {}", received);
    }
}

/// Placeholder for a shared-state (Mutex) example; intentionally empty.
pub fn using_shared_data() {}
use std::collections::HashMap;

/// Product interface for the Prototype pattern: new instances are produced
/// by duplicating a registered prototype via `create_clone`.
/// (Modernized: `Box<Product>` bare trait objects are rejected by current
/// editions — all trait objects now use `dyn Product`.)
trait Product {
    /// Render the string `s` with this product's decoration.
    fn use_product(&self, s: String);
    /// Return a boxed copy of `self` (the prototype clone).
    fn create_clone(&self) -> Box<dyn Product>;
}

/// Registry mapping names to prototype instances.
struct Manager {
    showcase: HashMap<String, Box<dyn Product>>,
}

impl Manager {
    fn new() -> Manager {
        Manager {
            showcase: HashMap::new(),
        }
    }

    /// Register a prototype under `name`.
    fn register(&mut self, name: String, product: Box<dyn Product>) {
        self.showcase.insert(name, product);
    }

    /// Clone the prototype registered under `product_name`.
    ///
    /// Panics when nothing was registered under that name — now with a
    /// descriptive message instead of the original bare `panic!()`.
    fn create(&self, product_name: String) -> Box<dyn Product> {
        match self.showcase.get(&product_name) {
            Some(p) => p.create_clone(),
            None => panic!("no product registered under '{}'", product_name),
        }
    }
}

/// Frames a message with a border of `deco_char`.
#[derive(Clone)]
struct MessageBox {
    deco_char: char,
}

impl MessageBox {
    fn new(deco_char: char) -> MessageBox {
        MessageBox { deco_char }
    }
}

impl Product for MessageBox {
    fn use_product(&self, s: String) {
        // Character count (not byte length) so multi-byte text frames correctly.
        let length = s.chars().count();
        let border = self.deco_char.to_string().repeat(length + 4);
        println!("{}", border);
        println!("{} {} {}", self.deco_char, s, self.deco_char);
        println!("{}", border);
    }
    fn create_clone(&self) -> Box<dyn Product> {
        Box::new(self.clone())
    }
}

/// Prints a message in quotes with an underline of `ul_char`.
#[derive(Clone)]
struct UnderlinePen {
    ul_char: char,
}

impl UnderlinePen {
    fn new(ul_char: char) -> UnderlinePen {
        UnderlinePen { ul_char }
    }
}

impl Product for UnderlinePen {
    fn use_product(&self, s: String) {
        let length = s.chars().count();
        let underline = self.ul_char.to_string().repeat(length);
        println!("\" {} \"", s);
        println!(" {}", underline);
    }
    fn create_clone(&self) -> Box<dyn Product> {
        Box::new(self.clone())
    }
}

fn main() {
    let mut manager = Manager::new();
    let upen = Box::new(UnderlinePen::new('~'));
    let mbox = Box::new(MessageBox::new('*'));
    let sbox = Box::new(MessageBox::new('/'));
    manager.register("strong message".to_string(), upen);
    manager.register("warning box".to_string(), mbox);
    manager.register("slash box".to_string(), sbox);
    let p1 = manager.create("strong message".to_string());
    p1.use_product("Hello, world.".to_string());
    let p2 = manager.create("warning box".to_string());
    p2.use_product("Hello, world.".to_string());
    let p3 = manager.create("slash box".to_string());
    p3.use_product("Hello, world.".to_string());
}
// Application-wide dependency container: every repository and service is
// constructed exactly once at boot and shared behind `Arc`s. Construction
// order matters — databases first, then repositories/services built on them,
// then services that depend on other services.
use super::database::{create_kv_db, create_sql_db}; use crate::{ _utils::error::BootError, account::repository::AccountRepository, ai::service::AIService, auth::service::AuthService, config::service::ConfigService, email::service::EmailService, post::repository::PostRepository, search::service::SearchService, security::service::SecurityService, tag::repository::TagRepository, task::repository::TaskRepository, }; use std::sync::Arc; #[derive(Clone)] pub struct AppState { // @TODO-ZM: remove this from app state pub main_kv_db: Arc<sled::Db>, pub post_repository: Arc<PostRepository>, pub tag_repository: Arc<TagRepository>, pub account_repository: Arc<AccountRepository>, pub config_service: Arc<ConfigService>, pub task_repository: Arc<TaskRepository>, pub search_service: Arc<SearchService>, pub email_service: Arc<EmailService>, pub security_service: Arc<SecurityService>, pub ai_service: Arc<AIService>, pub auth_service: Arc<AuthService>, } pub async fn create_app_state() -> Result<AppState, BootError> { let config_service = Arc::new(ConfigService::new()); let main_sql_db = Arc::new( create_sql_db( super::database::SQLDBName::Main, config_service.get_config().sqlite_base_url, ) .await?, ); let search_sql_db = Arc::new( create_sql_db( super::database::SQLDBName::Search, config_service.get_config().sqlite_base_url, ) .await?, ); let main_kv_db = Arc::new(create_kv_db(format!("{}/main", config_service.get_config().kv_db_dir)).await?); let rate_limit_kv_db = Arc::new( create_kv_db(format!( "{}/rate_limit", config_service.get_config().kv_db_dir )) .await?, ); let search_service = Arc::new(SearchService::new(Arc::clone(&search_sql_db))); let post_repository = Arc::new(PostRepository::new(Arc::clone(&main_sql_db))); let tag_repository = Arc::new(TagRepository::new(Arc::clone(&main_sql_db))); let account_repository = Arc::new(AccountRepository::new(Arc::clone(&main_sql_db))); let task_repository = Arc::new(TaskRepository::new(Arc::clone(&main_sql_db))); let email_service =
// (continued) remaining service constructors (email/auth/security/ai) and the
// final assembly of AppState; each field is an `Arc::clone` of the locals
// built above, so the databases stay shared, not duplicated.
Arc::new(EmailService::new(Arc::clone(&config_service))); let auth_service = Arc::new(AuthService::new( Arc::clone(&config_service), Arc::clone(&main_kv_db), )); let security_service = Arc::new(SecurityService::new(Arc::clone(&rate_limit_kv_db))); let ai_service = Arc::new(AIService::new(Arc::clone(&config_service))); Ok(AppState { main_kv_db: Arc::clone(&main_kv_db), post_repository: Arc::clone(&post_repository), tag_repository: Arc::clone(&tag_repository), account_repository: Arc::clone(&account_repository), config_service: Arc::clone(&config_service), task_repository: Arc::clone(&task_repository), search_service: Arc::clone(&search_service), email_service: Arc::clone(&email_service), security_service: Arc::clone(&security_service), ai_service: Arc::clone(&ai_service), auth_service: Arc::clone(&auth_service), }) }
// Integration tests for the shakesemon HTTP API. Each test boots the app on a
// random port (`spawn_app`) against mocked PokeAPI/translation servers and
// exercises `GET /pokemon/{name}`: the success path, the translation
// rate-limit error, missing descriptions, and unknown pokemon.
use actix_web::http::StatusCode; use mocks; use serde_json::{json, Value}; use shakesemon::Pokemon; #[actix_rt::test] async fn success_responses() { // Arrange let _pokeapi_mocks = mocks::pokeapi::Mocks::start().await; let _translation_mocks = mocks::translation::Mocks::start().await; let address = spawn_app(); let client = reqwest::Client::new(); let name = "pikachu"; // Act let response = client // Use the returned application address .get(&format!("{}/pokemon/{}", &address, name)) .send() .await .expect("Failed to execute request."); // Assert assert!(response.status().is_success()); let pokemon = response .json::<Pokemon>() .await .expect("Request returned invalid pokemon data"); assert_eq!(pokemon.name, name, "Incorrect name serialized for {}", name); assert_eq!(pokemon.description, mocks::translation::PIKACHU_TRANSLATION); } #[actix_rt::test] async fn test_error_on_rate_limit() { // Arrange let _pokeapi_mocks = mocks::pokeapi::Mocks::start().await; let _translation_mocks = mocks::translation::Mocks::start().await; let address = spawn_app(); let client = reqwest::Client::new(); let response = client // Use the returned application address .get(&format!("{}/pokemon/squirtle", &address)) .send() .await .expect("Failed to execute request."); assert_eq!(response.status(), StatusCode::TOO_MANY_REQUESTS); assert_eq!( response.json::<Value>().await.unwrap(), json!({ "error": { "code": 429, "message": "Too Many Requests: Rate limit of 5 requests per hour exceeded. Please wait for 46 minutes and 9 seconds."
// (continued) end of the rate-limit assertion; then the not-found cases (no
// description / unknown pokemon) and the `spawn_app` helper, which binds port
// 0 so the OS picks a free port and detaches the server onto the runtime.
} }) ); } #[actix_rt::test] async fn test_error_on_no_description() { // Arrange let _pokeapi_mocks = mocks::pokeapi::Mocks::start().await; let _translation_mocks = mocks::translation::Mocks::start().await; let address = spawn_app(); let client = reqwest::Client::new(); let test_cases = vec!["nodescription", "noenglishdescription"]; for name in test_cases { let response = client // Use the returned application address .get(&format!("{}/pokemon/{}", &address, &name)) .send() .await .expect("Failed to execute request."); assert_eq!(response.status(), StatusCode::NOT_FOUND); assert_eq!( response.json::<Value>().await.unwrap(), json!({ "error": { "code": 404, "message": "Pokemon Description Not Found" } }) ); } } #[actix_rt::test] async fn test_error_on_unknown_pokemon() { // Arrange let _pokeapi_mocks = mocks::pokeapi::Mocks::start().await; let _translation_mocks = mocks::translation::Mocks::start().await; let address = spawn_app(); let client = reqwest::Client::new(); let response = client // Use the returned application address .get(&format!("{}/pokemon/invalidpokemonname", &address)) .send() .await .expect("Failed to execute request."); assert_eq!(response.status(), StatusCode::NOT_FOUND); assert_eq!( response.json::<Value>().await.unwrap(), json!({ "error": { "code": 404, "message": "Pokemon Not Found" } }) ); } use std::net::TcpListener; fn spawn_app() -> String { let listener = TcpListener::bind("127.0.0.1:0").expect("Failed to bind random port"); // We retrieve the port assigned to us by the OS let port = listener.local_addr().unwrap().port(); let server = shakesemon::run(listener).expect("Failed to bind address"); let _ = tokio::spawn(server); // We return the application address to the caller! format!("http://127.0.0.1:{}", port) }
use std::fs;
use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};

/// Request line that selects the index page.
const GET: &[u8; 16] = b"GET / HTTP/1.1\r\n";
const STATUS_OK: &str = "HTTP/1.1 200 OK";
const STATUS_ERROR: &str = "HTTP/1.1 404 NOT FOUND";

/// Single-threaded HTTP server: serves `index.html` for `GET /` and
/// `404.html` for everything else.
fn main() {
    let listener = TcpListener::bind("127.0.0.1:8080").unwrap();
    for stream in listener.incoming() {
        let stream = stream.unwrap();
        handle_connection(stream);
    }
}

/// Write a minimal HTTP/1.1 response (status line, Content-Length, body).
fn web_response(mut stream: TcpStream, content: String, status_line: String) {
    let response = format!(
        "{}\r\nContent-Length:{}\r\n\r\n{}",
        status_line,
        content.len(),
        content
    );
    // `write_all` loops until the whole response is written; the previous
    // bare `write` could silently stop after a partial write.
    stream.write_all(response.as_bytes()).unwrap();
    stream.flush().unwrap();
}

/// Read the request head and dispatch on the request line.
fn handle_connection(mut stream: TcpStream) {
    let mut buffer = [0; 1024];
    // NOTE(review): a single read of up to 1024 bytes is assumed to contain
    // the full request line; a request split across reads would be misrouted.
    let _bytes_read = stream.read(&mut buffer).unwrap();
    if buffer.starts_with(GET) {
        let content = fs::read_to_string("index.html").unwrap();
        web_response(stream, content, STATUS_OK.to_string());
    } else {
        let content = fs::read_to_string("404.html").unwrap();
        web_response(stream, content, STATUS_ERROR.to_string());
    }
}
use std::io::{Stdout, Write};
use termion::cursor::Goto;
use termion::raw::RawTerminal;

/// Stdout switched into termion's raw mode.
pub type Term = RawTerminal<Stdout>;

/// Erase the entire screen.
pub fn clear_screen(stdout: &mut Term) {
    write!(stdout, "{}", termion::clear::All).unwrap();
}

/// Move the cursor to column `x`, row `y` (termion's `Goto` coordinates).
pub fn goto(stdout: &mut Term, x: u16, y: u16) {
    write!(stdout, "{}", Goto(x, y)).unwrap();
}

/// Hide the terminal cursor.
pub fn hide_cursor(stdout: &mut Term) {
    write!(stdout, "{}", termion::cursor::Hide).unwrap();
}

/// Current terminal size as (columns, rows).
/// NOTE(review): panics when not attached to a tty — confirm callers run interactively.
pub fn get_size() -> (u16, u16) {
    termion::terminal_size().unwrap()
}
/// Anything whose surface area can be computed.
trait HasArea {
    fn area(&self) -> f64;
}

struct Circle {
    x: f64,
    y: f64,
    r: f64,
}

impl HasArea for Circle {
    fn area(&self) -> f64 {
        // Pi is approximated as 3.14, exactly as in the original example.
        3.14 * self.r * self.r
    }
}

struct Square {
    x: f64,
    y: f64,
    side: f64,
}

impl HasArea for Square {
    fn area(&self) -> f64 {
        self.side * self.side
    }
}

/// Print the area of any shape implementing `HasArea`.
fn print_area(shape: impl HasArea) {
    let area = shape.area();
    println!("area of the shape is {}", area)
}

fn main() {
    let circle = Circle { x: 0.0, y: 0.0, r: 7.0 };
    print_area(circle);
    let square = Square { x: 0.0, y: 0.0, side: 5.0 };
    print_area(square);
}
// brain teaser: given a sorted int array with repeated numbers and a digit K, what's the most
// efficient way of determining the number of times K appears in the array

/// Count how many times `k` occurs in the sorted slice `array`.
///
/// Returns 0 for an empty slice (the original indexed `array[m]`
/// unconditionally and panicked on `&[]`).
fn occurances_of_k(array: &[u32], k: u32) -> u32 {
    if array.is_empty() {
        return 0;
    }
    let m = binary_search(array, k);
    if array[m] != k {
        return 0;
    }
    // Split around one known occurrence and count the run on each side.
    let (low, high) = array.split_at(m);
    let l = if low.first() == Some(&k) && low.last() == Some(&k) {
        low.len()
    } else if low.last() == Some(&k) {
        low.len() - binary_search_end(low, k)
    } else {
        0
    };
    let h = if high.first() == Some(&k) && high.last() == Some(&k) {
        high.len()
    } else if high.first() == Some(&k) {
        binary_search_end(high, k) + 1
    } else {
        0
    };
    (l + h) as u32
}

/// Classic binary search: returns the index of *some* occurrence of `k`,
/// or the final `low` probe position when `k` is absent.
fn binary_search(array: &[u32], k: u32) -> usize {
    if array.is_empty() {
        return 0;
    }
    let mut low = 0;
    let mut high = array.len() - 1;
    let mut mid;
    while low < high {
        mid = low + ((high - low) / 2);
        let v = array[mid];
        if v < k {
            low = mid + 1;
        } else if v > k {
            // Guard: `mid` can be 0 when every element exceeds `k`; the
            // original `mid - 1` underflowed and panicked here
            // (e.g. binary_search(&[5, 6], 1)).
            if mid == 0 {
                break;
            }
            high = mid - 1;
        } else {
            return mid;
        }
    }
    low
}

/// Find the boundary index of the run of `k`s touching the start or end of
/// `array` (used on the two halves produced by `occurances_of_k`).
fn binary_search_end(array: &[u32], k: u32) -> usize {
    if array.is_empty() {
        return 0;
    }
    let mut low = 0;
    let mut high = array.len() - 1;
    let mut mid = 0;
    while low < high {
        mid = low + ((high - low) / 2);
        let (l, m, _h) = (array[low], array[mid], array[high]);
        // NOTE(review): the `mid + 1` / `mid - 1` probes below assume the
        // run of `k`s does not span the entire slice — `occurances_of_k`
        // upholds this by checking first()/last() before calling.
        if m == k {
            if l == k {
                low = mid;
                if array[mid + 1] != k {
                    return mid;
                }
            } else {
                high = mid;
                if array[mid - 1] != k {
                    return mid;
                }
            }
        } else {
            if l == k {
                high = mid;
                if array[mid - 1] == k {
                    return mid - 1;
                }
            } else {
                low = mid;
                if array[mid + 1] == k {
                    return mid + 1;
                }
            }
        }
    }
    mid
}

#[cfg(test)]
mod tests {
    use super::binary_search;
    #[test]
    fn simple_binary_search() {
        let array = vec![0, 1, 5, 10, 15, 23, 42, 78, 78];
        assert_eq!(binary_search(&array, 15), 4);
        assert_eq!(binary_search(&array, 0), 0);
        assert_eq!(binary_search(&array, 42), 6);
        assert_eq!(binary_search(&array, 78), 7);
        assert_eq!(binary_search(&array, 22), 5);
    }
    use super::binary_search_end;
    #[test]
    fn simple_binary_search_end() {
        let array = vec![0, 0, 0, 0, 0, 23, 42, 78, 78];
        assert_eq!(binary_search_end(&array, 0), 4);
        let array = vec![0, 0, 0, 0, 0, 23, 42, 78, 78];
        assert_eq!(binary_search_end(&array, 78), 7);
    }
    use super::occurances_of_k;
    #[test]
    fn simple_occurances_of_k() {
        let array = vec![0, 1, 1, 1, 1, 23, 42, 78, 78];
        assert_eq!(occurances_of_k(&array, 1), 4);
        let array = vec![0, 0, 0, 0, 0, 23, 42, 78, 78, 90];
        assert_eq!(occurances_of_k(&array, 78), 2);
        let array = vec![0, 0, 0, 0, 0, 23, 42, 78, 78];
        assert_eq!(occurances_of_k(&array, 0), 5);
        let array = vec![0, 0, 0, 0, 0, 23, 42, 78, 78];
        assert_eq!(occurances_of_k(&array, 7), 0);
        let array = vec![0, 0, 0, 0, 0, 23, 42, 78, 78];
        assert_eq!(occurances_of_k(&array, 78), 2);
    }
}
use clap::Shell;

// Pull in the CLI definition so the build script generates completions from
// the exact same `cli_app()` the binary uses.
include!("src/cli.rs");

/// Build script: generate a Bash completion file for the `vid` binary into
/// the user's home directory, aborting the build if it cannot be resolved.
fn main() {
    let mut app = cli_app();
    let home_dir = match dirs::home_dir() {
        Some(home_dir) => home_dir,
        None => {
            println!("Couldn't resolve home directory for completions when building cli");
            std::process::exit(1);
        }
    };
    app.gen_completions("vid", Shell::Bash, home_dir);
}
use std::{borrow::Borrow, collections::BTreeSet, fmt, ops::Deref}; use crate::utils::{apply::*, joined_by::*}; /// Обобщённое выражение. Обобщённость нужна для возможности как задать положения в парсинге, так и для возмоности задания обычного выражения. Была выбрана такая обобщённость вместо копипасты данной структуры отдельно для парсинга. #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum ExpressionMeta<Arg> { /// В математике называется "переменной", но здесь это называется паттерном. Матчится с чем угодно, именованная часть выражения. В выражении выглядит как: `a`, `b`, `c`. // TODO переименовать в Any Pattern { name: String }, /// Любая функция с неизвестным именем с конкретным числом аргументов. В выражении выглядит как: `$f(a, b)`. AnyFunction { name: String, args: Vec<Arg> }, /// Функция с именем и определённым набором аргументов. В выражении выглядит как: `a+b`, `sin(1)`. NamedFunction { name: String, args: Vec<Arg> }, /// Именованная константа. В выражении выглядит как: `$false`, `$true`, `$i`, `$undefined`. NamedValue { name: String }, /// Числовая константа. В выражении выглядит как: `1`, `1000`. IntegerValue { value: i64 }, } // TODO применить где-нибудь #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Copy)] pub enum ExpressionKind { Pattern, AnyFunction, NamedFunction, NamedValue, IntegerValue, } impl<Arg> From<&ExpressionMeta<Arg>> for ExpressionKind { fn from(expr: &ExpressionMeta<Arg>) -> Self { use ExpressionMeta::*; match expr { Pattern { .. } => ExpressionKind::Pattern, AnyFunction { .. } => ExpressionKind::AnyFunction, NamedFunction { .. } => ExpressionKind::NamedFunction, NamedValue { .. } => ExpressionKind::NamedValue, IntegerValue { .. } => ExpressionKind::IntegerValue, } } } /// Ввиду обобщённости `ExpressionMeta`, нужно как-то получать его обратно когда обращаешься к `args`, поэтому сделан такой трейт. 
pub trait GetInnerExpression: Sized { fn get_inner_expression(self) -> ExpressionMeta<Self>; fn get_inner_expression_ref(&self) -> &ExpressionMeta<Self>; fn get_inner_expression_mut(&mut self) -> &mut ExpressionMeta<Self>; } /// Выражение без дополнительной информации. #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct Expression(pub ExpressionMeta<Expression>); impl GetInnerExpression for Expression { fn get_inner_expression(self) -> ExpressionMeta<Self> { self.0 } fn get_inner_expression_ref(&self) -> &ExpressionMeta<Self> { &self.0 } fn get_inner_expression_mut(&mut self) -> &mut ExpressionMeta<Self> { &mut self.0 } } /// Положение в выражении. #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct ExprPositionOwned(pub Vec<usize>); /// Положение в выражении для передачи в функции. Аналог `[usize]`. #[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] #[repr(transparent)] pub struct ExprPosition(pub [usize]); impl ExprPosition { /// Создать ссылку на `ExprPosition` из слайса на `usize`. pub fn from_slice(slice: &[usize]) -> &Self { unsafe { &*(slice as *const [usize] as *const ExprPosition) } } /// Создать мутабельную ссылку на `ExprPosition` из слайса на `usize`. pub fn from_slice_mut(slice: &mut [usize]) -> &mut Self { unsafe { &mut *(slice as *mut [usize] as *mut ExprPosition) } } pub fn cut_to_error(&self, error_in: PositionError) -> &Self { Self::from_slice(&self.0[..=error_in.0]) } } impl Borrow<ExprPosition> for ExprPositionOwned { fn borrow(&self) -> &ExprPosition { ExprPosition::from_slice(self.0.borrow()) } } impl Deref for ExprPositionOwned { type Target = ExprPosition; fn deref(&self) -> &Self::Target { &self.borrow() } } /// Показывает в каком положении в массиве `ExprPosition` не было найдено то что нужно. 
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Copy)]
pub struct PositionError(usize);

impl<Arg> detail::ExpressionExtensionInner for Arg
where
    Arg: GetInnerExpression,
{
    // Recursive worker for `get`: `deep` tracks how many path steps were
    // already consumed so the error can report where the walk failed.
    fn get_inner<'a>(&'a self, position: &ExprPosition, deep: usize) -> Result<&'a Self, PositionError> {
        use ExpressionMeta::*;
        match &position.0 {
            [start, tail @ ..] => match self.get_inner_expression_ref() {
                // Only function-like nodes have children to descend into.
                AnyFunction { name: _, args } | NamedFunction { name: _, args } => args
                    .get(*start)
                    .ok_or(PositionError(deep))?
                    .get_inner(ExprPosition::from_slice(tail), deep + 1),
                // Leaf nodes cannot be indexed any further.
                Pattern { .. } | NamedValue { .. } | IntegerValue { .. } => Err(PositionError(deep)),
            },
            // Empty path: the current node is the target.
            [] => Ok(self),
        }
    }

    // Mutable twin of `get_inner`.
    fn get_mut_inner<'a>(&'a mut self, position: &ExprPosition, deep: usize) -> Result<&'a mut Self, PositionError> {
        use ExpressionMeta::*;
        match &position.0 {
            [start, tail @ ..] => match self.get_inner_expression_mut() {
                AnyFunction { name: _, args } | NamedFunction { name: _, args } => args
                    .get_mut(*start)
                    .ok_or(PositionError(deep))?
                    .get_mut_inner(ExprPosition::from_slice(tail), deep + 1),
                Pattern { .. } | NamedValue { .. } | IntegerValue { .. } => Err(PositionError(deep)),
            },
            [] => Ok(self),
        }
    }

    // Depth-first pre-order walk; `current_position` is maintained as a stack
    // of argument indices and passed to `f` for every visited node.
    fn travel_positions_inner<F: FnMut(&Self, &ExprPosition)>(
        &self,
        current_position: &mut ExprPositionOwned,
        f: &mut F,
    ) {
        use ExpressionMeta::*;
        f(self, (&*current_position).borrow());
        let mut process_args = |args: &[Arg]| {
            args.iter().enumerate().for_each(|(pos, arg)| {
                current_position.0.push(pos);
                arg.travel_positions_inner(current_position, f);
                current_position.0.pop().unwrap();
            })
        };
        match self.get_inner_expression_ref() {
            AnyFunction { name: _, args } | NamedFunction { name: _, args } => process_args(&*args),
            Pattern { name: _ } | NamedValue { name: _ } | IntegerValue { value: _ } => {},
        }
    }
}

// Private module so the `*_inner` worker methods are not part of the public
// `ExpressionExtension` surface.
mod detail {
    use super::*;

    pub trait ExpressionExtensionInner: GetInnerExpression {
        fn get_inner<'a>(&'a self, position: &ExprPosition, deep: usize) -> Result<&'a Self, PositionError>;
        fn get_mut_inner<'a>(&'a mut self, position: &ExprPosition, deep: usize) -> Result<&'a mut Self, PositionError>;
        fn travel_positions_inner<F: FnMut(&Self, &ExprPosition)>(
            &self,
            current_position: &mut ExprPositionOwned,
            f: &mut F,
        );
    }
}

/// Convenience operations available on every expression-like type.
pub trait ExpressionExtension: GetInnerExpression {
    fn get<'a>(&'a self, position: &ExprPosition) -> Result<&'a Self, PositionError>;
    fn get_mut<'a>(&'a mut self, position: &ExprPosition) -> Result<&'a mut Self, PositionError>;
    fn travel<F: FnMut(&Self)>(&self, f: &mut F);
    fn travel_mut<'a, F: for<'b> FnMut(&'b mut Self)>(&'a mut self, f: &mut F);
    fn travel_positions<F: FnMut(&Self, &ExprPosition)>(&self, f: F);
    fn get_pattern_names(&self) -> BTreeSet<String>;
    fn get_anyfunction_names(&self) -> BTreeSet<AnyFunctionNames>;
    fn retype<Y, T, FD, FS>(self, destructure: &FD, structure: &FS) -> T
    where
        T: GetInnerExpression,
        FD: Fn(Self) -> (Y, ExpressionMeta<Self>),
        FS: Fn(Y, ExpressionMeta<T>) -> T;
}

impl<Arg> ExpressionExtension for Arg
where
    Arg: GetInnerExpression + detail::ExpressionExtensionInner,
{
    /// Get a reference to an inner part of the expression.
    fn get<'a>(&'a self, position: &ExprPosition) -> Result<&'a Self, PositionError> {
        self.get_inner(position, 0)
    }

    /// Get a mutable reference to an inner part of the expression.
    fn get_mut<'a>(&'a mut self, position: &ExprPosition) -> Result<&'a mut Self, PositionError> {
        self.get_mut_inner(position, 0)
    }

    /// Visit every sub-expression (pre-order).
    fn travel<F: FnMut(&Self)>(&self, f: &mut F) {
        use ExpressionMeta::*;
        f(self);
        match self.get_inner_expression_ref() {
            AnyFunction { name: _, args } | NamedFunction { name: _, args } => args.iter().for_each(|arg| {
                arg.travel(f);
            }),
            Pattern { name: _ } | NamedValue { name: _ } | IntegerValue { value: _ } => {},
        }
    }

    /// Visit every sub-expression, allowing the visitor to mutate each node.
    fn travel_mut<'a, F: for<'b> FnMut(&'b mut Self)>(&'a mut self, f: &mut F) {
        use ExpressionMeta::*;
        f(self);
        match self.get_inner_expression_mut() {
            AnyFunction { name: _, args } | NamedFunction { name: _, args } => args.iter_mut().for_each(|arg| {
                arg.travel_mut(f);
            }),
            Pattern { name: _ } | NamedValue { name: _ } | IntegerValue { value: _ } => {},
        }
    }

    /// Visit every sub-expression, passing its position to the visitor.
    fn travel_positions<F: FnMut(&Self, &ExprPosition)>(&self, mut f: F) {
        let mut current_position = Vec::new().apply(ExprPositionOwned);
        self.travel_positions_inner(&mut current_position, &mut f);
    }

    /// Returns the names of all patterns in the expression.
    fn get_pattern_names(&self) -> BTreeSet<String> {
        let mut result = BTreeSet::new();
        self.travel(&mut |expr| {
            if let ExpressionMeta::Pattern { name } = expr.get_inner_expression_ref() {
                result.insert(name.clone());
            }
        });
        result
    }

    /// Returns the names and argument counts of all anyfunctions in the
    /// expression. If the expression contains `$f` applied to two arguments
    /// and also to three, both entries are returned.
    fn get_anyfunction_names(&self) -> BTreeSet<AnyFunctionNames> {
        let mut result = BTreeSet::new();
        self.travel(&mut |expr| {
            if let ExpressionMeta::AnyFunction { name, args } = expr.get_inner_expression_ref() {
                result.insert(AnyFunctionNames { name: name.clone(), arguments_count: args.len() });
            }
        });
        result
    }

    // Structure-preserving conversion between two expression wrappers:
    // `destructure` splits off the extra payload `Y`, the tree is rebuilt
    // recursively, and `structure` reattaches the payload at each node.
    fn retype<Y, T, FD, FS>(self, destructure: &FD, structure: &FS) -> T
    where
        T: GetInnerExpression,
        FD: Fn(Self) -> (Y, ExpressionMeta<Self>),
        FS: Fn(Y, ExpressionMeta<T>) -> T,
    {
        use ExpressionMeta::*;
        let (y, expr) = destructure(self);
        structure(y, match expr {
            AnyFunction { name, args } => {
                AnyFunction { name, args: args.into_iter().map(|x| x.retype(destructure, structure)).collect() }
            },
            NamedFunction { name, args } => {
                NamedFunction { name, args: args.into_iter().map(|x| x.retype(destructure, structure)).collect() }
            },
            Pattern { name } => Pattern { name },
            NamedValue { name } => NamedValue { name },
            IntegerValue { value } => IntegerValue { value },
        })
    }
}

/// Name of an anyfunction together with its arity.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub struct AnyFunctionNames {
    pub name: String,
    pub arguments_count: usize,
}

impl fmt::Display for AnyFunctionNames {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}{{{}}}", self.name, self.arguments_count)
    }
}

impl fmt::Display for Expression {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use ExpressionMeta::*;
        match &self.0 {
            AnyFunction { name, args } => write!(f, "${}({})", name, args.iter().joined_by(", ")),
            NamedFunction { name, args } => match &name[..] {
                // Well-known binary operators render infix, parenthesised.
                "+" | "-" | "*" | "/" | "!=" | "=" | "<" | ">" | "<=" | ">=" | "|" | "&" => {
                    write!(f, "({})", args.iter().joined_by(name))
                },
                _ => write!(f, "{}({})", name, args.iter().joined_by(", ")),
            },
            Pattern { name } => write!(f, "{}", name),
            NamedValue { name } => write!(f, "${}", name),
            IntegerValue { value } => write!(f, "{}", value),
        }
    }
}
use std::sync::Arc; use itertools::izip; use crate::prelude::*; use super::utils::*; use crate::camera::Camera; use crate::math::*; use crate::sampler::Sampler; use crate::scene::Scene; use crate::bxdf::TransportMode; use crate::spectrum::utils::*; use super::{ ParIntegratorData, SamplerIntegrator }; pub struct NormalParIntegratorData; impl ParIntegratorData for NormalParIntegratorData { fn li(&self, mut ray: RayDifferential, scene: &Scene, sampler: &mut dyn Sampler, arena: &(), depth: i32) -> Spectrum { let mut l = Spectrum::new(1.0); if let Some(isect) = scene.intersect(&mut ray) { let n = isect.n.unwrap(); let n = Spectrum::from_rgb([n.x, n.y, n.z], SpectrumType::Reflectance); l = n; } l } } pub struct NormalIntegrator { camera: Arc<dyn Camera + Send + Sync>, sampler: Box<dyn Sampler>, } impl NormalIntegrator { pub fn new(camera: Arc<dyn Camera + Send + Sync>, sampler: Box<dyn Sampler>) -> Self { Self { camera, sampler, } } } impl SamplerIntegrator for NormalIntegrator { type ParIntegratorData = NormalParIntegratorData; fn camera(&self) -> Arc<dyn Camera + Send + Sync> { self.camera.clone() } fn sampler(&self) -> &dyn Sampler { self.sampler.as_ref() } fn sampler_mut(&mut self) -> &mut dyn Sampler { self.sampler.as_mut() } fn par_data(&self) -> Self::ParIntegratorData { NormalParIntegratorData } }
use nom; use std::fs::File; use std::io::BufReader; use std::io::Read; pub fn demo() { let f : File = File::open("png_example.png").expect("Could not open example file"); let mut reader = BufReader::new(f); let mut bytes : Vec<u8> = vec![]; let _ = reader.read_to_end(&mut bytes).expect("Could not read file"); match png_file(&bytes[..]) { nom::IResult::Error(_) => println!("Could not parse file"), nom::IResult::Done(rest, png_file_result) => println!("{:?} and {} bytes", png_file_result, rest.len()), nom::IResult::Incomplete(needed) => println!("Tried to parse, but needed {:?}", needed), } } #[derive(Debug)] struct PngHeader { width: u32, // the field in PNGs has the same max as i32 because not every language supports u32 height: u32, bit_depth: u8, color_type: ColorType, filter_method: u8, interlace_method: u8, } #[derive(Debug)] struct PngFile { header: PngHeader, palette: Option<Vec<RgbTriple>>, } #[derive(Debug)] enum ColorType { Grayscale, RGBTriple, PaletteIndex, GrayscaleWithAlpha, RGBTripleWithAlpha, } // TODO: Can I do this with pure nom? 
fn parse_color_type(byte: u8) -> Result<ColorType, ()> { match byte { 0 => Ok(ColorType::Grayscale), 2 => Ok(ColorType::RGBTriple), 3 => Ok(ColorType::PaletteIndex), 4 => Ok(ColorType::GrayscaleWithAlpha), 6 => Ok(ColorType::RGBTripleWithAlpha), _ => Err(()), } } named!(color_type_grayscale<&[u8], ColorType>, do_parse!(a: tag!(&[0][..]) >> (ColorType::Grayscale)) ); named!(color_type_rgb_triple<&[u8], ColorType>, do_parse!(a: tag!(&[2][..]) >> (ColorType::RGBTriple)) ); named!(color_type_palette_index<&[u8], ColorType>, do_parse!(a: tag!(&[3][..]) >> (ColorType::PaletteIndex)) ); named!(color_type_grayscale_with_alpha<&[u8], ColorType>, do_parse!(a: tag!(&[4][..]) >> (ColorType::GrayscaleWithAlpha)) ); named!(color_type_rgb_triple_with_alpha<&[u8], ColorType>, do_parse!(a: tag!(&[6][..]) >> (ColorType::RGBTripleWithAlpha)) ); named!(color_type<&[u8], ColorType>, alt!(color_type_grayscale | color_type_rgb_triple | color_type_palette_index | color_type_grayscale_with_alpha | color_type_rgb_triple_with_alpha ) ); named!(take_an_int(&[u8]) -> i32, do_parse!(a: i32!( nom::Endianness::Little) >> ( a ))); static PNG_FILE_SIGNATURE : [u8; 8] = [ 137, 80, 78, 71, 13, 10, 26, 10 ]; named!(png_signature<&[u8], &[u8]>, tag!(&PNG_FILE_SIGNATURE[..])); // makes a function called `png_header` // with type // for<'r> fn(&'r [u8]) -> nom::IResult<&'r [u8], png_demo::PngHeader> named!(png_header( &[u8] ) -> PngHeader, do_parse!( _signature: tag!(&PNG_FILE_SIGNATURE[..]) >> _chunk_length: take!(4) >> _chunk_type: take!(4) >> width: u32!(nom::Endianness::Big) >> height: u32!(nom::Endianness::Big) >> bit_depth: take!(1) >> color_type: color_type >> filter_method: take!(1) >> interlace_method: take!(1) >> ( PngHeader { width: width, height: height, bit_depth: bit_depth[0], color_type: color_type, filter_method: filter_method[0], interlace_method: filter_method[0], } ) ) ); #[derive(Debug)] struct RgbTriple { red: u8, green: u8, blue: u8, } named!(rgb_triple ( &[u8]) -> RgbTriple, 
do_parse!( red: take!(1) >> green: take!(1) >> blue: take!(1) >> ( RgbTriple { red: red[0], green: green[0], blue: blue[0], } ) ) ); named!(palette_vector (&[u8]) -> Vec<RgbTriple>, many!(rgb_triple)); named!(palette_tag, tag!(&b"PLTE"[..])); named!(palette_chunk<&[u8], Vec<RgbTriple>>, do_parse!( length: u32!(nom::Endianness::Big) >> _tag: tag!(&b"PLTE"[..]) >> data: many_m_n!(length as usize, length as usize, rgb_triple) >> ( data ) ) ); named!(png_file (&[u8]) -> PngFile, do_parse!( header: png_header >> _header_crc: take!(4) >> palette: opt!(palette_chunk) >> ( PngFile { header: header, palette: palette, } ) ) );
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use std::io::{BufRead, BufReader, Cursor};
use std::rc::Rc;

use criterion::{criterion_group, criterion_main, Criterion};
use differential_dataflow::input::Input;
use differential_dataflow::operators::{Iterate, Join, Threshold};
use hydroflow::hydroflow_syntax;
use hydroflow::scheduled::graph_ext::GraphExt;

lazy_static::lazy_static! {
    // Adjacency list parsed from `reachability_edges.txt` ("src dst" per line).
    static ref EDGES: HashMap<usize, Vec<usize>> = {
        let cursor = Cursor::new(include_bytes!("reachability_edges.txt"));
        let reader = BufReader::new(cursor);

        let mut edges = HashMap::new();
        for line in reader.lines() {
            let line = line.unwrap();
            let mut nums = line.split_whitespace();
            let a = nums.next().unwrap().parse().unwrap();
            let b = nums.next().unwrap().parse().unwrap();
            assert!(nums.next().is_none());
            edges.entry(a).or_insert_with(Vec::new).push(b);
        }
        edges
    };
    // The same edge file as a flat `(src, dst)` pair list.
    static ref EDGE_VEC: Vec<(usize, usize)> = {
        let cursor = Cursor::new(include_bytes!("reachability_edges.txt"));
        let reader = BufReader::new(cursor);
        reader.lines().map(|line| {
            let line = line.unwrap();
            let mut v = line.split_whitespace().map(|n| n.parse::<usize>().unwrap());
            (v.next().unwrap(), v.next().unwrap())
        }).collect()
    };
    // Expected answer: the vertex set reachable from vertex 1, used to check
    // every benchmarked implementation.
    static ref REACHABLE: HashSet<usize> = {
        let cursor = Cursor::new(include_bytes!("reachability_reachable.txt"));
        let reader = BufReader::new(cursor);

        let mut set = HashSet::new();
        for line in reader.lines() {
            let line = line.unwrap();
            set.insert(line.parse().unwrap());
        }
        set
    };
}

/// Reachability implemented as a timely-dataflow feedback loop: new vertices
/// are fed back, expanded through the edge map, and de-duplicated via `seen`.
fn benchmark_timely(c: &mut Criterion) {
    use timely::dataflow::operators::{
        Capture, Concat, ConnectLoop, Feedback, Filter, Map, ToStream,
    };
    let edges = &*EDGES;
    let reachable = &*REACHABLE;

    c.bench_function("reachability/timely", |b| {
        b.iter(|| {
            let edges = edges.clone();
            let receiver = timely::example(|scope| {
                let mut seen = HashSet::new();

                let (handle, stream) = scope.feedback(1);

                let stream_out = (1_usize..=1)
                    .to_stream(scope)
                    .concat(&stream)
                    .flat_map(move |x| edges.get(&x).cloned().into_iter().flatten())
                    .filter(move |x| seen.insert(*x));
                stream_out.connect_loop(handle);

                stream_out.capture()
            });

            // Collect everything the dataflow emitted and compare to the oracle.
            let reached: HashSet<_> = receiver
                .iter()
                .filter_map(|e| match e {
                    timely::dataflow::operators::capture::event::Event::Messages(_, vec) => {
                        Some(vec)
                    }
                    _ => None,
                })
                .flatten()
                .collect();

            assert_eq!(&reached, reachable);
        });
    });
}

/// Reachability via differential-dataflow's `iterate` + `semijoin` + `distinct`.
fn benchmark_differential(c: &mut Criterion) {
    c.bench_function("reachability/differential", |b| {
        b.iter(move || {
            timely::execute_directly(move |worker| {
                let probe = worker.dataflow::<u32, _, _>(|scope| {
                    let edges = scope.new_collection_from(EDGE_VEC.iter().cloned()).1;
                    let roots = scope.new_collection_from(vec![1]).1;

                    let reachable = roots.iterate(|reach| {
                        edges
                            .enter(&reach.scope())
                            .semijoin(reach)
                            .map(|(_src, dst)| dst)
                            .concat(reach)
                            .distinct()
                    });

                    reachable.probe()
                });

                worker.step_while(|| !probe.done());
            });
        });
    });
}

/// Reachability hand-wired on hydroflow's low-level scheduled API:
/// union -> distinct -> tee, with the tee feeding the neighbor-expansion loop
/// and the output sink.
fn benchmark_hydroflow_scheduled(c: &mut Criterion) {
    use hydroflow::scheduled::graph::Hydroflow;
    use hydroflow::scheduled::handoff::{Iter, VecHandoff};
    use hydroflow::{var_args, var_expr};

    let edges = &*EDGES;
    let reachable = &*REACHABLE;

    c.bench_function("reachability/hydroflow/scheduled", |b| {
        b.iter(|| {
            // A dataflow that represents graph reachability.
            let mut df = Hydroflow::new();

            type Hoff = VecHandoff<usize>;
            let (reachable_out, union_lhs) = df.make_edge::<_, Hoff>("reachable_out -> union_lhs");
            let (neighbors_out, union_rhs) = df.make_edge::<_, Hoff>("neighbors_out -> union_rhs");
            let (union_out, distinct_in) = df.make_edge::<_, Hoff>("union_out -> distinct_in");
            let (distinct_out, tee_in) = df.make_edge::<_, Hoff>("distinct_out -> tee_in");
            let (tee_out1, neighbors_in) = df.make_edge::<_, Hoff>("tee_out1 -> neighbors_in");
            let (tee_out2, sink_in) = df.make_edge::<_, Hoff>("tee_out2 -> sink_in");

            // Seed the computation with the root vertex 1.
            df.add_subgraph_source(
                "initially reachable source",
                reachable_out,
                move |_ctx, send| {
                    send.give(Some(1));
                },
            );

            // `seen` persists across ticks via hydroflow state.
            let seen_handle = df.add_state::<RefCell<HashSet<usize>>>(Default::default());

            df.add_subgraph(
                "distinct",
                var_expr!(distinct_in),
                var_expr!(distinct_out),
                move |context, var_args!(recv), var_args!(send)| {
                    let mut seen_state = context.state_ref(seen_handle).borrow_mut();
                    let iter = recv
                        .take_inner()
                        .into_iter()
                        .filter(|v| seen_state.insert(*v));
                    send.give(Iter(iter));
                },
            );

            df.add_subgraph_2in_out(
                "union",
                union_lhs,
                union_rhs,
                union_out,
                |_ctx, recv1, recv2, send| {
                    send.give(Iter(recv1.take_inner().into_iter()));
                    send.give(Iter(recv2.take_inner().into_iter()));
                },
            );

            df.add_subgraph_in_out(
                "get neighbors",
                neighbors_in,
                neighbors_out,
                move |_ctx, recv, send| {
                    for v in recv.take_inner() {
                        if let Some(neighbors) = edges.get(&v) {
                            send.give(Iter(neighbors.iter().copied()));
                        }
                    }
                },
            );

            df.add_subgraph_in_2out(
                "tee",
                tee_in,
                tee_out1,
                tee_out2,
                |_ctx, recv, send1, send2| {
                    for v in recv.take_inner() {
                        send1.give(Some(v));
                        send2.give(Some(v));
                    }
                },
            );

            let reachable_verts = Rc::new(RefCell::new(HashSet::new()));
            let reachable_inner = reachable_verts.clone();
            df.add_subgraph_sink("output sink", sink_in, move |_ctx, recv| {
                (*reachable_inner).borrow_mut().extend(recv.take_inner());
            });

            df.run_available();

            assert_eq!(&*reachable_verts.borrow(), reachable);
        });
    });
}

/// Reachability with a single compiled hydroflow subgraph: the pull side does
/// seed + feedback + dedup, the push side tees into feedback and output.
fn benchmark_hydroflow(c: &mut Criterion) {
    use hydroflow::pusherator::for_each::ForEach;
    use hydroflow::pusherator::{IteratorToPusherator, PusheratorBuild};
    use hydroflow::scheduled::graph::Hydroflow;
    use hydroflow::scheduled::handoff::VecHandoff;
    use hydroflow::{var_args, var_expr};

    let edges = &*EDGES;
    let reachable = &*REACHABLE;

    c.bench_function("reachability/hydroflow", |b| {
        b.iter(|| {
            // A dataflow that represents graph reachability.
            let mut df = Hydroflow::new();

            let (reachable_out, origins_in) =
                df.make_edge::<_, VecHandoff<usize>>("reachable -> origins");
            let (did_reach_out, possible_reach_in) =
                df.make_edge::<_, VecHandoff<usize>>("did_reach -> possible_reach");
            let (output_out, sink_in) = df.make_edge::<_, VecHandoff<usize>>("output -> sink");

            df.add_subgraph_source(
                "initially reachable source",
                reachable_out,
                move |_ctx, send| {
                    send.give(Some(1));
                },
            );

            let seen_handle = df.add_state::<RefCell<HashSet<usize>>>(Default::default());

            df.add_subgraph(
                "main",
                var_expr!(origins_in, possible_reach_in),
                var_expr!(did_reach_out, output_out),
                move |context, var_args!(origins, did_reach_recv), var_args!(did_reach_send, output)| {
                    let origins = origins.take_inner().into_iter();
                    let possible_reach = did_reach_recv
                        .take_inner()
                        .into_iter()
                        .filter_map(|v| edges.get(&v))
                        .flatten()
                        .copied();

                    let mut seen_state = context.state_ref(seen_handle).borrow_mut();
                    let pull = origins
                        .chain(possible_reach)
                        .filter(|v| seen_state.insert(*v));

                    let pivot = pull
                        .pull_to_push()
                        .tee(ForEach::new(|v| {
                            did_reach_send.give(Some(v));
                        }))
                        .for_each(|v| {
                            output.give(Some(v));
                        });

                    pivot.run();
                },
            );

            let reachable_verts = Rc::new(RefCell::new(HashSet::new()));
            let reachable_inner = reachable_verts.clone();
            df.add_subgraph_sink("output sink", sink_in, move |_ctx, recv| {
                (*reachable_inner).borrow_mut().extend(recv.take_inner());
            });

            df.run_available();

            assert_eq!(&*reachable_verts.borrow(), reachable);
        });
    });
}

/// Surface-syntax variant that "cheats" by reading the global `EDGES` map
/// directly inside the graph instead of streaming edges in.
#[allow(clippy::map_clone)]
fn benchmark_hydroflow_surface_cheating(c: &mut Criterion) {
    c.bench_function("reachability/hydroflow/surface_cheating", |b| {
        b.iter_batched(
            || {
                let reachable_verts = Rc::new(RefCell::new(HashSet::new()));
                let df = {
                    let reachable_inner = reachable_verts.clone();
                    hydroflow_syntax! {
                        origin = source_iter([1]);
                        reached_vertices = union();
                        origin -> reached_vertices;

                        my_cheaty_join = reached_vertices
                            -> filter_map(|v| EDGES.get(&v))
                            -> flatten()
                            -> map(|&v| v);
                        my_cheaty_join -> filter(|&v| reachable_inner.borrow_mut().insert(v)) -> reached_vertices;
                    }
                };
                (df, reachable_verts)
            },
            |(mut df, reachable_verts)| {
                df.run_available();
                assert_eq!(&*reachable_verts.borrow(), &*REACHABLE);
            },
            criterion::BatchSize::LargeInput,
        );
    });
}

/// Full surface-syntax variant: edges arrive as a stream and are joined with
/// the reached-vertex set.
fn benchmark_hydroflow_surface(c: &mut Criterion) {
    c.bench_function("reachability/hydroflow/surface", |b| {
        let edges: Vec<_> = EDGES
            .iter()
            .flat_map(|(&k, v)| v.iter().map(move |v| (k, *v)))
            .collect();
        b.iter_batched(
            || {
                let reachable_verts = Rc::new(RefCell::new(HashSet::new()));
                let df = {
                    let edges = edges.clone();
                    let reachable_inner = reachable_verts.clone();
                    hydroflow_syntax! {
                        origin = source_iter(vec![1]);
                        stream_of_edges = source_iter(edges);
                        reached_vertices = union();
                        origin -> reached_vertices;

                        my_join_tee = join() -> flat_map(|(src, ((), dst))| [src, dst]) -> tee();
                        reached_vertices -> map(|v| (v, ())) -> [0]my_join_tee;
                        stream_of_edges -> [1]my_join_tee;
                        my_join_tee -> reached_vertices;
                        my_join_tee -> for_each(|x| {
                            reachable_inner.borrow_mut().insert(x);
                        });
                    }
                };
                (df, reachable_verts)
            },
            |(mut df, reachable_verts)| {
                df.run_available();
                assert_eq!(&*reachable_verts.borrow(), &*REACHABLE);
            },
            criterion::BatchSize::LargeInput,
        );
    });
}

criterion_group!(
    reachability,
    benchmark_timely,
    benchmark_differential,
    benchmark_hydroflow_scheduled,
    benchmark_hydroflow,
    benchmark_hydroflow_surface,
    benchmark_hydroflow_surface_cheating,
);
criterion_main!(reachability);
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::rc::Rc;

/// One lexical scope: local bindings plus optional links to its parent scope
/// and to the root scope of the chain.
struct Scope<T> {
    data: BTreeMap<String, T>,
    parent: Option<PScope<T>>,
    root: Option<PScope<T>>,
}

impl<T> Scope<T> {
    /// A root scope: no parent, no root pointer.
    fn new() -> Self {
        Self {
            data: BTreeMap::new(),
            parent: None,
            root: None,
        }
    }

    /// Bind `id` in this scope only, shadowing any outer binding.
    pub fn set_local(&mut self, id: String, val: T) {
        self.data.insert(id, val);
    }

    /// Bind `id` at the root of the scope chain.
    pub fn set_global(&mut self, id: String, val: T) {
        if let Some(root) = &self.root {
            root.set(id, val);
        } else {
            // This scope *is* the root, so write locally.
            self.data.insert(id, val);
        }
    }

    /// Assign `id` in the nearest scope that already binds it; if no scope
    /// does, define it in this scope.
    pub fn set(&mut self, id: String, val: T) {
        if self.data.contains_key(&id) {
            self.data.insert(id, val);
            return;
        }
        if let Some(parent) = &self.parent {
            // `try_replace` hands the value back when no ancestor binds `id`,
            // in which case it becomes a new local binding here.
            if let Some(unclaimed) = parent.try_replace(&id, val) {
                self.data.insert(id, unclaimed);
            }
        } else {
            self.data.insert(id, val);
        }
    }

    /// `Some(val)` means nothing was replaced anywhere up the chain and the
    /// caller should insert `val` into its local scope.
    fn try_replace(&mut self, id: &str, val: T) -> Option<T> {
        if let Some(slot) = self.data.get_mut(id) {
            *slot = val;
            return None;
        }
        match &self.parent {
            Some(parent) => parent.p.borrow_mut().try_replace(id, val),
            None => Some(val),
        }
    }

    /// Apply `f` to the binding of `k` in the nearest scope that has one,
    /// returning `f`'s result, or `None` when `k` is unbound everywhere.
    pub fn update<F: Fn(&mut T) -> A, A>(&mut self, k: &str, f: F) -> Option<A> {
        match self.data.get_mut(k) {
            Some(slot) => Some(f(slot)),
            None => self.parent.as_ref().and_then(|p| p.update(k, f)),
        }
    }
}

impl<T: Clone> Scope<T> {
    /// Look `k` up through the scope chain, cloning the bound value.
    pub fn get(&self, k: &str) -> Option<T> {
        self.data
            .get(k)
            .cloned()
            .or_else(|| self.parent.as_ref().and_then(|p| p.get(k)))
    }
}

/// Shared handle to a `Scope`; cloning the handle aliases the same scope.
pub struct PScope<T> {
    p: Rc<RefCell<Scope<T>>>,
}

// Manual impl: a derived `Clone` would needlessly require `T: Clone`.
impl<T> Clone for PScope<T> {
    fn clone(&self) -> Self {
        PScope { p: self.p.clone() }
    }
}

///
/// ```rust
/// use scope_store::PScope;
/// let root = PScope::new();
///
/// let a1 = root.child();
///
/// a1.set_global("a".to_string(), 23);
/// assert_eq!(root.get("a"), Some(23));
/// assert_eq!(root.get("b"), None);
///
/// root.update("a", |n| *n += 1);
/// assert_eq!(a1.get("a"), Some(24));
///
/// let b1 = a1.child();
/// let a2 = root.child();
/// a1.set_local("cat".to_string(), 7);
/// assert_eq!(b1.get("cat"), Some(7));
/// assert_eq!(a2.get("cat"), None);
///
/// a1.set_global("dog".to_string(), 8);
/// assert_eq!(b1.get("dog"), Some(8));
/// assert_eq!(a2.get("dog"), Some(8));
///
/// assert_eq!(
///     b1.update("cat", |n| {
///         *n += 3;
///         *n
///     }),
///     Some(10)
/// );
/// assert_eq!(a1.get("cat"),Some(10));
/// ```
///
impl<T> PScope<T> {
    pub fn new() -> Self {
        PScope {
            p: Rc::new(RefCell::new(Scope::new())),
        }
    }

    pub fn set_local(&self, id: String, val: T) {
        self.p.borrow_mut().set_local(id, val);
    }

    pub fn set_global(&self, id: String, val: T) {
        self.p.borrow_mut().set_global(id, val);
    }

    pub fn set(&self, id: String, val: T) {
        self.p.borrow_mut().set(id, val);
    }

    pub fn try_replace(&self, id: &str, val: T) -> Option<T> {
        self.p.borrow_mut().try_replace(id, val)
    }

    pub fn update<F: Fn(&mut T) -> A, A>(&self, id: &str, f: F) -> Option<A> {
        self.p.borrow_mut().update(id, f)
    }

    /// Create a child scope. The root pointer propagates down the chain; the
    /// root itself stores `root: None`, so its direct children point at it.
    pub fn child(&self) -> Self {
        let root = Some(
            self.p
                .borrow()
                .root
                .clone()
                .unwrap_or_else(|| self.clone()),
        );
        PScope {
            p: Rc::new(RefCell::new(Scope {
                data: BTreeMap::new(),
                root,
                parent: Some(self.clone()),
            })),
        }
    }
}

impl<T: Clone> PScope<T> {
    pub fn get(&self, id: &str) -> Option<T> {
        self.p.borrow().get(id)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn it_works() {
        let root = PScope::new();

        let a1 = root.child();

        a1.set_global("a".to_string(), 23);
        assert_eq!(root.get("a"), Some(23));
        assert_eq!(root.get("b"), None);

        root.update("a", |n| *n += 1);
        assert_eq!(a1.get("a"), Some(24));

        let b1 = a1.child();
        let a2 = root.child();
        a1.set_local("cat".to_string(), 7);
        assert_eq!(b1.get("cat"), Some(7));
        assert_eq!(a2.get("cat"), None);

        a1.set_global("dog".to_string(), 8);
        assert_eq!(b1.get("dog"), Some(8));
        assert_eq!(a2.get("dog"), Some(8));

        assert_eq!(
            b1.update("cat", |n| {
                *n += 3;
                *n
            }),
            Some(10)
        );
    }
}
use super::{osgood, Local, Valuable, V8}; pub struct FunctionCallbackInfo { info_: *const V8::FunctionCallbackInfo, } impl FunctionCallbackInfo { pub fn new(info_: *const V8::FunctionCallbackInfo) -> FunctionCallbackInfo { FunctionCallbackInfo { info_ } } pub fn length(&self) -> i32 { unsafe { self.info_.as_ref().unwrap().length_ } } pub fn get(&self, i: i32) -> Result<Local<V8::Value>, String> { if self.length() == 0 || i < 0 || i > self.length() { Err(String::from("OOB")) } else { Ok(unsafe { osgood::info_get_arg(self.info_, i).into() }) } } pub fn set_return_value(&self, ret_val: &impl Valuable) { unsafe { osgood::info_set_return_value(self.info_, ret_val.as_value().into()); } } }
use postgres::{Client, NoTls}; pub fn get_event_store_db_connection() -> Result<Client, postgres::Error> { Client::connect( crate::SECRETS .get("event_store_connection_string") .map_or(&"", |s| &s), NoTls, ) } pub fn get_user_query_db_connection() -> Result<Client, postgres::Error> { Client::connect( crate::SECRETS .get("user_query_connection_string") .map_or(&"", |s| &s), NoTls, ) }
use hpdf_sys::{ HPDF_Doc, HPDF_Page }; use paper::{ Page }; use ffi; use std::cell::{ RefCell, RefMut }; pub struct Document { handle: HPDF_Doc, pages: RefCell<Vec<HPDF_Page>> } impl Document { pub fn new() -> Result<Document, String> { let handle = ffi::hpdf_new(); match handle.is_null() { true => { let message = String::new(); Err(message) }, false => { let handle = Document { handle: handle, pages: RefCell::new(Vec::new()) }; Ok(handle) } } } pub fn has_handle(&self) -> bool { if ffi::hpdf_has_doc(self.handle) == 1 { return true } false } pub fn create_page(&self) -> Result<HPDF_Page, String> { match self.has_handle() { true => { Ok(ffi::page::hpdf_add_page(self.handle)) }, false => { let message = String::new(); Err(message) } } } pub fn add_page(&self) { let new_page = self.create_page().ok().unwrap(); // A wrapper to T contained inside RefCell<T> let mut pages_ref: RefMut<Vec<HPDF_Page>> = self.pages.borrow_mut(); (*pages_ref).push(new_page); } pub fn set_page_size(&self, page: HPDF_Page, page_size: Page ) -> () { } pub fn save_to_file(&self, filename: &str) -> Result<(), String>{ ffi::hpdf_save_to_file(self.handle, filename) } } impl Drop for Document { fn drop(&mut self) { println!("dropping document"); ffi::hpdf_free(self.handle); } }
use crate::prelude::*; use super::{ TextureMapping2d, Mapping2d }; use crate::interaction::SurfaceInteraction; pub struct UvMapping2d { pub su: Float, pub sv: Float, pub du: Float, pub dv: Float, } impl TextureMapping2d for UvMapping2d { fn map(&self, si: &SurfaceInteraction<'_>) -> Mapping2d { let dstdx = Vector2f::new(self.su * si.dudx, self.sv * si.dvdx); let dstdy = Vector2f::new(self.su * si.dudy, self.sv * si.dvdy); let point = Point2f::new( self.su * si.uv[0] + self.du, self.sv * si.uv[1] + self.dv, ); Mapping2d { dstdx, dstdy, point } } }
//! Restart of Interrupted Transfer (REST) //! To avoid having to resend the entire file if the file is only //! partially transferred, both sides need some way to agree on where in //! the data stream to restart the data transfer. //! //! See also: <https://cr.yp.to/ftp/retr.html> //! use crate::{ auth::UserDetail, server::controlchan::{ error::ControlChanError, handler::{CommandContext, CommandHandler}, Reply, ReplyCode, }, storage::{Metadata, StorageBackend, FEATURE_RESTART}, }; use async_trait::async_trait; #[derive(Debug)] pub struct Rest { offset: u64, } impl Rest { pub fn new(offset: u64) -> Self { Rest { offset } } } #[async_trait] impl<Storage, User> CommandHandler<Storage, User> for Rest where User: UserDetail, Storage: StorageBackend<User> + 'static, Storage::Metadata: 'static + Metadata, { #[tracing_attributes::instrument] async fn handle(&self, args: CommandContext<Storage, User>) -> Result<Reply, ControlChanError> { if args.storage_features & FEATURE_RESTART == 0 { return Ok(Reply::new(ReplyCode::CommandNotImplemented, "Not supported by the selected storage back-end.")); } let mut session = args.session.lock().await; session.start_pos = self.offset; let msg = format!("Restarting at {}. Now send STORE or RETRIEVE.", self.offset); Ok(Reply::new(ReplyCode::FileActionPending, &msg)) } }
use super::*; use arbitrary::Arbitrary; #[derive(Arbitrary, Debug)] pub struct FromVecHarnessParams { edges: Vec<Result<StringQuadruple, String>>, nodes: Option<Vec<Result<(String, Option<String>), String>>>, directed: bool, ignore_duplicated_nodes: bool, ignore_duplicated_edges: bool, numeric_edge_types_ids: bool, numeric_node_ids: bool, numeric_node_types_ids: bool } pub fn from_vec_harness(data: FromVecHarnessParams) -> Result<(), String> { let mut g = graph::Graph::from_string_unsorted( data.edges.iter().cloned(), match &data.nodes { Some(ns) => Some(ns.iter().cloned()), None => None, }, data.directed, false, "Graph".to_owned(), data.ignore_duplicated_nodes, data.ignore_duplicated_edges, false, false, data.numeric_edge_types_ids, data.numeric_node_ids, data.numeric_node_types_ids, true, true )?; // We ignore this error because we execute only the fuzzing to find // the panic situations that are NOT just errors, but unhandled errors. let _ = graph::test_utilities::default_test_suite(&mut g, false); Ok(()) }
use rand::Rng; use std::{thread, time}; use std::vec::Vec; use ansi_escapes::*; // specific characters const GRID_SIZE: (usize, usize) = (52, 52); const PROBABILITY: f32 = 0.4; const FPS: f32 = 10.0; fn print_grid(grid_size: (usize, usize), grid: &Vec<Vec<char>>){ /* print the grid in the terminal input grid_size: (usize, usize) grid: &Vec<Vec<char>> output None */ print!(" "); for _ in 0..(grid_size.0+2)*2 {print!("\u{2588}");} println!(""); for i in 0..grid_size.1 { print!(" \u{2588}\u{2588}"); for j in 0..grid_size.0 { print!("{}{}", grid[i][j], grid[i][j]); } println!("\u{2588}\u{2588}"); } print!(" "); for _ in 0..(grid_size.0+2)*2 {print!("\u{2588}");} println!("\n"); } fn update_grid(grid_size: (usize, usize), current_grid: &Vec<Vec<char>>, new_grid: &mut Vec<Vec<char>>) { /* update grid state input grid_size: (usize, usize) current_grid: &Vec<Vec<char>> new_grid: &mut Vec<Vec<char>> output None */ for i in 0..grid_size.1 { for j in 0..grid_size.0 { let x = nb_alive_neighbor(&current_grid, (i, j)); if current_grid[i][j] == '\u{2588}' { // is alive if x < 2 || x > 3 { // cell die new_grid[i][j] = ' '; } } else { // cell is dead if x == 3 { // cell becomes alive new_grid[i][j] = '\u{2588}'; } } } } } fn nb_alive_neighbor(grid: &Vec<Vec<char>>, cell: (usize, usize)) -> u8 { /* Takes a cell index as input and return its number of alive neighbor. 
input grid: &[[char; 32]; 32] cell: (usize, usize) output u8 */ let mut nb_alive_neighbor: u8 = 0; // check top if cell.0 != 0 { if grid[cell.0-1][cell.1] == '\u{2588}' { nb_alive_neighbor += 1; } // check top right if cell.1 != grid[0].len()-1 { if grid[cell.0-1][cell.1+1] == '\u{2588}' { nb_alive_neighbor += 1; } } // check top left if cell.1 != 0 { if grid[cell.0-1][cell.1-1] == '\u{2588}' { nb_alive_neighbor += 1; } } } // check bottom if cell.0 != grid.len()-1 { if grid[cell.0+1][cell.1] == '\u{2588}' { nb_alive_neighbor += 1; } // check bottom left if cell.1 != 0 { if grid[cell.0+1][cell.1-1] == '\u{2588}' { nb_alive_neighbor += 1; } } // check bottom right if cell.1 != grid[0].len()-1 { if grid[cell.0+1][cell.1+1] == '\u{2588}' { nb_alive_neighbor += 1; } } } // check left if cell.1 != 0 { if grid[cell.0][cell.1-1] == '\u{2588}' { nb_alive_neighbor += 1; } } // check right if cell.1 != grid[0].len()-1 { if grid[cell.0][cell.1+1] == '\u{2588}' { nb_alive_neighbor += 1; } } return nb_alive_neighbor; } fn initialize_grid(probability: f32) -> Vec<Vec<char>> { /* Initialize a new grid of cells according to probability. 
input probability: f32 output Vec<Vec<i64>> */ let mut rng = rand::thread_rng(); (0..GRID_SIZE.0).map(|_| { (0..GRID_SIZE.1).map(|_| { // cell is alive if rng.gen::<f32>() < probability { '\u{2588}' } // cell is dead else { ' ' } }).collect() }).collect() } //////////////////////////////////////////////////////////////////////////////// // main //////////////////////////////////////////////////////////////////////////////// fn main(){ println!("\n\n Welcome to the game of life !\n"); let mut current_grid = initialize_grid(PROBABILITY); let mut new_grid: Vec<Vec<char>>=vec![vec![' '; GRID_SIZE.1]; GRID_SIZE.0]; let delay = time::Duration::from_millis((1.0/FPS * 1000.0) as u64); loop { thread::sleep(delay); print_grid(GRID_SIZE, &current_grid); update_grid(GRID_SIZE, &current_grid, &mut new_grid); current_grid = new_grid.to_owned(); // erase terminal for _ in 0..GRID_SIZE.0+3 { print!("{}{}", CursorUp(1), EraseLine); } } }
use hacl_star_sys as ffi;
use crate::And;

// Byte lengths of the ChaCha20-Poly1305 key, nonce and authentication tag.
pub const KEY_LENGTH : usize = 32;
pub const NONCE_LENGTH: usize = 12;
pub const MAC_LENGTH : usize = 16;

/// A borrowed (key, nonce) pair ready to perform one AEAD operation.
pub type ChaCha20Poly1305<'a> = And<&'a Key, &'a Nonce>;

// `define!` is a project macro; it presumably generates these newtype
// wrappers plus the lowercase conversion helpers (`key`, `nonce`) used
// below — confirm against the macro's definition.
define!{
    pub struct Key/key(pub [u8; KEY_LENGTH]);
    pub struct Nonce/nonce(pub [u8; NONCE_LENGTH]);
}

impl Key {
    /// Pairs this key with a nonce, yielding a one-shot AEAD context.
    #[inline]
    pub fn nonce<'a>(&'a self, n: &'a [u8; NONCE_LENGTH]) -> ChaCha20Poly1305<'a> {
        And(self, nonce(n))
    }
}

impl<'a> ChaCha20Poly1305<'a> {
    /// Encrypts `m` in place and writes the authentication tag into `mac`.
    /// `aad` is authenticated but not encrypted.
    pub fn encrypt(self, aad: &[u8], m: &mut [u8], mac: &mut [u8; MAC_LENGTH]) {
        // SAFETY: every pointer comes from a live slice/array whose length
        // is passed alongside it; `m` is used as both source and
        // destination (in-place encryption) — NOTE(review): confirm the
        // argument order against the generated HACL* C signature.
        unsafe {
            ffi::chacha20poly1305::Hacl_Chacha20Poly1305_aead_encrypt(
                m.as_mut_ptr(),
                mac.as_mut_ptr(),
                m.as_ptr() as _, m.len() as _,
                aad.as_ptr() as _, aad.len() as _,
                (self.0).0.as_ptr() as _,
                (self.1).0.as_ptr() as _
            );
        }
    }

    /// Verifies `mac` over `c`/`aad` and decrypts `c` in place.
    /// Returns `true` when the tag is valid (FFI call returned 0).
    pub fn decrypt(self, aad: &[u8], c: &mut [u8], mac: &[u8; MAC_LENGTH]) -> bool {
        // SAFETY: same pointer/length pairing as `encrypt`; `c` is both
        // ciphertext input and plaintext output.
        unsafe {
            ffi::chacha20poly1305::Hacl_Chacha20Poly1305_aead_decrypt(
                c.as_mut_ptr(),
                c.as_ptr() as _, c.len() as _,
                mac.as_ptr() as _,
                aad.as_ptr() as _, aad.len() as _,
                (self.0).0.as_ptr() as _,
                (self.1).0.as_ptr() as _
            ) == 0
        }
    }
}
use crate::bot::Bot;
use crate::error::ApiResult;

pub mod bot;
pub mod error;
pub mod method;
pub mod typing;

/// A serializable Telegram Bot API request.
///
/// Implementors name the API method via `METHOD` and declare the payload a
/// successful call deserializes into as `Response`.
pub trait TelegramApiMethod: serde::Serialize {
    /// Name of the Telegram API method this request maps to.
    const METHOD : &'static str;
    /// Deserialized body of a successful API reply.
    type Response: serde::de::DeserializeOwned;

    /// Returns `Self::METHOD`; usable through a trait object, where the
    /// associated constant cannot be reached directly.
    fn get_method(&self) -> &'static str {
        Self::METHOD
    }
}
pub(crate) mod dns_seeding; pub(crate) mod dump_peer_store; pub(crate) mod outbound_peer; pub(crate) mod protocol_type_checker;
/// Walkthrough of Rust ownership semantics: moves, clones, `Copy` types,
/// scopes, and ownership transfer through function calls.
fn main() {
    println!("Hello, world!");

    // ERROR:"cannot assign twice to immutable variable"
    // let message = "hello";
    // message = "world";
    // println!("{}",message);

    //---------------------
    // mutable variables can be assigned twice
    let mut number = 34;
    // (written `06` before — Rust reads leading-zero literals as decimal,
    // so this is just 6)
    number = 6;
    println!("{}", number);

    //---------------------
    let mut message = String::from("hello"); // message#1
    message.push_str(", world!");
    println!("{}", message);

    {
        let message = String::from("Arda"); // message#2 shadows message#1
    } // message#2 is destroyed

    // message#1 is visible again
    println!("{}", message);

    //---------------------
    // i32 is Copy: both bindings stay usable
    let x = 5;
    let y = x;
    println!("x={}, y={}", x, y);

    //---------------------
    // ERROR:"value borrowed here after move"
    // let message1 = String::from("hello");
    // let message2 = message1;
    // println!("{}, world!", message1);

    //---------------------
    let message1 = String::from("hello");
    let message2 = message1.clone();
    // BUG FIX: message2 was cloned but never used (message1 was printed
    // twice); printing the clone actually demonstrates that both are live.
    println!("{}, world!, {} mars!", message1, message2);

    //---------------------
    let mut message1 = String::from("hello");
    let message2 = message1.clone();
    message1 = "hi".to_string();
    // BUG FIX: printing the clone shows it kept the original value
    // ("hello") even after message1 was reassigned.
    println!("{}, world!, {} mars!", message1, message2);

    //---------------------
    let message1 = String::from("hello");
    takes_ownership(message1); // message1 moved; unusable afterwards

    let number = 100;
    makes_copy(number); // i32 is Copy: number is still usable below
    println!("{}", number);

    let message1 = String::from("HELLO WORLD");
    let message2 = takes_and_gives_back(message1); // ownership round-trips
    println!("{}", message2);
}

/// Takes ownership of `some_string`; it is dropped when this returns.
fn takes_ownership(some_string: String) {
    println!("{}", some_string);
}

/// Receives a copy of the caller's integer; mutating it here does not
/// affect the caller's binding.
fn makes_copy(mut some_integer: i32) {
    println!("First value :{}", some_integer);
    some_integer += 100;
    println!("After +=100 :{}", some_integer);
}

/// Takes ownership of `some_string`, appends "!", and returns ownership
/// to the caller.
fn takes_and_gives_back(mut some_string: String) -> String {
    some_string.push_str("!");
    some_string
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under both the MIT license found in the * LICENSE-MIT file in the root directory of this source tree and the Apache * License, Version 2.0 found in the LICENSE-APACHE file in the root directory * of this source tree. */ //! Module introduces a proc macro for sql_common::mysql. extern crate proc_macro; use proc_macro::TokenStream; use quote::quote; use syn::{parse_macro_input, DeriveInput}; /// The proc macro allows to derive an implementation of mysql_client::OptionalTryFromRowField /// trait for the type if that type implements mysql_async::FromValueOpt. #[proc_macro_derive(OptTryFromRowField)] pub fn derive_tryfrom_rowfield(input: TokenStream) -> TokenStream { let parsed_input = parse_macro_input!(input as DeriveInput); let name = parsed_input.ident; let expanded = quote! { impl mysql::OptionalTryFromRowField for #name { fn try_from_opt(field: mysql::RowField) -> Result<Option<Self>, mysql::MysqlError> { mysql::opt_try_from_rowfield(field) } } }; expanded.into() }
use std::collections::HashSet;

use svm_types::{SectionKind, Template, TemplateAddr};

// `Env` storage serialization types
// (was a `///` doc comment on the `use` below, which rustc warns about)
use crate::env::ExtAccount;

/// Serializing an [`Template`] into its binary representation.
pub trait TemplateSerializer {
    #[allow(missing_docs)]
    fn serialize(template: &Template) -> Vec<u8>;
}

/// Deserializing stored [`Template`] into its in-memory representation.
pub trait TemplateDeserializer {
    #[allow(missing_docs)]
    fn deserialize(bytes: &[u8], interests: Option<HashSet<SectionKind>>) -> Option<Template>;
}

/// Serializing an `Account` into its binary representation.
pub trait AccountSerializer {
    #[allow(missing_docs)]
    fn serialize(account: &ExtAccount) -> Vec<u8>;
}

/// Deserializing stored `Account` into its in-memory representation.
pub trait AccountDeserializer {
    #[allow(missing_docs)]
    fn deserialize(bytes: &[u8]) -> Option<ExtAccount>;

    /// Convenience: deserializes only far enough to extract the account's
    /// [`TemplateAddr`]; returns `None` when the bytes don't parse.
    fn deserialize_template_addr(bytes: &[u8]) -> Option<TemplateAddr> {
        Self::deserialize(bytes).map(|account| account.template_addr().clone())
    }
}

/// Bundles the four (de)serializer implementations an `Env` storage
/// backend must provide, as associated types.
pub trait EnvSerializers {
    /// [`Template`]'s Serializer
    type TemplateSerializer: TemplateSerializer;

    /// [`Template`]'s Deserializer
    type TemplateDeserializer: TemplateDeserializer;

    /// [`Account`]'s Serializer
    type AccountSerializer: AccountSerializer;

    /// [`Account`]'s Deserializer
    type AccountDeserializer: AccountDeserializer;
}
// Copyright 2021 Red Hat, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod active_connection; mod connection; mod dbus; mod dbus_proxy; mod dbus_value; mod error; mod nm_api; pub use crate::active_connection::NmActiveConnection; pub use crate::connection::{ NmConnection, NmSettingBridge, NmSettingConnection, NmSettingIp, NmSettingIpMethod, }; pub use crate::error::{ErrorKind, NmError}; pub use crate::nm_api::NmApi;
#![allow(clippy::inline_always)] #![allow(clippy::needless_pass_by_value)] #[cfg(test)] mod benchmark { use crate::ebr; use crate::{HashIndex, HashMap, TreeIndex}; use std::collections::hash_map::RandomState; use std::hash::{BuildHasher, Hash}; use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering::Relaxed; use std::sync::{Arc, Barrier}; use std::thread; use std::time::{Duration, Instant}; #[derive(Clone)] struct Workload { size: usize, insert_local: usize, insert_remote: usize, scan: usize, read_local: usize, read_remote: usize, remove_local: usize, remove_remote: usize, } impl Workload { pub fn max_per_op_size(&self) -> usize { self.insert_local.max( self.insert_remote.max( self.read_local.max( self.read_remote .max(self.remove_local.max(self.remove_remote)), ), ), ) } pub fn has_remote_op(&self) -> bool { self.insert_remote > 0 || self.read_remote > 0 || self.remove_remote > 0 } } trait BenchmarkOperation< K: Clone + Eq + Hash + Ord + Send + Sync, V: Clone + Send + Sync + Unpin, H: BuildHasher, > { fn insert_test(&self, k: K, v: V) -> bool; fn read_test(&self, k: &K) -> bool; fn scan_test(&self) -> usize; fn remove_test(&self, k: &K) -> bool; } impl< K: Clone + Eq + Hash + Ord + Send + Sync, V: Clone + Send + Sync + Unpin, H: BuildHasher, > BenchmarkOperation<K, V, H> for HashMap<K, V, H> { #[inline(always)] fn insert_test(&self, k: K, v: V) -> bool { self.insert(k, v).is_ok() } #[inline(always)] fn read_test(&self, k: &K) -> bool { self.read(k, |_, _| ()).is_some() } #[inline(always)] fn scan_test(&self) -> usize { let mut scanned = 0; self.for_each(|_, _| scanned += 1); scanned } #[inline(always)] fn remove_test(&self, k: &K) -> bool { self.remove(k).is_some() } } impl< K: Clone + Eq + Hash + Ord + Send + Sync, V: Clone + Send + Sync + Unpin, H: 'static + BuildHasher, > BenchmarkOperation<K, V, H> for HashIndex<K, V, H> { #[inline(always)] fn insert_test(&self, k: K, v: V) -> bool { self.insert(k, v).is_ok() } #[inline(always)] fn 
read_test(&self, k: &K) -> bool { self.read(k, |_, _| ()).is_some() } #[inline(always)] fn scan_test(&self) -> usize { let barrier = ebr::Barrier::new(); self.iter(&barrier).count() } #[inline(always)] fn remove_test(&self, k: &K) -> bool { self.remove(k) } } impl< K: Clone + Eq + Hash + Ord + Send + Sync, V: Clone + Send + Sync + Unpin, H: BuildHasher, > BenchmarkOperation<K, V, H> for TreeIndex<K, V> { #[inline(always)] fn insert_test(&self, k: K, v: V) -> bool { self.insert(k, v).is_ok() } #[inline(always)] fn read_test(&self, k: &K) -> bool { self.read(k, |_, _| ()).is_some() } #[inline(always)] fn scan_test(&self) -> usize { let ebr_barrier = ebr::Barrier::new(); self.iter(&ebr_barrier).count() } #[inline(always)] fn remove_test(&self, k: &K) -> bool { self.remove(k) } } trait ConvertFromUsize { fn convert(from: usize) -> Self; } impl ConvertFromUsize for usize { #[inline(always)] fn convert(from: usize) -> usize { from } } impl ConvertFromUsize for String { #[inline(always)] fn convert(from: usize) -> String { from.to_string() } } fn perform< K: Clone + ConvertFromUsize + Eq + Hash + Ord + Send + Sync, V: Clone + ConvertFromUsize + Send + Sync + Unpin, C: BenchmarkOperation<K, V, RandomState> + 'static + Send + Sync, >( num_threads: usize, start_index: usize, container: Arc<C>, workload: Workload, ) -> (Duration, usize) { let barrier = Arc::new(Barrier::new(num_threads + 1)); let total_num_operations = Arc::new(AtomicUsize::new(0)); let mut thread_handles = Vec::with_capacity(num_threads); for thread_id in 0..num_threads { let container_copied = container.clone(); let barrier_copied = barrier.clone(); let total_num_operations_copied = total_num_operations.clone(); let workload_copied = workload.clone(); thread_handles.push(thread::spawn(move || { let mut num_operations = 0; let per_op_workload_size = workload_copied.max_per_op_size(); let per_thread_workload_size = workload_copied.size * per_op_workload_size; barrier_copied.wait(); for _ in 
0..workload_copied.scan { num_operations += container_copied.scan_test(); } for i in 0..per_thread_workload_size { let remote_thread_id = if num_threads < 2 { 0 } else { (thread_id + 1 + i % (num_threads - 1)) % num_threads }; assert!(num_threads < 2 || thread_id != remote_thread_id); for j in 0..workload_copied.insert_local { let local_index = thread_id * per_thread_workload_size + i * per_op_workload_size + j + start_index; let result = container_copied.insert_test(K::convert(local_index), V::convert(i)); assert!(result || workload_copied.has_remote_op()); num_operations += 1; } for j in 0..workload_copied.insert_remote { let remote_index = remote_thread_id * per_thread_workload_size + i * per_op_workload_size + j + start_index; container_copied.insert_test(K::convert(remote_index), V::convert(i)); num_operations += 1; } for j in 0..workload_copied.read_local { let local_index = thread_id * per_thread_workload_size + i * per_op_workload_size + j + start_index; let result = container_copied.read_test(&K::convert(local_index)); assert!(result || workload_copied.has_remote_op()); num_operations += 1; } for j in 0..workload_copied.read_remote { let remote_index = remote_thread_id * per_thread_workload_size + i * per_op_workload_size + j + start_index; container_copied.read_test(&K::convert(remote_index)); num_operations += 1; } for j in 0..workload_copied.remove_local { let local_index = thread_id * per_thread_workload_size + i * per_op_workload_size + j + start_index; let result = container_copied.remove_test(&K::convert(local_index)); assert!(result || workload_copied.has_remote_op()); num_operations += 1; } for j in 0..workload_copied.remove_remote { let remote_index = remote_thread_id * per_thread_workload_size + i * per_op_workload_size + j + start_index; container_copied.remove_test(&K::convert(remote_index)); num_operations += 1; } } barrier_copied.wait(); total_num_operations_copied.fetch_add(num_operations, Relaxed); })); } barrier.wait(); let start_time = 
Instant::now(); barrier.wait(); let end_time = Instant::now(); for handle in thread_handles { handle.join().unwrap(); } ( end_time.saturating_duration_since(start_time), total_num_operations.load(Relaxed), ) } #[allow(clippy::too_many_lines)] fn hashmap_benchmark< T: 'static + ConvertFromUsize + Clone + Eq + Hash + Ord + Send + Sync + Unpin, >( workload_size: usize, num_threads: Vec<usize>, ) { for num_threads in num_threads { let hashmap: Arc<HashMap<usize, usize, RandomState>> = Arc::new(HashMap::default()); // 1. insert-local let insert = Workload { size: workload_size, insert_local: 1, insert_remote: 0, scan: 0, read_local: 0, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, hashmap.clone(), insert.clone()); println!( "hashmap-insert-local: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(hashmap.len(), workload_size * num_threads); // 2. scan let scan = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 1, read_local: 0, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, hashmap.clone(), scan.clone()); println!( "hashmap-scan: {}, {:?}, {}", num_threads, duration, total_num_operations ); // 3. read-local let read = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 0, read_local: 1, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, hashmap.clone(), read.clone()); println!( "hashmap-read-local: {}, {:?}, {}", num_threads, duration, total_num_operations ); // 4. 
remove-local let remove = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 0, read_local: 0, read_remote: 0, remove_local: 1, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, hashmap.clone(), remove.clone()); println!( "hashmap-remove-local: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(hashmap.len(), 0); // 5. insert-local-remote let insert = Workload { size: workload_size, insert_local: 1, insert_remote: 1, scan: 0, read_local: 0, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, hashmap.clone(), insert.clone()); println!( "hashmap-insert-local-remote: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(hashmap.len(), workload_size * num_threads); // 6. mixed let mixed = Workload { size: workload_size, insert_local: 1, insert_remote: 1, scan: 0, read_local: 1, read_remote: 1, remove_local: 1, remove_remote: 1, }; let (duration, total_num_operations) = perform( num_threads, workload_size * num_threads, hashmap.clone(), mixed.clone(), ); println!( "hashmap-mixed: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(hashmap.len(), workload_size * num_threads); // 7. remove-local-remote let remove = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 0, read_local: 0, read_remote: 0, remove_local: 1, remove_remote: 1, }; let (duration, total_num_operations) = perform(num_threads, 0, hashmap.clone(), remove.clone()); println!( "hashmap-remove-local-remote: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(hashmap.len(), 0); } } #[allow(clippy::too_many_lines)] fn hashindex_benchmark< T: 'static + ConvertFromUsize + Clone + Eq + Hash + Ord + Send + Sync + Unpin, >( workload_size: usize, num_threads: Vec<usize>, ) { for num_threads in num_threads { let hashindex: Arc<HashIndex<T, T, RandomState>> = Arc::new(HashIndex::default()); // 1. 
insert-local let insert = Workload { size: workload_size, insert_local: 1, insert_remote: 0, scan: 0, read_local: 0, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, hashindex.clone(), insert.clone()); println!( "hashindex-insert-local: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(hashindex.len(), workload_size * num_threads); // 2. scan let scan = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 1, read_local: 0, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, hashindex.clone(), scan.clone()); println!( "hashindex-scan: {}, {:?}, {}", num_threads, duration, total_num_operations ); // 3. read-local let read = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 0, read_local: 1, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, hashindex.clone(), read.clone()); println!( "hashindex-read-local: {}, {:?}, {}", num_threads, duration, total_num_operations ); // 4. remove-local let remove = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 0, read_local: 0, read_remote: 0, remove_local: 1, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, hashindex.clone(), remove.clone()); println!( "hashindex-remove-local: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(hashindex.len(), 0); // 5. insert-local-remote let insert = Workload { size: workload_size, insert_local: 1, insert_remote: 1, scan: 0, read_local: 0, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, hashindex.clone(), insert.clone()); println!( "hashindex-insert-local-remote: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(hashindex.len(), workload_size * num_threads); // 6. 
mixed let mixed = Workload { size: workload_size, insert_local: 1, insert_remote: 1, scan: 0, read_local: 1, read_remote: 1, remove_local: 1, remove_remote: 1, }; let (duration, total_num_operations) = perform( num_threads, workload_size * num_threads, hashindex.clone(), mixed.clone(), ); println!( "hashindex-mixed: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(hashindex.len(), workload_size * num_threads); // 7. remove-local-remote let remove = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 0, read_local: 0, read_remote: 0, remove_local: 1, remove_remote: 1, }; let (duration, total_num_operations) = perform(num_threads, 0, hashindex.clone(), remove.clone()); println!( "hashindex-remove-local-remote: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(hashindex.len(), 0); } } #[allow(clippy::too_many_lines)] fn treeindex_benchmark< T: 'static + ConvertFromUsize + Clone + Hash + Ord + Send + Sync + Unpin, >( workload_size: usize, num_threads: Vec<usize>, ) { for num_threads in num_threads { let treeindex: Arc<TreeIndex<T, T>> = Arc::new(TreeIndex::default()); // 1. insert-local let insert = Workload { size: workload_size, insert_local: 1, insert_remote: 0, scan: 0, read_local: 0, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, treeindex.clone(), insert.clone()); println!( "treeindex-insert-local: {}, {:?}, {}, depth = {}", num_threads, duration, total_num_operations, treeindex.depth() ); // 2. scan let scan = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 1, read_local: 0, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, treeindex.clone(), scan.clone()); println!( "treeindex-scan: {}, {:?}, {}", num_threads, duration, total_num_operations ); // 3. 
read-local let read = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 0, read_local: 1, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, treeindex.clone(), read.clone()); println!( "treeindex-read-local: {}, {:?}, {}", num_threads, duration, total_num_operations ); // 4. remove-local let remove = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 0, read_local: 0, read_remote: 0, remove_local: 1, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, treeindex.clone(), remove.clone()); println!( "treeindex-remove-local: {}, {:?}, {}", num_threads, duration, total_num_operations ); assert_eq!(treeindex.len(), 0); // 5. insert-local-remote let insert = Workload { size: workload_size, insert_local: 1, insert_remote: 1, scan: 0, read_local: 0, read_remote: 0, remove_local: 0, remove_remote: 0, }; let (duration, total_num_operations) = perform(num_threads, 0, treeindex.clone(), insert.clone()); println!( "treeindex-insert-local-remote: {}, {:?}, {}, depth = {}", num_threads, duration, total_num_operations, treeindex.depth() ); // 6. mixed let mixed = Workload { size: workload_size, insert_local: 1, insert_remote: 1, scan: 0, read_local: 1, read_remote: 1, remove_local: 1, remove_remote: 1, }; let (duration, total_num_operations) = perform( num_threads, treeindex.len(), treeindex.clone(), mixed.clone(), ); println!( "treeindex-mixed: {}, {:?}, {}", num_threads, duration, total_num_operations ); // 7. 
remove-local-remote let remove = Workload { size: workload_size, insert_local: 0, insert_remote: 0, scan: 0, read_local: 0, read_remote: 0, remove_local: 1, remove_remote: 1, }; let (duration, total_num_operations) = perform(num_threads, 0, treeindex.clone(), remove.clone()); println!( "treeindex-remove-local-remote: {}, {:?}, {}", num_threads, duration, total_num_operations ); } } #[test] fn hashmap_benchmarks() { hashmap_benchmark::<String>(16384, vec![1, 2, 4]); hashmap_benchmark::<usize>(65536, vec![1, 2, 4]); } #[test] fn hashindex_benchmarks() { hashindex_benchmark::<String>(16384, vec![1, 2, 4]); hashindex_benchmark::<usize>(65536, vec![1, 2, 4]); } #[test] fn treeindex_benchmarks() { treeindex_benchmark::<String>(16384, vec![1, 2, 4]); treeindex_benchmark::<usize>(65536, vec![1, 2, 4]); } #[test] #[ignore] fn full_scale_benchmarks() { hashmap_benchmark::<usize>(1024 * 1024 * 128, vec![11, 11, 11, 22, 22, 22, 44, 44, 44]); println!("----"); hashindex_benchmark::<usize>(1024 * 1024 * 4, vec![11, 11, 11, 22, 22, 22, 44, 44, 44]); println!("----"); treeindex_benchmark::<usize>(1024 * 1024 * 4, vec![11, 11, 11, 22, 22, 22, 44, 44, 44]); println!("----"); } }
// Copyright (C) 2015-2021 Swift Navigation Inc. // Contact: https://support.swiftnav.com // // This source is subject to the license found in the file 'LICENSE' which must // be be distributed together with this source. All other rights reserved. // // THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, // EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE. //**************************************************************************** // Automatically generated from yaml/swiftnav/sbp/solution_meta.yaml // with generate.py. Please do not hand edit! //****************************************************************************/ //! Standardized Metadata messages for Fuzed Solution from Swift Navigation //! devices. #[allow(unused_imports)] use std::convert::TryFrom; #[allow(unused_imports)] use byteorder::{LittleEndian, ReadBytesExt}; #[allow(unused_imports)] use crate::serialize::SbpSerialize; #[allow(unused_imports)] use crate::SbpString; /// Instruments the physical type of GNSS sensor input to the fuzed solution /// /// Metadata around the GNSS sensors involved in the fuzed solution. /// Accessible through sol_in[N].flags in a MSG_SOLN_META. /// #[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))] #[derive(Debug, Clone)] #[allow(non_snake_case)] pub struct GNSSInputType { /// flags that store all relevant info specific to this sensor type. 
// NOTE(review): this section appears to be machine-generated SBP (Swift
// Navigation Binary Protocol) bindings. Prefer regenerating from the message
// schema over hand-editing; code below is left token-identical, reformatted,
// with review comments only.
pub flags: u8,
}

impl GNSSInputType {
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<GNSSInputType, crate::Error> {
        Ok( GNSSInputType{
            flags: _buf.read_u8()?,
        } )
    }
    /// Greedily parses instances until the buffer is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<GNSSInputType>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(GNSSInputType::parse(buf)?);
        }
        Ok(v)
    }
    /// Parses exactly `n` instances from the buffer.
    pub fn parse_array_limit(
        buf: &mut &[u8],
        n: usize,
    ) -> Result<Vec<GNSSInputType>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(GNSSInputType::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for GNSSInputType {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.flags.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.flags.sbp_size();
        size
    }
}

/// Provides detail about the IMU sensor, its timestamping mode, and its quality for input to the fuzed solution
///
/// Metadata around the IMU sensors involved in the fuzed solution. Accessible
/// through sol_in[N].flags in a MSG_SOLN_META.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct IMUInputType {
    /// Instrument time, grade, and architecture for a sensor.
    pub flags: u8,
}

impl IMUInputType {
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<IMUInputType, crate::Error> {
        Ok( IMUInputType{
            flags: _buf.read_u8()?,
        } )
    }
    /// Greedily parses instances until the buffer is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<IMUInputType>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(IMUInputType::parse(buf)?);
        }
        Ok(v)
    }
    /// Parses exactly `n` instances from the buffer.
    pub fn parse_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<IMUInputType>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(IMUInputType::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for IMUInputType {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.flags.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.flags.sbp_size();
        size
    }
}

/// Solution Sensors Metadata
///
/// This message contains all metadata about the sensors received and/or used
/// in computing the sensorfusion solution. It focuses primarily, but not
/// only, on GNSS metadata. Regarding the age of the last received valid GNSS
/// solution, the highest two bits are time status, indicating whether age
/// gnss can or can not be used to retrieve time of measurement (noted TOM,
/// also known as time of validity) If it can, subtract 'age gnss' from 'tow'
/// in navigation messages to get TOM. Can be used before alignment is
/// complete in the Fusion Engine, when output solution is the last received
/// valid GNSS solution and its tow is not a TOM.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgSolnMeta {
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// GPS time of week rounded to the nearest millisecond
    pub tow: u32,
    /// Position Dilution of Precision as per last available DOPS from PVT
    /// engine (0xFFFF indicates invalid)
    pub pdop: u16,
    /// Horizontal Dilution of Precision as per last available DOPS from PVT
    /// engine (0xFFFF indicates invalid)
    pub hdop: u16,
    /// Vertical Dilution of Precision as per last available DOPS from PVT
    /// engine (0xFFFF indicates invalid)
    pub vdop: u16,
    /// Age of corrections as per last available AGE_CORRECTIONS from PVT engine
    /// (0xFFFF indicates invalid)
    pub age_corrections: u16,
    /// Age and Time Status of the last received valid GNSS solution.
    pub age_gnss: u32,
    /// Array of Metadata describing the sensors potentially involved in the
    /// solution. Each element in the array represents a single sensor type and
    /// consists of flags containing (meta)data pertaining to that specific
    /// single sensor. Refer to each (XX)InputType descriptor in the present
    /// doc.
    pub sol_in: Vec<SolutionInputType>,
}

impl MsgSolnMeta {
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgSolnMeta, crate::Error> {
        // Field order below defines the wire layout; do not reorder.
        Ok( MsgSolnMeta{
            sender_id: None,
            tow: _buf.read_u32::<LittleEndian>()?,
            pdop: _buf.read_u16::<LittleEndian>()?,
            hdop: _buf.read_u16::<LittleEndian>()?,
            vdop: _buf.read_u16::<LittleEndian>()?,
            age_corrections: _buf.read_u16::<LittleEndian>()?,
            age_gnss: _buf.read_u32::<LittleEndian>()?,
            sol_in: SolutionInputType::parse_array(_buf)?,
        } )
    }
}

impl super::SBPMessage for MsgSolnMeta {
    fn get_message_name(&self) -> &'static str {
        "MSG_SOLN_META"
    }

    fn get_message_type(&self) -> u16 {
        65294
    }

    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }

    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }

    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }

    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }

    #[cfg(feature = "swiftnav-rs")]
    fn gps_time(
        &self,
    ) -> Option<std::result::Result<crate::time::MessageTime, crate::time::GpsTimeError>> {
        // tow is milliseconds; GpsTime wants seconds.
        let tow_s = (self.tow as f64) / 1000.0;
        let gps_time = match crate::time::GpsTime::new(0, tow_s) {
            Ok(gps_time) => gps_time.tow(),
            Err(e) => return Some(Err(e.into())),
        };
        Some(Ok(crate::time::MessageTime::Rover(gps_time.into())))
    }
}

impl super::ConcreteMessage for MsgSolnMeta {
    const MESSAGE_TYPE: u16 = 65294;
    const MESSAGE_NAME: &'static str = "MSG_SOLN_META";
}

impl TryFrom<super::SBP> for MsgSolnMeta {
    type Error = super::TryFromSBPError;
    fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
        match msg {
            super::SBP::MsgSolnMeta(m) => Ok(m),
            _ => Err(super::TryFromSBPError),
        }
    }
}

impl crate::serialize::SbpSerialize for MsgSolnMeta {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.tow.append_to_sbp_buffer(buf);
        self.pdop.append_to_sbp_buffer(buf);
        self.hdop.append_to_sbp_buffer(buf);
        self.vdop.append_to_sbp_buffer(buf);
        self.age_corrections.append_to_sbp_buffer(buf);
        self.age_gnss.append_to_sbp_buffer(buf);
        self.sol_in.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.tow.sbp_size();
        size += self.pdop.sbp_size();
        size += self.hdop.sbp_size();
        size += self.vdop.sbp_size();
        size += self.age_corrections.sbp_size();
        size += self.age_gnss.sbp_size();
        size += self.sol_in.sbp_size();
        size
    }
}

/// Deprecated
///
/// Deprecated.
///
/// This message contains all metadata about the sensors received and/or used
/// in computing the Fuzed Solution. It focuses primarily, but not only, on
/// GNSS metadata.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct MsgSolnMetaDepA {
    #[cfg_attr(feature = "sbp_serde", serde(skip_serializing))]
    pub sender_id: Option<u16>,
    /// Position Dilution of Precision as per last available DOPS from PVT
    /// engine (0xFFFF indicates invalid)
    pub pdop: u16,
    /// Horizontal Dilution of Precision as per last available DOPS from PVT
    /// engine (0xFFFF indicates invalid)
    pub hdop: u16,
    /// Vertical Dilution of Precision as per last available DOPS from PVT
    /// engine (0xFFFF indicates invalid)
    pub vdop: u16,
    /// Number of satellites as per last available solution from PVT engine
    pub n_sats: u8,
    /// Age of corrections as per last available AGE_CORRECTIONS from PVT engine
    /// (0xFFFF indicates invalid)
    pub age_corrections: u16,
    /// State of alignment and the status and receipt of the alignment inputs
    pub alignment_status: u8,
    /// Tow of last-used GNSS position measurement
    pub last_used_gnss_pos_tow: u32,
    /// Tow of last-used GNSS velocity measurement
    pub last_used_gnss_vel_tow: u32,
    /// Array of Metadata describing the sensors potentially involved in the
    /// solution. Each element in the array represents a single sensor type and
    /// consists of flags containing (meta)data pertaining to that specific
    /// single sensor. Refer to each (XX)InputType descriptor in the present
    /// doc.
    pub sol_in: Vec<SolutionInputType>,
}

impl MsgSolnMetaDepA {
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<MsgSolnMetaDepA, crate::Error> {
        // Field order below defines the wire layout; do not reorder.
        Ok( MsgSolnMetaDepA{
            sender_id: None,
            pdop: _buf.read_u16::<LittleEndian>()?,
            hdop: _buf.read_u16::<LittleEndian>()?,
            vdop: _buf.read_u16::<LittleEndian>()?,
            n_sats: _buf.read_u8()?,
            age_corrections: _buf.read_u16::<LittleEndian>()?,
            alignment_status: _buf.read_u8()?,
            last_used_gnss_pos_tow: _buf.read_u32::<LittleEndian>()?,
            last_used_gnss_vel_tow: _buf.read_u32::<LittleEndian>()?,
            sol_in: SolutionInputType::parse_array(_buf)?,
        } )
    }
}

impl super::SBPMessage for MsgSolnMetaDepA {
    fn get_message_name(&self) -> &'static str {
        "MSG_SOLN_META_DEP_A"
    }

    fn get_message_type(&self) -> u16 {
        65295
    }

    fn get_sender_id(&self) -> Option<u16> {
        self.sender_id
    }

    fn set_sender_id(&mut self, new_id: u16) {
        self.sender_id = Some(new_id);
    }

    fn to_frame(&self) -> std::result::Result<Vec<u8>, crate::FramerError> {
        let mut frame = Vec::new();
        self.write_frame(&mut frame)?;
        Ok(frame)
    }

    fn write_frame(&self, frame: &mut Vec<u8>) -> std::result::Result<(), crate::FramerError> {
        crate::write_frame(self, frame)
    }
}

impl super::ConcreteMessage for MsgSolnMetaDepA {
    const MESSAGE_TYPE: u16 = 65295;
    const MESSAGE_NAME: &'static str = "MSG_SOLN_META_DEP_A";
}

impl TryFrom<super::SBP> for MsgSolnMetaDepA {
    type Error = super::TryFromSBPError;
    fn try_from(msg: super::SBP) -> Result<Self, Self::Error> {
        match msg {
            super::SBP::MsgSolnMetaDepA(m) => Ok(m),
            _ => Err(super::TryFromSBPError),
        }
    }
}

impl crate::serialize::SbpSerialize for MsgSolnMetaDepA {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.pdop.append_to_sbp_buffer(buf);
        self.hdop.append_to_sbp_buffer(buf);
        self.vdop.append_to_sbp_buffer(buf);
        self.n_sats.append_to_sbp_buffer(buf);
        self.age_corrections.append_to_sbp_buffer(buf);
        self.alignment_status.append_to_sbp_buffer(buf);
        self.last_used_gnss_pos_tow.append_to_sbp_buffer(buf);
        self.last_used_gnss_vel_tow.append_to_sbp_buffer(buf);
        self.sol_in.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.pdop.sbp_size();
        size += self.hdop.sbp_size();
        size += self.vdop.sbp_size();
        size += self.n_sats.sbp_size();
        size += self.age_corrections.sbp_size();
        size += self.alignment_status.sbp_size();
        size += self.last_used_gnss_pos_tow.sbp_size();
        size += self.last_used_gnss_vel_tow.sbp_size();
        size += self.sol_in.sbp_size();
        size
    }
}

/// Provides detail about the Odometry sensor, its timestamping mode, and its quality for input to the fuzed solution
///
/// Metadata around the Odometry sensors involved in the fuzed solution.
/// Accessible through sol_in[N].flags in a MSG_SOLN_META.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct OdoInputType {
    /// Instrument ODO rate, grade, and quality.
    pub flags: u8,
}

impl OdoInputType {
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<OdoInputType, crate::Error> {
        Ok( OdoInputType{
            flags: _buf.read_u8()?,
        } )
    }
    /// Greedily parses instances until the buffer is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<OdoInputType>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(OdoInputType::parse(buf)?);
        }
        Ok(v)
    }
    /// Parses exactly `n` instances from the buffer.
    pub fn parse_array_limit(buf: &mut &[u8], n: usize) -> Result<Vec<OdoInputType>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(OdoInputType::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for OdoInputType {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.flags.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.flags.sbp_size();
        size
    }
}

/// Flags for a given solution input type
///
/// Metadata describing which sensors were involved in the solution. The
/// structure is fixed no matter what the actual sensor type is. The
/// sensor_type field tells you which sensor we are talking about. It also
/// tells you whether the sensor data was actually used or not. The flags
/// field, always a u8, contains the sensor-specific data. The content of
/// flags, for each sensor type, is described in the relevant structures in
/// this section.
///
#[cfg_attr(feature = "sbp_serde", derive(serde::Serialize))]
#[derive(Debug, Clone)]
#[allow(non_snake_case)]
pub struct SolutionInputType {
    /// The type of sensor
    pub sensor_type: u8,
    /// Refer to each InputType description
    pub flags: u8,
}

impl SolutionInputType {
    #[rustfmt::skip]
    pub fn parse(_buf: &mut &[u8]) -> Result<SolutionInputType, crate::Error> {
        Ok( SolutionInputType{
            sensor_type: _buf.read_u8()?,
            flags: _buf.read_u8()?,
        } )
    }
    /// Greedily parses instances until the buffer is exhausted.
    pub fn parse_array(buf: &mut &[u8]) -> Result<Vec<SolutionInputType>, crate::Error> {
        let mut v = Vec::new();
        while buf.len() > 0 {
            v.push(SolutionInputType::parse(buf)?);
        }
        Ok(v)
    }
    /// Parses exactly `n` instances from the buffer.
    pub fn parse_array_limit(
        buf: &mut &[u8],
        n: usize,
    ) -> Result<Vec<SolutionInputType>, crate::Error> {
        let mut v = Vec::new();
        for _ in 0..n {
            v.push(SolutionInputType::parse(buf)?);
        }
        Ok(v)
    }
}

impl crate::serialize::SbpSerialize for SolutionInputType {
    #[allow(unused_variables)]
    fn append_to_sbp_buffer(&self, buf: &mut Vec<u8>) {
        self.sensor_type.append_to_sbp_buffer(buf);
        self.flags.append_to_sbp_buffer(buf);
    }

    fn sbp_size(&self) -> usize {
        let mut size = 0;
        size += self.sensor_type.sbp_size();
        size += self.flags.sbp_size();
        size
    }
}
/// A single crate entry: an optional name plus a completion flag.
// NOTE(review): exact semantics of `done` are not visible here — confirm
// against the code that populates `Manager::list`.
#[derive(Serialize,Deserialize, PartialEq,Eq, Debug)]
struct CrateName {
    // `None` presumably means "no name recorded yet" — TODO confirm.
    name: Option<String>,
    done: bool,
}

/// Serializable container that maps numeric ids to [`CrateName`] entries,
/// together with bookkeeping counters and a backing file path.
#[derive(Serialize,Deserialize,PartialEq,Eq, Debug)]
pub struct Manager {
    // Keyed by a u32 id; key meaning not visible from this file — verify.
    list: HashMap<u32, CrateName>,
    // NOTE(review): "achivement" is a typo for "achievement", but renaming
    // the field would change the serde-serialized key (no #[serde(rename)]
    // here), so it is deliberately left as-is for format compatibility.
    achivement: u32,
    // Path (or name) of the file this state is loaded from / saved to —
    // presumably; confirm with the I/O code that uses it.
    obtain_file: String,
    number_of_crate: u32,
}
//! An easy to use library for pretty printing tables of Rust `struct`s and `enum`s.
//!
//! The library supports different approaches of table building.
//! You can use the [`Tabled`] trait if the data type is known.
//! Or you can use [`Builder`] to construct the table from scratch.
//!
//! ## Usage
//!
//! If you want to build a table for your custom type,
//! a starting point is to annotate your type with `#[derive(Tabled)]`.
//!
//! Then provide your collection to [`Table::new`] and you will be set to render the table.
//!
#![cfg_attr(all(feature = "derive", feature = "std"), doc = "```")]
#![cfg_attr(not(all(feature = "derive", feature = "std")), doc = "```ignore")]
//! use tabled::{Tabled, Table};
//!
//! #[derive(Tabled)]
//! struct Language {
//!     name: &'static str,
//!     designed_by: &'static str,
//!     invented_year: usize,
//! }
//!
//! let languages = vec![
//!     Language{
//!         name: "C",
//!         designed_by: "Dennis Ritchie",
//!         invented_year: 1972
//!     },
//!     Language{
//!         name: "Rust",
//!         designed_by: "Graydon Hoare",
//!         invented_year: 2010
//!     },
//!     Language{
//!         name: "Go",
//!         designed_by: "Rob Pike",
//!         invented_year: 2009
//!     },
//! ];
//!
//! let table = Table::new(languages).to_string();
//!
//! let expected = "+------+----------------+---------------+\n\
//!                 | name | designed_by    | invented_year |\n\
//!                 +------+----------------+---------------+\n\
//!                 | C    | Dennis Ritchie | 1972          |\n\
//!                 +------+----------------+---------------+\n\
//!                 | Rust | Graydon Hoare  | 2010          |\n\
//!                 +------+----------------+---------------+\n\
//!                 | Go   | Rob Pike       | 2009          |\n\
//!                 +------+----------------+---------------+";
//!
//! assert_eq!(table, expected);
//! ```
//!
//! Not all types can derive the [`Tabled`] trait though.
//! The example below can't be compiled.
//!
//! ```rust,compile_fail
//! # use tabled::Tabled;
//! #[derive(Tabled)]
//! struct SomeType {
//!     field1: SomeOtherType,
//! }
//!
//! struct SomeOtherType;
//! ```
//!
//! This is because `tabled` must know what to print as a field, so
//! (almost) every field must implement [`std::fmt::Display`].
//!
//! ### Default implementations
//!
//! [`Table`] can be built from the vast majority of Rust's standard types.
//! This allows you to run the following code.
//!
#![cfg_attr(feature = "std", doc = "```")]
#![cfg_attr(not(feature = "std"), doc = "```ignore")]
//! use tabled::{Tabled, Table};
//! let table = Table::new(&[1, 2, 3]);
//! # let expected = "+-----+\n\
//! #                 | i32 |\n\
//! #                 +-----+\n\
//! #                 | 1   |\n\
//! #                 +-----+\n\
//! #                 | 2   |\n\
//! #                 +-----+\n\
//! #                 | 3   |\n\
//! #                 +-----+";
//! # assert_eq!(table.to_string(), expected);
//! ```
//!
//! ### Dynamic table
//!
//! When your data scheme is not known at compile time,
//! you most likely will not be able to rely on the [`Tabled`] trait.
//!
//! So one option would be to use [`Builder`].
//!
#![cfg_attr(feature = "std", doc = "```")]
#![cfg_attr(not(feature = "std"), doc = "```ignore")]
//! use std::iter;
//!
//! use tabled::{
//!     builder::Builder,
//!     settings::{Modify, object::Rows, Alignment, Style}
//! };
//!
//! let (x, y) = (3, 10);
//!
//! let mut builder = Builder::default();
//!
//! let header = iter::once(String::from("i"))
//!     .chain((0..y)
//!     .map(|i| i.to_string()));
//! builder.set_header(header);
//!
//! for i in 0..x {
//!     let row = iter::once(i)
//!         .chain((0..y).map(|j| i * j))
//!         .map(|i| i.to_string());
//!     builder.push_record(row);
//! }
//!
//! let table = builder.build()
//!     .with(Style::rounded())
//!     .with(Modify::new(Rows::new(1..)).with(Alignment::left()))
//!     .to_string();
//!
//! assert_eq!(
//!     table,
//!     concat!(
//!         "╭───┬───┬───┬───┬───┬───┬────┬────┬────┬────┬────╮\n",
//!         "│ i │ 0 │ 1 │ 2 │ 3 │ 4 │ 5  │ 6  │ 7  │ 8  │ 9  │\n",
//!         "├───┼───┼───┼───┼───┼───┼────┼────┼────┼────┼────┤\n",
//!         "│ 0 │ 0 │ 0 │ 0 │ 0 │ 0 │ 0  │ 0  │ 0  │ 0  │ 0  │\n",
//!         "│ 1 │ 0 │ 1 │ 2 │ 3 │ 4 │ 5  │ 6  │ 7  │ 8  │ 9  │\n",
//!         "│ 2 │ 0 │ 2 │ 4 │ 6 │ 8 │ 10 │ 12 │ 14 │ 16 │ 18 │\n",
//!         "╰───┴───┴───┴───┴───┴───┴────┴────┴────┴────┴────╯",
//!     )
//! );
//! ```
//!
//! ### Build table using [`row!`] and [`col!`] macros.
//!
#![cfg_attr(all(feature = "macros", feature = "std"), doc = "```")]
#![cfg_attr(not(all(feature = "macros", feature = "std")), doc = "```ignore")]
//! use tabled::{row, col};
//!
//! let table = row![
//!     col!["Hello", "World", "!"],
//!     col!["Hello"; 3],
//!     col!["World"; 3],
//! ];
//!
//! assert_eq!(
//!     table.to_string(),
//!     concat!(
//!         "+-----------+-----------+-----------+\n",
//!         "| +-------+ | +-------+ | +-------+ |\n",
//!         "| | Hello | | | Hello | | | World | |\n",
//!         "| +-------+ | +-------+ | +-------+ |\n",
//!         "| | World | | | Hello | | | World | |\n",
//!         "| +-------+ | +-------+ | +-------+ |\n",
//!         "| | !     | | | Hello | | | World | |\n",
//!         "| +-------+ | +-------+ | +-------+ |\n",
//!         "+-----------+-----------+-----------+",
//!     )
//! );
//! ```
//!
//! ### Settings
//!
//! You can use many settings which are found in the [`tabled::settings`] module.
//!
//! # Advanced
//!
//! ## Alloc
//!
//! [`Table`] keeps data buffered, which is sometimes not an ideal choice.
//! For such a reason there is [`IterTable`] and [`CompactTable`].
//!
//! ### Less allocations
//!
//! [`IterTable`] stands on a middle ground between [`Table`] and [`CompactTable`].
//!
//! It does allocate memory but in much smaller chunks than a [`Table`] does.
//! The benefit is that it can be used interchangeably with [`Table`].
//!
#![cfg_attr(feature = "std", doc = "```")]
#![cfg_attr(not(feature = "std"), doc = "```ignore")]
//! use tabled::tables::IterTable;
//!
//! let iterator = (0..3).map(|row| (0..4).map(move |col| format!("{}-{}", row, col)));
//!
//! let table = IterTable::new(iterator).to_string();
//!
//! assert_eq!(
//!     table,
//!     "+-----+-----+-----+-----+\n\
//!      | 0-0 | 0-1 | 0-2 | 0-3 |\n\
//!      +-----+-----+-----+-----+\n\
//!      | 1-0 | 1-1 | 1-2 | 1-3 |\n\
//!      +-----+-----+-----+-----+\n\
//!      | 2-0 | 2-1 | 2-2 | 2-3 |\n\
//!      +-----+-----+-----+-----+",
//! );
//! ```
//!
//! ## Alloc free (`#nostd`)
//!
//! [`CompactTable`] can be configured ('1) to not make any allocations.
//! But the price is that the set of settings which can be applied to it is limited.
//!
//! It also can be printed directly to [`fmt::Write`] to not have any intermediaries.
//!
//! '1. It does not make any allocations in case you provide it with `width` and `count_rows`.
//!
//! ```
//! use tabled::{settings::Style, tables::CompactTable};
//! use core::fmt::{Write, Result};
//!
//! struct StubWriter;
//!
//! impl Write for StubWriter {
//!     fn write_str(&mut self, _: &str) -> Result {
//!         Ok(())
//!     }
//! }
//!
//! let data = [
//!     ["FreeBSD", "1993", "William and Lynne Jolitz", "?"],
//!     ["OpenBSD", "1995", "Theo de Raadt", ""],
//!     ["HardenedBSD", "2014", "Oliver Pinter and Shawn Webb", ""],
//! ];
//!
//! let table = CompactTable::from(data).with(Style::psql());
//!
//! table.fmt(StubWriter);
//! ```
//!
//! ## More information
//!
//! You can find more examples of settings and attributes in
//! [README.md](https://github.com/zhiburt/tabled/blob/master/README.md)
//!
//! [`Builder`]: crate::builder::Builder
//! [`IterTable`]: crate::tables::IterTable
//! [`CompactTable`]: crate::tables::CompactTable
//! [`fmt::Write`]: core::fmt::Write
//! [`row!`]: crate::row
//! [`col!`]: crate::col

#![cfg_attr(not(any(feature = "std", test)), no_std)]
#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
#![doc(
    html_logo_url = "https://raw.githubusercontent.com/zhiburt/tabled/86ac146e532ce9f7626608d7fd05072123603a2e/assets/tabled-gear.svg"
)]
#![deny(unused_must_use)]
#![warn(
    missing_docs,
    rust_2018_idioms,
    rust_2018_compatibility,
    missing_debug_implementations,
    unreachable_pub,
    future_incompatible,
    single_use_lifetimes,
    trivial_casts,
    trivial_numeric_casts,
    unused_extern_crates,
    unused_import_braces,
    unused_qualifications,
    unused_results,
    unused_variables,
    variant_size_differences
)]
#![allow(clippy::uninlined_format_args)]

#[cfg(feature = "std")]
mod tabled;

#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub mod builder;
pub mod settings;
pub mod tables;

#[cfg(feature = "macros")]
#[cfg_attr(docsrs, doc(cfg(feature = "macros")))]
pub mod macros;

pub mod grid;

#[cfg(feature = "std")]
#[cfg_attr(docsrs, doc(cfg(feature = "std")))]
pub use crate::{tabled::Tabled, tables::Table};

/// A derive to implement a [`Tabled`] trait.
///
/// The macro is available only when the `derive` feature is turned on (and it is by default).
///
/// To be able to use the derive each field must implement `std::fmt::Display`.
/// The following example will cause an error because of that.
///
/// ```rust,compile_fail
/// use tabled::Tabled;
/// #[derive(Tabled)]
/// struct SomeType {
///     field1: SomeOtherType,
/// }
///
/// struct SomeOtherType;
/// ```
///
/// Below you'll find the available options for it.
///
/// ### Override a column name
///
/// You can use a `#[tabled(rename = "")]` attribute to override a column name.
///
/// ```rust,no_run
/// use tabled::Tabled;
///
/// #[derive(Tabled)]
/// struct Person {
///     #[tabled(rename = "Name")]
///     first_name: &'static str,
///     #[tabled(rename = "Surname")]
///     last_name: &'static str,
/// }
/// ```
///
/// ### Hide a column
///
/// You can mark fields as hidden in which case they will be ignored and not be present on a sheet.
///
/// A similar effect could be achieved by the means of a `Disable` setting.
///
/// ```rust,no_run
/// use tabled::Tabled;
///
/// #[derive(Tabled)]
/// struct Person {
///    id: u8,
///    #[tabled(skip)]
///    number: &'static str,
///    name: &'static str,
/// }
/// ```
///
/// ### Set column order
///
/// You can change the order in which they will be displayed in the table.
///
/// ```rust,no_run
/// use tabled::Tabled;
///
/// #[derive(Tabled)]
/// struct Person {
///    id: u8,
///    #[tabled(order = 0)]
///    number: &'static str,
///    #[tabled(order = 1)]
///    name: &'static str,
/// }
/// ```
///
/// ### Format fields
///
/// As was said already, using `#[derive(Tabled)]` is possible only when all fields implement a `Display` trait.
/// However, this is often not the case, for example when a field uses the `Option` type. There are 2 common ways to solve this:
///
/// - Implement the `Tabled` trait manually for a type.
/// - Wrap `Option` in something like `DisplayedOption<T>(Option<T>)` and implement a Display trait for it.
///
/// Alternatively, you can use the `#[tabled(display_with = "func")]` attribute for the field to specify a display function.
///
/// ```rust,no_run
/// use tabled::Tabled;
///
/// #[derive(Tabled)]
/// pub struct MyRecord {
///     pub id: i64,
///     #[tabled(display_with = "display_option")]
///     pub valid: Option<bool>
/// }
///
/// fn display_option(o: &Option<bool>) -> String {
///     match o {
///         Some(s) => format!("is valid thing = {}", s),
///         None => format!("is not valid"),
///     }
/// }
/// ```
///
/// It's also possible to change the function argument to be `&self`,
/// using `#[tabled(display_with("some_function", self))]`
///
/// ```rust,no_run
/// use tabled::Tabled;
///
/// #[derive(Tabled)]
/// pub struct MyRecord {
///     pub id: i64,
///     #[tabled(display_with("Self::display_valid", self))]
///     pub valid: Option<bool>
/// }
///
/// impl MyRecord {
///     fn display_valid(&self) -> String {
///         match self.valid {
///             Some(s) => format!("is valid thing = {}", s),
///             None => format!("is not valid"),
///         }
///     }
/// }
/// ```
///
/// ### Format headers
///
/// Beside `#[tabled(rename = "")]` you can change the format of a column name using
/// `#[tabled(rename_all = "UPPERCASE")]`.
///
/// ```rust,no_run
/// use tabled::Tabled;
///
/// #[derive(Tabled)]
/// #[tabled(rename_all = "CamelCase")]
/// struct Person {
///     id: u8,
///     number: &'static str,
///     name: &'static str,
///     #[tabled(rename_all = "snake_case")]
///     middle_name: &'static str,
/// }
/// ```
///
/// ### Inline
///
/// It's possible to inline internal data if it implements the `Tabled` trait using `#[tabled(inline)]`.
/// You can also set a prefix which will be used for all inlined elements by `#[tabled(inline("prefix>>"))]`.
///
/// ```rust,no_run
/// use tabled::Tabled;
///
/// #[derive(Tabled)]
/// struct Person {
///     id: u8,
///     name: &'static str,
///     #[tabled(inline)]
///     ed: Education,
/// }
///
/// #[derive(Tabled)]
/// struct Education {
///     uni: &'static str,
///     graduated: bool,
/// }
/// ```
///
/// And it works for enums as well.
///
/// ```rust,no_run
/// use tabled::Tabled;
///
/// #[derive(Tabled)]
/// enum Vehicle {
///     #[tabled(inline("Auto::"))]
///     Auto {
///         model: &'static str,
///         engine: &'static str,
///     },
///     #[tabled(inline)]
///     Bikecycle(
///         &'static str,
///         #[tabled(inline)] Bike,
///     ),
/// }
///
/// #[derive(Tabled)]
/// struct Bike {
///     brand: &'static str,
///     price: f32,
/// }
/// ```
#[cfg(feature = "derive")]
#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
pub use tabled_derive::Tabled;
use chatbox_lib::ChatBox;
use futures::executor;
use tokio::stream::{Stream, StreamExt};
use tokio::stream::iter;

mod reader;

use stream_u8_lib::StreamReader;

/// Entry point: drives a `StreamReader` over a small fixture and checks the
/// first chunk it yields, blocking the current thread until completion.
fn main() {
    // Underscore-prefixed because the value is never read afterwards; kept
    // (rather than deleted) in case construction has side effects — TODO
    // confirm against chatbox_lib. The previously unused `vector` local
    // (`vec![1, 2, 3]`, side-effect free) has been removed outright.
    let _cb = ChatBox::<String>::new();

    let stream_ft = async {
        // NOTE(review): `new(data, 5)` presumably takes a chunk size, yet the
        // first yielded chunk is `vec![1, 2]` — verify the meaning of the
        // second argument against stream_u8_lib's documentation.
        let mut stream = StreamReader::<u32>::new(vec![1, 2, 3, 4, 5], 5);
        assert_eq!(stream.next().await, Some(vec![1, 2]));
    };
    // Run the async block to completion on this thread.
    executor::block_on(stream_ft);
}
// NOTE(review): svd2rust-style generated register accessors for ALTPADCFGC
// (pad slew-rate / drive-strength fields for pads 8..11). Prefer regenerating
// from the SVD file over hand-editing; code below is token-identical,
// reformatted, with review comments only.
#[doc = r" Value read from the register"]
pub struct R {
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    bits: u32,
}
impl super::ALTPADCFGC {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: seed W with the current hardware value.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike modify(), write() starts from the reset value.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}

// --- Read-side field views -------------------------------------------------

#[doc = "Possible values of the field `PAD11_SR`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD11_SRR {
    #[doc = "Enables Slew rate control on pad value."]
    SR_EN,
    #[doc = r" Reserved"]
    _Reserved(bool),
}
impl PAD11_SRR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        match *self {
            PAD11_SRR::SR_EN => true,
            PAD11_SRR::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD11_SRR {
        match value {
            true => PAD11_SRR::SR_EN,
            i => PAD11_SRR::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `SR_EN`"]
    #[inline]
    pub fn is_sr_en(&self) -> bool {
        *self == PAD11_SRR::SR_EN
    }
}
#[doc = r" Value of the field"]
pub struct PAD11_DS1R {
    bits: bool,
}
impl PAD11_DS1R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = "Possible values of the field `PAD10_SR`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD10_SRR {
    #[doc = "Enables Slew rate control on pad value."]
    SR_EN,
    #[doc = r" Reserved"]
    _Reserved(bool),
}
impl PAD10_SRR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        match *self {
            PAD10_SRR::SR_EN => true,
            PAD10_SRR::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD10_SRR {
        match value {
            true => PAD10_SRR::SR_EN,
            i => PAD10_SRR::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `SR_EN`"]
    #[inline]
    pub fn is_sr_en(&self) -> bool {
        *self == PAD10_SRR::SR_EN
    }
}
#[doc = r" Value of the field"]
pub struct PAD10_DS1R {
    bits: bool,
}
impl PAD10_DS1R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = "Possible values of the field `PAD9_SR`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD9_SRR {
    #[doc = "Enables Slew rate control on pad value."]
    SR_EN,
    #[doc = r" Reserved"]
    _Reserved(bool),
}
impl PAD9_SRR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        match *self {
            PAD9_SRR::SR_EN => true,
            PAD9_SRR::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD9_SRR {
        match value {
            true => PAD9_SRR::SR_EN,
            i => PAD9_SRR::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `SR_EN`"]
    #[inline]
    pub fn is_sr_en(&self) -> bool {
        *self == PAD9_SRR::SR_EN
    }
}
#[doc = r" Value of the field"]
pub struct PAD9_DS1R {
    bits: bool,
}
impl PAD9_DS1R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = "Possible values of the field `PAD8_SR`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PAD8_SRR {
    #[doc = "Enables Slew rate control on pad value."]
    SR_EN,
    #[doc = r" Reserved"]
    _Reserved(bool),
}
impl PAD8_SRR {
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        match *self {
            PAD8_SRR::SR_EN => true,
            PAD8_SRR::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: bool) -> PAD8_SRR {
        match value {
            true => PAD8_SRR::SR_EN,
            i => PAD8_SRR::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `SR_EN`"]
    #[inline]
    pub fn is_sr_en(&self) -> bool {
        *self == PAD8_SRR::SR_EN
    }
}
#[doc = r" Value of the field"]
pub struct PAD8_DS1R {
    bits: bool,
}
impl PAD8_DS1R {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}

// --- Write-side field proxies (bit offsets: PAD11_SR=28, PAD11_DS1=24,
// PAD10_SR=20, PAD10_DS1=16, PAD9_SR=12, PAD9_DS1=8, PAD8_SR=4) ------------

#[doc = "Values that can be written to the field `PAD11_SR`"]
pub enum PAD11_SRW {
    #[doc = "Enables Slew rate control on pad value."]
    SR_EN,
}
impl PAD11_SRW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        match *self {
            PAD11_SRW::SR_EN => true,
        }
    }
}
#[doc = r" Proxy"]
pub struct _PAD11_SRW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD11_SRW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD11_SRW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Enables Slew rate control on pad value."]
    #[inline]
    pub fn sr_en(self) -> &'a mut W {
        self.variant(PAD11_SRW::SR_EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 28;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _PAD11_DS1W<'a> {
    w: &'a mut W,
}
impl<'a> _PAD11_DS1W<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 24;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD10_SR`"]
pub enum PAD10_SRW {
    #[doc = "Enables Slew rate control on pad value."]
    SR_EN,
}
impl PAD10_SRW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        match *self {
            PAD10_SRW::SR_EN => true,
        }
    }
}
#[doc = r" Proxy"]
pub struct _PAD10_SRW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD10_SRW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD10_SRW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Enables Slew rate control on pad value."]
    #[inline]
    pub fn sr_en(self) -> &'a mut W {
        self.variant(PAD10_SRW::SR_EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 20;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _PAD10_DS1W<'a> {
    w: &'a mut W,
}
impl<'a> _PAD10_DS1W<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 16;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD9_SR`"]
pub enum PAD9_SRW {
    #[doc = "Enables Slew rate control on pad value."]
    SR_EN,
}
impl PAD9_SRW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        match *self {
            PAD9_SRW::SR_EN => true,
        }
    }
}
#[doc = r" Proxy"]
pub struct _PAD9_SRW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD9_SRW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD9_SRW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Enables Slew rate control on pad value."]
    #[inline]
    pub fn sr_en(self) -> &'a mut W {
        self.variant(PAD9_SRW::SR_EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 12;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _PAD9_DS1W<'a> {
    w: &'a mut W,
}
impl<'a> _PAD9_DS1W<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 8;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `PAD8_SR`"]
pub enum PAD8_SRW {
    #[doc = "Enables Slew rate control on pad value."]
    SR_EN,
}
impl PAD8_SRW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        match *self {
            PAD8_SRW::SR_EN => true,
        }
    }
}
#[doc = r" Proxy"]
pub struct _PAD8_SRW<'a> {
    w: &'a mut W,
}
impl<'a> _PAD8_SRW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PAD8_SRW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Enables Slew rate control on pad value."]
    #[inline]
    pub fn sr_en(self) -> &'a mut W {
        self.variant(PAD8_SRW::SR_EN)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 4;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _PAD8_DS1W<'a> {
    w: &'a mut W,
}
// NOTE(review): the definition below continues past the end of this chunk;
// its truncated tail is preserved verbatim.
impl<'a> _PAD8_DS1W<'a> {
    #[doc = r" Sets the field bit"]
    pub fn
set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 28 - Pad 11 slew rate selection."] #[inline] pub fn pad11_sr(&self) -> PAD11_SRR { PAD11_SRR::_from({ const MASK: bool = true; const OFFSET: u8 = 28; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 24 - Pad 11 high order drive strength selection. Used in conjunction with PAD11STRNG field to set the pad drive strength."] #[inline] pub fn pad11_ds1(&self) -> PAD11_DS1R { let bits = { const MASK: bool = true; const OFFSET: u8 = 24; ((self.bits >> OFFSET) & MASK as u32) != 0 }; PAD11_DS1R { bits } } #[doc = "Bit 20 - Pad 10 slew rate selection."] #[inline] pub fn pad10_sr(&self) -> PAD10_SRR { PAD10_SRR::_from({ const MASK: bool = true; const OFFSET: u8 = 20; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 16 - Pad 10 high order drive strength selection. Used in conjunction with PAD10STRNG field to set the pad drive strength."] #[inline] pub fn pad10_ds1(&self) -> PAD10_DS1R { let bits = { const MASK: bool = true; const OFFSET: u8 = 16; ((self.bits >> OFFSET) & MASK as u32) != 0 }; PAD10_DS1R { bits } } #[doc = "Bit 12 - Pad 9 slew rate selection."] #[inline] pub fn pad9_sr(&self) -> PAD9_SRR { PAD9_SRR::_from({ const MASK: bool = true; const OFFSET: u8 = 12; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 8 - Pad 9 high order drive strength selection. 
Used in conjunction with PAD9STRNG field to set the pad drive strength."] #[inline] pub fn pad9_ds1(&self) -> PAD9_DS1R { let bits = { const MASK: bool = true; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) != 0 }; PAD9_DS1R { bits } } #[doc = "Bit 4 - Pad 8 slew rate selection."] #[inline] pub fn pad8_sr(&self) -> PAD8_SRR { PAD8_SRR::_from({ const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 0 - Pad 8 high order drive strength selection. Used in conjunction with PAD8STRNG field to set the pad drive strength."] #[inline] pub fn pad8_ds1(&self) -> PAD8_DS1R { let bits = { const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) != 0 }; PAD8_DS1R { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 28 - Pad 11 slew rate selection."] #[inline] pub fn pad11_sr(&mut self) -> _PAD11_SRW { _PAD11_SRW { w: self } } #[doc = "Bit 24 - Pad 11 high order drive strength selection. Used in conjunction with PAD11STRNG field to set the pad drive strength."] #[inline] pub fn pad11_ds1(&mut self) -> _PAD11_DS1W { _PAD11_DS1W { w: self } } #[doc = "Bit 20 - Pad 10 slew rate selection."] #[inline] pub fn pad10_sr(&mut self) -> _PAD10_SRW { _PAD10_SRW { w: self } } #[doc = "Bit 16 - Pad 10 high order drive strength selection. Used in conjunction with PAD10STRNG field to set the pad drive strength."] #[inline] pub fn pad10_ds1(&mut self) -> _PAD10_DS1W { _PAD10_DS1W { w: self } } #[doc = "Bit 12 - Pad 9 slew rate selection."] #[inline] pub fn pad9_sr(&mut self) -> _PAD9_SRW { _PAD9_SRW { w: self } } #[doc = "Bit 8 - Pad 9 high order drive strength selection. 
Used in conjunction with PAD9STRNG field to set the pad drive strength."] #[inline] pub fn pad9_ds1(&mut self) -> _PAD9_DS1W { _PAD9_DS1W { w: self } } #[doc = "Bit 4 - Pad 8 slew rate selection."] #[inline] pub fn pad8_sr(&mut self) -> _PAD8_SRW { _PAD8_SRW { w: self } } #[doc = "Bit 0 - Pad 8 high order drive strength selection. Used in conjunction with PAD8STRNG field to set the pad drive strength."] #[inline] pub fn pad8_ds1(&mut self) -> _PAD8_DS1W { _PAD8_DS1W { w: self } } }
// Integration tests for three config-loading strategies (dynamic, dependent,
// static), each implemented in its own file under `config/`.
#[path = "config/dynamic.rs"]
mod dynamic;
#[path = "config/dependent.rs"]
mod dependent;
#[path = "config/static_config.rs"]
mod static_config;

// RAII guard: backs up the example config on creation and restores it on drop,
// so a test that rewrites the file cannot leave it modified (restore also runs
// on unwinding panics, e.g. a failed assert).
struct Cleanup;

impl Cleanup {
    fn new() -> Self {
        std::fs::copy(
            "tests/temp/example_config.json",
            "tests/temp/example_config.json.backup",
        )
        .unwrap();
        Cleanup
    }
}

impl Drop for Cleanup {
    fn drop(&mut self) {
        std::fs::rename(
            "tests/temp/example_config.json.backup",
            "tests/temp/example_config.json",
        )
        .unwrap();
    }
}

// Debug and release builds are expected to differ for the "dependent"
// strategy, hence two cfg-gated entry points with different expected names.
#[test]
#[cfg(debug_assertions)]
fn test_in_debug_mode() {
    run_loading_tests("Renamed Config", "Alternate Config");
}

#[test]
#[cfg(not(debug_assertions))]
fn test_in_release_mode() {
    run_loading_tests("Example Config", "Example Config");
}

// Shared test body. `dependent_renamed` / `dependent_alternate` are the names
// the dependent strategy is expected to report after the file is rewritten /
// loaded from an alternate path (these differ between debug and release).
fn run_loading_tests(dependent_renamed: &str, dependent_alternate: &str) {
    let _cleanup = Cleanup::new();

    // Test loading
    let dynamic_conf = dynamic::DynamicConfig::load();
    let dependent_conf = dependent::DependentConfig::load();
    let static_conf = static_config::StaticConfig::load();

    assert_eq!(dynamic_conf.name, "Example Config");
    assert_eq!(dependent_conf.name, "Example Config");
    assert_eq!(static_conf.name, "Example Config");

    // Rewrite the config on disk so the reload below sees a changed file.
    std::fs::write(
        "tests/temp/example_config.json",
        br#"{ "name": "Renamed Config" }"#,
    )
    .unwrap();

    // Test reloading with changes
    let dynamic_conf = dynamic::DynamicConfig::load();
    let dependent_conf = dependent::DependentConfig::load();
    let static_conf = static_config::StaticConfig::load();

    assert_eq!(dynamic_conf.name, "Renamed Config");
    assert_eq!(dependent_conf.name, dependent_renamed);
    // NOTE(review): the "static" strategy evidently captures the config at
    // build time — it keeps reporting the original name throughout this test.
    assert_eq!(static_conf.name, "Example Config");

    // Test loading from a file
    let dynamic_conf =
        dynamic::DynamicConfig::load_from("tests/temp/alternate_config.json".as_ref()).unwrap();
    let dependent_conf =
        dependent::DependentConfig::load_from("tests/temp/alternate_config.json".as_ref()).unwrap();
    let static_conf =
        static_config::StaticConfig::load_from("tests/temp/alternate_config.json".as_ref())
            .unwrap();

    assert_eq!(dynamic_conf.name, "Alternate Config");
    assert_eq!(dependent_conf.name, dependent_alternate);
    assert_eq!(static_conf.name, "Example Config");
}
#[macro_use] extern crate rustlearn; fn main() { let value = fmt!(100); println!("{}", value); }
use problem::{Problem, solve};

/// Advent of Code day 10: joltage adapter chains.
struct Day10;

/// Builds the full adapter chain both parts operate on: the input adapters
/// plus the charging outlet (0) and the built-in device adapter (max + 3),
/// sorted ascending.
fn prepare(input: &[i32]) -> Vec<i32> {
    let mut sorted = input.to_vec();
    sorted.push(0);
    // Plain integers — an unstable sort is sufficient and avoids allocation.
    sorted.sort_unstable();
    sorted.push(sorted[sorted.len() - 1] + 3);
    sorted
}

impl Problem for Day10 {
    type Input = Vec<i32>;
    type Part1Output = u32;
    type Part2Output = u64;
    type Error = ();

    /// Counts the 1-jolt and 3-jolt gaps in the sorted chain and returns
    /// their product.
    fn part_1(input: &Self::Input) -> Result<Self::Part1Output, Self::Error> {
        let sorted = prepare(input);
        let mut count_1: u32 = 0;
        let mut count_3: u32 = 0;
        for pair in sorted.windows(2) {
            match pair[1] - pair[0] {
                1 => count_1 += 1,
                3 => count_3 += 1,
                _ => (),
            }
        }
        Ok(count_1 * count_3)
    }

    /// Counts distinct adapter arrangements via dynamic programming:
    /// `counts[i]` = number of ways to reach adapter `i` from the outlet.
    fn part_2(input: &Self::Input) -> Result<Self::Part2Output, Self::Error> {
        let sorted = prepare(input);
        let mut counts = vec![0u64; sorted.len()];
        counts[0] = 1;
        for i in 1..sorted.len() {
            // Only predecessors within 3 jolts can reach `i`. Walking `j`
            // backwards lets us stop at the first gap larger than 3 instead
            // of scanning all of 0..i — the original inner loop made part 2
            // accidentally quadratic. (The `.abs()` in the original was
            // redundant: the slice is sorted ascending, so the difference is
            // never negative for j < i.)
            for j in (0..i).rev() {
                if sorted[i] - sorted[j] > 3 {
                    break;
                }
                counts[i] += counts[j];
            }
        }
        Ok(counts[sorted.len() - 1])
    }
}

fn main() {
    solve::<Day10>("input").unwrap();
}
// Ground-station style command server: accepts TCP connections, reads CCSDS-style
// space packets, and dispatches them by APID to worker threads (housekeeping,
// payload, telemetry, flag) over mpsc channels. APID 7 is a command channel
// decoded in `process_commands`.
use std::net::{TcpListener};
use std::{thread};
use std::sync::mpsc::channel;
use std::env;

extern crate byteorder;
extern crate toml;
extern crate serde_derive;
extern crate serde_json;
extern crate crc;

mod housekeeping;
mod payload;
mod flag;
mod telemetry;
mod config;
use config::Config as Config;
mod spacepacket;
use crate::spacepacket::SpacePacket;

// Command 0: extract a destination string from the packet payload (bytes after
// the opcode). NOTE(review): the send of `DestinationCommand` is commented out,
// so this handler currently parses the destination and discards it — confirm
// whether that is intentional.
fn handle_command_one(_tx: std::sync::mpsc::Sender<telemetry::TlmCmds>, packet: spacepacket::SpacePacket) {
    let _destination = String::from_utf8_lossy( &packet.payload[1..]);
    // eprintln!("destination: {}", destination);
    // let _result = tx.send( telemetry::TlmCmds::DestinationCommand{ addr: destination.to_string()});
}

// Command 1: ask the housekeeping thread to sleep for 30 (units defined by the
// receiving thread).
fn handle_command_two(tx: std::sync::mpsc::Sender<housekeeping::HkCmds>, _packet: SpacePacket) {
    let _result = tx.send(housekeeping::HkCmds::SleepCommand{interval: 30});
}

// Command 2: ask the payload thread to sleep for 30.
fn handle_command_three(tx: std::sync::mpsc::Sender<payload::PayloadCmds>, _packet: spacepacket::SpacePacket) {
    let _result = tx.send(payload::PayloadCmds::SleepCommand{interval: 30});
}

// Command 3 (also the fallback for unknown opcodes): shut down the telemetry thread.
fn handle_command_four(tx: std::sync::mpsc::Sender<telemetry::TlmCmds>, _packet: spacepacket::SpacePacket) {
    let _result = tx.send( telemetry::TlmCmds::ShutdownCommand);
}

// Command 5: ask the telemetry thread to sleep for 30.
fn handle_command_five(tx: std::sync::mpsc::Sender<telemetry::TlmCmds>, _packet: spacepacket::SpacePacket) {
    let _result = tx.send( telemetry::TlmCmds::SleepCommand{interval: 30});
}

// Command 6: ask the flag thread to sleep for 30.
fn handle_command_six(tx: std::sync::mpsc::Sender<flag::FlagCmds>, _packet: spacepacket::SpacePacket) {
    let _result = tx.send( flag::FlagCmds::SleepCommand{interval: 30});
}

// gently shutdown the app by telling all the threads to end
// (send results are deliberately ignored: a dead receiver just means that
// thread is already gone).
fn shutdown_app( hk_send: std::sync::mpsc::Sender<housekeeping::HkCmds>,
                 pl_send: std::sync::mpsc::Sender<payload::PayloadCmds>,
                 flag_send: std::sync::mpsc::Sender<flag::FlagCmds>,
                 tlm_send: std::sync::mpsc::Sender<telemetry::TlmCmds> ) {
    let _result = hk_send.send( housekeeping::HkCmds::ShutdownCommand);
    let _result = pl_send.send( payload::PayloadCmds::ShutdownCommand);
    let _result = flag_send.send( flag::FlagCmds::ShutdownCommand);
    let _result = tlm_send.send( telemetry::TlmCmds::ShutdownCommand);
}

// simple timeout thread that will forcefully terminate the app when it expires
fn do_timeout( timeout: u32 ) {
    std::thread::sleep(std::time::Duration::new(timeout as u64,0));
    eprintln!("Its time to die");
    std::process::exit(0);
}

// Accept loop. Note the `*rx` parameter names are actually Senders (the worker
// threads hold the receiving ends).
fn process_commands(config: Config,
                    hkrx: std::sync::mpsc::Sender<housekeeping::HkCmds>,
                    tlmrx: std::sync::mpsc::Sender<telemetry::TlmCmds>,
                    plrx: std::sync::mpsc::Sender<payload::PayloadCmds> ,
                    flagrx: std::sync::mpsc::Sender<flag::FlagCmds> ) {
    // SERVICE_HOST / SERVICE_PORT only affect the address *printed* for
    // clients; the listener itself is fixed to 0.0.0.0:5063 below.
    let key = "SERVICE_HOST";
    let host = match env::var_os(key) {
        Some(val) => val.into_string().unwrap(),
        None => String::from("0.0.0.0"),
    };
    let key = "SERVICE_PORT";
    let port = match env::var_os(key) {
        Some(val) => val.into_string().unwrap(),
        None => String::from("4321"),
    };
    // tell the competitor where the docker container will be listening
    let bind_str = format!("{}:{}", host, port);
    // but the service will be listening inside on port 5063
    let listener = TcpListener::bind("0.0.0.0:5063").expect("couldn't listen on that port");
    let hk_apid = config.housekeeping.apid;
    let pl_apid = config.payload.apid;
    let tlm_apid = config.telemetry.apid;
    let flag_apid = config.flag.apid;
    println!("Connect to = {}", bind_str);
    loop {
        let (mut stream, _addr) = listener.accept().expect("an error with the accept call");
        // Route telemetry output back over this freshly accepted connection.
        let _result = tlmrx.send( telemetry::TlmCmds::DestinationCommand{ addr: stream.try_clone().unwrap()});
        loop {
            // Read packets until `read_space_packet` fails (peer disconnect or
            // framing error — not distinguishable here).
            let sp = match spacepacket::read_space_packet(&mut stream) {
                Ok(n) => n,
                Err(_e) => break,
            };
            if sp.version != 0 {
                eprintln!("Bad space packet version");
                continue;
            }
            let bytes = sp.to_vec();
            // Dispatch on APID: subsystem APIDs get the raw packet bytes;
            // APID 7 carries the opcode-based commands handled below.
            if sp.apid == hk_apid {
                eprintln!("got a task for housekeeping apid: {}", hk_apid);
                let _result = hkrx.send(housekeeping::HkCmds::RawCommand{raw: bytes});
            } else if sp.apid == pl_apid {
                eprintln!("got a task for payload {}", pl_apid);
                let _result = plrx.send(payload::PayloadCmds::RawCommand{raw: bytes});
            } else if sp.apid == tlm_apid {
                eprintln!("got a task for the tlm");
                let _result = tlmrx.send(telemetry::TlmCmds::RawCommand{raw: bytes});
            } else if sp.apid == flag_apid {
                eprintln!("got a task for the flag");
                let _result = flagrx.send(flag::FlagCmds::RawCommand{raw: bytes});
            } else if sp.apid == 7 {
                match sp.payload[0] {
                    0 => handle_command_one( tlmrx.clone(), sp ),
                    1 => handle_command_two( hkrx.clone(), sp ),
                    2 => handle_command_three( plrx.clone(), sp ),
                    3 => handle_command_four( tlmrx.clone(), sp ),
                    // Opcode 4: full app shutdown — notify every thread and
                    // leave the accept loop for good.
                    4 => { shutdown_app( hkrx, plrx, flagrx, tlmrx); return; },
                    5 => handle_command_five( tlmrx.clone(), sp),
                    6 => handle_command_six( flagrx.clone(), sp),
                    _ => handle_command_four( tlmrx.clone(), sp),
                }
            } else {
                eprintln!("Bad APID ignored");
            }
        }
    }
}

fn main() {
    let config = match config::load_config() {
        Some(n) => n,
        None => return,
    };
    // config.save_config();

    // Watchdog: TIMEOUT env var (seconds, default 180) after which the whole
    // process is force-exited by `do_timeout`.
    let key = "TIMEOUT";
    let timeout_str = match env::var_os(key) {
        Some(val) => val.into_string().unwrap(),
        None => String::from("180"),
    };
    let timeout = timeout_str.parse::<u32>().unwrap();
    // eprintln!("Timeout = {}", timeout);
    let _timeout_handle = thread::spawn(move || { do_timeout(timeout); });

    // Telemetry thread, plus the shared channel the other workers use to feed it.
    let (sendtlmtx, sendtlmrx) = channel();
    let (to_tlmtx, to_tlmrx) = channel();
    let (from_tlmtx, _from_tlmrx) = channel();
    let tlm_config = config.clone();
    let tlm_handle = thread::spawn(move || { telemetry::do_telemetry(tlm_config, sendtlmrx, to_tlmrx, from_tlmtx); });

    // setup a pair of channels for the housekeeping thread and start that thread
    let (to_hktx, to_hkrx) = channel();
    let (from_hktx, _fromhkrx) = channel();
    let tlmsend = sendtlmtx.clone();
    let hk_config = config.clone();
    let hk_handle = thread::spawn(move || { housekeeping::do_housekeeping(hk_config, tlmsend, to_hkrx, from_hktx); });

    let (to_pltx, to_plrx) = channel();
    let (from_pltx, _from_plrx) = channel();
    let tlmsend = sendtlmtx.clone();
    let pl_config = config.clone();
    let pl_handle = thread::spawn(move || { payload::do_payload(pl_config, tlmsend, to_plrx, from_pltx); });

    // NOTE(review): the flag thread's config binding reuses the name
    // `pl_config` — it works (fresh clone each time) but is presumably a
    // copy/paste slip; consider `flag_config` for clarity.
    let (to_flagtx, to_flagrx) = channel();
    let (from_flagtx, _from_flagrx) = channel();
    let tlmsend = sendtlmtx.clone();
    let pl_config = config.clone();
    let flag_handle = thread::spawn(move || { flag::do_flag(pl_config, tlmsend, to_flagrx, from_flagtx); });

    // let startup_delay_interval = time::Duration::from_millis(2000);
    // thread::sleep(startup_delay_interval);
    // do_query();

    // Blocks until a shutdown command (APID 7 / opcode 4) is received.
    process_commands( config, to_hktx, to_tlmtx, to_pltx, to_flagtx) ;
    tlm_handle.join().unwrap();
    pl_handle.join().unwrap();
    flag_handle.join().unwrap();
    hk_handle.join().unwrap();
}
#![feature(
    proc_macro_diagnostic,
    proc_macro_quote,
    proc_macro_span,
    proc_macro_hygiene
)]
extern crate proc_macro;

use proc_macro::{
    quote, Delimiter, Diagnostic, Ident, Level, Literal, Punct, Spacing, Span, TokenStream,
    TokenTree,
};
use std::convert::Into;
use std::iter::FromIterator;

// Parses macro input of the form
//     CONST_NAME; module1 [ part_a, part_b ] : parser1, module2 [ part_c ], ...
// and expands to (a) a `mod` declaration for every listed module and (b) a
// constant `CONST_NAME` mapping module name -> [(part name, runner closure)].
//
// NOTE(review): `module_ident.span().warning("found name")` below constructs a
// Diagnostic but never calls `.emit()`, so the warning is silently dropped —
// either emit it or delete the call. Likewise `tokens.peek().clone()` clones an
// `Option<&TokenTree>`, which has no effect.
fn generate_module_list_impl(token_stream: TokenStream) -> Result<TokenStream, Diagnostic> {
    // parse input
    struct Module {
        name: Ident,
        parts: Vec<Ident>,
        parser: Option<Ident>,
    }
    let mut modules: Vec<Module> = Vec::new();
    let mut tokens = token_stream.into_iter().peekable();
    // Leading `CONST_NAME ;` (the semicolon is tolerated as missing at EOF).
    let const_ident = match tokens.next() {
        Some(TokenTree::Ident(const_ident)) => const_ident,
        Some(token) => return Err(token.span().error("expected constant name")),
        None => return Err(Diagnostic::new(Level::Error, "expected arguments")),
    };
    match tokens.next() {
        Some(TokenTree::Punct(punct)) if punct.as_char() == ';' && punct.spacing() == Spacing::Alone => {}
        Some(token) => return Err(token.span().error("expected ;")),
        None => {}
    }
    // One `name [ parts ] (: parser)?` entry per iteration, comma-separated.
    while let Some(module_ident) = tokens.next() {
        if let TokenTree::Ident(module_ident) = module_ident {
            module_ident.span().warning("found name");
            let group = match tokens.next() {
                Some(TokenTree::Group(group)) if group.delimiter() == Delimiter::Bracket => group,
                Some(ident) => return Err(ident.span().error("expected brackets [ ]")),
                None => return Err(Diagnostic::new(Level::Error, "unexpected end of input")),
            };
            // Comma-separated part names inside the brackets.
            let mut group_tokens = group.stream().into_iter();
            let mut parts = Vec::new();
            while let Some(part_ident) = group_tokens.next() {
                if let TokenTree::Ident(part_ident) = part_ident {
                    parts.push(part_ident);
                } else {
                    return Err(part_ident.span().error("expected part name"));
                }
                match group_tokens.next() {
                    None => {}
                    Some(TokenTree::Punct(punct)) if punct.as_char() == ',' && punct.spacing() == Spacing::Alone => {}
                    Some(token) => return Err(token.span().error("expected ,")),
                }
            }
            // Optional `: parser` suffix naming an input-parsing fn in the module.
            let parser = match tokens.peek().clone() {
                Some(TokenTree::Punct(punct)) if punct.as_char() == ':' && punct.spacing() == Spacing::Alone => {
                    let punct = tokens.next().unwrap();
                    if let Some(TokenTree::Ident(input_ident)) = tokens.next() {
                        Some(input_ident)
                    } else {
                        return Err(punct.span().error("expected parser name after"));
                    }
                }
                _ => None,
            };
            modules.push(Module {
                name: module_ident,
                parts,
                parser,
            });
            match tokens.next() {
                None => {}
                Some(TokenTree::Punct(punct)) if punct.as_char() == ',' && punct.spacing() == Spacing::Alone => {}
                Some(token) => {
                    return Err(token.span().error("expected ,"));
                }
            }
        } else {
            return Err(module_ident.span().error("expected module name"));
        }
    }
    let mut tokens: Vec<TokenTree> = Vec::new();
    // emit module imports
    for module in &modules {
        tokens.push(Ident::new("mod", Span::call_site()).into());
        tokens.push(module.name.clone().into());
        tokens.push(Punct::new(';', Spacing::Alone).into());
    }
    // emit table
    let mut table_tokens = TokenStream::new();
    for module in modules {
        let module_name: TokenTree = Literal::string(&module.name.to_string()).into();
        // emit parts
        let mut part_tokens = TokenStream::new();
        for part in &module.parts {
            let part_name: TokenTree = Literal::string(&part.to_string()).into();
            // `module::part`, spelled out token by token.
            let call_part = TokenStream::from_iter(vec![
                TokenTree::Ident(module.name.clone()),
                Punct::new(':', Spacing::Joint).into(),
                Punct::new(':', Spacing::Joint).into(),
                part.clone().into(),
            ]);
            match module.parser.clone() {
                Some(parser) => {
                    let call_parser = TokenStream::from_iter(vec![
                        TokenTree::Ident(module.name.clone()),
                        Punct::new(':', Spacing::Joint).into(),
                        Punct::new(':', Spacing::Joint).into(),
                        parser.clone().into(),
                    ]);
                    // With a parser: run it first, require the whole input to
                    // be consumed, then feed the parsed value to the part fn.
                    part_tokens.extend(quote!(
                        ($part_name, |input: &str| -> Result<String> {
                            use ::anyhow::anyhow;
                            let input = input.trim();
                            let input = $call_parser(input)
                                .map_err(|err| anyhow!("parse error {:?}", err))
                                .and_then(|(remainder, v)| {
                                    if remainder == "" {
                                        Ok(v)
                                    } else {
                                        Err(anyhow!("input partially parsed, remainder: {:#?}", remainder))
                                    }
                                })?;
                            Ok(format!("{}", $call_part(input)?))
                        }),
                    ))
                }
                None => {
                    // Without a parser: hand the trimmed raw string straight
                    // to the part fn.
                    part_tokens.extend(quote!(
                        ($part_name, |input: &str| -> Result<String> {
                            let input = input.trim();
                            Ok(format!("{}", $call_part(input)?))
                        }),
                    ));
                }
            }
        }
        table_tokens.extend(quote!(
            ($module_name, &[ $part_tokens ]),
        ));
    }
    // emit DAY_LIST
    tokens.push(Ident::new("const", Span::call_site()).into());
    tokens.push(const_ident.into());
    tokens.push(Punct::new(':', Spacing::Alone).into());
    tokens.extend(
        quote!(&[(&'static str, &[(&'static str, fn(&str) -> anyhow::Result<String>)])] = {
            use std::fmt::Write;
            use anyhow::Result;
            &[ $table_tokens ]
        };),
    );
    let tokens = TokenStream::from_iter(tokens);
    // println!("{}", tokens);
    Ok(tokens)
}

/// Expands the module-list DSL; any parse `Diagnostic` is emitted and the
/// expansion collapses to an empty token stream.
#[proc_macro]
pub fn generate_module_list(token_stream: TokenStream) -> TokenStream {
    generate_module_list_impl(token_stream).unwrap_or_else(|diag| {
        diag.emit();
        TokenStream::new()
    })
}
//! Utility functions and data type definitions pub mod hasher; pub mod keys; pub mod permissions;
extern crate crossbeam;
extern crate pipeline;
extern crate time;

use pipeline::queue::multiqueue::{MultiReader, MultiWriter, multiqueue};
use crossbeam::scope;
use std::sync::atomic::{AtomicUsize, Ordering, fence};
use std::sync::Barrier;

//prevent any inlining shenanigans
#[inline(never)]
fn precise_time_ns() -> u64 {
    time::precise_time_ns()
}

// One unit of artificial work: an atomic store plus a full fence.
// (Named for its rough cost; the actual duration is machine-dependent.)
#[inline(never)]
fn waste_50_ns(val: &AtomicUsize) {
    val.store(0, Ordering::Release);
    fence(Ordering::SeqCst);
}

/// Consumer: calibrates the timer-call overhead, then pops timestamps off the
/// queue until the `None` end-of-stream marker and prints each measured
/// latency (overhead-corrected) to stdout, one per line.
fn recv(bar: &Barrier, reader: MultiReader<Option<u64>>) {
    // Estimate the fixed cost of a back-to-back pair of timer reads so it can
    // be subtracted from every latency sample below.
    let mut total_time = 0;
    let mut succ = 0;
    let tries = 10000;
    for _ in 0..tries {
        let start = precise_time_ns();
        let end = precise_time_ns();
        if end >= start {
            succ += 1;
            total_time += end - start;
        }
    }
    // Guard the (pathological) case of zero successful calibration samples,
    // which would previously have divided by zero.
    let to_subtract = if succ > 0 { total_time / succ } else { 0 };
    bar.wait();
    let mut latencies = Vec::with_capacity(100000);
    loop {
        if let Some(popped) = reader.pop() {
            match popped {
                // `None` is the producer's end-of-stream marker.
                None => break,
                Some(pushed) => {
                    let now = precise_time_ns();
                    if now >= pushed {
                        let diff = now - pushed;
                        if diff > to_subtract {
                            latencies.push(diff - to_subtract);
                        }
                    }
                }
            }
        }
    }
    // Print after the timed section so stdout I/O doesn't perturb the samples.
    for val in latencies {
        println!("{}", val);
    }
}

/// Producer: pushes `num_push` timestamps, spinning ~`num_us` microseconds of
/// artificial work between pushes, then pushes the `None` end-of-stream marker.
/// (Renamed from non-snake-case `Send`, which also collided visually with the
/// `Send` marker trait.)
fn send(bar: &Barrier, writer: MultiWriter<Option<u64>>, num_push: usize, num_us: usize) {
    bar.wait();
    let val: AtomicUsize = AtomicUsize::new(0);
    for _ in 0..num_push {
        // Spin until the (bounded) queue accepts the timestamp.
        loop {
            let topush = Some(precise_time_ns());
            if let Ok(_) = writer.push(topush) {
                break;
            }
        }
        for _ in 0..(num_us * 20) {
            waste_50_ns(&val);
        }
    }
    // BUG FIX: retry the end-of-stream marker until it lands. The original
    // single, unchecked `writer.push(None)` could fail on a full queue,
    // dropping the marker and leaving `recv` spinning forever.
    while writer.push(None).is_err() {}
}

fn main() {
    let (writer, reader) = multiqueue(20000);
    let bar = Barrier::new(2);
    let bref = &bar;
    scope(|scope| {
        scope.spawn(move || {
            send(bref, writer, 100000, 40);
        });
        recv(bref, reader);
    });
}
extern crate advent_of_code_2018;
extern crate combine;

use advent_of_code_2018::Mat;
use combine::*;
use combine::parser::char::{char, spaces};
use combine::parser::range::take_while1;
use combine::stream::state::State;
use std::io::{self, Read};
use std::collections::BTreeSet;

/// A fabric claim, parsed from a line of the form `#id @ x,y: widthxheight`.
struct Claim {
    id: usize,
    x: usize,
    y: usize,
    width: usize,
    height: usize,
}

/// Parses the puzzle input into claims. On a parse error the error is printed
/// and an empty list returned (preserving the original best-effort behavior).
fn parse(input: &str) -> Vec<Claim> {
    let num = || from_str(take_while1(|c: char| c.is_digit(10)));
    let id = || (char('#'), num()).map(|(_, num)| num);
    let pos = || (num().skip(char(',')), num());
    let size = || (num().skip(char('x')), num());
    let line = || {
        (
            id().skip(spaces()),
            char('@').skip(spaces()),
            pos().skip((char(':'), spaces())),
            size().skip(spaces()),
        )
            .map(|(id, _, pos, size)| Claim {
                id,
                x: pos.0,
                y: pos.1,
                width: size.0,
                height: size.1,
            })
    };
    let mut file = (many1(line()), eof()).map(|(i, _)| i);
    match file.parse(State::new(input)) {
        Ok((val, _)) => val,
        Err(err) => {
            println!("Error: {}", err);
            Vec::new()
        }
    }
}

/// Part 1: number of square inches covered by two or more claims.
fn get_num_overlapping(claims: &[Claim]) -> u32 {
    let mut matrix = Mat::<u8>::new(1000, 1000, 0);
    for claim in claims {
        for y in claim.y..(claim.y + claim.height) {
            for x in claim.x..(claim.x + claim.width) {
                // BUG FIX: the original computed `min(matrix[(x,y)] + 1, 255)`,
                // but the `+ 1` overflows u8 *before* `min` can clamp it —
                // a panic in debug builds and a wrap to 0 in release builds
                // (which would undercount overlaps). Saturating addition
                // clamps at 255 safely.
                matrix[(x, y)] = matrix[(x, y)].saturating_add(1);
            }
        }
    }
    let mut total = 0;
    for y in 0..1000 {
        for x in 0..1000 {
            if matrix[(x, y)] > 1 {
                total += 1;
            }
        }
    }
    total
}

/// Part 2: the id of the single claim that overlaps no other claim, if any.
fn get_non_overlapping(claims: &[Claim]) -> Option<usize> {
    // Start by assuming every claim is overlap-free, then evict offenders.
    let mut non_overlapping = BTreeSet::new();
    for claim in claims {
        non_overlapping.insert(claim.id);
    }
    // usize::MAX marks an unclaimed cell; otherwise the cell stores the first
    // claimant's id so a later overlapper can evict both ids from the set.
    let mut matrix = Mat::<usize>::new(1000, 1000, std::usize::MAX);
    for claim in claims {
        for y in claim.y..(claim.y + claim.height) {
            for x in claim.x..(claim.x + claim.width) {
                let current = matrix[(x, y)];
                if current != std::usize::MAX {
                    non_overlapping.remove(&current);
                    non_overlapping.remove(&claim.id);
                } else {
                    matrix[(x, y)] = claim.id;
                }
            }
        }
    }
    if non_overlapping.len() == 1 {
        // `.copied()` replaces the original `.map(|v| *v)`.
        non_overlapping.iter().next().copied()
    } else {
        None
    }
}

fn main() {
    let stdin = io::stdin();
    let mut locked = stdin.lock();
    let mut input = String::new();
    if locked.read_to_string(&mut input).is_ok() {
        let claims = parse(&input);
        println!("part 1: {}", get_num_overlapping(&claims));
        match get_non_overlapping(&claims) {
            Some(val) => println!("part 2: {}", val),
            None => println!("part 2: no unique non-overlapping claim"),
        }
    }
}
use std::sync::Arc; use lambda_http::http; use tracing::info; use tracing::instrument; use htsget_http::{post as htsget_post, Endpoint, PostRequest}; use htsget_search::htsget::HtsGet; use crate::handlers::handle_response; use crate::{Body, Response}; /// Post request reads endpoint #[instrument(skip(searcher))] pub async fn post<H: HtsGet + Send + Sync + 'static>( id_path: String, searcher: Arc<H>, query: PostRequest, endpoint: Endpoint, ) -> http::Result<Response<Body>> { info!(query = ?query, "POST request"); handle_response(htsget_post(searcher, query, id_path, endpoint).await) }
use std::collections::HashMap; // bringing HashMap into scope // we can provide new names //use std::fmt::Result; //use std::io::Result as ioResult; use rand::Rng; /* this is ugly use std::cmp::Ordering; use std::io; */ //better use std::{cmp::Ordering, io}; //fn function1() -> Result {} // this returns a Result from the fmt module //fn function2() -> ioResult<()> {} // returns a result from the io module use std::collections::*; // bring all publics in collections into scope fn main() { println!("Hello, world!"); let mut map = HashMap::new(); map.insert(1,2); let secret_number = rand::thread_rng().gen_range(1,101); }
// Copyright (C) 2021 Subspace Labs, Inc. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Various cryptographic utilities used across Subspace Network. extern crate alloc; pub mod kzg; use crate::{Blake2b256Hash, Blake3Hash}; use alloc::format; use alloc::string::String; use alloc::vec::Vec; use blake2::digest::typenum::U32; use blake2::digest::{FixedOutput, Update}; use blake2::{Blake2b, Blake2bMac, Digest}; use blst_rust::types::fr::FsFr; use core::cmp::Ordering; use core::hash::{Hash, Hasher}; use core::mem; use derive_more::{AsMut, AsRef, Deref, DerefMut, From, Into}; use parity_scale_codec::{Decode, Encode, EncodeLike, Input, MaxEncodedLen}; use scale_info::{Type, TypeInfo}; /// BLAKE2b-256 hashing of a single value. pub fn blake2b_256_hash(data: &[u8]) -> Blake2b256Hash { let mut state = Blake2b::<U32>::new(); Update::update(&mut state, data); state.finalize_fixed().into() } /// BLAKE2b-256 hashing of a single value truncated to 254 bits. /// /// TODO: We probably wouldn't need this eventually pub fn blake2b_256_254_hash(data: &[u8]) -> Blake2b256Hash { let mut hash = blake2b_256_hash(data); // Erase last 2 bits to effectively truncate the hash (number is interpreted as little-endian) hash[31] &= 0b00111111; hash } /// BLAKE2b-256 hashing of a single value truncated to 254 bits. 
///
/// The result is masked to 254 bits so the conversion to [`Scalar`] is
/// guaranteed to succeed.
///
/// TODO: We probably wouldn't need this eventually
pub fn blake2b_256_254_hash_to_scalar(data: &[u8]) -> Scalar {
    let mut hash = blake2b_256_hash(data);
    // Erase last 2 bits to effectively truncate the hash (number is interpreted as little-endian)
    hash[31] &= 0b00111111;
    Scalar::try_from(hash)
        .expect("Last bit erased, thus hash is guaranteed to fit into scalar; qed")
}

/// BLAKE2b-256 keyed hashing of a single value.
///
/// PANIC: Panics if key is longer than 64 bytes.
pub fn blake2b_256_hash_with_key(key: &[u8], data: &[u8]) -> Blake2b256Hash {
    let mut state = Blake2bMac::<U32>::new_with_salt_and_personal(key, &[], &[])
        .expect("Only panics when key is over 64 bytes as specified in function description");
    Update::update(&mut state, data);
    state.finalize_fixed().into()
}

/// BLAKE2b-256 hashing of a list of values.
pub fn blake2b_256_hash_list(data: &[&[u8]]) -> Blake2b256Hash {
    let mut state = Blake2b::<U32>::new();
    for d in data {
        Update::update(&mut state, d);
    }
    // Use the infallible fixed-output API (consistent with `blake2b_256_hash`
    // above) instead of the previous `finalize().try_into().expect(...)`.
    state.finalize_fixed().into()
}

/// BLAKE3 hashing of a single value.
pub fn blake3_hash(data: &[u8]) -> Blake3Hash {
    *blake3::hash(data).as_bytes()
}

/// BLAKE3 hashing of a single value in parallel (only useful for large values well above 128kiB).
#[cfg(feature = "parallel")]
pub fn blake3_hash_parallel(data: &[u8]) -> Blake3Hash {
    let mut state = blake3::Hasher::new();
    state.update_rayon(data);
    *state.finalize().as_bytes()
}

/// BLAKE3 hashing of a list of values.
pub fn blake3_hash_list(data: &[&[u8]]) -> Blake3Hash {
    let mut state = blake3::Hasher::new();
    for d in data {
        state.update(d);
    }
    *state.finalize().as_bytes()
}

/// Representation of a single BLS12-381 scalar value.
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, From, Into, AsRef, AsMut, Deref, DerefMut)]
#[repr(transparent)]
pub struct Scalar(FsFr);

impl Hash for Scalar {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the canonical byte representation so `Hash` agrees with the
        // byte-wise `Ord` implementation below.
        self.to_bytes().hash(state)
    }
}

impl PartialOrd<Self> for Scalar {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Scalar {
    fn cmp(&self, other: &Self) -> Ordering {
        // Compare the canonical 32-byte representations lexicographically.
        self.to_bytes().cmp(&other.to_bytes())
    }
}

impl Encode for Scalar {
    fn size_hint(&self) -> usize {
        Self::FULL_BYTES
    }

    fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
        // SCALE-encode as the fixed 32-byte canonical representation.
        f(&self.to_bytes())
    }

    #[inline]
    fn encoded_size(&self) -> usize {
        Self::FULL_BYTES
    }
}

impl EncodeLike for Scalar {}

impl Decode for Scalar {
    fn decode<I: Input>(input: &mut I) -> Result<Self, parity_scale_codec::Error> {
        // Non-canonical byte strings are rejected by `Scalar::try_from`.
        Self::try_from(&<[u8; Self::FULL_BYTES]>::decode(input)?).map_err(|error_code| {
            parity_scale_codec::Error::from("Failed to create scalar from bytes")
                .chain(format!("Error code: {error_code}"))
        })
    }

    #[inline]
    fn encoded_fixed_size() -> Option<usize> {
        Some(Self::FULL_BYTES)
    }
}

impl TypeInfo for Scalar {
    type Identity = Self;

    fn type_info() -> Type {
        // Expose the scalar to SCALE metadata as a named composite wrapping a
        // 32-byte array, matching the encoded form above.
        Type::builder()
            .path(scale_info::Path::new(stringify!(Scalar), module_path!()))
            .docs(&["BLS12-381 scalar"])
            .composite(scale_info::build::Fields::named().field(|f| {
                f.ty::<[u8; Self::FULL_BYTES]>()
                    .name(stringify!(inner))
                    .type_name("FsFr")
            }))
    }
}

impl MaxEncodedLen for Scalar {
    #[inline]
    fn max_encoded_len() -> usize {
        Self::FULL_BYTES
    }
}

#[cfg(feature = "serde")]
mod scalar_serde {
    use serde::de::Error;
    use serde::{Deserialize, Deserializer, Serialize, Serializer};

    // Custom wrapper so we don't have to write serialization/deserialization code manually
    #[derive(Serialize, Deserialize)]
    struct Scalar(#[serde(with = "hex::serde")] pub(super) [u8; super::Scalar::FULL_BYTES]);

    impl Serialize for super::Scalar {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            // Delegate to the hex-encoding wrapper defined above.
            Scalar(self.to_bytes()).serialize(serializer)
        }
    }

    impl<'de> Deserialize<'de> for super::Scalar {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            let Scalar(bytes) = Scalar::deserialize(deserializer)?;
            Self::try_from(bytes).map_err(D::Error::custom)
        }
    }
}

impl From<&[u8; Self::SAFE_BYTES]> for Scalar {
    #[inline]
    fn from(value: &[u8; Self::SAFE_BYTES]) -> Self {
        // 31 bytes always fit into the 254-bit scalar: zero-extend to 32
        // bytes, then the fallible conversion cannot fail.
        let mut bytes = [0u8; Self::FULL_BYTES];
        bytes[..Self::SAFE_BYTES].copy_from_slice(value);
        Self::try_from(bytes).expect("Safe bytes always fit into scalar and thus succeed; qed")
    }
}

impl From<[u8; Self::SAFE_BYTES]> for Scalar {
    #[inline]
    fn from(value: [u8; Self::SAFE_BYTES]) -> Self {
        Self::from(&value)
    }
}

impl TryFrom<&[u8; Self::FULL_BYTES]> for Scalar {
    type Error = String;

    #[inline]
    fn try_from(value: &[u8; Self::FULL_BYTES]) -> Result<Self, Self::Error> {
        Self::try_from(*value)
    }
}

impl TryFrom<[u8; Self::FULL_BYTES]> for Scalar {
    type Error = String;

    #[inline]
    fn try_from(value: [u8; Self::FULL_BYTES]) -> Result<Self, Self::Error> {
        // Fails when the 32 bytes do not encode a canonical field element.
        FsFr::from_scalar(value)
            .map_err(|error_code| {
                format!("Failed to create scalar from bytes with code: {error_code}")
            })
            .map(Scalar)
    }
}

impl From<&Scalar> for [u8; Scalar::FULL_BYTES] {
    #[inline]
    fn from(value: &Scalar) -> Self {
        value.0.to_scalar()
    }
}

impl From<Scalar> for [u8; Scalar::FULL_BYTES] {
    #[inline]
    fn from(value: Scalar) -> Self {
        Self::from(&value)
    }
}

impl Scalar {
    /// How many full bytes can be stored in BLS12-381 scalar (for instance before encoding). It is
    /// actually 254 bits, but bits are much harder to work with and likely not worth it.
    ///
    /// NOTE: After encoding more bytes can be used, so don't rely on this as the max number of
    /// bytes stored within at all times!
    pub const SAFE_BYTES: usize = 31;

    /// How many bytes Scalar contains physically, use [`Self::SAFE_BYTES`] for the amount of data
    /// that you can put into it safely (for instance before encoding).
    pub const FULL_BYTES: usize = 32;

    /// Convert scalar into its canonical 32-byte representation.
    pub fn to_bytes(&self) -> [u8; Scalar::FULL_BYTES] {
        self.into()
    }

    /// Convenient conversion from slice of scalar to underlying representation for efficiency
    /// purposes.
    #[inline]
    pub fn slice_to_repr(value: &[Self]) -> &[FsFr] {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of underlying representation to scalar for efficiency
    /// purposes.
    #[inline]
    pub fn slice_from_repr(value: &[FsFr]) -> &[Self] {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of optional scalar to underlying representation for
    /// efficiency purposes.
    ///
    /// NOTE(review): this additionally relies on `Option<Scalar>` and `Option<FsFr>` sharing
    /// layout; `#[repr(transparent)]` on `Scalar` alone does not formally guarantee this for
    /// `Option` wrappers — confirm.
    pub fn slice_option_to_repr(value: &[Option<Self>]) -> &[Option<FsFr>] {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from slice of optional underlying representation to scalar for
    /// efficiency purposes.
    pub fn slice_option_from_repr(value: &[Option<FsFr>]) -> &[Option<Self>] {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of scalar to underlying representation for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [FsFr] {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from mutable slice of underlying representation to scalar for
    /// efficiency purposes.
    #[inline]
    pub fn slice_mut_from_repr(value: &mut [FsFr]) -> &mut [Self] {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from optional mutable slice of scalar to underlying representation
    /// for efficiency purposes.
    pub fn slice_option_mut_to_repr(value: &mut [Option<Self>]) -> &mut [Option<FsFr>] {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from optional mutable slice of underlying representation to scalar
    /// for efficiency purposes.
    pub fn slice_option_mut_from_repr(value: &mut [Option<FsFr>]) -> &mut [Option<Self>] {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout
        unsafe { mem::transmute(value) }
    }

    /// Convenient conversion from vector of scalar to underlying representation for efficiency
    /// purposes.
    pub fn vec_to_repr(value: Vec<Self>) -> Vec<FsFr> {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout, original vector is not dropped
        unsafe {
            let mut value = mem::ManuallyDrop::new(value);
            Vec::from_raw_parts(
                value.as_mut_ptr() as *mut FsFr,
                value.len(),
                value.capacity(),
            )
        }
    }

    /// Convenient conversion from vector of underlying representation to scalar for efficiency
    /// purposes.
    pub fn vec_from_repr(value: Vec<FsFr>) -> Vec<Self> {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout, original vector is not dropped
        unsafe {
            let mut value = mem::ManuallyDrop::new(value);
            Vec::from_raw_parts(
                value.as_mut_ptr() as *mut Self,
                value.len(),
                value.capacity(),
            )
        }
    }

    /// Convenient conversion from vector of optional scalar to underlying representation for
    /// efficiency purposes.
    pub fn vec_option_to_repr(value: Vec<Option<Self>>) -> Vec<Option<FsFr>> {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout, original vector is not dropped
        unsafe {
            let mut value = mem::ManuallyDrop::new(value);
            Vec::from_raw_parts(
                value.as_mut_ptr() as *mut Option<FsFr>,
                value.len(),
                value.capacity(),
            )
        }
    }

    /// Convenient conversion from vector of optional underlying representation to scalar for
    /// efficiency purposes.
    pub fn vec_option_from_repr(value: Vec<Option<FsFr>>) -> Vec<Option<Self>> {
        // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory
        // layout, original vector is not dropped
        unsafe {
            let mut value = mem::ManuallyDrop::new(value);
            Vec::from_raw_parts(
                value.as_mut_ptr() as *mut Option<Self>,
                value.len(),
                value.capacity(),
            )
        }
    }
}
#[macro_use] extern crate failure; extern crate glob; #[macro_use] extern crate log; extern crate loggerv; extern crate serde_json; extern crate tera; use failure::Error; use glob::glob; use serde_json::Value; use std::{fs::File, path::Path}; use tera::Tera; pub fn parse_data_file<P: AsRef<Path>>(path: &P) -> Result<Value, Error> { let file = File::open(path)?; let data: Value = serde_json::from_reader(file)?; Ok(data) } pub fn parse_data_glob(data_glob: &str) -> Result<Value, Error> { let mut data = Value::Null; for file in glob(data_glob).unwrap() { match file { Ok(path) => { let read_data = parse_data_file(&path)?; data = merge_data(&data, &read_data).unwrap() }, Err(e) => { warn!("Data path matched but was unreadable -- got error: {}", e); }, } } Ok(data) } fn merge_data(bottom: &Value, top: &Value) -> Option<Value> { match (bottom, top) { (Value::Object(ref b), Value::Object(ref t)) => { let mut result = b.clone(); for (k, v) in t { result.insert(k.clone(), merge_data(b.get(k).unwrap_or(&Value::Null), v)?); } Some(result.into()) } (Value::Array(ref b), Value::Array(ref t)) => { let mut result = b.clone(); for i in t { result.push(i.clone()); } Some(result.into()) } (_, h) => { Some(h.clone()) } } } pub fn compile_templates(path: &str) -> Result<Tera, Error> { match Tera::new(path) { Ok(t) => Ok(t), Err(e) => { error!("Failed to compile templates under '{}' -- got error {}", path, e); Err(format_err!("Failed to compile templates")) } } } pub fn render_template(templates: Tera, data: Value, name: &str) -> Result<String, Error> { match templates.render(name, &data) { Ok(o) => Ok(o), Err(e) => { error!("Failed to render template '{}' -- got error {}", name, e); Err(format_err!("Failed to render template")) } } }
use ress::prelude::*;
use walkdir::WalkDir;

use std::{env::args, fs::read_to_string};

/// Walks the directory given as the first CLI argument, scans every `.js`
/// file with `ress`, and prints each unique regex literal found, formatted as
/// a Rust `static` slice of raw strings.
fn main() {
    let mut args = args();
    let _ = args.next(); // skip the program name
    let start = args
        .next()
        .expect("No directory provided as starting location.");
    println!("static REGEXES: &[&str] = &[");
    // Regex sources already emitted; each literal is printed at most once.
    let mut seen = std::collections::HashSet::new();
    // Unreadable directory entries are silently skipped, as before.
    for entry in WalkDir::new(start).into_iter().flatten() {
        let path = entry.path();
        if !path.is_file() {
            continue;
        }
        // Only JavaScript sources are of interest.
        if path.extension().map_or(false, |ext| ext == "js") {
            let js = match read_to_string(path) {
                Ok(js) => js,
                Err(_) => continue,
            };
            // Scan tokens; lexing errors are skipped just like before.
            for item in Scanner::new(&js).flatten() {
                if item.token.is_regex() {
                    let source = js[item.span.start..item.span.end].to_string();
                    if seen.insert(source) {
                        println!(
                            "    r#\"{}\"#,",
                            &js[item.span.start..item.span.end]
                        );
                    }
                }
            }
        }
    }
    println!("];");
}
use super::manager::{TemplateManager, TemplateManagerError}; use super::template::Template; #[cfg(feature = "provider-jolimail")] pub mod jolimail; pub mod local; const CONFIG_TEMPLATE_PROVIDER: &'static str = "TEMPLATE_PROVIDER"; #[derive(Clone, Debug)] pub enum TemplateProviderError { ConfigurationInvalid(String), } #[derive(Clone, Debug)] pub enum TemplateProvider { #[cfg(feature = "provider-jolimail")] Jolimail(jolimail::JolimailTemplateProvider), Local(local::LocalTemplateProvider), } impl TemplateProvider { pub fn from_env() -> Result<Self, TemplateProviderError> { match std::env::var(CONFIG_TEMPLATE_PROVIDER) .unwrap_or("local".into()) .as_str() { #[cfg(feature = "provider-jolimail")] "jolimail" => Ok(Self::Jolimail( jolimail::JolimailTemplateProvider::from_env()?, )), _ => Ok(Self::Local(local::LocalTemplateProvider::from_env()?)), } } fn get_manager(&self) -> Box<&dyn TemplateManager> { match self { #[cfg(feature = "provider-jolimail")] Self::Jolimail(manager) => Box::new(manager), Self::Local(manager) => Box::new(manager), } } pub async fn find_by_name(&self, name: &str) -> Result<Template, TemplateManagerError> { self.get_manager().find_by_name(name).await } } // LCOV_EXCL_START #[cfg(test)] impl TemplateProvider { #[cfg(feature = "provider-jolimail")] fn is_jolimail(&self) -> bool { match self { Self::Jolimail(_) => true, _ => false, } } fn is_local(&self) -> bool { match self { Self::Local(_) => true, _ => false, } } } #[cfg(test)] mod tests { use super::*; use crate::test_util::TempEnvVar; #[test] #[serial] fn template_provider_from_env_local() { let _env_provider = TempEnvVar::new(CONFIG_TEMPLATE_PROVIDER).with("local"); let _env_root = TempEnvVar::new(local::CONFIG_PROVIDER_LOCAL_ROOT).with("./template"); let provider = TemplateProvider::from_env(); assert!(provider.is_ok()); assert!(provider.unwrap().is_local()); } #[cfg(feature = "provider-jolimail")] #[test] #[serial] fn template_provider_from_env_jolimail() { let _env_provider = 
TempEnvVar::new(CONFIG_TEMPLATE_PROVIDER).with("jolimail"); let _env_base_url = TempEnvVar::new(jolimail::CONFIG_BASE_URL).with("http://localhost"); let provider = TemplateProvider::from_env(); assert!(provider.is_ok()); assert!(provider.unwrap().is_jolimail()); } } // LCOV_EXCL_END
#![warn(unused_crate_dependencies)]
#![warn(clippy::pedantic)]
#![warn(clippy::cargo)]
#![allow(clippy::module_name_repetitions)]

use heroku_nodejs_utils::package_json::{PackageJson, PackageJsonError};
use layers::{ManagerLayer, ShimLayer};
use libcnb::build::{BuildContext, BuildResult, BuildResultBuilder};
use libcnb::data::build_plan::BuildPlanBuilder;
use libcnb::data::layer_name;
use libcnb::detect::{DetectContext, DetectResult, DetectResultBuilder};
use libcnb::generic::GenericMetadata;
use libcnb::generic::GenericPlatform;
use libcnb::layer_env::Scope;
use libcnb::{buildpack_main, Buildpack, Env};
use libherokubuildpack::log::log_header;

// Test-only crates, referenced so `unused_crate_dependencies` stays quiet.
#[cfg(test)]
use libcnb_test as _;
#[cfg(test)]
use test_support as _;
#[cfg(test)]
use ureq as _;

mod cfg;
mod cmd;
mod errors;
mod layers;

/// Buildpack that installs the project's declared package manager via Node's
/// corepack.
pub(crate) struct CorepackBuildpack;

impl Buildpack for CorepackBuildpack {
    type Platform = GenericPlatform;
    type Metadata = GenericMetadata;
    type Error = CorepackBuildpackError;

    fn detect(&self, context: DetectContext<Self>) -> libcnb::Result<DetectResult, Self::Error> {
        // Corepack requires the `packageManager` key from `package.json`.
        // This buildpack won't be detected without it.
        let pkg_json_path = context.app_dir.join("package.json");
        if pkg_json_path.exists() {
            let pkg_json =
                PackageJson::read(pkg_json_path).map_err(CorepackBuildpackError::PackageJson)?;
            cfg::get_supported_package_manager(&pkg_json).map_or_else(
                // No supported package manager declared -> do not detect.
                || DetectResultBuilder::fail().build(),
                // Declare that we provide the package manager and require node.
                |pkg_mgr| {
                    DetectResultBuilder::pass()
                        .build_plan(
                            BuildPlanBuilder::new()
                                .requires("node")
                                .requires(&pkg_mgr)
                                .provides(pkg_mgr)
                                .build(),
                        )
                        .build()
                },
            )
        } else {
            DetectResultBuilder::fail().build()
        }
    }

    fn build(&self, context: BuildContext<Self>) -> libcnb::Result<BuildResult, Self::Error> {
        // `detect` guarantees `package.json` exists, but `packageManager` may
        // still be missing, which is a hard error here.
        let pkg_mgr = PackageJson::read(context.app_dir.join("package.json"))
            .map_err(CorepackBuildpackError::PackageJson)?
            .package_manager
            .ok_or(CorepackBuildpackError::PackageManagerMissing)?;

        let env = &Env::from_current();

        let corepack_version =
            cmd::corepack_version(env).map_err(CorepackBuildpackError::CorepackVersion)?;

        log_header(format!(
            "Installing {} {} via corepack {corepack_version}",
            pkg_mgr.name, pkg_mgr.version
        ));

        // "shim" layer: corepack's shim binaries, enabled onto PATH below.
        let shims_layer =
            context.handle_layer(layer_name!("shim"), ShimLayer { corepack_version })?;
        cmd::corepack_enable(&pkg_mgr.name, &shims_layer.path.join("bin"), env)
            .map_err(CorepackBuildpackError::CorepackEnable)?;

        // "mgr" layer: the downloaded package manager itself; `prepare` runs
        // with that layer's environment applied at build scope.
        let mgr_layer = context.handle_layer(
            layer_name!("mgr"),
            ManagerLayer {
                package_manager: pkg_mgr,
            },
        )?;
        let mgr_env = mgr_layer.env.apply(Scope::Build, env);
        cmd::corepack_prepare(&mgr_env).map_err(CorepackBuildpackError::CorepackPrepare)?;

        BuildResultBuilder::new().build()
    }

    fn on_error(&self, err: libcnb::Error<Self::Error>) {
        errors::on_error(err);
    }
}

/// All failure modes of this buildpack.
#[derive(Debug)]
pub(crate) enum CorepackBuildpackError {
    /// `package.json` exists but has no `packageManager` field.
    PackageManagerMissing,
    PackageJson(PackageJsonError),
    // I/O failures from the layers (presumably raised in `layers` — confirm).
    ShimLayer(std::io::Error),
    ManagerLayer(std::io::Error),
    CorepackVersion(cmd::Error),
    CorepackEnable(cmd::Error),
    CorepackPrepare(cmd::Error),
}

impl From<CorepackBuildpackError> for libcnb::Error<CorepackBuildpackError> {
    fn from(e: CorepackBuildpackError) -> Self {
        libcnb::Error::BuildpackError(e)
    }
}

buildpack_main!(CorepackBuildpack);
use super::*;

use std::hash::{Hash, Hasher};

impl Hash for RsDict {
    /// Feeds every element yielded by `self.iter()` into the hasher, in
    /// iteration order.
    fn hash<H: Hasher>(&self, state: &mut H) {
        for value in self.iter() {
            value.hash(state);
        }
    }
}
#![allow(dead_code)]

/// Basic data types
/// bool
pub fn learn_bool() {
    println!("------------------------");
    let x = true;
    let y: bool = !x; // negation
    let z = x && y; // logical AND, short-circuiting
    println!("{}", z);
    let z = x || y; // logical OR, short-circuiting
    println!("{}", z);
    let z = x & y; // bitwise AND, no short-circuit
    println!("{}", z);
    let z = x | y; // bitwise OR, no short-circuit
    println!("{}", z);
    let z = x ^ y; // bitwise XOR, no short-circuit
    println!("{}", z);
    let z = 1 < 2;
    println!("{}", z);
}

pub fn learn_char() {
    println!("------------------------");
    // The character type is `char`.
    // It can describe any single value conforming to the Unicode standard.
    // In code, a single character literal is wrapped in single quotes.
    // One `char` occupies 4 bytes.
    let love = '❤';
    println!("{}", love);
    let c1 = '\n';
    let c2 = '\x7f';
    let c3 = '\u{7fff}';
    println!("{} {} {}", c1, c2, c3);
    // An ASCII character really only needs a single byte of storage, so Rust
    // provides single-byte character literals for ASCII characters:
    // a `b` prefix on a char or string literal stores it as `u8` data.
    let x: u8 = 1;
    let y: u8 = b'A';
    let s: &[u8; 5] = b"hello";
    let r: &[u8; 14] = br#"hello \n world"#;
    println!("{} {} {:?} {:?}", x, y, s, r);
}

/// Integer types
/// Integer types differ along two axes: signedness and occupied size.
/// Signed/unsigned describes how the bits in memory are interpreted:
/// for a signed type the highest bit is the "sign bit", indicating whether
/// the value is positive or negative; for an unsigned type the highest bit
/// contributes to the magnitude like every other bit.
/// `isize` and `usize` deserve special attention: their size is not fixed —
/// it matches the pointer width of the target platform, i.e. 32 bits on a
/// 32-bit system and 64 bits on a 64-bit system (the C++ analogues are
/// int_ptr/uint_ptr). Unlike C, whose standard leaves the size of `int`,
/// `long`, `double` etc. unspecified across platforms (a source of endless
/// trouble), Rust fixes each type's size in the language and lets the
/// compiler adapt code generation per platform — the more sensible choice.
pub fn integer_type() {
    println!("------------------------");
    // Literals default to decimal:
    // 0x hexadecimal
    // 0o octal
    // 0b binary
    let var1: i32 = 32;
    let var2: i32 = 0xFF;
    let var3: i32 = 0o55;
    let var4: i32 = 0b1001;
    println!("{} {} {} {}", var1, var2, var3, var4);
    // Underscores may appear anywhere inside numeric literals, for readability
    let var5 = 0x_1234_ABCD;
    // A literal may carry a type suffix, making an explicit annotation unnecessary
    let var6 = 123usize;
    let var7 = 0x_ff_u8;
    let var8 = 32;
    println!("{} {} {} {}", var5, var6, var7, var8);
    // In Rust methods can be added to any type, integers included.
    // For example the standard library gives integers a `pow` method
    // computing the n-th power.
    let x: i32 = 9;
    println!("9 power 3 = {}",
             x.pow(3));
    // Methods can even be called directly on integer literals
    println!("9 power 3 = {}", 9_i32.pow(3));
}

pub fn integer_overflow() {
    // Integer overflow
    // Rust's chosen policy: by default, debug builds insert overflow checks
    // and panic on overflow; release builds do not check and instead silently
    // discard the high bits (wrap around).

    // addition with overflow risk
    let m: i8 = 123;
    let n: i8 = 113;
    // likewise fails to compile (1.45.0)
    // println!("{}", m + n);
    // fails to compile
    // println!("{}", 123_i8 + 113_i8);

    // When finer-grained control over overflow behavior is needed, use the
    // standard library's checked_*, saturating_* and wrapping_* families.
    let i = 100_i8;
    println!("checked {:?}", i.checked_add(i));
    println!("saturating {:?}", i.saturating_add(i));
    println!("wrapping {:?}", i.wrapping_add(i));
    // checked_* returns Option<_>: None on overflow;
    // saturating_* returns an integer, clamped to the type's max/min on overflow;
    // wrapping_* discards the overflowed high bits and returns the rest.
    // In safety-critical code, prefer these methods over the plain arithmetic
    // operators; in many situations truncation (dropping the high bits) is
    // exactly the desired overflow behavior.
    // The standard library also offers std::num::Wrapping<T>.
    // It overloads the basic operators so it can be used like a plain
    // integer, and any value it wraps truncates on overflow at all times.
    use std::num::Wrapping;
    let big = Wrapping(std::u32::MAX);
    let sum = big + Wrapping(1_u32);
    println!("{}", sum);
}

pub fn float_type() {
    let f1 = 123.0f64;
    let f2 = 0.1f64;
    let f3 = 0.1f32;
    let f4 = 12E+99_f64;
    let f5: f64 = 2.;
    println!("{} {} {} {} {}", f1, f2, f3, f4, f5);
    // Compared with integers, Rust's floating-point types are considerably
    // more complex: besides normal values they can also represent abnormal
    // ones. The std::num::FpCategory enum lists a float's possible states:
    // enum FpCategory {
    //     Nan,
    //     Infinite,
    //     Zero,
    //     Subnormal,
    //     Normal,
    // }

    // Demonstrate Subnormal:
    // initialize `small` to a very small float
    let mut small = std::f32::EPSILON;
    // keep halving so `small` approaches 0, until it finally equals 0
    while small > 0.0 {
        small = small / 2.0;
        println!("{} {:?}", small, small.classify());
    }

    // Demonstrate Infinite and Nan
    let x = 1.0f32 / 0.0;
    let y = 0.0f32 / 0.0;
    println!("{} {}", x, y);
    // arithmetic with inf
    let inf = std::f32::INFINITY;
    println!("{} {} {}", inf * 0.0, 1.0 / inf, inf / inf);
    // The special value NaN brings special trouble, mainly because it lacks
    // the "total order" property
    let nan = std::f32::NAN;
    println!("{} {} {}", nan < nan, nan > nan, nan == nan)
    // a number can be unequal to itself
    // Because NaN exists, floats do not form a total order;
    // total/partial order: Ord vs PartialOrd
}

pub fn point_type() {
    // Languages without GC — C, C++ and Rust — allow more freedom in how data
    // is organized and accessed, concretely:
    // 1. The same type can sometimes be placed on the stack, sometimes on the
    //    heap; the allocation strategy depends on usage, not on the type itself.
    // 2. Data can be accessed directly, or indirectly through a pointer; a
    //    pointer can be taken to any object.
    // 3. A composite type can embed another type's value directly, or point
    //    to it indirectly via a pointer.
    // 4. Unsized data can even be appended at the end of a composite type,
    //    producing a dynamically sized composite type.
    // Rust has pointer types too — several of them:
    // 1. Box<T>     owning pointer to T; entitled to free the memory
    // 2. &T         borrowed pointer ("reference") to T; may not free or write
    // 3. &mut T     mutable borrowed pointer to T; may write, may not free
    // 4. *const T   read-only raw pointer to T; no lifetime info, may not write
    // 5. *mut T     read-write raw pointer to T; no lifetime info, may write
    // The standard library also wraps pointer-like behavior into
    // "smart pointers":
    // 1. Rc<T>      reference-counted pointer to T; shared ownership, not thread-safe
    // 2. Arc<T>     atomically reference-counted pointer to T; shared ownership, thread-safe
    // 3. Cow<'a, T> clone-on-write; either a borrowed or an owning pointer
}

pub fn type_case() {
    // Type conversions
    // Rust is very strict about conversions between different types.
    // The `as` keyword performs explicit casts.
    let var1: i8 = 41;
    let var2: i16 = var1 as i16;
    // `as` cannot be used freely: it only permits conversions the compiler
    // considers reasonable; arbitrary conversions are rejected.
    let a = "some string";
    // let b = a as u32; // compile error
    // Sometimes several chained `as` casts are needed to convert successfully
    let i = 42;
    // first to *const i32, then to *mut i32
    let p = &i as *const i32 as *mut i32;
    println!("{:p}", p);
    // For more complex conversions, use the standard library's From/Into traits.
    // e as U    // e: expression; U: target type
    // Type of e                    U
    // Integer or Float type        Integer or Float type
    // C-like enum                  Integer type
    // bool or char                 Integer type
    // u8                           char
    // *T                           *V where V: Sized *
    // *T where T: Sized            Numeric type
    // Integer type                 *V where V: Sized
    // &[T; n]                      *const T
    // Function pointer             *V where T: Sized
    // Function pointer             Integer
}
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! A signer used by Engines which need to sign messages.

use ed25519_dalek::PublicKey;
use std::sync::Arc;

use engines::authority_round::subst::{
    AccountProvider, Address, H256, Password, SignError, Signature
};

/// Everything that an Engine needs to sign messages.
#[derive(Debug)]
pub struct EngineSigner {
    // Account store the signatures are produced through.
    account_provider: Arc<AccountProvider>,
    // Signing address; populated by `set`.
    address: Option<Address>,
    // NOTE(review): never assigned anywhere in this file, so `key()` always
    // returns the default `None` — confirm whether this is intentional.
    key: Option<PublicKey>,
    // Password unlocking `address`; populated by `set`.
    password: Option<Password>,
}

impl Default for EngineSigner {
    fn default() -> Self {
        EngineSigner {
            // A transient provider: no persistent accounts until `set` is called.
            account_provider: Arc::new(AccountProvider::transient_provider()),
            address: Default::default(),
            key: Default::default(),
            password: Default::default(),
        }
    }
}

impl EngineSigner {
    /// Set up the signer to sign with given address and password.
    pub fn set(&mut self, ap: Arc<AccountProvider>, address: Address, password: Password) {
        self.account_provider = ap;
        self.address = Some(address.clone());
        self.password = Some(password);
        debug!(target: "poa", "Setting Engine signer to {:?}", address);
    }

    /// Sign a consensus message hash.
    ///
    /// NOTE(review): when no address has been set this falls back to the
    /// default (zero) address rather than returning an error — confirm intended.
    pub fn sign(&self, hash: H256) -> Result<Signature, SignError> {
        self.account_provider.sign(self.address.clone().unwrap_or_else(Default::default).into(), self.password.clone(), hash.clone().into())
    }

    #[allow(dead_code)]
    /// Signing address.
    pub fn address(&self) -> Option<Address> {
        self.address.clone()
    }

    /// Signing key.
    pub fn key(&self) -> Option<PublicKey> {
        self.key.clone()
    }

    /// Check if the signing address was set.
    pub fn is_some(&self) -> bool {
        self.address.is_some()
    }
}
//! # MountainCar //! //! Currently there are two versions of MountainCar. //! One with a discrete action space and one with a continuous one. //! //! ## Discrete version //! //! The description on the OpenAI page reads: //! //! > *&ldquo;A car is on a one-dimensional track, positioned between two "mountains". The goal //! > is to drive up the mountain on the right; however, the car's engine is not strong enough to //! > scale the mountain in a single pass. Therefore, the only way to succeed is to drive back and //! > forth to build up momentum.&rdquo;* //! //! This discrete version accepts one of three actions: Left, Right or Do Nothing. //! //! ## Continuous version //! //! There is also the continuous version. //! In this version the action space accepts a value between -1.0 and 1.0 and the control can thus //! be more precise. //! //! The description on the OpenAI page has been appended with: //! //! > *&ldquo;... Here, the reward is greater if you spend less energy to reach the goal&rdquo;* //! //! --- //! //! *These environments are taken from //! [OpenAI Gym MountainCar-v0](https://gym.openai.com/envs/MountainCar-v0/) and //! 
[OpenAI Gym MountainCarContinuous-v0](https://gym.openai.com/envs/MountainCarContinuous-v0/).* use gymnarium_base::math::{radians_to_degrees, Position2D, Size2D, Vector2D}; use gymnarium_base::serde::{Deserialize, Serialize}; use gymnarium_base::space::{DimensionBoundaries, DimensionValue}; use gymnarium_base::{ ActionSpace, AgentAction, Environment, EnvironmentState, ObservationSpace, Seed, ToActionMapper, }; use gymnarium_visualisers_base::input::{Button, ButtonState, Input, Key}; use gymnarium_visualisers_base::{ Color, DrawableEnvironment, Geometry2D, LineShape, TwoDimensionalDrawableEnvironment, Viewport2D, Viewport2DModification, }; use rand::distributions::{Distribution, Uniform}; use rand_chacha::rand_core::SeedableRng; use rand_chacha::ChaCha20Rng; /* --- --- --- GENERAL --- --- --- */ #[derive(Debug)] pub enum MountainCarError { GivenActionDoesNotFitActionSpace, } impl std::fmt::Display for MountainCarError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::GivenActionDoesNotFitActionSpace => { write!(f, "Given Action does not fit ActionSpace") } } } } impl std::error::Error for MountainCarError {} const MINIMUM_POSITION: f32 = -1.2f32; const MAXIMUM_POSITION: f32 = 0.6f32; const MAXIMUM_SPEED: f32 = 0.07f32; const GRAVITY: f32 = 0.0025f32; const FORCE: f32 = 0.001f32; const GOAL_POSITION: f32 = 0.5f32; /* --- --- --- DISCRETE MOUNTAIN CAR --- --- --- */ /// The goal is to drive up the mountain on the right. /// /// The agent (a car) is started at the bottom of a valley. For any given state the agent may /// choose to accelerate to the left, right or cease any acceleration. /// /// *(Code semantic copied from <https://github.com/openai/gym/blob/master/gym/envs/classic_control/mountain_car.py>.)* /// /// ## Source /// The environment appeared first in Andrew Moore's PhD Thesis (1990). 
///
/// ## Observation
/// Space-Structure: `[2]`
///
/// | Index | Observation | Min | Max |
/// | --- | --- | --- | --- |
/// | `[0]` | Car Position | `-1.2` | `0.6` |
/// | `[1]` | Car Velocity | `-0.07` | `0.07` |
///
/// ## Actions
/// Space-Structure: `[1]`
///
/// | Value | Action |
/// | --- | --- |
/// | `[-1]` | Accelerate to the Left |
/// | `[0]` | Don't accelerate |
/// | `[1]` | Accelerate to the Right |
///
/// ## Reward
/// Reward of `0` is awarded if the agent reached the flag (position = `0.5`) on top of the mountain.
///
/// Reward of `-1` is awarded if the position of the agent is less than `0.5`.
///
/// ## Starting State
/// The position of the car is assigned a uniform random value in `[-0.6, -0.4]`.
///
/// The starting velocity of the car is always assigned to `0`.
///
/// ## Episode Termination
/// - The car position is more than `0.5`
/// - Episode length is greater than `200`
///
pub struct MountainCar {
    // Minimum velocity the car must additionally have at the goal position
    // before an episode counts as done (see `step`).
    goal_velocity: f64,
    // Current position along the track, kept within [MINIMUM_POSITION, MAXIMUM_POSITION].
    position: f32,
    // Current velocity, kept within [-MAXIMUM_SPEED, MAXIMUM_SPEED].
    velocity: f32,
    // Seed last used to build `rng`; kept so `store` can persist it.
    last_seed: Seed,
    // Deterministic RNG; `reset` draws the starting position from it.
    rng: ChaCha20Rng,
}

impl MountainCar {
    /// Creates a new environment seeded from a freshly drawn random seed.
    ///
    /// The car starts at position `-0.5` with zero velocity; call `reset`
    /// to draw the randomised starting position.
    pub fn new(goal_velocity: f64) -> Self {
        let last_seed = Seed::new_random();
        Self {
            goal_velocity,
            position: -0.5f32,
            velocity: 0f32,
            last_seed: last_seed.clone(),
            rng: ChaCha20Rng::from_seed(last_seed.into()),
        }
    }
}

impl Environment<MountainCarError, f64, (), MountainCarStorage> for MountainCar {
    // One integer action dimension: -1 (left), 0 (coast), 1 (right).
    fn action_space() -> ActionSpace {
        ActionSpace::simple(vec![DimensionBoundaries::Integer(-1, 1)])
    }

    // [position, velocity] with the ranges documented on the struct.
    fn observation_space() -> ObservationSpace {
        ObservationSpace::simple(vec![
            DimensionBoundaries::Float(MINIMUM_POSITION, MAXIMUM_POSITION),
            DimensionBoundaries::Float(-MAXIMUM_SPEED, MAXIMUM_SPEED),
        ])
    }

    fn suggested_episode_steps_count() -> Option<u128> {
        Some(200)
    }

    /// Re-seeds the RNG from the given seed, or from a new random one when
    /// `None` is passed.
    fn reseed(&mut self, random_seed: Option<Seed>) -> Result<(), MountainCarError> {
        if let Some(seed) = random_seed {
            self.last_seed = seed;
            self.rng = ChaCha20Rng::from_seed(self.last_seed.clone().into());
        } else {
            self.last_seed = Seed::new_random();
            self.rng = ChaCha20Rng::from_seed(self.last_seed.clone().into());
        }
        Ok(())
    }

    /// Draws a new starting position in `[-0.6, -0.4]` and zeroes the velocity.
    fn reset(&mut self) -> Result<EnvironmentState, MountainCarError> {
        self.position = Uniform::new_inclusive(-0.6f32, -0.4f32).sample(&mut self.rng);
        self.velocity = 0f32;
        Ok(self.state())
    }

    fn state(&self) -> EnvironmentState {
        EnvironmentState::simple(vec![
            DimensionValue::Float(self.position),
            DimensionValue::Float(self.velocity),
        ])
    }

    /// Advances the simulation by one step.
    ///
    /// Returns `(state, reward, done, ())`; the reward is `0` on the step
    /// that reaches the goal and `-1` otherwise.
    fn step(
        &mut self,
        action: &AgentAction,
    ) -> Result<(EnvironmentState, f64, bool, ()), MountainCarError> {
        if !Self::action_space().contains(action) {
            Err(MountainCarError::GivenActionDoesNotFitActionSpace)
        } else {
            let direction = action[&[0]].expect_integer();
            // Physics update: the action pushes with FORCE while gravity pulls
            // along the slope (the drawn track height follows sin(3x)).
            self.velocity += direction as f32 * FORCE + (3f32 * self.position).cos() * (-GRAVITY);
            self.velocity = clamp(self.velocity, -MAXIMUM_SPEED, MAXIMUM_SPEED);
            self.position += self.velocity;
            self.position = clamp(self.position, MINIMUM_POSITION, MAXIMUM_POSITION);
            // The left wall is inelastic: hitting it kills all leftward momentum.
            if self.position == MINIMUM_POSITION && self.velocity < 0f32 {
                self.velocity = 0f32;
            }
            let done = self.position >= GOAL_POSITION && self.velocity >= self.goal_velocity as f32;
            let reward = if done { 0f64 } else { -1.0f64 };
            Ok((self.state(), reward, done, ()))
        }
    }

    /// Restores a snapshot created by `store`, including the exact RNG
    /// stream position so the random sequence resumes where it left off.
    fn load(&mut self, data: MountainCarStorage) -> Result<(), MountainCarError> {
        self.goal_velocity = data.goal_velocity;
        self.position = data.position;
        self.velocity = data.velocity;
        self.last_seed = data.last_seed.clone();
        self.rng = ChaCha20Rng::from_seed(self.last_seed.clone().into());
        self.rng.set_word_pos(data.rng_word_pos);
        Ok(())
    }

    /// Snapshots the full environment state (see `load`).
    fn store(&self) -> MountainCarStorage {
        MountainCarStorage {
            goal_velocity: self.goal_velocity,
            position: self.position,
            velocity: self.velocity,
            last_seed: self.last_seed.clone(),
            rng_word_pos: self.rng.get_word_pos(),
        }
    }

    fn close(&mut self) -> Result<(), MountainCarError> {
        Ok(())
    }
}

impl DrawableEnvironment for MountainCar {
    // Render at the classic 60 frames per second.
    fn suggested_rendered_steps_per_second() -> Option<f64> {
        Some(60f64)
    }
}

impl TwoDimensionalDrawableEnvironment<MountainCarError>
for MountainCar {
    /// Renders the track, the car and the goal flag as 2D geometries.
    fn draw_two_dimensional(&self) -> Result<Vec<Geometry2D>, MountainCarError> {
        // Track height (world units) as a function of the x position.
        let height_calculator: fn(f64) -> f64 = |x| (3f64 * x).sin() * 0.45f64 + 0.55f64;

        // render
        let screen_width = 600f32;
        let world_width = MAXIMUM_POSITION - MINIMUM_POSITION;
        let scale = screen_width / world_width;
        let carwidth = 40f64;
        let carheight = 20f64;

        // track: a polyline sampled at 101 evenly spaced x positions.
        let track = Geometry2D::polyline(
            (0..=100)
                .into_iter()
                .map(|index| {
                    index as f32 / 100f32 * (MAXIMUM_POSITION - MINIMUM_POSITION) + MINIMUM_POSITION
                })
                .map(|x| x as f64)
                .map(|x| {
                    Position2D::with(
                        (x - MINIMUM_POSITION as f64) * scale as f64,
                        height_calculator(x) * scale as f64,
                    )
                })
                .collect(),
        )
        .line_shape(LineShape::Round)
        .line_or_border_color(Color::black())
        .line_or_border_width(2f64);

        // car
        let clearance = 10f64;
        let (l, r, t, b) = (-carwidth / 2f64, carwidth / 2f64, carheight, 0f64);
        let chassis = Geometry2D::polygon(vec![
            Position2D::with(l, b),
            Position2D::with(l, t),
            Position2D::with(r, t),
            Position2D::with(r, b),
        ])
        .move_by(Vector2D::with(0f64, clearance));
        // (255 * 0.5) as u8 == 127 — the same grey the back wheel hard-codes.
        let gray = (255f64 * 0.5f64) as u8;
        let front_wheel = Geometry2D::circle(Position2D::zero(), carheight / 2.5f64)
            .fill_color(Color::with(gray, gray, gray, 255))
            .move_by(Vector2D::with(carwidth / 4f64, clearance));
        let back_wheel = Geometry2D::circle(Position2D::zero(), carheight / 2.5f64)
            .fill_color(Color::with(127, 127, 127, 255))
            .move_by(Vector2D::with(-carwidth / 4f64, clearance));
        // Tilt the car so it follows the slope of the track at its position.
        let car = Geometry2D::group(vec![chassis, front_wheel, back_wheel])
            .move_by(Vector2D::with(
                (self.position - MINIMUM_POSITION) as f64 * scale as f64,
                height_calculator(self.position as f64) * scale as f64,
            ))
            .rotate_around_self(radians_to_degrees((3f64 * self.position as f64).cos()));

        // flag marking the goal position
        let flagx = (GOAL_POSITION - MINIMUM_POSITION) as f64 * scale as f64;
        let flagy1 = height_calculator(GOAL_POSITION as f64) * scale as f64;
        let flagy2 = flagy1 + 50f64;
        let flagpole = Geometry2D::line(
            Position2D::with(flagx, flagy1),
            Position2D::with(flagx, flagy2),
        )
        .line_or_border_color(Color::black())
        .line_or_border_width(1f64);
        let flag_color = (255f64 * 0.8f64) as u8;
        let flag = Geometry2D::polygon(vec![
            Position2D::with(flagx, flagy2),
            Position2D::with(flagx, flagy2 - 10f64),
            Position2D::with(flagx + 25f64, flagy2 - 5f64),
        ])
        .fill_color(Color::with(flag_color, flag_color, 0, 255));

        Ok(vec![track, car, flagpole, flag])
    }

    /// A 600x400 world viewport centred on (300, 200).
    fn preferred_view(&self) -> Option<(Viewport2D, Viewport2DModification)> {
        Some((
            Viewport2D::with(
                Position2D::with(300f64, 200f64),
                Size2D::with(600f64, 400f64),
            ),
            Viewport2DModification::KeepAspectRatioAndScissorRemains,
        ))
    }

    fn preferred_background_color(&self) -> Option<Color> {
        Some(Color::white())
    }
}

/// Serialisable snapshot of a `MountainCar`, produced by `store` and
/// consumed by `load`.
#[derive(Serialize, Deserialize)]
pub struct MountainCarStorage {
    goal_velocity: f64,
    position: f32,
    velocity: f32,
    last_seed: Seed,
    // Stream position inside the ChaCha20 keystream, so `load` can resume
    // the RNG exactly where it was when the snapshot was taken.
    rng_word_pos: u128,
}

/// Maps keyboard input (arrow keys) to discrete MountainCar actions.
#[derive(Default)]
pub struct MountainCarInputToActionMapper {
    // Currently-held state of the Left / Right arrow keys.
    left_pressed: bool,
    right_pressed: bool,
}

impl ToActionMapper<Vec<Input>, MountainCarError> for MountainCarInputToActionMapper {
    /// Tracks key press/release events and emits `-1`, `0` or `1`:
    /// the held direction wins; both-or-neither pressed yields `0`.
    fn map(&mut self, inputs: &Vec<Input>) -> Result<AgentAction, MountainCarError> {
        for input in inputs {
            if let Input::Button(button_args) = input {
                if let Button::Keyboard(key) = button_args.button {
                    match key {
                        Key::Left => {
                            self.left_pressed = button_args.state == ButtonState::Press;
                        }
                        Key::Right => {
                            self.right_pressed = button_args.state == ButtonState::Press;
                        }
                        _ => (),
                    }
                }
            }
        }
        Ok(AgentAction::simple(vec![DimensionValue::from(
            if self.left_pressed && !self.right_pressed {
                -1
            } else if self.right_pressed && !self.left_pressed {
                1
            } else {
                0
            },
        )]))
    }
}

/* --- --- --- CONTINUOUS MOUNTAIN CAR --- --- --- */

/// Continuous-action variant of MountainCar.
///
/// NOTE(review): every trait method below is still `todo!()` — this
/// environment is not implemented yet.
pub struct MountainCarContinuous;

impl Environment<MountainCarError, f64, (), MountainCarStorage> for MountainCarContinuous {
    fn action_space() -> ActionSpace {
        todo!()
    }

    fn observation_space() -> ObservationSpace {
        todo!()
    }

    fn suggested_episode_steps_count() -> Option<u128> {
        todo!()
    }

    fn reseed(&mut self, _random_seed: Option<Seed>) ->
Result<(), MountainCarError> {
        todo!()
    }

    fn reset(&mut self) -> Result<EnvironmentState, MountainCarError> {
        todo!()
    }

    fn state(&self) -> EnvironmentState {
        todo!()
    }

    fn step(
        &mut self,
        _action: &AgentAction,
    ) -> Result<(EnvironmentState, f64, bool, ()), MountainCarError> {
        todo!()
    }

    fn load(&mut self, _data: MountainCarStorage) -> Result<(), MountainCarError> {
        todo!()
    }

    fn store(&self) -> MountainCarStorage {
        todo!()
    }

    fn close(&mut self) -> Result<(), MountainCarError> {
        todo!()
    }
}

/* --- --- --- HELPER FUNCTIONS --- --- --- */

/// Smaller of two values; yields `b` whenever `a < b` does not hold
/// (which also covers incomparable pairs such as NaNs).
fn min<V: PartialOrd + Copy>(a: V, b: V) -> V {
    match a < b {
        true => a,
        false => b,
    }
}

/// Larger of two values; yields `b` whenever `a > b` does not hold.
fn max<V: PartialOrd + Copy>(a: V, b: V) -> V {
    match a > b {
        true => a,
        false => b,
    }
}

/// Restricts `value` to the closed interval [`minimum`, `maximum`].
fn clamp<V: PartialOrd + Copy>(value: V, minimum: V, maximum: V) -> V {
    let lower_bounded = max(value, minimum);
    min(lower_bounded, maximum)
}
// 7. Reverse Integer
// https://leetcode-cn.com/problems/reverse-integer

pub struct Solution;

impl Solution {
    /// Reverses the decimal digits of `x`.
    ///
    /// Returns `0` when the reversed value would overflow `i32`
    /// (e.g. `reverse(1534236469)`), as the problem requires.
    /// Negative inputs work because `%` in Rust keeps the sign of the
    /// dividend, so each popped digit carries the sign with it.
    pub fn reverse(x: i32) -> i32 {
        let mut x = x;
        let mut rev: i32 = 0;
        while x != 0 {
            let pop = x % 10;
            x /= 10;
            // checked_* replaces the original hand-rolled bound checks
            // against `max_value() / 10` and the magic last digits 7 / -8.
            rev = match rev.checked_mul(10).and_then(|r| r.checked_add(pop)) {
                Some(next) => next,
                None => return 0,
            };
        }
        rev
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_reverse() {
        assert_eq!(Solution::reverse(123), 321);
        assert_eq!(Solution::reverse(-123), -321);
        assert_eq!(Solution::reverse(120), 21);
        // Overflow in either direction must yield 0.
        assert_eq!(Solution::reverse(1_534_236_469), 0);
        assert_eq!(Solution::reverse(i32::MIN), 0);
    }
}
/*
Copyright ⓒ 2016 rust-custom-derive contributors.

Licensed under the MIT license (see LICENSE or <http://opensource.org
/licenses/MIT>) or the Apache License, Version 2.0 (see LICENSE of
<http://www.apache.org/licenses/LICENSE-2.0>), at your option. All
files in the project carrying such notice may not be copied, modified,
or distributed except according to those terms.
*/
#![cfg_attr(feature="use-parse-generics-poc", feature(plugin))]
#![cfg_attr(feature="use-parse-generics-poc", plugin(parse_generics_poc))]

#[macro_use] extern crate parse_generics_shim;

/// Forces the expansion to be parsed as a single item.
macro_rules! as_item {
    ($i:item) => { $i }
}

/// "Assert EQual Ignoring WhiteSpace": compares two string expressions
/// word-by-word and panics with positional context on the first mismatch.
///
/// NOTE(review): `zip` stops at the shorter word sequence, so extra trailing
/// words on either side are not detected — confirm this is intended.
macro_rules! aeqiws {
    ($lhs:expr, $rhs:expr) => {
        {
            let lhs = $lhs;
            let rhs = $rhs;
            // Fix: split the *bindings*, not the macro arguments. The
            // original re-expanded `$lhs`/`$rhs` here, evaluating each
            // expression twice.
            let lhs_words = lhs.split_whitespace();
            let rhs_words = rhs.split_whitespace();
            for (i, (l, r)) in lhs_words.zip(rhs_words).enumerate() {
                if l != r {
                    panic!("assertion failed: `(left == right)` (left: `{:?}`, right: `{:?}`, at word {}, `{:?}` != `{:?}`)", lhs, rhs, i, l, r);
                }
            }
        }
    };
}

/// Runs `parse_generics_shim!` with `stringify!` as the continuation, so the
/// parsed output can be compared as text.
macro_rules! pgts {
    ($fields:tt, $($body:tt)*) => {
        parse_generics_shim! {
            $fields,
            then stringify!(),
            $($body)*
        }
    };
}

#[test]
fn test_no_generics() {
    aeqiws!(
        pgts!({..}, X),
        r#" { constr : [ ] , params : [ ] , ltimes : [ ] , tnames : [ ] , .. } , X "#
    );
    aeqiws!(
        pgts!({..}, <> X),
        r#" { constr : [ ] , params : [ ] , ltimes : [ ] , tnames : [ ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ constr, params, ltimes, tnames }, X),
        r#" { constr : [ ] , params : [ ] , ltimes : [ ] , tnames : [ ] , } , X "#
    );
    aeqiws!(
        pgts!({ constr, params, ltimes, tnames }, <> X),
        r#" { constr : [ ] , params : [ ] , ltimes : [ ] , tnames : [ ] , } , X "#
    );
}

#[test]
fn test_simple_ty_params() {
    aeqiws!(
        pgts!({ .. }, <T> X),
        r#" { constr : [ T , ] , params : [ T , ] , ltimes : [ ] , tnames : [ T , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <T, U> X),
        r#" { constr : [ T , U , ] , params : [ T , U , ] , ltimes : [ ] , tnames : [ T , U , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ ..
}, <T, U,> X),
        r#" { constr : [ T , U , ] , params : [ T , U , ] , ltimes : [ ] , tnames : [ T , U , ] , .. } , X "#
    );
}

// Constrained type parameters. Note from the expected strings: the shim emits
// `?Sized` as the separate tokens `? Sized`.
#[test]
fn test_constr_ty_params() {
    aeqiws!(
        pgts!({ .. }, <T: Copy> X),
        r#" { constr : [ T : Copy , ] , params : [ T , ] , ltimes : [ ] , tnames : [ T , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <T: Copy,> X),
        r#" { constr : [ T : Copy , ] , params : [ T , ] , ltimes : [ ] , tnames : [ T , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <T: Copy, U: Clone> X),
        r#" { constr : [ T : Copy , U : Clone , ] , params : [ T , U , ] , ltimes : [ ] , tnames : [ T , U , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <T: Copy, U, V: Clone,> X),
        r#" { constr : [ T : Copy , U , V : Clone , ] , params : [ T , U , V , ] , ltimes : [ ] , tnames : [ T , U , V , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <T: 'a, U: 'a + Copy> X),
        r#" { constr : [ T : 'a , U : 'a + Copy , ] , params : [ T , U , ] , ltimes : [ ] , tnames : [ T , U , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <T: 'a, U: Copy + 'a> X),
        r#" { constr : [ T : 'a , U : Copy + 'a , ] , params : [ T , U , ] , ltimes : [ ] , tnames : [ T , U , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <T: ?Sized> X),
        r#" { constr : [ T : ? Sized , ] , params : [ T , ] , ltimes : [ ] , tnames : [ T , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <T: ?Sized + 'a + Copy> X),
        r#" { constr : [ T : ? Sized + 'a + Copy , ] , params : [ T , ] , ltimes : [ ] , tnames : [ T , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <T: 'a + ?Sized + Copy> X),
        r#" { constr : [ T : 'a + ? Sized + Copy , ] , params : [ T , ] , ltimes : [ ] , tnames : [ T , ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <T: 'a + Copy + ?Sized> X),
        r#" { constr : [ T : 'a + Copy + ? Sized , ] , params : [ T , ] , ltimes : [ ] , tnames : [ T , ] , .. } , X "#
    );
}

#[test]
fn test_simple_lt_params() {
    aeqiws!(
        pgts!({ .. }, <'a> X),
        r#" { constr : [ 'a , ] , params : [ 'a , ] , ltimes : [ 'a , ] , tnames : [ ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <'a,> X),
        r#" { constr : [ 'a , ] , params : [ 'a , ] , ltimes : [ 'a , ] , tnames : [ ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <'a, 'b> X),
        r#" { constr : [ 'a , 'b , ] , params : [ 'a , 'b , ] , ltimes : [ 'a , 'b , ] , tnames : [ ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <'a, 'b, 'i, 'z,> X),
        r#" { constr : [ 'a , 'b , 'i , 'z , ] , params : [ 'a , 'b , 'i , 'z , ] , ltimes : [ 'a , 'b , 'i , 'z , ] , tnames : [ ] , .. } , X "#
    );
}

#[test]
fn test_constr_lt_params() {
    aeqiws!(
        pgts!({ .. }, <'a: 'b> X),
        r#" { constr : [ 'a : 'b , ] , params : [ 'a , ] , ltimes : [ 'a , ] , tnames : [ ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <'a: 'b + 'c> X),
        r#" { constr : [ 'a : 'b + 'c , ] , params : [ 'a , ] , ltimes : [ 'a , ] , tnames : [ ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <'a: 'b + 'c,> X),
        r#" { constr : [ 'a : 'b + 'c , ] , params : [ 'a , ] , ltimes : [ 'a , ] , tnames : [ ] , .. } , X "#
    );
    aeqiws!(
        pgts!({ .. }, <'a: 'b + 'c, 'b: 'c, 'c> X),
        r#" { constr : [ 'a : 'b + 'c , 'b : 'c , 'c , ] , params : [ 'a , 'b , 'c , ] , ltimes : [ 'a , 'b , 'c , ] , tnames : [ ] , .. } , X "#
    );
    // The poc plugin and the shim tokenise `&'a str` slightly differently
    // (`&'a` vs `& 'a`), hence the cfg! split below.
    aeqiws!(
        pgts!({ .. }, <T: ?Sized + Clone + Copy + for<'a> From<&'a str>> X),
        if cfg!(feature="parse-generics-poc") {
            r#" { constr : [ T : ? Sized + Clone + Copy + for < 'a > From < &'a str > , ] , params : [ T , ] , ltimes : [ ] , tnames : [ T , ] , .. } , X "#
        } else {
            r#" { constr : [ T : ? Sized + Clone + Copy + for < 'a > From < & 'a str > , ] , params : [ T , ] , ltimes : [ ] , tnames : [ T , ] , .. } , X "#
        }
    );
}

// Feeds the captured constraint list back into a generic `fn` definition to
// prove the shim's output re-parses as real generics.
#[test]
fn test_passthru() {
    macro_rules! emit {
        (
            $fn_name:ident
            {
                constr: [$($constr:tt)*],
                $($_rest:tt)*
            },
            $($_tail:tt)*
        ) => {
            as_item! {
                #[allow(dead_code)]
                fn $fn_name<$($constr)*>() { panic!("BOOM!"); }
            }
        };
    }

    parse_generics_shim! { { .. }, then emit!{a}, X }
    parse_generics_shim! { { .. }, then emit!{b}, <> X }
    parse_generics_shim! { { .. }, then emit!{c}, <T> X }
    parse_generics_shim! { { .. }, then emit!{d}, <T,> X }
    parse_generics_shim! { { .. }, then emit!{e}, <T, U> X }
    parse_generics_shim! { { .. }, then emit!{f}, <T, U,> X }
    parse_generics_shim! { { .. }, then emit!{g}, <T: Copy> X }
    parse_generics_shim! { { .. }, then emit!{g2}, <T: Copy + Clone> X }
    parse_generics_shim! { { .. }, then emit!{h}, <'a> X }
    parse_generics_shim! { { .. }, then emit!{i}, <'a,> X }
    parse_generics_shim! { { .. }, then emit!{j}, <'a, 'b> X }
    parse_generics_shim! { { .. }, then emit!{k}, <'a, 'b,> X }
    parse_generics_shim! { { .. }, then emit!{l}, <'a, 'b: 'a> X }
    parse_generics_shim! { { .. }, then emit!{l2}, <'a, 'b: 'a, 'c: 'a + 'b> X }
    parse_generics_shim! { { .. }, then emit!{m}, <'a, T: 'a + Copy> X }
    parse_generics_shim! { { .. }, then emit!{m2}, <'a, T: 'a + Copy + Clone> X }
    parse_generics_shim! { { .. }, then emit!{m3}, <'a, T: Copy + 'a> X }
    parse_generics_shim! { { .. }, then emit!{n}, <T: 'static> X }
    parse_generics_shim! { { .. }, then emit!{o}, <T: From<u8>> X }

    let _ = "the rustc parser is stoopid";
}
// Auto-generated register block (svd2rust; see the API links in the doc
// attributes below). Field order and the `_reserved*` gaps mirror the
// hardware register offsets — do not reorder.
#[doc = r"Register block"]
#[repr(C)]
pub struct DOEP0 {
    #[doc = "0x00 - OTG_HS device control OUT endpoint 0 control register"]
    pub ctl: CTL,
    _reserved1: [u8; 0x04],
    #[doc = "0x08 - OTG_HS device endpoint-0 interrupt register"]
    pub int: INT,
    _reserved2: [u8; 0x04],
    // NOTE(review): the description says "endpoint-1" although this is the
    // endpoint-0 block — presumably an upstream SVD typo; verify against the
    // reference manual before relying on the wording.
    #[doc = "0x10 - OTG_HS device endpoint-1 transfer size register"]
    pub tsiz: TSIZ,
    #[doc = "0x14 - OTG_HS device endpoint-0 DMA address register"]
    pub dma: DMA,
}
#[doc = "CTL (rw) register accessor: OTG_HS device control OUT endpoint 0 control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ctl::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ctl::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ctl`] module"]
pub type CTL = crate::Reg<ctl::CTL_SPEC>;
#[doc = "OTG_HS device control OUT endpoint 0 control register"]
pub mod ctl;
#[doc = "INT (rw) register accessor: OTG_HS device endpoint-0 interrupt register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`int::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`int::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`int`] module"]
pub type INT = crate::Reg<int::INT_SPEC>;
#[doc = "OTG_HS device endpoint-0 interrupt register"]
pub mod int;
#[doc = "TSIZ (rw) register accessor: OTG_HS device endpoint-1 transfer size register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tsiz::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tsiz::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`tsiz`] module"]
pub type TSIZ = crate::Reg<tsiz::TSIZ_SPEC>;
#[doc = "OTG_HS device endpoint-1 transfer size register"]
pub mod tsiz;
#[doc = "DMA (rw) register accessor: OTG_HS device endpoint-0 DMA address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dma::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dma::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dma`] module"]
pub type DMA = crate::Reg<dma::DMA_SPEC>;
#[doc = "OTG_HS device endpoint-0 DMA address register"]
pub mod dma;
mod tools;
mod car;

extern crate nalgebra as na;

use ggez::graphics;
use ggez::{Context, GameResult};
use ggez::event::{self, KeyCode, KeyMods};
use na::Point2;

use car::Car;

/// Top-level game state: just the player-controlled car.
struct MainState {
    car: Car,
}

impl MainState {
    /// Builds the initial state with the car placed at (300, 200).
    fn new(_ctx: &mut Context) -> GameResult<MainState> {
        let s = MainState {
            car: Car::new(Point2::new(300.0, 200.0)),
        };
        Ok(s)
    }
}

impl event::EventHandler for MainState {
    fn update(&mut self, ctx: &mut Context) -> GameResult {
        use ggez::timer;
        // Frame time in seconds, so the car physics are frame-rate independent.
        let dt = timer::duration_to_f64(timer::delta(ctx)) as f32;
        self.car.update(dt);
        Ok(())
    }

    fn draw(&mut self, ctx: &mut Context) -> GameResult {
        graphics::clear(ctx, [0.0, 0.0, 0.0, 1.0].into());
        self.car.draw(ctx)?;
        graphics::present(ctx)?;
        Ok(())
    }

    /// W/S set forward/backward throttle, A/D steer left/right.
    // Fix: `repeat` was unused (compiler warning) — underscore it to match
    // the `_ctx`/`_keymod` convention used elsewhere in this impl.
    fn key_down_event(
        &mut self,
        _ctx: &mut Context,
        keycode: KeyCode,
        _keymod: KeyMods,
        _repeat: bool,
    ) {
        match keycode {
            KeyCode::W => self.car.set_throttle(1.0),
            KeyCode::S => self.car.set_throttle(-1.0),
            KeyCode::A => self.car.set_steering(-1.0),
            KeyCode::D => self.car.set_steering(1.0),
            _ => (),
        }
    }

    /// Releasing a drive/steer key returns that control to neutral.
    fn key_up_event(&mut self, _ctx: &mut Context, keycode: KeyCode, _keymods: KeyMods) {
        match keycode {
            KeyCode::W | KeyCode::S => self.car.reset_throttle(),
            KeyCode::A | KeyCode::D => self.car.reset_steering(),
            _ => (),
        }
    }
}

/// Creates the ggez context and runs the event loop until the window closes.
pub fn main() -> GameResult {
    let cb = ggez::ContextBuilder::new("Skrr", "ggez");
    let (ctx, event_loop) = &mut cb.build()?;
    let state = &mut MainState::new(ctx)?;
    event::run(ctx, event_loop, state)
}
// Auto-generated SYSCFG CFGR3 register accessors (svd2rust). The reader/writer
// types below follow the same pattern for every field: an `_A` enum of the
// documented values, an `_R` reader and an `_W` writer.
#[doc = "Register `CFGR3` reader"]
pub type R = crate::R<CFGR3_SPEC>;
#[doc = "Register `CFGR3` writer"]
pub type W = crate::W<CFGR3_SPEC>;
// --- EN_VREFINT (bit 0) ---
#[doc = "Field `EN_VREFINT` reader - VREFINT enable and scaler control for COMP2 enable bit"]
pub type EN_VREFINT_R = crate::BitReader<EN_VREFINT_A>;
#[doc = "VREFINT enable and scaler control for COMP2 enable bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EN_VREFINT_A {
    #[doc = "0: VREFINT voltage disabled in low-power mode (if ULP=1) and scaler for COMP2 disabled"]
    Disabled = 0,
    #[doc = "1: VREFINT voltage enabled in low-power mode and scaler for COMP2 enabled"]
    Enabled = 1,
}
impl From<EN_VREFINT_A> for bool {
    #[inline(always)]
    fn from(variant: EN_VREFINT_A) -> Self {
        variant as u8 != 0
    }
}
impl EN_VREFINT_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EN_VREFINT_A {
        match self.bits {
            false => EN_VREFINT_A::Disabled,
            true => EN_VREFINT_A::Enabled,
        }
    }
    #[doc = "VREFINT voltage disabled in low-power mode (if ULP=1) and scaler for COMP2 disabled"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == EN_VREFINT_A::Disabled
    }
    #[doc = "VREFINT voltage enabled in low-power mode and scaler for COMP2 enabled"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == EN_VREFINT_A::Enabled
    }
}
#[doc = "Field `EN_VREFINT` writer - VREFINT enable and scaler control for COMP2 enable bit"]
pub type EN_VREFINT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, EN_VREFINT_A>;
impl<'a, REG, const O: u8> EN_VREFINT_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "VREFINT voltage disabled in low-power mode (if ULP=1) and scaler for COMP2 disabled"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(EN_VREFINT_A::Disabled)
    }
    #[doc = "VREFINT voltage enabled in low-power mode and scaler for COMP2 enabled"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(EN_VREFINT_A::Enabled)
    }
}
// --- SEL_VREF_OUT (bits 4:5, two-bit field) ---
#[doc = "Field `SEL_VREF_OUT` reader - BGAP_ADC connection bit"]
pub type SEL_VREF_OUT_R = crate::FieldReader<SEL_VREF_OUT_A>;
#[doc = "BGAP_ADC connection bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum SEL_VREF_OUT_A {
    #[doc = "0: no pad connected"]
    NoConnection = 0,
    #[doc = "1: PB0 connected"]
    Pb0 = 1,
    #[doc = "2: PB1 connected"]
    Pb1 = 2,
    #[doc = "3: PB0 and PB1 connected"]
    Both = 3,
}
impl From<SEL_VREF_OUT_A> for u8 {
    #[inline(always)]
    fn from(variant: SEL_VREF_OUT_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for SEL_VREF_OUT_A {
    type Ux = u8;
}
impl SEL_VREF_OUT_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SEL_VREF_OUT_A {
        // All four 2-bit patterns are covered, so the wildcard is unreachable.
        match self.bits {
            0 => SEL_VREF_OUT_A::NoConnection,
            1 => SEL_VREF_OUT_A::Pb0,
            2 => SEL_VREF_OUT_A::Pb1,
            3 => SEL_VREF_OUT_A::Both,
            _ => unreachable!(),
        }
    }
    #[doc = "no pad connected"]
    #[inline(always)]
    pub fn is_no_connection(&self) -> bool {
        *self == SEL_VREF_OUT_A::NoConnection
    }
    #[doc = "PB0 connected"]
    #[inline(always)]
    pub fn is_pb0(&self) -> bool {
        *self == SEL_VREF_OUT_A::Pb0
    }
    #[doc = "PB1 connected"]
    #[inline(always)]
    pub fn is_pb1(&self) -> bool {
        *self == SEL_VREF_OUT_A::Pb1
    }
    #[doc = "PB0 and PB1 connected"]
    #[inline(always)]
    pub fn is_both(&self) -> bool {
        *self == SEL_VREF_OUT_A::Both
    }
}
#[doc = "Field `SEL_VREF_OUT` writer - BGAP_ADC connection bit"]
pub type SEL_VREF_OUT_W<'a, REG, const O: u8> =
    crate::FieldWriterSafe<'a, REG, 2, O, SEL_VREF_OUT_A>;
impl<'a, REG, const O: u8> SEL_VREF_OUT_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "no pad connected"]
    #[inline(always)]
    pub fn no_connection(self) -> &'a mut crate::W<REG> {
        self.variant(SEL_VREF_OUT_A::NoConnection)
    }
    #[doc = "PB0 connected"]
    #[inline(always)]
    pub fn pb0(self) -> &'a mut crate::W<REG> {
        self.variant(SEL_VREF_OUT_A::Pb0)
    }
    #[doc = "PB1 connected"]
    #[inline(always)]
    pub fn pb1(self) -> &'a mut crate::W<REG> {
        self.variant(SEL_VREF_OUT_A::Pb1)
    }
    #[doc = "PB0 and PB1 connected"]
    #[inline(always)]
    pub fn both(self) -> &'a mut crate::W<REG> {
        self.variant(SEL_VREF_OUT_A::Both)
    }
}
// --- ENBUF_VREFINT_ADC (bit 8) ---
#[doc = "Field `ENBUF_VREFINT_ADC` reader - VREFINT reference for ADC enable bit"]
pub type ENBUF_VREFINT_ADC_R = crate::BitReader<ENBUF_VREFINT_ADC_A>;
#[doc = "VREFINT reference for ADC enable bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ENBUF_VREFINT_ADC_A {
    #[doc = "0: Disables the buffer used to generate VREFINT reference for the ADC"]
    Disabled = 0,
    #[doc = "1: Enables the buffer used to generate VREFINT reference for the ADC"]
    Enabled = 1,
}
impl From<ENBUF_VREFINT_ADC_A> for bool {
    #[inline(always)]
    fn from(variant: ENBUF_VREFINT_ADC_A) -> Self {
        variant as u8 != 0
    }
}
impl ENBUF_VREFINT_ADC_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ENBUF_VREFINT_ADC_A {
        match self.bits {
            false => ENBUF_VREFINT_ADC_A::Disabled,
            true => ENBUF_VREFINT_ADC_A::Enabled,
        }
    }
    #[doc = "Disables the buffer used to generate VREFINT reference for the ADC"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == ENBUF_VREFINT_ADC_A::Disabled
    }
    #[doc = "Enables the buffer used to generate VREFINT reference for the ADC"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == ENBUF_VREFINT_ADC_A::Enabled
    }
}
#[doc = "Field `ENBUF_VREFINT_ADC` writer - VREFINT reference for ADC enable bit"]
pub type ENBUF_VREFINT_ADC_W<'a, REG, const O: u8> =
    crate::BitWriter<'a, REG, O, ENBUF_VREFINT_ADC_A>;
impl<'a, REG, const O: u8> ENBUF_VREFINT_ADC_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Disables the buffer used to generate VREFINT reference for the ADC"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(ENBUF_VREFINT_ADC_A::Disabled)
    }
    #[doc = "Enables the buffer used to generate VREFINT reference for the ADC"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(ENBUF_VREFINT_ADC_A::Enabled)
    }
}
// --- ENBUF_SENSOR_ADC (bit 9) ---
#[doc = "Field `ENBUF_SENSOR_ADC` reader - Sensor reference for ADC enable bit"]
pub type ENBUF_SENSOR_ADC_R = crate::BitReader<ENBUF_SENSOR_ADC_A>;
#[doc = "Sensor reference for ADC enable bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ENBUF_SENSOR_ADC_A {
    #[doc = "0: Disables the buffer used to generate VREFINT reference for the temperature sensor"]
    Disabled = 0,
    #[doc = "1: Enables the buffer used to generate VREFINT reference for the temperature sensor"]
    Enabled = 1,
}
impl From<ENBUF_SENSOR_ADC_A> for bool {
    #[inline(always)]
    fn from(variant: ENBUF_SENSOR_ADC_A) -> Self {
        variant as u8 != 0
    }
}
impl ENBUF_SENSOR_ADC_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ENBUF_SENSOR_ADC_A {
        match self.bits {
            false => ENBUF_SENSOR_ADC_A::Disabled,
            true => ENBUF_SENSOR_ADC_A::Enabled,
        }
    }
    #[doc = "Disables the buffer used to generate VREFINT reference for the temperature sensor"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == ENBUF_SENSOR_ADC_A::Disabled
    }
    #[doc = "Enables the buffer used to generate VREFINT reference for the temperature sensor"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == ENBUF_SENSOR_ADC_A::Enabled
    }
}
#[doc = "Field `ENBUF_SENSOR_ADC` writer - Sensor reference for ADC enable bit"]
pub type ENBUF_SENSOR_ADC_W<'a, REG, const O: u8> =
    crate::BitWriter<'a, REG, O, ENBUF_SENSOR_ADC_A>;
impl<'a, REG, const O: u8> ENBUF_SENSOR_ADC_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Disables the buffer used to generate VREFINT reference for the temperature sensor"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(ENBUF_SENSOR_ADC_A::Disabled)
    }
    #[doc = "Enables the buffer used to generate VREFINT reference for the temperature sensor"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(ENBUF_SENSOR_ADC_A::Enabled)
    }
}
// --- ENBUF_VREFINT_COMP2 (bit 12) ---
#[doc = "Field `ENBUF_VREFINT_COMP2` reader - VREFINT reference for COMP2 scaler enable bit"]
pub type ENBUF_VREFINT_COMP2_R = crate::BitReader<ENBUF_VREFINT_COMP2_A>;
#[doc = "VREFINT reference for COMP2 scaler enable bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ENBUF_VREFINT_COMP2_A {
    #[doc = "0: Disables the buffer used to generate VREFINT references for COMP2"]
    Disabled = 0,
    #[doc = "1: Enables the buffer used to generate VREFINT references for COMP2"]
    Enabled = 1,
}
impl From<ENBUF_VREFINT_COMP2_A> for bool {
    #[inline(always)]
    fn from(variant: ENBUF_VREFINT_COMP2_A) -> Self {
        variant as u8 != 0
    }
}
impl ENBUF_VREFINT_COMP2_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ENBUF_VREFINT_COMP2_A {
        match self.bits {
            false => ENBUF_VREFINT_COMP2_A::Disabled,
            true => ENBUF_VREFINT_COMP2_A::Enabled,
        }
    }
    #[doc = "Disables the buffer used to generate VREFINT references for COMP2"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        *self == ENBUF_VREFINT_COMP2_A::Disabled
    }
    #[doc = "Enables the buffer used to generate VREFINT references for COMP2"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == ENBUF_VREFINT_COMP2_A::Enabled
    }
}
#[doc = "Field `ENBUF_VREFINT_COMP2` writer - VREFINT reference for COMP2 scaler enable bit"]
pub type ENBUF_VREFINT_COMP2_W<'a, REG, const O: u8> =
    crate::BitWriter<'a, REG, O, ENBUF_VREFINT_COMP2_A>;
impl<'a, REG, const O: u8> ENBUF_VREFINT_COMP2_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Disables the buffer used to generate VREFINT references for COMP2"]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut crate::W<REG> {
        self.variant(ENBUF_VREFINT_COMP2_A::Disabled)
    }
    #[doc = "Enables the buffer used to generate VREFINT references for COMP2"]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut crate::W<REG> {
        self.variant(ENBUF_VREFINT_COMP2_A::Enabled)
    }
}
// --- ENREF_HSI48 (bit 13): plain bit, no enumerated values ---
#[doc = "Field `ENREF_HSI48` reader - VREFINT reference for HSI48 oscillator enable bit"]
pub type ENREF_HSI48_R = crate::BitReader;
#[doc = "Field `ENREF_HSI48` writer - VREFINT reference for HSI48 oscillator enable bit"]
pub type ENREF_HSI48_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// --- VREFINT_RDYF (bit 30): status flag, reader only — no writer is generated ---
#[doc = "Field `VREFINT_RDYF` reader - VREFINT ready flag"]
pub type VREFINT_RDYF_R = crate::BitReader<VREFINT_RDYF_A>;
#[doc = "VREFINT ready flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum VREFINT_RDYF_A {
    #[doc = "0: VREFINT OFF"]
    NotReady = 0,
    #[doc = "1: VREFINT ready"]
    Ready = 1,
}
impl From<VREFINT_RDYF_A> for bool {
    #[inline(always)]
    fn from(variant: VREFINT_RDYF_A) -> Self {
        variant as u8 != 0
    }
}
impl VREFINT_RDYF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> VREFINT_RDYF_A {
        match self.bits {
            false => VREFINT_RDYF_A::NotReady,
            true => VREFINT_RDYF_A::Ready,
        }
    }
    #[doc = "VREFINT OFF"]
    #[inline(always)]
    pub fn is_not_ready(&self) -> bool {
        *self == VREFINT_RDYF_A::NotReady
    }
    #[doc = "VREFINT ready"]
    #[inline(always)]
    pub fn is_ready(&self) -> bool {
        *self == VREFINT_RDYF_A::Ready
    }
}
// --- REF_LOCK (bit 31) ---
#[doc = "Field `REF_LOCK` reader - SYSCFG_CFGR3 lock bit"]
pub type REF_LOCK_R = crate::BitReader<REF_LOCK_A>;
#[doc = "SYSCFG_CFGR3 lock bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum REF_LOCK_A {
    #[doc = "0: SYSCFG_CFGR3\\[31:0\\] bits are read/write"]
    ReadWrite = 0,
    #[doc = "1: SYSCFG_CFGR3\\[31:0\\] bits are read-only"]
    ReadOnly = 1,
}
impl From<REF_LOCK_A> for bool {
    #[inline(always)]
    fn from(variant: REF_LOCK_A) -> Self {
        variant as u8 != 0
    }
}
impl REF_LOCK_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> REF_LOCK_A {
        match self.bits {
            false => REF_LOCK_A::ReadWrite,
            true => REF_LOCK_A::ReadOnly,
        }
    }
    #[doc = "SYSCFG_CFGR3\\[31:0\\] bits are read/write"]
    #[inline(always)]
    pub fn is_read_write(&self) -> bool {
        *self == REF_LOCK_A::ReadWrite
    }
    #[doc = "SYSCFG_CFGR3\\[31:0\\] bits are read-only"]
    #[inline(always)]
    pub fn is_read_only(&self) -> bool {
        *self == REF_LOCK_A::ReadOnly
    }
}
#[doc = "Field `REF_LOCK` writer - SYSCFG_CFGR3 lock bit"]
pub type REF_LOCK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, REF_LOCK_A>;
impl<'a, REG, const O: u8> REF_LOCK_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "SYSCFG_CFGR3\\[31:0\\] bits are read/write"]
    #[inline(always)]
    pub fn read_write(self) -> &'a mut crate::W<REG> {
        self.variant(REF_LOCK_A::ReadWrite)
    }
    #[doc = "SYSCFG_CFGR3\\[31:0\\] bits are read-only"]
    #[inline(always)]
    pub fn read_only(self) -> &'a mut crate::W<REG> {
        self.variant(REF_LOCK_A::ReadOnly)
    }
}
// Field accessors on the whole-register reader.
impl R {
    #[doc = "Bit 0 - VREFINT enable and scaler control for COMP2 enable bit"]
    #[inline(always)]
    pub fn en_vrefint(&self) -> EN_VREFINT_R {
        EN_VREFINT_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bits 4:5 - BGAP_ADC connection bit"]
    #[inline(always)]
    pub fn sel_vref_out(&self) -> SEL_VREF_OUT_R {
        SEL_VREF_OUT_R::new(((self.bits >> 4) & 3) as u8)
    }
    #[doc = "Bit 8 - VREFINT reference for ADC enable bit"]
    #[inline(always)]
    pub fn enbuf_vrefint_adc(&self) -> ENBUF_VREFINT_ADC_R {
        ENBUF_VREFINT_ADC_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Sensor reference for ADC enable bit"]
    #[inline(always)]
    pub fn enbuf_sensor_adc(&self) -> ENBUF_SENSOR_ADC_R {
        ENBUF_SENSOR_ADC_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 12 - VREFINT reference for COMP2 scaler enable bit"]
    #[inline(always)]
    pub fn enbuf_vrefint_comp2(&self) -> ENBUF_VREFINT_COMP2_R {
        ENBUF_VREFINT_COMP2_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - VREFINT reference for HSI48 oscillator enable bit"]
    #[inline(always)]
    pub fn enref_hsi48(&self) -> ENREF_HSI48_R {
        ENREF_HSI48_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 30 - VREFINT ready flag"]
    #[inline(always)]
    pub fn vrefint_rdyf(&self) -> VREFINT_RDYF_R {
        VREFINT_RDYF_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - SYSCFG_CFGR3 lock bit"]
    #[inline(always)]
    pub fn ref_lock(&self) -> REF_LOCK_R {
        REF_LOCK_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Field accessors on the whole-register writer. Note there is no writer for
// VREFINT_RDYF (bit 30): it is a read-only status flag.
impl W {
    #[doc = "Bit 0 - VREFINT enable and scaler control for COMP2 enable bit"]
    #[inline(always)]
    #[must_use]
    pub fn en_vrefint(&mut self) -> EN_VREFINT_W<CFGR3_SPEC, 0> {
        EN_VREFINT_W::new(self)
    }
    #[doc = "Bits 4:5 - BGAP_ADC connection bit"]
    #[inline(always)]
    #[must_use]
    pub fn sel_vref_out(&mut self) -> SEL_VREF_OUT_W<CFGR3_SPEC, 4> {
        SEL_VREF_OUT_W::new(self)
    }
    #[doc = "Bit 8 - VREFINT reference for ADC enable bit"]
    #[inline(always)]
    #[must_use]
    pub fn enbuf_vrefint_adc(&mut self) -> ENBUF_VREFINT_ADC_W<CFGR3_SPEC, 8> {
        ENBUF_VREFINT_ADC_W::new(self)
    }
    #[doc = "Bit 9 - Sensor reference for ADC enable bit"]
    #[inline(always)]
    #[must_use]
    pub fn enbuf_sensor_adc(&mut self) -> ENBUF_SENSOR_ADC_W<CFGR3_SPEC, 9> {
        ENBUF_SENSOR_ADC_W::new(self)
    }
    #[doc = "Bit 12 - VREFINT reference for COMP2 scaler enable bit"]
    #[inline(always)]
    #[must_use]
    pub fn enbuf_vrefint_comp2(&mut self) -> ENBUF_VREFINT_COMP2_W<CFGR3_SPEC, 12> {
        ENBUF_VREFINT_COMP2_W::new(self)
    }
    #[doc = "Bit 13 - VREFINT reference for HSI48 oscillator enable bit"]
    #[inline(always)]
    #[must_use]
    pub fn enref_hsi48(&mut self) -> ENREF_HSI48_W<CFGR3_SPEC, 13> {
        ENREF_HSI48_W::new(self)
    }
    #[doc = "Bit 31 - SYSCFG_CFGR3 lock bit"]
    #[inline(always)]
    #[must_use]
    pub fn ref_lock(&mut self) -> REF_LOCK_W<CFGR3_SPEC, 31> {
        REF_LOCK_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "SYSCFG configuration register 3\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr3::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr3::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct CFGR3_SPEC; impl crate::RegisterSpec for CFGR3_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`cfgr3::R`](R) reader structure"] impl crate::Readable for CFGR3_SPEC {} #[doc = "`write(|w| ..)` method takes [`cfgr3::W`](W) writer structure"] impl crate::Writable for CFGR3_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets CFGR3 to value 0"] impl crate::Resettable for CFGR3_SPEC { const RESET_VALUE: Self::Ux = 0; }
// Integration tests for the hacspec integer wrappers.
// `get_random_numbers` / `get_expected` come from the sibling `test_util`
// module; `get_expected` presumably shells out to an external oracle and
// returns a hex string ("0x..") — TODO confirm against test_util.
use hacspec_lib::prelude::*;
mod test_util;
use test_util::*;

// Shared assertions for the public (non-secret) unsigned integer types.
macro_rules! test_unsigned_public_macro {
    ($t:ty) => {
        assert_eq!(<$t>::max_val(), <$t>::max_value() as $t);
        assert_eq!((3 as $t).exp(5), 243);
        // assert_eq!((3 as $t).pow_self(5), 243);
        // ...
    };
}

#[test]
fn test_unsigned_public() {
    test_unsigned_public_macro!(u8);
    test_unsigned_public_macro!(u16);
    test_unsigned_public_macro!(u32);
    test_unsigned_public_macro!(u64);
    test_unsigned_public_macro!(u128);
}

// Shared assertions for the public signed integer types.
macro_rules! test_signed_public_macro {
    ($t:ty) => {
        assert_eq!(<$t>::max_val(), <$t>::max_value());
        let (a, a_t, b, b_t) = get_random_numbers::<$t>();
        // multiplication operator might panic on overflow
        let res = std::panic::catch_unwind(|| a_t * b_t);
        match res {
            Ok(r) => {
                let expected = get_expected("*", &a, &b);
                let res_s = format!("0x{:x}", r);
                assert_eq!(res_s, expected);
            }
            Err(_) => (),
        }
        assert_eq!((6 as $t).pow_mod(8, 127), 41);
        // assert_eq!((2 as $t).pow_self(5), 32);
        // ...
    };
}

#[test]
fn test_signed_public() {
    test_signed_public_macro!(i8);
    test_signed_public_macro!(i16);
    test_signed_public_macro!(i32);
    test_signed_public_macro!(i64);
    test_signed_public_macro!(i128);
}

// Shared assertions for secret integer types. `$true_val` is the all-ones
// bit pattern expected from the *_bm (bit-mask) comparison functions.
macro_rules! test_secret_macro {
    ($t:ty,$true_val:expr) => {
        assert!(<$t>::max_val().equal(<$t>::max_value()));
        let (a, a_t, b, b_t) = get_random_numbers::<$t>();
        // mod
        if !b_t.equal(<$t>::ZERO()) {
            let r = a_t.modulo(b_t);
            let expected = get_expected("%", &a, &b);
            assert_eq!(format!("0x{:x}", r), expected);
        }
        // Comparison functions returning bool.
        assert_eq!(a_t.equal(b_t), a == b);
        let expected_gt = if get_expected(">", &a, &b) == "0x0" { false } else { true };
        assert_eq!(a_t.greater_than(b_t), expected_gt);
        let expected_gte = if get_expected(">=", &a, &b) == "0x0" { false } else { true };
        assert_eq!(a_t.greater_than_or_equal(b_t), expected_gte);
        let expected_lt = if get_expected("<", &a, &b) == "0x0" { false } else { true };
        assert_eq!(a_t.less_than(b_t), expected_lt);
        let expected_lte = if get_expected("<=", &a, &b) == "0x0" { false } else { true };
        assert_eq!(a_t.less_than_or_equal(b_t), expected_lte);
        // Comparison functions returning a bit mask (0x0..0 or 0xF..F).
        let expected = if a == b { $true_val } else { 0 };
        assert_eq!(a_t.equal_bm(b_t).declassify(), expected);
        let expected = if expected_gt { $true_val } else { 0 };
        assert_eq!(a_t.greater_than_bm(b_t).declassify(), expected);
        let expected = if expected_gte { $true_val } else { 0 };
        assert_eq!(a_t.greater_than_or_equal_bm(b_t).declassify(), expected);
        let expected = if expected_lt { $true_val } else { 0 };
        assert_eq!(a_t.less_than_bm(b_t).declassify(), expected);
        let expected = if expected_lte { $true_val } else { 0 };
        assert_eq!(a_t.less_than_or_equal_bm(b_t).declassify(), expected);
    };
}

#[test]
fn test_unsigned_secret() {
    // For unsigned types the all-ones mask equals the maximum value.
    test_secret_macro!(U8, U8::max_value().declassify());
    test_secret_macro!(U16, U16::max_value().declassify());
    test_secret_macro!(U32, U32::max_value().declassify());
    test_secret_macro!(U64, U64::max_value().declassify());
    test_secret_macro!(U128, U128::max_value().declassify());
}

#[test]
fn test_signed_secret() {
    // For signed types the all-ones mask is -1 (two's complement).
    test_secret_macro!(I8, -1);
    test_secret_macro!(I16, -1);
    test_secret_macro!(I32, -1);
    test_secret_macro!(I64, -1);
    test_secret_macro!(I128, -1);
}

// Manually-expanded copy of test_secret_macro!(I8, -1); kept as a sanity
// check for the macro expansion itself.
#[test]
fn test_secret_testing() {
    let (a, a_t, b, b_t) = get_random_numbers::<I8>();
    // mod
    if !b_t.equal(<I8>::ZERO()) {
        let r = a_t.modulo(b_t);
        let expected = get_expected("%", &a, &b);
        assert_eq!(format!("0x{:x}", r), expected);
    }
    // Comparison functions returning bool.
    assert_eq!(a_t.equal(b_t), a == b);
    let expected_gt = if get_expected(">", &a, &b) == "0x0" { false } else { true };
    assert_eq!(a_t.greater_than(b_t), expected_gt);
    let expected_gte = if get_expected(">=", &a, &b) == "0x0" { false } else { true };
    assert_eq!(a_t.greater_than_or_equal(b_t), expected_gte);
    let expected_lt = if get_expected("<", &a, &b) == "0x0" { false } else { true };
    assert_eq!(a_t.less_than(b_t), expected_lt);
    let expected_lte = if get_expected("<=", &a, &b) == "0x0" { false } else { true };
    assert_eq!(a_t.less_than_or_equal(b_t), expected_lte);
    // Comparison functions returning a bit mask (0x0..0 or 0xF..F).
    let expected = if a == b { -1 } else { 0 };
    assert_eq!(a_t.equal_bm(b_t).declassify(), expected);
    let expected = if expected_gt { -1 } else { 0 };
    assert_eq!(a_t.greater_than_bm(b_t).declassify(), expected);
    let expected = if expected_gte { -1 } else { 0 };
    assert_eq!(a_t.greater_than_or_equal_bm(b_t).declassify(), expected);
    let expected = if expected_lt { -1 } else { 0 };
    assert_eq!(a_t.less_than_bm(b_t).declassify(), expected);
    let expected = if expected_lte { -1 } else { 0 };
    assert_eq!(a_t.less_than_or_equal_bm(b_t).declassify(), expected);
}
use std::mem;
use stdweb::Value;
use shared::Note;
use misc::{SerialNumber};
use data::{State, DragType};
use draw::{PIXELS_PER_SEMITONE, PIXELS_PER_TIME};

// Context needed to render a note: the current drag (if any) plus app state.
pub struct NoteDrawingInfo<'a> {
    pub drag_type: Option<DragType>,
    pub state: &'a State,
}

// A note being edited, paired with the DOM element (a jQuery-created <div>)
// that visualizes it. The element is owned: dropping an EditedNote removes
// the <div> from the document (see the Drop impl).
pub struct EditedNote {
    pub note: Note,
    pub serial_number: SerialNumber,
    //pub selected: bool,
    element: Value,
}

impl EditedNote {
    // Creates a fresh <div class="note"> appended to #notes and returns the
    // JS handle to it.
    fn new_element() -> Value {
        let result: Value = js! {
            return ($("<div>", {class: "note", "data-handletype": "note"}).appendTo ($("#notes")));
        };
        result
    }
    // New note with its own freshly-created DOM element.
    pub fn new(note: Note) -> EditedNote {
        EditedNote {
            note,
            serial_number: Default::default(),
            element: Self::new_element(),
        }
    }
    // New note that takes over `steal_from`'s DOM element (avoids a visual
    // flicker); the donor gets a fresh element in exchange.
    pub fn new_stealing(note: Note, steal_from: &mut EditedNote) -> EditedNote {
        let element = mem::replace(&mut steal_from.element, Self::new_element());
        EditedNote {
            note,
            serial_number: Default::default(),
            element,
        }
    }
    // Syncs the DOM element's position, size, colors and CSS classes with the
    // note's logical state, accounting for an in-progress drag.
    pub fn update_element(&self, info: &NoteDrawingInfo) {
        let mut exact_pitch = self.note.pitch as f64;
        let mut rounded_pitch = exact_pitch;
        let mut exact_start = self.note.start_time;
        let mut rounded_start = exact_start;
        // Animate by default; disabled mid-drag so the note tracks the cursor.
        let mut transition = "all 0.2s ease-out";
        let selected = info.state.selected.contains(&self.serial_number);
        let mut selecting = false;
        if let Some(drag_type) = &info.drag_type {
            match drag_type {
                DragType::MoveNotes { notes, exact_movement, rounded_movement, copying } => {
                    // movement[0] is the time axis, movement[1] the pitch axis.
                    if notes.contains(&self.serial_number) {
                        exact_pitch += exact_movement[1];
                        rounded_pitch += rounded_movement[1];
                        exact_start += exact_movement[0];
                        rounded_start += rounded_movement[0];
                        transition = "none";
                    }
                },
                DragType::DragSelect { notes, .. } => {
                    if notes.contains(&self.serial_number) {
                        selecting = true;
                    }
                },
                _ => (),
            }
        }
        rounded_pitch = rounded_pitch.round();
        let left = info.state.time_to_client(exact_start);
        let top = info.state.pitch_to_client(exact_pitch as f64 + 0.5);
        let width = self.note.duration * PIXELS_PER_TIME;
        let height = PIXELS_PER_SEMITONE;
        let color;
        let box_shadow;
        if exact_pitch == rounded_pitch && exact_start == rounded_start {
            color = "black";
            box_shadow = "none".to_string();
        } else {
            // While the note sits between grid positions, fade it and cast a
            // shadow at the snapped (rounded) position as a drop preview.
            color = "rgba(0,0,0,0.5)";
            box_shadow = format!(
                "{}px {}px {}px {}",
                info.state.time_to_client(rounded_start) - info.state.time_to_client(exact_start),
                info.state.pitch_to_client(rounded_pitch) - info.state.pitch_to_client(exact_pitch),
                PIXELS_PER_SEMITONE / 4.0,
                color,
            );
        }
        js! {
            let element =@{& self.element};
            element
                .width (@{width})
                .height(@{height})
                .attr("data-noteid", @{self.serial_number.0 as u32})
                .css({
                    left:@{left},
                    top:@{top},
                    "background-color": @{color},
                    "box-shadow": @{box_shadow},
                    transition:@{transition},
                });
            if (@{selected}) {element.addClass ("selected");} else {element.removeClass ("selected");}
            if (@{selecting}) {element.addClass ("selecting");} else {element.removeClass ("selecting");}
        }
    }
}

impl Drop for EditedNote {
    // Remove the note's <div> from the DOM when the note goes away.
    fn drop(&mut self) {
        js!{@{&self.element}.remove();}
    }
}
// https://projecteuler.net/problem=22
/*
Using names.txt (right click and 'Save Link/Target As...'), a 46K text file
containing over five-thousand first names, begin by sorting it into
alphabetical order. Then working out the alphabetical value for each name,
multiply this value by its alphabetical position in the list to obtain a name
score.

For example, when the list is sorted into alphabetical order, COLIN, which is
worth 3 + 15 + 12 + 9 + 14 = 53, is the 938th name in the list. So, COLIN
would obtain a score of 938 × 53 = 49714.

What is the total of all the name scores in the file?
*/

fn main() {
    let start_time = std::time::Instant::now();
    let sol = solve();
    let elapsed = start_time.elapsed().as_micros();
    println!("\nSolution: {}", sol);
    println!("Elasped time: {} us", elapsed);
}

/// Reads the names file given as the single CLI argument, sorts the names
/// alphabetically, and returns the sum of all name scores
/// (alphabetical value of the name × its 1-based position in sorted order).
///
/// # Panics
/// Panics if the argument count is not exactly one or the file is unreadable.
fn solve() -> u64 {
    let args: Vec<String> = std::env::args().collect();
    if args.len() != 2 {
        panic!("needs a single argument");
    }
    let path = std::path::Path::new(&args[1]);

    // Slurp the whole file; it is only ~46K, so streaming byte-by-byte
    // (as the previous version did) buys nothing.
    let contents = match std::fs::read_to_string(path) {
        Err(why) => panic!("couldn't open {}: {}", path.display(), why),
        Ok(contents) => contents,
    };

    // Format is comma-separated quoted names: "MARY","PATRICIA",...
    let mut names: Vec<&str> = contents
        .split(',')
        .map(|field| field.trim().trim_matches('"'))
        .filter(|name| !name.is_empty())
        .collect();
    names.sort_unstable();

    names
        .iter()
        .enumerate()
        .map(|(i, name)| {
            // 'A' must score 1, so subtract b'@' (0x40, the byte before 'A').
            let value: u64 = name.bytes().map(|c| (c - b'@') as u64).sum();
            (i as u64 + 1) * value
        })
        .sum()
}
// use std::fs::File; // use std::io::ErrorKind; // fn main() { // let f = File::open("hello.txt"); // let f = match f{ // Ok(file) => file, // Err(ref error) if error.kind() == ErrorKind::NotFound => { // match File::create("hello.txt"){ // Ok(fc) => fc, // Err(e) => { // panic!( // "Tried to create file but there was a problem:{:?}", // e // ) // }, // } // }, // Err(error) => { // panic!( // "There was a problem opening the file: {:?}", // error // ) // }, // }; // } // use std::fs::File; // fn main(){ // let f = File::open("hello.txt").unwrap(); // } // use std::fs::File; // fn main() { // let f = File::open("hello.txt").expect("Failed to open hello.txt"); // } // use std::io; // use std::io::Read; // use std::fs::File; // fn main(){ // read_username_from_file(); // } // fn read_username_from_file() -> Result<String,io::Error>{ // let f = File::open("hello.txt"); // let mut f = match f{ // Ok(file) => file, // Err(e) => return Err(e), // }; // let mut s = String::new(); // // 将文件中的内容读取到s中 // match f.read_to_string(&mut s){ // Ok(_) => Ok(s), // Err(e) => Err(e), // } // } // use std::io; // use std::io::Read; // use std::fs::File; // fn main(){ // } // fn read_username_from_file() -> Result<String,io::Error>{ // let mut f = File::open("Hello.txt")?; // let mut s = String::new(); // f.read_to_string(&mut s)?; // Ok(s) // } // 进一步缩短代码 // fn read_username_from_file() -> Result<String,io::Error>{ // let mut s = String::new(); // File::open("hello.txt")?.read_to_string(&mut s)?; // Ok(s) // } // fn main() { // loop{ // let guess: i32 = match guess.trim().parse(){ // Ok(num) => num, // Err(_) => continue, // }; // if guess < 1 || guess > 100{ // println!( // "The secret number will be between 1 and 100." 
//             );
//             continue;
//         }
//         match guess.cmp(&secret_number){}
//     }
// }

/// A guessed number validated to lie in the range 1..=100.
pub struct Guess {
    // Private so the invariant cannot be bypassed; read via `value()`.
    value: u32,
}

impl Guess {
    /// Creates a `Guess`, enforcing the 1..=100 invariant.
    ///
    /// Bug fix: the return type was written as lowercase `guess`, which names
    /// no type and fails to compile; it must be `Guess`.
    ///
    /// # Panics
    /// Panics if `value` is less than 1 or greater than 100.
    pub fn new(value: u32) -> Guess {
        if value < 1 || value > 100 {
            panic!("Guess value must be between 1 and 100,got {}", value);
        }
        Guess { value }
    }

    /// Returns the validated value (getter).
    pub fn value(&self) -> u32 {
        self.value
    }
}
/// Packets for the "status" (server list ping) protocol state.
pub mod status {
    use macros::Packet;

    /// Server status response carrying the status payload as a JSON string.
    #[derive(Packet)]
    #[packet(0x00, crate::STATUS_STATE, false)]
    pub struct Response {
        pub json: String
    }

    /// Reply to a ping; echoes the client-supplied payload.
    #[derive(Packet)]
    #[packet(0x01, crate::STATUS_STATE, false)]
    pub struct Pong {
        pub payload: i64
    }
}

/// Packets for the "login" protocol state.
pub mod login {
    use macros::Packet;
    use utils::sendable::{Vari32, InferLenVec};
    use utils::indexed_vec::IndexedVec;

    /// Disconnects the client with the given reason.
    #[derive(Packet)]
    #[packet(0x00, crate::LOGIN_STATE, false)]
    pub struct Disconnect {
        pub reason: String
    }

    /// Starts encryption negotiation with the client.
    #[derive(Packet)]
    #[packet(0x01, crate::LOGIN_STATE, false)]
    pub struct EncryptionRequest {
        pub server_id: String,
        pub public_key: IndexedVec<u8>,
        pub verify_token: IndexedVec<u8>,
    }

    /// Confirms a successful login with the player's UUID and username.
    #[derive(Packet)]
    #[packet(0x02, crate::LOGIN_STATE, false)]
    pub struct LoginSuccess {
        pub uuid: u128,
        pub username: String
    }

    /// Enables compression for packets at or above `threshold` bytes.
    #[derive(Packet)]
    #[packet(0x03, crate::LOGIN_STATE, false)]
    pub struct SetCompression {
        pub threshold: Vari32,
    }

    /// Custom login-phase plugin channel message.
    #[derive(Packet)]
    #[packet(0x04, crate::LOGIN_STATE, false)]
    pub struct LoginPluginRequest {
        pub message_id: Vari32,
        pub channel: String,
        pub data: InferLenVec
    }
}

/// Packets for the "play" (in-game) protocol state.
pub mod play {
    use macros::Packet;
    use utils::sendable::{Vari32, InferLenVec};

    /// Custom plugin channel message during play.
    #[derive(Packet)]
    #[packet(0x17, crate::PLAY_STATE, false)]
    pub struct PluginMessage {
        pub channel: String,
        pub data: InferLenVec
    }

    /// Relative entity movement; deltas are fixed-point position changes.
    #[derive(Packet)]
    #[packet(0x27, crate::PLAY_STATE, false)]
    pub struct EntityPositionPacket {
        pub entity_id: Vari32,
        pub delta_x: i16,
        pub delta_y: i16,
        pub delta_z: i16,
        pub on_ground: bool
    }
}
// HTTP handlers for the /collections resource. Every handler authenticates
// the request first via `auth`; handlers that only read discard the user id
// (`_user_id`), mutating handlers pass it to the use case.
use actix_web::{web, HttpRequest, HttpResponse, Responder};

use publishing::application::collection::{
    AddPublication, Create, CreateCommand, Delete, GetAll, GetById, RemovePublication, Update,
    UpdateCommand,
};

use crate::authorization::auth;
use crate::container::Container;
use crate::error::PublicError;

// POST /collections
async fn create(
    req: HttpRequest,
    cmd: web::Json<CreateCommand>,
    c: web::Data<Container>,
) -> impl Responder {
    let user_id = auth(&req, &c).await?;

    Create::new(
        c.publishing.event_pub(),
        c.publishing.author_repo(),
        c.publishing.category_repo(),
        c.publishing.collection_repo(),
    )
    .exec(user_id, cmd.into_inner())
    .await
    .map(|res| HttpResponse::Ok().json(res))
    .map_err(PublicError::from)
}

// GET /collections
async fn get_all(req: HttpRequest, c: web::Data<Container>) -> impl Responder {
    let _user_id = auth(&req, &c).await?;

    GetAll::new(
        c.publishing.author_repo(),
        c.publishing.category_repo(),
        c.publishing.collection_repo(),
        c.publishing.publication_repo(),
    )
    .exec()
    .await
    .map(|res| HttpResponse::Ok().json(res))
    .map_err(PublicError::from)
}

// GET /collections/:id
async fn get_by_id(
    req: HttpRequest,
    path: web::Path<String>,
    c: web::Data<Container>,
) -> impl Responder {
    let _user_id = auth(&req, &c).await?;

    GetById::new(
        c.publishing.author_repo(),
        c.publishing.category_repo(),
        c.publishing.collection_repo(),
        c.publishing.publication_repo(),
    )
    .exec(path.into_inner())
    .await
    .map(|res| HttpResponse::Ok().json(res))
    .map_err(PublicError::from)
}

// PUT /collections/:id
async fn update(
    req: HttpRequest,
    path: web::Path<String>,
    cmd: web::Json<UpdateCommand>,
    c: web::Data<Container>,
) -> impl Responder {
    let user_id = auth(&req, &c).await?;

    Update::new(
        c.publishing.event_pub(),
        c.publishing.category_repo(),
        c.publishing.collection_repo(),
    )
    .exec(user_id, path.into_inner(), cmd.into_inner())
    .await
    .map(|res| HttpResponse::Ok().json(res))
    .map_err(PublicError::from)
}

// DELETE /collections/:id
async fn delete(
    req: HttpRequest,
    path: web::Path<String>,
    c: web::Data<Container>,
) -> impl Responder {
    let user_id = auth(&req, &c).await?;

    Delete::new(c.publishing.event_pub(), c.publishing.collection_repo())
        .exec(user_id, path.into_inner())
        .await
        .map(|res| HttpResponse::Ok().json(res))
        .map_err(PublicError::from)
}

// POST /collections/:id/publication/:publication_id
async fn add_publication(
    req: HttpRequest,
    path: web::Path<(String, String)>,
    c: web::Data<Container>,
) -> impl Responder {
    let _user_id = auth(&req, &c).await?;
    // path.0 = collection id, path.1 = publication id.
    let path = path.into_inner();

    AddPublication::new(
        c.publishing.event_pub(),
        c.publishing.collection_repo(),
        c.publishing.publication_repo(),
    )
    .exec(path.0, path.1)
    .await
    .map(|res| HttpResponse::Ok().json(res))
    .map_err(PublicError::from)
}

// DELETE /collections/:id/publication/:publication_id
async fn remove_publication(
    req: HttpRequest,
    path: web::Path<(String, String)>,
    c: web::Data<Container>,
) -> impl Responder {
    let _user_id = auth(&req, &c).await?;
    // path.0 = collection id, path.1 = publication id.
    let path = path.into_inner();

    RemovePublication::new(c.publishing.event_pub(), c.publishing.collection_repo())
        .exec(path.0, path.1)
        .await
        .map(|res| HttpResponse::Ok().json(res))
        .map_err(PublicError::from)
}

/// Registers every /collections route on the service config.
pub fn routes(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("/collections")
            .route("", web::post().to(create))
            .route("", web::get().to(get_all))
            .route("/{collection_id}", web::get().to(get_by_id))
            .route("/{collection_id}", web::put().to(update))
            .route("/{collection_id}", web::delete().to(delete))
            .route(
                "/{collection_id}/publication/{publication_id}",
                web::post().to(add_publication),
            )
            .route(
                "/{collection_id}/publication/{publication_id}",
                web::delete().to(remove_publication),
            ),
    );
}
use std::fmt;

/// Result alias used by the networking layer; the error is always [`NetError`].
pub type NetResult<T> = std::result::Result<T, NetError>;

/// The main categories of network failure this crate reports.
#[derive(Debug, Copy, Clone)]
pub enum NetError {
    OperationFailed,
    ConnectFailed,
    ConnectTimeout,
    ChannelStopped,
    ChannelTimeout,
    ServiceStopped,
}

impl std::error::Error for NetError {}

impl fmt::Display for NetError {
    fn fmt(&self, f: &mut fmt::Formatter) -> std::fmt::Result {
        // Look up the user-facing message for the variant, then emit it once.
        let message = match self {
            NetError::OperationFailed => "Operation failed",
            NetError::ConnectFailed => "Connection failed",
            NetError::ConnectTimeout => "Connection timed out",
            NetError::ChannelStopped => "Channel stopped",
            NetError::ChannelTimeout => "Channel timed out",
            NetError::ServiceStopped => "Service stopped",
        };
        f.write_str(message)
    }
}
// NOTE(review): auto-generated register API (svd2rust style) for the TZSC
// SECCFGR2 secure-configuration register. One enable bit per peripheral,
// bits 0..=18. Do not hand-edit logic; regenerate from the SVD.
#[doc = "Register `SECCFGR2` reader"]
pub type R = crate::R<SECCFGR2_SPEC>;
#[doc = "Register `SECCFGR2` writer"]
pub type W = crate::W<SECCFGR2_SPEC>;
#[doc = "Field `TIM8SEC` reader - TIM8SEC"]
pub type TIM8SEC_R = crate::BitReader;
#[doc = "Field `TIM8SEC` writer - TIM8SEC"]
pub type TIM8SEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `USART1SEC` reader - USART1SEC"]
pub type USART1SEC_R = crate::BitReader;
#[doc = "Field `USART1SEC` writer - USART1SEC"]
pub type USART1SEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM15SEC` reader - TIM15SEC"]
pub type TIM15SEC_R = crate::BitReader;
#[doc = "Field `TIM15SEC` writer - TIM15SEC"]
pub type TIM15SEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM16SEC` reader - TIM16SEC"]
pub type TIM16SEC_R = crate::BitReader;
#[doc = "Field `TIM16SEC` writer - TIM16SEC"]
pub type TIM16SEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TIM17SEC` reader - TIM17SEC"]
pub type TIM17SEC_R = crate::BitReader;
#[doc = "Field `TIM17SEC` writer - TIM17SEC"]
pub type TIM17SEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SAI1SEC` reader - SAI1SEC"]
pub type SAI1SEC_R = crate::BitReader;
#[doc = "Field `SAI1SEC` writer - SAI1SEC"]
pub type SAI1SEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SAI2SEC` reader - SAI2SEC"]
pub type SAI2SEC_R = crate::BitReader;
#[doc = "Field `SAI2SEC` writer - SAI2SEC"]
pub type SAI2SEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DFSDM1SEC` reader - DFSDM1SEC"]
pub type DFSDM1SEC_R = crate::BitReader;
#[doc = "Field `DFSDM1SEC` writer - DFSDM1SEC"]
pub type DFSDM1SEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CRCSEC` reader - CRCSEC"]
pub type CRCSEC_R = crate::BitReader;
#[doc = "Field `CRCSEC` writer - CRCSEC"]
pub type CRCSEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSCSEC` reader - TSCSEC"]
pub type TSCSEC_R = crate::BitReader;
#[doc = "Field `TSCSEC` writer - TSCSEC"]
pub type TSCSEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ICACHESEC` reader - ICACHESEC"]
pub type ICACHESEC_R = crate::BitReader;
#[doc = "Field `ICACHESEC` writer - ICACHESEC"]
pub type ICACHESEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ADCSEC` reader - ADCSEC"]
pub type ADCSEC_R = crate::BitReader;
#[doc = "Field `ADCSEC` writer - ADCSEC"]
pub type ADCSEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AESSEC` reader - AESSEC"]
pub type AESSEC_R = crate::BitReader;
#[doc = "Field `AESSEC` writer - AESSEC"]
pub type AESSEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HASHSEC` reader - HASHSEC"]
pub type HASHSEC_R = crate::BitReader;
#[doc = "Field `HASHSEC` writer - HASHSEC"]
pub type HASHSEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RNGSEC` reader - RNGSEC"]
pub type RNGSEC_R = crate::BitReader;
#[doc = "Field `RNGSEC` writer - RNGSEC"]
pub type RNGSEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PKASEC` reader - PKASEC"]
pub type PKASEC_R = crate::BitReader;
#[doc = "Field `PKASEC` writer - PKASEC"]
pub type PKASEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SDMMC1SEC` reader - SDMMC1SEC"]
pub type SDMMC1SEC_R = crate::BitReader;
#[doc = "Field `SDMMC1SEC` writer - SDMMC1SEC"]
pub type SDMMC1SEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FSMC_REGSEC` reader - FSMC_REGSEC"]
pub type FSMC_REGSEC_R = crate::BitReader;
#[doc = "Field `FSMC_REGSEC` writer - FSMC_REGSEC"]
pub type FSMC_REGSEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OCTOSPI1_REGSEC` reader - OCTOSPI1_REGSEC"]
pub type OCTOSPI1_REGSEC_R = crate::BitReader;
#[doc = "Field `OCTOSPI1_REGSEC` writer - OCTOSPI1_REGSEC"]
pub type OCTOSPI1_REGSEC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: one per peripheral-security bit, extracted from the cached
// 32-bit register value.
impl R {
    #[doc = "Bit 0 - TIM8SEC"]
    #[inline(always)]
    pub fn tim8sec(&self) -> TIM8SEC_R {
        TIM8SEC_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - USART1SEC"]
    #[inline(always)]
    pub fn usart1sec(&self) -> USART1SEC_R {
        USART1SEC_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - TIM15SEC"]
    #[inline(always)]
    pub fn tim15sec(&self) -> TIM15SEC_R {
        TIM15SEC_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - TIM16SEC"]
    #[inline(always)]
    pub fn tim16sec(&self) -> TIM16SEC_R {
        TIM16SEC_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - TIM17SEC"]
    #[inline(always)]
    pub fn tim17sec(&self) -> TIM17SEC_R {
        TIM17SEC_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - SAI1SEC"]
    #[inline(always)]
    pub fn sai1sec(&self) -> SAI1SEC_R {
        SAI1SEC_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - SAI2SEC"]
    #[inline(always)]
    pub fn sai2sec(&self) -> SAI2SEC_R {
        SAI2SEC_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - DFSDM1SEC"]
    #[inline(always)]
    pub fn dfsdm1sec(&self) -> DFSDM1SEC_R {
        DFSDM1SEC_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - CRCSEC"]
    #[inline(always)]
    pub fn crcsec(&self) -> CRCSEC_R {
        CRCSEC_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - TSCSEC"]
    #[inline(always)]
    pub fn tscsec(&self) -> TSCSEC_R {
        TSCSEC_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - ICACHESEC"]
    #[inline(always)]
    pub fn icachesec(&self) -> ICACHESEC_R {
        ICACHESEC_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - ADCSEC"]
    #[inline(always)]
    pub fn adcsec(&self) -> ADCSEC_R {
        ADCSEC_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - AESSEC"]
    #[inline(always)]
    pub fn aessec(&self) -> AESSEC_R {
        AESSEC_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - HASHSEC"]
    #[inline(always)]
    pub fn hashsec(&self) -> HASHSEC_R {
        HASHSEC_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - RNGSEC"]
    #[inline(always)]
    pub fn rngsec(&self) -> RNGSEC_R {
        RNGSEC_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - PKASEC"]
    #[inline(always)]
    pub fn pkasec(&self) -> PKASEC_R {
        PKASEC_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - SDMMC1SEC"]
    #[inline(always)]
    pub fn sdmmc1sec(&self) -> SDMMC1SEC_R {
        SDMMC1SEC_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - FSMC_REGSEC"]
    #[inline(always)]
    pub fn fsmc_regsec(&self) -> FSMC_REGSEC_R {
        FSMC_REGSEC_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - OCTOSPI1_REGSEC"]
    #[inline(always)]
    pub fn octospi1_regsec(&self) -> OCTOSPI1_REGSEC_R {
        OCTOSPI1_REGSEC_R::new(((self.bits >> 18) & 1) != 0)
    }
}
// Write accessors: each returns a typed bit writer positioned at its offset.
impl W {
    #[doc = "Bit 0 - TIM8SEC"]
    #[inline(always)]
    #[must_use]
    pub fn tim8sec(&mut self) -> TIM8SEC_W<SECCFGR2_SPEC, 0> {
        TIM8SEC_W::new(self)
    }
    #[doc = "Bit 1 - USART1SEC"]
    #[inline(always)]
    #[must_use]
    pub fn usart1sec(&mut self) -> USART1SEC_W<SECCFGR2_SPEC, 1> {
        USART1SEC_W::new(self)
    }
    #[doc = "Bit 2 - TIM15SEC"]
    #[inline(always)]
    #[must_use]
    pub fn tim15sec(&mut self) -> TIM15SEC_W<SECCFGR2_SPEC, 2> {
        TIM15SEC_W::new(self)
    }
    #[doc = "Bit 3 - TIM16SEC"]
    #[inline(always)]
    #[must_use]
    pub fn tim16sec(&mut self) -> TIM16SEC_W<SECCFGR2_SPEC, 3> {
        TIM16SEC_W::new(self)
    }
    #[doc = "Bit 4 - TIM17SEC"]
    #[inline(always)]
    #[must_use]
    pub fn tim17sec(&mut self) -> TIM17SEC_W<SECCFGR2_SPEC, 4> {
        TIM17SEC_W::new(self)
    }
    #[doc = "Bit 5 - SAI1SEC"]
    #[inline(always)]
    #[must_use]
    pub fn sai1sec(&mut self) -> SAI1SEC_W<SECCFGR2_SPEC, 5> {
        SAI1SEC_W::new(self)
    }
    #[doc = "Bit 6 - SAI2SEC"]
    #[inline(always)]
    #[must_use]
    pub fn sai2sec(&mut self) -> SAI2SEC_W<SECCFGR2_SPEC, 6> {
        SAI2SEC_W::new(self)
    }
    #[doc = "Bit 7 - DFSDM1SEC"]
    #[inline(always)]
    #[must_use]
    pub fn dfsdm1sec(&mut self) -> DFSDM1SEC_W<SECCFGR2_SPEC, 7> {
        DFSDM1SEC_W::new(self)
    }
    #[doc = "Bit 8 - CRCSEC"]
    #[inline(always)]
    #[must_use]
    pub fn crcsec(&mut self) -> CRCSEC_W<SECCFGR2_SPEC, 8> {
        CRCSEC_W::new(self)
    }
    #[doc = "Bit 9 - TSCSEC"]
    #[inline(always)]
    #[must_use]
    pub fn tscsec(&mut self) -> TSCSEC_W<SECCFGR2_SPEC, 9> {
        TSCSEC_W::new(self)
    }
    #[doc = "Bit 10 - ICACHESEC"]
    #[inline(always)]
    #[must_use]
    pub fn icachesec(&mut self) -> ICACHESEC_W<SECCFGR2_SPEC, 10> {
        ICACHESEC_W::new(self)
    }
    #[doc = "Bit 11 - ADCSEC"]
    #[inline(always)]
    #[must_use]
    pub fn adcsec(&mut self) -> ADCSEC_W<SECCFGR2_SPEC, 11> {
        ADCSEC_W::new(self)
    }
    #[doc = "Bit 12 - AESSEC"]
    #[inline(always)]
    #[must_use]
    pub fn aessec(&mut self) -> AESSEC_W<SECCFGR2_SPEC, 12> {
        AESSEC_W::new(self)
    }
    #[doc = "Bit 13 - HASHSEC"]
    #[inline(always)]
    #[must_use]
    pub fn hashsec(&mut self) -> HASHSEC_W<SECCFGR2_SPEC, 13> {
        HASHSEC_W::new(self)
    }
    #[doc = "Bit 14 - RNGSEC"]
    #[inline(always)]
    #[must_use]
    pub fn rngsec(&mut self) -> RNGSEC_W<SECCFGR2_SPEC, 14> {
        RNGSEC_W::new(self)
    }
    #[doc = "Bit 15 - PKASEC"]
    #[inline(always)]
    #[must_use]
    pub fn pkasec(&mut self) -> PKASEC_W<SECCFGR2_SPEC, 15> {
        PKASEC_W::new(self)
    }
    #[doc = "Bit 16 - SDMMC1SEC"]
    #[inline(always)]
    #[must_use]
    pub fn sdmmc1sec(&mut self) -> SDMMC1SEC_W<SECCFGR2_SPEC, 16> {
        SDMMC1SEC_W::new(self)
    }
    #[doc = "Bit 17 - FSMC_REGSEC"]
    #[inline(always)]
    #[must_use]
    pub fn fsmc_regsec(&mut self) -> FSMC_REGSEC_W<SECCFGR2_SPEC, 17> {
        FSMC_REGSEC_W::new(self)
    }
    #[doc = "Bit 18 - OCTOSPI1_REGSEC"]
    #[inline(always)]
    #[must_use]
    pub fn octospi1_regsec(&mut self) -> OCTOSPI1_REGSEC_W<SECCFGR2_SPEC, 18> {
        OCTOSPI1_REGSEC_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "TZSC secure configuration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`seccfgr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`seccfgr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SECCFGR2_SPEC;
impl crate::RegisterSpec for SECCFGR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`seccfgr2::R`](R) reader structure"]
impl crate::Readable for SECCFGR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`seccfgr2::W`](W) writer structure"]
impl crate::Writable for SECCFGR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SECCFGR2 to value 0"]
impl crate::Resettable for SECCFGR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
// HTTP handlers for step-page CRUD; each handler lives in its own submodule
// and is re-exported here so callers can use `crate::http::<handler>` directly.
pub mod delete_step_page;
pub mod create_step_page;
pub mod update_step_page;
pub mod read_step_page;
pub use crate::http::delete_step_page::delete_step_page;
pub use crate::http::create_step_page::create_step_page;
pub use crate::http::update_step_page::update_step_page;
pub use crate::http::read_step_page::read_step_page;
use clap::{load_yaml, App}; use edit_distance::edit_distance; use itertools::Itertools; use std::{cmp::max, fmt, fs, io, path::Path}; fn main() -> io::Result<()> { let yaml = load_yaml!("../cli.yml"); let matches = App::from_yaml(yaml).get_matches(); let eol = matches.value_of_lossy("linesep").unwrap(); let eoc = matches.value_of_lossy("columnsep").unwrap(); let relative = matches.is_present("relative"); let trim_whitespaces = matches.is_present("trim-whitespaces"); let mut strings: Vec<(Option<String>, String)> = matches .values_of_lossy("string") .unwrap_or_default() .drain(..) .enumerate() .map(|(nr, s)| (Some(format!("<string{}>", nr)), s)) .collect(); if let Some(file) = matches.value_of_lossy("from-file") { let mut filenames = read_filenames_from_file(file.to_string())?; let additional_strings = filenames .drain(..) .map(|p| (Some(p.clone()), fs::read_to_string(&p))) .map(|(p, res)| (p, notify_err(res))) .filter_map(|(p, opt)| opt.map(|s| (p, s))); strings.extend(additional_strings) } if let Some(mut files) = matches.values_of_lossy("FILE") { let additional_strings = files .drain(..) 
.map(|p| (Some(p.clone()), fs::read_to_string(&p))) .map(|(p, res)| (p, notify_err(res))) .filter_map(|(p, opt)| opt.map(|s| (p, s))); strings.extend(additional_strings) } for tuple in strings.iter().combinations(2) { // Extract the combination let (name1, s1) = tuple[0]; let (name2, s2) = tuple[1]; // Unwrap names let name1 = name1.to_owned().unwrap_or("NONE".into()); let name2 = name2.to_owned().unwrap_or("NONE".into()); // Trim whitespaces, if enabled let s1 = if trim_whitespaces { s1.trim() } else { s1 }; let s2 = if trim_whitespaces { s2.trim() } else { s2 }; // Calculate the edit distance // Make it relative, if enabled let distance = if relative { let dist = edit_distance(&s1, &s2) as f32; let len = max(s1.len(), s2.len()) as f32; dist / len } else { edit_distance(&s1, &s2) as f32 }; print!("{4}{0}{2}{0}{3}{1}", eoc, eol, name1, name2, distance) } Ok(()) } fn read_filenames_from_file<P>(path: P) -> io::Result<Vec<String>> where P: AsRef<Path>, { let content = fs::read_to_string(path)?; let lines = content .lines() .filter(|line| !line.trim().is_empty()) .map(|line| line.trim().to_owned()); Ok(lines.collect()) } fn notify_err<T, E>(result: Result<T, E>) -> Option<T> where E: fmt::Debug, { match result { Ok(inner) => Some(inner), Err(e) => { eprintln!("Error: {:?}", e); None } } }
// All credit for this code goes to https://dev.to/rpalo/comment/ig0e
use std::fs;
use std::ops::Add;
use std::iter::FromIterator;
use std::collections::{HashMap, HashSet};

/// A point on the grid (also used as a unit step).
#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
struct Coordinate {
    x: isize,
    y: isize,
}

impl Coordinate {
    pub fn new(x: isize, y: isize) -> Self {
        Self { x, y }
    }
}

impl Add for Coordinate {
    type Output = Self;

    fn add(self, rhs: Self) -> Self {
        Self::new(self.x + rhs.x, self.y + rhs.y)
    }
}

/// Every grid cell a wire occupies, in the order it was laid.
type Wire = Vec<Coordinate>;
/// Unit steps, one entry per cell moved.
type Moves = Vec<Coordinate>;

/// Expand a comma-separated instruction list ("R8,U5,...") into unit steps,
/// one `Coordinate` delta per cell moved.
fn format_instructions(wire_instructions: &str) -> Moves {
    wire_instructions
        .split(',')
        .flat_map(|task| {
            // turn: U,D,L,R  distance: int
            let (turn, distance_text) = task.split_at(1);
            let distance: usize = distance_text.parse().unwrap();
            let step = match turn {
                "U" => Coordinate::new(0, 1),
                "R" => Coordinate::new(1, 0),
                "D" => Coordinate::new(0, -1),
                "L" => Coordinate::new(-1, 0),
                _ => panic!("Direction {:?} not valid.", turn),
            };
            std::iter::repeat(step).take(distance)
        })
        .collect()
}

/// Build a Wire out of relative Moves by accumulating positions from the
/// origin (the origin itself is not part of the wire).
fn make_wire(moves: Moves) -> Wire {
    moves
        .into_iter()
        .scan(Coordinate::new(0, 0), |position, step| {
            *position = step + *position;
            Some(*position)
        })
        .collect()
}

/// Taxicab distance of `coord` from the origin.
fn manhattan_distance(coord: &Coordinate) -> usize {
    (coord.x.abs() + coord.y.abs()) as usize
}

/// Of all cells both wires occupy, return the one closest to the origin.
fn find_closest_cross(wire_1: &Wire, wire_2: &Wire) -> Coordinate {
    let cells_1: HashSet<&Coordinate> = wire_1.iter().collect();
    let cells_2: HashSet<&Coordinate> = wire_2.iter().collect();
    **cells_1
        .intersection(&cells_2)
        .min_by_key(|c| manhattan_distance(c))
        .unwrap()
}

/// Number of steps (1-based) along `wire` to first reach `target`.
fn find_in_wire(wire: &Wire, target: &Coordinate) -> usize {
    1 + wire.iter().position(|cell| cell == target).unwrap()
}

/// Minimum combined step count over all crossings of the two wires.
fn shortest_cross_distance(a: &Wire, b: &Wire) -> usize {
    let a_set: HashSet<&Coordinate> = a.iter().collect();
    let b_set: HashSet<&Coordinate> = b.iter().collect();
    a_set
        .intersection(&b_set)
        .map(|cross| find_in_wire(a, cross) + find_in_wire(b, cross))
        .min()
        .unwrap()
}

fn main() {
    let text = fs::read_to_string("input.txt").unwrap();
    let mut wires = text.split('\n').map(format_instructions).map(make_wire);
    let wire1 = wires.next().unwrap();
    let wire2 = wires.next().unwrap();

    let closest_cross = find_closest_cross(&wire1, &wire2);
    println!(
        "Manhatten Distance of closest cross {:?}",
        manhattan_distance(&closest_cross)
    );
    println!(
        "Fewest combined steps: {}",
        shortest_cross_distance(&wire1, &wire2)
    );
}
use std::f32; use std::f32::consts::PI; use std::ops::Mul; use crate::vector::Vector3; use crate::camera::Camera; pub struct Matrix4 { pub cells: [[f32; 4]; 4], } macro_rules! matrix { [ $x:expr ] => (Matrix4 { cells: [[$x; 4]; 4] }); [ $a00:expr, $a01:expr, $a02:expr, $a03:expr; $a10:expr, $a11:expr, $a12:expr, $a13:expr; $a20:expr, $a21:expr, $a22:expr, $a23:expr; $a30:expr, $a31:expr, $a32:expr, $a33:expr ] => (Matrix4 { cells: [ [$a00, $a01, $a02, $a03], [$a10, $a11, $a12, $a13], [$a20, $a21, $a22, $a23], [$a30, $a31, $a32, $a33] ] }) } impl Matrix4 { pub fn zero() -> Matrix4 { matrix![0.] } pub fn identity() -> Matrix4 { matrix![ 1., 0., 0., 0.; 0., 1., 0., 0.; 0., 0., 1., 0.; 0., 0., 0., 1. ] } pub fn rot_x(t: f32) -> Matrix4 { matrix![ 1., 0., 0., 0.; 0., t.cos(), -t.sin(), 0.; 0., t.sin(), t.cos(), 0.; 0., 0., 0., 1. ] } pub fn rot_y(t: f32) -> Matrix4 { matrix![ t.cos(), -t.sin(), 0., 0.; t.sin(), t.cos(), 0., 0.; 0., 0., 1., 0.; 0., 0., 0., 1. ] } pub fn rot_z(t: f32) -> Matrix4 { matrix![ t.cos(), 0., t.sin(), 0.; 0., 1., 0., 0.; -t.sin(), 0., t.cos(), 0.; 0., 0., 0., 1. ] } pub fn rot(t_x: f32, t_y: f32, t_z: f32) -> Matrix4 { Matrix4::rot_x(t_x) * Matrix4::rot_y(t_y) * Matrix4::rot_z(t_z) } pub fn translate(v: Vector3) -> Matrix4 { matrix![ 1., 0., 0., v.x; 0., 1., 0., v.y; 0., 0., 1., v.z; 0., 0., 0., 1. ] } pub fn rot_and_translate(t_x:f32, t_y:f32, t_z:f32, v: Vector3) -> Matrix4 { let mut out = Matrix4::rot(t_x, t_y, t_z); out.cells[0][3] = v.x; out.cells[1][3] = v.y; out.cells[2][3] = v.z; out } pub fn project(cam: &Camera) -> Matrix4 { let f = cam.far; let n = cam.near; let s = 1. / f32::tan(cam.fov * PI / 360.); // After perspective divide (vector /z), the depth coodinate z // is remapped to (near => 0), (far => 1) matrix![ s, 0., 0., 0.; 0., s, 0., 0.; 0., 0., -(f+n)/(f-n), -2.*f*n/(f-n); 0., 0., -1., 0. 
] } } impl Mul<Matrix4> for Matrix4 { type Output = Matrix4; fn mul(self, other: Matrix4) -> Matrix4 { let mut out = Matrix4::zero(); for i in 0..4 { for j in 0..4 { for k in 0..4 { out.cells[i][j] += self.cells[i][k] * other.cells[k][j]; } } } out } } impl Mul<Vector3> for &Matrix4{ type Output = (Vector3, f32); fn mul(self, v: Vector3) -> (Vector3, f32) { let w = v.x * self.cells[3][0] + v.y * self.cells[3][1] + v.z * self.cells[3][2] + self.cells[3][3]; let p = Vector3 { x: v.x * self.cells[0][0] + v.y * self.cells[0][1] + v.z * self.cells[0][2] + self.cells[0][3], y: v.x * self.cells[1][0] + v.y * self.cells[1][1] + v.z * self.cells[1][2] + self.cells[1][3], z: v.x * self.cells[2][0] + v.y * self.cells[2][1] + v.z * self.cells[2][2] + self.cells[2][3], }; (p, w) } }
use crate::delay::Delay;
use crate::lowpass::OnePoleLPF;

/// A feedback comb filter with a one-pole low-pass filter in its feedback
/// path, fed by a fixed-length delay line.
pub struct LPFCombFilter {
    delay: Delay,
    lowpass: OnePoleLPF,
    // Feedback gain applied to the low-passed delayed signal.
    g: f64,
}

impl LPFCombFilter {
    /// Create a filter with a `delay_length`-sample delay line, feedback gain
    /// `g`, and a low-pass stage configured for `sample_rate`/`cutoff`.
    pub fn new(delay_length: usize, g: f64, sample_rate: f64, cutoff: f64) -> LPFCombFilter {
        let delay = Delay::new(delay_length);
        let lowpass = OnePoleLPF::new(sample_rate, cutoff);
        LPFCombFilter { delay, lowpass, g }
    }

    /// Process one input sample `s`, returning the output sample (the value
    /// currently at the read head of the delay line).
    pub fn next(&mut self, s: f64) -> f64 {
        let out = self.delay.read();
        let feedback = self.lowpass.next(out) * self.g;
        self.delay.write_and_advance(feedback + s);
        out
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_util::*;

    /// Filter a second of noise and write before/after WAVs for inspection.
    #[test]
    fn test_comb_filter() {
        let noise = generate_noise(44100);
        save(&noise, "test_lpf_comb_filter_original.wav");
        let mut lpfcf = LPFCombFilter::new(20, 0.5, 44100., 400.);
        let filtered: Vec<f64> = noise.into_iter().map(|s| lpfcf.next(s)).collect();
        save(&filtered, "test_lpf_comb_filter_filtered.wav");
    }
}
/*
 * Copyright Stalwart Labs Ltd. See the COPYING
 * file at the top-level directory of this distribution.
 *
 * Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 * https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 * file at the top-level directory of this distribution.
 *
 * Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 * https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 * <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
 * option. This file may not be copied, modified, or distributed
 * except according to those terms.
 */

// Example: driving a JMAP server over its WebSocket endpoint.
#[cfg(feature = "websockets")]
use futures_util::StreamExt;
#[cfg(feature = "websockets")]
use jmap_client::{client::Client, client_ws::WebSocketMessage, core::set::SetObject};
#[cfg(feature = "websockets")]
use tokio::sync::mpsc;

// Make sure the "websockets" feature is enabled!
#[cfg(feature = "websockets")]
async fn websocket() {
    // Connect to the JMAP server using Basic authentication
    let client = Client::new()
        .credentials(("john@example.org", "secret"))
        .connect("https://jmap.example.org")
        .await
        .unwrap();

    // Connect to the WebSocket endpoint
    let mut ws_stream = client.connect_ws().await.unwrap();

    // Read WS messages on a separate thread; every incoming message is
    // forwarded into an mpsc channel that this function drains below.
    let (stream_tx, mut stream_rx) = mpsc::channel::<WebSocketMessage>(100);
    tokio::spawn(async move {
        while let Some(change) = ws_stream.next().await {
            stream_tx.send(change.unwrap()).await.unwrap();
        }
    });

    // Create a mailbox over WS
    let mut request = client.build();
    let create_id = request
        .set_mailbox()
        .create()
        .name("WebSocket Test")
        .create_id()
        .unwrap();
    let request_id = request.send_ws().await.unwrap();

    // Read response from WS stream, match it to the request we just sent,
    // and extract the id of the mailbox that was created.
    let mailbox_id = if let Some(WebSocketMessage::Response(mut response)) = stream_rx.recv().await
    {
        assert_eq!(request_id, response.request_id().unwrap());
        response
            .pop_method_response()
            .unwrap()
            .unwrap_set_mailbox()
            .unwrap()
            .created(&create_id)
            .unwrap()
            .take_id()
    } else {
        unreachable!()
    };

    // Enable push notifications over WS
    client
        .enable_push_ws(None::<Vec<_>>, None::<&str>)
        .await
        .unwrap();

    // Make changes over standard HTTP and expect a push notification via WS
    client
        .mailbox_update_sort_order(&mailbox_id, 1)
        .await
        .unwrap();
    if let Some(WebSocketMessage::StateChange(changes)) = stream_rx.recv().await {
        println!("Received changes: {:?}", changes);
    } else {
        unreachable!()
    }
}

fn main() {
    // NOTE(review): `websocket()` is an async fn, so this expression only
    // constructs the future — futures are lazy and nothing here polls it, so
    // the example body never actually runs. Presumably this `main` exists
    // only so the example compiles; to execute it, block on the future with
    // an async runtime (e.g. tokio). TODO confirm intent.
    #[cfg(feature = "websockets")]
    let _c = websocket();
}
//! Types for the mutator to use to build data structures

use std::cell::Cell;
use std::mem::transmute;
use std::ops::{Deref, DerefMut};
use std::ptr::{null, null_mut};
use std::raw::TraitObject;
use std::sync::atomic::{AtomicPtr, Ordering};
use std::thread;

use constants::{INC_BIT, JOURNAL_BUFFER_SIZE, NEW_BIT, TRAVERSE_BIT};
use gcthread::{JournalSender, EntrySender};
use heap::{Object, TraceStack};
use journal;
use trace::Trace;

/// Each thread gets it's own EntrySender
thread_local!(
    static GC_JOURNAL: Cell<*const EntrySender> = Cell::new(null())
);

/// GcBox struct and traits: a boxed object that is GC managed
pub struct GcBox<T: Trace> {
    value: T,
}

/// Root smart pointer, sends reference count changes to the journal.
///
/// Whenever a reference to an object on the heap must be retained on the stack, this type must be
/// used. It's use will ensure that the object will be seen as a root.
pub struct GcRoot<T: Trace> {
    ptr: *mut GcBox<T>,
}

/// Non-atomic pointer type. This type is `!Sync` and thus is useful for presenting a Rust-ish
/// API to a data structure where aliasing and mutability must follow the standard rules: there
/// can be only one mutator.
///
/// *Important note:* even though this type is `!Sync`, any data structures that are composed of
/// `Gc` pointers must still be designed with the awareness that the GC thread will call `trace()`
/// at any point and so, must still be thread safe!
///
/// This is not a root pointer type. It should be used inside data structures to reference other
/// GC-managed objects.
pub struct Gc<T: Trace> {
    ptr: *mut GcBox<T>,
}

/// Atomic pointer type that points at a traceable object. This type is `Sync` and can be used to
/// build concurrent data structures.
///
/// This type should be used inside data structures to reference other GC-managed objects, but
/// provides interior mutability and atomic methods.
///
/// TODO: cas, swap etc for GcRoot and Gc
pub struct GcAtomic<T: Trace> {
    ptr: AtomicPtr<GcBox<T>>,
}

/// An Application Thread, manages a thread-local reference to a tx channel
///
/// TODO: a version of `spawn()` is required that can be called from an existing mutator thread.
pub struct AppThread;

impl AppThread {
    /// As thread::spawn but takes a journal Sender to initialize the thread_local instance with.
    pub fn spawn_from_gc<F, T>(tx: JournalSender, f: F) -> thread::JoinHandle<T>
        where F: FnOnce() -> T,
              F: Send + 'static,
              T: Send + 'static
    {
        thread::spawn(move || {
            // Create this thread's journal and hand the receiving end to the
            // GC thread before running the user closure.
            let (jtx, jrx) = journal::make_journal(JOURNAL_BUFFER_SIZE);
            tx.send(jrx).expect("Failed to send a new Journal to the GC thread!");
            // NOTE(review): GC_JOURNAL stores a raw pointer to the stack-local
            // `jtx`, which is only alive for the duration of `f()` — confirm
            // nothing reads the thread-local after `f()` returns.
            GC_JOURNAL.with(|j| {
                j.set(&jtx);
            });
            f()
        })
    }
}

// Reference count functions. Only new-objects need to specify the traverse bit.

/// View `object` as its `Trace` trait object, exposing the fat pointer's
/// (data, vtable) pair.
#[inline]
fn as_traitobject<T: Trace>(object: &T) -> TraitObject {
    let trace: &Trace = object;
    // SAFETY(review): relies on the unstable `std::raw::TraitObject` layout
    // of trait-object fat pointers (data ptr + vtable ptr).
    unsafe { transmute(trace) }
}

/// Write a reference count increment to the journal for a newly allocated object
#[inline]
fn write<T: Trace>(object: &T, is_new: bool, flags: usize) {
    GC_JOURNAL.with(|j| {
        let tx = unsafe { &*j.get() };
        let tobj = as_traitobject(object);
        // pack the flag bits (e.g. INC_BIT/NEW_BIT) into the low bits of the
        // data pointer
        let ptr = (tobj.data as usize) | flags;
        // set the traversible bit on the vtable word for new objects that
        // contain further GC pointers
        let mut vtable = tobj.vtable as usize;
        if is_new && object.traversible() {
            vtable |= TRAVERSE_BIT;
        }
        tx.send(Object {
            ptr: ptr,
            vtable: vtable,
        });
    });
}

// GcBox implementation

impl<T: Trace> GcBox<T> {
    fn new(value: T) -> GcBox<T> {
        GcBox {
            value: value,
        }
    }
}

unsafe impl<T: Trace> Trace for GcBox<T> {
    #[inline]
    fn traversible(&self) -> bool {
        self.value.traversible()
    }

    #[inline]
    unsafe fn trace(&self, heap: &mut TraceStack) {
        self.value.trace(heap);
    }
}

// GcRoot implementation

impl<T: Trace> GcRoot<T> {
    /// Put a new object on the heap and hand ownership to the GC, writing a reference count
    /// increment to the journal.
    pub fn new(value: T) -> GcRoot<T> {
        let boxed = Box::new(GcBox::new(value));
        write(&*boxed, true, NEW_BIT | INC_BIT);
        GcRoot {
            ptr: Box::into_raw(boxed)
        }
    }

    /// Wrap an existing heap pointer as a root, journaling an increment.
    fn from_raw(ptr: *mut GcBox<T>) -> GcRoot<T> {
        let root = GcRoot { ptr: ptr };
        write(&*root, false, INC_BIT);
        root
    }

    fn ptr(&self) -> *mut GcBox<T> {
        self.ptr
    }

    fn value(&self) -> &T {
        unsafe { &(*self.ptr).value }
    }

    fn value_mut(&mut self) -> &mut T {
        unsafe { &mut (*self.ptr).value }
    }
}

impl<T: Trace> Drop for GcRoot<T> {
    fn drop(&mut self) {
        // Journal an entry with no INC_BIT set — presumably interpreted by
        // the GC thread as a reference-count decrement.
        write(&**self, false, 0);
    }
}

impl<T: Trace> Deref for GcRoot<T> {
    type Target = T;

    fn deref(&self) -> &T {
        self.value()
    }
}

impl<T: Trace> DerefMut for GcRoot<T> {
    fn deref_mut(&mut self) -> &mut T {
        self.value_mut()
    }
}

impl<T: Trace> Clone for GcRoot<T> {
    fn clone(&self) -> Self {
        // Cloning a root journals a fresh increment via from_raw.
        GcRoot::from_raw(self.ptr())
    }
}

// Gc implementation

impl<T: Trace> Gc<T> {
    /// Creates a new null pointer.
    pub fn null() -> Gc<T> {
        Gc {
            ptr: null_mut(),
        }
    }

    /// Move a value to the heap and create a pointer to it.
    pub fn new(value: T) -> Gc<T> {
        let boxed = Box::new(GcBox::new(value));
        // NEW_BIT only — no INC_BIT: a Gc is not a root.
        write(&*boxed, true, NEW_BIT);
        Gc {
            ptr: Box::into_raw(boxed)
        }
    }

    /// Return the raw pointer value, or None if it is a null pointer.
    pub fn as_raw(&self) -> Option<*mut GcBox<T>> {
        if self.ptr.is_null() {
            None
        } else {
            Some(self.ptr)
        }
    }

    /// Pointer equality comparison.
    pub fn is(&self, other: Gc<T>) -> bool {
        self.ptr == other.ptr
    }

    fn from_raw(ptr: *mut GcBox<T>) -> Gc<T> {
        Gc {
            ptr: ptr,
        }
    }

    fn ptr(&self) -> *mut GcBox<T> {
        self.ptr
    }

    fn value(&self) -> &T {
        unsafe { &(*self.ptr).value }
    }

    fn value_mut(&mut self) -> &mut T {
        unsafe { &mut (*self.ptr).value }
    }
}

impl<T: Trace> Deref for Gc<T> {
    type Target = T;

    fn deref(&self) -> &T {
        self.value()
    }
}

impl<T: Trace> DerefMut for Gc<T> {
    fn deref_mut(&mut self) -> &mut T {
        self.value_mut()
    }
}

impl<T: Trace> Clone for Gc<T> {
    fn clone(&self) -> Self {
        // Plain pointer copy; no journal traffic for non-root pointers.
        Gc {
            ptr: self.ptr,
        }
    }
}

impl<T: Trace> Copy for Gc<T> {}

// GcAtomic implementation

impl<T: Trace> GcAtomic<T> {
    /// Instantiate a new null pointer
    pub fn null() -> GcAtomic<T> {
        GcAtomic {
            ptr: AtomicPtr::new(null_mut())
        }
    }

    /// Instantiate a new pointer, moving `value` to the heap. Writes to the journal.
    pub fn new(value: T) -> GcAtomic<T> {
        let boxed = Box::new(GcBox::new(value));
        write(&*boxed, true, NEW_BIT);
        GcAtomic {
            ptr: AtomicPtr::new(Box::into_raw(boxed)),
        }
    }

    /// Root the pointer by loading it into a `GcRoot<T>`
    ///
    /// Panics if `order` is `Release` or `AcqRel`.
    pub fn load_into_root(&self, order: Ordering) -> GcRoot<T> {
        let root = GcRoot {
            ptr: self.ptr.load(order),
        };
        // Journal the increment for the newly created root.
        write(&*root, false, INC_BIT);
        root
    }

    /// Copy the pointer into a new `Gc` instance.
    ///
    /// Panics if `order` is `Release` or `AcqRel`.
    pub fn load_into_gc(&self, order: Ordering) -> Gc<T> {
        Gc::from_raw(self.ptr.load(order))
    }

    /// Fetch the current raw pointer value
    ///
    /// Panics if `order` is `Release` or `AcqRel`.
    pub fn load_raw(&self, order: Ordering) -> *mut GcBox<T> {
        self.ptr.load(order)
    }

    /// Replace the current pointer value with the pointer from the given `GcRoot`.
    ///
    /// Panics if `order` is `Acquire` or `AcqRel`.
    pub fn store_from_root(&self, root: GcRoot<T>, order: Ordering) {
        self.ptr.store(root.ptr(), order);
    }

    /// Replace the current pointer value with the pointer from the given `Gc`.
    ///
    /// Panics of `order` is `Acquire` or `AcqRel`.
    pub fn store_from_gc(&self, gc: Gc<T>, order: Ordering) {
        self.ptr.store(gc.ptr(), order);
    }

    /// Replace the current pointer value with the given raw pointer
    ///
    /// Panics if `order` is `Acquire` or `AcqRel`.
    pub fn store_raw(&self, ptr: *mut GcBox<T>, order: Ordering) {
        self.ptr.store(ptr, order);
    }
}
use std::{borrow::Cow, collections::HashMap};

use serde::Serialize;

/// Position of a string within the shared-strings table.
#[derive(Debug, Copy, Clone, Serialize, Eq, PartialEq, Hash)]
pub(crate) struct SharedStringIndex(pub(crate) usize);

pub(crate) type SharedString = Cow<'static, str>;

/// Interning table mapping each distinct string to a stable index assigned in
/// insertion order.
#[derive(Default, Serialize)]
pub(crate) struct SharedStrings {
    pub(crate) strings: HashMap<SharedString, SharedStringIndex>,
}

impl SharedStrings {
    /// Return the index for `value`, interning it with the next sequential
    /// index if it has not been seen before.
    pub(crate) fn insert(&mut self, value: Cow<'static, str>) -> SharedStringIndex {
        // The next free index equals the current table size; compute it
        // before `entry` takes a mutable borrow of the map.
        let next_index = SharedStringIndex(self.strings.len());
        match self.strings.entry(value) {
            std::collections::hash_map::Entry::Occupied(slot) => *slot.get(),
            std::collections::hash_map::Entry::Vacant(slot) => *slot.insert(next_index),
        }
    }
}
#![no_main]
#[macro_use]
extern crate libfuzzer_sys;
extern crate seq_io;
#[macro_use]
extern crate matches;

use std::io::prelude::*;
use std::io;
use seq_io::fasta::{self, Record};

#[macro_use]
mod common;

// Differential fuzzing: parse the same input with seq_io's FASTA reader and
// with the trivial SimpleReader below, and assert record-by-record agreement.
fuzz_target!(|data: &[u8]| {
    // determine reader capacity (max: 65 KiB) from the first two input bytes
    if data.len() < 2 {
        return;
    }
    let mut a: [u8; 2] = Default::default();
    a.copy_from_slice(&data[..2]);
    let cap = u16::from_le_bytes(a) as usize;
    let data = &data[2..];
    // Ensure minimum capacity and only accept UTF-8 data for easier debugging
    if cap < 3 || ::std::str::from_utf8(&data).is_err() {
        return;
    }

    let mut simple_reader = SimpleReader::new(data);
    let mut reader = fasta::Reader::with_capacity(data, cap);
    let mut i = 0;
    while let Some(simple_res) = simple_reader.next() {
        i += 1;
        let res = reader.next().unwrap_or_else(|| {
            panic!(format!(
                "Result {} not returned by seq_io reader: {:?}",
                i, simple_res
            ));
        });
        let (mut simple_rec, rec) = match (simple_res, res) {
            (Ok(simple_r), Ok(r)) => (simple_r, r),
            (Err(e), Ok(_)) => panic!(format!("simple reader produced error, seq_io didn't ({})", e)),
            // NOTE(review): message says "rust-bio" but the comparison reader
            // here is SimpleReader (derived from rust-bio's implementation).
            (Ok(_), Err(e)) => panic!(format!("seq_io produced error, rust-bio didn't ({})", e)),
            // Both readers errored: agreement, move on.
            _ => continue,
        };
        assert_eq!(simple_rec.id.as_slice(), rec.id_bytes());
        assert_eq!(simple_rec.desc.as_ref().map(|d| d.as_slice()), rec.desc_bytes());
        assert_eq!(&simple_rec.seq, &rec.owned_seq());
    }
    // The simple reader is exhausted; seq_io must be exhausted too.
    if let Some(res) = reader.next() {
        panic!(format!(
            "Result {} not returned by seq_io reader: {:?}",
            i + 1,
            res
        ))
    }
});

/// A parsed FASTA record as produced by `SimpleReader`.
#[derive(Default, Clone, Debug)]
pub struct SimpleRecord {
    id: Vec<u8>,
    desc: Option<Vec<u8>>,
    seq: Vec<u8>,
}

/// FASTA reader that should behave like seq_io::fasta::Reader, but using a much simpler
/// (and slower) implementation and reduced functionality
/// Based on the Rust-Bio FASTA reader implementation.
#[derive(Debug)]
pub struct SimpleReader<R: io::Read> {
    reader: io::BufReader<R>,
    // Buffer for the most recently read line (newline included); also acts
    // as a one-line lookahead carrying a '>' header between `next()` calls.
    line: Vec<u8>,
    // Once an error has been returned, subsequent `next()` calls yield None.
    error_has_occured: bool,
}

impl<R: io::Read> SimpleReader<R> {
    pub fn new(reader: R) -> Self {
        SimpleReader {
            reader: io::BufReader::new(reader),
            line: vec![],
            error_has_occured: false,
        }
    }
}

impl<R> SimpleReader<R>
where
    R: io::Read,
{
    /// Read the next FASTA record, or `None` at end of input. Returns
    /// `Some(Err(..))` once (and `None` thereafter) if the current position
    /// does not start a record with '>'.
    fn next(&mut self) -> Option<io::Result<SimpleRecord>> {
        if self.error_has_occured {
            return None;
        }
        let mut record = SimpleRecord::default();
        // No lookahead line buffered: skip leading blank lines until the
        // first non-empty line (expected to be a '>' header).
        if self.line.is_empty() {
            loop {
                if try_opt!(self.reader.read_until(b'\n', &mut self.line)) == 0 {
                    // End of input
                    return None;
                }
                if !common::trim_newline(&self.line).is_empty() {
                    // reached the first non-empty line
                    break;
                }
                self.line.clear();
            }
        }

        if self.line.first() != Some(&b'>') {
            self.error_has_occured = true;
            return Some(Err(io::Error::new(
                io::ErrorKind::Other,
                "Expected > at record start.",
            )));
        }

        {
            // Header line after the leading '>': "id[ description]".
            let mut header_fields = common::trim_newline(&self.line[1..])
                .splitn(2, |&b| b == b' ');
            record.id = header_fields.next().unwrap().to_owned();
            record.desc = header_fields.next().map(|s| s.to_owned());
        }

        // Sequence lines: accumulate until EOF or the next '>' header, which
        // stays buffered in `self.line` for the following call.
        loop {
            self.line.clear();
            try_opt!(self.reader.read_until(b'\n', &mut self.line));
            if self.line.is_empty() || self.line.starts_with(b">") {
                break;
            }
            record.seq.extend_from_slice(common::trim_newline(&self.line));
        }

        Some(Ok(record))
    }
}
use std::pin::Pin;
use std::task::{self, Poll};

use bytes::Bytes;
use destream::en::{self, IntoStream};
use futures::ready;
use futures::stream::{Fuse, FusedStream, Stream, StreamExt, TryStreamExt};
use pin_project::pin_project;

use crate::constants::*;

use super::{Encoder, JSONStream};
use futures::task::Context;

/// Encodes one `key: value` map entry: drains the key's byte stream, emits a
/// separator, then drains the value's byte stream.
#[pin_project]
struct JSONMapEntryStream<'en> {
    #[pin]
    key: Fuse<JSONStream<'en>>,
    #[pin]
    value: Fuse<JSONStream<'en>>,
}

impl<'en> JSONMapEntryStream<'en> {
    /// Turn `key` and `value` into their encoded byte streams up front; fails
    /// if either cannot be converted into a stream.
    fn new<K: IntoStream<'en>, V: IntoStream<'en>>(key: K, value: V) -> Result<Self, super::Error> {
        let key = key.into_stream(Encoder)?;
        let value = value.into_stream(Encoder)?;
        Ok(Self {
            key: key.fuse(),
            value: value.fuse(),
        })
    }
}

impl<'en> Stream for JSONMapEntryStream<'en> {
    type Item = Result<Bytes, super::Error>;

    fn poll_next(self: Pin<&mut Self>, cxt: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        let mut this = self.project();
        let result = if !this.key.is_terminated() {
            match ready!(this.key.as_mut().poll_next(cxt)) {
                Some(result) => Some(result),
                // Key stream just finished: emit the separator before moving
                // on to the value stream on the next poll.
                None => Some(Ok(Bytes::from_static(COLON))),
            }
        } else if !this.value.is_terminated() {
            match ready!(this.value.as_mut().poll_next(cxt)) {
                Some(result) => Some(result),
                None => None,
            }
        } else {
            None
        };
        Poll::Ready(result)
    }
}

impl<'en> FusedStream for JSONMapEntryStream<'en> {
    fn is_terminated(&self) -> bool {
        self.key.is_terminated() && self.value.is_terminated()
    }
}

/// Wraps a stream of inner byte streams, concatenating them between a `start`
/// delimiter, commas, and an `end` delimiter — the shared machinery behind
/// encoding JSON lists and maps.
#[pin_project]
struct JSONEncodingStream<
    I: Stream<Item = Result<Bytes, super::Error>>,
    S: Stream<Item = Result<I, super::Error>>,
> {
    #[pin]
    source: Fuse<S>,
    // Currently-draining inner stream, if any.
    next: Option<Pin<Box<I>>>,
    // Whether the start delimiter has been emitted.
    started: bool,
    // Whether the end delimiter has been emitted.
    finished: bool,
    start: &'static [u8],
    end: &'static [u8],
}

impl<I: Stream<Item = Result<Bytes, super::Error>>, S: Stream<Item = Result<I, super::Error>>>
    Stream for JSONEncodingStream<I, S>
{
    type Item = Result<Bytes, super::Error>;

    fn poll_next(self: Pin<&mut Self>, cxt: &mut task::Context) -> Poll<Option<Self::Item>> {
        let mut this = self.project();

        Poll::Ready(loop {
            match this.next {
                // Currently draining an item's byte stream.
                Some(next) => match ready!(next.as_mut().poll_next(cxt)) {
                    Some(result) => break Some(result),
                    None => *this.next = None,
                },
                // Between items: pull the next item from the source.
                None => match ready!(this.source.as_mut().poll_next(cxt)) {
                    Some(Ok(next)) => {
                        *this.next = Some(Box::pin(next));

                        if *this.started {
                            break Some(Ok(Bytes::from_static(COMMA)));
                        } else {
                            // First item: open with the start delimiter.
                            *this.started = true;
                            break Some(Ok(Bytes::from_static(*this.start)));
                        }
                    }
                    Some(Err(cause)) => break Some(Err(en::Error::custom(cause))),
                    // Empty source: still emit the start delimiter once.
                    None if !*this.started => {
                        *this.started = true;
                        break Some(Ok(Bytes::from_static(*this.start)));
                    }
                    // Source exhausted: close with the end delimiter once.
                    None if !*this.finished => {
                        *this.finished = true;
                        break Some(Ok(Bytes::from_static(*this.end)));
                    }
                    None => break None,
                },
            }
        })
    }
}

impl<I: Stream<Item = Result<Bytes, super::Error>>, S: Stream<Item = Result<I, super::Error>>>
    FusedStream for JSONEncodingStream<I, S>
{
    fn is_terminated(&self) -> bool {
        self.finished
    }
}

/// Encode a stream of encodable elements as a JSON list.
pub fn encode_list<
    'en,
    I: IntoStream<'en>,
    S: Stream<Item = Result<I, super::Error>> + Send + Unpin + 'en,
>(
    seq: S,
) -> impl Stream<Item = Result<Bytes, super::Error>> + 'en {
    let source = seq
        .map(|result| result.and_then(|element| element.into_stream(Encoder)))
        .map_err(en::Error::custom);

    JSONEncodingStream {
        source: source.fuse(),
        next: None,
        started: false,
        finished: false,
        start: LIST_BEGIN,
        end: LIST_END,
    }
}

/// Encode a stream of key/value pairs as a JSON map (object).
pub fn encode_map<
    'en,
    K: IntoStream<'en>,
    V: IntoStream<'en>,
    S: Stream<Item = Result<(K, V), super::Error>> + Send + Unpin + 'en,
>(
    seq: S,
) -> impl Stream<Item = Result<Bytes, super::Error>> + Send + Unpin + 'en {
    let source = seq
        .map(|result| result.and_then(|(key, value)| JSONMapEntryStream::new(key, value)))
        .map_err(en::Error::custom);

    JSONEncodingStream {
        source: source.fuse(),
        next: None,
        started: false,
        finished: false,
        start: MAP_BEGIN,
        end: MAP_END,
    }
}
#![allow(non_snake_case)] use clear_on_drop::clear::Clear; use core::mem; use curve25519_dalek::ristretto::{CompressedRistretto, RistrettoPoint}; use curve25519_dalek::scalar::Scalar; use curve25519_dalek::traits::{Identity, MultiscalarMul}; use merlin::Transcript; use super::{ ConstraintSystem, LinearCombination, R1CSProof, RandomizableConstraintSystem, RandomizedConstraintSystem, Variable, }; use crate::errors::R1CSError; use crate::generators::{BulletproofGens, PedersenGens}; use crate::inner_product_proof::InnerProductProof; use crate::transcript::TranscriptProtocol; /// A [`ConstraintSystem`] implementation for use by the prover. /// /// The prover commits high-level variables and their blinding factors `(v, v_blinding)`, /// allocates low-level variables and creates constraints in terms of these /// high-level variables and low-level variables. /// /// When all constraints are added, the proving code calls `prove` /// which consumes the `Prover` instance, samples random challenges /// that instantiate the randomized constraints, and creates a complete proof. pub struct Prover<'t, 'g> { transcript: &'t mut Transcript, pc_gens: &'g PedersenGens, /// The constraints accumulated so far. constraints: Vec<LinearCombination>, /// Stores assignments to the "left" of multiplication gates a_L: Vec<Scalar>, /// Stores assignments to the "right" of multiplication gates a_R: Vec<Scalar>, /// Stores assignments to the "output" of multiplication gates a_O: Vec<Scalar>, /// High-level witness data (value openings to V commitments) v: Vec<Scalar>, /// High-level witness data (blinding openings to V commitments) v_blinding: Vec<Scalar>, /// This list holds closures that will be called in the second phase of the protocol, /// when non-randomized variables are committed. deferred_constraints: Vec<Box<dyn Fn(&mut RandomizingProver<'t, 'g>) -> Result<(), R1CSError>>>, /// Index of a pending multiplier that's not fully assigned yet. 
pending_multiplier: Option<usize>, } /// Prover in the randomizing phase. /// /// Note: this type is exported because it is used to specify the associated type /// in the public impl of a trait `ConstraintSystem`, which boils down to allowing compiler to /// monomorphize the closures for the proving and verifying code. /// However, this type cannot be instantiated by the user and therefore can only be used within /// the callback provided to `specify_randomized_constraints`. pub struct RandomizingProver<'t, 'g> { prover: Prover<'t, 'g>, } /// Overwrite secrets with null bytes when they go out of scope. impl<'t, 'g> Drop for Prover<'t, 'g> { fn drop(&mut self) { self.v.clear(); self.v_blinding.clear(); // Important: due to how ClearOnDrop auto-implements InitializableFromZeroed // for T: Default, calling .clear() on Vec compiles, but does not // clear the content. Instead, it only clears the Vec's header. // Clearing the underlying buffer item-by-item will do the job, but will // keep the header as-is, which is fine since the header does not contain secrets. for e in self.a_L.iter_mut() { e.clear(); } for e in self.a_R.iter_mut() { e.clear(); } for e in self.a_O.iter_mut() { e.clear(); } // XXX use ClearOnDrop instead of doing the above } } impl<'t, 'g> ConstraintSystem for Prover<'t, 'g> { fn transcript(&mut self) -> &mut Transcript { self.transcript } fn multiply( &mut self, mut left: LinearCombination, mut right: LinearCombination, ) -> (Variable, Variable, Variable) { // Synthesize the assignments for l,r,o let l = self.eval(&left); let r = self.eval(&right); let o = l * r; // Create variables for l,r,o ... let l_var = Variable::MultiplierLeft(self.a_L.len()); let r_var = Variable::MultiplierRight(self.a_R.len()); let o_var = Variable::MultiplierOutput(self.a_O.len()); // ... 
and assign them self.a_L.push(l); self.a_R.push(r); self.a_O.push(o); // Constrain l,r,o: left.terms.push((l_var, -Scalar::one())); right.terms.push((r_var, -Scalar::one())); self.constrain(left); self.constrain(right); (l_var, r_var, o_var) } fn allocate(&mut self, assignment: Option<Scalar>) -> Result<Variable, R1CSError> { let scalar = assignment.ok_or(R1CSError::MissingAssignment)?; match self.pending_multiplier { None => { let i = self.a_L.len(); self.pending_multiplier = Some(i); self.a_L.push(scalar); self.a_R.push(Scalar::zero()); self.a_O.push(Scalar::zero()); Ok(Variable::MultiplierLeft(i)) } Some(i) => { self.pending_multiplier = None; self.a_R[i] = scalar; self.a_O[i] = self.a_L[i] * self.a_R[i]; Ok(Variable::MultiplierRight(i)) } } } fn allocate_multiplier( &mut self, input_assignments: Option<(Scalar, Scalar)>, ) -> Result<(Variable, Variable, Variable), R1CSError> { let (l, r) = input_assignments.ok_or(R1CSError::MissingAssignment)?; let o = l * r; // Create variables for l,r,o ... let l_var = Variable::MultiplierLeft(self.a_L.len()); let r_var = Variable::MultiplierRight(self.a_R.len()); let o_var = Variable::MultiplierOutput(self.a_O.len()); // ... and assign them self.a_L.push(l); self.a_R.push(r); self.a_O.push(o); Ok((l_var, r_var, o_var)) } fn multipliers_len(&self) -> usize { self.a_L.len() } fn constrain(&mut self, lc: LinearCombination) { // TODO: check that the linear combinations are valid // (e.g. that variables are valid, that the linear combination evals to 0 for prover, etc). 
self.constraints.push(lc); } } impl<'t, 'g> RandomizableConstraintSystem for Prover<'t, 'g> { type RandomizedCS = RandomizingProver<'t, 'g>; fn specify_randomized_constraints<F>(&mut self, callback: F) -> Result<(), R1CSError> where F: 'static + Fn(&mut Self::RandomizedCS) -> Result<(), R1CSError>, { self.deferred_constraints.push(Box::new(callback)); Ok(()) } } impl<'t, 'g> ConstraintSystem for RandomizingProver<'t, 'g> { fn transcript(&mut self) -> &mut Transcript { self.prover.transcript } fn multiply( &mut self, left: LinearCombination, right: LinearCombination, ) -> (Variable, Variable, Variable) { self.prover.multiply(left, right) } fn allocate(&mut self, assignment: Option<Scalar>) -> Result<Variable, R1CSError> { self.prover.allocate(assignment) } fn allocate_multiplier( &mut self, input_assignments: Option<(Scalar, Scalar)>, ) -> Result<(Variable, Variable, Variable), R1CSError> { self.prover.allocate_multiplier(input_assignments) } fn multipliers_len(&self) -> usize { self.prover.multipliers_len() } fn constrain(&mut self, lc: LinearCombination) { self.prover.constrain(lc) } } impl<'t, 'g> RandomizedConstraintSystem for RandomizingProver<'t, 'g> { fn challenge_scalar(&mut self, label: &'static [u8]) -> Scalar { self.prover.transcript.challenge_scalar(label) } } impl<'t, 'g> Prover<'t, 'g> { /// Construct an empty constraint system with specified external /// input variables. /// /// # Inputs /// /// The `bp_gens` and `pc_gens` are generators for Bulletproofs /// and for the Pedersen commitments, respectively. The /// [`BulletproofGens`] should have `gens_capacity` greater than /// the number of multiplication constraints that will eventually /// be added into the constraint system. /// /// The `transcript` parameter is a Merlin proof transcript. The /// `ProverCS` holds onto the `&mut Transcript` until it consumes /// itself during [`ProverCS::prove`], releasing its borrow of the /// transcript. 
This ensures that the transcript cannot be /// altered except by the `ProverCS` before proving is complete. /// /// # Returns /// /// Returns a new `Prover` instance. pub fn new(pc_gens: &'g PedersenGens, transcript: &'t mut Transcript) -> Self { transcript.r1cs_domain_sep(); Prover { pc_gens, transcript, v: Vec::new(), v_blinding: Vec::new(), constraints: Vec::new(), a_L: Vec::new(), a_R: Vec::new(), a_O: Vec::new(), deferred_constraints: Vec::new(), pending_multiplier: None, } } /// Creates commitment to a high-level variable and adds it to the transcript. /// /// # Inputs /// /// The `v` and `v_blinding` parameters are openings to the /// commitment to the external variable for the constraint /// system. Passing the opening (the value together with the /// blinding factor) makes it possible to reference pre-existing /// commitments in the constraint system. All external variables /// must be passed up-front, so that challenges produced by /// [`ConstraintSystem::challenge_scalar`] are bound to the /// external variables. /// /// # Returns /// /// Returns a pair of a Pedersen commitment (as a compressed Ristretto point), /// and a [`Variable`] corresponding to it, which can be used to form constraints. pub fn commit(&mut self, v: Scalar, v_blinding: Scalar) -> (CompressedRistretto, Variable) { let i = self.v.len(); self.v.push(v); self.v_blinding.push(v_blinding); // Add the commitment to the transcript. let V = self.pc_gens.commit(v, v_blinding).compress(); self.transcript.append_point(b"V", &V); (V, Variable::Committed(i)) } /// Use a challenge, `z`, to flatten the constraints in the /// constraint system into vectors used for proving and /// verification. /// /// # Output /// /// Returns a tuple of /// ```text /// (wL, wR, wO, wV) /// ``` /// where `w{L,R,O}` is \\( z \cdot z^Q \cdot W_{L,R,O} \\). 
    fn flattened_constraints(
        &mut self,
        z: &Scalar,
    ) -> (Vec<Scalar>, Vec<Scalar>, Vec<Scalar>, Vec<Scalar>) {
        let n = self.a_L.len();
        let m = self.v.len();

        let mut wL = vec![Scalar::zero(); n];
        let mut wR = vec![Scalar::zero(); n];
        let mut wO = vec![Scalar::zero(); n];
        let mut wV = vec![Scalar::zero(); m];

        // exp_z runs through z^1, z^2, ... — one power per constraint row.
        let mut exp_z = *z;
        for lc in self.constraints.iter() {
            for (var, coeff) in &lc.terms {
                match var {
                    Variable::MultiplierLeft(i) => {
                        wL[*i] += exp_z * coeff;
                    }
                    Variable::MultiplierRight(i) => {
                        wR[*i] += exp_z * coeff;
                    }
                    Variable::MultiplierOutput(i) => {
                        wO[*i] += exp_z * coeff;
                    }
                    Variable::Committed(i) => {
                        // Note the sign flip for committed-variable weights.
                        wV[*i] -= exp_z * coeff;
                    }
                    Variable::One() => {
                        // The prover doesn't need to handle constant terms
                    }
                }
            }
            exp_z *= z;
        }

        (wL, wR, wO, wV)
    }

    // Evaluate a linear combination against the current witness assignment.
    fn eval(&self, lc: &LinearCombination) -> Scalar {
        lc.terms
            .iter()
            .map(|(var, coeff)| {
                coeff
                    * match var {
                        Variable::MultiplierLeft(i) => self.a_L[*i],
                        Variable::MultiplierRight(i) => self.a_R[*i],
                        Variable::MultiplierOutput(i) => self.a_O[*i],
                        Variable::Committed(i) => self.v[*i],
                        Variable::One() => Scalar::one(),
                    }
            })
            .sum()
    }

    /// Calls all remembered callbacks with an API that
    /// allows generating challenge scalars.
    fn create_randomized_constraints(mut self) -> Result<Self, R1CSError> {
        // Clear the pending multiplier (if any) because it was committed into A_L/A_R/S.
        self.pending_multiplier = None;

        if self.deferred_constraints.len() == 0 {
            self.transcript.r1cs_1phase_domain_sep();
            Ok(self)
        } else {
            self.transcript.r1cs_2phase_domain_sep();
            // Note: the wrapper could've used &mut instead of ownership,
            // but specifying lifetimes for boxed closures is not going to be nice,
            // so we move the self into wrapper and then move it back out afterwards.
            let mut callbacks = mem::replace(&mut self.deferred_constraints, Vec::new());
            let mut wrapped_self = RandomizingProver { prover: self };
            for callback in callbacks.drain(..) {
                callback(&mut wrapped_self)?;
            }
            Ok(wrapped_self.prover)
        }
    }

    /// Consume this `ConstraintSystem` to produce a proof.
    ///
    /// Runs the two-phase protocol: commit first-phase wires, run deferred
    /// (randomized) constraints, commit second-phase wires, then build the
    /// blinded vector polynomials and the inner-product argument.
    pub fn prove(mut self, bp_gens: &BulletproofGens) -> Result<R1CSProof, R1CSError> {
        use crate::util;
        use std::iter;

        // Commit a length _suffix_ for the number of high-level variables.
        // We cannot do this in advance because user can commit variables one-by-one,
        // but this suffix provides safe disambiguation because each variable
        // is prefixed with a separate label.
        self.transcript.append_u64(b"m", self.v.len() as u64);

        // Create a `TranscriptRng` from the high-level witness data
        //
        // The prover wants to rekey the RNG with its witness data.
        //
        // This consists of the high level witness data (the v's and
        // v_blinding's), as well as the low-level witness data (a_L,
        // a_R, a_O). Since the low-level data should (hopefully) be
        // determined by the high-level data, it doesn't give any
        // extra entropy for reseeding the RNG.
        //
        // Since the v_blindings should be random scalars (in order to
        // protect the v's in the commitments), we don't gain much by
        // committing the v's as well as the v_blinding's.
        let mut rng = {
            let mut builder = self.transcript.build_rng();

            // Commit the blinding factors for the input wires
            for v_b in &self.v_blinding {
                builder = builder.rekey_with_witness_bytes(b"v_blinding", v_b.as_bytes());
            }

            use rand::thread_rng;
            builder.finalize(&mut thread_rng())
        };

        // Commit to the first-phase low-level witness variables.
        let n1 = self.a_L.len();

        if bp_gens.gens_capacity < n1 {
            return Err(R1CSError::InvalidGeneratorsLength);
        }

        // We are performing a single-party circuit proof, so party index is 0.
        let gens = bp_gens.share(0);

        let i_blinding1 = Scalar::random(&mut rng);
        let o_blinding1 = Scalar::random(&mut rng);
        let s_blinding1 = Scalar::random(&mut rng);

        let mut s_L1: Vec<Scalar> = (0..n1).map(|_| Scalar::random(&mut rng)).collect();
        let mut s_R1: Vec<Scalar> = (0..n1).map(|_| Scalar::random(&mut rng)).collect();

        // A_I = <a_L, G> + <a_R, H> + i_blinding * B_blinding
        let A_I1 = RistrettoPoint::multiscalar_mul(
            iter::once(&i_blinding1)
                .chain(self.a_L.iter())
                .chain(self.a_R.iter()),
            iter::once(&self.pc_gens.B_blinding)
                .chain(gens.G(n1))
                .chain(gens.H(n1)),
        )
        .compress();

        // A_O = <a_O, G> + o_blinding * B_blinding
        let A_O1 = RistrettoPoint::multiscalar_mul(
            iter::once(&o_blinding1).chain(self.a_O.iter()),
            iter::once(&self.pc_gens.B_blinding).chain(gens.G(n1)),
        )
        .compress();

        // S = <s_L, G> + <s_R, H> + s_blinding * B_blinding
        let S1 = RistrettoPoint::multiscalar_mul(
            iter::once(&s_blinding1)
                .chain(s_L1.iter())
                .chain(s_R1.iter()),
            iter::once(&self.pc_gens.B_blinding)
                .chain(gens.G(n1))
                .chain(gens.H(n1)),
        )
        .compress();

        self.transcript.append_point(b"A_I1", &A_I1);
        self.transcript.append_point(b"A_O1", &A_O1);
        self.transcript.append_point(b"S1", &S1);

        // Process the remaining constraints.
        self = self.create_randomized_constraints()?;

        // Pad zeros to the next power of two (or do that implicitly when creating vectors)
        // If the number of multiplications is not 0 or a power of 2, then pad the circuit.
        let n = self.a_L.len();
        let n2 = n - n1;
        let padded_n = self.a_L.len().next_power_of_two();
        let pad = padded_n - n;

        if bp_gens.gens_capacity < padded_n {
            return Err(R1CSError::InvalidGeneratorsLength);
        }

        // Commit to the second-phase low-level witness variables
        let has_2nd_phase_commitments = n2 > 0;

        let (i_blinding2, o_blinding2, s_blinding2) = if has_2nd_phase_commitments {
            (
                Scalar::random(&mut rng),
                Scalar::random(&mut rng),
                Scalar::random(&mut rng),
            )
        } else {
            (Scalar::zero(), Scalar::zero(), Scalar::zero())
        };

        let mut s_L2: Vec<Scalar> = (0..n2).map(|_| Scalar::random(&mut rng)).collect();
        let mut s_R2: Vec<Scalar> = (0..n2).map(|_| Scalar::random(&mut rng)).collect();

        let (A_I2, A_O2, S2) = if has_2nd_phase_commitments {
            (
                // A_I = <a_L, G> + <a_R, H> + i_blinding * B_blinding
                RistrettoPoint::multiscalar_mul(
                    iter::once(&i_blinding2)
                        .chain(self.a_L.iter().skip(n1))
                        .chain(self.a_R.iter().skip(n1)),
                    iter::once(&self.pc_gens.B_blinding)
                        .chain(gens.G(n).skip(n1))
                        .chain(gens.H(n).skip(n1)),
                )
                .compress(),
                // A_O = <a_O, G> + o_blinding * B_blinding
                RistrettoPoint::multiscalar_mul(
                    iter::once(&o_blinding2).chain(self.a_O.iter().skip(n1)),
                    iter::once(&self.pc_gens.B_blinding).chain(gens.G(n).skip(n1)),
                )
                .compress(),
                // S = <s_L, G> + <s_R, H> + s_blinding * B_blinding
                RistrettoPoint::multiscalar_mul(
                    iter::once(&s_blinding2)
                        .chain(s_L2.iter())
                        .chain(s_R2.iter()),
                    iter::once(&self.pc_gens.B_blinding)
                        .chain(gens.G(n).skip(n1))
                        .chain(gens.H(n).skip(n1)),
                )
                .compress(),
            )
        } else {
            // Since we are using zero blinding factors and
            // there are no variables to commit,
            // the commitments _must_ be identity points,
            // so we can hardcode them saving 3 mults+compressions.
            (
                CompressedRistretto::identity(),
                CompressedRistretto::identity(),
                CompressedRistretto::identity(),
            )
        };

        self.transcript.append_point(b"A_I2", &A_I2);
        self.transcript.append_point(b"A_O2", &A_O2);
        self.transcript.append_point(b"S2", &S2);

        // 4. Compute blinded vector polynomials l(x) and r(x)
        let y = self.transcript.challenge_scalar(b"y");
        let z = self.transcript.challenge_scalar(b"z");

        let (wL, wR, wO, wV) = self.flattened_constraints(&z);

        let mut l_poly = util::VecPoly3::zero(n);
        let mut r_poly = util::VecPoly3::zero(n);

        let mut exp_y = Scalar::one(); // y^n starting at n=0
        let y_inv = y.invert();
        let exp_y_inv = util::exp_iter(y_inv).take(padded_n).collect::<Vec<_>>();

        let sLsR = s_L1
            .iter()
            .chain(s_L2.iter())
            .zip(s_R1.iter().chain(s_R2.iter()));
        for (i, (sl, sr)) in sLsR.enumerate() {
            // l_poly.0 = 0
            // l_poly.1 = a_L + y^-n * (z * z^Q * W_R)
            l_poly.1[i] = self.a_L[i] + exp_y_inv[i] * wR[i];
            // l_poly.2 = a_O
            l_poly.2[i] = self.a_O[i];
            // l_poly.3 = s_L
            l_poly.3[i] = *sl;
            // r_poly.0 = (z * z^Q * W_O) - y^n
            r_poly.0[i] = wO[i] - exp_y;
            // r_poly.1 = y^n * a_R + (z * z^Q * W_L)
            r_poly.1[i] = exp_y * self.a_R[i] + wL[i];
            // r_poly.2 = 0
            // r_poly.3 = y^n * s_R
            r_poly.3[i] = exp_y * sr;

            exp_y = exp_y * y; // y^i -> y^(i+1)
        }

        let t_poly = util::VecPoly3::special_inner_product(&l_poly, &r_poly);

        // Commit to the t-polynomial coefficients (t_2 is handled via the
        // high-level variable blindings below, so it has no fresh blinding).
        let t_1_blinding = Scalar::random(&mut rng);
        let t_3_blinding = Scalar::random(&mut rng);
        let t_4_blinding = Scalar::random(&mut rng);
        let t_5_blinding = Scalar::random(&mut rng);
        let t_6_blinding = Scalar::random(&mut rng);

        let T_1 = self.pc_gens.commit(t_poly.t1, t_1_blinding).compress();
        let T_3 = self.pc_gens.commit(t_poly.t3, t_3_blinding).compress();
        let T_4 = self.pc_gens.commit(t_poly.t4, t_4_blinding).compress();
        let T_5 = self.pc_gens.commit(t_poly.t5, t_5_blinding).compress();
        let T_6 = self.pc_gens.commit(t_poly.t6, t_6_blinding).compress();

        self.transcript.append_point(b"T_1", &T_1);
        self.transcript.append_point(b"T_3", &T_3);
        self.transcript.append_point(b"T_4", &T_4);
        self.transcript.append_point(b"T_5", &T_5);
        self.transcript.append_point(b"T_6", &T_6);

        let u = self.transcript.challenge_scalar(b"u");
        let x = self.transcript.challenge_scalar(b"x");

        // t_2_blinding = <z*z^Q, W_V * v_blinding>, the coefficient used
        // in the t_x_blinding calculation below.
        let t_2_blinding = wV
            .iter()
            .zip(self.v_blinding.iter())
            .map(|(c, v_blinding)| c * v_blinding)
            .sum();

        let t_blinding_poly = util::Poly6 {
            t1: t_1_blinding,
            t2: t_2_blinding,
            t3: t_3_blinding,
            t4: t_4_blinding,
            t5: t_5_blinding,
            t6: t_6_blinding,
        };

        let t_x = t_poly.eval(x);
        let t_x_blinding = t_blinding_poly.eval(x);
        let mut l_vec = l_poly.eval(x);
        l_vec.append(&mut vec![Scalar::zero(); pad]);
        let mut r_vec = r_poly.eval(x);
        r_vec.append(&mut vec![Scalar::zero(); pad]);

        // XXX this should refer to the notes to explain why this is correct
        // (padding entries of r continue the -y^i pattern).
        for i in n..padded_n {
            r_vec[i] = -exp_y;
            exp_y = exp_y * y; // y^i -> y^(i+1)
        }

        // Fold the two phases' blindings with the challenge u.
        let i_blinding = i_blinding1 + u * i_blinding2;
        let o_blinding = o_blinding1 + u * o_blinding2;
        let s_blinding = s_blinding1 + u * s_blinding2;

        let e_blinding = x * (i_blinding + x * (o_blinding + x * s_blinding));

        self.transcript.append_scalar(b"t_x", &t_x);
        self.transcript
            .append_scalar(b"t_x_blinding", &t_x_blinding);
        self.transcript.append_scalar(b"e_blinding", &e_blinding);

        // Get a challenge value to combine statements for the IPP
        let w = self.transcript.challenge_scalar(b"w");
        let Q = w * self.pc_gens.B;

        let G_factors = iter::repeat(Scalar::one())
            .take(n1)
            .chain(iter::repeat(u).take(n2 + pad))
            .collect::<Vec<_>>();
        let H_factors = exp_y_inv
            .into_iter()
            .zip(G_factors.iter())
            .map(|(y, u_or_1)| y * u_or_1)
            .collect::<Vec<_>>();

        let ipp_proof = InnerProductProof::create(
            self.transcript,
            &Q,
            &G_factors,
            &H_factors,
            gens.G(padded_n).cloned().collect(),
            gens.H(padded_n).cloned().collect(),
            l_vec,
            r_vec,
        );

        // We do not yet have a ClearOnDrop wrapper for Vec<Scalar>.
        // When PR 202 [1] is merged, we can simply wrap s_L and s_R at the point of creation.
        // [1] https://github.com/dalek-cryptography/curve25519-dalek/pull/202
        for scalar in s_L1
            .iter_mut()
            .chain(s_L2.iter_mut())
            .chain(s_R1.iter_mut())
            .chain(s_R2.iter_mut())
        {
            scalar.clear();
        }

        Ok(R1CSProof {
            A_I1,
            A_O1,
            S1,
            A_I2,
            A_O2,
            S2,
            T_1,
            T_3,
            T_4,
            T_5,
            T_6,
            t_x,
            t_x_blinding,
            e_blinding,
            ipp_proof,
        })
    }
}
// Auto-generated register accessor module (svd2rust style) for the
// DDRPHYC_DCR register. Each field gets a typed reader (`*_R`) and a write
// proxy (`*_W`) that masks/shifts the raw value into `W.bits`.
// Multi-bit field writers are `unsafe` because out-of-range values are only
// masked, not validated.
#[doc = "Reader of register DDRPHYC_DCR"]
pub type R = crate::R<u32, super::DDRPHYC_DCR>;
#[doc = "Writer for register DDRPHYC_DCR"]
pub type W = crate::W<u32, super::DDRPHYC_DCR>;
#[doc = "Register DDRPHYC_DCR `reset()`'s with value 0x0b"]
impl crate::ResetValue for super::DDRPHYC_DCR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0b
    }
}
#[doc = "Reader of field `DDRMD`"]
pub type DDRMD_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DDRMD`"]
pub struct DDRMD_W<'a> {
    w: &'a mut W,
}
impl<'a> DDRMD_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0:2 (mask 0x07, no shift).
        self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07);
        self.w
    }
}
#[doc = "Reader of field `DDR8BNK`"]
pub type DDR8BNK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DDR8BNK`"]
pub struct DDR8BNK_W<'a> {
    w: &'a mut W,
}
impl<'a> DDR8BNK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 3.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `PDQ`"]
pub type PDQ_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `PDQ`"]
pub struct PDQ_W<'a> {
    w: &'a mut W,
}
impl<'a> PDQ_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 4:6.
        self.w.bits = (self.w.bits & !(0x07 << 4)) | (((value as u32) & 0x07) << 4);
        self.w
    }
}
#[doc = "Reader of field `MPRDQ`"]
pub type MPRDQ_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MPRDQ`"]
pub struct MPRDQ_W<'a> {
    w: &'a mut W,
}
impl<'a> MPRDQ_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 7.
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
#[doc = "Reader of field `DDRTYPE`"]
pub type DDRTYPE_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DDRTYPE`"]
pub struct DDRTYPE_W<'a> {
    w: &'a mut W,
}
impl<'a> DDRTYPE_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 8:9.
        self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u32) & 0x03) << 8);
        self.w
    }
}
#[doc = "Reader of field `NOSRA`"]
pub type NOSRA_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `NOSRA`"]
pub struct NOSRA_W<'a> {
    w: &'a mut W,
}
impl<'a> NOSRA_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 27.
        self.w.bits = (self.w.bits & !(0x01 << 27)) | (((value as u32) & 0x01) << 27);
        self.w
    }
}
#[doc = "Reader of field `DDR2T`"]
pub type DDR2T_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DDR2T`"]
pub struct DDR2T_W<'a> {
    w: &'a mut W,
}
impl<'a> DDR2T_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 28.
        self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
        self.w
    }
}
#[doc = "Reader of field `UDIMM`"]
pub type UDIMM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `UDIMM`"]
pub struct UDIMM_W<'a> {
    w: &'a mut W,
}
impl<'a> UDIMM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 29.
        self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
        self.w
    }
}
#[doc = "Reader of field `RDIMM`"]
pub type RDIMM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RDIMM`"]
pub struct RDIMM_W<'a> {
    w: &'a mut W,
}
impl<'a> RDIMM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 30.
        self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
        self.w
    }
}
#[doc = "Reader of field `TPD`"]
pub type TPD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TPD`"]
pub struct TPD_W<'a> {
    w: &'a mut W,
}
impl<'a> TPD_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 31.
        self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
        self.w
    }
}
// Typed read accessors for every field of the register.
impl R {
    #[doc = "Bits 0:2 - DDRMD"]
    #[inline(always)]
    pub fn ddrmd(&self) -> DDRMD_R {
        DDRMD_R::new((self.bits & 0x07) as u8)
    }
    #[doc = "Bit 3 - DDR8BNK"]
    #[inline(always)]
    pub fn ddr8bnk(&self) -> DDR8BNK_R {
        DDR8BNK_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bits 4:6 - PDQ"]
    #[inline(always)]
    pub fn pdq(&self) -> PDQ_R {
        PDQ_R::new(((self.bits >> 4) & 0x07) as u8)
    }
    #[doc = "Bit 7 - MPRDQ"]
    #[inline(always)]
    pub fn mprdq(&self) -> MPRDQ_R {
        MPRDQ_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bits 8:9 - DDRTYPE"]
    #[inline(always)]
    pub fn ddrtype(&self) -> DDRTYPE_R {
        DDRTYPE_R::new(((self.bits >> 8) & 0x03) as u8)
    }
    #[doc = "Bit 27 - NOSRA"]
    #[inline(always)]
    pub fn nosra(&self) -> NOSRA_R {
        NOSRA_R::new(((self.bits >> 27) & 0x01) != 0)
    }
    #[doc = "Bit 28 - DDR2T"]
    #[inline(always)]
    pub fn ddr2t(&self) -> DDR2T_R {
        DDR2T_R::new(((self.bits >> 28) & 0x01) != 0)
    }
    #[doc = "Bit 29 - UDIMM"]
    #[inline(always)]
    pub fn udimm(&self) -> UDIMM_R {
        UDIMM_R::new(((self.bits >> 29) & 0x01) != 0)
    }
    #[doc = "Bit 30 - RDIMM"]
    #[inline(always)]
    pub fn rdimm(&self) -> RDIMM_R {
        RDIMM_R::new(((self.bits >> 30) & 0x01) != 0)
    }
    #[doc = "Bit 31 - TPD"]
    #[inline(always)]
    pub fn tpd(&self) -> TPD_R {
        TPD_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
// Write-proxy accessors; each borrows `W` mutably so calls can be chained.
impl W {
    #[doc = "Bits 0:2 - DDRMD"]
    #[inline(always)]
    pub fn ddrmd(&mut self) -> DDRMD_W {
        DDRMD_W { w: self }
    }
    #[doc = "Bit 3 - DDR8BNK"]
    #[inline(always)]
    pub fn ddr8bnk(&mut self) -> DDR8BNK_W {
        DDR8BNK_W { w: self }
    }
    #[doc = "Bits 4:6 - PDQ"]
    #[inline(always)]
    pub fn pdq(&mut self) -> PDQ_W {
        PDQ_W { w: self }
    }
    #[doc = "Bit 7 - MPRDQ"]
    #[inline(always)]
    pub fn mprdq(&mut self) -> MPRDQ_W {
        MPRDQ_W { w: self }
    }
    #[doc = "Bits 8:9 - DDRTYPE"]
    #[inline(always)]
    pub fn ddrtype(&mut self) -> DDRTYPE_W {
        DDRTYPE_W { w: self }
    }
    #[doc = "Bit 27 - NOSRA"]
    #[inline(always)]
    pub fn nosra(&mut self) -> NOSRA_W {
        NOSRA_W { w: self }
    }
    #[doc = "Bit 28 - DDR2T"]
    #[inline(always)]
    pub fn ddr2t(&mut self) -> DDR2T_W {
        DDR2T_W { w: self }
    }
    #[doc = "Bit 29 - UDIMM"]
    #[inline(always)]
    pub fn udimm(&mut self) -> UDIMM_W {
        UDIMM_W { w: self }
    }
    #[doc = "Bit 30 - RDIMM"]
    #[inline(always)]
    pub fn rdimm(&mut self) -> RDIMM_W {
        RDIMM_W { w: self }
    }
    #[doc = "Bit 31 - TPD"]
    #[inline(always)]
    pub fn tpd(&mut self) -> TPD_W {
        TPD_W { w: self }
    }
}
use crate::system::interface::ProcessId;

use super::SignalNumber;

/// Information related to the arrival of a signal.
///
/// `#[repr(transparent)]` over `libc::siginfo_t` — presumably so the struct
/// can be reinterpreted from the raw bytes a signal handler produces (see
/// `SIZE`); TODO confirm against the callers.
#[repr(transparent)]
pub(crate) struct SignalInfo {
    info: libc::siginfo_t,
}

impl SignalInfo {
    // Size in bytes of the wrapped `siginfo_t`.
    pub(super) const SIZE: usize = std::mem::size_of::<Self>();

    /// Returns whether the signal was sent by the user or not.
    pub(crate) fn is_user_signaled(&self) -> bool {
        // FIXME: we should check if si_code is equal to SI_USER but for some reason the latter it
        // is not available in libc.
        self.info.si_code <= 0
    }

    /// Gets the PID that sent the signal.
    pub(crate) fn pid(&self) -> ProcessId {
        // FIXME: some signals don't set si_pid.
        // NOTE(review): `si_pid()` reads a union member, hence the `unsafe`;
        // callers should only invoke this for signals known to carry a PID.
        unsafe { self.info.si_pid() }
    }

    /// Gets the signal number.
    pub(crate) fn signal(&self) -> SignalNumber {
        self.info.si_signo
    }
}
use vendored_ripemd160::{Digest, Ripemd160};

pub use crate::Hash;

/// Alias exposing the vendored RIPEMD-160 hasher under this module's name.
pub type RIPEMD160 = Ripemd160;

/// Internal block size of RIPEMD-160, in bytes.
pub const BLOCK_SIZE: usize = 64;
/// Digest length of RIPEMD-160, in bytes.
pub const SIZE: usize = 20;

impl Hash for RIPEMD160 {
    fn size() -> usize {
        Self::output_size()
    }

    fn block_size() -> usize {
        BLOCK_SIZE
    }

    fn reset(&mut self) {
        Digest::reset(self);
    }

    fn sum(&mut self) -> Vec<u8> {
        // Finalizing consumes the hasher, so work on a clone and leave the
        // original state untouched.
        let finished = self.clone().result();
        Vec::from(finished.as_slice())
    }
}

/// Creates a fresh RIPEMD-160 hasher.
pub fn new() -> RIPEMD160 {
    <RIPEMD160 as Digest>::new()
}

/// One-shot convenience: hash `b` and return the 20-byte digest.
pub fn sum(b: &[u8]) -> [u8; SIZE] {
    let digest = Ripemd160::digest(b);
    let mut out = [0u8; SIZE];
    out.copy_from_slice(digest.as_slice());
    out
}
// Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

// Doesn't run these unless feature specified
#![cfg(feature = "serde_tests")]

use address::Address;
use encoding::{from_slice, to_vec};
use forest_message::UnsignedMessage;
use hex::encode;
use serde::Deserialize;
use std::fs::File;
use std::io::prelude::*;
use std::str::FromStr;
use vm::Serialized;

/// One message entry of the shared serialization-vectors JSON file.
/// The `serde(alias)` attributes map the file's capitalized JSON keys
/// onto snake_case fields.
#[derive(Debug, Deserialize)]
struct MessageVector {
    #[serde(alias = "To")]
    to: String,
    #[serde(alias = "From")]
    from: String,
    #[serde(alias = "Nonce")]
    nonce: u64,
    #[serde(alias = "Value")]
    value: String,
    #[serde(alias = "GasPrice")]
    gas_price: String,
    #[serde(alias = "GasLimit")]
    gas_limit: u64,
    #[serde(alias = "Method")]
    method: u64,
    #[serde(alias = "Params")]
    params: String,
}

impl From<MessageVector> for UnsignedMessage {
    // Test-only conversion: unwraps are acceptable here because a malformed
    // vector should fail the test loudly.
    fn from(vector: MessageVector) -> UnsignedMessage {
        UnsignedMessage::builder()
            .to(Address::from_str(&vector.to).unwrap())
            .from(Address::from_str(&vector.from).unwrap())
            .sequence(vector.nonce)
            .value(vector.value.parse().unwrap())
            .method_num(vector.method)
            // Params are base64 in the JSON file, raw bytes on the message.
            .params(Serialized::new(base64::decode(&vector.params).unwrap()))
            .gas_limit(vector.gas_limit)
            .gas_price(vector.gas_price.parse().unwrap())
            .build()
            .unwrap()
    }
}

/// A test vector: a message plus its expected CBOR encoding (hex).
#[derive(Deserialize)]
struct TestVector {
    message: MessageVector,
    hex_cbor: String,
}

// Round-trip check: message -> CBOR must match `expected` hex, and decoding
// those bytes must reproduce the original message.
fn encode_assert_cbor(message: &UnsignedMessage, expected: &str) {
    let enc_bz: Vec<u8> = to_vec(message).expect("Cbor serialization failed");

    // Assert the message is encoded in same format
    assert_eq!(encode(enc_bz.as_slice()), expected);
    // Assert decoding from those bytes goes back to unsigned message
    assert_eq!(
        &from_slice::<UnsignedMessage>(&enc_bz).expect("Should be able to deserialize cbor bytes"),
        message
    );
}

#[test]
#[ignore]
fn unsigned_message_cbor_vectors() {
    let mut file = File::open("../serialization-vectors/unsigned_messages.json").unwrap();
    let mut string = String::new();
    file.read_to_string(&mut string).unwrap();
    let vectors: Vec<TestVector> =
        serde_json::from_str(&string).expect("Test vector deserialization failed");
    for tv in vectors {
        encode_assert_cbor(&UnsignedMessage::from(tv.message), &tv.hex_cbor)
    }
}
// Auto-generated (svd2rust) register accessor module for SYSCFG user
// register 14. Only two single-bit fields are defined: D1STPRST (bit 0)
// and D2SBRST (bit 16).
#[doc = "Register `UR14` reader"]
pub type R = crate::R<UR14_SPEC>;
#[doc = "Register `UR14` writer"]
pub type W = crate::W<UR14_SPEC>;
#[doc = "Field `D1STPRST` reader - D1 Stop Reset"]
pub type D1STPRST_R = crate::BitReader;
#[doc = "Field `D1STPRST` writer - D1 Stop Reset"]
pub type D1STPRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `D2SBRST` reader - D2 Standby Reset"]
pub type D2SBRST_R = crate::BitReader;
#[doc = "Field `D2SBRST` writer - D2 Standby Reset"]
pub type D2SBRST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 0 - D1 Stop Reset"]
    #[inline(always)]
    pub fn d1stprst(&self) -> D1STPRST_R {
        D1STPRST_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 16 - D2 Standby Reset"]
    #[inline(always)]
    pub fn d2sbrst(&self) -> D2SBRST_R {
        D2SBRST_R::new(((self.bits >> 16) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - D1 Stop Reset"]
    #[inline(always)]
    #[must_use]
    pub fn d1stprst(&mut self) -> D1STPRST_W<UR14_SPEC, 0> {
        D1STPRST_W::new(self)
    }
    #[doc = "Bit 16 - D2 Standby Reset"]
    #[inline(always)]
    #[must_use]
    pub fn d2sbrst(&mut self) -> D2SBRST_W<UR14_SPEC, 16> {
        D2SBRST_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "SYSCFG user register 14\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ur14::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ur14::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct UR14_SPEC;
impl crate::RegisterSpec for UR14_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ur14::R`](R) reader structure"]
impl crate::Readable for UR14_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ur14::W`](W) writer structure"]
impl crate::Writable for UR14_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets UR14 to value 0"]
impl crate::Resettable for UR14_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use core::fmt; use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Sub, SubAssign}; use super::TICKS_PER_SECOND; #[derive(Debug, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "defmt", derive(defmt::Format))] /// Represents the difference between two [Instant](struct.Instant.html)s pub struct Duration { pub(crate) ticks: u64, } impl Duration { pub const fn as_ticks(&self) -> u64 { self.ticks } pub const fn as_secs(&self) -> u64 { self.ticks / TICKS_PER_SECOND } pub const fn as_millis(&self) -> u64 { self.ticks * 1000 / TICKS_PER_SECOND } pub const fn as_micros(&self) -> u64 { self.ticks * 1_000_000 / TICKS_PER_SECOND } /// Creates a duration from the specified number of clock ticks pub const fn from_ticks(ticks: u64) -> Duration { Duration { ticks } } /// Creates a duration from the specified number of seconds pub const fn from_secs(secs: u64) -> Duration { Duration { ticks: secs * TICKS_PER_SECOND, } } /// Creates a duration from the specified number of milliseconds pub const fn from_millis(millis: u64) -> Duration { Duration { ticks: millis * TICKS_PER_SECOND / 1000, } } /// Creates a duration from the specified number of microseconds /// NOTE: Delays this small may be inaccurate. pub const fn from_micros(micros: u64) -> Duration { Duration { ticks: micros * TICKS_PER_SECOND / 1_000_000, } } /// Adds one Duration to another, returning a new Duration or None in the event of an overflow. pub fn checked_add(self, rhs: Duration) -> Option<Duration> { self.ticks .checked_add(rhs.ticks) .map(|ticks| Duration { ticks }) } /// Subtracts one Duration to another, returning a new Duration or None in the event of an overflow. pub fn checked_sub(self, rhs: Duration) -> Option<Duration> { self.ticks .checked_sub(rhs.ticks) .map(|ticks| Duration { ticks }) } /// Multiplies one Duration by a scalar u32, returning a new Duration or None in the event of an overflow. 
pub fn checked_mul(self, rhs: u32) -> Option<Duration> { self.ticks .checked_mul(rhs as _) .map(|ticks| Duration { ticks }) } /// Divides one Duration a scalar u32, returning a new Duration or None in the event of an overflow. pub fn checked_div(self, rhs: u32) -> Option<Duration> { self.ticks .checked_div(rhs as _) .map(|ticks| Duration { ticks }) } } impl Add for Duration { type Output = Duration; fn add(self, rhs: Duration) -> Duration { self.checked_add(rhs) .expect("overflow when adding durations") } } impl AddAssign for Duration { fn add_assign(&mut self, rhs: Duration) { *self = *self + rhs; } } impl Sub for Duration { type Output = Duration; fn sub(self, rhs: Duration) -> Duration { self.checked_sub(rhs) .expect("overflow when subtracting durations") } } impl SubAssign for Duration { fn sub_assign(&mut self, rhs: Duration) { *self = *self - rhs; } } impl Mul<u32> for Duration { type Output = Duration; fn mul(self, rhs: u32) -> Duration { self.checked_mul(rhs) .expect("overflow when multiplying duration by scalar") } } impl Mul<Duration> for u32 { type Output = Duration; fn mul(self, rhs: Duration) -> Duration { rhs * self } } impl MulAssign<u32> for Duration { fn mul_assign(&mut self, rhs: u32) { *self = *self * rhs; } } impl Div<u32> for Duration { type Output = Duration; fn div(self, rhs: u32) -> Duration { self.checked_div(rhs) .expect("divide by zero error when dividing duration by scalar") } } impl DivAssign<u32> for Duration { fn div_assign(&mut self, rhs: u32) { *self = *self / rhs; } } impl<'a> fmt::Display for Duration { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{} ticks", self.ticks) } }
/* * Copyright (c) 2017 Boucher, Antoni <bouanto@zoho.com> * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/

#![feature(proc_macro)]

extern crate gtk;
#[macro_use]
extern crate relm;
#[macro_use]
extern crate relm_derive;

use gtk::{
    BoxExt,
    Cast,
    ContainerExt,
    Inhibit,
    IsA,
    Object,
    PackType,
    WidgetExt,
};
use gtk::Orientation::Vertical;
use gtk::WindowType::Toplevel;
use relm::{Component, ContainerWidget, Relm, Update, Widget};

use self::Msg::*;

// The child widget emits no messages; the empty enum still needs
// `#[derive(Msg)]` to satisfy relm's `Update::Msg` bound.
#[derive(Msg)]
pub enum ButtonMsg {
}

// A relm widget wrapping a single "+" gtk::Button.
struct Button {
    button: gtk::Button,
}

impl Update for Button {
    type Model = ();
    type ModelParam = ();
    type Msg = ButtonMsg;

    fn model(_: &Relm<Self>, _: ()) -> () {
    }

    fn update(&mut self, _msg: ButtonMsg) {
    }
}

impl Widget for Button {
    type Root = gtk::Button;

    fn root(&self) -> Self::Root {
        self.button.clone()
    }

    // Called by relm when the widget is added to a container; configures the
    // child packing on the parent box.
    fn on_add<W: IsA<gtk::Widget> + IsA<Object>>(&self, parent: W) {
        let parent: gtk::Box = parent
            .upcast::<gtk::Widget>()
            .downcast()
            .expect("Button widget must be added in a gtk::Box");
        parent.set_child_expand(&self.button, false);
        parent.set_child_fill(&self.button, true);
        parent.set_child_pack_type(&self.button, PackType::Start);
        parent.set_child_padding(&self.button, 10);
        parent.set_child_position(&self.button, 0);
    }

    fn view(_relm: &Relm<Self>, _model: Self::Model) -> Self {
        let button = gtk::Button::new_with_label("+");

        Button {
            button: button,
        }
    }
}

#[derive(Msg)]
pub enum Msg {
    Quit,
}

// Top-level window; keeps the child component alive via `_button`.
struct Win {
    _button: Component<Button>,
    window: gtk::Window,
}

impl Update for Win {
    type Model = ();
    type ModelParam = ();
    type Msg = Msg;

    fn model(_: &Relm<Self>, _: ()) -> () {
    }

    fn update(&mut self, event: Msg) {
        match event {
            Quit => gtk::main_quit(),
        }
    }
}

impl Widget for Win {
    type Root = gtk::Window;

    fn root(&self) -> Self::Root {
        self.window.clone()
    }

    fn view(relm: &Relm<Self>, _model: Self::Model) -> Self {
        let window = gtk::Window::new(Toplevel);
        let vbox = gtk::Box::new(Vertical, 0);
        window.add(&vbox);
        let label = gtk::Label::new(Some("0"));
        vbox.add(&label);
        let button = gtk::Button::new_with_label("-");
        vbox.add(&button);
        // `add_widget` instantiates the relm child component inside the box;
        // holding the returned Component keeps its event stream alive.
        let relm_button = vbox.add_widget::<Button>(());
        connect!(relm, window, connect_delete_event(_, _), return (Some(Msg::Quit), Inhibit(false)));
        window.show_all();
        Win {
            _button: relm_button,
            window: window,
        }
    }
}

fn main() {
    Win::run(()).unwrap();
}
//! Structs and methods for working with **moving** `Hittable` spheres. use crate::{ aabb::AABB, hittable::{get_sphere_uv, HitRecord, Hittable}, material::Material, ray::Ray, vec3, vec3::Vec3, }; use std::sync::Arc; /// A linearly-moving sphere. Will move from `center0` at `time0` to `center1` /// at `time1`. #[derive(Debug, Clone)] pub struct MovingSphere { pub center0: Vec3, pub center1: Vec3, pub time0: f32, pub time1: f32, pub radius: f32, pub material: Arc<Material>, } impl MovingSphere { /// Create a new linearly-moving sphere. pub fn new( center0: Vec3, center1: Vec3, time0: f32, time1: f32, radius: f32, material: Material, ) -> Self { Self { center0, center1, time0, time1, radius, material: Arc::new(material), } } /// Get the center of the sphere at time `time`. /// /// # Usage /// /// ``` /// use weekend_tracer_rs::hittable::moving_sphere::MovingSphere; /// use weekend_tracer_rs::vec3; /// use weekend_tracer_rs::vec3::Vec3; /// use weekend_tracer_rs::material::Material; /// /// let sphere = MovingSphere::new( /// vec3!(-1.0, -1.0, -1.0), /// vec3!(1.0, 1.0, 1.0), /// 0.0, /// 2.0, /// 0.5, /// Material::dielectric(1.5, 0.0), /// ); /// /// assert_eq!(sphere.center(-1.0), vec3!(-2.0, -2.0, -2.0)); /// assert_eq!(sphere.center(0.0), vec3!(-1.0, -1.0, -1.0)); /// assert_eq!(sphere.center(1.0), vec3!(0.0, 0.0, 0.0)); /// assert_eq!(sphere.center(2.0), vec3!(1.0, 1.0, 1.0)); /// assert_eq!(sphere.center(3.0), vec3!(2.0, 2.0, 2.0)); /// ``` pub fn center(&self, time: f32) -> Vec3 { self.center0 + ((time - self.time0) / (self.time1 - self.time0)) * (self.center1 - self.center0) } } impl Hittable for MovingSphere { fn hit(&self, ray: &Ray, t_min: f32, t_max: f32) -> Option<HitRecord> { // See the raytracing in one weekend book, chapter 6, for this formula. // We found a (modified) quadratic formula for hit-testing a sphere. 
let oc = ray.origin - self.center(ray.time); let a = ray.direction.length_squared(); let half_b = oc.dot(&ray.direction); let c = oc.length_squared() - (self.radius * self.radius); let discriminant = (half_b * half_b) - (a * c); // The sphere is hit if the discriminant is greater than 0. if discriminant > 0.0 { let root = discriminant.sqrt(); let solution_1 = (-half_b - root) / a; let solution_2 = (-half_b + root) / a; let t = if solution_1 < t_max && solution_1 > t_min { Some(solution_1) } else if solution_2 < t_max && solution_2 > t_min { Some(solution_2) } else { None }; if let Some(t) = t { let hit_point = ray.at(t); Some(HitRecord::new( ray, t, hit_point, (hit_point - self.center(ray.time)) / self.radius, self.material.clone(), get_sphere_uv(hit_point - self.center(ray.time) / self.radius), )) } else { None } } else { None } } fn bounding_box(&self, t0: f32, t1: f32) -> Option<AABB> { let box0 = AABB::new( self.center(t0) - vec3!(self.radius, self.radius, self.radius), self.center(t0) + vec3!(self.radius, self.radius, self.radius), ); let box1 = AABB::new( self.center(t1) - vec3!(self.radius, self.radius, self.radius), self.center(t1) + vec3!(self.radius, self.radius, self.radius), ); Some(AABB::surrounding_box(box0, box1)) } fn box_clone(&self) -> Box<dyn Hittable> { Box::new(self.clone()) } }
use crate::segment::{Segment, Start};
use core::{cmp::Ordering, fmt::Debug, ops::Bound};

/// Wrapper type for items the map (range should only ever be increasing)
#[derive(Clone)]
pub(crate) struct Key<T>(pub(crate) Segment<T>);

impl<T: Clone> Key<&T> {
    // Turn a key over borrowed contents into one that owns its contents,
    // mirroring `Segment::cloned` (and `Option::cloned` in spirit).
    pub(crate) fn cloned(&self) -> Key<T> {
        Key(self.0.cloned())
    }
}

// `Key` is `Copy` whenever the wrapped segment's contents are.
impl<T: Copy> Copy for Key<T> {}

// Transparent `Debug`: show the wrapped segment, not the wrapper.
impl<T: Debug> Debug for Key<T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{:?}", self.0)
    }
}

// Allow keyed lookups by the segment's start position alone.
//
// NOTE(review): the `Borrow` contract requires that `Eq`/`Ord` of the
// borrowed form agree with the owner's. That holds here only because the
// `PartialEq`/`Ord` impls below compare nothing but `self.0.start` —
// confirm this invariant is kept if those impls ever change.
impl<T> core::borrow::Borrow<Start<T>> for Key<T> {
    fn borrow(&self) -> &Start<T> {
        &self.0.start
    }
}

// impl<T> core::borrow::Borrow<Start<&T>> for Key<T> {
//     fn borrow(&self) -> &Start<&T> {
//         &self.0.start.as_ref()
//     }
// }
// impl<'a, T> core::borrow::Borrow<Start<&'a T>> for Key<T> {
//     fn borrow(&self) -> &Start<&'a T> {
//         match &self.0.start.0 {
//             Bound::Included(x) => Bound::Included(x),
//             Bound::Excluded(x) => Bound::Excluded(x),
//             Bound::Unbounded => Bound::Unbounded,
//         }
//     }
// }

// Also allow lookups directly by the start's inner `Bound`.
impl<T> core::borrow::Borrow<Bound<T>> for Key<T> {
    fn borrow(&self) -> &Bound<T> {
        &self.0.start.0
    }
}

// impl<'a, T> core::borrow::Borrow<Bound<&'a T>> for Key<&'a T> {
//     fn borrow(&self) -> &Bound<&'a T> {
//         // self.0.start.as_bound_inner_ref();
//         &self.0.start.0
//         // match &self.0.start.0 {
//         //     Bound::Included(x) => &Bound::Included(x),
//         //     Bound::Excluded(x) => &Bound::Excluded(x),
//         //     Bound::Unbounded => &Bound::Unbounded,
//         // }
//     }
// }

// Equality looks only at the start bound: two keys with the same start but
// different ends compare equal (consistent with the `Borrow` impls above).
impl<T: PartialEq> PartialEq for Key<T> {
    fn eq(&self, other: &Key<T>) -> bool {
        self.0.start == other.0.start
    }
}

// A key equals a bare `Bound` when its start bound matches it.
impl<T: PartialEq> PartialEq<Bound<T>> for Key<T> {
    fn eq(&self, other: &Bound<T>) -> bool {
        self.0.start.0.eq(other)
    }
}

// A key equals a bare value only when its start is `Included(value)`;
// excluded or unbounded starts never equal a plain value.
impl<T: PartialEq> PartialEq<T> for Key<T> {
    fn eq(&self, other: &T) -> bool {
        if let Bound::Included(start) = &self.0.start.0 {
            start == other
        } else {
            false
        }
    }
}

impl<T: Eq> Eq for Key<T> {}

// Total order by start bound only, matching `PartialEq` above.
impl<T: Ord> Ord for Key<T> {
    fn cmp(&self, other: &Key<T>) -> Ordering {
        self.0.start.cmp(&other.0.start)
    }
}
/// Keys over totally-ordered contents are themselves totally ordered, so a
/// partial comparison can never fail: delegate to [`Ord::cmp`] and wrap the
/// result in `Some`.
impl<T: Ord> PartialOrd for Key<T> {
    fn partial_cmp(&self, other: &Key<T>) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}
use super::memory::{ MemoryMap, Version };
use super::InfocomError;
use super::state::FrameStack;
use super::object_table::ObjectTable;
use super::text::{ Decoder, Encoder };
use super::interface::{ Interface, StatusLineFormat };
use super::dictionary::Dictionary;
use log::debug;
use serde::{ Serialize };
use std::collections::HashSet;
use std::iter::FromIterator;

// The four Z-machine instruction encodings.
#[derive(Debug, Serialize)]
enum OpcodeForm {
    Long,
    Short,
    Extended,
    Variable
}

impl From<u8> for OpcodeForm {
    // Decode the form from the opcode byte: 0xBE is the extended-form
    // marker; otherwise the top two bits select short (0b10), variable
    // (0b11), or long (0b00/0b01) form.
    fn from(opcode: u8) -> OpcodeForm {
        if opcode == 0xBE {
            OpcodeForm::Extended
        } else {
            match (opcode >> 6) & 0x3 {
                2 => OpcodeForm::Short,
                3 => OpcodeForm::Variable,
                _ => OpcodeForm::Long
            }
        }
    }
}

// How a single operand is encoded in the instruction stream.
#[derive(Copy, Clone, Debug, Serialize)]
enum OperandType {
    LargeConstant,
    SmallConstant,
    Variable,
    Omitted
}

impl From<u8> for OperandType {
    // Decode a 2-bit operand-type field: 00 large constant, 01 small
    // constant, 10 variable, 11 omitted.
    fn from(v: u8) -> OperandType {
        match v & 0x3 {
            0 => OperandType::LargeConstant,
            1 => OperandType::SmallConstant,
            2 => OperandType::Variable,
            _ => OperandType::Omitted
        }
    }
}

// A fully-decoded Z-machine instruction.
#[derive(Serialize)]
pub struct Instruction {
    address: usize,                     // address the instruction was decoded from
    form: OpcodeForm,
    opcode: u8,
    name: String,                       // mnemonic, used for disassembly output
    operand_types: Vec<OperandType>,
    operands: Vec<u16>,                 // raw operand values (constants or variable numbers)
    store_variable: Option<u8>,         // destination variable, if the opcode stores
    // `BranchOffset` is declared elsewhere in this module — presumably it
    // carries condition/address/return-value fields (see the Debug impl).
    branch_offset: Option<BranchOffset>,
    next_pc: usize                      // address of the following instruction
}

use std::fmt;

// Render a variable number in Z-machine convention: 0 is the stack top,
// 1..=15 are routine locals (L00..), 16..=255 are globals (G00..).
fn format_variable(operand: u8) -> String {
    match operand {
        0 => String::from("(SP)"),
        1..=15 => format!("L{:02x}", operand - 1),
        _ => format!("G{:02x}", operand - 16)
    }
}

// Disassembly-style rendering: "$addr: NAME args S:dest B:[cond]->$addr".
impl fmt::Debug for Instruction {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let store = match self.store_variable {
            Some(v) => format!(" S:{}", format_variable(v)),
            _ => String::new()
        };
        let branch = match &self.branch_offset {
            Some(b) => match b.return_value {
                Some(v) => format!(" B: {} -> RET {}", b.condition, v),
                _ => format!(" B:[{}]->${:06x}", b.condition, b.address.unwrap())
            },
            _ => String::new()
        };
        let mut args = String::new();
        // Format each operand according to its encoded type.
        for i in 0..self.operands.len() {
            if i > 0 {
                args.push_str(",");
            }
            match self.operand_types[i] {
                OperandType::SmallConstant =>
args.push_str(&format!("#{:02x}", self.operands[i])),
                OperandType::LargeConstant =>
                    args.push_str(&format!("#{:04x}", self.operands[i])),
                OperandType::Variable =>
                    args.push_str(&format_variable(self.operands[i] as u8)),
                _ => {}
            }
        }
        f.write_fmt(format_args!("${:06x}: {} {} {}{}", self.address, self.name, args, store, branch))
    }
}

// Outcome of executing one instruction: an optional value to store, an
// optional branch condition result, and an optional explicit next PC
// (calls/jumps/returns). `None` fields mean "use the default behavior".
#[derive(Default, Serialize)]
pub struct InstructionResult {
    store_value: Option<u16>,
    branch_condition: Option<bool>,
    next_pc: Option<usize>
}

// Compact trace rendering, e.g. " S=>$0001 B=>true"; empty when no effect.
impl fmt::Debug for InstructionResult {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let store_value = match self.store_value {
            Some(v) => format!(" S=>${:04x}", v),
            _ => String::new()
        };
        let branch_condition = match self.branch_condition {
            Some(b) => format!(" B=>{}", b),
            _ => String::new()
        };
        f.write_fmt(format_args!("{}{}", store_value, branch_condition))
    }
}

impl Instruction {
    // Resolve operand `index` to a 16-bit value: constants are used as-is
    // (small constants masked to a byte), variables are read from the frame
    // stack. An `Omitted` operand is an error — it should never be fetched.
    fn get_argument(&self, state: &mut FrameStack, index: usize) -> Result<u16,InfocomError> {
        Ok(match self.operand_types[index] {
            OperandType::SmallConstant => self.operands[index] & 0xFF,
            OperandType::LargeConstant => self.operands[index],
            OperandType::Variable => {
                // The operand holds a variable *number*; dereference it
                // without peeking the stack in place (`false`).
                let var = (self.operands[index] & 0xFF) as u8;
                state.get_variable(var, false)?
            },
            OperandType::Omitted => return Err(InfocomError::Memory(format!("Operand with type 'Omitted'")))
        })
    }

    // Resolve operand `index` to a variable *number* (for opcodes like
    // inc/dec/store that operate on a variable by reference). A constant
    // operand names the variable directly; a variable operand is read
    // in-place (`true`) to obtain the variable number indirectly.
    fn get_indirect_variable_reference(&self, state: &mut FrameStack, index: usize) -> Result<u8,InfocomError> {
        debug!("indirect reference: {:?} ${:02x}", self.operand_types[index], self.operands[index]);
        Ok(match self.operand_types[index] {
            OperandType::SmallConstant |
            OperandType::LargeConstant => (self.operands[index] & 0xFF) as u8,
            OperandType::Variable =>
                state.get_variable((self.operands[index] & 0xFF) as u8, true)?
as u8, OperandType::Omitted => return Err(InfocomError::Memory(format!("Operand with type 'Omitted'"))) }) } // 2OP fn je(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let a = self.get_argument(state, 0)?; for i in 1..self.operands.len() { let b = self.get_argument(state, i)?; debug!("JE: ${:04x} ${:04x}", a, b); if a == b { return Ok(InstructionResult { branch_condition: Some(true), ..Default::default() }) } } return Ok(InstructionResult { branch_condition: Some(false), ..Default::default() }) } fn jg(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let a = self.get_argument(state, 0)? as i16; for i in 1..self.operands.len() { let b = self.get_argument(state, i)? as i16; if a <= b { return Ok(InstructionResult { branch_condition: Some(false), ..Default::default() }) } } return Ok(InstructionResult { branch_condition: Some(true), ..Default::default() }) } fn jl(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let a = self.get_argument(state, 0)? as i16; for i in 1..self.operands.len() { let b = self.get_argument(state, i)? as i16; if a >= b { return Ok(InstructionResult { branch_condition: Some(false), ..Default::default() }) } } return Ok(InstructionResult { branch_condition: Some(true), ..Default::default() }) } fn dec_chk(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let var = self.get_indirect_variable_reference(state, 0)?; let var_value = state.get_variable(var, true)? as i16 - 1; state.set_variable(var, var_value as u16, true)?; let value = self.get_argument(state, 1)? as i16; Ok(InstructionResult { branch_condition: Some(var_value < value), ..Default::default() }) } fn inc_chk(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let var = self.get_indirect_variable_reference(state, 0)?; let var_value = state.get_variable(var, true)? 
as i16 + 1; state.set_variable(var, var_value as u16, true)?; let value = self.get_argument(state, 1)? as i16; Ok(InstructionResult { branch_condition: Some(var_value > value), ..Default::default() }) } fn jin(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let a = self.get_argument(state, 0)? as usize; let b = self.get_argument(state, 1)?; let ot = ObjectTable::new(&state.get_memory())?; let o = ot.get_object(&state.get_memory(), a)?; Ok(InstructionResult { branch_condition: Some(o.get_parent() == b), ..Default::default() }) } fn test(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let bitmap = self.get_argument(state, 0)?; let flags = self.get_argument(state, 1)?; Ok(InstructionResult { branch_condition: Some(bitmap & flags == flags), ..Default::default() }) } fn or(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let mut r:u16 = self.get_argument(state, 0)?; for i in 1..self.operands.len() { r |= self.get_argument(state, i)?; } Ok(InstructionResult { store_value: Some(r), ..Default::default() }) } fn and(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let mut r:u16 = self.get_argument(state, 0)?; for i in 1..self.operands.len() { r &= self.get_argument(state, i)?; } Ok(InstructionResult { store_value: Some(r), ..Default::default() }) } fn test_attr(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)?; let attr = self.get_argument(state, 1)?; let ot = ObjectTable::new(&state.get_memory())?; Ok(InstructionResult { branch_condition: Some(ot.has_attribute(state.get_memory(), object as usize, attr as usize)?), ..Default::default() }) } fn set_attr(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)?; let attr = self.get_argument(state, 1)?; let mut ot = ObjectTable::new(&state.get_memory())?; ot.set_attribute(state, object as usize, 
attr as usize)?; Ok(InstructionResult::default()) } fn clear_attr(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)?; let attr = self.get_argument(state, 1)?; let mut ot = ObjectTable::new(&state.get_memory())?; ot.clear_attribute(state, object as usize, attr as usize)?; Ok(InstructionResult::default()) } fn store(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let var = self.get_indirect_variable_reference(state, 0)?; let value = self.get_argument(state, 1)?; state.set_variable(var, value, false)?; Ok(InstructionResult::default()) } fn insert_obj(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)?; let destination = self.get_argument(state, 1)?; let mut ot = ObjectTable::new(state.get_memory())?; ot.insert_object(state, object as usize, destination as usize)?; Ok(InstructionResult::default()) } fn loadw(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let array = self.get_argument(state, 0)?; let index = self.get_argument(state, 1)?; let value = state.get_memory().get_word(array as usize + (index as usize * 2))?; Ok(InstructionResult { store_value: Some(value), ..Default::default() }) } fn loadb(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let array = self.get_argument(state, 0)? as usize; let index = self.get_argument(state, 1)? as usize; let value = state.get_memory().get_byte(array + index)?; Ok(InstructionResult { store_value: Some(value as u16), ..Default::default() }) } fn get_prop(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)? as usize; let property = self.get_argument(state, 1)? 
as usize; let ot = ObjectTable::new(state.get_memory())?; let value = ot.get_property_value(state.get_memory(), object, property)?; Ok(InstructionResult { store_value: Some(value), ..Default::default() }) } fn get_prop_addr(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)? as usize; let property = self.get_argument(state, 1)? as usize; let ot = ObjectTable::new(state.get_memory())?; let value = ot.get_property_address(state.get_memory(), object, property)?; Ok(InstructionResult { store_value: Some(value as u16), ..Default::default() }) } fn get_next_prop(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)? as usize; let property = self.get_argument(state, 1)? as usize; let ot = ObjectTable::new(state.get_memory())?; let value = ot.get_next_property(state.get_memory(), object, property)?; Ok(InstructionResult { store_value: Some(value as u16), ..Default::default() }) } fn add(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let mut result:i16 = 0; for i in 0..self.operands.len() { let arg = self.get_argument(state, i)?; debug!("Add ${:04x} to ${:04x}", arg, result); result = result + arg as i16; } Ok(InstructionResult { store_value: Some(result as u16), ..Default::default() }) } fn sub(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let mut result:i16 = self.get_argument(state, 0)? as i16; for i in 1..self.operands.len() { let arg = self.get_argument(state, i)?; debug!("Sub ${:04x} from ${:04x}", arg, result); result = result - arg as i16; } Ok(InstructionResult { store_value: Some(result as u16), ..Default::default() }) } fn mul(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let mut result:i16 = self.get_argument(state, 0)? 
as i16; for i in 1..self.operands.len() { let arg = self.get_argument(state, i)?; debug!("Mul ${:04x} by ${:04x}", arg, result); result = result * arg as i16; } Ok(InstructionResult { store_value: Some(result as u16), ..Default::default() }) } fn div(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let mut result:i16 = self.get_argument(state, 0)? as i16; for i in 1..self.operands.len() { let arg = self.get_argument(state, i)?; if arg == 0 { return Err(InfocomError::Memory(format!("Division by zero"))); } debug!("Div ${:04x} by ${:04x}", arg, result); result = result / arg as i16; } Ok(InstructionResult { store_value: Some(result as u16), ..Default::default() }) } fn modulo(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let mut result:i16 = self.get_argument(state, 0)? as i16; for i in 1..self.operands.len() { let arg = self.get_argument(state, i)?; if arg == 0 { return Err(InfocomError::Memory(format!("Modulo by zero"))); } debug!("Mod ${:04x} by ${:04x}", arg, result); result = result.rem_euclid(arg as i16); } Ok(InstructionResult { store_value: Some(result as u16), ..Default::default() }) } fn call_2s(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let routine = self.get_argument(state, 0)?; let arg = self.get_argument(state, 1)?; let next_pc = state.call(routine, vec![arg], self.store_variable, self.next_pc)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn call_2n(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let routine = self.get_argument(state, 0)?; let arg = self.get_argument(state, 1)?; let next_pc = state.call(routine, vec![arg], None, self.next_pc)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn set_colour(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("set_colour not implemented yet"))) } fn throw(&self, state: &mut FrameStack) 
-> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("throw not implemented yet"))) } // 1OP fn jz(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let a = self.get_argument(state, 0)?; Ok(InstructionResult { branch_condition: Some(a == 0), ..Default::default() }) } fn get_sibling(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)?; let ot = ObjectTable::new(state.get_memory())?; let o = ot.get_object(state.get_memory(), object as usize)?; Ok(InstructionResult { store_value: Some(o.get_sibling()), branch_condition: Some(o.get_sibling() != 0), ..Default::default() }) } fn get_child(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)?; let ot = ObjectTable::new(state.get_memory())?; let o = ot.get_object(state.get_memory(), object as usize)?; Ok(InstructionResult { store_value: Some(o.get_child()), branch_condition: Some(o.get_child() != 0), ..Default::default() }) } fn get_parent(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)?; let ot = ObjectTable::new(state.get_memory())?; let o = ot.get_object(state.get_memory(), object as usize)?; Ok(InstructionResult { store_value: Some(o.get_parent()), ..Default::default() }) } fn get_prop_len(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)? as usize; let ot = ObjectTable::new(state.get_memory())?; Ok(InstructionResult { store_value: Some(ot.get_property_len(state.get_memory(), object)? as u16), ..Default::default() }) } fn inc(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let var = self.get_indirect_variable_reference(state, 0)?; let var_value = state.get_variable(var, true)? 
as i16 + 1; state.set_variable(var, var_value as u16, true)?; Ok(InstructionResult::default()) } fn dec(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let var = self.get_indirect_variable_reference(state, 0)?; let var_value = state.get_variable(var, true)? as i16 - 1; state.set_variable(var, var_value as u16, true)?; Ok(InstructionResult::default()) } fn print_addr(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let addr = self.get_argument(state, 0)? as usize; let decoder = Decoder::new(state.get_memory())?; let string = decoder.decode(addr)?; print!("{}", string); Ok(InstructionResult::default()) } fn call_1s(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let routine = self.get_argument(state, 0)?; let next_pc = state.call(routine, vec![], self.store_variable, self.next_pc)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn remove_obj(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)? as usize; let mut ot = ObjectTable::new(state.get_memory())?; ot.remove_object(state, object)?; Ok(InstructionResult::default()) } fn print_obj(&self, state: &mut FrameStack, interface: &mut dyn Interface) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)? as usize; let ot = ObjectTable::new(state.get_memory())?; let o = ot.get_object(&mut state.get_memory(), object)?; interface.print(&o.get_short_name()); Ok(InstructionResult::default()) } fn ret(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let value = self.get_argument(state, 0)?; let next_pc = state.return_from(value)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn jump(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let label = self.get_argument(state, 0)? 
as i16; let address = self.next_pc as isize + label as isize - 2; debug!("JUMP: {} -> {}", label, self.next_pc); Ok(InstructionResult { next_pc: Some(address as usize), ..Default::default() }) } fn print_paddr(&self, state: &mut FrameStack, interface: &mut dyn Interface) -> Result<InstructionResult,InfocomError> { let packed_address = self.get_argument(state, 0)?; let address = state.unpack_address(packed_address)?; let decoder = Decoder::new(state.get_memory())?; let string = decoder.decode(address)?; interface.print(&string); Ok(InstructionResult::default()) } fn load(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let var = self.get_indirect_variable_reference(state, 0)?; let var_value = state.get_variable(var, true)?; Ok(InstructionResult { store_value: Some(var_value), ..Default::default() }) } // Also VAR:18 for version 5+ fn not(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let value = self.get_argument(state, 0)?; let result = !value; Ok(InstructionResult { store_value: Some(result), ..Default::default() }) } fn call_1n(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let routine = self.get_argument(state, 0)?; let next_pc = state.call(routine, vec![], None, self.next_pc)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } // 0OP fn rtrue(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let next_pc = state.return_from(1)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn rfalse(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let next_pc = state.return_from(0)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn print(&self, state: &FrameStack, interface: &mut dyn Interface) -> Result<InstructionResult,InfocomError> { let address = self.address + 1; let decoder = Decoder::new(state.get_memory())?; let string = decoder.decode(address)?; 
interface.print(&string); Ok(InstructionResult::default()) } fn print_ret(&self, state: &mut FrameStack, interface: &mut dyn Interface) -> Result<InstructionResult,InfocomError> { let address = self.address + 1; let decoder = Decoder::new(state.get_memory())?; let string = decoder.decode(address)?; interface.print(&string); interface.new_line(); let next_pc = state.return_from(1)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn nop(&self, state: &FrameStack) -> Result<InstructionResult,InfocomError> { debug!("NOP"); Ok(InstructionResult::default()) } fn save_v1(&self, state: &FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("save_v1 not implemented yet"))) } fn save_v4(&self, state: &FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("save_v4 not implemented yet"))) } fn restore_v1(&self, state: &FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("restore_v1 not implemented yet"))) } fn restore_v4(&self, state: &FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("restore_v4 not implemented yet"))) } fn restart(&self, state: &FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("restart not implemented yet"))) } fn ret_popped(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let value = state.get_variable(0, false)?; let next_pc = state.return_from(value)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn pop(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { state.get_variable(0, false)?; Ok(InstructionResult::default()) } fn catch(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("catch not implemented yet"))) } fn quit(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { 
Err(InfocomError::Memory(format!("quit not implemented yet"))) } fn new_line(&self, state: &mut FrameStack, interface: &mut dyn Interface) -> Result<InstructionResult,InfocomError> { interface.new_line(); Ok(InstructionResult::default()) } fn show_status(&self, state: &mut FrameStack, interface: &mut Interface) -> Result<InstructionResult,InfocomError> { let v1 = state.get_variable(17, false)? as i16; let v2 = state.get_variable(18, false)?; let name_obj = state.get_variable(16, false)? as usize; let o = ObjectTable::new(state.get_memory())?.get_object(state.get_memory(), name_obj)?; let status_type = match state.get_memory().version { Version::V(3) => { let flags1 = state.get_memory().get_byte(0x01)?; if flags1 & 0x02 == 0x02 { StatusLineFormat::TIMED } else { StatusLineFormat::SCORED } }, _ => StatusLineFormat::SCORED }; interface.status_line(&o.get_short_name(), status_type, v1, v2); Ok(InstructionResult::default()) } fn verify(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("verify not implemented yet"))) } fn piracy(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { debug!("PIRACY: {:?}", self.branch_offset.as_ref().unwrap()); Ok(InstructionResult { branch_condition: Some(self.branch_offset.as_ref().unwrap().condition), ..Default::default() }) } // VAR fn call(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let packed_address = self.get_argument(state, 0)?; let mut args:Vec<u16> = Vec::new(); for i in 1..self.operands.len() { args.push(self.get_argument(state, i)?); } let next_pc = state.call(packed_address, args, self.store_variable, self.next_pc)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn storew(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let array = self.get_argument(state, 0)? as usize; let word_index = self.get_argument(state, 1)? 
as usize; let value = self.get_argument(state, 2)?; state.set_word(array + (2 * word_index), value)?; Ok(InstructionResult::default()) } fn storeb(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let array = self.get_argument(state, 0)? as usize; let byte_index = self.get_argument(state, 1)? as usize; let value = (self.get_argument(state, 2)? as u8) & 0xFF; state.set_byte(array + byte_index, value)?; Ok(InstructionResult::default()) } fn put_prop(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let object = self.get_argument(state, 0)? as usize; let property = self.get_argument(state, 1)? as usize; let value = self.get_argument(state, 2)?; let mut ot = ObjectTable::new(state.get_memory())?; ot.put_property_data(state, object, property, value)?; Ok(InstructionResult::default()) } fn sread_v1(&self, state: &mut FrameStack, interface: &mut dyn Interface) -> Result<InstructionResult,InfocomError> { self.show_status(state, interface)?; // let v2 = state.get_variable(18, false)?; // let name_obj = state.get_variable(16, false)? as usize; // let o = ObjectTable::new(state.get_memory())?.get_object(state.get_memory(), name_obj)?; // let status_type = match state.get_memory().version { // Version::V(3) => { // let flags1 = state.get_memory().get_byte(0x01)?; // if flags1 & 0x02 == 0x02 { // StatusLineFormat::TIMED // } else { // StatusLineFormat::SCORED // } // }, // _ => StatusLineFormat::SCORED // }; // interface.status_line(&o.get_short_name(), status_type, v1, v2); let text_buffer = self.get_argument(state, 0)? as usize; let parse_buffer = self.get_argument(state, 1)? as usize; let max_chars = state.get_memory().get_byte(text_buffer)? as usize - 1; debug!("Text buffer: ${:04x} for ${:02x} bytes", text_buffer, max_chars); let mut input = interface.read(HashSet::from_iter(vec!['\n', '\r']), max_chars); // Remove the terminating character from the buffer... 
let terminator = input.pop(); debug!("Input: {}", input); let encoder = Encoder::new(state.get_memory())?; let mut input_bytes = encoder.to_bytes(&input); // ...and replace it with a 0 byte input_bytes.push(0); // Byte 1 of the buffer is the number of characters read state.set_byte(text_buffer + 1, input.len() as u8)?; // Byte 2 onward is the text with a '\0' terminator. for (i, c) in input_bytes.iter().enumerate() { state.set_byte(text_buffer + i + 2, *c)?; } let max_words = state.get_memory().get_byte(parse_buffer)?; debug!("Parse buffer: ${:04x} for ${:02x} words", parse_buffer, max_words); let dic = Dictionary::new(state.get_memory())?; dic.analyze_text(state, &input, parse_buffer)?; // state.set_byte(parse_buffer + 1, 1)?; // state.set_word(parse_buffer + 2, 0)?; // state.set_byte(parse_buffer + 4, input.len() as u8)?; // state.set_byte(parse_buffer + 5, 1)?; // // TODO: Parse words Ok(InstructionResult::default()) } fn sread_v4(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("sread not implemented yet"))) } fn aread(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("aread not implemented yet"))) } fn print_char(&self, state: &mut FrameStack, interface: &mut dyn Interface) -> Result<InstructionResult,InfocomError> { let z = self.get_argument(state, 0)?; let d = Decoder::new(state.get_memory())?; interface.print(&format!("{}", d.zscii_to_char(z)?)); Ok(InstructionResult::default()) } fn print_num(&self, state: &mut FrameStack, interface: &mut dyn Interface) -> Result<InstructionResult,InfocomError> { let value = self.get_argument(state, 0)? 
as i16; interface.print(&format!("{}", value)); Ok(InstructionResult::default()) } fn random(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let range = self.get_argument(state, 0)?; let value = state.random(range)?; Ok(InstructionResult { store_value: Some(value), ..Default::default() }) } fn push(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let value = self.get_argument(state, 0)?; state.current_frame.push(value); Ok(InstructionResult::default()) } fn pull(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let variable = self.get_indirect_variable_reference(state, 0)?; let value = state.current_frame.pop()?; state.set_variable(variable, value, false)?; Ok(InstructionResult::default()) } fn split_window(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("split_window not implemented yet"))) } fn set_window(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("set_window not implemented yet"))) } fn call_vs2(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let packed_address = self.get_argument(state, 0)?; let mut args:Vec<u16> = Vec::new(); for i in 1..self.operands.len() { args.push(self.get_argument(state, i)?); } let next_pc = state.call(packed_address, args, self.store_variable, self.next_pc)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn erase_window(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("erase_window not implemented yet"))) } fn erase_line(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("erase_line not implemented yet"))) } fn set_cursor(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("set_cursor not implemented yet"))) } fn 
get_cursor(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("get_cursor not implemented yet"))) } fn set_text_style(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("set_text_style not implemented yet"))) } fn buffer_mode(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("buffer_mode not implemented yet"))) } fn output_stream(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("output_stream not implemented yet"))) } fn input_stream(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("input_stream not implemented yet"))) } fn sound_effect(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("sound_effect not implemented yet"))) } fn read_char(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("read_char not implemented yet"))) } fn scan_table(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("scan_table not implemented yet"))) } fn call_vn(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let packed_address = self.get_argument(state, 0)?; let mut args:Vec<u16> = Vec::new(); for i in 1..self.operands.len() { args.push(self.get_argument(state, i)?); } let next_pc = state.call(packed_address, args, None, self.next_pc)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn call_vn2(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { let packed_address = self.get_argument(state, 0)?; let mut args:Vec<u16> = Vec::new(); for i in 1..self.operands.len() { args.push(self.get_argument(state, i)?); } let next_pc = state.call(packed_address, args, None, 
self.next_pc)?; Ok(InstructionResult { next_pc: Some(next_pc), ..Default::default() }) } fn tokenise(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("tokenise not implemented yet"))) } fn encode_text(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("encode_text not implemented yet"))) } fn copy_table(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("copy_table not implemented yet"))) } fn print_table(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("print_table not implemented yet"))) } fn check_arg_count(&self, state: &mut FrameStack) -> Result<InstructionResult,InfocomError> { Err(InfocomError::Memory(format!("check_arg_count not implemented yet"))) } pub fn execute<T>(&mut self, state: &mut FrameStack, interface: &mut T) -> Result<usize,InfocomError> where T: Interface { debug!("{:?}", self); let result = match state.get_memory().version { Version::V(3) => { if self.opcode < 0x80 || (self.opcode > 0xBf && self.opcode < 0xE0) { match self.opcode & 0x1F { 0x01 => self.je(state), 0x02 => self.jl(state), 0x03 => self.jg(state), 0x04 => self.dec_chk(state), 0x05 => self.inc_chk(state), 0x06 => self.jin(state), 0x07 => self.test(state), 0x08 => self.or(state), 0x09 => self.and(state), 0x0A => self.test_attr(state), 0x0B => self.set_attr(state), 0x0C => self.clear_attr(state), 0x0D => self.store(state), 0x0E => self.insert_obj(state), 0x0F => self.loadw(state), 0x10 => self.loadb(state), 0x11 => self.get_prop(state), 0x12 => self.get_prop_addr(state), 0x13 => self.get_next_prop(state), 0x14 => self.add(state), 0x15 => self.sub(state), 0x16 => self.mul(state), 0x17 => self.div(state), 0x18 => self.modulo(state), _ => Err(InfocomError::Memory(format!("Unimplemented opcode ${:02x}", self.opcode))) } } else if self.opcode > 0x7F && 
self.opcode < 0xB0 { match self.opcode & 0xF { 0x00 => self.jz(state), 0x01 => self.get_sibling(state), 0x02 => self.get_child(state), 0x03 => self.get_parent(state), 0x04 => self.get_prop_len(state), 0x05 => self.inc(state), 0x06 => self.dec(state), 0x07 => self.print_addr(state), 0x09 => self.remove_obj(state), 0x0A => self.print_obj(state, interface), 0x0B => self.ret(state), 0x0C => self.jump(state), 0x0D => self.print_paddr(state, interface), 0x0E => self.load(state), 0x0F => self.not(state), _ => Err(InfocomError::Memory(format!("Unimplemented opcode ${:02x}", self.opcode))) } } else if self.opcode > 0xAF && self.opcode < 0xC0 { match self.opcode & 0xF { 0x00 => self.rtrue(state), 0x01 => self.rfalse(state), 0x02 => self.print(state, interface), 0x03 => self.print_ret(state, interface), 0x04 => self.nop(state), 0x05 => self.save_v1(state), 0x06 => self.restore_v1(state), 0x07 => self.restart(state), 0x08 => self.ret_popped(state), 0x09 => self.pop(state), 0x0A => self.quit(state), 0x0B => self.new_line(state, interface), 0x0C => self.show_status(state, interface), 0x0D => self.verify(state), _ => Err(InfocomError::Memory(format!("Unimplemented opcode ${:02x}", self.opcode))) } } else { match self.opcode & 0x1F { 0x00 => self.call(state), 0x01 => self.storew(state), 0x02 => self.storeb(state), 0x03 => self.put_prop(state), 0x04 => self.sread_v1(state, interface), 0x05 => self.print_char(state, interface), 0x06 => self.print_num(state, interface), 0x07 => self.random(state), 0x08 => self.push(state), 0x09 => self.pull(state), 0x0A => self.split_window(state), 0x0B => self.set_window(state), 0x13 => self.output_stream(state), 0x14 => self.input_stream(state), 0x15 => self.sound_effect(state), _ => Err(InfocomError::Memory(format!("Unimplemented opcode ${:02x}", self.opcode))) } } }, _ => Err(InfocomError::Memory(format!("Unimplemented verison {:?}", state.get_memory().version))) }?; match result.store_value { Some(_) => debug!("{:?}", result), _ => match 
result.branch_condition { Some(_) => debug!("{:?}", result), _ => {} } } // CALL instructions have a store_variable, but do not store a result if let Some(var) = self.store_variable { if let Some(store_value) = result.store_value { state.set_variable(var, store_value, false)?; } } if let Some(offset) = &self.branch_offset { if result.branch_condition.unwrap() == offset.condition { if let Some(ret) = offset.return_value { return state.return_from(ret as u16) } return Ok(offset.address.unwrap()) } } if let Some(next_pc) = result.next_pc { Ok(next_pc) } else { Ok(self.next_pc) } } } fn read_byte(mem: &Vec<u8>, address: usize) -> u8 { mem[address] } fn read_word(mem: &Vec<u8>, address: usize) -> u16 { let high = mem[address]; let low = mem[address + 1]; (((high as u16) << 8) & 0xFF00) | (low as u16 & 0xFF) } fn get_store_variable(mem: &Vec<u8>, address: usize, opcode: u8, form: &OpcodeForm) -> Option<u8> { match form { OpcodeForm::Extended => { match opcode { 1 | 2 | 3 | 4 | 9 | 10 | 19 | 29 => { Some(read_byte(mem, address)) }, _ => None } }, _ => match opcode { // Long 2OP, Variable 2OP 0x00..=0x7F | 0xC0..=0xDF => { match opcode & 0x1F { 8 | 9 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 => { Some(read_byte(mem, address)) } _ => None } }, // Short 1OP 0x80..=0xAF => { match opcode & 0xF { 1 | 2 | 3 | 4| 8 | 14 => { Some(read_byte(mem, address)) }, 15 => if read_byte(mem, 0) < 5 { Some(read_byte(mem, address)) } else { None } _ => None, } }, // Short 0OP 0xB0..=0xBF => { match opcode & 0xF { 5 | 6 => if read_byte(mem, 0) == 4 { Some(read_byte(mem, address)) } else { None }, 9 => if read_byte(mem, 0) > 4 { Some(read_byte(mem, address)) } else { None }, _ => None, } }, // Variable VAR 0xE0..=0xFF => { match opcode & 0x1F { 0 | 7 | 12 | 22 | 23 | 24 => { Some(read_byte(mem, address)) }, 4 => if read_byte(mem, 0) > 4 { Some(read_byte(mem, address)) } else { None }, 9 => if read_byte(mem, 0) == 6 { Some(read_byte(mem, address)) } else { None }, _ => None } } } } 
} #[derive(Debug, Serialize)] struct BranchOffset { size: usize, condition: bool, return_value: Option<u8>, address: Option<usize>, } fn decode_branch_offset(mem: &Vec<u8>, address: usize) -> BranchOffset { let b1 = read_byte(mem, address); let condition = b1 & 0x80 == 0x80; if b1 & 0x40 == 0x40 { let offset = b1 & 0x3F; match offset { 0 => BranchOffset { size: 1, condition, return_value: Some(0), address: None }, 1 => BranchOffset { size: 1, condition, return_value: Some(1), address: None }, _ => BranchOffset { size: 1, condition, return_value: None, address: Some((address as isize + offset as isize - 1) as usize) } } } else { let mut high = b1 & 0x3F; if high & 0x20 == 0x20 { high |= 0xC0; } let low = read_byte(mem, address + 1); let offset:i16 = ((((high as u16) << 8) & 0xFF00) | (low as u16 & 0xFF)) as i16; match offset { 0 => BranchOffset { size: 2, condition, return_value: Some(0), address: None }, 1 => BranchOffset { size: 2, condition, return_value: Some(1), address: None }, _ => BranchOffset { size: 2, condition, return_value: None, address: Some((address as isize + offset as isize) as usize) } } } } fn get_branch_offset(mem: &Vec<u8>, address: usize, opcode: u8, form: &OpcodeForm) -> Option<BranchOffset> { match form { OpcodeForm::Extended => { match opcode { 6 | 24 | 27 => { Some(decode_branch_offset(mem, address)) }, _ => None } }, _ => match opcode { // Long 2OP, Variable 2OP 0x00..=0x7F | 0xC0..=0xDF => { match opcode & 0x1F { 1 | 2 | 3 | 4 | 5 | 6 | 7 | 10 => { Some(decode_branch_offset(mem, address)) }, _ => None } }, // Short 1OP 0x80..=0xAF => { match opcode & 0xF { 0 | 1 | 2 => { Some(decode_branch_offset(mem, address)) }, _ => None, } }, // Short 0OP 0xB0..=0xBF => { match opcode & 0xF { 13 | 15 => { Some(decode_branch_offset(mem, address)) }, 5 | 6 => if read_byte(mem, 0) < 4 { { Some(decode_branch_offset(mem, address)) } } else { None }, _ => None, } }, // Variable VAR 0xE0..=0xFF => { match opcode & 0x1F { 17 | 31 => { 
Some(decode_branch_offset(mem, address)) }, _ => None } } } } } fn get_literal_string(mem: &Vec<u8>, address: usize, opcode: u8, form: &OpcodeForm) -> Option<usize> { match form { OpcodeForm::Extended => None, _ => match opcode { 0xB2 | 0xB3 => { let mut size = 0; loop { let v = read_word(mem, address + size); size += 2; if v & 0x8000 == 0x8000 { break; } } Some(size) }, _ => None } } } pub fn decode_instruction(state: &FrameStack, address: usize) -> Result<Instruction, InfocomError> { let mem = state.get_memory().get_memory(); let mut opcode_byte = read_byte(&mem, address); let mut ext_opcode:Option<u8> = None; let form = OpcodeForm::from(opcode_byte); let mut operand_types:Vec<OperandType> = Vec::new(); let mut operands:Vec<u16> = Vec::new(); let mut skip = 1; match form { OpcodeForm::Long => { if opcode_byte & 0x40 == 0x40 { operand_types.push(OperandType::Variable); } else { operand_types.push(OperandType::SmallConstant); } if opcode_byte & 0x20 == 0x20 { operand_types.push(OperandType::Variable); } else { operand_types.push(OperandType::SmallConstant); } }, OpcodeForm::Short => { let ot = OperandType::from(opcode_byte >> 4); match ot { OperandType::Omitted => {}, _ => operand_types.push(ot) } }, OpcodeForm::Variable => { let types_1 = read_byte(&mem, address + 1); let oc = opcode_byte & 0x1F; // First operand type byte for i in 0..4 { let t = types_1 >> (6 - (i * 2)); let ot = OperandType::from(t); match ot { OperandType::Omitted => break, _ => operand_types.push(ot) } } skip += 1; // Optional second operand type byte if oc == 12 || oc == 26 { let types_2 = read_byte(&mem, address + 2); for i in 0..4 { let t = types_2 >> (6 - (i * 2)); let ot = OperandType::from(t); match ot { OperandType::Omitted => break, _ => operand_types.push(ot) } } skip += 1; } }, OpcodeForm::Extended => { ext_opcode = Some(read_byte(&mem, address + 1)); let types_1 = read_byte(&mem, address + 2); for i in 0..4 { let t = types_1 >> (6 - (i * 2)); let ot = OperandType::from(t); match ot 
{ OperandType::Omitted => break, _ => operand_types.push(ot) } } skip += 2; } }; for operand_type in &operand_types { match operand_type { OperandType::SmallConstant | OperandType::Variable => { let v = read_byte(&mem, address + skip); operands.push(v as u16); skip += 1 }, OperandType::LargeConstant => { let v = read_word(&mem, address + skip); operands.push(v); skip += 2 }, OperandType::Omitted => { break } } } let store_variable = get_store_variable(&mem, address + skip, opcode_byte, &form); if let Some(_) = store_variable { skip = skip + 1; } let branch_offset = get_branch_offset(&mem, address + skip, opcode_byte, &form); if let Some(b) = &branch_offset { skip += b.size; } if let Some(l) = get_literal_string(&mem, address + skip, opcode_byte, &form) { skip += l; } let name = match opcode_byte { 0x01 | 0x21 | 0x41 | 0x61 | 0xC1 => String::from("je"), 0x02 | 0x22 | 0x42 | 0x62 | 0xC2 => String::from("jl"), 0x03 | 0x23 | 0x43 | 0x63 | 0xC3 => String::from("jg"), 0x04 | 0x24 | 0x44 | 0x64 | 0xC4 => String::from("dec_chk"), 0x05 | 0x25 | 0x45 | 0x65 | 0xC5 => String::from("inc_chk"), 0x06 | 0x26 | 0x46 | 0x66 | 0xC6 => String::from("jin"), 0x07 | 0x27 | 0x47 | 0x67 | 0xC7 => String::from("test"), 0x08 | 0x28 | 0x48 | 0x68 | 0xC8 => String::from("or"), 0x09 | 0x29 | 0x49 | 0x69 | 0xC9 => String::from("and"), 0x0A | 0x2A | 0x4A | 0x6A | 0xCA => String::from("test_attr"), 0x0B | 0x2B | 0x4B | 0x6B | 0xCB => String::from("set_attr"), 0x0C | 0x2C | 0x4C | 0x6C | 0xCC => String::from("clear_attr"), 0x0D | 0x2D | 0x4D | 0x6D | 0xCD => String::from("store"), 0x0E | 0x2E | 0x4E | 0x6E | 0xCE => String::from("insert_obj"), 0x0F | 0x2F | 0x4F | 0x6F | 0xCF => String::from("loadw"), 0x10 | 0x30 | 0x50 | 0x70 | 0xD0 => String::from("loadb"), 0x11 | 0x31 | 0x51 | 0x71 | 0xD1 => String::from("get_prop"), 0x12 | 0x32 | 0x52 | 0x72 | 0xD2 => String::from("get_prop_addr"), 0x13 | 0x33 | 0x53 | 0x73 | 0xD3 => String::from("get_next_prop"), 0x14 | 0x34 | 0x54 | 0x74 | 0xD4 => 
String::from("add"), 0x15 | 0x35 | 0x55 | 0x75 | 0xD5 => String::from("sub"), 0x16 | 0x36 | 0x56 | 0x76 | 0xD6 => String::from("mul"), 0x17 | 0x37 | 0x57 | 0x77 | 0xD7 => String::from("div"), 0x18 | 0x38 | 0x58 | 0x78 | 0xD8 => String::from("mod"), 0x19 | 0x39 | 0x59 | 0x79 | 0xD9 => String::from("call_2s"), 0x1A | 0x3A | 0x5A | 0x7A | 0xDA => String::from("call_2n"), 0x1B | 0x3B | 0x5B | 0x7B | 0xDB => String::from("set_colour"), 0x1C | 0x3C | 0x5C | 0x7C | 0xDC => String::from("throw"), 0x80 | 0x90 | 0xA0 => String::from("jz"), 0x81 | 0x91 | 0xA1 => String::from("get_sibling"), 0x82 | 0x92 | 0xA2 => String::from("get_child"), 0x83 | 0x93 | 0xA3 => String::from("get_parent"), 0x84 | 0x94 | 0xA4 => String::from("get_prop_len"), 0x85 | 0x95 | 0xA5 => String::from("inc"), 0x86 | 0x96 | 0xA6 => String::from("dec"), 0x87 | 0x97 | 0xA7 => String::from("print_addr"), 0x88 | 0x98 | 0xA8 => String::from("call_1s"), 0x89 | 0x99 | 0xA9 => String::from("remove_obj"), 0x8A | 0x9A | 0xAA => String::from("print_obj"), 0x8B | 0x9B | 0xAB => String::from("ret"), 0x8C | 0x9C | 0xAC => String::from("jump"), 0x8D | 0x9D | 0xAD => String::from("print_paddr"), 0x8E | 0x9E | 0xAE => String::from("load"), 0x8F | 0x9F | 0xAF => match state.get_memory().version { Version::V(1) | Version::V(2) | Version::V(3) | Version::V(4) => String::from("not"), _ => String::from("call_1n") }, 0xB0 => String::from("rtrue"), 0xB1 => String::from("rfalse"), 0xB2 => String::from("print"), 0xB3 => String::from("print_ret"), 0xB4 => String::from("nop"), 0xB5 => String::from("save"), 0xB6 => String::from("restore"), 0xB7 => String::from("restart"), 0xB8 => String::from("ret_popped"), 0xB9 => String::from("rtrue"), 0xBA => String::from("quit"), 0xBB => String::from("new_line"), 0xBC => String::from("show_status"), 0xBD => String::from("verify"), 0xBE => { let mut s = String::from("EXT "); let default = format!("${:02x}", ext_opcode.unwrap()); s.push_str(match ext_opcode.unwrap() { 0x00 => "save", 0x01 => 
"restore", 0x02 => "log_shift", 0x03 => "art_shift", 0x04 => "set_font", 0x05 => "draw_picture", 0x06 => "picture_data", 0x07 => "erase_picture", 0x08 => "set_margins", 0x09 => "save_undo", 0x0A => "restore_undo", 0x0B => "print_unicode", 0x0C => "check_unicode", 0x0D => "set_true_colour", 0x10 => "move_window", 0x11 => "window_size", 0x12 => "window_style", 0x13 => "get_wind_prop", 0x14 => "scroll_window", 0x15 => "pop_stack", 0x16 => "read_mouse", 0x17 => "mouse_window", 0x18 => "push_stack", 0x19 => "put_wind_prop", 0x1A => "print_form", 0x1B => "make_menu", 0x1C => "picture_table", 0x1D => "buffer_screen", _ => &default }); s }, 0xBF => String::from("piracy"), 0xE0 => match state.get_memory().version { Version::V(1) | Version::V(2) | Version::V(3) => String::from("call"), _ => String::from("call_vs") }, 0xE1 => String::from("storew"), 0xE2 => String::from("storeb"), 0xE3 => String::from("put"), 0xE4 => match state.get_memory().version { Version::V(v) => { match v { 1..=4 => String::from("sread"), _ => String::from("aread") } } }, 0xE5 => String::from("print_char"), 0xE6 => String::from("print_num"), 0xE7 => String::from("random"), 0xE8 => String::from("push"), 0xE9 => String::from("pull"), 0xEA => String::from("split_window"), 0xEB => String::from("set_window"), 0xEC => String::from("call_vs2"), 0xED => String::from("erase_window"), 0xEE => String::from("erase_line"), 0xEF => String::from("set_cursor"), 0xF0 => String::from("get_cursor"), 0xF1 => String::from("set_text_style"), 0xF2 => String::from("buffer_mode"), 0xF3 => String::from("output_stream"), 0xF4 => String::from("input_stream"), 0xF5 => String::from("sound_effect"), 0xF6 => String::from("read_char"), 0xF7 => String::from("scan_table"), 0xF8 => String::from("not"), 0xF9 => String::from("call_vn"), 0xFA => String::from("call_vn2"), 0xFB => String::from("tokenize"), 0xFC => String::from("encode_text"), 0xFD => String::from("copy_table"), 0xFE => String::from("print_table"), 0xFF => 
String::from("check_arg_count"), _ => format!("${:02x}", opcode_byte) }; if let Some(o) = ext_opcode { opcode_byte = o; } Ok(Instruction { address, name, form, opcode: opcode_byte, operand_types, operands, store_variable, branch_offset, next_pc: address + skip }) }
/// Flags of the PPU status register.
pub struct PpuStatus {
    /// 1 while in VBlank; cleared by reading this register.
    pub vblank_flag: bool,
    /// 1 when a sprite hit occurred.
    pub sprite_hit: bool,
    /// 0: fewer than 8 sprites, 1: 9 or more.
    pub sprite_overflow: bool,
}

impl PpuStatus {
    /// Returns a status value with every flag cleared.
    pub fn new() -> Self {
        PpuStatus {
            vblank_flag: false,
            sprite_hit: false,
            sprite_overflow: false,
        }
    }

    /// Packs the flags into the register's byte layout:
    /// bit 7 = VBlank, bit 6 = sprite hit, bit 5 = sprite overflow.
    pub fn to_u8(&self) -> u8 {
        let mut bits = 0u8;
        if self.vblank_flag {
            bits |= 1 << 7;
        }
        if self.sprite_hit {
            bits |= 1 << 6;
        }
        if self.sprite_overflow {
            bits |= 1 << 5;
        }
        bits
    }
}

#[cfg(test)]
mod ppu_status_test {
    use super::*;

    #[test]
    fn to_u8_test() {
        let mut status = PpuStatus::new();
        assert_eq!(status.to_u8(), 0b0000_0000);
        status.vblank_flag = true;
        assert_eq!(status.to_u8(), 0b1000_0000);
        status.sprite_hit = true;
        assert_eq!(status.to_u8(), 0b1100_0000);
        status.sprite_overflow = true;
        assert_eq!(status.to_u8(), 0b1110_0000);
    }
}
// Demonstrates that assignment is an expression of type `()` in Rust:
// `y = "hello world"` evaluates to `()`, and that `()` is what binds to `x`.
fn main() {
    let mut x;
    let mut y;
    x = y = "hello world"; // x : (), y : &str — the assignment's value is `()`, not the string
    println!("{:?} {:?}", x, y);
    y = "world";
    println!("{} {}", x, y)
}
mod utils; use wasm_bindgen::prelude::*; use js_sys::Array; use serde::ser::{Serialize, Serializer, SerializeStruct}; use std::cmp; extern crate web_sys; // A macro to provide `println!(..)`-style syntax for `console.log` logging. macro_rules! log { ( $( $t:tt )* ) => { web_sys::console::log_1(&format!( $( $t )* ).into()); } } // When the `wee_alloc` feature is enabled, use `wee_alloc` as the global // allocator. #[cfg(feature = "wee_alloc")] #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[wasm_bindgen] pub struct Segment { pub x1: i32, pub y1: i32, pub x2: i32, pub y2: i32, pub width: i32, pub height: i32, pub area: i32, } impl Serialize for Segment { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { // 3 is the number of fields in the struct. let mut state = serializer.serialize_struct("Segment", 7)?; state.serialize_field("x1", &self.x1)?; state.serialize_field("y1", &self.y1)?; state.serialize_field("x2", &self.x2)?; state.serialize_field("y2", &self.y2)?; state.serialize_field("width", &self.width)?; state.serialize_field("height", &self.height)?; state.serialize_field("area", &self.area)?; state.end() } } #[wasm_bindgen] extern { fn alert(s: &str); } fn get_index(x: &i32, y: &i32, width: i32, part: i32) -> usize { return (((y * width) + x) * 4 + part) as usize; } pub fn grayscale(image: &mut Vec<u8>, width: i32, height: i32) { for y in 0..height { for x in 0..width { let r = image[get_index(&x, &y, width, 0)] as f32; let g = image[get_index(&x, &y, width, 1)] as f32; let b = image[get_index(&x, &y, width, 2)] as f32; let r = (r * 0.3) as u8; let g = (g * 0.59) as u8; let b = (b * 0.11) as u8; image[get_index(&x, &y, width, 0)] = r; image[get_index(&x, &y, width, 1)] = g; image[get_index(&x, &y, width, 2)] = b; } } } pub fn thresholding(image: &mut Vec<u8>, width: i32, height: i32, threshold: u8) { grayscale(image, width, height); for y in 0..height { for x in 0..width { let gray = 
image[get_index(&x, &y, width, 0)] as u8; let alpha = image[get_index(&x, &y, width, 3)] as u8; let value = 255 * (gray < threshold) as u8; image[get_index(&x, &y, width, 0)] = value; image[get_index(&x, &y, width, 1)] = value; image[get_index(&x, &y, width, 2)] = value; image[get_index(&x, &y, width, 3)] = alpha; } } } fn find_text(image: Vec<u8>, height: i32, width: i32, max_white_space: i32, max_font_line_width: i32, min_text_width: i32) -> Vec<Segment> { let mut result: Vec<Segment> = Vec::new(); let mut segments: Vec<Vec<[i32;4]>> = Vec::new(); let mut raw_segments: Vec<Vec<[i32;4]>> = Vec::new(); for _ in 0..height { segments.push(Vec::new()); raw_segments.push(Vec::new()); } let mut pattern_start_x = -1; let mut pattern_length = 0; let mut white_pixels = 0; let mut black_pixels = 0; for y in 0..height { for x in 0..width { let color = image[get_index(&x, &y, width, 0)] as u8; if color == 255 && pattern_start_x != -1 { white_pixels += 1; black_pixels = 0; } else if color == 0 { black_pixels += 1; if pattern_start_x == -1 { pattern_start_x = x; } white_pixels = 0; } if white_pixels > max_white_space || black_pixels > max_font_line_width || x == width - 1 { if pattern_length >= min_text_width { segments[y as usize].push([pattern_start_x, y, pattern_start_x + pattern_length, y]); } white_pixels = 0; black_pixels = 0; pattern_length = 0; pattern_start_x = -1; } if pattern_start_x != -1 { pattern_start_x += 1; } } } for y in 0..height-2 { let list_y: &Vec<[i32; 4]> = &segments[y as usize]; for raw_w in 0..2 { let w = y + raw_w; let list_w: &Vec<[i32; 4]> = &segments[w as usize]; let mut i = 0; while i < list_y.len() { let s_a = &list_y[i as usize]; let mut j = 0; while j < list_w.len() { let s_b = &list_w[j as usize]; if (s_a[0] <= s_b[0] && s_a[2] >= s_b[2]) || (s_a[0] >= s_b[0] && s_a[0] <= s_b[2]) || (s_a[2] >= s_b[0] && s_a[2] <= s_b[2]) { let result = [ cmp::min(s_a[0], s_b[0]), s_a[1], cmp::min(s_a[2], s_b[2]), s_b[3], ]; raw_segments[y as 
usize].push(result); break; } j += 1; } i += 1; } } } for y in 0..height { let list: &Vec<[i32;4]> = &raw_segments[y as usize]; for seg in list { result.push(Segment{ x1: seg[0], y1: seg[1], x2: seg[2], y2: seg[3], width: seg[2] - seg[0], height: seg[3] - seg[1], area: (seg[2] - seg[0]) * (seg[3] - seg[1]), }); } } return result; } #[wasm_bindgen] pub fn find_text_segments( mut image_data: Vec<u8>, width: i32, height: i32, max_white_space: i32, max_font_line_width: i32, min_text_width: i32, gray_scale_threshold: u8 ) -> Array { thresholding(&mut image_data, width, height, gray_scale_threshold); let result = find_text(image_data, height, width, max_white_space, max_font_line_width, min_text_width); log!("Found {} results", result.len()); let final_result = Array::new_with_length(result.len() as u32); for (index, tmp_segment) in result.iter().enumerate() { let json = JsValue::from_serde(tmp_segment) .expect("Could not turn segment into json"); final_result.set(index as u32, json) } return final_result; }
pub mod char; pub mod scan_iter; pub use scan_iter::CmpType; pub use scan_iter::Newline; pub use scan_iter::ScanIter;
// NOTE(review): svd2rust-generated accessors for the EXTICR2 (EXTI external
// interrupt selection) register — see the docs.rs/svd2rust link in the
// register doc below. Do not hand-edit; regenerate from the SVD instead.
#[doc = "Register `EXTICR2` reader"]
pub type R = crate::R<EXTICR2_SPEC>;
#[doc = "Register `EXTICR2` writer"]
pub type W = crate::W<EXTICR2_SPEC>;
#[doc = "Field `EXTI0_7` reader - GPIO port selection"]
pub type EXTI0_7_R = crate::FieldReader<EXTI0_7_A>;
#[doc = "GPIO port selection\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum EXTI0_7_A {
    #[doc = "0: GPIO port A selected"]
    Pa = 0,
    #[doc = "1: GPIO port B selected"]
    Pb = 1,
    #[doc = "2: GPIO port C selected"]
    Pc = 2,
    #[doc = "3: GPIO port D selected"]
    Pd = 3,
    #[doc = "5: GPIO port F selected"]
    Pf = 5,
}
impl From<EXTI0_7_A> for u8 {
    #[inline(always)]
    fn from(variant: EXTI0_7_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for EXTI0_7_A {
    type Ux = u8;
}
impl EXTI0_7_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<EXTI0_7_A> {
        // Value 4 has no defined port, hence the `None` fallthrough.
        match self.bits {
            0 => Some(EXTI0_7_A::Pa),
            1 => Some(EXTI0_7_A::Pb),
            2 => Some(EXTI0_7_A::Pc),
            3 => Some(EXTI0_7_A::Pd),
            5 => Some(EXTI0_7_A::Pf),
            _ => None,
        }
    }
    #[doc = "GPIO port A selected"]
    #[inline(always)]
    pub fn is_pa(&self) -> bool {
        *self == EXTI0_7_A::Pa
    }
    #[doc = "GPIO port B selected"]
    #[inline(always)]
    pub fn is_pb(&self) -> bool {
        *self == EXTI0_7_A::Pb
    }
    #[doc = "GPIO port C selected"]
    #[inline(always)]
    pub fn is_pc(&self) -> bool {
        *self == EXTI0_7_A::Pc
    }
    #[doc = "GPIO port D selected"]
    #[inline(always)]
    pub fn is_pd(&self) -> bool {
        *self == EXTI0_7_A::Pd
    }
    #[doc = "GPIO port F selected"]
    #[inline(always)]
    pub fn is_pf(&self) -> bool {
        *self == EXTI0_7_A::Pf
    }
}
#[doc = "Field `EXTI0_7` writer - GPIO port selection"]
pub type EXTI0_7_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O, EXTI0_7_A>;
impl<'a, REG, const O: u8> EXTI0_7_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    #[doc = "GPIO port A selected"]
    #[inline(always)]
    pub fn pa(self) -> &'a mut crate::W<REG> {
        self.variant(EXTI0_7_A::Pa)
    }
    #[doc = "GPIO port B selected"]
    #[inline(always)]
    pub fn pb(self) -> &'a mut crate::W<REG> {
        self.variant(EXTI0_7_A::Pb)
    }
    #[doc = "GPIO port C selected"]
    #[inline(always)]
    pub fn pc(self) -> &'a mut crate::W<REG> {
        self.variant(EXTI0_7_A::Pc)
    }
    #[doc = "GPIO port D selected"]
    #[inline(always)]
    pub fn pd(self) -> &'a mut crate::W<REG> {
        self.variant(EXTI0_7_A::Pd)
    }
    #[doc = "GPIO port F selected"]
    #[inline(always)]
    pub fn pf(self) -> &'a mut crate::W<REG> {
        self.variant(EXTI0_7_A::Pf)
    }
}
// The four byte-wide fields share one reader/writer type via re-exports.
#[doc = "Field `EXTI8_15` reader - GPIO port selection"]
pub use EXTI0_7_R as EXTI8_15_R;
#[doc = "Field `EXTI16_23` reader - GPIO port selection"]
pub use EXTI0_7_R as EXTI16_23_R;
#[doc = "Field `EXTI24_31` reader - GPIO port selection"]
pub use EXTI0_7_R as EXTI24_31_R;
#[doc = "Field `EXTI8_15` writer - GPIO port selection"]
pub use EXTI0_7_W as EXTI8_15_W;
#[doc = "Field `EXTI16_23` writer - GPIO port selection"]
pub use EXTI0_7_W as EXTI16_23_W;
#[doc = "Field `EXTI24_31` writer - GPIO port selection"]
pub use EXTI0_7_W as EXTI24_31_W;
impl R {
    #[doc = "Bits 0:7 - GPIO port selection"]
    #[inline(always)]
    pub fn exti0_7(&self) -> EXTI0_7_R {
        EXTI0_7_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - GPIO port selection"]
    #[inline(always)]
    pub fn exti8_15(&self) -> EXTI8_15_R {
        EXTI8_15_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bits 16:23 - GPIO port selection"]
    #[inline(always)]
    pub fn exti16_23(&self) -> EXTI16_23_R {
        EXTI16_23_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bits 24:31 - GPIO port selection"]
    #[inline(always)]
    pub fn exti24_31(&self) -> EXTI24_31_R {
        EXTI24_31_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
impl W {
    #[doc = "Bits 0:7 - GPIO port selection"]
    #[inline(always)]
    #[must_use]
    pub fn exti0_7(&mut self) -> EXTI0_7_W<EXTICR2_SPEC, 0> {
        EXTI0_7_W::new(self)
    }
    #[doc = "Bits 8:15 - GPIO port selection"]
    #[inline(always)]
    #[must_use]
    pub fn exti8_15(&mut self) -> EXTI8_15_W<EXTICR2_SPEC, 8> {
        EXTI8_15_W::new(self)
    }
    #[doc = "Bits 16:23 - GPIO port selection"]
    #[inline(always)]
    #[must_use]
    pub fn exti16_23(&mut self) -> EXTI16_23_W<EXTICR2_SPEC, 16> {
        EXTI16_23_W::new(self)
    }
    #[doc = "Bits 24:31 - GPIO port selection"]
    #[inline(always)]
    #[must_use]
    pub fn exti24_31(&mut self) -> EXTI24_31_W<EXTICR2_SPEC, 24> {
        EXTI24_31_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "EXTI external interrupt selection register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`exticr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`exticr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct EXTICR2_SPEC;
impl crate::RegisterSpec for EXTICR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`exticr2::R`](R) reader structure"]
impl crate::Readable for EXTICR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`exticr2::W`](W) writer structure"]
impl crate::Writable for EXTICR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets EXTICR2 to value 0"]
impl crate::Resettable for EXTICR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
/// Ways a snake move can fail.
///
/// Derives added so the error can be printed in diagnostics (`Debug`),
/// compared in `match`-free code and tests (`PartialEq`/`Eq`), and passed
/// around by value cheaply (`Clone`/`Copy`) — the original bare enum
/// supported none of these. Purely additive, so callers are unaffected.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SnakeError {
    /// The snake moved outside the playing field.
    OutOfBounds,
    /// The snake collided with its own body.
    EatSelf,
}
//! Traits and types for decoding CBOR.
//!
//! This module defines the trait [`Decode`] and the actual [`Decoder`].

mod decoder;
mod error;

pub use decoder::{Decoder, Probe};
pub use decoder::{ArrayIter, BytesIter, MapIter, StrIter};
pub use error::Error;

/// A type that can be decoded from CBOR.
pub trait Decode<'b>: Sized {
    /// Decode a value using the given `Decoder`.
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error>;
}

// Decode the inner value, then move it to the heap.
#[cfg(feature = "std")]
impl<'b, T: Decode<'b>> Decode<'b> for Box<T> {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        T::decode(d).map(Box::new)
    }
}

// Byte/str slices borrow directly from the decoder's input buffer;
// `'b: 'a` ties the returned borrow to the input's lifetime.
impl<'a, 'b: 'a> Decode<'b> for &'a [u8] {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        d.bytes()
    }
}

impl<'a, 'b: 'a> Decode<'b> for &'a str {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        d.str()
    }
}

// NOTE(review): always produces the `Owned` variant — a borrowed `Cow`
// is never returned by this impl.
#[cfg(feature = "std")]
impl<'b, T> Decode<'b> for std::borrow::Cow<'_, T>
where
    T: std::borrow::ToOwned + ?Sized,
    T::Owned: Decode<'b>
{
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        d.decode().map(std::borrow::Cow::Owned)
    }
}

#[cfg(feature = "std")]
impl<'b> Decode<'b> for String {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        d.str().map(String::from)
    }
}

// CBOR `null` maps to `None` (the null item is skipped); anything else
// is decoded as `Some`.
impl<'b, T: Decode<'b>> Decode<'b> for Option<T> {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        if crate::data::Type::Null == d.datatype()? {
            d.skip()?;
            return Ok(None)
        }
        T::decode(d).map(Some)
    }
}

// Collections are decoded from CBOR arrays / maps via the decoder's
// iterator helpers; the first element error aborts decoding via `?`.
#[cfg(feature = "std")]
impl<'b, T> Decode<'b> for std::collections::BinaryHeap<T>
where
    T: Decode<'b> + Ord
{
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        let iter: ArrayIter<T> = d.array_iter()?;
        let mut v = std::collections::BinaryHeap::new();
        for x in iter {
            v.push(x?)
        }
        Ok(v)
    }
}

#[cfg(feature = "std")]
impl<'b, T> Decode<'b> for std::collections::HashSet<T>
where
    T: Decode<'b> + Eq + std::hash::Hash
{
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        let iter: ArrayIter<T> = d.array_iter()?;
        let mut v = std::collections::HashSet::new();
        for x in iter {
            v.insert(x?);
        }
        Ok(v)
    }
}

#[cfg(feature = "std")]
impl<'b, T> Decode<'b> for std::collections::BTreeSet<T>
where
    T: Decode<'b> + Ord
{
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        let iter: ArrayIter<T> = d.array_iter()?;
        let mut v = std::collections::BTreeSet::new();
        for x in iter {
            v.insert(x?);
        }
        Ok(v)
    }
}

#[cfg(feature = "std")]
impl<'b, K, V> Decode<'b> for std::collections::HashMap<K, V>
where
    K: Decode<'b> + Eq + std::hash::Hash,
    V: Decode<'b>
{
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        let mut m = std::collections::HashMap::new();
        let iter: MapIter<K, V> = d.map_iter()?;
        for x in iter {
            let (k, v) = x?;
            m.insert(k, v);
        }
        Ok(m)
    }
}

#[cfg(feature = "std")]
impl<'b, K, V> Decode<'b> for std::collections::BTreeMap<K, V>
where
    K: Decode<'b> + Eq + Ord,
    V: Decode<'b>
{
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        let mut m = std::collections::BTreeMap::new();
        let iter: MapIter<K, V> = d.map_iter()?;
        for x in iter {
            let (k, v) = x?;
            m.insert(k, v);
        }
        Ok(m)
    }
}

// `PhantomData` and `()` carry no data, so nothing is read from the input.
impl<'b, T> Decode<'b> for core::marker::PhantomData<T> {
    fn decode(_: &mut Decoder<'b>) -> Result<Self, Error> {
        Ok(core::marker::PhantomData)
    }
}

impl<'b> Decode<'b> for () {
    fn decode(_: &mut Decoder<'b>) -> Result<Self, Error> {
        Ok(())
    }
}

// `usize`/`isize` are decoded via the fixed-width integer matching the
// target's pointer width.
#[cfg(target_pointer_width = "32")]
impl<'b> Decode<'b> for usize {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        d.u32().map(|n| n as usize)
    }
}

#[cfg(target_pointer_width = "64")]
impl<'b> Decode<'b> for usize {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        d.u64().map(|n| n as usize)
    }
}

#[cfg(target_pointer_width = "32")]
impl<'b> Decode<'b> for isize {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        d.i32().map(|n| n as isize)
    }
}

#[cfg(target_pointer_width = "64")]
impl<'b> Decode<'b> for isize {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        d.i64().map(|n| n as isize)
    }
}

// Primitive types forward to the decoder method of the same name
// (e.g. `u8::decode` calls `d.u8()`).
macro_rules! decode_basic {
    ($($t:ident)*) => {
        $(
            impl<'b> Decode<'b> for $t {
                fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
                    d.$t()
                }
            }
        )*
    }
}

decode_basic!(u8 i8 u16 i16 u32 i32 u64 i64 bool f32 f64 char);

// Decode the underlying integer and reject zero with the given message.
macro_rules! decode_nonzero {
    ($($t:ty, $msg:expr)*) => {
        $(
            impl<'b> Decode<'b> for $t {
                fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
                    Ok(<$t>::new(Decode::decode(d)?).ok_or(Error::Message($msg))?)
                }
            }
        )*
    }
}

decode_nonzero! {
    core::num::NonZeroU8, "unexpected 0 when decoding a `NonZeroU8`"
    core::num::NonZeroU16, "unexpected 0 when decoding a `NonZeroU16`"
    core::num::NonZeroU32, "unexpected 0 when decoding a `NonZeroU32`"
    core::num::NonZeroU64, "unexpected 0 when decoding a `NonZeroU64`"
    core::num::NonZeroI8, "unexpected 0 when decoding a `NonZeroI8`"
    core::num::NonZeroI16, "unexpected 0 when decoding a `NonZeroI16`"
    core::num::NonZeroI32, "unexpected 0 when decoding a `NonZeroI32`"
    core::num::NonZeroI64, "unexpected 0 when decoding a `NonZeroI64`"
}

// Sequence types that grow through a push-style method (`push`,
// `push_back`, ...), named per instantiation below.
#[cfg(feature = "std")]
macro_rules! decode_sequential {
    ($($t:ty, $push:ident)*) => {
        $(
            impl<'b, T: Decode<'b>> Decode<'b> for $t {
                fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
                    let iter: ArrayIter<T> = d.array_iter()?;
                    let mut v = <$t>::new();
                    for x in iter {
                        v.$push(x?)
                    }
                    Ok(v)
                }
            }
        )*
    }
}

#[cfg(feature = "std")]
decode_sequential! {
    Vec<T>, push
    std::collections::VecDeque<T>, push_back
    std::collections::LinkedList<T>, push_back
}

// Fixed-size arrays: require exactly `$n` elements; `T: Default` is
// needed to pre-fill the array before decoding into it.
macro_rules! decode_arrays {
    ($($n:expr)*) => {
        $(
            impl<'b, T: Decode<'b> + Default> Decode<'b> for [T; $n] {
                fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
                    let iter: ArrayIter<T> = d.array_iter()?;
                    let mut a: [T; $n] = Default::default();
                    let mut i = 0;
                    for x in iter {
                        if i >= a.len() {
                            let msg = concat!("array has more than ", $n, " elements");
                            return Err(Error::Message(msg))
                        }
                        a[i] = x?;
                        i += 1;
                    }
                    if i < a.len() {
                        let msg = concat!("array has less than ", $n, " elements");
                        return Err(Error::Message(msg))
                    }
                    Ok(a)
                }
            }
        )*
    }
}

decode_arrays!(0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16);

// Tuples are encoded as CBOR arrays of exactly `$len` elements.
macro_rules! decode_tuples {
    ($( $len:expr => { $($T:ident)+ } )+) => {
        $(
            impl<'b, $($T: Decode<'b>),+> Decode<'b> for ($($T,)+) {
                fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
                    let n = d.array()?;
                    if n != Some($len) {
                        return Err(Error::Message(concat!("invalid ", $len, "-tuple length")))
                    }
                    Ok(($($T::decode(d)?,)+))
                }
            }
        )+
    }
}

decode_tuples! {
    1 => { A }
    2 => { A B }
    3 => { A B C }
    4 => { A B C D }
    5 => { A B C D E }
    6 => { A B C D E F }
    7 => { A B C D E F G }
    8 => { A B C D E F G H }
    9 => { A B C D E F G H I }
    10 => { A B C D E F G H I J }
    11 => { A B C D E F G H I J K }
    12 => { A B C D E F G H I J K L }
    13 => { A B C D E F G H I J K L M }
    14 => { A B C D E F G H I J K L M N }
    15 => { A B C D E F G H I J K L M N O }
    16 => { A B C D E F G H I J K L M N O P }
}

// Decode a struct-like value encoded as an array (definite-length, or
// `Break`-terminated) of positional fields. Positions not listed are
// skipped; a listed position that never appears yields
// `Error::MissingValue`.
macro_rules! decode_fields {
    ($d:ident | $($n:literal $x:ident => $t:ty ; $msg:literal)*) => {
        $(let mut $x = None;)*
        match $d.array()? {
            Some(n) => for i in 0 .. n {
                match i {
                    $($n => $x = Some(Decode::decode($d)?),)*
                    _ => $d.skip()?
                }
            }
            None => {
                let mut i = 0;
                while $d.datatype()? != crate::data::Type::Break {
                    match i {
                        $($n => $x = Some(Decode::decode($d)?),)*
                        _ => $d.skip()?
                    }
                    i += 1
                }
                // Consume the `Break` item itself.
                $d.skip()?
            }
        }
        $(let $x = if let Some(x) = $x {
            x
        } else {
            return Err(Error::MissingValue($n, $msg))
        };)*
    }
}

// Durations are `[secs, nanos]` positional fields.
impl<'b> Decode<'b> for core::time::Duration {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        decode_fields! {
            d |
            0 secs => u64 ; "Duration::secs"
            1 nanos => u32 ; "Duration::nanos"
        }
        Ok(core::time::Duration::new(secs, nanos))
    }
}

// Enums are encoded as a 2-element array: `[variant index, payload]`.
#[cfg(feature = "std")]
impl<'b> Decode<'b> for std::net::IpAddr {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        if Some(2) != d.array()? {
            return Err(Error::Message("expected enum (2-element array)"))
        }
        match d.u32()? {
            0 => Ok(std::net::Ipv4Addr::decode(d)?.into()),
            1 => Ok(std::net::Ipv6Addr::decode(d)?.into()),
            n => Err(Error::UnknownVariant(n))
        }
    }
}

#[cfg(feature = "std")]
impl<'b> Decode<'b> for std::net::Ipv4Addr {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        let octets: [u8; 4] = Decode::decode(d)?;
        Ok(octets.into())
    }
}

#[cfg(feature = "std")]
impl<'b> Decode<'b> for std::net::Ipv6Addr {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        let octets: [u8; 16] = Decode::decode(d)?;
        Ok(octets.into())
    }
}

#[cfg(feature = "std")]
impl<'b> Decode<'b> for std::net::SocketAddr {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        if Some(2) != d.array()? {
            return Err(Error::Message("expected enum (2-element array)"))
        }
        match d.u32()? {
            0 => Ok(std::net::SocketAddrV4::decode(d)?.into()),
            1 => Ok(std::net::SocketAddrV6::decode(d)?.into()),
            n => Err(Error::UnknownVariant(n))
        }
    }
}

#[cfg(feature = "std")]
impl<'b> Decode<'b> for std::net::SocketAddrV4 {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        decode_fields! {
            d |
            0 ip => std::net::Ipv4Addr ; "SocketAddrV4::ip"
            1 port => u16 ; "SocketAddrV4::port"
        }
        Ok(std::net::SocketAddrV4::new(ip, port))
    }
}

// NOTE(review): only ip and port are part of the encoding — flowinfo and
// scope_id are set to 0 on decode.
#[cfg(feature = "std")]
impl<'b> Decode<'b> for std::net::SocketAddrV6 {
    fn decode(d: &mut Decoder<'b>) -> Result<Self, Error> {
        decode_fields! {
            d |
            0 ip => std::net::Ipv6Addr ; "SocketAddrV6::ip"
            1 port => u16 ; "SocketAddrV6::port"
        }
        Ok(std::net::SocketAddrV6::new(ip, port, 0, 0))
    }
}
// NOTE(review): AutoRust-generated Azure Storage management client
// (see the `#![doc]` attribute below). Code is token-identical to the
// generator output; comments only. Each operation follows the same
// pattern: build URL, attach bearer token if configured, add
// `api-version` query, execute, then deserialize on the expected status
// or fail with `UnexpectedResponse`.
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use crate::models::*;
use reqwest::StatusCode;
use snafu::{ResultExt, Snafu};
pub mod operations {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    // GET {base}/providers/Microsoft.Storage/operations
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!("{}/providers/Microsoft.Storage/operations", &operation_config.base_path,);
        let mut req_builder = client.get(uri_str);
        // Bearer auth is optional: only attached when a credential is configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: OperationListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                // NOTE(review): `body: body` could use field-init shorthand; left as generated.
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                list::UnexpectedResponse { status_code, body: body }.fail()
            }
        }
    }
    // Per-operation error namespace, one snafu variant per failure point.
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
pub mod skus {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    // GET {base}/subscriptions/{id}/providers/Microsoft.Storage/skus
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
    ) -> std::result::Result<StorageSkuListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Storage/skus",
            &operation_config.base_path, subscription_id
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: StorageSkuListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                list::UnexpectedResponse { status_code, body: body }.fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
pub mod storage_accounts {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    // (signature continues on the next source chunk)
    pub async fn
    // POST .../checkNameAvailability — checks a storage account name.
    // (the `pub async fn` tokens precede this chunk)
    check_name_availability(
        operation_config: &crate::OperationConfig,
        account_name: &StorageAccountCheckNameAvailabilityParameters,
        subscription_id: &str,
    ) -> std::result::Result<CheckNameAvailabilityResult, check_name_availability::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Storage/checkNameAvailability",
            &operation_config.base_path, subscription_id
        );
        let mut req_builder = client.post(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(check_name_availability::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(account_name);
        let req = req_builder.build().context(check_name_availability::BuildRequestError)?;
        let rsp = client.execute(req).await.context(check_name_availability::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(check_name_availability::ResponseBytesError)?;
                let rsp_value: CheckNameAvailabilityResult =
                    serde_json::from_slice(&body).context(check_name_availability::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(check_name_availability::ResponseBytesError)?;
                check_name_availability::UnexpectedResponse { status_code, body: body }.fail()
            }
        }
    }
    pub mod check_name_availability {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    // GET .../storageAccounts/{name} — optional `$expand` query parameter.
    pub async fn get_properties(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
        expand: Option<&str>,
    ) -> std::result::Result<StorageAccount, get_properties::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get_properties::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        if let Some(expand) = expand {
            req_builder = req_builder.query(&[("$expand", expand)]);
        }
        let req = req_builder.build().context(get_properties::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get_properties::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get_properties::ResponseBytesError)?;
                let rsp_value: StorageAccount = serde_json::from_slice(&body).context(get_properties::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get_properties::ResponseBytesError)?;
                get_properties::UnexpectedResponse { status_code, body: body }.fail()
            }
        }
    }
    pub mod get_properties {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    // PUT .../storageAccounts/{name} — long-running create; 202 means
    // creation is still in progress (no body), 200 carries the account.
    pub async fn create(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        parameters: &StorageAccountCreateParameters,
        subscription_id: &str,
    ) -> std::result::Result<create::Response, create::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(create::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?;
                let rsp_value: StorageAccount = serde_json::from_slice(&body).context(create::DeserializeError { body })?;
                Ok(create::Response::Ok200(rsp_value))
            }
            StatusCode::ACCEPTED => Ok(create::Response::Accepted202),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create::ResponseBytesError)?;
                create::UnexpectedResponse { status_code, body: body }.fail()
            }
        }
    }
    pub mod create {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200(StorageAccount),
            Accepted202,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    // PATCH .../storageAccounts/{name} — partial update of account properties.
    pub async fn update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        parameters: &StorageAccountUpdateParameters,
        subscription_id: &str,
    ) -> std::result::Result<StorageAccount, update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name
        );
        let mut req_builder = client.patch(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(parameters);
        let req = req_builder.build().context(update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                let rsp_value: StorageAccount = serde_json::from_slice(&body).context(update::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(update::ResponseBytesError)?;
                update::UnexpectedResponse { status_code, body: body }.fail()
            }
        }
    }
    pub mod update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    // DELETE .../storageAccounts/{name} — both 200 and 204 are success;
    // neither carries a body.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                delete::UnexpectedResponse { status_code, body: body }.fail()
            }
        }
    }
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    // GET {base}/subscriptions/{id}/providers/Microsoft.Storage/storageAccounts
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
    ) -> std::result::Result<StorageAccountListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Storage/storageAccounts",
            &operation_config.base_path, subscription_id
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: StorageAccountListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                list::UnexpectedResponse { status_code, body: body }.fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    // GET .../resourceGroups/{rg}/.../storageAccounts
    // (signature continues on the next source chunk)
    pub async fn list_by_resource_group( operation_config:
&crate::OperationConfig, resource_group_name: &str, subscription_id: &str, ) -> std::result::Result<StorageAccountListResult, list_by_resource_group::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts", &operation_config.base_path, subscription_id, resource_group_name ); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(list_by_resource_group::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = req_builder.build().context(list_by_resource_group::BuildRequestError)?; let rsp = client.execute(req).await.context(list_by_resource_group::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?; let rsp_value: StorageAccountListResult = serde_json::from_slice(&body).context(list_by_resource_group::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(list_by_resource_group::ResponseBytesError)?; list_by_resource_group::UnexpectedResponse { status_code, body: body }.fail() } } } pub mod list_by_resource_group { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes }, BuildRequestError { source: reqwest::Error }, ExecuteRequestError { source: reqwest::Error }, ResponseBytesError { source: reqwest::Error }, DeserializeError { source: serde_json::Error, body: bytes::Bytes }, GetTokenError { source: azure_core::errors::AzureError }, } } pub async fn list_keys( 
operation_config: &crate::OperationConfig, resource_group_name: &str, account_name: &str, subscription_id: &str, expand: Option<&str>, ) -> std::result::Result<StorageAccountListKeysResult, list_keys::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/listKeys", &operation_config.base_path, subscription_id, resource_group_name, account_name ); let mut req_builder = client.post(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(list_keys::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); if let Some(expand) = expand { req_builder = req_builder.query(&[("$expand", expand)]); } req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0); let req = req_builder.build().context(list_keys::BuildRequestError)?; let rsp = client.execute(req).await.context(list_keys::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(list_keys::ResponseBytesError)?; let rsp_value: StorageAccountListKeysResult = serde_json::from_slice(&body).context(list_keys::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(list_keys::ResponseBytesError)?; list_keys::UnexpectedResponse { status_code, body: body }.fail() } } } pub mod list_keys { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes }, BuildRequestError { source: reqwest::Error }, ExecuteRequestError { source: reqwest::Error }, ResponseBytesError { source: reqwest::Error }, DeserializeError 
{ source: serde_json::Error, body: bytes::Bytes }, GetTokenError { source: azure_core::errors::AzureError }, } } pub async fn regenerate_key( operation_config: &crate::OperationConfig, resource_group_name: &str, account_name: &str, regenerate_key: &StorageAccountRegenerateKeyParameters, subscription_id: &str, ) -> std::result::Result<StorageAccountListKeysResult, regenerate_key::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/regenerateKey", &operation_config.base_path, subscription_id, resource_group_name, account_name ); let mut req_builder = client.post(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(regenerate_key::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.json(regenerate_key); let req = req_builder.build().context(regenerate_key::BuildRequestError)?; let rsp = client.execute(req).await.context(regenerate_key::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(regenerate_key::ResponseBytesError)?; let rsp_value: StorageAccountListKeysResult = serde_json::from_slice(&body).context(regenerate_key::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(regenerate_key::ResponseBytesError)?; regenerate_key::UnexpectedResponse { status_code, body: body }.fail() } } } pub mod regenerate_key { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes }, BuildRequestError { source: reqwest::Error 
}, ExecuteRequestError { source: reqwest::Error }, ResponseBytesError { source: reqwest::Error }, DeserializeError { source: serde_json::Error, body: bytes::Bytes }, GetTokenError { source: azure_core::errors::AzureError }, } } pub async fn list_account_sas( operation_config: &crate::OperationConfig, resource_group_name: &str, account_name: &str, parameters: &AccountSasParameters, subscription_id: &str, ) -> std::result::Result<ListAccountSasResponse, list_account_sas::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/ListAccountSas", &operation_config.base_path, subscription_id, resource_group_name, account_name ); let mut req_builder = client.post(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(list_account_sas::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.json(parameters); let req = req_builder.build().context(list_account_sas::BuildRequestError)?; let rsp = client.execute(req).await.context(list_account_sas::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(list_account_sas::ResponseBytesError)?; let rsp_value: ListAccountSasResponse = serde_json::from_slice(&body).context(list_account_sas::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(list_account_sas::ResponseBytesError)?; list_account_sas::UnexpectedResponse { status_code, body: body }.fail() } } } pub mod list_account_sas { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { 
// Continuation: remaining variants of `list_account_sas::Error`.
UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
BuildRequestError { source: reqwest::Error },
ExecuteRequestError { source: reqwest::Error },
ResponseBytesError { source: reqwest::Error },
DeserializeError { source: serde_json::Error, body: bytes::Bytes },
GetTokenError { source: azure_core::errors::AzureError },
} }
/// POST `.../storageAccounts/{account_name}/ListServiceSas`.
/// Lists the service-level SAS credentials of the storage account.
pub async fn list_service_sas(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    account_name: &str,
    parameters: &ServiceSasParameters,
    subscription_id: &str,
) -> std::result::Result<ListServiceSasResponse, list_service_sas::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/ListServiceSas",
        &operation_config.base_path, subscription_id, resource_group_name, account_name
    );
    let mut req_builder = client.post(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(list_service_sas::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    req_builder = req_builder.json(parameters);
    let req = req_builder.build().context(list_service_sas::BuildRequestError)?;
    let rsp = client.execute(req).await.context(list_service_sas::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_service_sas::ResponseBytesError)?;
            let rsp_value: ListServiceSasResponse =
                serde_json::from_slice(&body).context(list_service_sas::DeserializeError { body })?;
            Ok(rsp_value)
        }
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(list_service_sas::ResponseBytesError)?;
            // fix: field-init shorthand (was `body: body`).
            list_service_sas::UnexpectedResponse { status_code, body }.fail()
        }
    }
}
/// Error types for [`list_service_sas`].
pub mod list_service_sas {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
        BuildRequestError { source: reqwest::Error },
        ExecuteRequestError { source: reqwest::Error },
        ResponseBytesError { source: reqwest::Error },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
}
/// POST `.../storageAccounts/{account_name}/failover`.
/// Initiates an account failover; long-running: 200 or 202 are both success.
pub async fn failover(
    operation_config: &crate::OperationConfig,
    resource_group_name: &str,
    account_name: &str,
    subscription_id: &str,
) -> std::result::Result<failover::Response, failover::Error> {
    let client = &operation_config.client;
    let uri_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/failover",
        &operation_config.base_path, subscription_id, resource_group_name, account_name
    );
    let mut req_builder = client.post(uri_str);
    if let Some(token_credential) = &operation_config.token_credential {
        let token_response = token_credential
            .get_token(&operation_config.token_credential_resource)
            .await
            .context(failover::GetTokenError)?;
        req_builder = req_builder.bearer_auth(token_response.token.secret());
    }
    req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
    // Body-less POST: an explicit zero Content-Length is required by the service.
    req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0);
    let req = req_builder.build().context(failover::BuildRequestError)?;
    let rsp = client.execute(req).await.context(failover::ExecuteRequestError)?;
    match rsp.status() {
        StatusCode::OK => Ok(failover::Response::Ok200),
        StatusCode::ACCEPTED => Ok(failover::Response::Accepted202),
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(failover::ResponseBytesError)?;
            // fix: field-init shorthand (was `body: body`).
            failover::UnexpectedResponse { status_code, body }.fail()
        }
    }
}
/// Response/error types for [`failover`] (error enum continues on the next chunk line).
pub mod failover {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
    }
    #[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))] pub enum Error { UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes }, BuildRequestError { source: reqwest::Error }, ExecuteRequestError { source: reqwest::Error }, ResponseBytesError { source: reqwest::Error }, DeserializeError { source: serde_json::Error, body: bytes::Bytes }, GetTokenError { source: azure_core::errors::AzureError }, } } pub async fn restore_blob_ranges( operation_config: &crate::OperationConfig, resource_group_name: &str, account_name: &str, subscription_id: &str, parameters: &BlobRestoreParameters, ) -> std::result::Result<restore_blob_ranges::Response, restore_blob_ranges::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/restoreBlobRanges", &operation_config.base_path, subscription_id, resource_group_name, account_name ); let mut req_builder = client.post(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(restore_blob_ranges::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.json(parameters); let req = req_builder.build().context(restore_blob_ranges::BuildRequestError)?; let rsp = client.execute(req).await.context(restore_blob_ranges::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(restore_blob_ranges::ResponseBytesError)?; let rsp_value: BlobRestoreStatus = serde_json::from_slice(&body).context(restore_blob_ranges::DeserializeError { body })?; Ok(restore_blob_ranges::Response::Ok200(rsp_value)) } StatusCode::ACCEPTED => { let body: bytes::Bytes = rsp.bytes().await.context(restore_blob_ranges::ResponseBytesError)?; let rsp_value: 
BlobRestoreStatus = serde_json::from_slice(&body).context(restore_blob_ranges::DeserializeError { body })?; Ok(restore_blob_ranges::Response::Accepted202(rsp_value)) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(restore_blob_ranges::ResponseBytesError)?; restore_blob_ranges::UnexpectedResponse { status_code, body: body }.fail() } } } pub mod restore_blob_ranges { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug)] pub enum Response { Ok200(BlobRestoreStatus), Accepted202(BlobRestoreStatus), } #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes }, BuildRequestError { source: reqwest::Error }, ExecuteRequestError { source: reqwest::Error }, ResponseBytesError { source: reqwest::Error }, DeserializeError { source: serde_json::Error, body: bytes::Bytes }, GetTokenError { source: azure_core::errors::AzureError }, } } pub async fn revoke_user_delegation_keys( operation_config: &crate::OperationConfig, resource_group_name: &str, account_name: &str, subscription_id: &str, ) -> std::result::Result<(), revoke_user_delegation_keys::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/revokeUserDelegationKeys", &operation_config.base_path, subscription_id, resource_group_name, account_name ); let mut req_builder = client.post(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(revoke_user_delegation_keys::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.header(reqwest::header::CONTENT_LENGTH, 0); let req = 
// Continuation of `revoke_user_delegation_keys` (opened on the previous chunk line).
req_builder.build().context(revoke_user_delegation_keys::BuildRequestError)?;
    let rsp = client
        .execute(req)
        .await
        .context(revoke_user_delegation_keys::ExecuteRequestError)?;
    match rsp.status() {
        // Success carries no payload.
        StatusCode::OK => Ok(()),
        status_code => {
            let body: bytes::Bytes = rsp.bytes().await.context(revoke_user_delegation_keys::ResponseBytesError)?;
            // fix: field-init shorthand (was `body: body`).
            revoke_user_delegation_keys::UnexpectedResponse { status_code, body }.fail()
        }
    }
}
/// Error types for [`revoke_user_delegation_keys`].
pub mod revoke_user_delegation_keys {
    use crate::{models, models::*};
    use reqwest::StatusCode;
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
        BuildRequestError { source: reqwest::Error },
        ExecuteRequestError { source: reqwest::Error },
        ResponseBytesError { source: reqwest::Error },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes },
        GetTokenError { source: azure_core::errors::AzureError },
    }
} }
/// Operations on `Microsoft.Storage` usage quotas.
pub mod usages {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// GET `.../providers/Microsoft.Storage/locations/{location}/usages`.
    /// Lists current storage usage counts and limits for the location.
    pub async fn list_by_location(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        location: &str,
    ) -> std::result::Result<UsageListResult, list_by_location::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.Storage/locations/{}/usages",
            &operation_config.base_path, subscription_id, location
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(list_by_location::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list_by_location::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list_by_location::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_location::ResponseBytesError)?;
                let rsp_value: UsageListResult =
                    serde_json::from_slice(&body).context(list_by_location::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list_by_location::ResponseBytesError)?;
                // fix: field-init shorthand (was `body: body`).
                list_by_location::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`list_by_location`].
    pub mod list_by_location {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
/// Operations on storage account management policies.
pub mod management_policies {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// GET `.../storageAccounts/{account_name}/managementPolicies/{management_policy_name}`.
    /// Body continues on the next chunk line.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
        management_policy_name: &str,
    ) -> std::result::Result<ManagementPolicy, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/managementPolicies/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, management_policy_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder =
// Continuation of `management_policies::get` (opened on the previous chunk line).
req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ManagementPolicy =
                    serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                // fix: field-init shorthand (was `body: body`).
                get::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// PUT `.../storageAccounts/{account_name}/managementPolicies/{management_policy_name}`.
    /// Creates or replaces the management policy.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
        management_policy_name: &str,
        properties: &ManagementPolicy,
    ) -> std::result::Result<ManagementPolicy, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/managementPolicies/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, management_policy_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(properties);
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ManagementPolicy =
                    serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                // fix: field-init shorthand (was `body: body`).
                create_or_update::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`create_or_update`].
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// DELETE `.../storageAccounts/{account_name}/managementPolicies/{management_policy_name}`.
    /// 200 and 204 are both treated as success.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
        management_policy_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/managementPolicies/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, management_policy_name
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                // fix: field-init shorthand (was `body: body`).
                delete::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Response/error types for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
/// Operations on storage account private endpoint connections.
pub mod private_endpoint_connections {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// GET `.../storageAccounts/{account_name}/privateEndpointConnections`.
    /// Body continues on the next chunk line.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<PrivateEndpointConnectionListResult, list::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/privateEndpointConnections",
            &operation_config.base_path, subscription_id, resource_group_name, account_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
// Continuation of `private_endpoint_connections::list` (opened on the previous chunk line).
.context(list::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(list::BuildRequestError)?;
        let rsp = client.execute(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                let rsp_value: PrivateEndpointConnectionListResult =
                    serde_json::from_slice(&body).context(list::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?;
                // fix: field-init shorthand (was `body: body`).
                list::UnexpectedResponse { status_code, body }.fail()
            }
        }
    }
    /// Error types for [`list`].
    pub mod list {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes },
            BuildRequestError { source: reqwest::Error },
            ExecuteRequestError { source: reqwest::Error },
            ResponseBytesError { source: reqwest::Error },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
    /// GET `.../privateEndpointConnections/{private_endpoint_connection_name}`.
    /// Non-2xx responses are deserialized into the service's `ErrorResponse`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
        private_endpoint_connection_name: &str,
    ) -> std::result::Result<PrivateEndpointConnection, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/privateEndpointConnections/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, private_endpoint_connection_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: PrivateEndpointConnection =
                    serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                get::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`get`].
    pub mod get {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// PUT `.../privateEndpointConnections/{private_endpoint_connection_name}`.
    /// Approves or rejects the connection state.
    pub async fn put(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
        private_endpoint_connection_name: &str,
        properties: &PrivateEndpointConnection,
    ) -> std::result::Result<PrivateEndpointConnection, put::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/privateEndpointConnections/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, private_endpoint_connection_name
        );
        let mut req_builder = client.put(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(put::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(properties);
        let req = req_builder.build().context(put::BuildRequestError)?;
        let rsp = client.execute(req).await.context(put::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(put::ResponseBytesError)?;
                let rsp_value: PrivateEndpointConnection =
                    serde_json::from_slice(&body).context(put::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(put::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(put::DeserializeError { body })?;
                put::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Error types for [`put`].
    pub mod put {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    /// DELETE `.../privateEndpointConnections/{private_endpoint_connection_name}`.
    /// 200 and 204 are both treated as success.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
        private_endpoint_connection_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/privateEndpointConnections/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, private_endpoint_connection_name
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    /// Response/error types for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
/// Operations on storage account private link resources.
pub mod private_link_resources {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    /// GET `.../storageAccounts/{account_name}/privateLinkResources`.
    /// Signature/body continue on the next chunk line.
    pub async fn list_by_storage_account(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
    ) -> std::result::Result<PrivateLinkResourceListResult,
list_by_storage_account::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/privateLinkResources", &operation_config.base_path, subscription_id, resource_group_name, account_name ); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(list_by_storage_account::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = req_builder.build().context(list_by_storage_account::BuildRequestError)?; let rsp = client.execute(req).await.context(list_by_storage_account::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(list_by_storage_account::ResponseBytesError)?; let rsp_value: PrivateLinkResourceListResult = serde_json::from_slice(&body).context(list_by_storage_account::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(list_by_storage_account::ResponseBytesError)?; list_by_storage_account::UnexpectedResponse { status_code, body: body }.fail() } } } pub mod list_by_storage_account { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes }, BuildRequestError { source: reqwest::Error }, ExecuteRequestError { source: reqwest::Error }, ResponseBytesError { source: reqwest::Error }, DeserializeError { source: serde_json::Error, body: bytes::Bytes }, GetTokenError { source: azure_core::errors::AzureError }, } } } pub mod object_replication_policies { use crate::models::*; use reqwest::StatusCode; use 
snafu::{ResultExt, Snafu}; pub async fn list( operation_config: &crate::OperationConfig, resource_group_name: &str, account_name: &str, subscription_id: &str, ) -> std::result::Result<ObjectReplicationPolicies, list::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/objectReplicationPolicies", &operation_config.base_path, subscription_id, resource_group_name, account_name ); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(list::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = req_builder.build().context(list::BuildRequestError)?; let rsp = client.execute(req).await.context(list::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?; let rsp_value: ObjectReplicationPolicies = serde_json::from_slice(&body).context(list::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(list::DeserializeError { body })?; list::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod list { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, 
}, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn get( operation_config: &crate::OperationConfig, resource_group_name: &str, account_name: &str, subscription_id: &str, object_replication_policy_id: &str, ) -> std::result::Result<ObjectReplicationPolicy, get::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/objectReplicationPolicies/{}", &operation_config.base_path, subscription_id, resource_group_name, account_name, object_replication_policy_id ); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(get::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = req_builder.build().context(get::BuildRequestError)?; let rsp = client.execute(req).await.context(get::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?; let rsp_value: ObjectReplicationPolicy = serde_json::from_slice(&body).context(get::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?; get::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod get { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { 
// Continuation: remaining variants of `object_replication_policies::get::Error`
// (enum opened on the previous chunk line).
source: reqwest::Error, },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    // PUT `.../objectReplicationPolicies/{object_replication_policy_id}`.
    // Creates or replaces the policy; non-2xx responses are deserialized into `ErrorResponse`.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
        object_replication_policy_id: &str,
        properties: &ObjectReplicationPolicy,
    ) -> std::result::Result<ObjectReplicationPolicy, create_or_update::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/objectReplicationPolicies/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, object_replication_policy_id
        );
        let mut req_builder = client.put(uri_str);
        // Bearer auth is attached only when a token credential was configured.
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(create_or_update::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        req_builder = req_builder.json(properties);
        let req = req_builder.build().context(create_or_update::BuildRequestError)?;
        let rsp = client.execute(req).await.context(create_or_update::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ObjectReplicationPolicy =
                    serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(create_or_update::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(create_or_update::DeserializeError { body })?;
                create_or_update::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    // Error types for `create_or_update`.
    pub mod create_or_update {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
    // DELETE `.../objectReplicationPolicies/{object_replication_policy_id}`.
    // 200 and 204 are both treated as success.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
        object_replication_policy_id: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/objectReplicationPolicies/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, object_replication_policy_id
        );
        let mut req_builder = client.delete(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(delete::BuildRequestError)?;
        let rsp = client.execute(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => Ok(delete::Response::Ok200),
            StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let body: bytes::Bytes = rsp.bytes().await.context(delete::ResponseBytesError)?;
                let rsp_value: ErrorResponse =
                    serde_json::from_slice(&body).context(delete::DeserializeError { body })?;
                delete::DefaultResponse {
                    status_code,
                    value: rsp_value,
                }
                .fail()
            }
        }
    }
    // Response/error types for `delete`.
    pub mod delete {
        use crate::{models, models::*};
        use reqwest::StatusCode;
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse {
                status_code: StatusCode,
                value: models::ErrorResponse,
            },
            BuildRequestError {
                source: reqwest::Error,
            },
            ExecuteRequestError {
                source: reqwest::Error,
            },
            ResponseBytesError {
                source: reqwest::Error,
            },
            DeserializeError {
                source: serde_json::Error,
                body: bytes::Bytes,
            },
            GetTokenError {
                source: azure_core::errors::AzureError,
            },
        }
    }
}
// Operations on storage account encryption scopes.
// NOTE(review): `get` below is cut mid-statement — its definition continues past this chunk.
pub mod encryption_scopes {
    use crate::models::*;
    use reqwest::StatusCode;
    use snafu::{ResultExt, Snafu};
    // GET `.../storageAccounts/{account_name}/encryptionScopes/{encryption_scope_name}`.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        resource_group_name: &str,
        account_name: &str,
        subscription_id: &str,
        encryption_scope_name: &str,
    ) -> std::result::Result<EncryptionScope, get::Error> {
        let client = &operation_config.client;
        let uri_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/encryptionScopes/{}",
            &operation_config.base_path, subscription_id, resource_group_name, account_name, encryption_scope_name
        );
        let mut req_builder = client.get(uri_str);
        if let Some(token_credential) = &operation_config.token_credential {
            let token_response = token_credential
                .get_token(&operation_config.token_credential_resource)
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.bearer_auth(token_response.token.secret());
        }
        req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]);
        let req = req_builder.build().context(get::BuildRequestError)?;
        let rsp = client.execute(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            StatusCode::OK => {
                let body: bytes::Bytes = rsp.bytes().await.context(get::ResponseBytesError)?;
                let rsp_value: EncryptionScope =
                    serde_json::from_slice(&body).context(get::DeserializeError { body })?;
                Ok(rsp_value)
            }
            status_code => {
                let body: bytes::Bytes =
rsp.bytes().await.context(get::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(get::DeserializeError { body })?; get::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod get { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn put( operation_config: &crate::OperationConfig, resource_group_name: &str, account_name: &str, subscription_id: &str, encryption_scope_name: &str, encryption_scope: &EncryptionScope, ) -> std::result::Result<put::Response, put::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/encryptionScopes/{}", &operation_config.base_path, subscription_id, resource_group_name, account_name, encryption_scope_name ); let mut req_builder = client.put(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await .context(put::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.json(encryption_scope); let req = req_builder.build().context(put::BuildRequestError)?; let rsp = client.execute(req).await.context(put::ExecuteRequestError)?; match rsp.status() { StatusCode::CREATED => { let body: bytes::Bytes = rsp.bytes().await.context(put::ResponseBytesError)?; let rsp_value: 
EncryptionScope = serde_json::from_slice(&body).context(put::DeserializeError { body })?; Ok(put::Response::Created201(rsp_value)) } StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(put::ResponseBytesError)?; let rsp_value: EncryptionScope = serde_json::from_slice(&body).context(put::DeserializeError { body })?; Ok(put::Response::Ok200(rsp_value)) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(put::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(put::DeserializeError { body })?; put::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod put { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug)] pub enum Response { Created201(EncryptionScope), Ok200(EncryptionScope), } #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn patch( operation_config: &crate::OperationConfig, resource_group_name: &str, account_name: &str, subscription_id: &str, encryption_scope_name: &str, encryption_scope: &EncryptionScope, ) -> std::result::Result<EncryptionScope, patch::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/encryptionScopes/{}", &operation_config.base_path, subscription_id, resource_group_name, account_name, encryption_scope_name ); let mut req_builder = client.patch(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = token_credential .get_token(&operation_config.token_credential_resource) .await 
.context(patch::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); req_builder = req_builder.json(encryption_scope); let req = req_builder.build().context(patch::BuildRequestError)?; let rsp = client.execute(req).await.context(patch::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(patch::ResponseBytesError)?; let rsp_value: EncryptionScope = serde_json::from_slice(&body).context(patch::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(patch::ResponseBytesError)?; let rsp_value: ErrorResponse = serde_json::from_slice(&body).context(patch::DeserializeError { body })?; patch::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod patch { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: StatusCode, value: models::ErrorResponse, }, BuildRequestError { source: reqwest::Error, }, ExecuteRequestError { source: reqwest::Error, }, ResponseBytesError { source: reqwest::Error, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn list( operation_config: &crate::OperationConfig, resource_group_name: &str, account_name: &str, subscription_id: &str, ) -> std::result::Result<EncryptionScopeListResult, list::Error> { let client = &operation_config.client; let uri_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}/encryptionScopes", &operation_config.base_path, subscription_id, resource_group_name, account_name ); let mut req_builder = client.get(uri_str); if let Some(token_credential) = &operation_config.token_credential { let token_response = 
token_credential .get_token(&operation_config.token_credential_resource) .await .context(list::GetTokenError)?; req_builder = req_builder.bearer_auth(token_response.token.secret()); } req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); let req = req_builder.build().context(list::BuildRequestError)?; let rsp = client.execute(req).await.context(list::ExecuteRequestError)?; match rsp.status() { StatusCode::OK => { let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?; let rsp_value: EncryptionScopeListResult = serde_json::from_slice(&body).context(list::DeserializeError { body })?; Ok(rsp_value) } status_code => { let body: bytes::Bytes = rsp.bytes().await.context(list::ResponseBytesError)?; list::UnexpectedResponse { status_code, body: body }.fail() } } } pub mod list { use crate::{models, models::*}; use reqwest::StatusCode; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { UnexpectedResponse { status_code: StatusCode, body: bytes::Bytes }, BuildRequestError { source: reqwest::Error }, ExecuteRequestError { source: reqwest::Error }, ResponseBytesError { source: reqwest::Error }, DeserializeError { source: serde_json::Error, body: bytes::Bytes }, GetTokenError { source: azure_core::errors::AzureError }, } } }