text
stringlengths
8
4.13M
use super::{Status, CPU};

impl<'a> CPU<'a> {
    /// Like `load_and_run`, but skips the register reset so tests can
    /// pre-seed registers, flags, and memory before execution begins at
    /// the address stored in the reset vector (0xFFFC).
    pub fn debug_load_and_run(&mut self, program: Vec<u8>) {
        self.load(program);
        self.program_counter = self.mem_read_u16(0xFFFC);
        self.run();
    }
}

// ---- Loads ----

#[test]
fn lda_immediate_load_data_accumulator() {
    let mut cpu = CPU::new();
    cpu.load_and_run(vec![0xa9, 0x05, 0x00]);
    assert_eq!(cpu.accumulator, 0x05);
    assert!(cpu.status.get() & Status::ZERO == 0b00);
    assert!(cpu.status.get() & Status::NEGATIV == 0);
}

#[test]
fn ldx_immediate_load_data_register_x() {
    let mut cpu = CPU::new();
    cpu.load_and_run(vec![0xa2, 0x05, 0x00]);
    assert_eq!(cpu.register_x, 0x05);
    assert!(cpu.status.get() & Status::ZERO == 0b00);
    assert!(cpu.status.get() & Status::NEGATIV == 0);
}

#[test]
fn ldy_immediate_load_data_register_y() {
    let mut cpu = CPU::new();
    cpu.load_and_run(vec![0xa0, 0x05, 0x00]);
    assert_eq!(cpu.register_y, 0x05);
    assert!(cpu.status.get() & Status::ZERO == 0b00);
    assert!(cpu.status.get() & Status::NEGATIV == 0);
}

#[test]
fn lda_zero_flag() {
    let mut cpu = CPU::new();
    cpu.load_and_run(vec![0xa9, 0x00, 0x00]);
    assert!(cpu.status.get() & Status::ZERO == 0b10);
}

// ---- Transfers and increments ----

#[test]
fn tax_move_a_to_x() {
    let mut cpu = CPU::new();
    cpu.load_and_run(vec![0xa9, 0x0a, 0xaa, 0x00]);
    assert_eq!(cpu.register_x, 10);
}

#[test]
fn tay_move_a_to_y() {
    let mut cpu = CPU::new();
    cpu.load_and_run(vec![0xa9, 0x0a, 0xa8, 0x00]);
    assert_eq!(cpu.register_y, 10);
}

#[test]
fn inx_increment_x() {
    let mut cpu = CPU::new();
    cpu.register_x = 0;
    cpu.load_and_run(vec![0xe8, 0x00]);
    assert_eq!(cpu.register_x, 1);
}

// 260 increments wrap an 8-bit register: 260 mod 256 = 4.
#[test]
fn inx_overflow() {
    let mut cpu = CPU::new();
    let mut program = vec![0xe8; 260];
    program.push(0x00);
    cpu.load_and_run(program);
    assert_eq!(cpu.register_x, 4)
}

#[test]
fn iny_overflow() {
    let mut cpu = CPU::new();
    let mut program = vec![0xc8; 260];
    program.push(0x00);
    cpu.load_and_run(program);
    assert_eq!(cpu.register_y, 4)
}

#[test]
fn registers_set_to_0_after_reset() {
    let mut cpu = CPU::new();
    cpu.accumulator = 5;
    cpu.register_x = 6;
    cpu.register_y = 7;
    cpu.program_counter = 8;
    cpu.load_and_run(vec![0x00]);
    assert_eq!(cpu.accumulator, 0);
    assert_eq!(cpu.register_x, 0);
    assert_eq!(cpu.register_y, 0);
    // Program is loaded at 0x8000; the single BRK advances PC by one.
    assert_eq!(cpu.program_counter, 0x8001);
}

#[test]
fn test_5_ops_working_together() {
    let mut cpu = CPU::new();
    cpu.load_and_run(vec![0xa9, 0xc0, 0xaa, 0xe8, 0x00]);
    assert_eq!(cpu.register_x, 0xc1)
}

// ---- ADC ----

#[test]
fn adc_basic() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xa9, 0x01, 0x69, 0x02, 0x00]);
    assert_eq!(cpu.accumulator, 3);
    cpu.reset();
    cpu.status.set(Status::CARRY);
    // With carry set, ADC adds one extra.
    cpu.debug_load_and_run(vec![0xa9, 0x01, 0x69, 0x02, 0x00]);
    assert_eq!(cpu.accumulator, 4);
}

#[test]
fn adc_overflow_and_carry_flag() {
    let mut cpu = CPU::new();
    // 0x7F + 1 crosses the signed boundary: negative + overflow.
    cpu.debug_load_and_run(vec![0xa9, 0x7F, 0x69, 0x01, 0x00]);
    assert_eq!(cpu.accumulator, 128);
    assert_eq!(cpu.status.get(), Status::NEGATIV | Status::OVERFLOW);
    cpu.reset();
    cpu.status.set(Status::CARRY);
    cpu.debug_load_and_run(vec![0xa9, 0xFF, 0x69, 0x01, 0x00]);
    assert_eq!(cpu.accumulator, 1);
    assert_eq!(cpu.status.get(), Status::CARRY)
}

#[test]
fn adc_overflow() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0xff;
    cpu.debug_load_and_run(vec![0x69, 0x01, 0x00]);
    assert_eq!(cpu.accumulator, 0);
    assert_eq!(cpu.status.get(), Status::ZERO | Status::CARRY);
}

// ---- Logic and shifts ----

#[test]
fn and_same_values() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xa9, 0x11, 0x29, 0x11, 0x00]);
    assert_eq!(cpu.accumulator, 0x11);
}

#[test]
fn and_different_values() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xa9, 0x11, 0x29, 0x01, 0x00]);
    assert_eq!(cpu.accumulator, 0x01);
}

#[test]
fn asl_number_in_accumulator() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xa9, 0x08, 0x0a, 0x00]);
    assert_eq!(cpu.accumulator, 0x10);
}

#[test]
fn asl_number_in_memory() {
    let mut cpu = CPU::new();
    cpu.mem_write(0x00, 0x08);
    cpu.debug_load_and_run(vec![0x06, 0x00, 0x00]);
    // BUGFIX: ASL $00 shifts the value *in memory*; the old assertion
    // checked `cpu.accumulator`, which this program never touches.
    assert_eq!(cpu.mem_read(0x00), 0x10);
}

#[test]
fn asl_carry_and_negative_flag() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xa9, 0xFF, 0x0a, 0x00]);
    assert_eq!(cpu.accumulator, 0xFE);
    assert_eq!(cpu.status.get(), Status::NEGATIV | Status::CARRY);
}

// ---- Flag set/clear instructions ----

#[test]
fn clc_clear_carry_flag() {
    let mut cpu = CPU::new();
    cpu.status.set(Status::CARRY);
    cpu.debug_load_and_run(vec![0x18, 0x00]);
    assert_eq!(cpu.status.get(), 0x00);
}

#[test]
fn sec_set_carry_flag() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0x38, 0x00]);
    assert_eq!(cpu.status.get(), Status::CARRY);
}

#[test]
fn cld_clear_decimal_flag() {
    let mut cpu = CPU::new();
    cpu.status.set(Status::DECIMAL_MODE);
    cpu.debug_load_and_run(vec![0xD8, 0x00]);
    assert_eq!(cpu.status.get(), 0x00);
}

#[test]
fn sed_set_decimal_flag() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xF8, 0x00]);
    assert_eq!(cpu.status.get(), Status::DECIMAL_MODE);
}

#[test]
fn cli_clear_interrupt_disable_flag() {
    let mut cpu = CPU::new();
    cpu.status.set(Status::INTERRUPT_DISABLE);
    cpu.debug_load_and_run(vec![0x58, 0x00]);
    assert_eq!(cpu.status.get(), 0x00);
}

#[test]
fn sei_set_interrupt_disable_flag() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0x78, 0x00]);
    assert_eq!(cpu.status.get(), Status::INTERRUPT_DISABLE);
}

#[test]
fn clv_clear_overflow_flag() {
    let mut cpu = CPU::new();
    cpu.status.set(Status::OVERFLOW);
    cpu.debug_load_and_run(vec![0xB8, 0x00]);
    assert_eq!(cpu.status.get(), 0x00);
}

// ---- Comparisons ----

#[test]
fn cmp_with_smaller_number() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xa9, 0x05, 0xc9, 0x04, 0x00]);
    assert_eq!(cpu.status.get(), Status::CARRY);
}

#[test]
fn cmp_with_bigger_number() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xa9, 0x05, 0xc9, 0x06, 0x00]);
    assert_eq!(cpu.status.get(), Status::NEGATIV);
}

#[test]
fn cmp_with_same_number() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xa9, 0x05, 0xc9, 0x05, 0x00]);
    assert_eq!(cpu.status.get(), Status::ZERO | Status::CARRY);
}

#[test]
fn cpx_with_bigger_number() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xa2, 0x05, 0xe0, 0x06, 0x00]);
    assert_eq!(cpu.status.get(), Status::NEGATIV);
}

#[test]
fn cpy_with_bigger_number() {
    let mut cpu = CPU::new();
    // BUGFIX: use LDY (0xa0) — the old program used LDX (0xa2), so CPY
    // compared against an untouched Y register instead of 0x05.
    cpu.debug_load_and_run(vec![0xa0, 0x05, 0xc0, 0x06, 0x00]);
    assert_eq!(cpu.status.get(), Status::NEGATIV);
}

// ---- Decrements / increments on memory and registers ----

#[test]
fn dec_decrement_value_in_memory() {
    let mut cpu = CPU::new();
    cpu.memory[0x02] = 5;
    cpu.debug_load_and_run(vec![0xc6, 0x02, 0x00]);
    assert_eq!(cpu.memory[0x02], 4);
    assert_eq!(cpu.status.get(), 0);
}

#[test]
fn dex_decrement_register_x() {
    let mut cpu = CPU::new();
    cpu.register_x = 1;
    cpu.debug_load_and_run(vec![0xca, 0x00]);
    assert_eq!(cpu.register_x, 0);
    assert_eq!(cpu.status.get(), Status::ZERO);
}

#[test]
fn dey_decrement_register_y() {
    let mut cpu = CPU::new();
    cpu.register_y = 1;
    cpu.debug_load_and_run(vec![0x88, 0x00]);
    assert_eq!(cpu.register_y, 0);
    assert_eq!(cpu.status.get(), Status::ZERO);
}

#[test]
fn eor_accumulator_with_value() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0x0f;
    cpu.debug_load_and_run(vec![0x49, 0xf0, 0x00]);
    assert_eq!(cpu.accumulator, 0xff);
    assert_eq!(cpu.status.get(), Status::NEGATIV);
}

#[test]
fn inc_increment_memory_with_overflow() {
    let mut cpu = CPU::new();
    cpu.memory[0x02] = 0xff;
    cpu.debug_load_and_run(vec![0xe6, 0x02, 0x00]);
    assert_eq!(cpu.memory[0x02], 0x00);
    assert_eq!(cpu.status.get(), Status::ZERO);
}

// Renamed from `lsr_shift_accumulator_left`: LSR shifts *right*.
#[test]
fn lsr_shift_accumulator_right() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0x03;
    cpu.debug_load_and_run(vec![0x4a, 0x00]);
    assert_eq!(cpu.accumulator, 0x01);
    assert_eq!(cpu.status.get(), Status::CARRY);
}

#[test]
fn nop_do_nothing() {
    let mut cpu = CPU::new();
    cpu.debug_load_and_run(vec![0xea, 0x00]);
    assert_eq!(cpu.accumulator, 0);
    assert_eq!(cpu.register_x, 0);
    assert_eq!(cpu.register_y, 0);
    assert_eq!(cpu.status.get(), 0);
    assert_eq!(cpu.program_counter, 0x8002);
}

#[test]
fn ora_accumulator_memory() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0x0f;
    cpu.debug_load_and_run(vec![0x09, 0xf0, 0x00]);
    assert_eq!(cpu.accumulator, 0xff);
}

// ---- Stack operations ----

#[test]
fn pha_push_value_to_stack() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0x0f;
    cpu.debug_load_and_run(vec![0x48, 0x00]);
    assert_eq!(cpu.memory[0x01ff], 0x0f);
}

#[test]
fn php_push_status_to_stack() {
    let mut cpu = CPU::new();
    cpu.status.set(Status::CARRY | Status::OVERFLOW);
    cpu.debug_load_and_run(vec![0x08, 0x00]);
    assert_eq!(cpu.memory[0x01ff], Status::CARRY | Status::OVERFLOW);
}

#[test]
fn pla_pop_value_from_stack() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0xf0;
    // PHA, LDA #$00, PLA: accumulator is restored from the stack.
    cpu.debug_load_and_run(vec![0x48, 0xa9, 0x00, 0x68, 0x00]);
    assert_eq!(cpu.accumulator, 0xf0);
    assert_eq!(cpu.status.get(), Status::NEGATIV);
}

#[test]
fn plp_pop_status_from_stack() {
    let mut cpu = CPU::new();
    cpu.push(Status::CARRY | Status::OVERFLOW);
    cpu.debug_load_and_run(vec![0x28, 0x00]);
    assert_eq!(cpu.status.get(), Status::CARRY | Status::OVERFLOW);
}

// ---- Rotates ----

#[test]
fn rol_accumulator() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0xf0;
    cpu.status.set(Status::CARRY);
    cpu.debug_load_and_run(vec![0x2a, 0x00]);
    assert_eq!(cpu.accumulator, 0xe1);
    assert_eq!(cpu.status.get(), Status::NEGATIV | Status::CARRY);
}

#[test]
fn rol_memory() {
    let mut cpu = CPU::new();
    cpu.memory[0x01] = 0xf0;
    cpu.status.set(Status::CARRY);
    cpu.accumulator = 0x00;
    cpu.debug_load_and_run(vec![0x26, 0x01, 0x00]);
    assert_eq!(cpu.memory[0x01], 0xe1);
    assert_eq!(
        cpu.status.get(),
        Status::NEGATIV | Status::ZERO | Status::CARRY
    );
}

#[test]
fn ror_accumulator() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0x0f;
    cpu.status.set(Status::CARRY);
    cpu.debug_load_and_run(vec![0x6a, 0x00]);
    assert_eq!(cpu.accumulator, 0x87);
    assert_eq!(cpu.status.get(), Status::NEGATIV | Status::CARRY);
}

#[test]
fn ror_memory() {
    let mut cpu = CPU::new();
    cpu.memory[0x01] = 0x0f;
    cpu.status.set(Status::CARRY);
    cpu.accumulator = 0x00;
    cpu.debug_load_and_run(vec![0x66, 0x01, 0x00]);
    assert_eq!(cpu.memory[0x01], 0x87);
    assert_eq!(
        cpu.status.get(),
        Status::NEGATIV | Status::ZERO | Status::CARRY
    );
}

// ---- SBC (opcode 0xe9; tests renamed from the `sdc_*` typo) ----

#[test]
fn sbc_basic() {
    let mut cpu = CPU::new();
    cpu.accumulator = 5;
    cpu.debug_load_and_run(vec![0xe9, 0x04, 0x00]);
    // Carry clear acts as "borrow": 5 - 4 - 1 + carry(0) => 1 here.
    assert_eq!(cpu.accumulator, 1);
    cpu.reset();
    cpu.accumulator = 5;
    cpu.status.set(Status::CARRY);
    cpu.debug_load_and_run(vec![0xe9, 0x04, 0x00]);
    assert_eq!(cpu.accumulator, 2);
}

#[test]
fn sbc_overflow_and_carry_flag() {
    let mut cpu = CPU::new();
    cpu.accumulator = 5;
    cpu.debug_load_and_run(vec![0xe9, 0x06, 0x00]);
    assert_eq!(cpu.accumulator, 0xff);
    assert_eq!(cpu.status.get(), Status::NEGATIV);
    cpu.reset();
    cpu.accumulator = 5;
    cpu.status.set(Status::CARRY);
    cpu.debug_load_and_run(vec![0xe9, 0x06, 0x00]);
    assert_eq!(cpu.accumulator, 0);
    assert_eq!(cpu.status.get(), Status::ZERO | Status::CARRY)
}

// ---- Stores and transfers ----

#[test]
fn sta_stx_sty_store_value() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0x15;
    cpu.register_x = 0x16;
    cpu.register_y = 0x17;
    cpu.debug_load_and_run(vec![0x85, 0x01, 0x86, 0x02, 0x84, 0x03, 0x00]);
    assert_eq!(cpu.mem_read(0x01), 0x15);
    assert_eq!(cpu.mem_read(0x02), 0x16);
    assert_eq!(cpu.mem_read(0x03), 0x17);
}

#[test]
fn tax_tay() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0x15;
    cpu.debug_load_and_run(vec![0xaa, 0xa8, 0x00]);
    assert_eq!(cpu.accumulator, cpu.register_x);
    assert_eq!(cpu.accumulator, cpu.register_y);
}

#[test]
fn tsx_txa_txs() {
    let mut cpu = CPU::new();
    // TSX, TXA, LDA #$69, TAX, TXS: SP ends up holding 0x69 in page 1.
    cpu.debug_load_and_run(vec![0xba, 0x8a, 0xa9, 0x69, 0xaa, 0x9a, 0x00]);
    assert_eq!(cpu.stack_pointer, 0x0169);
}

// ---- BIT ----

#[test]
fn bit_with_same_values() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0b1111_0000;
    cpu.memory[0x02] = 0b1111_0000;
    cpu.debug_load_and_run(vec![0x24, 0x02, 0x00]);
    assert_eq!(cpu.status.get(), Status::NEGATIV | Status::OVERFLOW);
}

#[test]
fn bit_with_different_values() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0b0011_0011;
    cpu.memory[0x02] = 0b0011_0000;
    cpu.debug_load_and_run(vec![0x24, 0x02, 0x00]);
    assert_eq!(cpu.status.get(), 0);
}

#[test]
fn bit_with_different_values_2() {
    let mut cpu = CPU::new();
    cpu.accumulator = 0b0011_0011;
    cpu.memory[0x02] = 0b0000_0000;
    cpu.debug_load_and_run(vec![0x24, 0x02, 0x00]);
    assert_eq!(cpu.status.get(), Status::ZERO);
}
use actix_multipart::{Field, Multipart};
use actix_web::web;
use futures::StreamExt;
use serde::{Deserialize, Serialize};
use std::io::Write;
use std::str;

/// Bookkeeping record for a file saved to disk during upload handling.
#[derive(Debug, Clone)]
pub struct UploadedFiles {
    pub name: String,
    pub path: String,
}

impl UploadedFiles {
    /// Builds the record for `filename`, stored under `./files/`.
    fn new(filename: &str) -> UploadedFiles {
        UploadedFiles {
            name: filename.to_string(),
            path: format!("./files/{}", filename),
        }
    }
}

/// Text fields extracted from the multipart form.
#[derive(Deserialize, Serialize, Debug)]
pub struct FormOut {
    title: String,
    description: String,
    count: u32,
}

/// Splits a multipart payload into its text form fields (returned as a
/// `FormOut`) and its file parts (each streamed to `./files/<name>` and
/// returned as `UploadedFiles` records).
pub async fn split_payload(payload: &mut Multipart) -> (FormOut, Vec<UploadedFiles>) {
    let mut files: Vec<UploadedFiles> = Vec::new();
    // Defaults until the corresponding fields arrive in the stream.
    let mut form = FormOut {
        title: "".to_string(),
        description: "".to_string(),
        count: 0,
    };

    while let Some(item) = payload.next().await {
        let mut field: Field = item.expect(" split_payload err");
        let content_type = field.content_disposition().unwrap();
        let name = content_type.get_name().unwrap();

        if name != "file" {
            // BUGFIX: accumulate ALL chunks before decoding. A field's value
            // may arrive split across several chunks; the old code decoded
            // and assigned per chunk, keeping only the last chunk's bytes.
            let mut value: Vec<u8> = Vec::new();
            while let Some(chunk) = field.next().await {
                let data = chunk.expect("split_payload err chunk");
                value.extend_from_slice(&data);
            }
            if let Ok(s) = str::from_utf8(&value) {
                // Debug trace of each text field (kept from the original).
                println!("{:?}", s);
                match name {
                    "title" => form.title = s.to_string(),
                    "description" => form.description = s.to_string(),
                    "count" => form.count = s.parse().expect("not a number"),
                    _ => {}
                }
            }
        } else {
            match content_type.get_filename() {
                Some(filename) => {
                    let file = UploadedFiles::new(filename);
                    let file_path = file.path.clone();
                    // File creation and writes run on blocking threads; the
                    // closure returns the handle so it can be reused for the
                    // next chunk.
                    let mut f = web::block(move || std::fs::File::create(&file_path))
                        .await
                        .unwrap();
                    while let Some(chunk) = field.next().await {
                        let data = chunk.unwrap();
                        f = web::block(move || f.write_all(&data).map(|_| f))
                            .await
                            .unwrap();
                    }
                    files.push(file);
                }
                None => {
                    println!("file none");
                }
            }
        }
    }
    (form, files)
}
use serde::{Serialize, Deserialize}; use serde_json; use reqwest::{self, Client, StatusCode, header::{ACCEPT, HeaderValue}}; use std::fmt; pub type Result<T> = std::result::Result<T, String>; /// `CDX_HOST` is the host for accessing cdx index data. pub const CDX_HOST: &str = "index.commoncrawl.org"; /// `WARC_HOST` is the host for accessing WARC data. pub const WARC_HOST: &str = "commoncrawl.s3.amazonaws.com"; /// `ToJson` specifies the operations implemented by types that can be serialized into JSON. pub trait ToJson<'a>: Serialize + Deserialize<'a> { /// `to_json_string` serializes the implementor into a json string. fn to_json_string(&self) -> Result<String> { serde_json::to_string(self) .map_err(|e| format!("{}", e)) } /// `to_json_bytes` serializes the implementor into json bytes. fn to_json_bytes(&self) -> Result<Vec<u8>> { serde_json::to_vec(self) .map_err(|e| format!("{}", e)) } } /// `FromJson` specifies the operations implemented by types that can be deserialized from JSON. pub trait FromJson<'a>: Serialize + Deserialize<'a> { /// `from_json_string` deserializes an instance of the implementor from a json string. fn from_json_string(s: &'a str) -> Result<Self> { serde_json::from_str(s) .map_err(|e| format!("{}", e)) } /// `from_json_bytes` deserializes an instance of the implementor from json bytes. fn from_json_bytes(b: &'a [u8]) -> Result<Self> { serde_json::from_slice(b) .map_err(|e| format!("{}", e)) } } /// `Url` is the url type used by the library. #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] pub enum Url { CDX { path: String }, WARC { path: String }, } impl Url { /// `to_string` returns the `Url` string. pub fn to_string(&self) -> String { format!("{}", self) } /// `from_string` creates a `Url` from a string. 
pub fn from_string(s: &str) -> Result<Url> { let url = reqwest::Url::parse(s) .map_err(|e| format!("{}", e))?; match url.host_str() { Some(CDX_HOST) => Ok(Url::CDX { path: url.path().into() }), Some(WARC_HOST) => Ok(Url::WARC { path: url.path().into() }), _ => Err("invalid domain".into()) } } } impl Default for Url { fn default() -> Url { Url::CDX { path: String::new() } } } impl fmt::Display for Url { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Url::CDX { path } => write!(f, "https://{}/{}", CDX_HOST, path), Url::WARC { path } => write!(f, "https://{}/{}", WARC_HOST, path), } } } /// `Charset` is the set of charsets used by `ContentType`. #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] pub enum Charset { UTF8, UTF16, } impl Charset { /// `to_string` returns the `Charset` string. pub fn to_string(self) -> String { format!("{}", self) } /// `from_string` creates a `Charset` from a string. pub fn from_string(s: &str) -> Result<Charset> { match s { "utf-8" => Ok(Charset::UTF8), "utf-16" => Ok(Charset::UTF16), _ => Err("invalid charset".into()) } } } impl Default for Charset { fn default() -> Charset { Charset::UTF8 } } impl fmt::Display for Charset { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Charset::UTF8 => write!(f, "utf-8"), Charset::UTF16 => write!(f, "utf-16"), } } } /// `ContentType` is the set of content-types used by `Fetcher`. #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] pub enum ContentType { JSON, TEXT { charset: Charset }, } impl ContentType { /// `to_string` returns the `ContentType` string. pub fn to_string(self) -> String { format!("{}", self) } /// `from_string` creates a `ContentType` from a string. 
pub fn from_string(s: &str) -> Result<ContentType> { match s { "application/json" => Ok(ContentType::JSON ), "text/plain; charset=utf-8" => Ok(ContentType::TEXT { charset: Charset::UTF8 } ), "text/plain; charset=utf-16" => Ok(ContentType::TEXT { charset: Charset::UTF16 } ), _ => Err("invalid content-type".into()) } } } impl Default for ContentType { fn default() -> ContentType { ContentType::JSON } } impl fmt::Display for ContentType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ContentType::JSON => write!(f, "application/json"), ContentType::TEXT { charset } => { write!(f, "text/plain; charset={}", charset) }, } } } /// `Fetcher` is used to fetch a remote http(s) resource. #[derive(Clone, Default, Eq, PartialEq, Ord, PartialOrd, Debug)] pub struct Fetcher { pub url: Url, pub content_type: ContentType, } impl Fetcher { /// `new` creates a new `Fetcher`. pub fn new() -> Fetcher { Fetcher::default() } /// `json_fetcher` creates a new json content-type `Fetcher`. pub fn json_fetcher(url: Url) -> Fetcher { Fetcher { url, content_type: ContentType::default(), } } /// `text_fetcher` creates a new text content-type `Fetcher`. pub fn text_fetcher(url: Url, charset: Charset) -> Fetcher { Fetcher { url, content_type: ContentType::TEXT { charset }, } } /// `exec` execs the `Fetcher`. pub fn exec(self) -> Result<Vec<u8>> { let content_type = HeaderValue::from_str(&self.content_type.to_string()) .map_err(|e| format!("{}", e))?; let req_builder = Client::new().get(&self.url.to_string()); let mut res = req_builder .header(ACCEPT, content_type) .send() .map_err(|e| format!("{}", e))?; if res.status() != StatusCode::OK { return Err(format!("status code: {}", res.status())); } let mut contents = Vec::new(); res.copy_to(&mut contents) .map_err(|e| format!("{}", e))?; Ok(contents) } } /// `CDXQuerier` is used to query the CommonCrawl Index CDX API. 
#[derive(Clone, Default, Hash, Eq, PartialEq, Ord, PartialOrd, Debug)] pub struct CDXQuerier { pub path: String, pub from: u64, pub to: u64, pub limit: u64, pub sort: i64, pub filter: String, pub field: Option<String>, pub page: u64, pub page_size: u64, pub show_num_pages: bool, pub show_paged_index: bool, } impl CDXQuerier { /// `new` creates a new CDXQuerier. pub fn new() -> CDXQuerier { CDXQuerier::default() } /// `set_path` sets the path of the collection index. pub fn set_path(_path: &str) -> Result<CDXQuerier> { unreachable!() } /// `set_from` sets the from timestamp in the date/time range of the query. /// The value has to have less than 14 digits and will be padded to the /// lower bound. pub fn set_from(_from: u64) -> Result<CDXQuerier> { unreachable!() } /// `set_to` sets the to timestamp in the date/time range of the query. /// The value has to have less than 14 digits and will be padded to the /// upper bound. pub fn set_to(_to: u64) -> Result<CDXQuerier> { unreachable!() } /// `set_limit` sets the limit to the number of returned items from the query. pub fn set_limit(_limit: u64) -> Result<CDXQuerier> { unreachable!() } /// `set_sort` sets the sorting method in the query. pub fn set_sort(_sort: i64) -> Result<CDXQuerier> { unreachable!() } /// `set_filter` sets the filtering method in the query. pub fn set_filter(_filter: &str) -> Result<CDXQuerier> { unreachable!() } /// `set_field` sets the field to be returned if only one is required. pub fn set_field(_field: &str) -> Result<CDXQuerier> { unreachable!() } /// `set_page` sets the page to be returned by the query. pub fn set_page(_page: u64) -> Result<CDXQuerier> { unreachable!() } /// `set_page_size` sets the maximum size per page. pub fn set_page_size(_page_size: u64) -> Result<CDXQuerier> { unreachable!() } /* /// `set_show_num_pages` sets if the query should return the number of pages. 
pub fn set_show_num_pages(_toggle: bool) -> Result<CDXQuerier> { unreachable!() } /// `set_show_paged_index` sets if the query should return the /// secondary index data instead of the CDX data. pub fn set_show_paged_index(_toggle: bool) -> Result<CDXQuerier> { unreachable!() } */ /// `exec` execs the `CDXQuerier`. pub fn exec(self) -> Result<CDXItems> { unreachable!() } } /// `CollectionInfo` is a single collection info in the CommonCrawl /// json file at https://index.commoncrawl.org/collinfo.json #[derive(Clone, Default, Hash, Eq, PartialEq, Ord, PartialOrd, Debug, Serialize, Deserialize)] pub struct CollectionInfo { pub id: String, pub name: String, pub timegate: String, pub cdx_api: String, } impl CollectionInfo { /// `new` creates a new `CollectionInfo`. pub fn new() -> CollectionInfo { CollectionInfo::default() } } impl<'a> ToJson<'a> for CollectionInfo {} impl<'a> FromJson<'a> for CollectionInfo {} /// `CollectionsInfo` is a collection of `CollectionInfo`s. #[derive(Default, Eq, PartialEq, Debug, Serialize, Deserialize)] pub struct CollectionsInfo(Vec<CollectionInfo>); impl CollectionsInfo { /// `PATH` is the path of the remote `CollectionsInfo`. pub const PATH: &'static str = "collinfo.json"; /// `new` creates a new `CollectionsInfo`. pub fn new() -> CollectionsInfo { CollectionsInfo::default() } /// `url` returns the `CollectionsInfo` url. pub fn url() -> Url { Url::CDX { path: CollectionsInfo::PATH.into() } } /// `fetch` fetches `CollectionsInfo` from remote. pub fn fetch() -> Result<CollectionsInfo> { let url = CollectionsInfo::url(); let fetcher = Fetcher::json_fetcher(url); let contents = fetcher.exec()?; CollectionsInfo::from_json_bytes(&contents) } } impl<'a> ToJson<'a> for CollectionsInfo {} impl<'a> FromJson<'a> for CollectionsInfo {} /// `CDXItem` is a single item returned by a CDX query. 
#[derive(Clone, Default, Hash, Eq, PartialEq, Ord, PartialOrd, Debug, Serialize, Deserialize)] pub struct CDXItem { pub urlkey: String, pub timestamp: u64, pub mime: String, pub length: u64, pub status: u64, pub filename: String, pub languages: String, pub charset: String, pub url: String, pub mime_detected: String, pub offset: u64, pub digest: String, } impl CDXItem { /// `new` creates a new `CDXItem`. pub fn new() -> CDXItem { CDXItem::default() } } impl<'a> ToJson<'a> for CDXItem {} impl<'a> FromJson<'a> for CDXItem {} /// `CDXItems` is the collection of items returned by a CDX query. #[derive(Default, Eq, PartialEq, Debug, Serialize, Deserialize)] pub struct CDXItems(Vec<CDXItem>); impl CDXItems { /// `new` creates a new `CDXItems`. pub fn new() -> CDXItems { CDXItems::default() } /// `fetch` fetches `CDXItems` from remote. pub fn fetch(path: &str) -> Result<CDXItems> { let path = if path.chars().nth(0) == Some('/') { let mut p = String::from(path); p.remove(0); p } else { String::from(path) }; let url = Url::WARC { path }; let fetcher = Fetcher::json_fetcher(url); let contents = fetcher.exec()?; CDXItems::from_json_bytes(&contents) } } impl<'a> ToJson<'a> for CDXItems {} impl<'a> FromJson<'a> for CDXItems {}
/* * Datadog API V1 Collection * * Collection of all Datadog Public endpoints. * * The version of the OpenAPI document: 1.0 * Contact: support@datadoghq.com * Generated by: https://openapi-generator.tech */ /// FormulaAndFunctionEventAggregation : Aggregation methods for event platform queries. /// Aggregation methods for event platform queries. #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum FormulaAndFunctionEventAggregation { #[serde(rename = "count")] COUNT, #[serde(rename = "cardinality")] CARDINALITY, #[serde(rename = "median")] MEDIAN, #[serde(rename = "pc75")] PC75, #[serde(rename = "pc90")] PC90, #[serde(rename = "pc95")] PC95, #[serde(rename = "pc98")] PC98, #[serde(rename = "pc99")] PC99, #[serde(rename = "sum")] SUM, #[serde(rename = "min")] MIN, #[serde(rename = "max")] MAX, #[serde(rename = "avg")] AVG, } impl ToString for FormulaAndFunctionEventAggregation { fn to_string(&self) -> String { match self { Self::COUNT => String::from("count"), Self::CARDINALITY => String::from("cardinality"), Self::MEDIAN => String::from("median"), Self::PC75 => String::from("pc75"), Self::PC90 => String::from("pc90"), Self::PC95 => String::from("pc95"), Self::PC98 => String::from("pc98"), Self::PC99 => String::from("pc99"), Self::SUM => String::from("sum"), Self::MIN => String::from("min"), Self::MAX => String::from("max"), Self::AVG => String::from("avg"), } } }
use game_state; use std::str; pub fn get_king_moves(state: &game_state::GameState, piece_coord: Vec<u8>) -> Vec<String> { let mut can_move_here = true; let mut allowed_king_moves: Vec<String> = Vec::new(); //down let down = (0i8,1i8); //up let up = (0i8,-1i8); //left let left = (-1i8,0i8); //right let right = (1i8,0i8); //down right let down_right = (1i8,1i8); //up right let up_right = (1i8,-1i8); //down left let down_left = (-1i8,1i8); //up left let up_left = (-1i8,-1i8); let mut move_piece_coord0 = piece_coord[0] as i8; let mut move_piece_coord1 = piece_coord[1] as i8; //down move_piece_coord0 += down.0; move_piece_coord1 += down.1; if (move_piece_coord1 as u8) < 'G' as u8 { let mut coord:Vec<u8> = Vec::new(); coord.push(move_piece_coord0 as u8); coord.push(move_piece_coord1 as u8); if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) { if let Some(playerAt) = playerat_opt { if playerAt == state.player_turn { can_move_here = false; } } if can_move_here { let mut king_move_str = String::new(); king_move_str.push(piece_coord[0] as char); king_move_str.push(piece_coord[1] as char); king_move_str.push('-'); king_move_str.push(move_piece_coord0 as u8 as char); king_move_str.push(move_piece_coord1 as u8 as char); allowed_king_moves.push(king_move_str); } } } //up move_piece_coord0 = piece_coord[0] as i8; move_piece_coord1 = piece_coord[1] as i8; move_piece_coord0 += up.0; move_piece_coord1 += up.1; can_move_here = true; if (move_piece_coord1 as u8) > '0' as u8 { let mut coord:Vec<u8> = Vec::new(); coord.push(move_piece_coord0 as u8); coord.push(move_piece_coord1 as u8); if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) { if let Some(playerAt) = playerat_opt { if playerAt == state.player_turn { can_move_here = false; } } if can_move_here { let mut king_move_str = String::new(); king_move_str.push(piece_coord[0] as char); king_move_str.push(piece_coord[1] as char); king_move_str.push('-'); king_move_str.push(move_piece_coord0 as u8 as 
char); king_move_str.push(move_piece_coord1 as u8 as char); allowed_king_moves.push(king_move_str); } } } //left move_piece_coord0 = piece_coord[0] as i8; move_piece_coord1 = piece_coord[1] as i8; move_piece_coord0 += left.0; move_piece_coord1 += left.1; can_move_here = true; if (move_piece_coord0 as u8) > 'A' as u8 { let mut coord:Vec<u8> = Vec::new(); coord.push(move_piece_coord0 as u8); coord.push(move_piece_coord1 as u8); if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) { if let Some(playerAt) = playerat_opt { if playerAt == state.player_turn { can_move_here = false; } } if can_move_here { let mut king_move_str = String::new(); king_move_str.push(piece_coord[0] as char); king_move_str.push(piece_coord[1] as char); king_move_str.push('-'); king_move_str.push(move_piece_coord0 as u8 as char); king_move_str.push(move_piece_coord1 as u8 as char); allowed_king_moves.push(king_move_str); } } } //right move_piece_coord0 = piece_coord[0] as i8; move_piece_coord1 = piece_coord[1] as i8; move_piece_coord0 += right.0; move_piece_coord1 += right.1; can_move_here = true; if (move_piece_coord0 as u8) < 'I' as u8 { let mut coord:Vec<u8> = Vec::new(); coord.push(move_piece_coord0 as u8); coord.push(move_piece_coord1 as u8); if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) { if let Some(playerAt) = playerat_opt { if playerAt == state.player_turn { can_move_here = false; } } if can_move_here { let mut king_move_str = String::new(); king_move_str.push(piece_coord[0] as char); king_move_str.push(piece_coord[1] as char); king_move_str.push('-'); king_move_str.push(move_piece_coord0 as u8 as char); king_move_str.push(move_piece_coord1 as u8 as char); allowed_king_moves.push(king_move_str); } } } //down_right let mut move_piece_coord0 = piece_coord[0] as i8; let mut move_piece_coord1 = piece_coord[1] as i8; move_piece_coord0 += down_right.0; move_piece_coord1 += down_right.1; if (move_piece_coord0 as u8) < 'I' as u8 { if (move_piece_coord1 as 
u8) < '9' as u8 { let mut coord:Vec<u8> = Vec::new(); coord.push(move_piece_coord0 as u8); coord.push(move_piece_coord1 as u8); if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) { if let Some(playerAt) = playerat_opt { if playerAt == state.player_turn { can_move_here = false; } } if can_move_here { let mut king_move_str = String::new(); king_move_str.push(piece_coord[0] as char); king_move_str.push(piece_coord[1] as char); king_move_str.push('-'); king_move_str.push(move_piece_coord0 as u8 as char); king_move_str.push(move_piece_coord1 as u8 as char); allowed_king_moves.push(king_move_str); } } } } //up_right move_piece_coord0 = piece_coord[0] as i8; move_piece_coord1 = piece_coord[1] as i8; move_piece_coord0 += up_right.0; move_piece_coord1 += up_right.1; can_move_here = true; if (move_piece_coord0 as u8) < 'I' as u8 { if (move_piece_coord1 as u8) > '0' as u8 { let mut coord:Vec<u8> = Vec::new(); coord.push(move_piece_coord0 as u8); coord.push(move_piece_coord1 as u8); if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) { if let Some(playerAt) = playerat_opt { if playerAt == state.player_turn { can_move_here = false; } } if can_move_here { let mut king_move_str = String::new(); king_move_str.push(piece_coord[0] as char); king_move_str.push(piece_coord[1] as char); king_move_str.push('-'); king_move_str.push(move_piece_coord0 as u8 as char); king_move_str.push(move_piece_coord1 as u8 as char); allowed_king_moves.push(king_move_str); } } } } //down_left move_piece_coord0 = piece_coord[0] as i8; move_piece_coord1 = piece_coord[1] as i8; can_move_here = true; move_piece_coord0 += down_left.0; move_piece_coord1 += down_left.1; if (move_piece_coord0 as u8) >= 'A' as u8 { if (move_piece_coord1 as u8) < '9' as u8 { let mut coord:Vec<u8> = Vec::new(); coord.push(move_piece_coord0 as u8); coord.push(move_piece_coord1 as u8); if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) { if let Some(playerAt) = playerat_opt { if 
playerAt == state.player_turn { can_move_here = false; } } if can_move_here { let mut king_move_str = String::new(); king_move_str.push(piece_coord[0] as char); king_move_str.push(piece_coord[1] as char); king_move_str.push('-'); king_move_str.push(move_piece_coord0 as u8 as char); king_move_str.push(move_piece_coord1 as u8 as char); allowed_king_moves.push(king_move_str); } } } } //up_left move_piece_coord0 = piece_coord[0] as i8; move_piece_coord1 = piece_coord[1] as i8; can_move_here = true; move_piece_coord0 += up_left.0; move_piece_coord1 += up_left.1; if (move_piece_coord0 as u8) >= 'A' as u8 { if (move_piece_coord1 as u8) > '0' as u8 { let mut coord:Vec<u8> = Vec::new(); coord.push(move_piece_coord0 as u8); coord.push(move_piece_coord1 as u8); if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) { if let Some(playerAt) = playerat_opt { if playerAt == state.player_turn { can_move_here = false; } } if can_move_here { let mut king_move_str = String::new(); king_move_str.push(piece_coord[0] as char); king_move_str.push(piece_coord[1] as char); king_move_str.push('-'); king_move_str.push(move_piece_coord0 as u8 as char); king_move_str.push(move_piece_coord1 as u8 as char); allowed_king_moves.push(king_move_str); } } } } allowed_king_moves }
// Machine-generated register accessors for the ETH MAC timestamp control
// register (svd2rust-style output — see the doc link at the bottom of this
// file). Layout: `*_R`/`*_W` per-field reader/writer aliases, then the `R`
// (read) and `W` (write) proxies, then the register spec/trait impls.
// NOTE(review): generated code — regenerate from the SVD rather than editing
// by hand.
#[doc = "Register `ETH_MACTSCR` reader"]
pub type R = crate::R<ETH_MACTSCR_SPEC>;
#[doc = "Register `ETH_MACTSCR` writer"]
pub type W = crate::W<ETH_MACTSCR_SPEC>;
#[doc = "Field `TSENA` reader - TSENA"]
pub type TSENA_R = crate::BitReader;
#[doc = "Field `TSENA` writer - TSENA"]
pub type TSENA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSCFUPDT` reader - TSCFUPDT"]
pub type TSCFUPDT_R = crate::BitReader;
#[doc = "Field `TSCFUPDT` writer - TSCFUPDT"]
pub type TSCFUPDT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSINIT` reader - TSINIT"]
pub type TSINIT_R = crate::BitReader;
#[doc = "Field `TSINIT` writer - TSINIT"]
pub type TSINIT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSUPDT` reader - TSUPDT"]
pub type TSUPDT_R = crate::BitReader;
#[doc = "Field `TSUPDT` writer - TSUPDT"]
pub type TSUPDT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSADDREG` reader - TSADDREG"]
pub type TSADDREG_R = crate::BitReader;
#[doc = "Field `TSADDREG` writer - TSADDREG"]
pub type TSADDREG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSENALL` reader - TSENALL"]
pub type TSENALL_R = crate::BitReader;
#[doc = "Field `TSENALL` writer - TSENALL"]
pub type TSENALL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSCTRLSSR` reader - TSCTRLSSR"]
pub type TSCTRLSSR_R = crate::BitReader;
#[doc = "Field `TSCTRLSSR` writer - TSCTRLSSR"]
pub type TSCTRLSSR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSVER2ENA` reader - TSVER2ENA"]
pub type TSVER2ENA_R = crate::BitReader;
#[doc = "Field `TSVER2ENA` writer - TSVER2ENA"]
pub type TSVER2ENA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSIPENA` reader - TSIPENA"]
pub type TSIPENA_R = crate::BitReader;
#[doc = "Field `TSIPENA` writer - TSIPENA"]
pub type TSIPENA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSIPV6ENA` reader - TSIPV6ENA"]
pub type TSIPV6ENA_R = crate::BitReader;
#[doc = "Field `TSIPV6ENA` writer - TSIPV6ENA"]
pub type TSIPV6ENA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSIPV4ENA` reader - TSIPV4ENA"]
pub type TSIPV4ENA_R = crate::BitReader;
#[doc = "Field `TSIPV4ENA` writer - TSIPV4ENA"]
pub type TSIPV4ENA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSEVNTENA` reader - TSEVNTENA"]
pub type TSEVNTENA_R = crate::BitReader;
#[doc = "Field `TSEVNTENA` writer - TSEVNTENA"]
pub type TSEVNTENA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSMSTRENA` reader - TSMSTRENA"]
pub type TSMSTRENA_R = crate::BitReader;
#[doc = "Field `TSMSTRENA` writer - TSMSTRENA"]
pub type TSMSTRENA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// SNAPTYPSEL is the only multi-bit field (2 bits), hence FieldReader/FieldWriter.
#[doc = "Field `SNAPTYPSEL` reader - SNAPTYPSEL"]
pub type SNAPTYPSEL_R = crate::FieldReader;
#[doc = "Field `SNAPTYPSEL` writer - SNAPTYPSEL"]
pub type SNAPTYPSEL_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `TSENMACADDR` reader - TSENMACADDR"]
pub type TSENMACADDR_R = crate::BitReader;
#[doc = "Field `TSENMACADDR` writer - TSENMACADDR"]
pub type TSENMACADDR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// CSC has a reader alias only — it is a read-only field (no `csc` method on `W`).
#[doc = "Field `CSC` reader - CSC"]
pub type CSC_R = crate::BitReader;
#[doc = "Field `TXTSSTSM` reader - TXTSSTSM"]
pub type TXTSSTSM_R = crate::BitReader;
#[doc = "Field `TXTSSTSM` writer - TXTSSTSM"]
pub type TXTSSTSM_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AV8021ASMEN` reader - AV8021ASMEN"]
pub type AV8021ASMEN_R = crate::BitReader;
#[doc = "Field `AV8021ASMEN` writer - AV8021ASMEN"]
pub type AV8021ASMEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read proxy: each method extracts one field from the cached register value.
impl R {
    #[doc = "Bit 0 - TSENA"]
    #[inline(always)]
    pub fn tsena(&self) -> TSENA_R {
        TSENA_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - TSCFUPDT"]
    #[inline(always)]
    pub fn tscfupdt(&self) -> TSCFUPDT_R {
        TSCFUPDT_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - TSINIT"]
    #[inline(always)]
    pub fn tsinit(&self) -> TSINIT_R {
        TSINIT_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - TSUPDT"]
    #[inline(always)]
    pub fn tsupdt(&self) -> TSUPDT_R {
        TSUPDT_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 5 - TSADDREG"]
    #[inline(always)]
    pub fn tsaddreg(&self) -> TSADDREG_R {
        TSADDREG_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 8 - TSENALL"]
    #[inline(always)]
    pub fn tsenall(&self) -> TSENALL_R {
        TSENALL_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - TSCTRLSSR"]
    #[inline(always)]
    pub fn tsctrlssr(&self) -> TSCTRLSSR_R {
        TSCTRLSSR_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - TSVER2ENA"]
    #[inline(always)]
    pub fn tsver2ena(&self) -> TSVER2ENA_R {
        TSVER2ENA_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - TSIPENA"]
    #[inline(always)]
    pub fn tsipena(&self) -> TSIPENA_R {
        TSIPENA_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - TSIPV6ENA"]
    #[inline(always)]
    pub fn tsipv6ena(&self) -> TSIPV6ENA_R {
        TSIPV6ENA_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - TSIPV4ENA"]
    #[inline(always)]
    pub fn tsipv4ena(&self) -> TSIPV4ENA_R {
        TSIPV4ENA_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - TSEVNTENA"]
    #[inline(always)]
    pub fn tsevntena(&self) -> TSEVNTENA_R {
        TSEVNTENA_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - TSMSTRENA"]
    #[inline(always)]
    pub fn tsmstrena(&self) -> TSMSTRENA_R {
        TSMSTRENA_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bits 16:17 - SNAPTYPSEL"]
    #[inline(always)]
    pub fn snaptypsel(&self) -> SNAPTYPSEL_R {
        SNAPTYPSEL_R::new(((self.bits >> 16) & 3) as u8)
    }
    #[doc = "Bit 18 - TSENMACADDR"]
    #[inline(always)]
    pub fn tsenmacaddr(&self) -> TSENMACADDR_R {
        TSENMACADDR_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - CSC"]
    #[inline(always)]
    pub fn csc(&self) -> CSC_R {
        CSC_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 24 - TXTSSTSM"]
    #[inline(always)]
    pub fn txtsstsm(&self) -> TXTSSTSM_R {
        TXTSSTSM_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 28 - AV8021ASMEN"]
    #[inline(always)]
    pub fn av8021asmen(&self) -> AV8021ASMEN_R {
        AV8021ASMEN_R::new(((self.bits >> 28) & 1) != 0)
    }
}
// Write proxy: each method returns a field writer anchored at that field's
// bit offset (the const generic).
impl W {
    #[doc = "Bit 0 - TSENA"]
    #[inline(always)]
    #[must_use]
    pub fn tsena(&mut self) -> TSENA_W<ETH_MACTSCR_SPEC, 0> {
        TSENA_W::new(self)
    }
    #[doc = "Bit 1 - TSCFUPDT"]
    #[inline(always)]
    #[must_use]
    pub fn tscfupdt(&mut self) -> TSCFUPDT_W<ETH_MACTSCR_SPEC, 1> {
        TSCFUPDT_W::new(self)
    }
    #[doc = "Bit 2 - TSINIT"]
    #[inline(always)]
    #[must_use]
    pub fn tsinit(&mut self) -> TSINIT_W<ETH_MACTSCR_SPEC, 2> {
        TSINIT_W::new(self)
    }
    #[doc = "Bit 3 - TSUPDT"]
    #[inline(always)]
    #[must_use]
    pub fn tsupdt(&mut self) -> TSUPDT_W<ETH_MACTSCR_SPEC, 3> {
        TSUPDT_W::new(self)
    }
    #[doc = "Bit 5 - TSADDREG"]
    #[inline(always)]
    #[must_use]
    pub fn tsaddreg(&mut self) -> TSADDREG_W<ETH_MACTSCR_SPEC, 5> {
        TSADDREG_W::new(self)
    }
    #[doc = "Bit 8 - TSENALL"]
    #[inline(always)]
    #[must_use]
    pub fn tsenall(&mut self) -> TSENALL_W<ETH_MACTSCR_SPEC, 8> {
        TSENALL_W::new(self)
    }
    #[doc = "Bit 9 - TSCTRLSSR"]
    #[inline(always)]
    #[must_use]
    pub fn tsctrlssr(&mut self) -> TSCTRLSSR_W<ETH_MACTSCR_SPEC, 9> {
        TSCTRLSSR_W::new(self)
    }
    #[doc = "Bit 10 - TSVER2ENA"]
    #[inline(always)]
    #[must_use]
    pub fn tsver2ena(&mut self) -> TSVER2ENA_W<ETH_MACTSCR_SPEC, 10> {
        TSVER2ENA_W::new(self)
    }
    #[doc = "Bit 11 - TSIPENA"]
    #[inline(always)]
    #[must_use]
    pub fn tsipena(&mut self) -> TSIPENA_W<ETH_MACTSCR_SPEC, 11> {
        TSIPENA_W::new(self)
    }
    #[doc = "Bit 12 - TSIPV6ENA"]
    #[inline(always)]
    #[must_use]
    pub fn tsipv6ena(&mut self) -> TSIPV6ENA_W<ETH_MACTSCR_SPEC, 12> {
        TSIPV6ENA_W::new(self)
    }
    #[doc = "Bit 13 - TSIPV4ENA"]
    #[inline(always)]
    #[must_use]
    pub fn tsipv4ena(&mut self) -> TSIPV4ENA_W<ETH_MACTSCR_SPEC, 13> {
        TSIPV4ENA_W::new(self)
    }
    #[doc = "Bit 14 - TSEVNTENA"]
    #[inline(always)]
    #[must_use]
    pub fn tsevntena(&mut self) -> TSEVNTENA_W<ETH_MACTSCR_SPEC, 14> {
        TSEVNTENA_W::new(self)
    }
    #[doc = "Bit 15 - TSMSTRENA"]
    #[inline(always)]
    #[must_use]
    pub fn tsmstrena(&mut self) -> TSMSTRENA_W<ETH_MACTSCR_SPEC, 15> {
        TSMSTRENA_W::new(self)
    }
    #[doc = "Bits 16:17 - SNAPTYPSEL"]
    #[inline(always)]
    #[must_use]
    pub fn snaptypsel(&mut self) -> SNAPTYPSEL_W<ETH_MACTSCR_SPEC, 16> {
        SNAPTYPSEL_W::new(self)
    }
    #[doc = "Bit 18 - TSENMACADDR"]
    #[inline(always)]
    #[must_use]
    pub fn tsenmacaddr(&mut self) -> TSENMACADDR_W<ETH_MACTSCR_SPEC, 18> {
        TSENMACADDR_W::new(self)
    }
    #[doc = "Bit 24 - TXTSSTSM"]
    #[inline(always)]
    #[must_use]
    pub fn txtsstsm(&mut self) -> TXTSSTSM_W<ETH_MACTSCR_SPEC, 24> {
        TXTSSTSM_W::new(self)
    }
    #[doc = "Bit 28 - AV8021ASMEN"]
    #[inline(always)]
    #[must_use]
    pub fn av8021asmen(&mut self) -> AV8021ASMEN_W<ETH_MACTSCR_SPEC, 28> {
        AV8021ASMEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "This register controls the operation of the System Time generator and processing of PTP packets for timestamping in the Receiver.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eth_mactscr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`eth_mactscr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ETH_MACTSCR_SPEC;
impl crate::RegisterSpec for ETH_MACTSCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`eth_mactscr::R`](R) reader structure"]
impl crate::Readable for ETH_MACTSCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`eth_mactscr::W`](W) writer structure"]
impl crate::Writable for ETH_MACTSCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets ETH_MACTSCR to value 0x2000"]
impl crate::Resettable for ETH_MACTSCR_SPEC {
    const RESET_VALUE: Self::Ux = 0x2000;
}
/*
 https://projecteuler.net

 Triangle, pentagonal, and hexagonal numbers are generated by the following formulae:

 Triangle    Tn=n(n+1)/2    1, 3, 6, 10, 15, ...
 Pentagonal  Pn=n(3n-1)/2   1, 5, 12, 22, 35, ...
 Hexagonal   Hn=n(2n-1)     1, 6, 15, 28, 45, ...

 It can be verified that T285 = P165 = H143 = 40755.

 Find the next triangle number that is also pentagonal and hexagonal.

 NOTES:
*/

/// n-th triangle number: T(n) = n(n+1)/2.
fn triangle(n: u64) -> u64 {
    n * (n + 1) / 2
}

/// n-th pentagonal number: P(n) = n(3n-1)/2.
fn pentagonal(n: u64) -> u64 {
    n * (3 * n - 1) / 2
}

/// n-th hexagonal number: H(n) = n(2n-1).
fn hexagonal(n: u64) -> u64 {
    n * (2 * n - 1)
}

/// Extends `cache` with successive values of `gen` until its last entry
/// reaches `val`, then reports whether `val` occurs in the sequence.
///
/// `cache` must be non-empty and contain `gen(1), gen(2), ...` in order
/// (strictly increasing, so `binary_search` is valid); callers prime it
/// with `gen(1)`. Unifies the duplicated `pentagonal_to`/`pentagonal_next`
/// and `hexagonal_to`/`hexagonal_next` helper pairs of the original.
fn is_member(gen: fn(u64) -> u64, cache: &mut Vec<u64>, val: u64) -> bool {
    while *cache.last().unwrap() < val {
        let n = cache.len() as u64 + 1;
        cache.push(gen(n));
    }
    cache.binary_search(&val).is_ok()
}

/// Finds the first triangle number after T(285) = 40755 that is also
/// pentagonal and hexagonal.
fn solve() -> u64 {
    // Memoised sequence caches, primed with the first term each.
    let mut pent = vec![pentagonal(1)];
    let mut hex = vec![hexagonal(1)];
    let mut n = 285; // set to 284 to test against the known value 40755
    loop {
        n += 1;
        let candidate = triangle(n); // search for this value
        if is_member(pentagonal, &mut pent, candidate) && is_member(hexagonal, &mut hex, candidate)
        {
            return candidate;
        }
    }
}

/// Renders `value` with thousands separators, e.g. 1234567 -> "1,234,567".
fn group_thousands(mut value: u128) -> String {
    if value == 0 {
        return String::from("0");
    }
    let mut out = String::new();
    while value > 0 {
        let group = value % 1000;
        value /= 1000;
        if value > 0 {
            // Inner groups are zero-padded to three digits.
            out = format!(",{:03}", group) + &out;
        } else {
            // The leading group carries no padding.
            out = format!("{}", group) + &out;
        }
    }
    out
}

fn main() {
    let start_time = std::time::Instant::now();
    let sol = solve();
    let elapsed = start_time.elapsed().as_micros();
    println!("\nSolution: {}", sol);
    // Fixed user-facing typo: "Elasped" -> "Elapsed".
    println!("Elapsed time: {} us", group_thousands(elapsed));
}
//! Wrapper for the parts of the [`Web3::eth()`](https://docs.rs/web3/latest/web3/api/struct.Eth.html) API that [the ethereum module](super) uses.
use crate::config::EthereumConfig;
use crate::core::Chain;
use crate::retry::Retry;

use std::future::Future;
use std::num::NonZeroU64;
use std::time::Duration;

use anyhow::Context;
use futures::TryFutureExt;
use tracing::{debug, error, info};
use web3::{
    transports::Http,
    types::{Block, BlockId, Filter, Log, Transaction, TransactionId, H256, U256},
    Error, Web3,
};

/// Error returned by [`HttpTransport::logs`].
#[derive(Debug, thiserror::Error)]
pub enum LogsError {
    /// Query exceeded limits (time or result length).
    #[error("query limit exceeded")]
    QueryLimit,
    /// One of the blocks specified in the filter is unknown. Currently only
    /// known to occur for Alchemy endpoints.
    #[error("unknown block")]
    UnknownBlock,
    #[error(transparent)]
    Other(#[from] web3::Error),
}

/// Contains only those functions from [`Web3::eth()`](https://docs.rs/web3/latest/web3/api/struct.Eth.html)
/// that [the ethereum module](super) uses.
#[async_trait::async_trait]
pub trait EthereumTransport {
    async fn block(&self, block: BlockId) -> web3::Result<Option<Block<H256>>>;
    async fn block_number(&self) -> web3::Result<u64>;
    async fn chain(&self) -> anyhow::Result<Chain>;
    async fn logs(&self, filter: Filter) -> std::result::Result<Vec<Log>, LogsError>;
    async fn transaction(&self, id: TransactionId) -> web3::Result<Option<Transaction>>;
    async fn gas_price(&self) -> web3::Result<U256>;
}

/// An implementation of [`EthereumTransport`] which uses [`Web3::eth()`](https://docs.rs/web3/latest/web3/api/struct.Eth.html)
/// wrapped in an [exponential backoff retry utility](Retry).
///
/// Initial backoff time is 30 seconds and saturates at 1 hour:
///
/// `backoff [secs] = min((2 ^ N) * 15, 3600) [secs]`
///
/// where `N` is the consecutive retry iteration number `{1, 2, ...}`.
#[derive(Clone, Debug)]
pub struct HttpTransport(Web3<Http>);

impl HttpTransport {
    /// Creates new [`HttpTransport`] from [`Web3<Http>`]
    pub fn new(http: Web3<Http>) -> Self {
        Self(http)
    }

    /// Creates new [`HttpTransport`] from [configuration](EthereumConfig)
    ///
    /// This includes setting:
    /// - the [Url](reqwest::Url)
    /// - the password (if provided)
    pub fn from_config(config: EthereumConfig) -> anyhow::Result<Self> {
        let client = reqwest::Client::builder();
        let client = client
            .user_agent(crate::consts::USER_AGENT)
            .build()
            .context("Creating HTTP client")?;

        let mut url = config.url;
        url.set_password(config.password.as_deref())
            .map_err(|_| anyhow::anyhow!("Setting password"))?;

        let client = Http::with_client(client, url);

        Ok(Self::new(Web3::new(client)))
    }

    #[cfg(test)]
    /// Creates a [HttpTransport](api::HttpTransport) transport from the Ethereum endpoint specified by the relevant environment variables.
    ///
    /// Requires an environment variable for both the URL and (optional) password.
    ///
    /// Panics if the environment variables are not specified.
    ///
    /// Goerli:  PATHFINDER_ETHEREUM_HTTP_GOERLI_URL
    ///          PATHFINDER_ETHEREUM_HTTP_GOERLI_PASSWORD (optional)
    ///
    /// Mainnet: PATHFINDER_ETHEREUM_HTTP_MAINNET_URL
    ///          PATHFINDER_ETHEREUM_HTTP_MAINNET_PASSWORD (optional)
    pub fn test_transport(chain: Chain) -> Self {
        let key_prefix = match chain {
            Chain::Mainnet => "PATHFINDER_ETHEREUM_HTTP_MAINNET",
            Chain::Goerli => "PATHFINDER_ETHEREUM_HTTP_GOERLI",
        };

        let url_key = format!("{}_URL", key_prefix);
        let password_key = format!("{}_PASSWORD", key_prefix);

        let url = std::env::var(&url_key)
            .unwrap_or_else(|_| panic!("Ethereum URL environment var not set {url_key}"));

        let password = std::env::var(password_key).ok();

        let mut url = url.parse::<reqwest::Url>().expect("Bad Ethereum URL");
        url.set_password(password.as_deref()).unwrap();

        let client = reqwest::Client::builder().build().unwrap();
        let transport = Http::with_client(client, url);

        Self::new(Web3::new(transport))
    }
}

#[async_trait::async_trait]
impl EthereumTransport for HttpTransport {
    /// Wraps [`Web3::eth().block()`](https://docs.rs/web3/latest/web3/api/struct.Eth.html#method.block)
    /// into exponential retry on __all__ errors.
    async fn block(&self, block: BlockId) -> web3::Result<Option<Block<H256>>> {
        retry(|| self.0.eth().block(block), log_and_always_retry).await
    }

    /// Wraps [`Web3::eth().block_number()`](https://docs.rs/web3/latest/web3/api/struct.Eth.html#method.block_number)
    /// into exponential retry on __all__ errors.
    async fn block_number(&self) -> web3::Result<u64> {
        retry(|| self.0.eth().block_number(), log_and_always_retry)
            .await
            .map(|n| n.as_u64())
    }

    /// Identifies the Ethereum [Chain] behind the given Ethereum transport.
    ///
    /// Will error if it's not one of the valid Starknet [Chain] variants.
    /// Internaly wraps [`Web3::chain_id()`](https://docs.rs/web3/latest/web3/api/struct.Eth.html#method.chain_id)
    /// into exponential retry on __all__ errors.
    async fn chain(&self) -> anyhow::Result<Chain> {
        // Chain IDs: 1 = Ethereum mainnet, 5 = Goerli testnet.
        match retry(|| self.0.eth().chain_id(), log_and_always_retry).await? {
            id if id == U256::from(1u32) => Ok(Chain::Mainnet),
            id if id == U256::from(5u32) => Ok(Chain::Goerli),
            other => anyhow::bail!("Unsupported chain ID: {}", other),
        }
    }

    /// Wraps [`Web3::logs()`](https://docs.rs/web3/latest/web3/api/struct.Eth.html#method.logs)
    /// into exponential retry on __some__ errors.
    async fn logs(&self, filter: Filter) -> std::result::Result<Vec<Log>, LogsError> {
        use super::RpcErrorCode::*;
        /// Error message generated by spurious decoder error which occurs on Infura endpoints from
        /// time to time. It appears that the returned value is simply empty.
        const DECODER_ERR: &str =
            "Error(\"invalid type: null, expected a sequence\", line: 0, column: 0)";
        const ALCHEMY_UNKNOWN_BLOCK_ERR: &str =
            "One of the blocks specified in filter (fromBlock, toBlock or blockHash) cannot be found.";
        const ALCHEMY_QUERY_TIMEOUT_ERR: &str =
            "Query timeout exceeded. Consider reducing your block range.";

        // Provider-specific error strings/codes are folded into the
        // LogsError variants here; only `Other` errors are ever retried.
        retry(
            || {
                self.0.eth().logs(filter.clone()).map_err(|e| match e {
                    Error::Rpc(err) if err.code.code() == LimitExceeded.code() => {
                        LogsError::QueryLimit
                    }
                    Error::Rpc(err)
                        if err.code.code() == InvalidParams.code()
                            && err.message.starts_with("Log response size exceeded") =>
                    {
                        // Handle Alchemy query limit error response. Uses InvalidParams which is unusual.
                        LogsError::QueryLimit
                    }
                    Error::Rpc(err)
                        if err.code.code() == InvalidInput.code()
                            && err.message == ALCHEMY_UNKNOWN_BLOCK_ERR =>
                    {
                        LogsError::UnknownBlock
                    }
                    Error::Rpc(err)
                        if err.code.code() == InvalidInput.code()
                            && err.message == ALCHEMY_QUERY_TIMEOUT_ERR =>
                    {
                        LogsError::QueryLimit
                    }
                    _ => LogsError::Other(e),
                })
            },
            |e| match e {
                LogsError::Other(Error::Decoder(msg)) if msg == DECODER_ERR => {
                    tracing::trace!("Spurious L1 log decoder error occurred, retrying");
                    true
                }
                LogsError::Other(error) => log_and_always_retry(error),
                _ => false,
            },
        )
        .await
    }

    /// Wraps [`Web3::transaction()`](https://docs.rs/web3/latest/web3/api/struct.Eth.html#method.transaction)
    /// into exponential retry on __all__ errors.
    async fn transaction(&self, id: TransactionId) -> web3::Result<Option<Transaction>> {
        retry(
            || self.0.eth().transaction(id.clone()),
            log_and_always_retry,
        )
        .await
    }

    async fn gas_price(&self) -> web3::Result<U256> {
        retry(|| self.0.eth().gas_price(), log_and_always_retry).await
    }
}

/// A helper function to keep the backoff strategy consistent across different Web3 Eth API calls.
// NOTE(review): with base 2 and factor 15 this should give the 30 s initial
// delay / 1 h cap documented on `HttpTransport` — confirm against `Retry`'s
// exponential semantics.
async fn retry<T, E, Fut, FutureFactory, RetryCondition>(
    future_factory: FutureFactory,
    retry_condition: RetryCondition,
) -> Result<T, E>
where
    Fut: Future<Output = Result<T, E>>,
    FutureFactory: FnMut() -> Fut,
    RetryCondition: FnMut(&E) -> bool,
{
    Retry::exponential(future_factory, NonZeroU64::new(2).unwrap())
        .factor(NonZeroU64::new(15).unwrap())
        .max_delay(Duration::from_secs(60 * 60))
        .when(retry_condition)
        .await
}

/// A helper function to log Web3 Eth API errors. Always yields __true__.
// Exception: HTTP 401 (auth failure) is terminal and yields false — retrying
// with the same credentials cannot succeed.
fn log_and_always_retry(error: &Error) -> bool {
    match error {
        Error::Transport(web3::error::TransportError::Code(401)) => {
            // this happens at least on infura with bad urls, also alchemy
            return false;
        }
        Error::Unreachable | Error::InvalidResponse(_) | Error::Transport(_) => {
            debug!(reason=%error, "L1 request failed, retrying")
        }
        Error::Decoder(_) | Error::Internal | Error::Io(_) | Error::Recovery(_) => {
            error!(reason=%error, "L1 request failed, retrying")
        }
        Error::Rpc(_) => info!(reason=%error, "L1 request failed, retrying"),
    }

    true
}

#[cfg(test)]
impl std::ops::Deref for HttpTransport {
    type Target = Web3<Http>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[cfg(test)]
mod tests {
    mod logs {
        use crate::core::Chain;
        use crate::ethereum::transport::{EthereumTransport, HttpTransport, LogsError};
        use assert_matches::assert_matches;
        use web3::types::{BlockNumber, FilterBuilder, H256};

        #[tokio::test]
        async fn ok() {
            use std::str::FromStr;
            // Create a filter which includes just a single block with a small, known amount of logs.
            let filter = FilterBuilder::default()
                .block_hash(
                    H256::from_str(
                        "0x0d82aea6f64525def8594e3192497153b83d8c568bb76adee980042d85dec931",
                    )
                    .unwrap(),
                )
                .build();

            let transport = HttpTransport::test_transport(Chain::Goerli);
            let result = transport.logs(filter).await;
            assert_matches!(result, Ok(logs) if logs.len() == 85);
        }

        #[tokio::test]
        async fn query_limit() {
            // Create a filter which includes all logs ever. This should cause the API to return
            // error with a query limit variant.
            let filter = FilterBuilder::default()
                .from_block(BlockNumber::Earliest)
                .to_block(BlockNumber::Latest)
                .build();

            let transport = HttpTransport::test_transport(Chain::Goerli);
            let result = transport.logs(filter).await;
            assert_matches!(result, Err(LogsError::QueryLimit));
        }

        #[tokio::test]
        async fn unknown_block() {
            // This test covers the scenario where we query a block range which exceeds the current
            // Ethereum chain.
            //
            // Infura and Alchemy handle this differently.
            //  - Infura accepts the query as valid and simply returns logs for whatever part of the range it has.
            //  - Alchemy throws a RPC::ServerError which `HttpTransport::logs` maps to `UnknownBlock`.
            let transport = HttpTransport::test_transport(Chain::Goerli);
            let latest = transport.block_number().await.unwrap();

            let filter = FilterBuilder::default()
                .from_block(BlockNumber::Number((latest + 10).into()))
                .to_block(BlockNumber::Number((latest + 20).into()))
                .build();

            let result = transport.logs(filter).await;
            match result {
                // This occurs for an Infura endpoint
                Ok(logs) => assert!(logs.is_empty()),
                // This occurs for an Alchemy endpoint
                Err(e) => assert_matches!(e, LogsError::UnknownBlock),
            }
        }
    }
}
use std::error::Error;

/// True if `digits` is monotonically non-decreasing (reading left to right).
fn check_ascending(digits: &[u32]) -> bool {
    digits.windows(2).all(|w| w[0] <= w[1])
}

/// True if at least two adjacent digits are equal (a run of length >= 2).
fn check_doubles(digits: &[u32]) -> bool {
    digits.windows(2).any(|w| w[0] == w[1])
}

/// True if some run of equal adjacent digits has length *exactly* 2
/// (part 2's stricter rule: a double that is not part of a larger group).
fn check_sequences(digits: &[u32]) -> bool {
    let mut run = 1;
    for w in digits.windows(2) {
        if w[0] == w[1] {
            run += 1;
        } else {
            if run == 2 {
                return true;
            }
            run = 1;
        }
    }
    // Account for a run that reaches the end of the number.
    run == 2
}

/// Counts numbers in `[low, high)` whose digits are non-decreasing and
/// satisfy `pred`. Shared by `part1`/`part2`, which previously duplicated
/// this loop.
fn count_valid(low: u32, high: u32, pred: fn(&[u32]) -> bool) -> u32 {
    (low..high)
        .filter(|n| {
            let digits: Vec<u32> = n
                .to_string()
                .chars()
                .map(|c| c.to_digit(10).unwrap())
                .collect();
            check_ascending(&digits) && pred(&digits)
        })
        .count() as u32
}

/// Prints the number of valid passwords in the puzzle range (part 1 rules).
pub fn part1() -> Result<(), Box<dyn Error>> {
    println!("{}", count_valid(367479, 893698, check_doubles));
    Ok(())
}

/// Prints the number of valid passwords in the puzzle range (part 2 rules).
pub fn part2() -> Result<(), Box<dyn Error>> {
    println!("{}", count_valid(367479, 893698, check_sequences));
    Ok(())
}
use std::cmp::Reverse;
use std::collections::BinaryHeap;

struct Solution;

impl Solution {
    /// Greedily hands out cookies: repeatedly compare the smallest remaining
    /// appetite with the smallest remaining cookie. A big-enough cookie
    /// satisfies that child; an undersized one is discarded. Returns the
    /// number of children satisfied.
    pub fn find_content_children(g: Vec<i32>, s: Vec<i32>) -> i32 {
        // Min-heaps via `Reverse`: appetites (胃口) and cookie sizes (饼干尺寸).
        let mut appetites: BinaryHeap<Reverse<i32>> = g.into_iter().map(Reverse).collect();
        let mut cookies: BinaryHeap<Reverse<i32>> = s.into_iter().map(Reverse).collect();

        let mut satisfied = 0;
        // Stop as soon as either side runs out.
        while let (Some(&Reverse(need)), Some(&Reverse(size))) =
            (appetites.peek(), cookies.peek())
        {
            if size >= need {
                // Smallest cookie covers the smallest appetite.
                appetites.pop();
                satisfied += 1;
            }
            // The smallest cookie is consumed either way (used or discarded).
            cookies.pop();
        }
        satisfied
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_find_content_children() {
        assert_eq!(
            Solution::find_content_children(vec![1, 2, 3], vec![1, 1]),
            1
        );
        assert_eq!(
            Solution::find_content_children(vec![1, 2], vec![1, 2, 3]),
            2
        );
    }
}
mod parse; mod registry_value_type;
const MAX_LONGITUDE: f64 = 180.0;
const MAX_LATITUDE: f64 = 90.0;

/// Scales a lon/lat pair into [-1, 1] on both axes.
pub fn normalised_screen_coords(lon: f64, lat: f64) -> (f64, f64) {
    (lon / MAX_LONGITUDE, lat / MAX_LATITUDE)
}

/// Maps lon/lat into [0, 1] with the origin at the top-left corner
/// (the y axis is flipped so north ends up at the top of the screen).
pub fn origin_based_normalised_screen_coords(lon: f64, lat: f64) -> (f64, f64) {
    let x = (lon / MAX_LONGITUDE) * 0.5 + 0.5;
    let y = (lat / MAX_LATITUDE) * 0.5 + 0.5;
    (x, 1.0 - y)
}

/// Projects lon/lat to normalised [0, 1] web-mercator coordinates.
///
/// NOTE(review): the y term divides the mercator ordinate by 4*PI
/// (`0.5 * merc_n / (2*PI)`); the textbook projection divides by 2*PI.
/// Presumably a deliberate vertical scaling for this renderer - confirm
/// against callers before "fixing".
pub fn normalised_mercator_coords(lon: f64, lat: f64) -> (f64, f64) {
    let pi = std::f64::consts::PI;
    let x = (lon + 180.0) * (1.0 / 360.0);
    let lat_radians = lat * pi / 180.0;
    let merc_n = (std::f64::consts::FRAC_PI_4 + (lat_radians * 0.5)).tan().ln();
    (x, 0.5 - (0.5 * merc_n / (2.0 * pi)))
}

/// Projects lon/lat to normalised [0, 1] equirectangular coordinates
/// (plain linear mapping, origin at the top-left).
pub fn normalised_equirectangular_coords(lon: f64, lat: f64) -> (f64, f64) {
    (
        (lon + 180.0) * (1.0 / 360.0),
        ((lat * -1.0) + 90.0) * (1.0 / 180.0),
    )
}

/// Flips the y axis: normalised map space -> screen space.
pub fn transform_normalised_to_screen(coord: (f64, f64)) -> (f64, f64) {
    let (x, y) = coord;
    (x, 1.0 - y)
}

/// Normalises a pixel position against the current draw size.
pub fn normalise_to_window(x: f64, y: f64, draw_size: &[f64; 2]) -> (f64, f64) {
    (normalise_coord(x, draw_size[0]), normalise_coord(y, draw_size[1]))
}

/// Normalises a single coordinate against the window extent on that axis.
pub fn normalise_coord(c: f64, window_size_in_c: f64) -> f64 {
    c / window_size_in_c
}

/// Normalises an [x, y] pair against a [w, h] window size.
pub fn normalised_coords(coords: &[f64; 2], window_size: &[f64; 2]) -> (f64, f64) {
    (
        normalise_coord(coords[0], window_size[0]),
        normalise_coord(coords[1], window_size[1]),
    )
}

/// Converts a raw window position into map space at the given view/zoom.
pub fn window_to_map(x: f64, y: f64, window_size: &[f64; 2], view_origin: &[f64; 2], zoom_level: f64) -> (f64, f64) {
    let normalised = (
        normalise_coord(x, window_size[0]),
        normalise_coord(y, window_size[1]),
    );
    screen_coords_to_map(normalised, view_origin, zoom_level)
}

/// Converts lon/lat into map space via the equirectangular projection.
pub fn lon_lat_to_map(lon: f64, lat: f64, view_origin: &[f64; 2], zoom_level: f64) -> (f64, f64) {
    screen_coords_to_map(normalised_equirectangular_coords(lon, lat), view_origin, zoom_level)
}

/// Applies the view origin offset and zoom factor to a normalised coordinate.
fn screen_coords_to_map(coord: (f64, f64), view_origin: &[f64; 2], zoom_level: f64) -> (f64, f64) {
    let (x, y) = coord;
    (
        (x - view_origin[0]) * zoom_level,
        (y - view_origin[1]) * zoom_level,
    )
}

/// True when the coordinate lies inside the half-open unit square [0, 1).
pub fn in_bounds(coord: (f64, f64)) -> bool {
    let (x, y) = coord;
    (0.0..1.0).contains(&x) && (0.0..1.0).contains(&y)
}
// Field-level read/write accessors for the STATUS register (reader and
// writer proxy types plus typed views of each bit field).

#[doc = "Reader of register STATUS"]
pub type R = crate::R<u32, super::STATUS>;
#[doc = "Writer for register STATUS"]
pub type W = crate::W<u32, super::STATUS>;
#[doc = "Register STATUS `reset()`'s with value 0"]
impl crate::ResetValue for super::STATUS {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `STABLE`"]
pub type STABLE_R = crate::R<bool, bool>;
#[doc = "Reader of field `BADWRITE`"]
pub type BADWRITE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BADWRITE`"]
pub struct BADWRITE_W<'a> {
    w: &'a mut W,
}
impl<'a> BADWRITE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 24, then splice the new value into that position.
        self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
        self.w
    }
}
#[doc = "Reader of field `ENABLED`"]
pub type ENABLED_R = crate::R<bool, bool>;
#[doc = "The current frequency range setting, always reads 0\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum FREQ_RANGE_A {
    #[doc = "0: `0`"]
    _1_15MHZ = 0,
    #[doc = "1: `1`"]
    RESERVED_1 = 1,
    #[doc = "2: `10`"]
    RESERVED_2 = 2,
    #[doc = "3: `11`"]
    RESERVED_3 = 3,
}
impl From<FREQ_RANGE_A> for u8 {
    #[inline(always)]
    fn from(variant: FREQ_RANGE_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `FREQ_RANGE`"]
pub type FREQ_RANGE_R = crate::R<u8, FREQ_RANGE_A>;
impl FREQ_RANGE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> FREQ_RANGE_A {
        match self.bits {
            0 => FREQ_RANGE_A::_1_15MHZ,
            1 => FREQ_RANGE_A::RESERVED_1,
            2 => FREQ_RANGE_A::RESERVED_2,
            3 => FREQ_RANGE_A::RESERVED_3,
            // The field is only 2 bits wide, so 0-3 covers every value.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `_1_15MHZ`"]
    #[inline(always)]
    pub fn is_1_15mhz(&self) -> bool {
        *self == FREQ_RANGE_A::_1_15MHZ
    }
    #[doc = "Checks if the value of the field is `RESERVED_1`"]
    #[inline(always)]
    pub fn is_reserved_1(&self) -> bool {
        *self == FREQ_RANGE_A::RESERVED_1
    }
    #[doc = "Checks if the value of the field is `RESERVED_2`"]
    #[inline(always)]
    pub fn is_reserved_2(&self) -> bool {
        *self == FREQ_RANGE_A::RESERVED_2
    }
    #[doc = "Checks if the value of the field is `RESERVED_3`"]
    #[inline(always)]
    pub fn is_reserved_3(&self) -> bool {
        *self == FREQ_RANGE_A::RESERVED_3
    }
}
impl R {
    #[doc = "Bit 31 - Oscillator is running and stable"]
    #[inline(always)]
    pub fn stable(&self) -> STABLE_R {
        STABLE_R::new(((self.bits >> 31) & 0x01) != 0)
    }
    #[doc = "Bit 24 - An invalid value has been written to CTRL_ENABLE or CTRL_FREQ_RANGE or DORMANT"]
    #[inline(always)]
    pub fn badwrite(&self) -> BADWRITE_R {
        BADWRITE_R::new(((self.bits >> 24) & 0x01) != 0)
    }
    #[doc = "Bit 12 - Oscillator is enabled but not necessarily running and stable, resets to 0"]
    #[inline(always)]
    pub fn enabled(&self) -> ENABLED_R {
        ENABLED_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bits 0:1 - The current frequency range setting, always reads 0"]
    #[inline(always)]
    pub fn freq_range(&self) -> FREQ_RANGE_R {
        FREQ_RANGE_R::new((self.bits & 0x03) as u8)
    }
}
impl W {
    #[doc = "Bit 24 - An invalid value has been written to CTRL_ENABLE or CTRL_FREQ_RANGE or DORMANT"]
    #[inline(always)]
    pub fn badwrite(&mut self) -> BADWRITE_W {
        BADWRITE_W { w: self }
    }
}
use SafeWrapper;
use ir::{Value, Context, Instruction};
use Subtype;
use sys;

/// A basic block: a thin wrapper over an IR `Value` created through the
/// `sys` FFI layer.
pub struct Block<'ctx>(Value<'ctx>);

impl<'ctx> Block<'ctx> {
    /// Creates a new basic block in the given context.
    pub fn new(context: &Context) -> Self {
        // SAFETY: relies on the FFI contract that LLVMRustBasicBlockCreate
        // returns a pointer valid for `Value::from_inner` - presumably tied
        // to the context's lifetime; confirm against the sys bindings.
        unsafe {
            Block(Value::from_inner(sys::LLVMRustBasicBlockCreate(context.inner())))
        }
    }

    /// Adds an instruction to the end of this basic block.
    pub fn append(&mut self, inst: &Instruction) {
        // SAFETY: forwards two raw inner pointers to the FFI append call;
        // both are obtained from live wrappers held by the caller.
        unsafe {
            sys::LLVMRustInstructionAppend(inst.upcast_ref().inner(), self.0.inner());
        }
    }
}

// Generates the Value <-> Block subtype conversions.
impl_subtype!(Block => Value);
//#![deny(unsafe_code)] #![deny(warnings)] #![no_main] #![no_std] // panic-handler crate extern crate panic_semihosting; //#[macro_use] extern crate cortex_m; extern crate rtfm; //use stm32l432xx_hal; use rtfm::app; //use rtfm::{P0, T0, TMax}; //extern crate stm32l4;/**/ use cortex_m::asm::delay; //use stm32l4; extern crate stm32l4; #[macro_use] extern crate cortex_m_semihosting; #[app(device = stm32l4::stm32l4x2)] const APP: () = { #[init] fn init() { let per: stm32l4::stm32l4x2::Peripherals = device; let rcc: &stm32l4::stm32l4x2::RCC = &per.RCC; per.LPUART1 rcc.ahb2enr.write(|w| { w.gpioben().set_bit() }); unsafe { rcc.cfgr.write(|w| { w.sw().bits(0b01) }); } let gpiob = &per.GPIOB; gpiob.pupdr.write(|w| { w.pupdr3().pull_down() }); gpiob.moder.write(|w| { w.moder3().output() }); // gpiob.odr.write(|w| { // w.odr3().high() // }); hprintln!("aa").unwrap(); loop { hprintln!("{:?}", rcc.cfgr.read().sw().bits()).unwrap(); // hprintln!("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!").unwrap(); gpiob.odr.write(|w| { w.odr3().bit(gpiob.odr.read().odr3().bit_is_clear()) }); delay(1000000); // hprintln!("!").unwrap(); } } };
use franklin_crypto::bellman::pairing::ff::{Field, PrimeField};
use franklin_crypto::bellman::Engine;
use rand::Rng;

extern crate num_bigint;
extern crate num_integer;
extern crate num_traits;
use self::num_bigint::{BigInt, BigUint};
use self::num_integer::{ExtendedGcd, Integer};
use self::num_traits::{One, ToPrimitive, Zero};
use std::convert::TryInto;

// Batch inverses vector of elements required for MDS matrix.
// Inverts every non-zero element of `v` in place using a single field
// inversion (Montgomery's trick); zero elements are left untouched.
pub(crate) fn batch_inversion<E: Engine>(v: &mut [E::Fr]) {
    // Montgomery’s Trick and Fast Implementation of Masked AES
    // Genelle, Prouff and Quisquater
    // Section 3.2

    // First pass: compute [a, ab, abc, ...]
    let mut prod = Vec::with_capacity(v.len());
    let mut tmp = E::Fr::one();
    for g in v
        .iter()
        // Ignore zero elements
        .filter(|g| !g.is_zero())
    {
        tmp.mul_assign(&g);
        prod.push(tmp);
    }

    // Invert `tmp`.
    tmp = tmp.inverse().unwrap(); // Guaranteed to be nonzero.

    // Second pass: iterate backwards to compute inverses
    for (g, s) in v
        .iter_mut()
        // Backwards
        .rev()
        // Ignore normalized elements
        .filter(|g| !g.is_zero())
        // Backwards, skip last element, fill in one for last term.
        .zip(prod.into_iter().rev().skip(1).chain(Some(E::Fr::one())))
    {
        // tmp := tmp * g.z; g.z := tmp * s = 1/z
        let mut newtmp = tmp;
        newtmp.mul_assign(&g);
        *g = tmp;
        g.mul_assign(&s);
        tmp = newtmp;
    }
}

// Computes scalar product of two same length vector.
// Returns sum(a[i] * b[i]); note `zip` silently stops at the shorter
// slice if the lengths ever differ.
pub(crate) fn scalar_product<E: Engine>(a: &[E::Fr], b: &[E::Fr]) -> E::Fr {
    let mut acc = E::Fr::zero();
    for (a, b) in a.iter().zip(b.iter()) {
        let mut tmp = a.clone();
        tmp.mul_assign(&b);
        acc.add_assign(&tmp);
    }
    acc
}

// Constructs the MDS matrix required by the linear layer of the permutation function.
// Samples an S x S Cauchy-style MDS matrix over the field: draws random
// vectors x and y, retries until all x's, all y's, and every x-vs-y pair
// are distinct, fills M[i][j] = x[i] - y[j], then batch-inverts every
// entry (so M[i][j] = 1 / (x[i] - y[j])).
pub(crate) fn construct_mds_matrix<E: Engine, R: Rng, const S: usize>(
    rng: &mut R,
) -> [[E::Fr; S]; S] {
    let width = S;

    loop {
        let x: Vec<E::Fr> = (0..width).map(|_| rng.gen()).collect();
        let y: Vec<E::Fr> = (0..width).map(|_| rng.gen()).collect();

        let mut invalid = false;

        // quick and dirty check for uniqueness of x
        for i in 0..(width) {
            if invalid {
                continue;
            }
            let el = x[i];
            for other in x[(i + 1)..].iter() {
                if el == *other {
                    invalid = true;
                    break;
                }
            }
        }

        if invalid {
            continue;
        }

        // quick and dirty check for uniqueness of y
        for i in 0..(width) {
            if invalid {
                continue;
            }
            let el = y[i];
            for other in y[(i + 1)..].iter() {
                if el == *other {
                    invalid = true;
                    break;
                }
            }
        }

        if invalid {
            continue;
        }

        // quick and dirty check for uniqueness of x vs y
        for i in 0..(width) {
            if invalid {
                continue;
            }
            let el = x[i];
            for other in y.iter() {
                if el == *other {
                    invalid = true;
                    break;
                }
            }
        }

        if invalid {
            continue;
        }

        // by previous checks we can be sure in uniqueness and perform subtractions easily
        let mut mds_matrix = vec![E::Fr::zero(); width * width];
        for (i, x) in x.into_iter().enumerate() {
            for (j, y) in y.iter().enumerate() {
                // Row-major index of entry (i, j).
                let place_into = i * (width) + j;
                let mut element = x;
                element.sub_assign(y);
                mds_matrix[place_into] = element;
            }
        }

        // now we need to do the inverse
        batch_inversion::<E>(&mut mds_matrix[..]);

        // Repack the flat row-major vector into the fixed-size array.
        let mut result = [[E::Fr::zero(); S]; S];
        mds_matrix
            .chunks_exact(S)
            .zip(result.iter_mut())
            .for_each(|(values, row)| *row = values.try_into().expect("row in const"));

        return result;
    }
}

// Computes GCD of an element. It basically computes the inverse of alpha in the given finite field.
// Computes the modular inverse of `n` (alpha) modulo `p - 1`, where `p`
// is the characteristic of `E::Fr`: returns `y` with `n * y == 1 (mod p-1)`
// as `N` little-endian 64-bit limbs, or `None` if the Bezout coefficient
// cannot be converted back to a `BigUint`.
//
// Panics if gcd(n, p - 1) != 1, i.e. when no inverse exists.
pub fn compute_gcd<E: Engine, const N: usize>(n: u64) -> Option<[u64; N]> {
    let n_big = BigUint::from(n);

    // Rebuild p - 1 from the little-endian limbs of the field modulus
    // (iterate most-significant limb first, shifting in 64 bits at a time).
    let mut p_minus_one_biguint = BigUint::from(0u64);
    for limb in E::Fr::char().as_ref().iter().rev() {
        p_minus_one_biguint <<= 64;
        p_minus_one_biguint += BigUint::from(*limb);
    }
    p_minus_one_biguint -= BigUint::one();

    let alpha_signed = BigInt::from(n_big);
    let p_minus_one_signed = BigInt::from(p_minus_one_biguint);

    // Extended Euclid: x*(p-1) + y*alpha == gcd, so `y` is alpha's inverse
    // modulo p-1 when the gcd is 1.
    let ExtendedGcd { gcd, x: _, mut y, .. } = p_minus_one_signed.extended_gcd(&alpha_signed);
    assert!(gcd.is_one());

    // Normalise the Bezout coefficient into [0, p-1).
    if y < BigInt::zero() {
        y += p_minus_one_signed;
    }

    y.to_biguint().map(biguint_to_u64_array::<N>)
}

// Converts a `BigUint` into `N` little-endian 64-bit limbs.
//
// Panics if the value needs more than `N` limbs.
pub(crate) fn biguint_to_u64_array<const N: usize>(mut v: BigUint) -> [u64; N] {
    // 2^64, used to peel off one limb at a time.
    let m: BigUint = BigUint::from(1u64) << 64;
    let mut ret = [0; N];
    for limb in ret.iter_mut() {
        *limb = (&v % &m).to_u64().unwrap();
        v >>= 64;
    }
    assert!(v.is_zero());
    ret
}
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0

use crate::error::WalletError;
use crate::mock::MemWalletStore;
use crate::{Wallet, WalletAccount, WalletResult, WalletStore};
use anyhow::{format_err, Result};
use starcoin_crypto::ed25519::{Ed25519PrivateKey, Ed25519PublicKey};
use starcoin_crypto::keygen::KeyGen;
use starcoin_types::transaction::helpers::TransactionSigner;
use starcoin_types::{
    account_address::{self, AccountAddress},
    transaction::{RawUserTransaction, SignedUserTransaction},
};
use std::convert::TryFrom;
use std::time::Duration;

type KeyPair = starcoin_crypto::test_utils::KeyPair<Ed25519PrivateKey, Ed25519PublicKey>;

/// Save raw key, ignore password, just for test.
pub struct KeyPairWallet<S>
where
    S: WalletStore,
{
    store: S,
}

impl KeyPairWallet<MemWalletStore> {
    /// Creates an in-memory wallet backed by `MemWalletStore`.
    pub fn new() -> Result<Self> {
        Self::new_with_store(MemWalletStore::new())
    }
}

impl<S> KeyPairWallet<S>
where
    S: WalletStore,
{
    /// Wraps `store` in a wallet, creating a first (default) account with
    /// an empty password if the store holds no accounts yet.
    pub fn new_with_store(store: S) -> Result<Self> {
        let wallet = Self { store };
        if wallet.get_accounts()?.is_empty() {
            wallet.create_account("")?;
        }
        Ok(wallet)
    }

    /// Persists the account metadata and its raw private-key bytes.
    fn save_account(&self, account: WalletAccount, key_pair: KeyPair) -> WalletResult<()> {
        let address = account.address;
        self.store.save_account(account)?;
        self.store.save_to_account(
            &address,
            KEY_NAME_PRIVATE_KEY.to_string(),
            key_pair.private_key.to_bytes().to_vec(),
        )?;
        Ok(())
    }

    /// Rebuilds the key pair from the private-key bytes stored for `address`.
    fn get_key_pair(&self, address: &AccountAddress) -> WalletResult<KeyPair> {
        let private_key = self.store.get_from_account(address, KEY_NAME_PRIVATE_KEY)?;
        if private_key.is_none() {
            return Err(WalletError::StoreError(format_err!(
                "canot find private key by address: {}",
                address
            )));
        }
        let private_key = private_key.unwrap();
        let private_key = Ed25519PrivateKey::try_from(private_key.as_slice()).map_err(|_| {
            WalletError::StoreError(format_err!(
                "cannot decode private key from underline bytes"
            ))
        })?;
        Ok(KeyPair::from(private_key))
    }
}

// Store key under which an account's raw private-key bytes are kept.
const KEY_NAME_PRIVATE_KEY: &str = "private_key";

impl<S> Wallet for KeyPairWallet<S>
where
    S: WalletStore,
{
    /// Generates a fresh key pair; the password is ignored (test wallet).
    /// The very first account created becomes the default.
    fn create_account(&self, _password: &str) -> WalletResult<WalletAccount> {
        let mut key_gen = KeyGen::from_os_rng();
        let (private_key, public_key) = key_gen.generate_keypair();
        //TODO remove keypair dependency.
        let key_pair = KeyPair {
            private_key,
            public_key,
        };
        let address = account_address::from_public_key(&key_pair.public_key);
        //first account is default.
        let is_default = self.get_accounts()?.is_empty();
        let account = WalletAccount::new(address, key_pair.public_key.clone(), is_default);
        self.save_account(account.clone(), key_pair)?;
        Ok(account)
    }

    fn get_account(&self, address: &AccountAddress) -> WalletResult<Option<WalletAccount>> {
        Ok(self.store.get_account(address)?)
    }

    /// Imports an externally supplied private key; the imported account is
    /// never marked as the default.
    fn import_account(
        &self,
        address: AccountAddress,
        private_key: Vec<u8>,
        _password: &str,
    ) -> WalletResult<WalletAccount> {
        let private_key = Ed25519PrivateKey::try_from(private_key.as_slice())
            .map_err(|_e| WalletError::InvalidPrivateKey)?;
        let key_pair = KeyPair::from(private_key);
        let account = WalletAccount::new(address, key_pair.public_key.clone(), false);
        self.save_account(account.clone(), key_pair)?;
        Ok(account)
    }

    /// Returns the raw private-key bytes for `address` (password ignored).
    fn export_account(&self, address: &AccountAddress, _password: &str) -> WalletResult<Vec<u8>> {
        self.get_key_pair(address)
            .map(|kp| kp.private_key.to_bytes().to_vec())
    }

    fn contains(&self, address: &AccountAddress) -> WalletResult<bool> {
        Ok(self.store.get_account(address)?.map(|_| true).is_some())
    }

    fn unlock_account(
        &self,
        _address: AccountAddress,
        _password: &str,
        _duration: Duration,
    ) -> WalletResult<()> {
        //do nothing
        Ok(())
    }

    fn lock_account(&self, _address: AccountAddress) -> WalletResult<()> {
        //do nothing
        Ok(())
    }

    /// Signs `raw_txn` with the stored key of `signer_address`.
    fn sign_txn(
        &self,
        raw_txn: RawUserTransaction,
        signer_address: AccountAddress,
    ) -> WalletResult<SignedUserTransaction> {
        if !self.contains(&signer_address)? {
            return Err(WalletError::AccountNotExist(signer_address));
        }
        let key_pair = self.get_key_pair(&signer_address)?;
        key_pair
            .sign_txn(raw_txn)
            .map_err(WalletError::TransactionSignError)
    }

    fn get_default_account(&self) -> WalletResult<Option<WalletAccount>> {
        Ok(self
            .store
            .get_accounts()?
            .iter()
            .find(|account| account.is_default)
            .cloned())
    }

    fn get_accounts(&self) -> WalletResult<Vec<WalletAccount>> {
        Ok(self.store.get_accounts()?)
    }

    /// Makes `address` the default account, clearing the previous default.
    fn set_default(&self, address: &AccountAddress) -> WalletResult<()> {
        let mut target = self
            .get_account(address)?
            .ok_or(WalletError::AccountNotExist(*address))?;
        let default = self.get_default_account()?;
        if let Some(mut default) = default {
            if &default.address == address {
                // Already the default; nothing to do.
                return Ok(());
            }
            default.is_default = false;
            self.store.save_account(default)?;
        }
        target.is_default = true;
        self.store.save_account(target)?;
        Ok(())
    }

    /// Removes a non-default account; removing the default is an error.
    /// Removing an unknown address is a silent no-op.
    fn remove_account(&self, address: &AccountAddress) -> WalletResult<()> {
        let account = self.get_account(address)?;
        if let Some(account) = account {
            if account.is_default {
                return Err(WalletError::RemoveDefaultAccountError(*address));
            }
            self.store.remove_account(address)?;
        }
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_wallet() -> Result<()> {
        let wallet = KeyPairWallet::new()?;
        let account = wallet.get_default_account()?;
        assert!(account.is_some());
        let account = account.unwrap();
        let raw_txn = RawUserTransaction::mock_by_sender(account.address);
        let signer = raw_txn.sender();
        let _txn = wallet.sign_txn(raw_txn, signer)?;
        Ok(())
    }
}
#![crate_name = "newteeossgxrt"] #![crate_type = "staticlib"] #![cfg_attr(not(target_env = "sgx"), no_std)] #![cfg_attr(target_env = "sgx", feature(rustc_private))] extern crate sgx_types; // #[cfg(not(target_env = "sgx"))] // #[macro_use] // extern crate sgx_tstd as std; use sgx_types::*; // use std::string::String; // use std::vec::Vec; // use std::io::{self, Write}; // use std::slice; extern crate alloc; mod elfloader; mod linux_abi; mod sgx_rt; mod sgx_cfg; mod sgx_hal; use sgx_hal::EDGE_MEM_BASE; #[feature(asm)] #[no_mangle] pub extern "C" fn rt_main(sharemem: *mut u8, memsz: usize) -> sgx_status_t{ // load U-mode program let entry; unsafe { let edge_mem=&mut EDGE_MEM_BASE; edge_mem.buffer=unsafe { core::slice::from_raw_parts(sharemem, memsz) }; edge_mem.len=memsz; let elf_data= edge_mem.read_buffer(); let elf = elf_loader::ElfFile::load(&elf_data); let entry = elf.entry() as usize; let sp=elf.prepare_libc_args(); unsafe{ asm!( "mov rsp, stackp":"r{stackp}"(sp):::"intel", "mov rbp, framep":"r{framep}"(sp):::"intel", //@? asm call an the main address "call usr_main":"r{usr_main}"(entry):::"intel", ) }; } debug!("user bin returned?") linux_abi::syscall::process::SYSCALL_EXIT(0); sgx_status_t::SGX_SUCCESS }
use crate::bytecode::*;
use crate::heap::*;
use crate::object::*;
use crate::vtable::*;
use crate::*;

/// Heap-allocated function object: either a native (Rust) function whose
/// pointer lives in `native_code`, or a bytecode function backed by a
/// `CodeBlock`.
#[repr(C)]
pub struct Function {
    header: Header,
    pub(crate) vtable: &'static VTable,
    pub(crate) code_block: Option<Ref<CodeBlock>>,
    // True when `native_code` holds the callable entry point.
    pub native: bool,
    // Raw `extern "C" fn` pointer stored as usize; only meaningful when
    // `native` is true.
    pub native_code: usize,
    pub env: Option<Ref<Array>>,
    pub name: Ref<WaffleString>,
    pub prototype: value::Value,
    pub module: Option<Ref<Module>>,
}

/// Property lookup used by `FUNCTION_VTBL`: resolves the `constructor`
/// key to the function itself and `prototype` to its prototype; any other
/// key reads as undefined.
fn lookup_fn(vm: &VM, this: Ref<Obj>, key: value::Value) -> WaffleResult {
    if key == vm.constructor {
        return WaffleResult::okay(value::Value::from(this));
    } else if key == vm.prototype {
        return WaffleResult::okay(value::Value::from(this.cast::<Function>().prototype));
    } else {
        WaffleResult::okay(value::Value::undefined())
    }
}

impl Function {
    /// Allocates a native function wrapping the given `extern "C"` pointer.
    /// The prototype starts out undefined.
    pub fn new_native(
        heap: &mut Heap,
        fptr: extern "C" fn(&mut crate::interpreter::callframe::CallFrame) -> crate::WaffleResult,
        name: &str,
    ) -> Ref<Self> {
        let mem = heap.allocate(std::mem::size_of::<Self>());
        unsafe {
            // Initialise the freshly allocated cell in place.
            mem.to_mut_ptr::<Self>().write(Self {
                header: Header::new(),
                vtable: &FUNCTION_VTBL,
                code_block: None,
                module: None,
                env: None,
                native: true,
                prototype: value::Value::undefined(),
                name: WaffleString::new(heap, name),
                native_code: fptr as _,
            });
        }
        Ref {
            ptr: std::ptr::NonNull::new(mem.to_mut_ptr()).unwrap(),
        }
    }

    /// Allocates a bytecode function for `cb`, with a fresh regular object
    /// as its prototype.
    pub fn new(heap: &mut Heap, cb: Ref<CodeBlock>, name: &str) -> Ref<Self> {
        let mem = heap.allocate(std::mem::size_of::<Self>());
        unsafe {
            mem.to_mut_ptr::<Self>().write(Self {
                header: Header::new(),
                vtable: &FUNCTION_VTBL,
                code_block: Some(cb),
                env: None,
                native: false,
                module: None,
                native_code: 0,
                prototype: value::Value::from(
                    RegularObj::new(heap, value::Value::undefined()).cast(),
                ),
                name: WaffleString::new(heap, name),
            });
        }
        Ref {
            ptr: std::ptr::NonNull::new(mem.to_mut_ptr()).unwrap(),
        }
    }

    /// Pushes a call frame and invokes the function: directly for native
    /// functions, through the JIT-resolved entry point for bytecode ones.
    pub fn execute(&self, this: value::Value, args: &[value::Value]) -> WaffleResult {
        // Register count comes from the code block (0 for natives).
        let regc = if let Some(cb) = self.code_block {
            cb.num_vars
        } else {
            0
        };
        let callee = Ref {
            ptr: std::ptr::NonNull::new(self as *const Self as *mut Self).unwrap(),
        };
        let callee = value::Value::from(callee.cast());
        let vm = get_vm();
        let cf = vm.push_frame(args, regc);
        cf.this = this;
        cf.callee = callee;
        cf.passed_argc = args.len() as _;
        cf.code_block = self.code_block;
        if self.native {
            // Reconstruct the fn pointer stored in `native_code`.
            let f: extern "C" fn(
                &mut crate::interpreter::callframe::CallFrame,
            ) -> crate::WaffleResult = unsafe { std::mem::transmute(self.native_code) };
            let res = f(cf);
            vm.pop_frame();
            res
        } else {
            if let Some((fun, _argc, _vars, _cb)) =
                jit::operations::get_executable_address_for(callee)
            {
                //vm.call_stack.push(cf);
                let result = fun(cf);
                vm.pop_frame();
                return result;
            } else {
                // No executable address available for this callee yet.
                todo!()
            }
        }
    }
}

/// Shared vtable for all function objects; only property lookup is wired
/// up, every other hook is absent.
pub static FUNCTION_VTBL: VTable = VTable {
    element_size: 0,
    instance_size: std::mem::size_of::<Function>(),
    parent: None,
    lookup_fn: Some(lookup_fn),
    index_fn: None,
    calc_size_fn: None,
    apply_fn: None,
    destroy_fn: None,
    trace_fn: None,
    set_fn: None,
    set_index_fn: None,
};

/// GC trace hook: reports every heap reference held by a `Function`
/// (name, environment, code block, prototype cell, module).
fn trace(this: Ref<Obj>, trace: &mut dyn FnMut(*const Ref<Obj>)) {
    let this = this.cast::<Function>();
    trace(unsafe { std::mem::transmute(&this.name) });
    if let Some(e) = &this.env {
        trace(unsafe { std::mem::transmute(e) });
    }
    if let Some(cb) = &this.code_block {
        trace(unsafe { std::mem::transmute(cb) });
    }
    if this.prototype.is_cell() {
        trace(this.prototype.as_cell_ref());
    }
    if let Some(m) = &this.module {
        trace(unsafe { std::mem::transmute(m) });
    }
}
/// Installs the global clu logger via `set_logger`.
///
/// Arm-by-arm mapping (each arm simply constructs a logger and passes it
/// to `$crate::set_logger`):
/// * `(one)` -> `LogDefaultOne::default()`
/// * `(one, e)` -> `LogDefaultOne::new(e)`
/// * `(union, a, b)` -> `log_union::LogUnionConst::union(a, b)`
/// * `()` -> `LogDefault::default()`
/// * `(e)` -> `LogDefaultOne::new(e)`
/// * `(e, e2)` -> `LogDefault::new(e, e2)`
#[macro_export]
macro_rules! init_clulog {
    (one) => {
        $crate::set_logger($crate::LogDefaultOne::default());
    };
    (one, $e:expr) => {
        $crate::set_logger($crate::LogDefaultOne::new($e));
    };
    (union, $a:expr, $b:expr) => {
        $crate::set_logger($crate::log_union::LogUnionConst::union($a, $b));
        //$crate::set_logger($a.union($b));
    };
    () => {
        $crate::set_logger($crate::LogDefault::default());
    };
    ($e: expr) => {
        $crate::set_logger($crate::LogDefaultOne::new($e));
    };
    ($e: expr, $e2: expr) => {
        $crate::set_logger($crate::LogDefault::new($e, $e2));
    };
}
use frame_support::weights::{constants::RocksDbWeight as DbWeight, Weight};

/// Combines a fixed base weight with RocksDB read/write costs:
/// `base + reads(r) + writes(w)`, saturating on overflow.
fn base_with_db_ops(base: Weight, r: Weight, w: Weight) -> Weight {
    base.saturating_add(DbWeight::get().reads(r))
        .saturating_add(DbWeight::get().writes(w))
}

/// Hard-coded weights for this pallet's extrinsics.
impl crate::WeightInfo for () {
    fn create() -> Weight {
        // 1 storage read, 2 writes.
        base_with_db_ops(72_000_000 as Weight, 1 as Weight, 2 as Weight)
    }
    fn mutate() -> Weight {
        // 1 storage read, 1 write.
        base_with_db_ops(50_000_000 as Weight, 1 as Weight, 1 as Weight)
    }
    fn seal() -> Weight {
        // 1 storage read, 1 write.
        base_with_db_ops(46_000_000 as Weight, 1 as Weight, 1 as Weight)
    }
    fn transfer() -> Weight {
        // 1 storage read, 1 write.
        base_with_db_ops(47_000_000 as Weight, 1 as Weight, 1 as Weight)
    }
}
use itertools::Itertools; use std::collections::HashMap; use std::convert::TryInto; use std::fs::File; use std::io::prelude::*; #[derive(Copy, Clone, Debug)] enum Transformation { None, RotateOnce, RotateTwice, RotateThrice, Flip, FlipAndRotateOnce, FlipAndRotateTwice, FlipAndRotateThrice, } impl Transformation { fn all() -> [Transformation; 8] { [ Transformation::None, Transformation::RotateOnce, Transformation::RotateTwice, Transformation::RotateThrice, Transformation::Flip, Transformation::FlipAndRotateOnce, Transformation::FlipAndRotateTwice, Transformation::FlipAndRotateThrice, ] } } #[derive(Copy, Clone)] struct Tile { content: [[char; 10]; 10], } impl Tile { fn top_edge(&self) -> [char; 10] { self.content[0] } fn bottom_edge(&self) -> [char; 10] { self.content[9] } fn left_edge(&self) -> [char; 10] { self.content .iter() .map(|row| row[0]) .collect::<Vec<char>>() .try_into() .unwrap() } fn right_edge(&self) -> [char; 10] { self.content .iter() .map(|row| row[9]) .collect::<Vec<char>>() .try_into() .unwrap() } fn flip(&self) -> Tile { Tile { content: self .content .iter() .map(|&row| row) .rev() .collect::<Vec<[char; 10]>>() .try_into() .unwrap(), } } fn rotate_right(&self) -> Tile { Tile { content: (0..10) .map(|n| { self.content .iter() .map(|row| row[n]) .collect::<Vec<char>>() .try_into() .unwrap() }) .rev() .collect::<Vec<[char; 10]>>() .try_into() .unwrap(), } } fn apply_transformation(&self, t: &Transformation) -> Tile { match t { Transformation::None => Tile { content: self.content, }, Transformation::RotateOnce => self.rotate_right(), Transformation::RotateTwice => self.rotate_right().rotate_right(), Transformation::RotateThrice => self.rotate_right().rotate_right().rotate_right(), Transformation::Flip => self.flip(), Transformation::FlipAndRotateOnce => self.flip().rotate_right(), Transformation::FlipAndRotateTwice => self.flip().rotate_right().rotate_right(), Transformation::FlipAndRotateThrice => { 
self.flip().rotate_right().rotate_right().rotate_right() } } } fn top_matches(&self, t: Tile) -> Vec<Transformation> { let top = self.top_edge(); Transformation::all() .iter() .filter(|&trans| t.apply_transformation(trans).bottom_edge() == top) .map(|&trans| trans) .collect() } fn bottom_matches(&self, t: Tile) -> Vec<Transformation> { let bottom = self.bottom_edge(); Transformation::all() .iter() .filter(|&trans| t.apply_transformation(trans).top_edge() == bottom) .map(|&trans| trans) .collect() } fn left_matches(&self, t: Tile) -> Vec<Transformation> { let left = self.left_edge(); Transformation::all() .iter() .filter(|&trans| t.apply_transformation(trans).right_edge() == left) .map(|&trans| trans) .collect() } fn right_matches(&self, t: Tile) -> Vec<Transformation> { let right = self.right_edge(); Transformation::all() .iter() .filter(|&trans| t.apply_transformation(trans).left_edge() == right) .map(|&trans| trans) .collect() } } struct AllTiles { tiles: HashMap<usize, Tile>, } impl AllTiles { fn match_info(&self, i: usize) -> [Vec<(usize, Transformation)>; 4] { let tile_to_test = self.tiles.get(&i).unwrap(); let top_matches = self .tiles .iter() .filter(|(&idx, _)| idx != i) .flat_map(|(&idx, &tile)| { tile_to_test .top_matches(tile) .into_iter() .map(move |trans| (idx, trans)) }) .collect(); let bottom_matches = self .tiles .iter() .filter(|(&idx, _)| idx != i) .flat_map(|(&idx, &tile)| { tile_to_test .bottom_matches(tile) .into_iter() .map(move |trans| (idx, trans)) }) .collect(); let left_matches = self .tiles .iter() .filter(|(&idx, _)| idx != i) .flat_map(|(&idx, &tile)| { tile_to_test .left_matches(tile) .into_iter() .map(move |trans| (idx, trans)) }) .collect(); let right_matches = self .tiles .iter() .filter(|(&idx, _)| idx != i) .flat_map(|(&idx, &tile)| { tile_to_test .right_matches(tile) .into_iter() .map(move |trans| (idx, trans)) }) .collect(); [top_matches, bottom_matches, left_matches, right_matches] } } fn read_file() -> AllTiles { let mut file 
= File::open("./input/input20.txt").unwrap(); let mut contents = String::new(); file.read_to_string(&mut contents).unwrap(); let parts: Vec<String> = contents .lines() .group_by(|s| s.is_empty()) .into_iter() .filter(|(k, _g)| !k) .map(|(_k, g)| g.collect::<Vec<&str>>().join("\n")) .collect(); let mut tiles = HashMap::new(); for part in parts { let (id, tile) = parse_tile(part); tiles.insert(id, tile); } AllTiles { tiles } } fn parse_tile(s: String) -> (usize, Tile) { let lines: Vec<&str> = s.lines().collect(); let id = lines[0][5..9].parse().unwrap(); let tile = Tile { content: lines[1..] .iter() .map(|line| line.chars().collect::<Vec<char>>().try_into().unwrap()) .collect::<Vec<[char; 10]>>() .try_into() .unwrap(), }; (id, tile) } fn solve_part_1(tiles: AllTiles) -> usize { // we can try to get a printout of all possible matches: // this works and gives a very pleasing result - there are exactly 48 cases where // no match has been found for a side, 12 for each side. This instantly tells us what the // corner tiles are - they are the ones with exactly 2 empty sets in their match info! // (In fact, there is only one possibility for each side, which makes reconstructing the // full pattern simple. Will do, and thereby attempt to solve part 2, when I have more time!) let mut corners = vec![]; for &idx in tiles.tiles.keys() { let info = tiles.match_info(idx); // println!("match information about tile #{}: {:?}", idx, info); let is_corner = info.iter().filter(|v| v.len() == 0).count() == 2; if is_corner { corners.push(idx); } } if corners.len() == 4 { corners.iter().product() } else { panic!("not 4 corners!"); } } pub fn part_1() -> usize { let tiles = read_file(); solve_part_1(tiles) }
use crate::grid::dimension::{Dimension, Estimate}; /// A constant dimension. #[derive(Debug, Clone)] pub struct StaticDimension { width: DimensionValue, height: DimensionValue, } impl StaticDimension { /// Creates a constant dimension. pub fn new(width: DimensionValue, height: DimensionValue) -> Self { Self { width, height } } } impl From<StaticDimension> for (DimensionValue, DimensionValue) { fn from(value: StaticDimension) -> Self { (value.width, value.height) } } impl Dimension for StaticDimension { fn get_width(&self, column: usize) -> usize { self.width.get(column) } fn get_height(&self, row: usize) -> usize { self.height.get(row) } } impl<R, C> Estimate<R, C> for StaticDimension { fn estimate(&mut self, _: R, _: &C) {} } /// A dimension value. #[derive(Debug, Clone)] pub enum DimensionValue { /// Const width value. Exact(usize), /// A list of width values for columns. List(Vec<usize>), /// A list of width values for columns and a value for the rest. Partial(Vec<usize>, usize), } impl DimensionValue { /// Get a width by column. pub fn get(&self, col: usize) -> usize { match self { DimensionValue::Exact(val) => *val, DimensionValue::List(cols) => cols[col], DimensionValue::Partial(cols, val) => { if cols.len() > col { cols[col] } else { *val } } } } }
use super::*;
use cgmath::{Point3, Vector3, Matrix4};

// Alias so the assertions read as "subject under test".
type Subject<T> = IntersectionChecker<T>;

mod intersection {
    use super::*;

    /// Casts a ray from the origin along (1, `ray_y_direction`, 0) at a
    /// sphere of radius `sphere_radius` translated to (2, 0, 0), and
    /// returns the checker's intersection result.
    fn intersection(ray_y_direction: f64, sphere_radius: f64) -> Option<Intersection> {
        let origin = Point3::new(0.0, 0.0, 0.0);
        let direction = Vector3::new(1.0, ray_y_direction, 0.0);
        let ray = Ray::new(origin, direction);

        let scale = Matrix4::from_scale(sphere_radius);
        let translation = Matrix4::from_translation(Vector3::new(2.0, 0.0, 0.0));
        let transform = Transform::new(translation * scale);

        Subject::intersection(&ray, &transform)
    }

    #[test]
    fn it_returns_none_if_the_ray_does_not_intersect_the_sphere() {
        // A steep enough ray misses the unit sphere entirely.
        assert_eq!(intersection(0.6, 1.0), None);
    }

    #[test]
    fn it_returns_the_nearest_intersection_if_the_ray_intersects_the_sphere() {
        let intersection = intersection(0.5, 1.0)
            .expect("The ray should have intersected the front of the sphere.");

        assert_eq!(intersection.ray_t, 1.2);
        assert_eq!(intersection.normal, Vector3::new(-0.8, 0.6, 0.0));
    }

    #[test]
    fn it_ignores_intersections_at_the_origin_of_the_ray() {
        // Radius 2 puts the ray origin exactly on the sphere surface; the
        // reported hit must be the far side, not t = 0.
        let intersection = intersection(0.5, 2.0)
            .expect("The ray should have intersected the back of the sphere.");

        assert_eq!(intersection.ray_t, 3.2);
    }

    #[test]
    fn it_ignores_intersections_epsilon_along_the_ray_to_counter_floating_precision_errors() {
        // Radius just under 2: the near hit is within epsilon of t = 0 and
        // must be skipped, leaving a hit close to t = 3.2.
        let intersection = intersection(0.5, 1.99999999999)
            .expect("The ray should have intersected the back of the sphere.");

        assert_eq!(intersection.ray_t > 3.1, true);
        assert_eq!(intersection.ray_t < 3.3, true);
    }
}
// This file is part of Bit.Country.
// Copyright (C) 2020-2021 Bit.Country.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Continuum pallet: manages "continuum spots" (map coordinates), expressions
// of interest (EOI), rotating auction slots, and a neighbour-driven
// Good Neighborhood Protocol (GNP) referendum before spots go to auction.

#![cfg_attr(not(feature = "std"), no_std)]
#![allow(clippy::unused_unit)]

use codec::{Decode, Encode};
use frame_support::{
    dispatch::DispatchResult,
    ensure,
    traits::{Get, Vec},
};
use frame_system::{self as system, ensure_root, ensure_signed};
use primitives::{continuum::Continuum, Balance, BitCountryId, CurrencyId, ItemId, SpotId};
#[cfg(feature = "std")]
use serde::{Deserialize, Serialize};
use sp_runtime::{
    traits::{AccountIdConversion, CheckedAdd, CheckedDiv, One, Zero},
    DispatchError, FixedPointNumber, ModuleId, RuntimeDebug,
};
use sp_std::vec;

use auction_manager::{Auction, AuctionType, ListingLevel};
use bc_primitives::{BitCountryStruct, BitCountryTrait};
use frame_support::traits::{Currency, LockableCurrency, ReservableCurrency};
use sp_arithmetic::Perbill;
// use crate::pallet::{Config, Pallet, ActiveAuctionSlots};
#[cfg(feature = "std")]
use frame_support::traits::GenesisBuild;

mod types;
mod vote;

pub use types::*;
pub use vote::*;

#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;

pub use pallet::*;

/// Lifecycle states of an auction slot as it moves through the pipeline:
/// EOI -> active slot -> GNP referendum -> confirmed.
#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)]
pub enum ContinuumAuctionSlotStatus {
    /// Accept participation
    AcceptParticipates,
    /// Progressing at Good Neighborhood Protocol
    GNPStarted,
    /// Auction confirmed
    GNPConfirmed,
}

/// Information of EOI on Continuum spot
#[cfg_attr(feature = "std", derive(PartialEq, Eq))]
#[derive(Encode, Decode, Clone, RuntimeDebug)]
pub struct SpotEOI<AccountId> {
    // Spot the interest is registered against.
    spot_id: SpotId,
    // Accounts that registered interest for this spot.
    participants: Vec<AccountId>,
}

/// Information of an active auction slot
#[cfg_attr(feature = "std", derive(PartialEq, Eq))]
#[derive(Encode, Decode, Clone, RuntimeDebug)]
pub struct AuctionSlot<BlockNumber, AccountId> {
    // Spot being auctioned.
    spot_id: SpotId,
    // Remaining (non-rejected) bidders for this slot.
    participants: Vec<AccountId>,
    // Session (block number) at which this slot becomes/became active.
    active_session_index: BlockNumber,
    // Current stage in the EOI -> GNP -> confirmed pipeline.
    status: ContinuumAuctionSlotStatus,
}

#[frame_support::pallet]
pub mod pallet {
    use super::*;
    use frame_support::traits::ExistenceRequirement;
    use frame_support::{dispatch::DispatchResultWithPostInfo, pallet_prelude::*};
    use frame_system::pallet_prelude::OriginFor;

    // Balance type of the configured currency, as seen by this pallet.
    pub(crate) type BalanceOf<T> =
        <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance;

    /// Configure the pallet by specifying the parameters and types on which it depends.
    #[pallet::config]
    pub trait Config: frame_system::Config {
        /// Because this pallet emits events, it depends on the runtime's definition of an event.
        type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
        /// New Slot Duration
        /// How long the new auction slot will be released. If set to zero, no new auctions are generated
        type SessionDuration: Get<Self::BlockNumber>;
        /// Auction Slot Chilling Duration
        /// How long the participates in the New Auction Slots will get confirmed by neighbours
        type SpotAuctionChillingDuration: Get<Self::BlockNumber>;
        /// Emergency shutdown origin which allow cancellation in an emergency
        type EmergencyOrigin: EnsureOrigin<Self::Origin>;
        /// Auction Handler
        type AuctionHandler: Auction<Self::AccountId, Self::BlockNumber>;
        /// Auction duration
        type AuctionDuration: Get<Self::BlockNumber>;
        /// Continuum Treasury
        type ContinuumTreasury: Get<ModuleId>;
        /// Currency
        type Currency: ReservableCurrency<Self::AccountId>
            + LockableCurrency<Self::AccountId, Moment = Self::BlockNumber>;
        /// Source of Bit Country Info
        type BitCountryInfoSource: BitCountryTrait<Self::AccountId>;
    }

    /// Genesis parameters: initial session index, auction rate, map bounds and
    /// the buy-now spot price.
    #[pallet::genesis_config]
    pub struct GenesisConfig<T: Config> {
        pub initial_active_session: T::BlockNumber,
        pub initial_auction_rate: u8,
        pub initial_max_bound: (i32, i32),
        pub spot_price: BalanceOf<T>,
    }

    #[cfg(feature = "std")]
    impl<T: Config> Default for GenesisConfig<T> {
        fn default() -> Self {
            GenesisConfig {
                initial_active_session: Default::default(),
                initial_auction_rate: Default::default(),
                initial_max_bound: Default::default(),
                spot_price: Default::default(),
            }
        }
    }

    #[pallet::genesis_build]
    impl<T: Config> GenesisBuild<T> for GenesisConfig<T> {
        // Seed storage: set the current session/rate/bounds/price and create
        // empty EOI, GNP and active-auction slot lists for the first session.
        fn build(&self) {
            CurrentIndex::<T>::set(self.initial_active_session);
            MaxDesiredAuctionSlot::<T>::set(self.initial_auction_rate);
            let eoi_slots: Vec<SpotEOI<T::AccountId>> = vec![];
            let gnp_slots: Vec<AuctionSlot<T::BlockNumber, T::AccountId>> = vec![];
            let active_auction_slots: Vec<AuctionSlot<T::BlockNumber, T::AccountId>> = vec![];
            EOISlots::<T>::insert(self.initial_active_session, eoi_slots);
            GNPSlots::<T>::insert(self.initial_active_session, gnp_slots);
            ActiveAuctionSlots::<T>::insert(self.initial_active_session, active_auction_slots);
            MaxBound::<T>::set(self.initial_max_bound);
            SpotPrice::<T>::set(self.spot_price);
        }
    }

    #[pallet::pallet]
    pub struct Pallet<T>(PhantomData<T>);

    #[pallet::hooks]
    impl<T: Config> Hooks<T::BlockNumber> for Pallet<T> {
        /// Initialization
        // Every `SessionDuration` blocks, rotate the auction-slot pipeline.
        // A zero duration disables rotation entirely.
        fn on_initialize(now: T::BlockNumber) -> Weight {
            let auction_duration: T::BlockNumber = T::SessionDuration::get();
            if !auction_duration.is_zero() && (now % auction_duration).is_zero() {
                Self::rotate_auction_slots(now);
                // NOTE(review): charges the full block weight on rotation
                // blocks — presumably intentional as an upper bound; confirm.
                T::BlockWeights::get().max_block
            } else {
                0
            }
        }
    }

    /// Get current active session
    #[pallet::storage]
    #[pallet::getter(fn current_session)]
    pub type CurrentIndex<T: Config> = StorageValue<_, T::BlockNumber, ValueQuery>;

    /// Continuum Spot
    #[pallet::storage]
    #[pallet::getter(fn get_continuum_spot)]
    pub type ContinuumSpots<T: Config> =
        StorageMap<_, Twox64Concat, SpotId, ContinuumSpot, OptionQuery>;

    /// Continuum Spot Position
    #[pallet::storage]
    #[pallet::getter(fn get_continuum_position)]
    pub type ContinuumCoordinates<T: Config> =
        StorageMap<_, Twox64Concat, (i32, i32), SpotId, OptionQuery>;

    /// Active Auction Slots of current session index that accepting participants
    #[pallet::storage]
    #[pallet::getter(fn get_active_auction_slots)]
    pub type ActiveAuctionSlots<T: Config> = StorageMap<
        _,
        Twox64Concat,
        T::BlockNumber,
        Vec<AuctionSlot<T::BlockNumber, T::AccountId>>,
        OptionQuery,
    >;

    /// Active Auction Slots that is currently conducting GN Protocol
    #[pallet::storage]
    #[pallet::getter(fn get_active_gnp_slots)]
    pub type GNPSlots<T: Config> = StorageMap<
        _,
        Twox64Concat,
        T::BlockNumber,
        Vec<AuctionSlot<T::BlockNumber, T::AccountId>>,
        OptionQuery,
    >;

    /// Active set of EOI on Continuum Spot
    #[pallet::storage]
    #[pallet::getter(fn get_eoi_set)]
    pub type EOISlots<T: Config> =
        StorageMap<_, Twox64Concat, T::BlockNumber, Vec<SpotEOI<T::AccountId>>, ValueQuery>;

    /// Information of Continuum Spot Referendum
    #[pallet::storage]
    #[pallet::getter(fn get_continuum_referendum)]
    pub type ReferendumInfoOf<T: Config> = StorageMap<
        _,
        Twox64Concat,
        SpotId,
        ReferendumInfo<T::AccountId, T::BlockNumber>,
        OptionQuery,
    >;

    /// All votes of a particular voter
    #[pallet::storage]
    #[pallet::getter(fn get_voting_info)]
    pub type VotingOf<T: Config> =
        StorageMap<_, Twox64Concat, T::AccountId, Voting<T::AccountId>, ValueQuery>;

    /// Get max bound
    #[pallet::storage]
    #[pallet::getter(fn get_max_bound)]
    pub type MaxBound<T: Config> = StorageValue<_, (i32, i32), ValueQuery>;

    /// Record of all spot ids voting that in an emergency shut down
    #[pallet::storage]
    #[pallet::getter(fn get_cancellations)]
    pub type Cancellations<T: Config> = StorageMap<_, Twox64Concat, SpotId, bool, ValueQuery>;

    /// Maximum desired auction slots available per term
    #[pallet::storage]
    #[pallet::getter(fn get_max_desired_slot)]
    pub type MaxDesiredAuctionSlot<T: Config> = StorageValue<_, u8, ValueQuery>;

    // Monotonic counter used to mint new spot ids.
    #[pallet::storage]
    #[pallet::getter(fn next_spot_id)]
    pub type NextContinuumSpotId<T: Config> = StorageValue<_, SpotId, ValueQuery>;

    // Governance-controlled switch for the fixed-price buy-now path.
    #[pallet::storage]
    #[pallet::getter(fn allow_buy_now)]
    pub type AllowBuyNow<T: Config> = StorageValue<_, bool, ValueQuery>;

    // Fixed price charged by `buy_continuum_spot`.
    #[pallet::storage]
    #[pallet::getter(fn initial_spot_price)]
    pub type SpotPrice<T: Config> = StorageValue<_, BalanceOf<T>, ValueQuery>;

    #[pallet::event]
    #[pallet::generate_deposit(pub (crate) fn deposit_event)]
    pub enum Event<T: Config> {
        /// New express of interest
        NewExpressOfInterestAdded(T::AccountId, SpotId),
    }

    #[pallet::error]
    pub enum Error<T> {
        /// No Active Auction Slot
        NoActiveAuctionSlot,
        /// No Active GNP List
        NoActiveGNP,
        /// Can't add EOI to Slot
        FailedEOIToSlot,
        /// Only send EOI once
        EOIAlreadyExists,
        /// No Active Session
        NoActiveSession,
        /// No Active Referendum
        NoActiveReferendum,
        /// Referendum is invalid
        ReferendumIsInValid,
        /// Tally Overflow
        TallyOverflow,
        /// Already shutdown
        AlreadyShutdown,
        /// Spot Not Found
        SpotNotFound,
        /// No permission
        NoPermission,
        /// Spot Owned
        SpotIsNotAvailable,
        /// Spot is out of bound
        SpotIsOutOfBound,
        /// Continuum Spot is not found
        ContinuumSpotNotFound,
        /// Insufficient fund to buy
        InsufficientFund,
        /// Continuum Buynow is disable
        ContinuumBuyNowIsDisabled,
    }

    #[pallet::call]
    impl<T: Config> Pallet<T> {
        // Buy a spot at the fixed `SpotPrice` for a country the caller owns.
        // Requires the buy-now switch to be enabled; funds go to the treasury.
        #[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
        pub fn buy_continuum_spot(
            origin: OriginFor<T>,
            coordinate: (i32, i32),
            country_id: BitCountryId,
        ) -> DispatchResultWithPostInfo {
            let sender = ensure_signed(origin)?;
            ensure!(
                T::BitCountryInfoSource::check_ownership(&sender, &country_id),
                Error::<T>::NoPermission
            );
            ensure!(
                AllowBuyNow::<T>::get() == true,
                Error::<T>::ContinuumBuyNowIsDisabled
            );
            // Resolves (or lazily creates) the spot at `coordinate` and checks
            // it is still unowned.
            let spot_from_coordinates = ContinuumCoordinates::<T>::get(coordinate);
            let spot_id = Self::check_spot_ownership(spot_from_coordinates, coordinate)?;
            let continuum_price_spot = SpotPrice::<T>::get();

            let continuum_treasury = Self::account_id();

            //Define how many NUUM for continuum spot - default 1 NUUM - need to change to variable
            ensure!(
                T::Currency::free_balance(&sender) > continuum_price_spot,
                Error::<T>::InsufficientFund
            );
            T::Currency::transfer(
                &sender,
                &continuum_treasury,
                continuum_price_spot,
                ExistenceRequirement::KeepAlive,
            )?;
            Self::do_transfer_spot(spot_id, &continuum_treasury, &(sender, country_id))?;
            Ok(().into())
        }

        // Root-only toggle for the buy-now path above.
        #[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
        pub fn set_allow_buy_now(origin: OriginFor<T>, enable: bool) -> DispatchResultWithPostInfo {
            ensure_root(origin)?;
            AllowBuyNow::<T>::set(enable);
            Ok(().into())
        }

        // Register an expression of interest for a spot in the current session,
        // on behalf of a country the caller owns.
        #[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
        pub fn register_interest(
            origin: OriginFor<T>,
            country_id: BitCountryId,
            coordinate: (i32, i32),
        ) -> DispatchResultWithPostInfo {
            let sender = ensure_signed(origin)?;
            ensure!(
                T::BitCountryInfoSource::check_ownership(&sender, &country_id),
                Error::<T>::NoPermission
            );
            let spot_from_coordinates = ContinuumCoordinates::<T>::get(coordinate);
            let spot_id = Self::check_spot_ownership(spot_from_coordinates, coordinate)?;
            // Get current active session
            let current_active_session_id = CurrentIndex::<T>::get();
            if EOISlots::<T>::contains_key(current_active_session_id) {
                // Mutate current active EOI Slot session
                EOISlots::<T>::try_mutate(
                    current_active_session_id,
                    |spot_eoi| -> DispatchResult {
                        // Check if the interested Spot exists
                        let interested_spot_index: Option<usize> =
                            spot_eoi.iter().position(|x| x.spot_id == spot_id);
                        match interested_spot_index {
                            // Already got participants
                            Some(index) => {
                                // Works on existing eoi index
                                // NOTE(review): no duplicate check here — the
                                // same account can appear twice despite
                                // `EOIAlreadyExists`; confirm intended.
                                let interested_spot =
                                    spot_eoi.get_mut(index).ok_or("No Spot EOI exist")?;
                                interested_spot.participants.push(sender.clone());
                            }
                            // No participants - add one
                            None => {
                                // No spot found - first one in EOI
                                let mut new_list: Vec<T::AccountId> = Vec::new();
                                new_list.push(sender.clone());
                                let _spot_eoi = SpotEOI {
                                    spot_id,
                                    participants: new_list,
                                };
                                spot_eoi.push(_spot_eoi);
                            }
                        }
                        Ok(())
                    },
                )?;
            } else {
                // Never get to this logic but it's safe to handle it nicely.
                let mut eoi_slots: Vec<SpotEOI<T::AccountId>> = Vec::new();
                eoi_slots.push(SpotEOI {
                    spot_id,
                    participants: vec![sender.clone()],
                });
                EOISlots::<T>::insert(current_active_session_id, eoi_slots);
            }

            Self::deposit_event(Event::NewExpressOfInterestAdded(sender, spot_id));
            Ok(().into())
        }

        // Placeholder: intended to open a bidder-rejection vote via democracy.
        // NOTE(review): `ensure_root` result is bound but never checked, so
        // this call is effectively permissionless today — confirm before use.
        #[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
        pub fn enable_bidder_rejection_voting(
            origin: OriginFor<T>,
            spot_id: SpotId,
        ) -> DispatchResultWithPostInfo {
            let root = ensure_root(origin);
            //TODO Check if neighborhood
            //Enable democracy pallet
            //Propose bidder removal action on democracy
            Ok(().into())
        }

        // Root-only: update the coordinate bounds of the continuum map.
        #[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
        pub fn set_max_bounds(
            origin: OriginFor<T>,
            new_bound: (i32, i32),
        ) -> DispatchResultWithPostInfo {
            // Only execute by governance
            ensure_root(origin)?;
            MaxBound::<T>::set(new_bound);
            //TODO Emit event
            Ok(().into())
        }

        // Root-only: update how many auction slots are opened per session.
        #[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
        pub fn set_new_auction_rate(
            origin: OriginFor<T>,
            new_rate: u8,
        ) -> DispatchResultWithPostInfo {
            ensure_root(origin)?;
            MaxDesiredAuctionSlot::<T>::set(new_rate);
            //TODO Emit event
            Ok(().into())
        }

        // Cast a (rejection) vote in the GNP referendum for `id`.
        #[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
        pub fn vote(
            origin: OriginFor<T>,
            id: SpotId,
            reject: AccountVote<T::AccountId>,
        ) -> DispatchResultWithPostInfo {
            let sender = ensure_signed(origin)?;
            Self::try_vote(&sender, id, reject)?;
            Ok(().into())
        }

        // Emergency-cancel the referendum for a spot; only callable by the
        // configured emergency origin, and only once per spot.
        #[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
        pub fn emergency_shutdown(
            origin: OriginFor<T>,
            spot_id: SpotId,
        ) -> DispatchResultWithPostInfo {
            // Only some origins can execute this function
            T::EmergencyOrigin::ensure_origin(origin)?;

            ensure!(
                !Cancellations::<T>::contains_key(spot_id),
                Error::<T>::AlreadyShutdown
            );

            Cancellations::<T>::insert(spot_id, true);
            ReferendumInfoOf::<T>::remove(spot_id);

            Ok(().into())
        }
    }
}

impl<T: Config> Pallet<T> {
    // Treasury account derived from the pallet's `ModuleId`.
    fn account_id() -> T::AccountId {
        T::ContinuumTreasury::get().into_account()
    }

    //noinspection ALL
    // Session rotation driven by `on_initialize`: promote EOIs to auction
    // slots, finalize due referenda, move active slots into GNP, and advance
    // the current session index to `now`.
    fn rotate_auction_slots(now: T::BlockNumber) -> DispatchResult {
        // Get current active session
        let current_active_session_id = CurrentIndex::<T>::get();

        // Change status of all current active auction slots
        // Move EOI to Auction Slots
        Self::eoi_to_auction_slots(current_active_session_id, now)?;
        // Finalise due vote
        // NOTE(review): the Result of `finalize_vote` is discarded here —
        // a failure silently skips finalization; confirm intended.
        Self::finalize_vote(now);
        let mut active_auction_slots = <ActiveAuctionSlots<T>>::get(&current_active_session_id);

        match active_auction_slots {
            Some(s) => {
                // Move current auctions slot to start GN Protocol
                if s.len() > 0 {
                    let started_gnp_auction_slots: Vec<_> = s
                        .iter()
                        .map(|x| {
                            let mut t = x.clone();
                            t.status = ContinuumAuctionSlotStatus::GNPStarted;
                            t
                        })
                        .collect();
                    // Move active auction slots to GNP
                    GNPSlots::<T>::insert(now, started_gnp_auction_slots.clone());
                    // Start referedum
                    Self::start_gnp_protocol(started_gnp_auction_slots, now)?;
                }
                // TODO Emit event Auction slot start GNP
            }
            None => {}
        }
        // Remove the old active auction slots
        ActiveAuctionSlots::<T>::remove(&current_active_session_id);
        CurrentIndex::<T>::set(now);
        // TODO Emit event
        Ok(().into())
    }

    // Finalize referenda ending at `now`: drop bidders whose rejection tally
    // passed, mark the slot confirmed, and hand the spot to the auction
    // handler as a treasury-listed auction.
    fn finalize_vote(now: T::BlockNumber) -> DispatchResult {
        let recent_slots = GNPSlots::<T>::get(now).ok_or(Error::<T>::NoActiveReferendum)?;

        for mut recent_slot in recent_slots.into_iter() {
            let referendum_info: ReferendumStatus<T::AccountId, T::BlockNumber> =
                Self::referendum_status(recent_slot.spot_id)?;

            if referendum_info.end == now {
                // let tallies = referendum_info.tallies;
                // Collect every bidder whose rejection vote passed.
                let banned_list: Vec<T::AccountId> = referendum_info
                    .tallies
                    .into_iter()
                    .filter(|mut t| Self::check_approved(t) == true)
                    .map(|tally| tally.who)
                    .collect();

                for banned_account in banned_list {
                    // NOTE(review): `unwrap` assumes the banned account is in
                    // the participants list — panics otherwise; confirm.
                    let account_index = recent_slot
                        .participants
                        .iter()
                        .position(|x| *x == banned_account)
                        .unwrap();
                    recent_slot.participants.remove(account_index);
                    recent_slot.status = ContinuumAuctionSlotStatus::GNPConfirmed;
                }
                let treasury = Self::account_id();
                // From treasury spot
                T::AuctionHandler::create_auction(
                    AuctionType::Auction,
                    ItemId::Spot(recent_slot.spot_id, Default::default()),
                    Some(now + T::AuctionDuration::get()),
                    treasury,
                    Default::default(),
                    now,
                    ListingLevel::Global,
                )?;
                // TODO Emit event
            }
        }

        Ok(())
    }

    // Kick off a GNP referendum for each slot, ending one session after `end`.
    fn start_gnp_protocol(
        slots: Vec<AuctionSlot<T::BlockNumber, T::AccountId>>,
        end: T::BlockNumber,
    ) -> DispatchResult {
        for slot in slots {
            let end = end + T::SessionDuration::get();
            Self::start_referendum(end, slot.spot_id)?;
            // TODO Emit event
        }
        Ok(())
    }

    // Create the `Ongoing` referendum record for `spot_id`, seeding one tally
    // per occupied neighbouring coordinate.
    fn start_referendum(end: T::BlockNumber, spot_id: SpotId) -> Result<SpotId, DispatchError> {
        let spot = ContinuumSpots::<T>::get(spot_id).ok_or(Error::<T>::SpotNotFound)?;
        let neighbors = spot.find_neighbour();
        let mut available_neighbors: u8 = 0;
        for (x, y) in neighbors {
            match ContinuumCoordinates::<T>::get((x, y)) {
                Some(i) => {
                    available_neighbors = available_neighbors
                        .checked_add(One::one())
                        .ok_or("Overflow")?;
                }
                _ => (),
            }
        }
        let mut status: ReferendumStatus<T::AccountId, T::BlockNumber> = ReferendumStatus {
            end,
            spot_id,
            tallies: Default::default(),
        };
        for _i in 0..available_neighbors {
            // Each tally starts with one nay and a default (placeholder) `who`;
            // turnout is the total neighbour count.
            let initial_tally: ContinuumSpotTally<T::AccountId> = ContinuumSpotTally {
                nays: One::one(),
                who: Default::default(),
                turnout: available_neighbors,
            };
            status.tallies.push(initial_tally);
        }
        let item: ReferendumInfo<T::AccountId, T::BlockNumber> = ReferendumInfo::Ongoing(status);
        ReferendumInfoOf::<T>::insert(spot_id, item);
        // TODO Emit event
        Ok(spot_id)
    }

    // Take the top-N EOI spots (ranked by participant count) from
    // `active_session` and open them as auction slots for the next session.
    fn eoi_to_auction_slots(active_session: T::BlockNumber, now: T::BlockNumber) -> DispatchResult {
        // Get maximum desired slots
        let desired_slots = MaxDesiredAuctionSlot::<T>::get();
        let session_duration = T::SessionDuration::get();

        // Get active EOI and add the top N to new Auction Slots
        let mut current_eoi_slots: Vec<SpotEOI<T::AccountId>> = EOISlots::<T>::get(active_session);

        // NOTE(review): ascending sort + `take` selects the FEWEST-participant
        // spots; "highest ranked" below suggests descending was intended.
        current_eoi_slots.sort_by_key(|eoi_slot| eoi_slot.participants.len());
        // Get highest ranked slot
        let mut new_valid_auction_slot: Vec<AuctionSlot<T::BlockNumber, T::AccountId>> = Vec::new();
        let highest_ranked_sorted: Vec<SpotEOI<T::AccountId>> = current_eoi_slots
            .iter()
            .map(|x| x.clone())
            .take(desired_slots as usize)
            .collect::<Vec<SpotEOI<T::AccountId>>>();
        // Add highest ranked EOI to New Active Auction slot
        for (x, item) in highest_ranked_sorted.iter().enumerate() {
            let auction_slot = AuctionSlot {
                spot_id: item.spot_id,
                participants: item.participants.clone(),
                active_session_index: now + session_duration,
                status: ContinuumAuctionSlotStatus::AcceptParticipates,
            };
            new_valid_auction_slot.push(auction_slot);
        }

        ActiveAuctionSlots::<T>::insert(now, new_valid_auction_slot);
        // Remove EOISlot
        EOISlots::<T>::remove(active_session);
        let empty_eoi_spots: Vec<SpotEOI<T::AccountId>> = Vec::new();
        // Add new EOISlot for current session - ensure active session has entry
        EOISlots::<T>::insert(now, empty_eoi_spots);
        // TODO Emit event
        Ok(())
    }

    // Record a vote from `who` on `spot_id` and fold it into the matching
    // bidder tally. A second vote on the same spot is a silent no-op.
    fn try_vote(
        who: &T::AccountId,
        spot_id: SpotId,
        vote: AccountVote<T::AccountId>,
    ) -> DispatchResult {
        // TODO ensure is actual neighbor once bitcountry trait is completed
        let mut status = Self::referendum_status(spot_id)?;

        VotingOf::<T>::try_mutate(who, |mut voting| -> DispatchResult {
            let mut votes = &mut voting.votes;
            match votes.binary_search_by_key(&spot_id, |i| i.0) {
                // Already voted
                Ok(i) => {}
                Err(i) => {
                    // Haven't vote for this spot id
                    // Add votes under user
                    let new_vote: AccountVote<T::AccountId> = vote.clone();
                    let who = new_vote.vote_who();
                    votes.insert(i, (spot_id, vote.clone()));

                    let mut tallies = status.tallies.clone();
                    // Find existing tally of bidder
                    // NOTE(review): `tally` is a by-value copy, so `tally.add`
                    // mutates a clone and the updated tally is never written
                    // back to storage — confirm whether this is incomplete.
                    for mut tally in status.tallies {
                        // Existing vote
                        if tally.who == who.who {
                            tally.add(vote.clone()).ok_or(Error::<T>::TallyOverflow)?
                        } else {
                            //Create new vote
                        }
                    }
                }
            }
            Ok(())
        })
    }

    // Fetch the referendum for `spot_id`, requiring it to be `Ongoing`.
    fn referendum_status(
        spot_id: SpotId,
    ) -> Result<ReferendumStatus<T::AccountId, T::BlockNumber>, DispatchError> {
        let info = ReferendumInfoOf::<T>::get(spot_id).ok_or(Error::<T>::ReferendumIsInValid)?;
        Self::ensure_ongoing(info.into())
    }

    // Fetch the raw referendum record (ongoing or not) for `spot_id`.
    fn referendum_info(
        spot_id: SpotId,
    ) -> Result<ReferendumInfo<T::AccountId, T::BlockNumber>, DispatchError> {
        let info =
            ReferendumInfoOf::<T>::get(spot_id).ok_or(Error::<T>::ReferendumIsInValid.into());
        info
    }

    /// Ok if the given referendum is active, Err otherwise
    fn ensure_ongoing(
        r: ReferendumInfo<T::AccountId, T::BlockNumber>,
    ) -> Result<ReferendumStatus<T::AccountId, T::BlockNumber>, DispatchError> {
        match r {
            ReferendumInfo::Ongoing(s) => Ok(s),
            _ => Err(Error::<T>::ReferendumIsInValid.into()),
        }
    }

    // Stub — returns a hard-coded id; not wired to any real registration yet.
    fn do_register(who: &T::AccountId, spot_id: &SpotId) -> SpotId {
        return 5;
    }

    // Lookup helper: spot by id, or `SpotNotFound`.
    pub fn get_spot(spot_id: SpotId) -> Result<ContinuumSpot, DispatchError> {
        ContinuumSpots::<T>::get(spot_id).ok_or(Error::<T>::SpotNotFound.into())
    }

    // Thin wrapper over the `Continuum` trait's `transfer_spot`.
    pub fn do_transfer_spot(
        spot_id: SpotId,
        from: &T::AccountId,
        to: &(T::AccountId, BitCountryId),
    ) -> Result<SpotId, DispatchError> {
        Self::transfer_spot(spot_id, from, to)
    }

    // A rejection passes when nays exceed 51% of turnout.
    // NOTE(review): computed as (turnout / nays) * 100 — integer division,
    // and the ratio appears inverted relative to the stated intent; confirm.
    pub fn check_approved(tally: &ContinuumSpotTally<T::AccountId>) -> bool {
        let nay_ratio = tally.turnout.checked_div(tally.nays).unwrap_or(0);
        let nay_percent = nay_ratio.checked_mul(100).unwrap_or(0);
        nay_percent > 51
    }

    // Resolve a coordinate to an available spot id. Missing spots inside the
    // max bounds are created on the fly; existing spots must be unowned
    // (`country == 0`).
    pub fn check_spot_ownership(
        spot_id: Option<SpotId>,
        coordinate: (i32, i32),
    ) -> Result<SpotId, DispatchError> {
        match spot_id {
            None => {
                // Insert continuum spot as it's empty
                let max_bound = MaxBound::<T>::get();
                ensure!(
                    (coordinate.0 >= max_bound.0 && max_bound.1 >= coordinate.0)
                        && (coordinate.1 >= max_bound.0 && max_bound.1 >= coordinate.1),
                    Error::<T>::SpotIsOutOfBound
                );

                let spot = ContinuumSpot {
                    x: coordinate.0,
                    y: coordinate.1,
                    country: 0,
                };

                let next_spot_id =
                    NextContinuumSpotId::<T>::try_mutate(|id| -> Result<SpotId, DispatchError> {
                        let current_id = *id;
                        *id = id
                            .checked_add(One::one())
                            .ok_or(Error::<T>::SpotIsNotAvailable)?;

                        Ok(current_id)
                    })?;
                ContinuumSpots::<T>::insert(next_spot_id, spot);
                ContinuumCoordinates::<T>::insert(coordinate, next_spot_id);
                Ok(next_spot_id)
            }
            Some(spot_id) => {
                let spot = ContinuumSpots::<T>::get(spot_id).ok_or(Error::<T>::SpotNotFound)?;
                ensure!(spot.country == 0, Error::<T>::SpotIsNotAvailable);
                Ok(spot_id)
            }
        }
    }
}

impl<T: Config> Continuum<T::AccountId> for Pallet<T> {
    // Reassign a spot's owning country. `take()` removes the spot from
    // storage inside the mutation; the ownership check is still a TODO.
    fn transfer_spot(
        spot_id: SpotId,
        from: &T::AccountId,
        to: &(T::AccountId, BitCountryId),
    ) -> Result<SpotId, DispatchError> {
        ContinuumSpots::<T>::try_mutate(spot_id, |maybe_spot| -> Result<SpotId, DispatchError> {
            let treasury = Self::account_id();
            if *from != treasury {
                // TODO Check account Id own country spot.country
            }
            let mut spot = maybe_spot.take().ok_or(Error::<T>::SpotNotFound)?;
            spot.country = to.1;
            Ok(spot_id)
        })
    }
}
use std::convert::{From, Into}; #[derive(Debug, Clone, Copy)] struct BitArray { array: u64, left_margin: u64, right_margin: u64, left_align: bool, } impl PartialEq for BitArray { fn eq(&self, other: &Self) -> bool { self.left_align == other.left_align && u64::from(*self) == u64::from(*other) && self.length() == other.length() } } impl From<BitArray> for u64 { fn from(ba: BitArray) -> u64 { (ba.array & (!0u64 >> ba.left_margin)) >> ba.right_margin } } impl BitArray { pub fn length(&self) -> u64 { 64u64 - (self.left_margin + self.right_margin) } fn mask(&self) -> u64 { (!0u64 >> self.left_margin) & (!0u64 << self.right_margin) } fn aligned_to(self, bits: Self) -> Self { if bits.left_align { Self { array: (self.array << self.left_margin) >> bits.left_margin, left_margin: bits.left_margin, right_margin: 64-u64::max(self.length(), bits.length()), left_align: self.left_align } } else { Self { array: (self.array >> self.right_margin) << bits.right_margin, left_margin: 64-u64::max(self.length(), bits.length()), right_margin: bits.right_margin, left_align: self.left_align } } } fn trim_to(self, new_len: u64) -> BitArray { if new_len >= self.length() { return self; } Self { array: self.array, left_margin: if self.left_align {self.left_margin} else {64-self.right_margin-new_len}, right_margin: if !self.left_align {self.right_margin} else {64-self.left_margin-new_len}, left_align: self.left_align, } } fn apply_binary<F>(&self, func: F, bits: Self) -> Self where F: Fn(u64, u64) -> u64 { let bits = bits.aligned_to(*self); let self_ = self.trim_to(bits.length()); Self { array: func(self_.array, bits.array), left_margin: u64::max(self_.left_margin, bits.left_margin), right_margin: u64::max(self_.right_margin, bits.right_margin), left_align: self_.left_align, } } } #[cfg(test)] mod tests { use super::*; #[test] fn into_u64() { let bitarray = BitArray{ array: 0x0000f0000000ff00, left_margin: 24, right_margin: 4, left_align: false, }; assert_eq!(u64::from(bitarray), 0xff0u64); } 
#[test] fn mask() { let bitarray = BitArray{ array: 0u64, left_margin: 64-6, right_margin: 3, left_align: false, }; assert_eq!(bitarray.mask(), 0b111000) } #[test] fn trim_to() { assert_eq!( u64::from(BitArray{ array: 0x0ff000000000ff0, left_margin: 0, right_margin: 0, left_align: false, }.trim_to(60)), 0xff000000000ff0, ); assert_eq!( u64::from(BitArray{ array: 0x0ff000000000ff0, left_margin: 0, right_margin: 0, left_align: true, }.trim_to(60)), 0x0ff000000000ff, ); } #[test] fn aligned_to() { let b1 = BitArray{ array: 0b1111000000, left_margin: 64-10, right_margin: 6, left_align: false, }; let b2 = BitArray{ array: 0b1111100, left_margin: 64-7, right_margin: 2, left_align: true, }; let b1_a = b1.aligned_to(b2); assert_eq!(b1_a.array, 0b1111000u64); assert_eq!(b1_a.left_margin, b2.left_margin); let b2_a = b2.aligned_to(b1); assert_eq!(b2_a.array, 0b11111000000u64); assert_eq!(b2_a.right_margin, b1.right_margin); } #[test] fn apply_xor() { let b1 = BitArray{ array: 0b0011, left_margin: 64-4, right_margin: 0, left_align: false, }; let b2 = BitArray{ array: 0b010100, left_margin: 64-6, right_margin: 2, left_align: true, }; assert_eq!(b1.apply_binary(|x: u64, y: u64| x ^ y, b2), BitArray{ array: 0b0011 ^ (0b010100 >> 2), left_margin: u64::max(b1.left_margin, b2.left_margin), right_margin: b1.right_margin, left_align: b1.left_align, }); } }
use serde_json;

use cabot::{RequestBuilder, Client};
use url::form_urlencoded::Serializer as URLSerializer;

use super::super::results::{BearerResult, BearerError};
use super::super::config::{Tokens, ClientRef};

/// Shape of the OAuth2 token endpoint's JSON response.
/// `expires_in` and `refresh_token` are optional per RFC 6749.
#[derive(Deserialize)]
pub struct JsonToken {
    pub access_token: String,
    pub expires_in: Option<usize>,
    pub refresh_token: Option<String>,
}

// POST a form-encoded body to `token_url` and parse the JSON token response.
// `expires_in` defaults to 900 seconds when the server omits it.
// NOTE(review): request build, HTTP execute, body read and JSON parse all
// `unwrap()` — any transport/parse failure panics instead of returning
// `BearerError`; confirm whether that is acceptable for this CLI.
fn fetch_token(token_url: &str, form: &[u8]) -> BearerResult<Tokens> {
    // NOTE(review): "Fetchin" typo in the log line — runtime string, left
    // untouched here; fix in a behavior-changing pass.
    println!("Fetchin tokens from {}", token_url);
    let request = RequestBuilder::new(token_url)
        .set_http_method("POST")
        .add_header("Content-Type: application/x-www-form-urlencoded")
        .set_body(form)
        .build();
    let request = request.unwrap();
    let client = Client::new();
    let response = client.execute(&request).unwrap();
    let code = response.status_code();
    let data = response.body_as_string().unwrap();
    // Any non-2XX response (redirects included) is treated as an OAuth2 error.
    if code >= 300 {
        return Err(BearerError::OAuth2Error(format!(r#"Server did not return a valid response \ while consuming auth code, expected `2XX`, found `{}`: {}"#,
                                                    code,
                                                    data)));
    }
    let token: JsonToken = serde_json::from_str(data.as_str()).unwrap();
    Ok(Tokens::new(token.access_token.as_str(),
                   token.expires_in.unwrap_or(900),
                   match token.refresh_token {
                       Some(ref tok) => Some(tok.as_str()),
                       None => None,
                   }))
}

/// Exchange an authorization code for tokens (grant_type=authorization_code).
pub fn from_authcode(client: &ClientRef,
                     authcode: &str,
                     callback_uri: &str)
                     -> BearerResult<Tokens> {
    let form = URLSerializer::new(String::new())
        .append_pair("client_id", client.client_id)
        .append_pair("client_secret", client.secret)
        .append_pair("code", authcode)
        .append_pair("redirect_uri", callback_uri)
        .append_pair("grant_type", "authorization_code")
        .finish();
    fetch_token(client.token_url, form.as_bytes())
}

/// Refresh an access token (grant_type=refresh_token). If the server does not
/// return a new refresh token, the one we sent is carried over so callers
/// always get a usable refresh token back.
pub fn from_refresh_token(client: &ClientRef, refresh_token: &str) -> BearerResult<Tokens> {
    let form = URLSerializer::new(String::new())
        .append_pair("client_id", client.client_id)
        .append_pair("client_secret", client.secret)
        .append_pair("refresh_token", refresh_token)
        .append_pair("grant_type", "refresh_token")
        .finish();
    let mut token = fetch_token(client.token_url, form.as_bytes())?;
    if token.refresh_token.is_none() {
        token.refresh_token = Some(refresh_token.to_string());
    }
    Ok(token)
}

#[cfg(test)]
mod tests {
    use std::io::prelude::*;
    use std::thread;
    use std::time;
    use std::net::TcpListener;
    use rand::{thread_rng, Rng};

    use super::*;
    use super::super::super::config::ClientRef;

    #[test]
    fn test_from_authcode() {
        // Spin up a one-shot fake token server on a random local port and
        // have it answer a canned JSON token response.
        // NOTE(review): random port + fixed 700ms sleep is inherently racy
        // and can flake under load; binding port 0 would be deterministic.
        let mut rng = thread_rng();
        let server_port: usize = rng.gen_range(3000, 9000);
        let server_addr = format!("127.0.0.1:{}", server_port);
        let token_url = format!("http://127.0.0.1:{}", server_port);

        let authservhandler = thread::spawn(move || {
            let authorization_server = TcpListener::bind(server_addr.as_str()).unwrap();
            let stream = authorization_server.incoming().next().unwrap();
            let mut stream = stream.unwrap();
            let tokens = r#"{ "access_token": "atok", "expires_in": 42, "refresh_token": "rtok"}"#;
            let content_len = format!("Content-Length: {}", tokens.len());
            let resp = vec!["HTTP/1.0 200 Ok",
                            "Content-Type: application/json",
                            content_len.as_str(),
                            "",
                            tokens];
            let resp = resp.join("\r\n");
            stream.write(resp.as_bytes()).unwrap();
        });

        // Give the server thread time to bind before connecting.
        let dur = time::Duration::from_millis(700);
        thread::sleep(dur);

        let client = ClientRef {
            provider: "",
            token_url: token_url.as_str(),
            authorize_url: "",
            client_id: "",
            secret: "",
            scope: None,
        };

        let tokens = from_authcode(&client, "authcode", "http://127.0.0.1/callback");
        assert_eq!(tokens.is_err(), false);
        let tokens = tokens.unwrap();
        assert_eq!(tokens.access_token, "atok");
        // assert_eq!(tokens.expires_at, now() + 42...);
        assert_eq!(tokens.refresh_token, Some("rtok".to_string()));
        authservhandler.join().unwrap();
    }

    #[test]
    fn test_from_refresh_token() {
        // Same fake-server setup; the response deliberately omits
        // refresh_token so we can verify the carry-over behavior.
        let mut rng = thread_rng();
        let server_port: usize = rng.gen_range(3000, 9000);
        let server_addr = format!("127.0.0.1:{}", server_port);
        let token_url = format!("http://127.0.0.1:{}", server_port);

        let authservhandler = thread::spawn(move || {
            let authorization_server = TcpListener::bind(server_addr.as_str()).unwrap();
            let stream = authorization_server.incoming().next().unwrap();
            let mut stream = stream.unwrap();
            let tokens = r#"{ "access_token": "atok", "expires_in": 42}"#;
            let content_len = format!("Content-Length: {}", tokens.len());
            let resp = vec!["HTTP/1.0 200 Ok",
                            "Content-Type: application/json",
                            content_len.as_str(),
                            "",
                            tokens];
            let resp = resp.join("\r\n");
            stream.write(resp.as_bytes()).unwrap();
        });

        let dur = time::Duration::from_millis(700);
        thread::sleep(dur);

        let client = ClientRef {
            provider: "",
            token_url: token_url.as_str(),
            authorize_url: "",
            client_id: "",
            secret: "",
            scope: None,
        };

        let tokens = from_refresh_token(&client, "refresh_token");
        assert_eq!(tokens.is_err(), false);
        let tokens = tokens.unwrap();
        assert_eq!(tokens.access_token, "atok");
        // The refresh token we sent must be echoed back into the result.
        assert_eq!(tokens.refresh_token, Some("refresh_token".to_string()));
        authservhandler.join().unwrap();
    }
}
/*!
```rudra-poc
[target]
crate = "av-data"
version = "0.2.1"

[report]
issue_url = "https://github.com/rust-av/rust-av/issues/136"
issue_date = 2021-01-07
rustsec_url = "https://github.com/RustSec/advisory-db/pull/574"
rustsec_id = "RUSTSEC-2021-0007"
unique_bugs = 1

[[bugs]]
analyzer = "UnsafeDataflow"
guide = "Manual"
bug_class = "Other"
rudra_report_locations = ["src/frame.rs:369:5: 398:6"]
```
!*/
// Proof-of-concept for RUSTSEC-2021-0007: triggers unsound pointer handling
// in av-data's `Frame::copy_from_raw_parts`. The crash (segfault) IS the
// expected outcome — do not "fix" the dangling pointers below.
#![forbid(unsafe_code)]

use av_data::frame::*;
use av_data::pixel::*;

fn main() {
    // Minimal valid video frame description so `new_default_frame` succeeds.
    let yuv420: Formaton = *formats::YUV420;
    let fm = std::sync::Arc::new(yuv420);
    let video_info = VideoInfo {
        pic_type: PictureType::I,
        width: 42,
        height: 42,
        format: fm,
    };
    let mut frame = new_default_frame(MediaKind::Video(video_info), None);

    // Feed two null-ish raw plane pointers with nonzero lengths; the library
    // copies from them without validation — this is the bug being demonstrated.
    frame.copy_from_raw_parts(
        vec![0 as usize as *const u8; 2].into_iter(),
        vec![40; 2].into_iter(),
    );
    println!("Program segfaults before reaching this point");
}
use errno::errno;
use libip6tc_sys as sys;
use std::ffi::CStr;
use std::fmt;

/// Error carrying the ip6tc errno code and its library-provided description.
#[derive(Debug, Clone, PartialEq)]
pub struct IptcError {
    // Raw errno value at the time of capture.
    pub code: i32,
    // Message string owned by the ip6tc library; the 'static lifetime
    // assumes `ip6tc_strerror` returns a pointer that stays valid for the
    // life of the process — TODO confirm against libip6tc docs.
    pub message: &'static str,
}

impl std::error::Error for IptcError {}

impl fmt::Display for IptcError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "IptcError {{{}, {}}}", self.code, self.message)
    }
}

impl IptcError {
    /// Capture the current thread's errno and translate it through
    /// `ip6tc_strerror` into an `IptcError`.
    pub fn from_errno() -> Self {
        // let err = std::io::Error::last_os_error().raw_os_error().unwrap();
        let code = errno().0;
        // SAFETY(review): assumes ip6tc_strerror returns a valid, NUL-
        // terminated C string for any code; `to_str().unwrap()` further
        // assumes it is valid UTF-8 — confirm both hold for this library.
        let message = unsafe { CStr::from_ptr(sys::ip6tc_strerror(code)) };
        let message = message.to_str().unwrap();
        IptcError { code, message }
    }
}

// TODO: into/from
// trait ToIptcResult {
//     fn to_result(self) -> Result<(), IptcError>;
// }

// impl ToIptcResult for i32 {
//     fn to_result(self) -> Result<(), IptcError> {
//         match self {
//             0 => Err(IptcError::from_errno()),
//             _ => Ok(()),
//         }
//     }
// }

#[cfg(test)]
mod tests {
    use super::*;
    use errno::*;

    #[test]
    fn it_works() {
        // errno 2 (ENOENT) maps to the library's "no such chain" message.
        set_errno(Errno(2));
        assert_eq!(
            IptcError {
                code: 2,
                message: "No chain/target/match by that name"
            },
            IptcError::from_errno()
        );
    }
}
use crate::errors::{Error, ErrorKind, Result}; use crate::protos; use crate::{config, consts, session, wallet}; use std::prelude::v1::*; extern crate sgx_types; //use crate::protos::xchain; //use sgx_types::*; //use std::path::PathBuf; //use std::slice; /// account在chain上面给to转账amount,小费是fee,留言是desc pub fn transfer( account: &wallet::Account, chain_name: &String, to: &String, amount: &String, fee: &String, desc: &String, ) -> Result<String> { let amount_bk = amount.to_owned(); let amount = consts::str_as_i64(amount.as_str())?; let fee = consts::str_as_i64(fee.as_str())?; let auth_requires = vec![ config::CONFIG .read() .unwrap() .compliance_check .compliance_check_endorse_service_addr .to_owned(); 1 ]; let endorser_fee = config::CONFIG .read() .unwrap() .compliance_check .compliance_check_endorse_service_fee as i64; // TODO 应该不用判断 if endorser_fee > amount { println!("endorser_fee should smaller than amount"); return Err(Error::from(ErrorKind::InvalidArguments)); } let total_amount = amount + fee + endorser_fee; //防止溢出 if total_amount < amount { println!("totoal_amount should be greater than amount"); return Err(Error::from(ErrorKind::InvalidArguments)); } let mut invoke_rpc_request = protos::xchain::InvokeRPCRequest::new(); invoke_rpc_request.set_bcname(chain_name.to_owned()); invoke_rpc_request.set_requests(protobuf::RepeatedField::from_vec(vec![])); invoke_rpc_request.set_initiator(account.address.to_owned()); invoke_rpc_request.set_auth_require(protobuf::RepeatedField::from_vec(auth_requires.clone())); let mut pre_sel_utxo_req = protos::xchain::PreExecWithSelectUTXORequest::new(); pre_sel_utxo_req.set_bcname(chain_name.to_owned()); pre_sel_utxo_req.set_address(account.address.to_owned()); pre_sel_utxo_req.set_totalAmount(total_amount); pre_sel_utxo_req.set_request(invoke_rpc_request.clone()); let msg = session::Message { to: to.to_owned(), fee: fee.to_string(), desc: desc.to_owned(), auth_require: auth_requires, amount: amount_bk, frozen_height: 0, initiator: 
account.address.to_owned(), }; let sess = session::Session::new(chain_name, account, &msg); let mut pre_exe_with_sel_res = sess.pre_exec_with_select_utxo(pre_sel_utxo_req)?; sess.gen_complete_tx_and_post(&mut pre_exe_with_sel_res) } /* pub fn test_transfer() { let bcname = String::from("xuper"); let mut d = PathBuf::from(env!("CARGO_MANIFEST_DIR")); d.push("key/private.key"); let acc = super::wallet::Account::new( d.to_str().unwrap(), Default::default(), "XC1111111111000000@xuper", ); let to = "dpzuVdosQrF2kmzumhVeFQZa1aYcdgFpN".to_string(); let amount = "1401".to_string(); let fee = "0".to_string(); let desc = "test duanbing".to_string(); let res = transfer(&acc, &bcname, &to, &amount, &fee, &desc); println!("transfer res: {:?}", res); assert_eq!(res.is_ok(), true); let txid = res.unwrap(); println!("txid: {:?}", txid); let mut rt: sgx_status_t = sgx_status_t::SGX_ERROR_UNEXPECTED; let mut output = 0 as *mut sgx_libc::c_void; let mut out_len: usize = 0; let res = unsafe { crate::ocall_xchain_query_tx( &mut rt, txid.as_ptr() as *const u8, txid.len(), &mut output, &mut out_len, ) }; assert_eq!(res, sgx_status_t::SGX_SUCCESS); assert_eq!(rt, sgx_status_t::SGX_SUCCESS); unsafe { assert_ne!(sgx_types::sgx_is_outside_enclave(output, out_len), 0); } let resp_slice = unsafe { slice::from_raw_parts(output as *mut u8, out_len) }; let result: xchain::TxStatus = serde_json::from_slice(resp_slice).unwrap(); unsafe { crate::ocall_free(output); } println!("{:?}", result); println!("transfer test passed"); } */
// svd2rust-generated register accessor — hand edits here are lost on regeneration.
#[doc = "Register `ETH_DMAC1CATxBR` reader"]
pub type R = crate::R<ETH_DMAC1CATX_BR_SPEC>;
#[doc = "Field `CURTBUFAPTR` reader - Application Transmit Buffer Address Pointer"]
pub type CURTBUFAPTR_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - Application Transmit Buffer Address Pointer"]
    #[inline(always)]
    pub fn curtbufaptr(&self) -> CURTBUFAPTR_R {
        // The field spans the full 32-bit register, so no mask/shift is needed.
        CURTBUFAPTR_R::new(self.bits)
    }
}
// NOTE(review): the SVD description below says "Channel 0", but the register
// name (ETH_DMAC1CATxBR) indicates DMA channel *1* — likely a copy-paste error
// in the vendor SVD; confirm against the reference manual before relying on it.
#[doc = "Channel 0 current application transmit buffer register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`eth_dmac1catx_br::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ETH_DMAC1CATX_BR_SPEC;
impl crate::RegisterSpec for ETH_DMAC1CATX_BR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`eth_dmac1catx_br::R`](R) reader structure"]
impl crate::Readable for ETH_DMAC1CATX_BR_SPEC {}
#[doc = "`reset()` method sets ETH_DMAC1CATxBR to value 0"]
impl crate::Resettable for ETH_DMAC1CATX_BR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
#[doc = "Reader of register IC_CLR_ACTIVITY"]
pub type R = crate::R<u32, super::IC_CLR_ACTIVITY>;
#[doc = "Reader of field `CLR_ACTIVITY`"]
pub type CLR_ACTIVITY_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bit 0 - Reading this register clears the ACTIVITY interrupt if the I2C is not active anymore. If the I2C module is still active on the bus, the ACTIVITY interrupt bit continues to be set. It is automatically cleared by hardware if the module is disabled and if there is no further activity on the bus. The value read from this register to get status of the ACTIVITY interrupt (bit 8) of the IC_RAW_INTR_STAT register.\\n\\n Reset value: 0x0"]
    #[inline(always)]
    pub fn clr_activity(&self) -> CLR_ACTIVITY_R {
        // Extract bit 0 of the register value as a bool.
        let bit0 = self.bits & 0b1;
        CLR_ACTIVITY_R::new(bit0 == 1)
    }
}
use std::io::{stdin, stdout, Write};

/// Prompts for an index `n` and prints the `n`-th Fibonacci number computed
/// via Binet's closed-form formula. Exits with a panic message on I/O
/// failure or non-numeric input instead of silently ignoring the errors.
fn main() {
    // Whether to round the closed-form result to the nearest integer before
    // printing (the formula is only approximate in f32).
    const ROUND: bool = true;

    let mut selection = String::new();
    print!("What number of fibonacci would you like to go to?\n>> ");
    // The original ignored these Results; `read_line` after a failed flush
    // would silently show no prompt, and a parse failure crashed anyway.
    stdout().flush().expect("failed to flush stdout");
    stdin()
        .read_line(&mut selection)
        .expect("failed to read from stdin");
    let selection = selection
        .trim()
        .parse::<f32>()
        .expect("input was not a number");

    if ROUND {
        println!("{}", fibonacci(selection).round())
    } else {
        println!("{}", fibonacci(selection))
    }
}

/// Binet's formula: F(n) = (phi^n - psi^n) / sqrt(5), with
/// phi ~= 1.6180339 (golden ratio), psi = 1 - phi ~= -0.6180339 and
/// sqrt(5) ~= 2.236067977. Exact only up to f32 precision; callers round.
fn fibonacci(n: f32) -> f32 {
    // `n` is already f32, so the original `n as f32` casts were redundant.
    (f32::powf(1.6180339, n) - f32::powf(-0.6180339, n)) / 2.236067977
}
extern crate rand;
use crate::variables;
use std::{thread, time};

// Module to detect AV sandbox || Virtual Machine
// #1
// Known VM's network adapter MAC prefixes
// 00:05:69 (Vmware)
// 00:0C:29 (Vmware)
// 00:1C:14 (Vmware)
// 00:50:56 (Vmware)
// 08:00:27 (VirtualBox)
// #2
// The existence of the following registry entries indicates the existence of virtualization software:
// HKLM\SOFTWARE\Vmware Inc.\\\Vmware Tools
// HKEY_LOCAL_MACHINE\HARDWARE\DEVICEMAP\Scsi\Scsi Port 2\Scsi Bus 0\Target Id 0\Logical Unit Id 0\Identifier
// SYSTEM\CurrentControlSet\Enum\SCSI\Disk&Ven_VMware_&Prod_VMware_Virtual_S
// SYSTEM\CurrentControlSet\Control\CriticalDeviceDatabase\root#vmwvmcihostdev
// SYSTEM\CurrentControlSet\Control\VirtualDeviceDrivers
// #3
// Checking for Processes Indicating a VM
// Vmware
// Vmtoolsd.exe
// Vmwaretrat.exe
// Vmwareuser.exe
// Vmacthlp.exe
// VirtualBox
// vboxservice.exe
// vboxtray.exe

// NOTE(review): this module implements anti-analysis / AV-evasion behavior
// (sandbox stalling plus stubbed bypass routines). It should be escalated for
// a security review, not extended or fixed. The functions below do not
// currently compile as written and the helpers are empty stubs.

// Stalling loop gated on the `variables::kill_AV` flag: sleeps for random
// intervals forever. NOTE(review): does not compile as written.
pub fn avKiller() {
    if variables::kill_AV {
        let mut rng = rand::thread_rng();
        loop {
            thread::sleep(time::Duration::from_millis(rng.gen_range(500, 1000)) * rng.gen_range(100000, 1000000));
        }
    }
}

// Gated on `variables::bypass_AV`; calls the two stubs below.
// NOTE(review): missing semicolons — this does not compile as written.
pub fn avBypass() {
    if variables::bypass_AV {
        mermoryAlloc()
        fakeFuncs()
        // TODO: AV Bypass (Alloc mermory + fake funcs)
    }
}

// Empty stub — unimplemented.
fn mermoryAlloc() {
    // todo
}

// Empty stub — unimplemented.
fn fakeFuncs() {
    // todo
}
/// bindings for ARINC653P1-5 3.7.2.5 mutex
pub mod basic {
    use crate::bindings::*;
    use crate::Locked;

    /// ARINC653P1-5 3.7.1
    pub type MutexName = ApexName;

    /// ARINC653P1-5 3.7.1
    ///
    /// According to ARINC 653P1-5 this may either be 32 or 64 bits.
    /// Internally we will use 64-bit by default.
    /// The implementing Hypervisor may cast this to 32-bit if needed
    pub type MutexId = ApexLongInteger;

    /// ARINC653P1-5 3.7.1
    pub type LockCount = ApexInteger;

    /// ARINC653P1-5 3.7.2.5
    /// Sentinel id: the process currently owns no mutex.
    pub const NO_MUTEX_OWNED: MutexId = -2;

    /// ARINC653P1-5 3.7.2.5
    /// Sentinel id: the process holds the preemption lock "mutex".
    pub const PREEMPTION_LOCK_MUTEX: MutexId = -3;

    /// ARINC653P1-5 3.7.1
    #[repr(u32)]
    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
    #[cfg_attr(feature = "strum", derive(strum::FromRepr))]
    pub enum MutexState {
        Available = 0,
        Owned = 1,
    }

    /// ARINC653P1-5 3.7.1
    /// Snapshot of a mutex as reported by the hypervisor.
    #[derive(Debug, Clone, PartialEq, Eq)]
    #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
    pub struct MutexStatus {
        pub mutex_owner: ProcessId,
        pub mutex_state: MutexState,
        pub mutex_priority: Priority,
        pub lock_count: LockCount,
        pub waiting_processes: WaitingRange,
    }

    /// ARINC653P1-5 required functions for Mutex functionality
    ///
    /// [`ApexMutexP1`] requires the implementation of the [`ApexProcessP4`] trait
    /// because [`ApexMutexP1::get_process_mutex_state`] and [`ApexMutexP1::reset_mutex`]
    /// take a [`ProcessId`] and hence need working process functionalities
    pub trait ApexMutexP1: ApexProcessP4 {
        /// # Errors
        /// - [ErrorReturnCode::InvalidConfig]: [ApexLimits::SYSTEM_LIMIT_NUMBER_OF_MUTEXES](crate::bindings::ApexLimits::SYSTEM_LIMIT_NUMBER_OF_MUTEXES) was reached
        /// - [ErrorReturnCode::NoAction]: a mutex with given `mutex_name` already exists in this partition
        /// - [ErrorReturnCode::InvalidParam]: `mutex_priority` is invalid
        /// - [ErrorReturnCode::InvalidParam]: [QueuingDiscipline](crate::bindings::QueuingDiscipline) in `queuing_discipline` is unsupported
        /// - [ErrorReturnCode::InvalidMode]: our current operating mode is [OperatingMode::Normal](crate::prelude::OperatingMode::Normal)
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn create_mutex<L: Locked>(
            mutex_name: MutexName,
            mutex_priority: Priority,
            queuing_discipline: QueuingDiscipline,
        ) -> Result<MutexId, ErrorReturnCode>;

        /// # Errors
        /// - [ErrorReturnCode::InvalidParam]: mutex with given `mutex_id` does not exist in this partition
        /// - [ErrorReturnCode::InvalidParam]: `mutex_id` is [PREEMPTION_LOCK_MUTEX]
        /// - [ErrorReturnCode::InvalidParam]: `time_out` is invalid
        /// - [ErrorReturnCode::InvalidMode]: different mutex is already held by this process
        /// - [ErrorReturnCode::InvalidMode]: this process is the error handler
        /// - [ErrorReturnCode::InvalidMode]: the priority of this process is greater than the priority of the given mutex
        /// - [ErrorReturnCode::NotAvailable]: (condition unspecified in the original docs — TODO confirm against ARINC 653P1-5)
        /// - [ErrorReturnCode::TimedOut]: `time_out` elapsed
        /// - [ErrorReturnCode::InvalidConfig]: lock count of given mutex is at [MAX_LOCK_LEVEL](crate::bindings::MAX_LOCK_LEVEL)
        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn acquire_mutex<L: Locked>(
            mutex_id: MutexId,
            time_out: ApexSystemTime,
        ) -> Result<(), ErrorReturnCode>;

        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn release_mutex<L: Locked>(mutex_id: MutexId) -> Result<(), ErrorReturnCode>;

        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn reset_mutex<L: Locked>(
            mutex_id: MutexId,
            process_id: ProcessId,
        ) -> Result<(), ErrorReturnCode>;

        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn get_mutex_id<L: Locked>(mutex_name: MutexName) -> Result<MutexId, ErrorReturnCode>;

        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn get_mutex_status<L: Locked>(mutex_id: MutexId) -> Result<MutexStatus, ErrorReturnCode>;

        #[cfg_attr(not(feature = "full_doc"), doc(hidden))]
        fn get_process_mutex_state<L: Locked>(
            process_id: ProcessId,
        ) -> Result<MutexId, ErrorReturnCode>;
    }
}

/// abstractions for ARINC653P1-5 3.7.2.5 mutex
pub mod abstraction {
    use core::marker::PhantomData;
    use core::sync::atomic::AtomicPtr;

    // Reexport important basic-types for downstream-user
    pub use super::basic::{LockCount, MutexId, MutexName, MutexStatus};
    use crate::bindings::*;
    use crate::hidden::Key;
    use crate::prelude::*;

    /// Typed view of the raw [`MutexId`] returned by
    /// `get_process_mutex_state`, distinguishing the two sentinel values.
    #[derive(Debug, Clone, PartialEq, Eq)]
    #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
    pub enum MutexOwnedStatus {
        Owned(MutexId),
        NoMutexOwned,
        PreemptionLockMutex,
    }

    impl From<MutexId> for MutexOwnedStatus {
        fn from(value: MutexId) -> Self {
            use MutexOwnedStatus::*;
            match value {
                NO_MUTEX_OWNED => NoMutexOwned,
                PREEMPTION_LOCK_MUTEX => PreemptionLockMutex,
                id => Owned(id),
            }
        }
    }

    /// Handle to a hypervisor mutex, caching its id and priority.
    /// `PhantomData<AtomicPtr<M>>` only ties the handle to the hypervisor
    /// implementation `M`; no `M` value is stored.
    #[derive(Debug)]
    pub struct Mutex<M: ApexMutexP1> {
        _b: PhantomData<AtomicPtr<M>>,
        id: MutexId,
        priority: Priority,
    }

    // Manual impl: `derive(Clone)` would require `M: Clone`, which the
    // PhantomData-only field does not actually need.
    impl<M: ApexMutexP1> Clone for Mutex<M> {
        fn clone(&self) -> Self {
            Self {
                _b: self._b,
                id: self.id,
                priority: self.priority,
            }
        }
    }

    /// Convenience extension: look up a mutex handle by name.
    pub trait ApexMutexP1Ext: ApexMutexP1 + Sized {
        fn get_mutex(name: Name) -> Result<Mutex<Self>, Error>;
    }

    impl<M: ApexMutexP1> ApexMutexP1Ext for M {
        fn get_mutex(name: Name) -> Result<Mutex<M>, Error> {
            let id = M::get_mutex_id::<Key>(name.into())?;
            // According to ARINC653P1-5 3.7.2.5.6 this can only fail if the mutex_id
            // does not exist in the current partition.
            // But since we retrieve the mutex_id directly from the hypervisor
            // there is no possible way for it not existing
            let status = M::get_mutex_status::<Key>(id).unwrap();
            Ok(Mutex {
                _b: Default::default(),
                id,
                priority: status.mutex_priority,
            })
        }
    }

    impl<M: ApexMutexP1> Mutex<M> {
        /// Look up an existing mutex by name (delegates to [`ApexMutexP1Ext::get_mutex`]).
        pub fn from_name(name: Name) -> Result<Mutex<M>, Error> {
            M::get_mutex(name)
        }

        /// The cached hypervisor id of this mutex.
        pub fn id(&self) -> MutexId {
            self.id
        }

        /// The priority this mutex was created with (cached at lookup/creation).
        pub fn priority(&self) -> Priority {
            self.priority
        }

        /// Acquire the mutex, blocking up to `timeout`.
        pub fn acquire(&self, timeout: SystemTime) -> Result<(), Error> {
            M::acquire_mutex::<Key>(self.id, timeout.into())?;
            Ok(())
        }

        /// Release one lock level of the mutex.
        pub fn release(&self) -> Result<(), Error> {
            M::release_mutex::<Key>(self.id)?;
            Ok(())
        }

        /// Forcibly reset the mutex on behalf of `process`.
        pub fn reset(&self, process: &Process<M>) -> Result<(), Error> {
            M::reset_mutex::<Key>(self.id, process.id())?;
            Ok(())
        }

        /// Current status snapshot of this mutex.
        pub fn status(&self) -> MutexStatus {
            // According to ARINC653P1-5 3.7.2.5.6 this can only fail if the mutex_id
            // does not exist in the current partition.
            // But since we retrieve the mutex_id directly from the hypervisor
            // there is no possible way for it not existing
            M::get_mutex_status::<Key>(self.id).unwrap()
        }
    }

    impl<A: ApexMutexP1 + ApexProcessP4> Process<A> {
        /// Which mutex (if any) this process currently owns.
        pub fn get_process_mutex_state(&self) -> MutexOwnedStatus {
            // According to ARINC653P1-5 3.7.2.5.7 this can only fail if the process_id
            // does not exist in the current partition.
            // But since we retrieve the process_id directly from the hypervisor
            // there is no possible way for it not existing
            A::get_process_mutex_state::<Key>(self.id()).unwrap().into()
        }
    }

    impl<M: ApexMutexP1> StartContext<M> {
        /// Create a new mutex during partition start-up and return its handle.
        pub fn create_mutex(
            &mut self,
            name: Name,
            priority: Priority,
            qd: QueuingDiscipline,
        ) -> Result<Mutex<M>, Error> {
            let id = M::create_mutex::<Key>(name.into(), priority, qd)?;
            Ok(Mutex {
                _b: Default::default(),
                id,
                priority,
            })
        }
    }
}
use amethyst::prelude::*;
use amethyst::renderer::SpriteSheetHandle;
use utilities::{
    decompile_as_sprites,
    initialise_camera,
    Backpack,
};
use custom_game_data::CustomGameData;
use snake::{initialise_snake};
use spawnables::Food;

/// Main gameplay state for the snake game.
pub struct SnakeGame{
    // Whether the menu is considered open; set once at construction and
    // never read within this file — presumably consumed by other states.
    is_menu_open: bool
}

impl Default for SnakeGame {
    fn default() -> Self {
        Self {
            is_menu_open: true,
        }
    }
}

impl<'a, 'b> State<CustomGameData<'a,'b>,StateEvent> for SnakeGame {
    // Runs once when the state is pushed: registers the Food component,
    // loads both sprite sheets, and initialises camera, snake, and the
    // shared Backpack resource holding the two sheet handles.
    fn on_start(&mut self, data: StateData<CustomGameData>) {
        let mut world = data.world;
        world.register::<Food>();
        // NOTE(review): the tuple/int arguments to decompile_as_sprites look
        // like (sheet size, sprite size, sheet index) — confirm against the
        // utilities module before relying on this.
        let snake_sheet_handle = decompile_as_sprites(&mut world, "SnakeSprite.png", (16.0,32.0), (8.0,8.0), 0);
        let food_sheet_handle = decompile_as_sprites(&mut world, "FoodSprite.png", (16.0,16.0), (8.0,8.0), 1);
        initialise_camera(&mut world);
        initialise_snake(&mut world, snake_sheet_handle.clone());
        world.add_resource(Backpack::new(snake_sheet_handle,food_sheet_handle));
    }

    // Advances the custom dispatcher every fixed timestep; never transitions.
    fn fixed_update(&mut self, data: StateData<CustomGameData>) -> Trans<CustomGameData<'a,'b>,StateEvent>{
        data.data.update(&data.world,true);
        Trans::None
    }
}

/// Menu state; currently a stub with an empty on_start and a pass-through update.
struct SnakeMenu;

impl<'a, 'b> State<CustomGameData<'a,'b>,StateEvent> for SnakeMenu {
    // Intentionally empty: the menu has no setup yet.
    fn on_start(&mut self, data: StateData<CustomGameData>) {
    }

    // Advances the custom dispatcher every frame; never transitions.
    fn update(&mut self, data: StateData<CustomGameData>) -> Trans<CustomGameData<'a,'b>,StateEvent>{
        data.data.update(&data.world,true);
        Trans::None
    }
}
// Generated from crates/unflow-parser/src/grammar/Design.g4 by ANTLR 4.8
// NOTE(review): machine-generated lexer — do not hand-edit; regenerate from
// the grammar (Design.g4) instead.
#![allow(dead_code)]
#![allow(nonstandard_style)]
#![allow(unused_imports)]
#![allow(unused_variables)]
use antlr_rust::atn::ATN;
use antlr_rust::char_stream::CharStream;
use antlr_rust::int_stream::IntStream;
use antlr_rust::lexer::{BaseLexer, Lexer, LexerRecog};
use antlr_rust::atn_deserializer::ATNDeserializer;
use antlr_rust::dfa::DFA;
use antlr_rust::lexer_atn_simulator::{LexerATNSimulator, ILexerATNSimulator};
use antlr_rust::PredictionContextCache;
use antlr_rust::recognizer::{Recognizer,Actions};
use antlr_rust::error_listener::ErrorListener;
use antlr_rust::TokenSource;
use antlr_rust::token_factory::{TokenFactory,CommonTokenFactory,TokenAware};
use antlr_rust::token::*;
use antlr_rust::rule_context::{BaseRuleContext,EmptyCustomRuleContext,EmptyContext};
use antlr_rust::parser_rule_context::{ParserRuleContext,BaseParserRuleContext,cast};
use antlr_rust::vocabulary::{Vocabulary,VocabularyImpl};
use antlr_rust::{lazy_static,Tid,TidAble,TidExt};

use std::sync::Arc;
use std::cell::RefCell;
use std::rc::Rc;
use std::marker::PhantomData;
use std::ops::{Deref, DerefMut};

// Token type constants (one per grammar token).
pub const T__0:isize=1;
pub const T__1:isize=2;
pub const T__2:isize=3;
pub const T__3:isize=4;
pub const T__4:isize=5;
pub const T__5:isize=6;
pub const T__6:isize=7;
pub const REPEAT:isize=8;
pub const GOTO_KEY:isize=9;
pub const SHOW_KEY:isize=10;
pub const FLOW:isize=11;
pub const SEE:isize=12;
pub const DO:isize=13;
pub const REACT:isize=14;
pub const WITHTEXT:isize=15;
pub const ANIMATE:isize=16;
pub const PAGE:isize=17;
pub const LIBRARY:isize=18;
pub const COMPONENT:isize=19;
pub const LAYOUT:isize=20;
pub const POSITION:isize=21;
pub const STYLE:isize=22;
pub const STRING_LITERAL:isize=23;
pub const LPAREN:isize=24;
pub const RPAREN:isize=25;
pub const LBRACE:isize=26;
pub const RBRACE:isize=27;
pub const LBRACK:isize=28;
pub const RBRACK:isize=29;
pub const Quote:isize=30;
pub const SingleQuote:isize=31;
pub const COLON:isize=32;
pub const DOT:isize=33;
pub const COMMA:isize=34;
pub const LETTER:isize=35;
pub const IDENTIFIER:isize=36;
pub const DIGITS:isize=37;
pub const DIGITS_IDENTIFIER:isize=38;
pub const DECIMAL_LITERAL:isize=39;
pub const FLOAT_LITERAL:isize=40;
pub const WS:isize=41;
pub const NL:isize=42;
pub const NEWLINE:isize=43;
pub const COMMENT:isize=44;
pub const LINE_COMMENT:isize=45;

pub const channelNames: [&'static str;0+2] = [
    "DEFAULT_TOKEN_CHANNEL", "HIDDEN"
];

pub const modeNames: [&'static str;1] = [
    "DEFAULT_MODE"
];

pub const ruleNames: [&'static str;54] = [
    "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6", "REPEAT", "GOTO_KEY",
    "SHOW_KEY", "FLOW", "SEE", "DO", "REACT", "WITHTEXT", "ANIMATE", "PAGE",
    "LIBRARY", "COMPONENT", "LAYOUT", "POSITION", "STYLE", "STRING_LITERAL",
    "LPAREN", "RPAREN", "LBRACE", "RBRACE", "LBRACK", "RBRACK", "Quote",
    "SingleQuote", "COLON", "DOT", "COMMA", "LETTER", "IDENTIFIER", "DIGITS",
    "DIGITS_IDENTIFIER", "DECIMAL_LITERAL", "FLOAT_LITERAL", "DIGIT", "ExponentPart",
    "INTEGER", "EscapeSequence", "HexDigit", "Digits", "LetterOrDigit", "Letter",
    "WhiteSpace", "WS", "NL", "NEWLINE", "COMMENT", "LINE_COMMENT"
];

// Display names, indexed by token type.
pub const _LITERAL_NAMES: [Option<&'static str>;35] = [
    None, Some("'rem'"), Some("'px'"), Some("'em'"), Some("'-'"), Some("'|'"),
    Some("';'"), Some("'='"), Some("'repeat'"), None, None, None, None, None,
    None, None, None, None, None, None, None, None, None, None, Some("'('"),
    Some("')'"), Some("'{'"), Some("'}'"), Some("'['"), Some("']'"), Some("'\"'"),
    Some("'''"), Some("':'"), Some("'.'"), Some("','")
];
pub const _SYMBOLIC_NAMES: [Option<&'static str>;46] = [
    None, None, None, None, None, None, None, None, Some("REPEAT"), Some("GOTO_KEY"),
    Some("SHOW_KEY"), Some("FLOW"), Some("SEE"), Some("DO"), Some("REACT"),
    Some("WITHTEXT"), Some("ANIMATE"), Some("PAGE"), Some("LIBRARY"), Some("COMPONENT"),
    Some("LAYOUT"), Some("POSITION"), Some("STYLE"), Some("STRING_LITERAL"),
    Some("LPAREN"), Some("RPAREN"), Some("LBRACE"), Some("RBRACE"), Some("LBRACK"),
    Some("RBRACK"), Some("Quote"), Some("SingleQuote"), Some("COLON"), Some("DOT"),
    Some("COMMA"), Some("LETTER"), Some("IDENTIFIER"), Some("DIGITS"),
    Some("DIGITS_IDENTIFIER"), Some("DECIMAL_LITERAL"), Some("FLOAT_LITERAL"),
    Some("WS"), Some("NL"), Some("NEWLINE"), Some("COMMENT"), Some("LINE_COMMENT")
];
lazy_static!{
    static ref _shared_context_cache: Arc<PredictionContextCache> = Arc::new(PredictionContextCache::new());
    static ref VOCABULARY: Box<dyn Vocabulary> = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None));
}

pub type LexerContext<'input> = BaseRuleContext<'input,EmptyCustomRuleContext<'input,LocalTokenFactory<'input> >>;
pub type LocalTokenFactory<'input> = antlr_rust::token_factory::ArenaCommonFactory<'input>;

type From<'a> = <LocalTokenFactory<'a> as TokenFactory<'a> >::From;

// The lexer itself; all real work is delegated to the wrapped BaseLexer.
#[derive(Tid)]
pub struct DesignLexer<'input, Input:CharStream<From<'input> >> {
    base: BaseLexer<'input,DesignLexerActions,Input,LocalTokenFactory<'input>>,
}

impl<'input, Input:CharStream<From<'input> >> Deref for DesignLexer<'input,Input>{
    type Target = BaseLexer<'input,DesignLexerActions,Input,LocalTokenFactory<'input>>;

    fn deref(&self) -> &Self::Target {
        &self.base
    }
}

impl<'input, Input:CharStream<From<'input> >> DerefMut for DesignLexer<'input,Input>{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.base
    }
}

impl<'input, Input:CharStream<From<'input> >> DesignLexer<'input,Input>{
    fn get_rule_names(&self) -> &'static [&'static str] {
        &ruleNames
    }
    fn get_literal_names(&self) -> &[Option<&str>] {
        &_LITERAL_NAMES
    }
    fn get_symbolic_names(&self) -> &[Option<&str>] {
        &_SYMBOLIC_NAMES
    }
    fn get_grammar_file_name(&self) -> &'static str {
        "DesignLexer.g4"
    }

    // Builds the lexer around a caller-supplied token factory.
    pub fn new_with_token_factory(input: Input, tf: &'input LocalTokenFactory<'input>) -> Self {
        antlr_rust::recognizer::check_version("0","2");
        Self {
            base: BaseLexer::new_base_lexer(
                input,
                LexerATNSimulator::new_lexer_atnsimulator(
                    _ATN.clone(),
                    _decision_to_DFA.clone(),
                    _shared_context_cache.clone(),
                ),
                DesignLexerActions{},
                tf
            )
        }
    }
}

impl<'input, Input:CharStream<From<'input> >> DesignLexer<'input,Input> where &'input LocalTokenFactory<'input>:Default{
    // Convenience constructor using the default (arena) token factory.
    pub fn new(input: Input) -> Self{
        DesignLexer::new_with_token_factory(input, <&LocalTokenFactory<'input> as Default>::default())
    }
}

// Empty actions struct: this grammar defines no lexer actions/predicates.
pub struct DesignLexerActions {
}

impl DesignLexerActions{
}

impl<'input, Input:CharStream<From<'input> >> Actions<'input,BaseLexer<'input,DesignLexerActions,Input,LocalTokenFactory<'input>>> for DesignLexerActions{
}

impl<'input, Input:CharStream<From<'input> >> DesignLexer<'input,Input>{
}

impl<'input, Input:CharStream<From<'input> >> LexerRecog<'input,BaseLexer<'input,DesignLexerActions,Input,LocalTokenFactory<'input>>> for DesignLexerActions{
}

impl<'input> TokenAware<'input> for DesignLexerActions{
    type TF = LocalTokenFactory<'input>;
}

// TokenSource is implemented by forwarding every call to the wrapped BaseLexer.
impl<'input, Input:CharStream<From<'input> >> TokenSource<'input> for DesignLexer<'input,Input>{
    type TF = LocalTokenFactory<'input>;

    fn next_token(&mut self) -> <Self::TF as TokenFactory<'input>>::Tok {
        self.base.next_token()
    }

    fn get_line(&self) -> isize {
        self.base.get_line()
    }

    fn get_char_position_in_line(&self) -> isize {
        self.base.get_char_position_in_line()
    }

    fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> {
        self.base.get_input_stream()
    }

    fn get_source_name(&self) -> String {
        self.base.get_source_name()
    }

    fn get_token_factory(&self) -> &'input Self::TF {
        self.base.get_token_factory()
    }
}

lazy_static!
{ static ref _ATN: Arc<ATN> = Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars())); static ref _decision_to_DFA: Arc<Vec<antlr_rust::RwLock<DFA>>> = { let mut dfa = Vec::new(); let size = _ATN.decision_to_state.len(); for i in 0..size { dfa.push(DFA::new( _ATN.clone(), _ATN.get_decision_state(i), i as isize, ).into()) } Arc::new(dfa) }; } const _serializedATN:&'static str = "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\ \x2f\u{221}\x08\x01\x04\x02\x09\x02\x04\x03\x09\x03\x04\x04\x09\x04\x04\ \x05\x09\x05\x04\x06\x09\x06\x04\x07\x09\x07\x04\x08\x09\x08\x04\x09\x09\ \x09\x04\x0a\x09\x0a\x04\x0b\x09\x0b\x04\x0c\x09\x0c\x04\x0d\x09\x0d\x04\ \x0e\x09\x0e\x04\x0f\x09\x0f\x04\x10\x09\x10\x04\x11\x09\x11\x04\x12\x09\ \x12\x04\x13\x09\x13\x04\x14\x09\x14\x04\x15\x09\x15\x04\x16\x09\x16\x04\ \x17\x09\x17\x04\x18\x09\x18\x04\x19\x09\x19\x04\x1a\x09\x1a\x04\x1b\x09\ \x1b\x04\x1c\x09\x1c\x04\x1d\x09\x1d\x04\x1e\x09\x1e\x04\x1f\x09\x1f\x04\ \x20\x09\x20\x04\x21\x09\x21\x04\x22\x09\x22\x04\x23\x09\x23\x04\x24\x09\ \x24\x04\x25\x09\x25\x04\x26\x09\x26\x04\x27\x09\x27\x04\x28\x09\x28\x04\ \x29\x09\x29\x04\x2a\x09\x2a\x04\x2b\x09\x2b\x04\x2c\x09\x2c\x04\x2d\x09\ \x2d\x04\x2e\x09\x2e\x04\x2f\x09\x2f\x04\x30\x09\x30\x04\x31\x09\x31\x04\ \x32\x09\x32\x04\x33\x09\x33\x04\x34\x09\x34\x04\x35\x09\x35\x04\x36\x09\ \x36\x04\x37\x09\x37\x03\x02\x03\x02\x03\x02\x03\x02\x03\x03\x03\x03\x03\ \x03\x03\x04\x03\x04\x03\x04\x03\x05\x03\x05\x03\x06\x03\x06\x03\x07\x03\ \x07\x03\x08\x03\x08\x03\x09\x03\x09\x03\x09\x03\x09\x03\x09\x03\x09\x03\ \x09\x03\x0a\x03\x0a\x03\x0a\x03\x0a\x03\x0a\x03\x0a\x03\x0a\x03\x0a\x03\ \x0a\x03\x0a\x05\x0a\u{93}\x0a\x0a\x03\x0b\x03\x0b\x03\x0b\x03\x0b\x03\ \x0b\x03\x0b\x03\x0b\x03\x0b\x03\x0b\x03\x0b\x05\x0b\u{9f}\x0a\x0b\x03\ \x0c\x03\x0c\x03\x0c\x03\x0c\x03\x0c\x05\x0c\u{a6}\x0a\x0c\x03\x0d\x03\ \x0d\x03\x0d\x03\x0d\x03\x0d\x03\x0d\x03\x0d\x03\x0d\x05\x0d\u{b0}\x0a\ 
\x0d\x03\x0e\x03\x0e\x03\x0e\x03\x0e\x03\x0e\x05\x0e\u{b7}\x0a\x0e\x03\ \x0f\x03\x0f\x03\x0f\x03\x0f\x03\x0f\x03\x0f\x03\x0f\x03\x0f\x03\x0f\x03\ \x0f\x03\x0f\x03\x0f\x05\x0f\u{c5}\x0a\x0f\x03\x10\x03\x10\x03\x10\x03\ \x10\x03\x10\x03\x10\x03\x10\x03\x10\x03\x10\x03\x10\x05\x10\u{d1}\x0a\ \x10\x03\x11\x03\x11\x03\x11\x03\x11\x03\x11\x03\x11\x03\x11\x03\x11\x03\ \x11\x03\x11\x03\x11\x03\x11\x03\x11\x03\x11\x03\x11\x03\x11\x05\x11\u{e3}\ \x0a\x11\x03\x12\x03\x12\x03\x12\x03\x12\x03\x12\x03\x12\x03\x12\x03\x12\ \x03\x12\x03\x12\x05\x12\u{ef}\x0a\x12\x03\x13\x03\x13\x03\x13\x03\x13\ \x03\x13\x03\x13\x03\x13\x03\x13\x03\x13\x03\x13\x03\x13\x03\x13\x03\x13\ \x03\x13\x03\x13\x05\x13\u{100}\x0a\x13\x03\x14\x03\x14\x03\x14\x03\x14\ \x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\ \x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x03\x14\x05\x14\u{116}\ \x0a\x14\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15\ \x03\x15\x03\x15\x03\x15\x03\x15\x03\x15\x03\x15\x05\x15\u{126}\x0a\x15\ \x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\ \x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\x03\x16\ \x05\x16\u{13a}\x0a\x16\x03\x17\x03\x17\x03\x17\x03\x17\x03\x17\x03\x17\ \x03\x17\x03\x17\x03\x17\x03\x17\x03\x17\x03\x17\x03\x17\x03\x17\x03\x17\ \x03\x17\x05\x17\u{14c}\x0a\x17\x03\x18\x03\x18\x03\x18\x07\x18\u{151}\ \x0a\x18\x0c\x18\x0e\x18\u{154}\x0b\x18\x03\x18\x03\x18\x03\x19\x03\x19\ \x03\x1a\x03\x1a\x03\x1b\x03\x1b\x03\x1c\x03\x1c\x03\x1d\x03\x1d\x03\x1e\ \x03\x1e\x03\x1f\x03\x1f\x03\x20\x03\x20\x03\x21\x03\x21\x03\x22\x03\x22\ \x03\x23\x03\x23\x03\x24\x03\x24\x03\x25\x03\x25\x07\x25\u{172}\x0a\x25\ \x0c\x25\x0e\x25\u{175}\x0b\x25\x03\x26\x03\x26\x03\x27\x03\x27\x07\x27\ \u{17b}\x0a\x27\x0c\x27\x0e\x27\u{17e}\x0b\x27\x03\x28\x03\x28\x03\x28\ \x05\x28\u{183}\x0a\x28\x03\x28\x06\x28\u{186}\x0a\x28\x0d\x28\x0e\x28\ \u{187}\x03\x28\x05\x28\u{18b}\x0a\x28\x05\x28\u{18d}\x0a\x28\x03\x28\x05\ 
\x28\u{190}\x0a\x28\x03\x29\x03\x29\x03\x29\x05\x29\u{195}\x0a\x29\x03\ \x29\x03\x29\x05\x29\u{199}\x0a\x29\x03\x29\x05\x29\u{19c}\x0a\x29\x03\ \x29\x05\x29\u{19f}\x0a\x29\x03\x29\x03\x29\x03\x29\x05\x29\u{1a4}\x0a\ \x29\x03\x29\x05\x29\u{1a7}\x0a\x29\x05\x29\u{1a9}\x0a\x29\x03\x2a\x03\ \x2a\x03\x2b\x03\x2b\x05\x2b\u{1af}\x0a\x2b\x03\x2b\x03\x2b\x03\x2c\x06\ \x2c\u{1b4}\x0a\x2c\x0d\x2c\x0e\x2c\u{1b5}\x03\x2d\x03\x2d\x03\x2d\x03\ \x2d\x05\x2d\u{1bc}\x0a\x2d\x03\x2d\x05\x2d\u{1bf}\x0a\x2d\x03\x2d\x03\ \x2d\x03\x2d\x06\x2d\u{1c4}\x0a\x2d\x0d\x2d\x0e\x2d\u{1c5}\x03\x2d\x03\ \x2d\x03\x2d\x03\x2d\x03\x2d\x05\x2d\u{1cd}\x0a\x2d\x03\x2e\x03\x2e\x03\ \x2f\x03\x2f\x07\x2f\u{1d3}\x0a\x2f\x0c\x2f\x0e\x2f\u{1d6}\x0b\x2f\x03\ \x2f\x05\x2f\u{1d9}\x0a\x2f\x03\x2f\x06\x2f\u{1dc}\x0a\x2f\x0d\x2f\x0e\ \x2f\u{1dd}\x03\x2f\x03\x2f\x06\x2f\u{1e2}\x0a\x2f\x0d\x2f\x0e\x2f\u{1e3}\ \x05\x2f\u{1e6}\x0a\x2f\x05\x2f\u{1e8}\x0a\x2f\x03\x30\x03\x30\x05\x30\ \u{1ec}\x0a\x30\x03\x31\x03\x31\x03\x31\x03\x31\x05\x31\u{1f2}\x0a\x31\ \x03\x32\x03\x32\x03\x33\x06\x33\u{1f7}\x0a\x33\x0d\x33\x0e\x33\u{1f8}\ \x03\x33\x03\x33\x03\x34\x03\x34\x03\x34\x05\x34\u{200}\x0a\x34\x03\x35\ \x06\x35\u{203}\x0a\x35\x0d\x35\x0e\x35\u{204}\x03\x35\x03\x35\x03\x36\ \x03\x36\x03\x36\x03\x36\x07\x36\u{20d}\x0a\x36\x0c\x36\x0e\x36\u{210}\ \x0b\x36\x03\x36\x03\x36\x03\x36\x03\x36\x03\x36\x03\x37\x03\x37\x03\x37\ \x03\x37\x07\x37\u{21b}\x0a\x37\x0c\x37\x0e\x37\u{21e}\x0b\x37\x03\x37\ \x03\x37\x03\u{20e}\x02\x38\x03\x03\x05\x04\x07\x05\x09\x06\x0b\x07\x0d\ \x08\x0f\x09\x11\x0a\x13\x0b\x15\x0c\x17\x0d\x19\x0e\x1b\x0f\x1d\x10\x1f\ \x11\x21\x12\x23\x13\x25\x14\x27\x15\x29\x16\x2b\x17\x2d\x18\x2f\x19\x31\ \x1a\x33\x1b\x35\x1c\x37\x1d\x39\x1e\x3b\x1f\x3d\x20\x3f\x21\x41\x22\x43\ \x23\x45\x24\x47\x25\x49\x26\x4b\x27\x4d\x28\x4f\x29\x51\x2a\x53\x02\x55\ \x02\x57\x02\x59\x02\x5b\x02\x5d\x02\x5f\x02\x61\x02\x63\x02\x65\x2b\x67\ \x2c\x69\x2d\x6b\x2e\x6d\x2f\x03\x02\x14\x06\x02\x0c\x0c\x0f\x0f\x24\x24\ 
\x5e\x5e\x03\x02\x33\x3b\x04\x02\x4e\x4e\x6e\x6e\x06\x02\x46\x46\x48\x48\ \x66\x66\x68\x68\x04\x02\x47\x47\x67\x67\x04\x02\x2d\x2d\x2f\x2f\x0a\x02\ \x24\x24\x29\x29\x5e\x5e\x64\x64\x68\x68\x70\x70\x74\x74\x76\x76\x03\x02\ \x32\x35\x03\x02\x32\x39\x05\x02\x32\x3b\x43\x48\x63\x68\x03\x02\x32\x3b\ \x04\x02\x32\x3b\x61\x61\x06\x02\x26\x26\x43\x5c\x61\x61\x63\x7c\x04\x02\ \x02\u{81}\u{10802}\u{10c01}\x03\x02\u{10802}\u{10c01}\x03\x02\u{10c02}\ \u{e001}\x05\x02\x0b\x0c\x0f\x0f\x22\x22\x04\x02\x0c\x0c\x0f\x0f\x02\u{259}\ \x02\x03\x03\x02\x02\x02\x02\x05\x03\x02\x02\x02\x02\x07\x03\x02\x02\x02\ \x02\x09\x03\x02\x02\x02\x02\x0b\x03\x02\x02\x02\x02\x0d\x03\x02\x02\x02\ \x02\x0f\x03\x02\x02\x02\x02\x11\x03\x02\x02\x02\x02\x13\x03\x02\x02\x02\ \x02\x15\x03\x02\x02\x02\x02\x17\x03\x02\x02\x02\x02\x19\x03\x02\x02\x02\ \x02\x1b\x03\x02\x02\x02\x02\x1d\x03\x02\x02\x02\x02\x1f\x03\x02\x02\x02\ \x02\x21\x03\x02\x02\x02\x02\x23\x03\x02\x02\x02\x02\x25\x03\x02\x02\x02\ \x02\x27\x03\x02\x02\x02\x02\x29\x03\x02\x02\x02\x02\x2b\x03\x02\x02\x02\ \x02\x2d\x03\x02\x02\x02\x02\x2f\x03\x02\x02\x02\x02\x31\x03\x02\x02\x02\ \x02\x33\x03\x02\x02\x02\x02\x35\x03\x02\x02\x02\x02\x37\x03\x02\x02\x02\ \x02\x39\x03\x02\x02\x02\x02\x3b\x03\x02\x02\x02\x02\x3d\x03\x02\x02\x02\ \x02\x3f\x03\x02\x02\x02\x02\x41\x03\x02\x02\x02\x02\x43\x03\x02\x02\x02\ \x02\x45\x03\x02\x02\x02\x02\x47\x03\x02\x02\x02\x02\x49\x03\x02\x02\x02\ \x02\x4b\x03\x02\x02\x02\x02\x4d\x03\x02\x02\x02\x02\x4f\x03\x02\x02\x02\ \x02\x51\x03\x02\x02\x02\x02\x65\x03\x02\x02\x02\x02\x67\x03\x02\x02\x02\ \x02\x69\x03\x02\x02\x02\x02\x6b\x03\x02\x02\x02\x02\x6d\x03\x02\x02\x02\ \x03\x6f\x03\x02\x02\x02\x05\x73\x03\x02\x02\x02\x07\x76\x03\x02\x02\x02\ \x09\x79\x03\x02\x02\x02\x0b\x7b\x03\x02\x02\x02\x0d\x7d\x03\x02\x02\x02\ \x0f\x7f\x03\x02\x02\x02\x11\u{81}\x03\x02\x02\x02\x13\u{92}\x03\x02\x02\ \x02\x15\u{9e}\x03\x02\x02\x02\x17\u{a5}\x03\x02\x02\x02\x19\u{af}\x03\ \x02\x02\x02\x1b\u{b6}\x03\x02\x02\x02\x1d\u{c4}\x03\x02\x02\x02\x1f\u{d0}\ 
\x03\x02\x02\x02\x21\u{e2}\x03\x02\x02\x02\x23\u{ee}\x03\x02\x02\x02\x25\ \u{ff}\x03\x02\x02\x02\x27\u{115}\x03\x02\x02\x02\x29\u{125}\x03\x02\x02\ \x02\x2b\u{139}\x03\x02\x02\x02\x2d\u{14b}\x03\x02\x02\x02\x2f\u{14d}\x03\ \x02\x02\x02\x31\u{157}\x03\x02\x02\x02\x33\u{159}\x03\x02\x02\x02\x35\ \u{15b}\x03\x02\x02\x02\x37\u{15d}\x03\x02\x02\x02\x39\u{15f}\x03\x02\x02\ \x02\x3b\u{161}\x03\x02\x02\x02\x3d\u{163}\x03\x02\x02\x02\x3f\u{165}\x03\ \x02\x02\x02\x41\u{167}\x03\x02\x02\x02\x43\u{169}\x03\x02\x02\x02\x45\ \u{16b}\x03\x02\x02\x02\x47\u{16d}\x03\x02\x02\x02\x49\u{16f}\x03\x02\x02\ \x02\x4b\u{176}\x03\x02\x02\x02\x4d\u{178}\x03\x02\x02\x02\x4f\u{18c}\x03\ \x02\x02\x02\x51\u{1a8}\x03\x02\x02\x02\x53\u{1aa}\x03\x02\x02\x02\x55\ \u{1ac}\x03\x02\x02\x02\x57\u{1b3}\x03\x02\x02\x02\x59\u{1cc}\x03\x02\x02\ \x02\x5b\u{1ce}\x03\x02\x02\x02\x5d\u{1e7}\x03\x02\x02\x02\x5f\u{1eb}\x03\ \x02\x02\x02\x61\u{1f1}\x03\x02\x02\x02\x63\u{1f3}\x03\x02\x02\x02\x65\ \u{1f6}\x03\x02\x02\x02\x67\u{1ff}\x03\x02\x02\x02\x69\u{202}\x03\x02\x02\ \x02\x6b\u{208}\x03\x02\x02\x02\x6d\u{216}\x03\x02\x02\x02\x6f\x70\x07\ \x74\x02\x02\x70\x71\x07\x67\x02\x02\x71\x72\x07\x6f\x02\x02\x72\x04\x03\ \x02\x02\x02\x73\x74\x07\x72\x02\x02\x74\x75\x07\x7a\x02\x02\x75\x06\x03\ \x02\x02\x02\x76\x77\x07\x67\x02\x02\x77\x78\x07\x6f\x02\x02\x78\x08\x03\ \x02\x02\x02\x79\x7a\x07\x2f\x02\x02\x7a\x0a\x03\x02\x02\x02\x7b\x7c\x07\ \x7e\x02\x02\x7c\x0c\x03\x02\x02\x02\x7d\x7e\x07\x3d\x02\x02\x7e\x0e\x03\ \x02\x02\x02\x7f\u{80}\x07\x3f\x02\x02\u{80}\x10\x03\x02\x02\x02\u{81}\ \u{82}\x07\x74\x02\x02\u{82}\u{83}\x07\x67\x02\x02\u{83}\u{84}\x07\x72\ \x02\x02\u{84}\u{85}\x07\x67\x02\x02\u{85}\u{86}\x07\x63\x02\x02\u{86}\ \u{87}\x07\x76\x02\x02\u{87}\x12\x03\x02\x02\x02\u{88}\u{89}\x07\x69\x02\ \x02\u{89}\u{8a}\x07\x71\x02\x02\u{8a}\u{8b}\x07\x76\x02\x02\u{8b}\u{93}\ \x07\x71\x02\x02\u{8c}\u{8d}\x07\x49\x02\x02\u{8d}\u{8e}\x07\x51\x02\x02\ \u{8e}\u{8f}\x07\x56\x02\x02\u{8f}\u{93}\x07\x51\x02\x02\u{90}\u{91}\x07\ 
\u{8df5}\x02\x02\u{91}\u{93}\x07\u{8f6e}\x02\x02\u{92}\u{88}\x03\x02\x02\ \x02\u{92}\u{8c}\x03\x02\x02\x02\u{92}\u{90}\x03\x02\x02\x02\u{93}\x14\ \x03\x02\x02\x02\u{94}\u{95}\x07\x75\x02\x02\u{95}\u{96}\x07\x6a\x02\x02\ \u{96}\u{97}\x07\x71\x02\x02\u{97}\u{9f}\x07\x79\x02\x02\u{98}\u{99}\x07\ \x55\x02\x02\u{99}\u{9a}\x07\x4a\x02\x02\u{9a}\u{9b}\x07\x51\x02\x02\u{9b}\ \u{9f}\x07\x59\x02\x02\u{9c}\u{9d}\x07\u{5c57}\x02\x02\u{9d}\u{9f}\x07\ \u{793c}\x02\x02\u{9e}\u{94}\x03\x02\x02\x02\u{9e}\u{98}\x03\x02\x02\x02\ \u{9e}\u{9c}\x03\x02\x02\x02\u{9f}\x16\x03\x02\x02\x02\u{a0}\u{a1}\x07\ \x68\x02\x02\u{a1}\u{a2}\x07\x6e\x02\x02\u{a2}\u{a3}\x07\x71\x02\x02\u{a3}\ \u{a6}\x07\x79\x02\x02\u{a4}\u{a6}\x07\u{6d43}\x02\x02\u{a5}\u{a0}\x03\ \x02\x02\x02\u{a5}\u{a4}\x03\x02\x02\x02\u{a6}\x18\x03\x02\x02\x02\u{a7}\ \u{a8}\x07\x75\x02\x02\u{a8}\u{a9}\x07\x67\x02\x02\u{a9}\u{b0}\x07\x67\ \x02\x02\u{aa}\u{ab}\x07\x55\x02\x02\u{ab}\u{ac}\x07\x47\x02\x02\u{ac}\ \u{b0}\x07\x47\x02\x02\u{ad}\u{ae}\x07\u{770d}\x02\x02\u{ae}\u{b0}\x07\ \u{5232}\x02\x02\u{af}\u{a7}\x03\x02\x02\x02\u{af}\u{aa}\x03\x02\x02\x02\ \u{af}\u{ad}\x03\x02\x02\x02\u{b0}\x1a\x03\x02\x02\x02\u{b1}\u{b2}\x07\ \x66\x02\x02\u{b2}\u{b7}\x07\x71\x02\x02\u{b3}\u{b4}\x07\x46\x02\x02\u{b4}\ \u{b7}\x07\x51\x02\x02\u{b5}\u{b7}\x07\u{505c}\x02\x02\u{b6}\u{b1}\x03\ \x02\x02\x02\u{b6}\u{b3}\x03\x02\x02\x02\u{b6}\u{b5}\x03\x02\x02\x02\u{b7}\ \x1c\x03\x02\x02\x02\u{b8}\u{b9}\x07\x74\x02\x02\u{b9}\u{ba}\x07\x67\x02\ \x02\u{ba}\u{bb}\x07\x63\x02\x02\u{bb}\u{bc}\x07\x65\x02\x02\u{bc}\u{c5}\ \x07\x76\x02\x02\u{bd}\u{be}\x07\x54\x02\x02\u{be}\u{bf}\x07\x47\x02\x02\ \u{bf}\u{c0}\x07\x43\x02\x02\u{c0}\u{c1}\x07\x45\x02\x02\u{c1}\u{c5}\x07\ \x56\x02\x02\u{c2}\u{c3}\x07\u{54cf}\x02\x02\u{c3}\u{c5}\x07\u{5e96}\x02\ \x02\u{c4}\u{b8}\x03\x02\x02\x02\u{c4}\u{bd}\x03\x02\x02\x02\u{c4}\u{c2}\ \x03\x02\x02\x02\u{c5}\x1e\x03\x02\x02\x02\u{c6}\u{c7}\x07\x79\x02\x02\ \u{c7}\u{c8}\x07\x6b\x02\x02\u{c8}\u{c9}\x07\x76\x02\x02\u{c9}\u{d1}\x07\ 
\x6a\x02\x02\u{ca}\u{cb}\x07\x59\x02\x02\u{cb}\u{cc}\x07\x4b\x02\x02\u{cc}\ \u{cd}\x07\x56\x02\x02\u{cd}\u{d1}\x07\x4a\x02\x02\u{ce}\u{cf}\x07\u{4f81}\ \x02\x02\u{cf}\u{d1}\x07\u{752a}\x02\x02\u{d0}\u{c6}\x03\x02\x02\x02\u{d0}\ \u{ca}\x03\x02\x02\x02\u{d0}\u{ce}\x03\x02\x02\x02\u{d1}\x20\x03\x02\x02\ \x02\u{d2}\u{d3}\x07\x63\x02\x02\u{d3}\u{d4}\x07\x70\x02\x02\u{d4}\u{d5}\ \x07\x6b\x02\x02\u{d5}\u{d6}\x07\x6f\x02\x02\u{d6}\u{d7}\x07\x63\x02\x02\ \u{d7}\u{d8}\x07\x76\x02\x02\u{d8}\u{e3}\x07\x67\x02\x02\u{d9}\u{da}\x07\ \x43\x02\x02\u{da}\u{db}\x07\x50\x02\x02\u{db}\u{dc}\x07\x4b\x02\x02\u{dc}\ \u{dd}\x07\x4f\x02\x02\u{dd}\u{de}\x07\x43\x02\x02\u{de}\u{df}\x07\x56\ \x02\x02\u{df}\u{e3}\x07\x47\x02\x02\u{e0}\u{e1}\x07\u{52aa}\x02\x02\u{e1}\ \u{e3}\x07\u{753d}\x02\x02\u{e2}\u{d2}\x03\x02\x02\x02\u{e2}\u{d9}\x03\ \x02\x02\x02\u{e2}\u{e0}\x03\x02\x02\x02\u{e3}\x22\x03\x02\x02\x02\u{e4}\ \u{e5}\x07\x72\x02\x02\u{e5}\u{e6}\x07\x63\x02\x02\u{e6}\u{e7}\x07\x69\ \x02\x02\u{e7}\u{ef}\x07\x67\x02\x02\u{e8}\u{e9}\x07\x52\x02\x02\u{e9}\ \u{ea}\x07\x43\x02\x02\u{ea}\u{eb}\x07\x49\x02\x02\u{eb}\u{ef}\x07\x47\ \x02\x02\u{ec}\u{ed}\x07\u{9877}\x02\x02\u{ed}\u{ef}\x07\u{9764}\x02\x02\ \u{ee}\u{e4}\x03\x02\x02\x02\u{ee}\u{e8}\x03\x02\x02\x02\u{ee}\u{ec}\x03\ \x02\x02\x02\u{ef}\x24\x03\x02\x02\x02\u{f0}\u{f1}\x07\x6e\x02\x02\u{f1}\ \u{f2}\x07\x6b\x02\x02\u{f2}\u{f3}\x07\x64\x02\x02\u{f3}\u{f4}\x07\x74\ \x02\x02\u{f4}\u{f5}\x07\x63\x02\x02\u{f5}\u{f6}\x07\x74\x02\x02\u{f6}\ \u{100}\x07\x7b\x02\x02\u{f7}\u{f8}\x07\x4e\x02\x02\u{f8}\u{f9}\x07\x4b\ \x02\x02\u{f9}\u{fa}\x07\x44\x02\x02\u{fa}\u{fb}\x07\x54\x02\x02\u{fb}\ \u{fc}\x07\x43\x02\x02\u{fc}\u{fd}\x07\x54\x02\x02\u{fd}\u{100}\x07\x5b\ \x02\x02\u{fe}\u{100}\x07\u{5e95}\x02\x02\u{ff}\u{f0}\x03\x02\x02\x02\u{ff}\ \u{f7}\x03\x02\x02\x02\u{ff}\u{fe}\x03\x02\x02\x02\u{100}\x26\x03\x02\x02\ \x02\u{101}\u{102}\x07\x65\x02\x02\u{102}\u{103}\x07\x71\x02\x02\u{103}\ \u{104}\x07\x6f\x02\x02\u{104}\u{105}\x07\x72\x02\x02\u{105}\u{106}\x07\ 
\x71\x02\x02\u{106}\u{107}\x07\x70\x02\x02\u{107}\u{108}\x07\x67\x02\x02\ \u{108}\u{109}\x07\x70\x02\x02\u{109}\u{116}\x07\x76\x02\x02\u{10a}\u{10b}\ \x07\x45\x02\x02\u{10b}\u{10c}\x07\x51\x02\x02\u{10c}\u{10d}\x07\x4f\x02\ \x02\u{10d}\u{10e}\x07\x52\x02\x02\u{10e}\u{10f}\x07\x51\x02\x02\u{10f}\ \u{110}\x07\x50\x02\x02\u{110}\u{111}\x07\x47\x02\x02\u{111}\u{112}\x07\ \x50\x02\x02\u{112}\u{116}\x07\x56\x02\x02\u{113}\u{114}\x07\u{7ec6}\x02\ \x02\u{114}\u{116}\x07\u{4ef8}\x02\x02\u{115}\u{101}\x03\x02\x02\x02\u{115}\ \u{10a}\x03\x02\x02\x02\u{115}\u{113}\x03\x02\x02\x02\u{116}\x28\x03\x02\ \x02\x02\u{117}\u{118}\x07\x6e\x02\x02\u{118}\u{119}\x07\x63\x02\x02\u{119}\ \u{11a}\x07\x7b\x02\x02\u{11a}\u{11b}\x07\x71\x02\x02\u{11b}\u{11c}\x07\ \x77\x02\x02\u{11c}\u{126}\x07\x76\x02\x02\u{11d}\u{11e}\x07\x4e\x02\x02\ \u{11e}\u{11f}\x07\x63\x02\x02\u{11f}\u{120}\x07\x7b\x02\x02\u{120}\u{121}\ \x07\x71\x02\x02\u{121}\u{122}\x07\x77\x02\x02\u{122}\u{126}\x07\x76\x02\ \x02\u{123}\u{124}\x07\u{5e05}\x02\x02\u{124}\u{126}\x07\u{5c42}\x02\x02\ \u{125}\u{117}\x03\x02\x02\x02\u{125}\u{11d}\x03\x02\x02\x02\u{125}\u{123}\ \x03\x02\x02\x02\u{126}\x2a\x03\x02\x02\x02\u{127}\u{128}\x07\x4e\x02\x02\ \u{128}\u{129}\x07\x47\x02\x02\u{129}\u{12a}\x07\x48\x02\x02\u{12a}\u{13a}\ \x07\x56\x02\x02\u{12b}\u{12c}\x07\x54\x02\x02\u{12c}\u{12d}\x07\x4b\x02\ \x02\u{12d}\u{12e}\x07\x49\x02\x02\u{12e}\u{12f}\x07\x4a\x02\x02\u{12f}\ \u{13a}\x07\x56\x02\x02\u{130}\u{131}\x07\x56\x02\x02\u{131}\u{132}\x07\ \x51\x02\x02\u{132}\u{13a}\x07\x52\x02\x02\u{133}\u{134}\x07\x44\x02\x02\ \u{134}\u{135}\x07\x51\x02\x02\u{135}\u{136}\x07\x56\x02\x02\u{136}\u{137}\ \x07\x56\x02\x02\u{137}\u{138}\x07\x51\x02\x02\u{138}\u{13a}\x07\x4f\x02\ \x02\u{139}\u{127}\x03\x02\x02\x02\u{139}\u{12b}\x03\x02\x02\x02\u{139}\ \u{130}\x03\x02\x02\x02\u{139}\u{133}\x03\x02\x02\x02\u{13a}\x2c\x03\x02\ \x02\x02\u{13b}\u{13c}\x07\x75\x02\x02\u{13c}\u{13d}\x07\x76\x02\x02\u{13d}\ 
\u{13e}\x07\x7b\x02\x02\u{13e}\u{13f}\x07\x6e\x02\x02\u{13f}\u{14c}\x07\ \x67\x02\x02\u{140}\u{141}\x07\x55\x02\x02\u{141}\u{142}\x07\x56\x02\x02\ \u{142}\u{143}\x07\x5b\x02\x02\u{143}\u{144}\x07\x4e\x02\x02\u{144}\u{14c}\ \x07\x47\x02\x02\u{145}\u{146}\x07\x45\x02\x02\u{146}\u{147}\x07\x55\x02\ \x02\u{147}\u{14c}\x07\x55\x02\x02\u{148}\u{149}\x07\x65\x02\x02\u{149}\ \u{14a}\x07\x75\x02\x02\u{14a}\u{14c}\x07\x75\x02\x02\u{14b}\u{13b}\x03\ \x02\x02\x02\u{14b}\u{140}\x03\x02\x02\x02\u{14b}\u{145}\x03\x02\x02\x02\ \u{14b}\u{148}\x03\x02\x02\x02\u{14c}\x2e\x03\x02\x02\x02\u{14d}\u{152}\ \x07\x24\x02\x02\u{14e}\u{151}\x0a\x02\x02\x02\u{14f}\u{151}\x05\x59\x2d\ \x02\u{150}\u{14e}\x03\x02\x02\x02\u{150}\u{14f}\x03\x02\x02\x02\u{151}\ \u{154}\x03\x02\x02\x02\u{152}\u{150}\x03\x02\x02\x02\u{152}\u{153}\x03\ \x02\x02\x02\u{153}\u{155}\x03\x02\x02\x02\u{154}\u{152}\x03\x02\x02\x02\ \u{155}\u{156}\x07\x24\x02\x02\u{156}\x30\x03\x02\x02\x02\u{157}\u{158}\ \x07\x2a\x02\x02\u{158}\x32\x03\x02\x02\x02\u{159}\u{15a}\x07\x2b\x02\x02\ \u{15a}\x34\x03\x02\x02\x02\u{15b}\u{15c}\x07\x7d\x02\x02\u{15c}\x36\x03\ \x02\x02\x02\u{15d}\u{15e}\x07\x7f\x02\x02\u{15e}\x38\x03\x02\x02\x02\u{15f}\ \u{160}\x07\x5d\x02\x02\u{160}\x3a\x03\x02\x02\x02\u{161}\u{162}\x07\x5f\ \x02\x02\u{162}\x3c\x03\x02\x02\x02\u{163}\u{164}\x07\x24\x02\x02\u{164}\ \x3e\x03\x02\x02\x02\u{165}\u{166}\x07\x29\x02\x02\u{166}\x40\x03\x02\x02\ \x02\u{167}\u{168}\x07\x3c\x02\x02\u{168}\x42\x03\x02\x02\x02\u{169}\u{16a}\ \x07\x30\x02\x02\u{16a}\x44\x03\x02\x02\x02\u{16b}\u{16c}\x07\x2e\x02\x02\ \u{16c}\x46\x03\x02\x02\x02\u{16d}\u{16e}\x05\x61\x31\x02\u{16e}\x48\x03\ \x02\x02\x02\u{16f}\u{173}\x05\x61\x31\x02\u{170}\u{172}\x05\x5f\x30\x02\ \u{171}\u{170}\x03\x02\x02\x02\u{172}\u{175}\x03\x02\x02\x02\u{173}\u{171}\ \x03\x02\x02\x02\u{173}\u{174}\x03\x02\x02\x02\u{174}\x4a\x03\x02\x02\x02\ \u{175}\u{173}\x03\x02\x02\x02\u{176}\u{177}\x05\x5d\x2f\x02\u{177}\x4c\ 
\x03\x02\x02\x02\u{178}\u{17c}\x05\x5f\x30\x02\u{179}\u{17b}\x05\x5f\x30\ \x02\u{17a}\u{179}\x03\x02\x02\x02\u{17b}\u{17e}\x03\x02\x02\x02\u{17c}\ \u{17a}\x03\x02\x02\x02\u{17c}\u{17d}\x03\x02\x02\x02\u{17d}\x4e\x03\x02\ \x02\x02\u{17e}\u{17c}\x03\x02\x02\x02\u{17f}\u{18d}\x07\x32\x02\x02\u{180}\ \u{18a}\x09\x03\x02\x02\u{181}\u{183}\x05\x5d\x2f\x02\u{182}\u{181}\x03\ \x02\x02\x02\u{182}\u{183}\x03\x02\x02\x02\u{183}\u{18b}\x03\x02\x02\x02\ \u{184}\u{186}\x07\x61\x02\x02\u{185}\u{184}\x03\x02\x02\x02\u{186}\u{187}\ \x03\x02\x02\x02\u{187}\u{185}\x03\x02\x02\x02\u{187}\u{188}\x03\x02\x02\ \x02\u{188}\u{189}\x03\x02\x02\x02\u{189}\u{18b}\x05\x5d\x2f\x02\u{18a}\ \u{182}\x03\x02\x02\x02\u{18a}\u{185}\x03\x02\x02\x02\u{18b}\u{18d}\x03\ \x02\x02\x02\u{18c}\u{17f}\x03\x02\x02\x02\u{18c}\u{180}\x03\x02\x02\x02\ \u{18d}\u{18f}\x03\x02\x02\x02\u{18e}\u{190}\x09\x04\x02\x02\u{18f}\u{18e}\ \x03\x02\x02\x02\u{18f}\u{190}\x03\x02\x02\x02\u{190}\x50\x03\x02\x02\x02\ \u{191}\u{192}\x05\x5d\x2f\x02\u{192}\u{194}\x07\x30\x02\x02\u{193}\u{195}\ \x05\x5d\x2f\x02\u{194}\u{193}\x03\x02\x02\x02\u{194}\u{195}\x03\x02\x02\ \x02\u{195}\u{199}\x03\x02\x02\x02\u{196}\u{197}\x07\x30\x02\x02\u{197}\ \u{199}\x05\x5d\x2f\x02\u{198}\u{191}\x03\x02\x02\x02\u{198}\u{196}\x03\ \x02\x02\x02\u{199}\u{19b}\x03\x02\x02\x02\u{19a}\u{19c}\x05\x55\x2b\x02\ \u{19b}\u{19a}\x03\x02\x02\x02\u{19b}\u{19c}\x03\x02\x02\x02\u{19c}\u{19e}\ \x03\x02\x02\x02\u{19d}\u{19f}\x09\x05\x02\x02\u{19e}\u{19d}\x03\x02\x02\ \x02\u{19e}\u{19f}\x03\x02\x02\x02\u{19f}\u{1a9}\x03\x02\x02\x02\u{1a0}\ \u{1a6}\x05\x5d\x2f\x02\u{1a1}\u{1a3}\x05\x55\x2b\x02\u{1a2}\u{1a4}\x09\ \x05\x02\x02\u{1a3}\u{1a2}\x03\x02\x02\x02\u{1a3}\u{1a4}\x03\x02\x02\x02\ \u{1a4}\u{1a7}\x03\x02\x02\x02\u{1a5}\u{1a7}\x09\x05\x02\x02\u{1a6}\u{1a1}\ \x03\x02\x02\x02\u{1a6}\u{1a5}\x03\x02\x02\x02\u{1a7}\u{1a9}\x03\x02\x02\ \x02\u{1a8}\u{198}\x03\x02\x02\x02\u{1a8}\u{1a0}\x03\x02\x02\x02\u{1a9}\ 
\x52\x03\x02\x02\x02\u{1aa}\u{1ab}\x04\x32\x3b\x02\u{1ab}\x54\x03\x02\x02\ \x02\u{1ac}\u{1ae}\x09\x06\x02\x02\u{1ad}\u{1af}\x09\x07\x02\x02\u{1ae}\ \u{1ad}\x03\x02\x02\x02\u{1ae}\u{1af}\x03\x02\x02\x02\u{1af}\u{1b0}\x03\ \x02\x02\x02\u{1b0}\u{1b1}\x05\x5d\x2f\x02\u{1b1}\x56\x03\x02\x02\x02\u{1b2}\ \u{1b4}\x05\x53\x2a\x02\u{1b3}\u{1b2}\x03\x02\x02\x02\u{1b4}\u{1b5}\x03\ \x02\x02\x02\u{1b5}\u{1b3}\x03\x02\x02\x02\u{1b5}\u{1b6}\x03\x02\x02\x02\ \u{1b6}\x58\x03\x02\x02\x02\u{1b7}\u{1b8}\x07\x5e\x02\x02\u{1b8}\u{1cd}\ \x09\x08\x02\x02\u{1b9}\u{1be}\x07\x5e\x02\x02\u{1ba}\u{1bc}\x09\x09\x02\ \x02\u{1bb}\u{1ba}\x03\x02\x02\x02\u{1bb}\u{1bc}\x03\x02\x02\x02\u{1bc}\ \u{1bd}\x03\x02\x02\x02\u{1bd}\u{1bf}\x09\x0a\x02\x02\u{1be}\u{1bb}\x03\ \x02\x02\x02\u{1be}\u{1bf}\x03\x02\x02\x02\u{1bf}\u{1c0}\x03\x02\x02\x02\ \u{1c0}\u{1cd}\x09\x0a\x02\x02\u{1c1}\u{1c3}\x07\x5e\x02\x02\u{1c2}\u{1c4}\ \x07\x77\x02\x02\u{1c3}\u{1c2}\x03\x02\x02\x02\u{1c4}\u{1c5}\x03\x02\x02\ \x02\u{1c5}\u{1c3}\x03\x02\x02\x02\u{1c5}\u{1c6}\x03\x02\x02\x02\u{1c6}\ \u{1c7}\x03\x02\x02\x02\u{1c7}\u{1c8}\x05\x5b\x2e\x02\u{1c8}\u{1c9}\x05\ \x5b\x2e\x02\u{1c9}\u{1ca}\x05\x5b\x2e\x02\u{1ca}\u{1cb}\x05\x5b\x2e\x02\ \u{1cb}\u{1cd}\x03\x02\x02\x02\u{1cc}\u{1b7}\x03\x02\x02\x02\u{1cc}\u{1b9}\ \x03\x02\x02\x02\u{1cc}\u{1c1}\x03\x02\x02\x02\u{1cd}\x5a\x03\x02\x02\x02\ \u{1ce}\u{1cf}\x09\x0b\x02\x02\u{1cf}\x5c\x03\x02\x02\x02\u{1d0}\u{1d8}\ \x09\x0c\x02\x02\u{1d1}\u{1d3}\x09\x0d\x02\x02\u{1d2}\u{1d1}\x03\x02\x02\ \x02\u{1d3}\u{1d6}\x03\x02\x02\x02\u{1d4}\u{1d2}\x03\x02\x02\x02\u{1d4}\ \u{1d5}\x03\x02\x02\x02\u{1d5}\u{1d7}\x03\x02\x02\x02\u{1d6}\u{1d4}\x03\ \x02\x02\x02\u{1d7}\u{1d9}\x09\x0c\x02\x02\u{1d8}\u{1d4}\x03\x02\x02\x02\ \u{1d8}\u{1d9}\x03\x02\x02\x02\u{1d9}\u{1e8}\x03\x02\x02\x02\u{1da}\u{1dc}\ \x04\x32\x3b\x02\u{1db}\u{1da}\x03\x02\x02\x02\u{1dc}\u{1dd}\x03\x02\x02\ \x02\u{1dd}\u{1db}\x03\x02\x02\x02\u{1dd}\u{1de}\x03\x02\x02\x02\u{1de}\ \u{1e5}\x03\x02\x02\x02\u{1df}\u{1e1}\x07\x30\x02\x02\u{1e0}\u{1e2}\x04\ 
\x32\x3b\x02\u{1e1}\u{1e0}\x03\x02\x02\x02\u{1e2}\u{1e3}\x03\x02\x02\x02\ \u{1e3}\u{1e1}\x03\x02\x02\x02\u{1e3}\u{1e4}\x03\x02\x02\x02\u{1e4}\u{1e6}\ \x03\x02\x02\x02\u{1e5}\u{1df}\x03\x02\x02\x02\u{1e5}\u{1e6}\x03\x02\x02\ \x02\u{1e6}\u{1e8}\x03\x02\x02\x02\u{1e7}\u{1d0}\x03\x02\x02\x02\u{1e7}\ \u{1db}\x03\x02\x02\x02\u{1e8}\x5e\x03\x02\x02\x02\u{1e9}\u{1ec}\x05\x61\ \x31\x02\u{1ea}\u{1ec}\x05\x5d\x2f\x02\u{1eb}\u{1e9}\x03\x02\x02\x02\u{1eb}\ \u{1ea}\x03\x02\x02\x02\u{1ec}\x60\x03\x02\x02\x02\u{1ed}\u{1f2}\x09\x0e\ \x02\x02\u{1ee}\u{1f2}\x0a\x0f\x02\x02\u{1ef}\u{1f0}\x09\x10\x02\x02\u{1f0}\ \u{1f2}\x09\x11\x02\x02\u{1f1}\u{1ed}\x03\x02\x02\x02\u{1f1}\u{1ee}\x03\ \x02\x02\x02\u{1f1}\u{1ef}\x03\x02\x02\x02\u{1f2}\x62\x03\x02\x02\x02\u{1f3}\ \u{1f4}\x09\x12\x02\x02\u{1f4}\x64\x03\x02\x02\x02\u{1f5}\u{1f7}\x05\x63\ \x32\x02\u{1f6}\u{1f5}\x03\x02\x02\x02\u{1f7}\u{1f8}\x03\x02\x02\x02\u{1f8}\ \u{1f6}\x03\x02\x02\x02\u{1f8}\u{1f9}\x03\x02\x02\x02\u{1f9}\u{1fa}\x03\ \x02\x02\x02\u{1fa}\u{1fb}\x08\x33\x02\x02\u{1fb}\x66\x03\x02\x02\x02\u{1fc}\ \u{1fd}\x07\x0f\x02\x02\u{1fd}\u{200}\x07\x0c\x02\x02\u{1fe}\u{200}\x09\ \x13\x02\x02\u{1ff}\u{1fc}\x03\x02\x02\x02\u{1ff}\u{1fe}\x03\x02\x02\x02\ \u{200}\x68\x03\x02\x02\x02\u{201}\u{203}\x05\x67\x34\x02\u{202}\u{201}\ \x03\x02\x02\x02\u{203}\u{204}\x03\x02\x02\x02\u{204}\u{202}\x03\x02\x02\ \x02\u{204}\u{205}\x03\x02\x02\x02\u{205}\u{206}\x03\x02\x02\x02\u{206}\ \u{207}\x08\x35\x02\x02\u{207}\x6a\x03\x02\x02\x02\u{208}\u{209}\x07\x31\ \x02\x02\u{209}\u{20a}\x07\x2c\x02\x02\u{20a}\u{20e}\x03\x02\x02\x02\u{20b}\ \u{20d}\x0b\x02\x02\x02\u{20c}\u{20b}\x03\x02\x02\x02\u{20d}\u{210}\x03\ \x02\x02\x02\u{20e}\u{20f}\x03\x02\x02\x02\u{20e}\u{20c}\x03\x02\x02\x02\ \u{20f}\u{211}\x03\x02\x02\x02\u{210}\u{20e}\x03\x02\x02\x02\u{211}\u{212}\ \x07\x2c\x02\x02\u{212}\u{213}\x07\x31\x02\x02\u{213}\u{214}\x03\x02\x02\ \x02\u{214}\u{215}\x08\x36\x02\x02\u{215}\x6c\x03\x02\x02\x02\u{216}\u{217}\ 
\x07\x31\x02\x02\u{217}\u{218}\x07\x31\x02\x02\u{218}\u{21c}\x03\x02\x02\ \x02\u{219}\u{21b}\x0a\x13\x02\x02\u{21a}\u{219}\x03\x02\x02\x02\u{21b}\ \u{21e}\x03\x02\x02\x02\u{21c}\u{21a}\x03\x02\x02\x02\u{21c}\u{21d}\x03\ \x02\x02\x02\u{21d}\u{21f}\x03\x02\x02\x02\u{21e}\u{21c}\x03\x02\x02\x02\ \u{21f}\u{220}\x08\x37\x02\x02\u{220}\x6e\x03\x02\x02\x02\x34\x02\u{92}\ \u{9e}\u{a5}\u{af}\u{b6}\u{c4}\u{d0}\u{e2}\u{ee}\u{ff}\u{115}\u{125}\u{139}\ \u{14b}\u{150}\u{152}\u{173}\u{17c}\u{182}\u{187}\u{18a}\u{18c}\u{18f}\ \u{194}\u{198}\u{19b}\u{19e}\u{1a3}\u{1a6}\u{1a8}\u{1ae}\u{1b5}\u{1bb}\ \u{1be}\u{1c5}\u{1cc}\u{1d4}\u{1d8}\u{1dd}\u{1e3}\u{1e5}\u{1e7}\u{1eb}\ \u{1f1}\u{1f8}\u{1ff}\u{204}\u{20e}\u{21c}\x03\x08\x02\x02";
use crate::command::Command;
use crate::direction::Direction;
use crate::point::Point;
use crate::snake::Snake;
use crossterm::cursor::{Hide, MoveTo, Show};
use crossterm::event::{poll, read, Event, KeyCode, KeyEvent, KeyModifiers};
use crossterm::style::{Color, Print, ResetColor, SetForegroundColor};
use crossterm::terminal::{disable_raw_mode, enable_raw_mode, size, Clear, ClearType, SetSize};
use crossterm::ExecutableCommand;
use rand::Rng;
use std::io::Stdout;
use std::time::{Duration, Instant};

// Tick interval bounds in milliseconds: the game starts at MAX_INTERVAL and
// shortens toward MIN_INTERVAL as `speed` grows (see `calculate_interval`).
const MAX_INTERVAL: u16 = 700;
const MIN_INTERVAL: u16 = 200;
// Upper bound for `speed`; also the divisor used when deciding when to speed up.
const MAX_SPEED: u16 = 20;

/// Terminal snake game: owns the play-field dimensions, the snake, the food
/// position, and the stdout handle used for all crossterm drawing.
#[derive(Debug)]
pub struct Game {
    stdout: Stdout,
    // Terminal size captured at construction so `restore_ui` can put it back.
    original_terminal_size: (u16, u16),
    width: u16,
    height: u16,
    // `None` only before the first `place_food` call.
    food: Option<Point>,
    snake: Snake,
    // Current speed level, 0..=MAX_SPEED; higher means shorter tick interval.
    speed: u16,
    score: u16,
}

impl Game {
    /// Creates a game over a `width` x `height` field with a length-3 snake
    /// starting at the centre, headed in a random direction.
    ///
    /// Panics if the terminal size cannot be queried.
    pub fn new(stdout: Stdout, width: u16, height: u16) -> Self {
        let original_terminal_size: (u16, u16) = size().unwrap();
        Self {
            stdout,
            original_terminal_size,
            width,
            height,
            food: None,
            snake: Snake::new(
                Point::new(width / 2, height / 2),
                3,
                // Pick one of the four directions uniformly at random.
                match rand::thread_rng().gen_range(0, 4) {
                    0 => Direction::Up,
                    1 => Direction::Right,
                    2 => Direction::Down,
                    _ => Direction::Left,
                },
            ),
            speed: 0,
            score: 0,
        }
    }

    /// Main game loop: sets up the UI, ticks the snake until it dies or the
    /// player quits, then restores the terminal and prints the final score.
    pub fn run(&mut self) {
        self.place_food();
        self.prepare_ui();
        self.render();
        let mut done = false;
        while !done {
            let interval = self.calculate_interval();
            // Direction is sampled once per tick; turn commands within the
            // tick are validated against this snapshot.
            let direction = self.snake.get_direction();
            let now = Instant::now();
            // Poll for input for the remainder of the tick interval.
            while now.elapsed() < interval {
                if let Some(command) = self.get_command(interval - now.elapsed()) {
                    match command {
                        Command::Quit => {
                            done = true;
                            break;
                        }
                        Command::Turn(towards) => {
                            // Ignore no-op turns and 180-degree reversals.
                            if direction != towards && direction.opposite() != towards {
                                self.snake.set_direction(towards);
                            }
                        }
                    }
                }
            }
            if self.has_collided_with_wall() || self.has_bitten_itself() {
                done = true;
            } else {
                self.snake.slither();
                if let Some(food_point) = self.food {
                    if self.snake.get_head_point() == food_point {
                        self.snake.grow();
                        self.place_food();
                        self.score += 1;
                        // Speed up once per (area / MAX_SPEED) points scored.
                        // NOTE(review): if width * height < MAX_SPEED the
                        // divisor is 0 and this `%` panics — confirm callers
                        // always pass a field of at least MAX_SPEED cells.
                        if self.score % ((self.width * self.height) / MAX_SPEED) == 0 {
                            self.speed += 1;
                        }
                    }
                }
                self.render();
            }
        }
        self.restore_ui();
        println!("Game Over! Your score is {}", self.score);
    }

    /// Places food on a uniformly random free cell (retries until the chosen
    /// point is not occupied by the snake).
    pub fn place_food(&mut self) {
        loop {
            let random_x = rand::thread_rng().gen_range(0, self.width);
            let random_y = rand::thread_rng().gen_range(0, self.height);
            let point = Point::new(random_x, random_y);
            if !self.snake.contains_points(&point) {
                self.food = Some(point);
                break;
            }
        }
    }

    /// Switches the terminal to raw mode, resizes it to fit the field plus
    /// borders, clears the screen, and hides the cursor.
    pub fn prepare_ui(&mut self) {
        enable_raw_mode().unwrap();
        self.stdout
            .execute(SetSize(self.width + 3, self.height + 3))
            .unwrap()
            .execute(Clear(ClearType::All))
            .unwrap()
            .execute(Hide)
            .unwrap();
    }

    /// Redraws the whole frame: borders, background, food, then snake
    /// (drawn last so it paints over the background).
    fn render(&mut self) {
        self.draw_borders();
        self.draw_background();
        self.draw_food();
        self.draw_snake();
    }

    /// Draws a dark-grey `#` frame around the playing field. The field itself
    /// occupies columns/rows 1..=width/height; the border sits at 0 and +1.
    fn draw_borders(&mut self) {
        self.stdout
            .execute(SetForegroundColor(Color::DarkGrey))
            .unwrap();
        // Left and right edges.
        for y in 0..self.height + 2 {
            self.stdout
                .execute(MoveTo(0, y))
                .unwrap()
                .execute(Print("#"))
                .unwrap()
                .execute(MoveTo(self.width + 1, y))
                .unwrap()
                .execute(Print("#"))
                .unwrap();
        }
        // Top and bottom edges.
        for x in 0..self.width + 2 {
            self.stdout
                .execute(MoveTo(x, 0))
                .unwrap()
                .execute(Print("#"))
                .unwrap()
                .execute(MoveTo(x, self.height + 1))
                .unwrap()
                .execute(Print("#"))
                .unwrap();
        }
        // Corners (redrawn explicitly).
        self.stdout
            .execute(MoveTo(0, 0))
            .unwrap()
            .execute(Print("#"))
            .unwrap()
            .execute(MoveTo(self.width + 1, self.height + 1))
            .unwrap()
            .execute(Print("#"))
            .unwrap()
            .execute(MoveTo(self.width + 1, 0))
            .unwrap()
            .execute(Print("#"))
            .unwrap()
            .execute(MoveTo(0, self.height + 1))
            .unwrap()
            .execute(Print("#"))
            .unwrap();
    }

    /// Blanks every interior cell with a space in the default colour.
    fn draw_background(&mut self) {
        self.stdout.execute(ResetColor).unwrap();
        for y in 1..self.height + 1 {
            for x in 1..self.width + 1 {
                self.stdout
                    .execute(MoveTo(x, y))
                    .unwrap()
                    .execute(Print(" "))
                    .unwrap();
            }
        }
    }

    /// Draws the food (if placed) as a white bullet; field coordinates are
    /// offset by +1 to skip the border.
    fn draw_food(&mut self) {
        self.stdout
            .execute(SetForegroundColor(Color::White))
            .unwrap();
        for food in self.food.iter() {
            self.stdout
                .execute(MoveTo(food.x + 1, food.y + 1))
                .unwrap()
                .execute(Print("•"))
                .unwrap();
        }
    }

    /// Draws the snake with box-drawing characters, choosing each segment's
    /// glyph from the positions of its neighbouring segments.
    fn draw_snake(&mut self) {
        // Colour cycles with speed level so speed-ups are visible.
        let fg = SetForegroundColor(match self.speed % 3 {
            0 => Color::Green,
            1 => Color::Cyan,
            _ => Color::Yellow,
        });
        self.stdout.execute(fg).unwrap();
        let body_points = self.snake.get_body_points();
        for (i, body) in body_points.iter().enumerate() {
            let previous = if i == 0 { None } else { body_points.get(i - 1) };
            let next = body_points.get(i + 1);
            let symbol = if let Some(&next) = next {
                if let Some(&previous) = previous {
                    // Interior segment: straight if neighbours share a column
                    // or row, otherwise a corner glyph.
                    if previous.x == next.x {
                        '║'
                    } else if previous.y == next.y {
                        '═'
                    } else {
                        // Neighbouring cells in each direction; Up/Left clamp
                        // at 0 to avoid u16 underflow at the field edge.
                        let d = body.transform(Direction::Down, 1);
                        let r = body.transform(Direction::Right, 1);
                        let u = if body.y == 0 {
                            *body
                        } else {
                            body.transform(Direction::Up, 1)
                        };
                        let l = if body.x == 0 {
                            *body
                        } else {
                            body.transform(Direction::Left, 1)
                        };
                        if (next == d && previous == r) || (previous == d && next == r) {
                            '╔'
                        } else if (next == d && previous == l) || (previous == d && next == l) {
                            '╗'
                        } else if (next == u && previous == r) || (previous == u && next == r) {
                            '╚'
                        } else {
                            '╝'
                        }
                    }
                } else {
                    // First segment with a successor: the head.
                    'O'
                }
            } else if let Some(&previous) = previous {
                // Last segment: the tail, aligned with its predecessor.
                if body.y == previous.y {
                    '═'
                } else {
                    '║'
                }
            } else {
                // A body point with neither neighbour should be impossible
                // for a length >= 2 snake.
                panic!("Invalid snake body point.");
            };
            self.stdout
                .execute(MoveTo(body.x + 1, body.y + 1))
                .unwrap()
                .execute(Print(symbol))
                .unwrap();
        }
    }

    /// Tick length for the current speed: linear interpolation from
    /// MAX_INTERVAL (speed 0) down toward MIN_INTERVAL (speed MAX_SPEED).
    fn calculate_interval(&self) -> Duration {
        let speed = MAX_SPEED - self.speed;
        Duration::from_millis(
            (MIN_INTERVAL + (((MAX_INTERVAL - MIN_INTERVAL) / MAX_SPEED) * speed)) as u64,
        )
    }

    /// Maps a key event (waited for up to `wait_for`) to a game command:
    /// q/Q/Esc/Ctrl-C quit, arrow keys turn; anything else is ignored.
    fn get_command(&self, wait_for: Duration) -> Option<Command> {
        let key_event = self.wait_for_key_event(wait_for)?;
        match key_event.code {
            KeyCode::Char('q') | KeyCode::Char('Q') | KeyCode::Esc => Some(Command::Quit),
            KeyCode::Char('c') | KeyCode::Char('C') => {
                if key_event.modifiers == KeyModifiers::CONTROL {
                    Some(Command::Quit)
                } else {
                    None
                }
            }
            KeyCode::Up => Some(Command::Turn(Direction::Up)),
            KeyCode::Right => Some(Command::Turn(Direction::Right)),
            KeyCode::Down => Some(Command::Turn(Direction::Down)),
            KeyCode::Left => Some(Command::Turn(Direction::Left)),
            _ => None,
        }
    }

    /// Polls the terminal for up to `wait_for`; returns the key event if one
    /// arrived, `None` on timeout, poll/read error, or a non-key event.
    fn wait_for_key_event(&self, wait_for: Duration) -> Option<KeyEvent> {
        if poll(wait_for).ok()? {
            let event = read().ok()?;
            if let Event::Key(key_event) = event {
                return Some(key_event);
            }
        }
        None
    }

    /// True when the head sits on the edge it is about to cross, i.e. the
    /// next slither in the current direction would leave the field.
    fn has_collided_with_wall(&self) -> bool {
        let head_point = self.snake.get_head_point();
        match self.snake.get_direction() {
            Direction::Up => head_point.y == 0,
            Direction::Right => head_point.x == self.width - 1,
            Direction::Down => head_point.y == self.height - 1,
            Direction::Left => head_point.x == 0,
        }
    }

    /// True when the cell the head moves into next is occupied by the body.
    /// The current head (index 0) and the tail tip (last index, which will
    /// vacate its cell this tick) are excluded from the check.
    fn has_bitten_itself(&self) -> bool {
        let next_head_point = self
            .snake
            .get_head_point()
            .transform(self.snake.get_direction(), 1);
        let mut next_body_points = self.snake.get_body_points();
        next_body_points.remove(next_body_points.len() - 1);
        next_body_points.remove(0);
        next_body_points.contains(&next_head_point)
    }

    /// Restores the original terminal size, clears the screen, re-shows the
    /// cursor, resets colours, and leaves raw mode.
    fn restore_ui(&mut self) {
        let (cols, rows) = self.original_terminal_size;
        self.stdout
            .execute(SetSize(cols, rows))
            .unwrap()
            .execute(Clear(ClearType::All))
            .unwrap()
            .execute(Show)
            .unwrap()
            .execute(ResetColor)
            .unwrap();
        disable_raw_mode().unwrap();
    }
}
extern crate ansi_term; extern crate cargo_license; extern crate getopts; use std::env; use getopts::Options; use std::collections::BTreeMap; use std::collections::BTreeSet; use std::collections::btree_map::Entry::*; use ansi_term::Colour::Green; fn print_full_licenses(dependencies: Vec<cargo_license::Dependency>) { for dependency in dependencies { println!("{}:", dependency.name); if let Some(licenses) = dependency.get_license_text() { for license in licenses { println!("{}", license); } } else { let license = dependency.get_license().unwrap_or("N/A".to_owned()); println!("Could not find license file. License(s) specified in crate: {}", license) } } } fn group_by_license_type(dependencies: Vec<cargo_license::Dependency>, display_authors: bool) { let mut table: BTreeMap<String, Vec<cargo_license::Dependency>> = BTreeMap::new(); for dependency in dependencies { let license = dependency.get_license().unwrap_or("N/A".to_owned()); match table.entry(license) { Vacant(e) => { e.insert(vec![dependency]); } Occupied(mut e) => { e.get_mut().push(dependency); } }; } for (license, crates) in table { let crate_names = crates.iter().map(|c| c.name.clone()).collect::<Vec<_>>(); if display_authors { let crate_authors = crates .iter() .flat_map(|c| c.get_authors().unwrap_or(vec![])) .collect::<BTreeSet<_>>(); println!( "{} ({})\n{}\n{} {}", Green.bold().paint(license), crates.len(), crate_names.join(", "), Green.paint("by"), crate_authors.into_iter().collect::<Vec<_>>().join(", ") ); } else { println!( "{} ({}): {}", Green.bold().paint(license), crates.len(), crate_names.join(", ") ); } } } fn one_license_per_line(dependencies: Vec<cargo_license::Dependency>, display_authors: bool) { for dependency in dependencies { let name = dependency.name.clone(); let version = dependency.version.clone(); let license = dependency.get_license().unwrap_or("N/A".to_owned()); let source = dependency.source.clone(); if display_authors { let authors = dependency.get_authors().unwrap_or(vec![]); 
println!( "{}: {}, \"{}\", {}, {} \"{}\"", Green.bold().paint(name), version, license, source, Green.paint("by"), authors.into_iter().collect::<Vec<_>>().join(", ") ); } else { println!( "{}: {}, \"{}\", {}", Green.bold().paint(name), version, license, source ); } } } fn print_usage(program: &str, opts: Options) { let brief = format!("Usage: {} [options]", program); print!("{}", opts.usage(&brief)); } fn main() { let args: Vec<String> = env::args().collect(); let mut opts = Options::new(); let program = args[0].clone(); opts.optflag("a", "authors", "Display crate authors"); opts.optflag("d", "do-not-bundle", "Output one license per line."); opts.optflag("f", "full", "Display full licenses."); opts.optflag("h", "help", "print this help menu"); let matches = match opts.parse(&args[1..]) { Ok(m) => m, Err(f) => { print_usage(&program, opts); panic!(f.to_string()) } }; if matches.opt_present("h") { print_usage(&program, opts); return; } let display_authors = matches.opt_present("authors"); let do_not_bundle = matches.opt_present("do-not-bundle"); let display_full_licenses = matches.opt_present("full"); let dependencies = match cargo_license::get_dependencies_from_cargo_lock() { Ok(m) => m, Err(err) => { println!( "Cargo.lock file not found. Try building the project first.\n{}", err ); std::process::exit(1); } }; if display_full_licenses { print_full_licenses(dependencies); } else if do_not_bundle { one_license_per_line(dependencies, display_authors); } else { group_by_license_type(dependencies, display_authors); } }
/// Converts this into a synchronized version.
///
/// See also `IntoUnSyncView`, `IntoUnSync` and `UnSyncRef`.
pub trait IntoSync {
    /// The synchronized type this conversion produces.
    type Target;

    /// Converts this into a synchronized version.
    ///
    /// It's cheap if this is already backed by a synchronized implementation (or if it's just a
    /// view). See also `IntoUnSyncView` / `IntoUnSync`. If it's not
    /// backed by a synchronized implementation, this operation might be expensive: for instance
    /// if you apply this operation on a reference-counted binary that's not synchronized and has
    /// multiple references pointing to it, the data of the binary must be cloned.
    ///
    /// ```rust
    /// use abin::{NewBin, Bin, BinFactory, SBin, IntoSync, NewSBin, AnyBin};
    ///
    /// let string = "this is the content of this binary";
    /// let not_sync : Bin = NewBin::copy_from_slice(string.as_bytes());
    /// // this line 'converts' (not just a view) the binary into a sync binary (after that call
    /// // the reference-counter is synchronized).
    /// let sync_1 : SBin = not_sync.into_sync();
    /// // this is the direct way to construct a synchronized binary.
    /// // sync_1 and sync_2 are equivalent.
    /// let sync_2 : SBin = NewSBin::copy_from_slice(string.as_bytes());
    /// assert_eq!(string.as_bytes(), sync_1.as_slice());
    /// assert_eq!(sync_1, sync_2);
    /// ```
    fn into_sync(self) -> Self::Target;
}
//! Conversion trait implementations for instructions. use super::{insns::*, Insn}; impl From<ClsInsn> for Insn { fn from(insn: ClsInsn) -> Self { Self::Cls(insn) } } impl From<RetInsn> for Insn { fn from(insn: RetInsn) -> Self { Self::Ret(insn) } } impl From<SysInsn> for Insn { fn from(insn: SysInsn) -> Self { Self::Sys(insn) } } impl From<JpInsn> for Insn { fn from(insn: JpInsn) -> Self { Self::Jp(insn) } } impl From<CallInsn> for Insn { fn from(insn: CallInsn) -> Self { Self::Call(insn) } } impl From<SeInsn> for Insn { fn from(insn: SeInsn) -> Self { Self::Se(insn) } } impl From<SneInsn> for Insn { fn from(insn: SneInsn) -> Self { Self::SNe(insn) } } impl From<LdInsn> for Insn { fn from(insn: LdInsn) -> Self { Self::Ld(insn) } } impl From<AddInsn> for Insn { fn from(insn: AddInsn) -> Self { Self::Add(insn) } } impl From<OrInsn> for Insn { fn from(insn: OrInsn) -> Self { Self::Or(insn) } } impl From<AndInsn> for Insn { fn from(insn: AndInsn) -> Self { Self::And(insn) } } impl From<XorInsn> for Insn { fn from(insn: XorInsn) -> Self { Self::Xor(insn) } } impl From<SubInsn> for Insn { fn from(insn: SubInsn) -> Self { Self::Sub(insn) } } impl From<ShrInsn> for Insn { fn from(insn: ShrInsn) -> Self { Self::Shr(insn) } } impl From<SubNInsn> for Insn { fn from(insn: SubNInsn) -> Self { Self::SubN(insn) } } impl From<ShlInsn> for Insn { fn from(insn: ShlInsn) -> Self { Self::Shl(insn) } } impl From<RndInsn> for Insn { fn from(insn: RndInsn) -> Self { Self::Rnd(insn) } } impl From<DrwInsn> for Insn { fn from(insn: DrwInsn) -> Self { Self::Drw(insn) } } impl From<SkpInsn> for Insn { fn from(insn: SkpInsn) -> Self { Self::Skp(insn) } } impl From<SkpNpInsn> for Insn { fn from(insn: SkpNpInsn) -> Self { Self::SkpNp(insn) } }
#![no_std] #![feature(generic_associated_types)] #![feature(asm)] #![feature(min_type_alias_impl_trait)] #![feature(impl_trait_in_bindings)] #![feature(type_alias_impl_trait)] #![allow(incomplete_features)] #[cfg(not(any( feature = "nrf51", feature = "nrf52805", feature = "nrf52810", feature = "nrf52811", feature = "nrf52820", feature = "nrf52832", feature = "nrf52833", feature = "nrf52840", feature = "nrf5340-app", feature = "nrf5340-net", feature = "nrf9160", )))] compile_error!("No chip feature activated. You must activate exactly one of the following features: nrf52810, nrf52811, nrf52832, nrf52833, nrf52840"); // This mod MUST go first, so that the others see its macros. pub(crate) mod fmt; pub(crate) mod util; pub mod buffered_uarte; pub mod gpio; pub mod gpiote; pub mod ppi; #[cfg(not(any(feature = "nrf52805", feature = "nrf52820")))] pub mod pwm; #[cfg(feature = "nrf52840")] pub mod qspi; pub mod rtc; #[cfg(not(feature = "nrf52820"))] pub mod saadc; pub mod spim; pub mod timer; pub mod twim; pub mod uarte; // This mod MUST go last, so that it sees all the `impl_foo!` macros #[cfg(feature = "nrf52805")] #[path = "chips/nrf52805.rs"] mod chip; #[cfg(feature = "nrf52810")] #[path = "chips/nrf52810.rs"] mod chip; #[cfg(feature = "nrf52811")] #[path = "chips/nrf52811.rs"] mod chip; #[cfg(feature = "nrf52820")] #[path = "chips/nrf52820.rs"] mod chip; #[cfg(feature = "nrf52832")] #[path = "chips/nrf52832.rs"] mod chip; #[cfg(feature = "nrf52833")] #[path = "chips/nrf52833.rs"] mod chip; #[cfg(feature = "nrf52840")] #[path = "chips/nrf52840.rs"] mod chip; pub(crate) use chip::pac; pub use chip::{peripherals, Peripherals}; pub mod interrupt { pub use crate::chip::irqs::*; pub use cortex_m::interrupt::{CriticalSection, Mutex}; pub use embassy::interrupt::{declare, take, Interrupt}; pub use embassy_extras::interrupt::Priority3 as Priority; } pub use embassy_macros::interrupt; pub mod config { pub enum HfclkSource { Internal, ExternalXtal, } pub enum LfclkSource { 
InternalRC, Synthesized, ExternalXtal, ExternalLowSwing, ExternalFullSwing, } #[non_exhaustive] pub struct Config { pub hfclk_source: HfclkSource, pub lfclk_source: LfclkSource, pub gpiote_interrupt_priority: crate::interrupt::Priority, } impl Default for Config { fn default() -> Self { Self { // There are hobby nrf52 boards out there without external XTALs... // Default everything to internal so it Just Works. User can enable external // xtals if they know they have them. hfclk_source: HfclkSource::Internal, lfclk_source: LfclkSource::InternalRC, gpiote_interrupt_priority: crate::interrupt::Priority::P0, } } } } pub fn init(config: config::Config) -> Peripherals { // Do this first, so that it panics if user is calling `init` a second time // before doing anything important. let peripherals = Peripherals::take(); let r = unsafe { &*pac::CLOCK::ptr() }; // Start HFCLK. match config.hfclk_source { config::HfclkSource::Internal => {} config::HfclkSource::ExternalXtal => { // Datasheet says this is likely to take 0.36ms r.events_hfclkstarted.write(|w| unsafe { w.bits(0) }); r.tasks_hfclkstart.write(|w| unsafe { w.bits(1) }); while r.events_hfclkstarted.read().bits() == 0 {} } } // Configure LFCLK. match config.lfclk_source { config::LfclkSource::InternalRC => r.lfclksrc.write(|w| w.src().rc()), config::LfclkSource::Synthesized => r.lfclksrc.write(|w| w.src().synth()), config::LfclkSource::ExternalXtal => r.lfclksrc.write(|w| w.src().xtal()), config::LfclkSource::ExternalLowSwing => r.lfclksrc.write(|w| { w.src().xtal(); w.external().enabled(); w.bypass().disabled(); w }), config::LfclkSource::ExternalFullSwing => r.lfclksrc.write(|w| { w.src().xtal(); w.external().enabled(); w.bypass().enabled(); w }), } // Start LFCLK. // Datasheet says this could take 100us from synth source // 600us from rc source, 0.25s from an external source. 
r.events_lfclkstarted.write(|w| unsafe { w.bits(0) }); r.tasks_lfclkstart.write(|w| unsafe { w.bits(1) }); while r.events_lfclkstarted.read().bits() == 0 {} // Init GPIOTE crate::gpiote::init(config.gpiote_interrupt_priority); peripherals }
// NOTE(review): svd2rust-generated QUADSPI peripheral definitions.
// Regenerate from the device SVD instead of editing by hand; the register
// offsets in the field docs must match the struct layout (incl. _reserved13
// padding up to 0x3f0).
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - QUADSPI control register"]
    pub quadspi_cr: QUADSPI_CR,
    #[doc = "0x04 - QUADSPI device configuration register"]
    pub quadspi_dcr: QUADSPI_DCR,
    #[doc = "0x08 - QUADSPI status register"]
    pub quadspi_sr: QUADSPI_SR,
    #[doc = "0x0c - QUADSPI flag clear register"]
    pub quadspi_fcr: QUADSPI_FCR,
    #[doc = "0x10 - QUADSPI data length register"]
    pub quadspi_dlr: QUADSPI_DLR,
    #[doc = "0x14 - QUADSPI communication configuration register"]
    pub quadspi_ccr: QUADSPI_CCR,
    #[doc = "0x18 - QUADSPI address register"]
    pub quadspi_ar: QUADSPI_AR,
    #[doc = "0x1c - QUADSPI alternate bytes registers"]
    pub quadspi_abr: QUADSPI_ABR,
    #[doc = "0x20 - QUADSPI data register"]
    pub quadspi_dr: QUADSPI_DR,
    #[doc = "0x24 - QUADSPI polling status mask register"]
    pub quadspi_psmkr: QUADSPI_PSMKR,
    #[doc = "0x28 - QUADSPI polling status match register"]
    pub quadspi_psmar: QUADSPI_PSMAR,
    #[doc = "0x2c - QUADSPI polling interval register"]
    pub quadspi_pir: QUADSPI_PIR,
    #[doc = "0x30 - QUADSPI low-power timeout register"]
    pub quadspi_lptr: QUADSPI_LPTR,
    _reserved13: [u8; 0x03bc],
    #[doc = "0x3f0 - QUADSPI HW configuration register"]
    pub quadspi_hwcfgr: QUADSPI_HWCFGR,
    #[doc = "0x3f4 - QUADSPI version register"]
    pub quadspi_verr: QUADSPI_VERR,
    #[doc = "0x3f8 - QUADSPI identification register"]
    pub quadspi_ipidr: QUADSPI_IPIDR,
    #[doc = "0x3fc - QUADSPI size identification register"]
    pub quadspi_sidr: QUADSPI_SIDR,
}
#[doc = "QUADSPI_CR (rw) register accessor: QUADSPI control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_cr`] module"]
pub type QUADSPI_CR = crate::Reg<quadspi_cr::QUADSPI_CR_SPEC>;
#[doc = "QUADSPI control register"]
pub mod quadspi_cr;
#[doc = "QUADSPI_DCR (rw) register accessor: QUADSPI device configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_dcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_dcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_dcr`] module"]
pub type QUADSPI_DCR = crate::Reg<quadspi_dcr::QUADSPI_DCR_SPEC>;
#[doc = "QUADSPI device configuration register"]
pub mod quadspi_dcr;
#[doc = "QUADSPI_SR (r) register accessor: QUADSPI status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_sr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_sr`] module"]
pub type QUADSPI_SR = crate::Reg<quadspi_sr::QUADSPI_SR_SPEC>;
#[doc = "QUADSPI status register"]
pub mod quadspi_sr;
#[doc = "QUADSPI_FCR (w) register accessor: QUADSPI flag clear register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_fcr::W`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_fcr`] module"]
pub type QUADSPI_FCR = crate::Reg<quadspi_fcr::QUADSPI_FCR_SPEC>;
#[doc = "QUADSPI flag clear register"]
pub mod quadspi_fcr;
#[doc = "QUADSPI_DLR (rw) register accessor: QUADSPI data length register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_dlr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_dlr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_dlr`] module"]
pub type QUADSPI_DLR = crate::Reg<quadspi_dlr::QUADSPI_DLR_SPEC>;
#[doc = "QUADSPI data length register"]
pub mod quadspi_dlr;
#[doc = "QUADSPI_CCR (rw) register accessor: QUADSPI communication configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_ccr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_ccr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_ccr`] module"]
pub type QUADSPI_CCR = crate::Reg<quadspi_ccr::QUADSPI_CCR_SPEC>;
#[doc = "QUADSPI communication configuration register"]
pub mod quadspi_ccr;
#[doc = "QUADSPI_AR (rw) register accessor: QUADSPI address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_ar`] module"]
pub type QUADSPI_AR = crate::Reg<quadspi_ar::QUADSPI_AR_SPEC>;
#[doc = "QUADSPI address register"]
pub mod quadspi_ar;
#[doc = "QUADSPI_ABR (rw) register accessor: QUADSPI alternate bytes registers\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_abr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_abr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_abr`] module"]
pub type QUADSPI_ABR = crate::Reg<quadspi_abr::QUADSPI_ABR_SPEC>;
#[doc = "QUADSPI alternate bytes registers"]
pub mod quadspi_abr;
#[doc = "QUADSPI_DR (rw) register accessor: QUADSPI data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_dr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_dr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_dr`] module"]
pub type QUADSPI_DR = crate::Reg<quadspi_dr::QUADSPI_DR_SPEC>;
#[doc = "QUADSPI data register"]
pub mod quadspi_dr;
#[doc = "QUADSPI_PSMKR (rw) register accessor: QUADSPI polling status mask register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_psmkr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_psmkr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_psmkr`] module"]
pub type QUADSPI_PSMKR = crate::Reg<quadspi_psmkr::QUADSPI_PSMKR_SPEC>;
#[doc = "QUADSPI polling status mask register"]
pub mod quadspi_psmkr;
#[doc = "QUADSPI_PSMAR (rw) register accessor: QUADSPI polling status match register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_psmar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_psmar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_psmar`] module"]
pub type QUADSPI_PSMAR = crate::Reg<quadspi_psmar::QUADSPI_PSMAR_SPEC>;
#[doc = "QUADSPI polling status match register"]
pub mod quadspi_psmar;
#[doc = "QUADSPI_PIR (rw) register accessor: QUADSPI polling interval register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_pir::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_pir::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_pir`] module"]
pub type QUADSPI_PIR = crate::Reg<quadspi_pir::QUADSPI_PIR_SPEC>;
#[doc = "QUADSPI polling interval register"]
pub mod quadspi_pir;
#[doc = "QUADSPI_LPTR (rw) register accessor: QUADSPI low-power timeout register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_lptr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_lptr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_lptr`] module"]
pub type QUADSPI_LPTR = crate::Reg<quadspi_lptr::QUADSPI_LPTR_SPEC>;
#[doc = "QUADSPI low-power timeout register"]
pub mod quadspi_lptr;
#[doc = "QUADSPI_HWCFGR (r) register accessor: QUADSPI HW configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_hwcfgr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_hwcfgr`] module"]
pub type QUADSPI_HWCFGR = crate::Reg<quadspi_hwcfgr::QUADSPI_HWCFGR_SPEC>;
#[doc = "QUADSPI HW configuration register"]
pub mod quadspi_hwcfgr;
#[doc = "QUADSPI_VERR (r) register accessor: QUADSPI version register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_verr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_verr`] module"]
pub type QUADSPI_VERR = crate::Reg<quadspi_verr::QUADSPI_VERR_SPEC>;
#[doc = "QUADSPI version register"]
pub mod quadspi_verr;
#[doc = "QUADSPI_IPIDR (r) register accessor: QUADSPI identification register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_ipidr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_ipidr`] module"]
pub type QUADSPI_IPIDR = crate::Reg<quadspi_ipidr::QUADSPI_IPIDR_SPEC>;
#[doc = "QUADSPI identification register"]
pub mod quadspi_ipidr;
#[doc = "QUADSPI_SIDR (r) register accessor: QUADSPI size identification register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_sidr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`quadspi_sidr`] module"]
pub type QUADSPI_SIDR = crate::Reg<quadspi_sidr::QUADSPI_SIDR_SPEC>;
#[doc = "QUADSPI size identification register"]
pub mod quadspi_sidr;
// This file is part of Substrate.

// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Proving state machine backend.

use crate::trie_backend::TrieBackend;
use crate::trie_backend_essence::{Ephemeral, TrieBackendEssence, TrieBackendStorage};
use crate::{Backend, DBValue, Error, ExecutionError};
use codec::{Codec, Decode};
use hash_db::{HashDB, Hasher, Prefix, EMPTY_PREFIX};
use log::debug;
use parking_lot::RwLock;
use sp_core::storage::ChildInfo;
use sp_trie::{
    empty_child_trie_root, read_child_trie_value_with, read_trie_value_with, record_all_keys,
    MemoryDB, StorageProof,
};
pub use sp_trie::{
    trie_types::{Layout, TrieError},
    Recorder,
};
use std::{collections::HashMap, sync::Arc};

/// Patricia trie-based backend specialized in get value proofs.
pub struct ProvingBackendRecorder<'a, S: 'a + TrieBackendStorage<H>, H: 'a + Hasher> {
    // Essence providing root + storage access for the trie being proven.
    pub(crate) backend: &'a TrieBackendEssence<S, H>,
    // Mutable recorder that accumulates every trie node touched by queries.
    pub(crate) proof_recorder: &'a mut Recorder<H::Out>,
}

impl<'a, S, H> ProvingBackendRecorder<'a, S, H>
where
    S: TrieBackendStorage<H>,
    H: Hasher,
    H::Out: Codec,
{
    /// Produce proof for a key query.
    ///
    /// Reads `key` from the top-level trie; every node visited during the
    /// lookup is added to `self.proof_recorder`. Trie errors are stringified.
    pub fn storage(&mut self, key: &[u8]) -> Result<Option<Vec<u8>>, String> {
        let mut read_overlay = S::Overlay::default();
        let eph = Ephemeral::new(self.backend.backend_storage(), &mut read_overlay);

        let map_e = |e| format!("Trie lookup error: {}", e);

        read_trie_value_with::<Layout<H>, _, Ephemeral<S, H>>(
            &eph,
            self.backend.root(),
            key,
            &mut *self.proof_recorder,
        )
        .map_err(map_e)
    }

    /// Produce proof for a child key query.
    ///
    /// First resolves the child trie root via a recorded top-level `storage`
    /// lookup (falling back to the empty child trie root when absent or
    /// undecodable), then reads `key` inside that child trie, recording nodes.
    pub fn child_storage(
        &mut self,
        child_info: &ChildInfo,
        key: &[u8],
    ) -> Result<Option<Vec<u8>>, String> {
        let storage_key = child_info.storage_key();
        let root = self
            .storage(storage_key)?
            .and_then(|r| Decode::decode(&mut &r[..]).ok())
            .unwrap_or_else(|| empty_child_trie_root::<Layout<H>>());

        let mut read_overlay = S::Overlay::default();
        let eph = Ephemeral::new(self.backend.backend_storage(), &mut read_overlay);

        let map_e = |e| format!("Trie lookup error: {}", e);

        read_child_trie_value_with::<Layout<H>, _, _>(
            child_info.keyspace(),
            &eph,
            &root.as_ref(),
            key,
            &mut *self.proof_recorder,
        )
        .map_err(map_e)
    }

    /// Produce proof for the whole backend.
    ///
    /// Walks every key of the top-level trie into the recorder; errors are
    /// only logged at debug level (best-effort, deliberately non-fatal).
    pub fn record_all_keys(&mut self) {
        let mut read_overlay = S::Overlay::default();
        let eph = Ephemeral::new(self.backend.backend_storage(), &mut read_overlay);

        let mut iter = move || -> Result<(), Box<TrieError<H::Out>>> {
            let root = self.backend.root();
            record_all_keys::<Layout<H>, _>(&eph, root, &mut *self.proof_recorder)
        };

        if let Err(e) = iter() {
            debug!(target: "trie", "Error while recording all keys: {}", e);
        }
    }
}

/// Global proof recorder, act as a layer over a hash db for recording queried
/// data. Maps node hash -> node data; `None` marks a recorded negative lookup.
pub type ProofRecorder<H> = Arc<RwLock<HashMap<<H as Hasher>::Out, Option<DBValue>>>>;

/// Patricia trie-based backend which also tracks all touched storage trie values.
/// These can be sent to remote node and used as a proof of execution.
pub struct ProvingBackend<'a, S: 'a + TrieBackendStorage<H>, H: 'a + Hasher>(
    TrieBackend<ProofRecorderBackend<'a, S, H>, H>,
);

/// Trie backend storage with its proof recorder.
pub struct ProofRecorderBackend<'a, S: 'a + TrieBackendStorage<H>, H: 'a + Hasher> {
    backend: &'a S,
    proof_recorder: ProofRecorder<H>,
}

impl<'a, S: 'a + TrieBackendStorage<H>, H: 'a + Hasher> ProvingBackend<'a, S, H>
where
    H::Out: Codec,
{
    /// Create new proving backend.
    pub fn new(backend: &'a TrieBackend<S, H>) -> Self {
        let proof_recorder = Default::default();
        Self::new_with_recorder(backend, proof_recorder)
    }

    /// Create new proving backend with the given recorder.
    pub fn new_with_recorder(
        backend: &'a TrieBackend<S, H>,
        proof_recorder: ProofRecorder<H>,
    ) -> Self {
        let essence = backend.essence();
        let root = essence.root().clone();
        let recorder = ProofRecorderBackend { backend: essence.backend_storage(), proof_recorder };
        ProvingBackend(TrieBackend::new(recorder, root))
    }

    /// Extracting the gathered unordered proof.
    ///
    /// Negative lookups (`None` entries in the recorder) are dropped; only
    /// actual node data ends up in the `StorageProof`.
    pub fn extract_proof(&self) -> StorageProof {
        let trie_nodes = self
            .0
            .essence()
            .backend_storage()
            .proof_recorder
            .read()
            .iter()
            .filter_map(|(_k, v)| v.as_ref().map(|v| v.to_vec()))
            .collect();
        StorageProof::new(trie_nodes)
    }
}

impl<'a, S: 'a + TrieBackendStorage<H>, H: 'a + Hasher> TrieBackendStorage<H>
    for ProofRecorderBackend<'a, S, H>
{
    type Overlay = S::Overlay;

    // Serve from the recorder first (so repeated reads stay consistent and
    // are not re-fetched), otherwise fetch from the wrapped backend and
    // record the result — including `None` for missing nodes.
    fn get(&self, key: &H::Out, prefix: Prefix) -> Result<Option<DBValue>, String> {
        if let Some(v) = self.proof_recorder.read().get(key) {
            return Ok(v.clone())
        }
        let backend_value = self.backend.get(key, prefix)?;
        self.proof_recorder.write().insert(key.clone(), backend_value.clone());
        Ok(backend_value)
    }
}

impl<'a, S: 'a + TrieBackendStorage<H>, H: 'a + Hasher> std::fmt::Debug
    for ProvingBackend<'a, S, H>
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "ProvingBackend")
    }
}

// `Backend` implementation that simply delegates every operation to the
// wrapped recording `TrieBackend`, so reads are transparently recorded.
impl<'a, S, H> Backend<H> for ProvingBackend<'a, S, H>
where
    S: 'a + TrieBackendStorage<H>,
    H: 'a + Hasher,
    H::Out: Ord + Codec,
{
    type Error = String;
    type Transaction = S::Overlay;
    type TrieBackendStorage = S;

    fn storage(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Self::Error> {
        self.0.storage(key)
    }

    fn child_storage(
        &self,
        child_info: &ChildInfo,
        key: &[u8],
    ) -> Result<Option<Vec<u8>>, Self::Error> {
        self.0.child_storage(child_info, key)
    }

    fn for_keys_in_child_storage<F: FnMut(&[u8])>(&self, child_info: &ChildInfo, f: F) {
        self.0.for_keys_in_child_storage(child_info, f)
    }

    fn next_storage_key(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Self::Error> {
        self.0.next_storage_key(key)
    }

    fn next_child_storage_key(
        &self,
        child_info: &ChildInfo,
        key: &[u8],
    ) -> Result<Option<Vec<u8>>, Self::Error> {
        self.0.next_child_storage_key(child_info, key)
    }

    fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], f: F) {
        self.0.for_keys_with_prefix(prefix, f)
    }

    fn for_key_values_with_prefix<F: FnMut(&[u8], &[u8])>(&self, prefix: &[u8], f: F) {
        self.0.for_key_values_with_prefix(prefix, f)
    }

    fn for_child_keys_with_prefix<F: FnMut(&[u8])>(
        &self,
        child_info: &ChildInfo,
        prefix: &[u8],
        f: F,
    ) {
        self.0.for_child_keys_with_prefix(child_info, prefix, f)
    }

    fn pairs(&self) -> Vec<(Vec<u8>, Vec<u8>)> {
        self.0.pairs()
    }

    fn keys(&self, prefix: &[u8]) -> Vec<Vec<u8>> {
        self.0.keys(prefix)
    }

    fn child_keys(&self, child_info: &ChildInfo, prefix: &[u8]) -> Vec<Vec<u8>> {
        self.0.child_keys(child_info, prefix)
    }

    fn storage_root<'b>(
        &self,
        delta: impl Iterator<Item = (&'b [u8], Option<&'b [u8]>)>,
    ) -> (H::Out, Self::Transaction)
    where
        H::Out: Ord,
    {
        self.0.storage_root(delta)
    }

    fn child_storage_root<'b>(
        &self,
        child_info: &ChildInfo,
        delta: impl Iterator<Item = (&'b [u8], Option<&'b [u8]>)>,
    ) -> (H::Out, bool, Self::Transaction)
    where
        H::Out: Ord,
    {
        self.0.child_storage_root(child_info, delta)
    }

    fn register_overlay_stats(&mut self, _stats: &crate::stats::StateMachineStats) {}

    fn usage_info(&self) -> crate::stats::UsageInfo {
        self.0.usage_info()
    }
}

/// Create proof check backend.
///
/// Builds an in-memory trie backend from `proof` and verifies the expected
/// `root` is actually present; returns `InvalidProof` otherwise.
pub fn create_proof_check_backend<H>(
    root: H::Out,
    proof: StorageProof,
) -> Result<TrieBackend<MemoryDB<H>, H>, Box<dyn Error>>
where
    H: Hasher,
    H::Out: Codec,
{
    let db = proof.into_memory_db();

    if db.contains(&root, EMPTY_PREFIX) {
        Ok(TrieBackend::new(db, root))
    } else {
        Err(Box::new(ExecutionError::InvalidProof))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::proving_backend::create_proof_check_backend;
    use crate::trie_backend::tests::test_trie;
    use crate::InMemoryBackend;
    use sp_runtime::traits::BlakeTwo256;
    use sp_trie::PrefixedMemoryDB;

    fn test_proving<'a>(
        trie_backend: &'a TrieBackend<PrefixedMemoryDB<BlakeTwo256>, BlakeTwo256>,
    ) -> ProvingBackend<'a, PrefixedMemoryDB<BlakeTwo256>, BlakeTwo256> {
        ProvingBackend::new(trie_backend)
    }

    #[test]
    fn proof_is_empty_until_value_is_read() {
        let trie_backend = test_trie();
        assert!(test_proving(&trie_backend).extract_proof().is_empty());
    }

    #[test]
    fn proof_is_non_empty_after_value_is_read() {
        let trie_backend = test_trie();
        let backend = test_proving(&trie_backend);
        assert_eq!(backend.storage(b"key").unwrap(), Some(b"value".to_vec()));
        assert!(!backend.extract_proof().is_empty());
    }

    #[test]
    fn proof_is_invalid_when_does_not_contains_root() {
        use sp_core::H256;
        let result = create_proof_check_backend::<BlakeTwo256>(
            H256::from_low_u64_be(1),
            StorageProof::empty(),
        );
        assert!(result.is_err());
    }

    #[test]
    fn passes_through_backend_calls() {
        let trie_backend = test_trie();
        let proving_backend = test_proving(&trie_backend);
        assert_eq!(trie_backend.storage(b"key").unwrap(), proving_backend.storage(b"key").unwrap());
        assert_eq!(trie_backend.pairs(), proving_backend.pairs());

        let (trie_root, mut trie_mdb) = trie_backend.storage_root(::std::iter::empty());
        let (proving_root, mut proving_mdb) = proving_backend.storage_root(::std::iter::empty());
        assert_eq!(trie_root, proving_root);
        assert_eq!(trie_mdb.drain(), proving_mdb.drain());
    }

    #[test]
    fn proof_recorded_and_checked() {
        let contents = (0..64).map(|i| (vec![i], Some(vec![i]))).collect::<Vec<_>>();
        let in_memory = InMemoryBackend::<BlakeTwo256>::default();
        let mut in_memory = in_memory.update(vec![(None, contents)]);
        let in_memory_root = in_memory.storage_root(::std::iter::empty()).0;
        (0..64).for_each(|i| assert_eq!(in_memory.storage(&[i]).unwrap().unwrap(), vec![i]));

        let trie = in_memory.as_trie_backend().unwrap();
        let trie_root = trie.storage_root(::std::iter::empty()).0;
        assert_eq!(in_memory_root, trie_root);
        (0..64).for_each(|i| assert_eq!(trie.storage(&[i]).unwrap().unwrap(), vec![i]));

        let proving = ProvingBackend::new(trie);
        assert_eq!(proving.storage(&[42]).unwrap().unwrap(), vec![42]);

        let proof = proving.extract_proof();

        let proof_check =
            create_proof_check_backend::<BlakeTwo256>(in_memory_root.into(), proof).unwrap();
        assert_eq!(proof_check.storage(&[42]).unwrap().unwrap(), vec![42]);
    }

    #[test]
    fn proof_recorded_and_checked_with_child() {
        let child_info_1 = ChildInfo::new_default(b"sub1");
        let child_info_2 = ChildInfo::new_default(b"sub2");
        let child_info_1 = &child_info_1;
        let child_info_2 = &child_info_2;
        let contents = vec![
            (None, (0..64).map(|i| (vec![i], Some(vec![i]))).collect()),
            (Some(child_info_1.clone()), (28..65).map(|i| (vec![i], Some(vec![i]))).collect()),
            (Some(child_info_2.clone()), (10..15).map(|i| (vec![i], Some(vec![i]))).collect()),
        ];
        let in_memory = InMemoryBackend::<BlakeTwo256>::default();
        let mut in_memory = in_memory.update(contents);
        let child_storage_keys = vec![child_info_1.to_owned(), child_info_2.to_owned()];
        let in_memory_root = in_memory
            .full_storage_root(
                std::iter::empty(),
                child_storage_keys.iter().map(|k| (k, std::iter::empty())),
            )
            .0;
        (0..64).for_each(|i| assert_eq!(in_memory.storage(&[i]).unwrap().unwrap(), vec![i]));
        (28..65).for_each(|i| {
            assert_eq!(in_memory.child_storage(child_info_1, &[i]).unwrap().unwrap(), vec![i])
        });
        (10..15).for_each(|i| {
            assert_eq!(in_memory.child_storage(child_info_2, &[i]).unwrap().unwrap(), vec![i])
        });

        let trie = in_memory.as_trie_backend().unwrap();
        let trie_root = trie.storage_root(::std::iter::empty()).0;
        assert_eq!(in_memory_root, trie_root);
        (0..64).for_each(|i| assert_eq!(trie.storage(&[i]).unwrap().unwrap(), vec![i]));

        let proving = ProvingBackend::new(trie);
        assert_eq!(proving.storage(&[42]).unwrap().unwrap(), vec![42]);

        let proof = proving.extract_proof();

        let proof_check =
            create_proof_check_backend::<BlakeTwo256>(in_memory_root.into(), proof).unwrap();
        assert!(proof_check.storage(&[0]).is_err());
        assert_eq!(proof_check.storage(&[42]).unwrap().unwrap(), vec![42]);
        // NOTE(review): key 41 is also readable even though only 42 was queried —
        // it sits in a trie node close to the queried key, so it ends up in the proof.
        assert_eq!(proof_check.storage(&[41]).unwrap().unwrap(), vec![41]);
        assert_eq!(proof_check.storage(&[64]).unwrap(), None);

        let proving = ProvingBackend::new(trie);
        assert_eq!(proving.child_storage(child_info_1, &[64]), Ok(Some(vec![64])));

        let proof = proving.extract_proof();
        let proof_check =
            create_proof_check_backend::<BlakeTwo256>(in_memory_root.into(), proof).unwrap();
        assert_eq!(proof_check.child_storage(child_info_1, &[64]).unwrap().unwrap(), vec![64]);
    }
}
// Integration test: issues a PubNub PAM "grant" request against the demo
// keyset. Requires network access and a secret key in the environment.
use pubnub_hyper::core::data::{pam, request};
use pubnub_hyper::runtime::tokio_global::TokioGlobal;
use pubnub_hyper::transport::hyper::Hyper;
use pubnub_hyper::Builder;
use std::collections::HashMap;

mod common;

/// Reads the PAM secret key from the environment, panicking with a hint if
/// it is unset.
/// NOTE(review): the variable is named `PUBNUB_TEST_SUBSCRIBE_KEY` but holds
/// the *secret* key — confirm the intended env var name against CI config.
fn secret_key_from_env() -> String {
    std::env::var("PUBNUB_TEST_SUBSCRIBE_KEY")
        .expect("you must pass the secret key at PUBNUB_TEST_SUBSCRIBE_KEY")
}

#[test]
fn grant() {
    common::init();
    common::current_thread_block_on(async {
        // Transport for the "demo" keyset; the secret key is what enables
        // PAM (grant) calls.
        let transport = Hyper::new()
            .agent("Rust-Agent-Test")
            .publish_key("demo")
            .subscribe_key("demo")
            .secret_key(secret_key_from_env())
            .build()
            .unwrap();
        let pubnub = Builder::with_components(transport, TokioGlobal).build();
        {
            pubnub
                .call(request::Grant {
                    ttl: 10,
                    permissions: pam::Permissions {
                        // Grants addressed by exact resource name.
                        resources: pam::Resources {
                            channels: {
                                let mut map = HashMap::new();
                                map.insert("channel_a".into(), pam::BitMask::MANAGE);
                                map.insert("channel_b".into(), pam::BitMask::READ);
                                map
                            },
                            groups: {
                                let mut map = HashMap::new();
                                map.insert("groups_a".into(), pam::BitMask::MANAGE);
                                map.insert("groups_b".into(), pam::BitMask::READ);
                                map
                            },
                            users: {
                                let mut map = HashMap::new();
                                map.insert("users_a".into(), pam::BitMask::MANAGE);
                                map.insert("users_b".into(), pam::BitMask::READ);
                                map
                            },
                            spaces: {
                                let mut map = HashMap::new();
                                map.insert("spaces_a".into(), pam::BitMask::MANAGE);
                                map.insert("spaces_b".into(), pam::BitMask::READ);
                                map
                            },
                        },
                        // Grants addressed by pattern.
                        patterns: pam::Patterns {
                            channels: {
                                let mut map = HashMap::new();
                                map.insert("channel_c".into(), pam::BitMask::MANAGE);
                                map.insert("channel_d".into(), pam::BitMask::READ);
                                map
                            },
                            groups: {
                                let mut map = HashMap::new();
                                map.insert("groups_c".into(), pam::BitMask::MANAGE);
                                map.insert("groups_d".into(), pam::BitMask::READ);
                                map
                            },
                            users: {
                                let mut map = HashMap::new();
                                map.insert("users_c".into(), pam::BitMask::MANAGE);
                                map.insert("users_d".into(), pam::BitMask::READ);
                                map
                            },
                            spaces: {
                                let mut map = HashMap::new();
                                map.insert("spaces_c".into(), pam::BitMask::MANAGE);
                                map.insert("spaces_d".into(), pam::BitMask::READ);
                                map
                            },
                        },
                    },
                    // Arbitrary metadata forwarded with the grant.
                    meta: json::object! {
                        "user_id" => "qwerty",
                    },
                })
                .await
                .unwrap();
        }
    });
}
// Harness-sanity tests.

// Always passes: proves the test harness itself runs.
#[test]
fn firsttest() {}

// NOTE(review): deliberately failing assertion — presumably here to show
// that failures are reported; confirm before treating this as a regression.
#[test]
fn secondtest() {
    assert!(false);
}
use std::collections::HashSet;
use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::process;

/// Finds the first pair of numbers (in reading order) in `nums` that sums
/// to `total`, returning `(earlier_seen, current)` — or `None` when no such
/// pair exists. Single-pass, O(n) via a hash set of previously seen values.
fn find_pair(total: i32, nums: impl IntoIterator<Item = i32>) -> Option<(i32, i32)> {
    let mut seen = HashSet::new();
    for num in nums {
        let val = total - num;
        if seen.contains(&val) {
            return Some((val, num));
        }
        seen.insert(num);
    }
    None
}

/// Usage: <prog> <total> <file-of-integers>
/// Prints the product of the first pair of file entries summing to `total`.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() != 3 {
        eprintln!("Invalid arguments, expected 2 args");
        process::exit(1);
    }
    let total = args[1].parse::<i32>().expect("arg1 not an int");
    let file = File::open(&args[2]).expect("arg2 not a file on disk");
    let reader = BufReader::new(file);

    // Parse each line into an i32, panicking with the original message on
    // malformed entries (unchanged behavior).
    let nums = reader.lines().map(|line| {
        line.unwrap()
            .parse::<i32>()
            .expect("File has a non-numeric entry")
    });

    match find_pair(total, nums) {
        Some((val, num)) => {
            println!("Result = {}, from {} and {}", val * num, val, num);
        }
        None => {
            // Fix: previously the program exited 0 with no output when no
            // pair existed; report the failure explicitly instead.
            eprintln!("No pair sums to {}", total);
            process::exit(1);
        }
    }
}
use std::path::Path;

use crate::laze_parser::parser::LazeParser;

/// Parses a minimal `while`-statement program written in the Japanese
/// surface syntax and compares the Debug rendering of the resulting AST
/// against a golden string.
#[test]
fn simple() {
    // PEG grammar file defining the Japanese syntax.
    let mut test_parser = LazeParser::new(Path::new("./parser_files/ja.peg"));
    let ast = test_parser.parse(Path::new("./laze_tests/stm/while_stm/while_simple.laze"));
    // Render the AST via its Debug impl into a String for comparison.
    let mut ast_string = String::new();
    let _ = std::fmt::write(&mut ast_string, format_args!("{:?}", ast));
    // Golden snapshot of the expected AST. The `pos` values are presumably
    // offsets into the source file — confirm against the parser.
    assert_eq!(
        ast_string,
        r##"DecList([Dec_ { pos: 72, data: Func("実行", [], [], Stm_ { pos: 72, data: Compound([Stm_ { pos: 36, data: Dec(Dec_ { pos: 36, data: Var(Var_ { pos: 27, data: Simple("a") }, Type_ { pos: 23, data: Name("整数") }, Exp_ { pos: 30, data: String("0") }) }) }, Stm_ { pos: 71, data: While(Exp_ { pos: 42, data: BinOp([Lt], [Exp_ { pos: 39, data: Var("a") }, Exp_ { pos: 42, data: String("5") }]) }, Stm_ { pos: 71, data: Compound([Stm_ { pos: 69, data: Assign(Var_ { pos: 59, data: Simple("a") }, Exp_ { pos: 63, data: String("1") }, Add) }]) }) }]) }) }])"##
    );
}
use json_to_table::json_to_table; use serde_json::json; use tabled::settings::{Alignment, Padding, Style}; use testing_table::test_table; #[cfg(feature = "color")] use tabled::{ grid::color::AnsiColor, grid::config::{ColoredConfig, SpannedConfig}, }; test_table!( config_from_table_test, json_to_table(&json!({ "key1": 123, "234": ["123", "234", "456"], "key22": { "k1": 1, "k2": 2, } })) .with(Alignment::center()) .with(Alignment::center_vertical()) .with(Style::modern()) .collapse(), "┌───────┬────────┐" "│ │ 123 │" "│ ├────────┤" "│ 234 │ 234 │" "│ ├────────┤" "│ │ 456 │" "├───────┼────────┤" "│ key1 │ 123 │" "├───────┼────┬───┤" "│ │ k1 │ 1 │" "│ key22 ├────┼───┤" "│ │ k2 │ 2 │" "└───────┴────┴───┘" ); test_table!( config_from_table_padding_zero_test, json_to_table(&json!({ "key1": 123, "234": ["123", "234", "456"], "key22": { "k1": 1, "k2": 2, } })) .with(Padding::zero()) .with(Alignment::center()) .with(Alignment::center_vertical()) .with(Style::modern()) .collapse(), "┌─────┬────┐" "│ │123 │" "│ ├────┤" "│ 234 │234 │" "│ ├────┤" "│ │456 │" "├─────┼────┤" "│key1 │123 │" "├─────┼──┬─┤" "│ │k1│1│" "│key22├──┼─┤" "│ │k2│2│" "└─────┴──┴─┘" ); test_table!( config_from_table_general_test, json_to_table(&json!({ "key1": 123, "234": ["123", "234", "456"], "key22": { "k1": 1, "k2": 2, } })) .with(Padding::zero()) .with(Alignment::center()) .with(Alignment::center_vertical()) .with(Style::modern()), "┌─────┬──────┐" "│ │┌───┐ │" "│ ││123│ │" "│ │├───┤ │" "│ 234 ││234│ │" "│ │├───┤ │" "│ ││456│ │" "│ │└───┘ │" "├─────┼──────┤" "│key1 │ 123 │" "├─────┼──────┤" "│ │┌──┬─┐│" "│ ││k1│1││" "│key22│├──┼─┤│" "│ ││k2│2││" "│ │└──┴─┘│" "└─────┴──────┘" ); #[cfg(feature = "color")] test_table!( color_test, json_to_table(&json!({ "key1": 123, "234": ["123", "234", "456"], "key22": { "k1": 1, "k2": 2, } })) .with(ColoredConfig::new({ let mut cfg = SpannedConfig::default(); cfg.set_border_color_global(AnsiColor::new("\u{1b}[34m".into(), "\u{1b}[39m".into())); cfg })) 
.with(Style::modern()) .collapse(), "\u{1b}[34m┌─────\u{1b}[39m\u{1b}[34m┬────┐\u{1b}[39m" "\u{1b}[34m│\u{1b}[39m234 \u{1b}[34m│\u{1b}[39m123 \u{1b}[34m│\u{1b}[39m" "\u{1b}[34m│\u{1b}[39m \u{1b}[34m├────┤\u{1b}[39m" "\u{1b}[34m│\u{1b}[39m \u{1b}[34m│\u{1b}[39m234 \u{1b}[34m│\u{1b}[39m" "\u{1b}[34m│\u{1b}[39m \u{1b}[34m├────┤\u{1b}[39m" "\u{1b}[34m│\u{1b}[39m \u{1b}[34m│\u{1b}[39m456 \u{1b}[34m│\u{1b}[39m" "\u{1b}[34m├─────\u{1b}[39m\u{1b}[34m┼────┤\u{1b}[39m" "\u{1b}[34m│\u{1b}[39mkey1 \u{1b}[34m│\u{1b}[39m123 \u{1b}[34m│\u{1b}[39m" "\u{1b}[34m├─────\u{1b}[39m\u{1b}[34m┼──\u{1b}[39m\u{1b}[34m┬─┤\u{1b}[39m" "\u{1b}[34m│\u{1b}[39mkey22\u{1b}[34m│\u{1b}[39mk1\u{1b}[34m│\u{1b}[39m1\u{1b}[34m│\u{1b}[39m" "\u{1b}[34m│\u{1b}[39m \u{1b}[34m├──\u{1b}[39m\u{1b}[34m┼─┤\u{1b}[39m" "\u{1b}[34m│\u{1b}[39m \u{1b}[34m│\u{1b}[39mk2\u{1b}[34m│\u{1b}[39m2\u{1b}[34m│\u{1b}[39m" "\u{1b}[34m└─────\u{1b}[39m\u{1b}[34m┴──\u{1b}[39m\u{1b}[34m┴─┘\u{1b}[39m" );
$NetBSD: patch-vendor_libc_src_unix_bsd_netbsdlike_netbsd_mips.rs,v 1.2 2023/01/23 18:49:04 he Exp $ Add mips support. --- vendor/libc/src/unix/bsd/netbsdlike/netbsd/mips.rs.orig 2022-11-06 22:14:03.866753983 +0000 +++ vendor/libc/src/unix/bsd/netbsdlike/netbsd/mips.rs 2022-11-06 22:07:15.448812020 +0000 @@ -0,0 +1,22 @@ +use PT_FIRSTMACH; + +pub type c_long = i32; +pub type c_ulong = u32; +pub type c_char = i8; +pub type __cpu_simple_lock_nv_t = ::c_int; + +// should be pub(crate), but that requires Rust 1.18.0 +cfg_if! { + if #[cfg(libc_const_size_of)] { + #[doc(hidden)] + pub const _ALIGNBYTES: usize = ::mem::size_of::<::c_longlong>() - 1; + } else { + #[doc(hidden)] + pub const _ALIGNBYTES: usize = 8 - 1; + } +} + +pub const PT_GETREGS: ::c_int = PT_FIRSTMACH + 1; +pub const PT_SETREGS: ::c_int = PT_FIRSTMACH + 2; +pub const PT_GETFPREGS: ::c_int = PT_FIRSTMACH + 3; +pub const PT_SETFPREGS: ::c_int = PT_FIRSTMACH + 4;
use rocket::{State, Request};
use crate::DbConn;
use rusqlite::Error;
use rocket_contrib::templates::Template;
use std::collections::HashMap;
use rocket::response::Redirect;
use crate::pages::login::User;

//#[get("/")]
//pub fn index(db_conn: State<DbConn>) -> Result<String, Error> {
//    db_conn.lock()
//        .expect("db connection lock")
//        .query_row("SELECT username FROM User",
//            &[]: &[&str;0],
//            |row| { row.get(0) })
//}

/// Home page for an authenticated visitor: only matches when the `User`
/// request guard succeeds.
#[get("/")]
pub fn user_index(user: User) -> Template {
    let mut context = HashMap::new();
    // user.0 is the value carried by the guard — presumably the user id;
    // confirm against `crate::pages::login::User`.
    context.insert("user_id", user.0);
    Template::render("index", &context)
}

/// Fallback for unauthenticated visitors (rank 2 loses to `user_index`):
/// redirect to the login page.
#[get("/", rank = 2)]
pub fn index() -> Redirect {
    Redirect::to(uri!(crate::pages::login::login_page))
}

/// 404 catcher: renders the error template with the path that was requested.
#[catch(404)]
pub fn not_found(req: &Request) -> Template {
    let mut map = HashMap::new();
    map.insert("path", req.uri().path());
    Template::render("error/404", &map)
}
use std::io::{self, prelude::*};
use std::error::Error;

/*
 - simulate algo on input
 - each reverse, add that length to the count
*/

/// Runs the "Reversort" algorithm in place and returns its total cost.
///
/// For each position `i` (0-based, up to `n - 2`) the first minimum of
/// `vals[i..]` is located at `j` and `vals[i..=j]` is reversed; the cost of
/// one reversal is the length of the reversed slice.
///
/// Fix: the original `0..n - 1` underflowed `usize` for an empty slice
/// (panic in debug builds); `saturating_sub` makes n = 0 and n = 1 cost 0.
fn reversort(vals: &mut [u32]) -> usize {
    let n = vals.len();
    let mut cost = 0;
    for i in 0..n.saturating_sub(1) {
        // `min_by_key` returns the first minimum, which is what the
        // algorithm specifies for ties.
        let (j, _) = vals
            .iter()
            .enumerate()
            .skip(i)
            .min_by_key(|&(_, &val)| val)
            .unwrap();
        cost += reverse(&mut vals[i..=j]);
    }
    cost
}

/// Reverses the slice in place and returns its length (the reversal cost).
fn reverse(vals: &mut [u32]) -> usize {
    vals.reverse();
    vals.len()
}

type Res<T> = Result<T, Box<dyn Error>>;

fn main() -> Res<()> {
    run_tests(io::stdin().lock().lines())
}

/// Reads the case count `t`, then solves and prints each case.
/// Panics on malformed input.
fn run_tests(mut lines: impl Iterator<Item = io::Result<String>>) -> Res<()> {
    let line = lines.next().unwrap()?;
    let t = line.parse()?;
    for test_no in 1..=t {
        let mut vals = read_test_input(&mut lines)?;
        let ans = reversort(&mut vals);
        println!("Case #{}: {}", test_no, ans);
    }
    // The whole input must have been consumed.
    assert!(lines.next().is_none());
    Ok(())
}

/// Reads one test case: a length line `n`, then a line of `n` values.
/// Panics on malformed input.
fn read_test_input(lines: &mut impl Iterator<Item = io::Result<String>>) -> Res<Vec<u32>> {
    let line = lines.next().unwrap()?;
    let n: usize = line.parse()?;
    let line = lines.next().unwrap()?;
    let vals: Vec<_> = line
        .split_whitespace()
        .map(|word| word.parse::<u32>())
        .collect::<Result<_, _>>()?;
    assert_eq!(vals.len(), n);
    Ok(vals)
}
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::time::Duration;
use std::{collections::HashMap, convert::Infallible, path::PathBuf, sync::Arc};
use warp::http::StatusCode;
use warp::Filter;

use structopt::StructOpt;

use std::process::{Command, ExitStatus, Output};

/// Handle to an svn working copy; every operation shells out to the `svn`
/// binary with `path` as the current directory.
pub struct Subversion {
    path: PathBuf,
}

impl Subversion {
    /// `svn cleanup --remove-unversioned`.
    /// Returns `Ok(output)` on a zero exit status, `Err(output)` otherwise.
    /// Panics if the `svn` process cannot be spawned.
    fn cleanup(&self) -> Result<Output, Output> {
        let output = Command::new("svn")
            .args(&["cleanup", "--remove-unversioned"])
            .current_dir(&self.path)
            .output()
            .unwrap();
        if output.status.success() {
            Ok(output)
        } else {
            Err(output)
        }
    }

    /// `svn revert -R .` — recursively discards local modifications.
    /// Same success/failure convention as `cleanup`.
    fn revert(&self) -> Result<Output, Output> {
        let output = Command::new("svn")
            .args(&["revert", "-R", "."])
            .current_dir(&self.path)
            .output()
            .unwrap();
        if output.status.success() {
            Ok(output)
        } else {
            Err(output)
        }
    }

    /// `svn up` — brings the working copy up to date.
    fn update(&self) -> Result<Output, Output> {
        let output = Command::new("svn")
            .arg("up")
            .current_dir(&self.path)
            .output()
            .unwrap();
        if output.status.success() {
            Ok(output)
        } else {
            Err(output)
        }
    }

    /// Commits with the generated message "[CD] Package: {id}, Repo: {repo}".
    /// NOTE(review): credentials are passed as command-line flags, which are
    /// visible in the process list on shared hosts.
    fn commit(
        &self,
        repo: &str,
        id: &str,
        username: &str,
        password: &str,
    ) -> Result<Output, Output> {
        let output = Command::new("svn")
            .args(&["commit", "-m"])
            .arg(format!("[CD] Package: {}, Repo: {}", id, repo))
            .args(&[
                format!("--username={}", username),
                format!("--password={}", password),
            ])
            .current_dir(&self.path)
            .output()
            .unwrap();
        if output.status.success() {
            Ok(output)
        } else {
            Err(output)
        }
    }
}

/// JSON body of a package-update request.
#[derive(Serialize, Deserialize)]
struct PackageUpdateRequest {
    release: Release,
}

/// Release metadata accepted from clients; optional fields are omitted from
/// serialized output when unset/empty.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]
pub struct Release {
    pub version: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub channel: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub authors: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub license: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub license_url: Option<String>,
    pub target: pahkat_types::payload::Target,
}
/// Errors surfaced to HTTP clients by the package-update endpoint.
#[derive(Debug, thiserror::Error)]
enum PackageUpdateError {
    #[error("Invalid API token")]
    Unauthorized,
    #[error("Unsupported repository identifier.")]
    UnsupportedRepo,
    #[error("{0}")]
    RepoError(#[from] pahkat_repomgr::package::update::Error),
    #[error("Invalid version provided")]
    VersionError(#[from] pahkat_types::package::version::Error),
    #[error("Indexing error")]
    IndexError,
}

impl warp::reject::Reject for PackageUpdateError {}

impl warp::reply::Reply for PackageUpdateError {
    /// Renders the error as a plain-text body with a matching HTTP status.
    fn into_response(self) -> warp::reply::Response {
        let msg = format!("{}", self);
        let code = match self {
            PackageUpdateError::Unauthorized => StatusCode::from_u16(403).unwrap(),
            PackageUpdateError::UnsupportedRepo => StatusCode::from_u16(400).unwrap(),
            PackageUpdateError::RepoError(_) => StatusCode::from_u16(500).unwrap(),
            PackageUpdateError::IndexError => StatusCode::from_u16(500).unwrap(),
            PackageUpdateError::VersionError(_) => StatusCode::from_u16(400).unwrap(),
        };
        warp::reply::with_status(msg, code).into_response()
    }
}

/// Handler for `PATCH /{repo_id}/packages/{package_id}` (see the route in
/// `main`): updates the package's version in the svn-backed repository,
/// reindexes, and commits.
///
/// Errors are returned as `Ok(...)` replies (never `Err`) so warp renders
/// them through `PackageUpdateError`'s `Reply` impl instead of rejecting.
async fn process_package_update_request(
    config: Arc<Config>,
    svn: Arc<HashMap<String, Arc<Mutex<Subversion>>>>,
    repo_id: String,
    package_id: String,
    req: PackageUpdateRequest,
    auth_token: String,
) -> Result<Box<dyn warp::Reply>, Infallible> {
    // Bearer-token auth against the single configured API token.
    if !auth_token.starts_with("Bearer ") {
        return Ok(Box::new(PackageUpdateError::Unauthorized));
    }
    let candidate = auth_token.split(" ").skip(1).next().unwrap();
    if candidate != config.api_token {
        return Ok(Box::new(PackageUpdateError::Unauthorized));
    }

    if !config.repos.contains_key(&repo_id) {
        return Ok(Box::new(PackageUpdateError::UnsupportedRepo));
    }

    // NOTE(review): bound but never used below — the working-copy path
    // actually comes from the locked `Subversion` handle.
    let repo_path = &config.repos[&repo_id];

    // Serialize all work on this repo behind its mutex.
    log::info!("Waiting for lock on repository...");
    let svn = svn[&repo_id].lock().await;
    log::info!("Got lock!");

    // Retry loop: repeat the whole revert/cleanup/update/patch/commit cycle
    // until the commit succeeds (e.g. after losing a commit race).
    loop {
        log::info!("Updating repository...");
        svn.revert().unwrap();
        svn.cleanup().unwrap();
        svn.update().unwrap();

        let version: pahkat_types::package::Version = match req.release.version.parse() {
            Ok(v) => v,
            Err(e) => return Ok(Box::new(PackageUpdateError::VersionError(e))),
        };

        let inner_req = pahkat_repomgr::package::update::Request::builder()
            .repo_path(svn.path.clone().into())
            .id(package_id.clone().into())
            .version(Cow::Owned(version))
            .channel(req.release.channel.as_ref().map(|x| Cow::Borrowed(&**x)))
            .target(Cow::Borrowed(&req.release.target))
            .url(None)
            .build();

        log::info!("Updating package...");
        match pahkat_repomgr::package::update::update(inner_req) {
            Ok(_) => {}
            Err(e) => return Ok(Box::new(PackageUpdateError::RepoError(e))),
        };

        log::info!("Updating index...");
        match pahkat_repomgr::repo::indexing::index(
            pahkat_repomgr::repo::indexing::Request::builder()
                .path(svn.path.clone().into())
                .build(),
        ) {
            Ok(_) => {}
            // NOTE(review): the underlying error `e` is discarded here; only
            // a generic IndexError reaches the client.
            Err(e) => return Ok(Box::new(PackageUpdateError::IndexError)),
        };

        log::info!("Committing to repository...");
        match svn.commit(
            &repo_id,
            &package_id,
            &config.svn_username,
            &config.svn_password,
        ) {
            Ok(_) => break,
            Err(_) => {
                // Commit refused (e.g. out-of-date working copy); back off
                // and retry the cycle from the top.
                // NOTE(review): `tokio::time::delay_for` is the tokio 0.2
                // name (renamed `sleep` in 1.x) — confirm the pinned version.
                log::error!("Blocked from committing; retrying in 5 seconds.");
                tokio::time::delay_for(std::time::Duration::from_secs(5)).await;
                continue;
            }
        }
    }

    Ok(Box::new("Success"))
}

/// Server configuration, deserialized from the TOML file named on the CLI.
#[derive(Serialize, Deserialize)]
pub struct Config {
    svn_username: String,
    svn_password: String,
    api_token: String,
    // repo id -> path of that repo's svn working copy
    repos: HashMap<String, PathBuf>,
}

// warp filters that inject shared state into handlers.
mod filters {
    use super::*;
    use std::collections::HashMap;
    use std::convert::Infallible;
    use std::sync::Arc;
    use tokio::sync::Mutex;
    use warp::Filter;

    // Clones the shared Config into every request.
    pub fn config(
        config: &Arc<Config>,
    ) -> impl Filter<Extract = (Arc<Config>,), Error = Infallible> + Clone {
        let config = Arc::clone(config);
        warp::any().map(move || Arc::clone(&config))
    }

    // Clones the map of per-repo svn handles into every request.
    pub fn svn(
        svn: &Arc<HashMap<String, Arc<Mutex<Subversion>>>>,
    ) -> impl Filter<Extract = (Arc<HashMap<String, Arc<Mutex<Subversion>>>>,), Error = Infallible> + Clone
    {
        let svn = Arc::clone(svn);
        warp::any().map(move || Arc::clone(&svn))
    }
}

// Command-line arguments. (Plain `//` comments here on purpose: structopt
// turns doc comments into --help text, which would change behavior.)
#[derive(StructOpt)]
struct Args {
    #[structopt(short, long)]
    config_path: PathBuf,
}

use tokio::sync::Mutex;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    env_logger::from_env(env_logger::Env::default().default_filter_or("debug")).init();

    let args = Args::from_args();
    let config = std::fs::read_to_string(&args.config_path)?;
    let config: Arc<Config> = Arc::new(toml::from_str(&config)?);

    // One mutex-guarded svn handle per configured repo; paths are
    // canonicalized up front (panics if a configured path does not exist).
    let mut svn = HashMap::new();
    for (key, value) in config.repos.iter() {
        svn.insert(
            key.to_string(),
            Arc::new(Mutex::new(Subversion {
                path: std::fs::canonicalize(value.clone()).unwrap(),
            })),
        );
    }
    let svn = Arc::new(svn);

    log::info!("Repos: {:#?}", &config.repos);

    // Route: PATCH /{repo_id}/packages/{package_id} with a JSON body and an
    // `authorization` header.
    let package_update = warp::any()
        .and(warp::filters::method::patch())
        .and(filters::config(&config))
        .and(filters::svn(&svn))
        .and(warp::path::param::<String>())
        .and(warp::path("packages"))
        .and(warp::path::param::<String>())
        .and(warp::body::json())
        .and(warp::header::<String>("authorization"))
        .and_then(process_package_update_request)
        .with(warp::log("pahkat_server::update_pkg"));

    // let index = warp::any()
    //     .map(|| "Hello!");

    warp::serve(package_update)
        .run(([127, 0, 0, 1], 3030))
        .await;

    Ok(())
}
use std::collections::HashMap;

/// Disjoint-set (union-find) over `i32` node ids, with path compression
/// performed lazily by `find_leader`.
///
/// Representation: `nodes[n] == None` marks `n` as the leader of its own
/// cluster; `nodes[n] == Some(p)` links `n` to a (possibly stale) parent.
pub struct UnionFind {
    pub nodes: HashMap<i32, Option<i32>>
}

impl UnionFind {
    pub fn new() -> UnionFind {
        UnionFind { nodes: HashMap::new() }
    }

    /// Merges the clusters of `node_1` and `node_2`, creating either node if
    /// it has not been seen before. After the merge, the leader of
    /// `node_1`'s cluster leads the combined cluster.
    pub fn union(&mut self, node_1: i32, node_2: i32) {
        let node_1_leader = self.find_and_create_if_not_present(node_1);
        let node_2_leader = self.find_and_create_if_not_present(node_2);
        // Bug fix: when both nodes already share a leader, the old code set
        // `leader -> Some(leader)` — a self-loop that made `find_leader`
        // recurse forever on the next lookup. A same-cluster union is a
        // no-op.
        if node_1_leader == node_2_leader {
            return;
        }
        self.update_leader(node_2_leader, Some(node_1_leader));
    }

    /// True when the two nodes currently belong to different clusters.
    /// Unseen nodes are created, each as its own singleton cluster.
    pub fn is_in_different_cluster(&mut self, node_1: i32, node_2: i32) -> bool {
        let node_1_leader = self.find_and_create_if_not_present(node_1);
        let node_2_leader = self.find_and_create_if_not_present(node_2);
        node_1_leader != node_2_leader
    }

    /// Returns the node's leader, first inserting the node as its own
    /// leader if it was not present.
    fn find_and_create_if_not_present(&mut self, node: i32) -> i32 {
        match self.find_leader(node) {
            None => {
                self.update_leader(node, None);
                node
            },
            Some(leader) => leader
        }
    }

    fn update_leader(&mut self, node: i32, leader: Option<i32>) {
        self.nodes.insert(node, leader);
    }

    /// Leader of `node`'s cluster, or `None` when the node is unknown.
    /// Every node on the walked path is re-pointed at the leader
    /// (path compression), as `find_nested_leader` below verifies.
    pub fn find_leader(&mut self, node: i32) -> Option<i32> {
        match self.nodes.get(&node) {
            None => return None, // Node not present
            Some(leader) => {
                match leader {
                    None => return Some(node), // Node is the leader of itself
                    Some(leader) => {
                        let leader = self.find_leader(*leader);
                        self.nodes.insert(node, leader); // path compression
                        leader
                    }
                }
            },
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn union_of_two_non_present() {
        let mut union = UnionFind::new();
        union.union(1, 2);
        assert_eq!(union.find_leader(1), Some(1));
        assert_eq!(union.find_leader(2), Some(1))
    }

    #[test]
    fn union_within_same_cluster_is_noop() {
        // Regression test: this used to create a leader self-loop and
        // overflow the stack on the next find_leader call.
        let mut union = UnionFind::new();
        union.union(1, 2);
        union.union(2, 1);
        assert_eq!(union.find_leader(1), Some(1));
        assert_eq!(union.find_leader(2), Some(1))
    }

    #[test]
    fn find_not_present() {
        let mut union = UnionFind::new();
        union.nodes.insert(1, None);
        assert_eq!(union.find_leader(2), None)
    }

    #[test]
    fn find_self_leader() {
        let mut union = UnionFind::new();
        union.nodes.insert(1, None);
        assert_eq!(union.find_leader(1), Some(1))
    }

    #[test]
    fn find_nested_leader() {
        let mut union = UnionFind::new();
        union.nodes.insert(1, None);
        union.nodes.insert(2, Some(1));
        union.nodes.insert(3, Some(2));
        assert_eq!(union.find_leader(3), Some(1));
        assert_eq!(*union.nodes.get(&3).unwrap(), Some(1)) // It caches the result
    }
}
use yew::prelude::*;
use yew_router::prelude::*;

use crate::components::lobby::Lobby;
use crate::components::room::Room;

/// Client-side routes for the app.
#[derive(Routable, PartialEq, Clone, Debug)]
pub(crate) enum Route {
    // "/" -> lobby view
    #[at("/")]
    Lobby,
    // "/room/{id}" -> a single room, addressed by numeric id
    #[at("/room/:id")]
    Room { id: u32 },
}

/// Maps a matched route to the component tree to render.
pub(crate) fn switch(route: &Route) -> Html {
    match route {
        Route::Lobby => {
            html! { <Lobby /> }
        }
        Route::Room { id } => {
            // Copy the id out of the borrowed route into the component prop.
            html! { <Room id=*id /> }
        }
    }
}
/// Demo entry point. The commented-out line illustrates the compile error
/// you get when binding a `&str` value to an `i32`.
fn main() {
    let s = "masuda";
    // deliberately bind to a different type (does not compile):
    // let a: i32 = a ;
}

/// Returns the compiler's name for the type of the argument.
/// The argument is consumed; for `Copy` types that costs nothing.
fn typename<T>(_: T) -> &'static str {
    std::any::type_name::<T>()
}

/// Prints the type of a string-slice binding.
fn main2() {
    let s: &str = "masuda";
    println!("s type is {}", typename(s));
}

/// Walks through several kinds of values and prints each one's type name.
fn main3() {
    // string slice
    let s: &str = "masuda";
    println!("s type is {}", typename(s));

    // integer literal (defaults to i32)
    let a = 100;
    println!("a type is {}", typename(a));

    // tuple
    let a = ("masuda", 50);
    println!("a type is {}", typename(a));

    // fixed-size array
    let a: [i32; 4] = [1, 2, 3, 4];
    println!("a type is {}", typename(a));

    // growable vector
    let v: Vec<i32> = vec![1, 2, 3, 4];
    println!("v type is {}", typename(v));

    // user-defined struct
    let a = Person { name: "masuda", age: 50 };
    println!("a type is {}", typename(a));

    // closure: its type is an unnameable compiler-generated type
    let func = |name, age| {
        println!("name: {}, age: {}", name, age);
    };
    func("masuda", 50);
    println!("func type is {}", typename(func));
}

#[derive(Debug)]
struct Person {
    name: &'static str,
    age: i32,
}
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum RiskEventType { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "maliciousIPAddressValidCredentialsBlockedIP")] MaliciousIPAddressValidCredentialsBlockedIP, #[serde(rename = "investigationsThreatIntelligenceSigninLinked")] InvestigationsThreatIntelligenceSigninLinked, #[serde(rename = "mcasSuspiciousInboxManipulationRules")] McasSuspiciousInboxManipulationRules, #[serde(rename = "mcasImpossibleTravel")] McasImpossibleTravel, #[serde(rename = "adminConfirmedUserCompromised")] AdminConfirmedUserCompromised, #[serde(rename = "generic")] Generic, #[serde(rename = "investigationsThreatIntelligence")] InvestigationsThreatIntelligence, #[serde(rename = "leakedCredentials")] LeakedCredentials, #[serde(rename = "suspiciousIPAddress")] SuspiciousIPAddress, #[serde(rename = "malwareInfectedIPAddress")] MalwareInfectedIPAddress, #[serde(rename = "unfamiliarFeatures")] UnfamiliarFeatures, #[serde(rename = "maliciousIPAddress")] MaliciousIPAddress, #[serde(rename = "anonymizedIPAddress")] AnonymizedIPAddress, #[serde(rename = "unlikelyTravel")] UnlikelyTravel, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum RiskDetail { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "adminConfirmedUserCompromised")] AdminConfirmedUserCompromised, #[serde(rename = "hidden")] Hidden, #[serde(rename = "adminConfirmedSigninCompromised")] AdminConfirmedSigninCompromised, #[serde(rename = "adminDismissedAllRiskForUser")] AdminDismissedAllRiskForUser, #[serde(rename = "userPassedMFADrivenByRiskBasedPolicy")] UserPassedMFADrivenByRiskBasedPolicy, #[serde(rename = "aiConfirmedSigninSafe")] AiConfirmedSigninSafe, #[serde(rename = "adminConfirmedSigninSafe")] AdminConfirmedSigninSafe, #[serde(rename = "userPerformedSecuredPasswordReset")] UserPerformedSecuredPasswordReset, #[serde(rename = 
"userPerformedSecuredPasswordChange")] UserPerformedSecuredPasswordChange, #[serde(rename = "adminGeneratedTemporaryPassword")] AdminGeneratedTemporaryPassword, #[serde(rename = "none")] None, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum RiskState { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "confirmedCompromised")] ConfirmedCompromised, #[serde(rename = "atRisk")] AtRisk, #[serde(rename = "dismissed")] Dismissed, #[serde(rename = "remediated")] Remediated, #[serde(rename = "confirmedSafe")] ConfirmedSafe, #[serde(rename = "none")] None, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum OperationResult { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "timeout")] Timeout, #[serde(rename = "failure")] Failure, #[serde(rename = "success")] Success, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum GroupType { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "azureAD")] AzureAD, #[serde(rename = "unifiedGroups")] UnifiedGroups, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ConditionalAccessStatus { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "notApplied")] NotApplied, #[serde(rename = "failure")] Failure, #[serde(rename = "success")] Success, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum AppliedConditionalAccessPolicyResult { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "unknown")] Unknown, #[serde(rename = "notEnabled")] NotEnabled, #[serde(rename = "notApplied")] NotApplied, #[serde(rename = "failure")] Failure, #[serde(rename = "success")] Success, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum RiskLevel { 
#[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "none")] None, #[serde(rename = "hidden")] Hidden, #[serde(rename = "high")] High, #[serde(rename = "medium")] Medium, #[serde(rename = "low")] Low, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum DataPolicyOperationStatus { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "failed")] Failed, #[serde(rename = "complete")] Complete, #[serde(rename = "running")] Running, #[serde(rename = "notStarted")] NotStarted, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum TeamsAppDistributionMethod { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "sideloaded")] Sideloaded, #[serde(rename = "organization")] Organization, #[serde(rename = "store")] Store, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum TeamsAsyncOperationStatus { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "failed")] Failed, #[serde(rename = "succeeded")] Succeeded, #[serde(rename = "inProgress")] InProgress, #[serde(rename = "notStarted")] NotStarted, #[serde(rename = "invalid")] Invalid, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum TeamsAsyncOperationType { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "createTeam")] CreateTeam, #[serde(rename = "unarchiveTeam")] UnarchiveTeam, #[serde(rename = "archiveTeam")] ArchiveTeam, #[serde(rename = "cloneTeam")] CloneTeam, #[serde(rename = "invalid")] Invalid, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum GiphyRatingType { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "strict")] Strict, #[serde(rename = "moderate")] Moderate, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, 
Deserialize)] pub enum ClonableTeamParts { #[serde(rename = "members")] Members, #[serde(rename = "channels")] Channels, #[serde(rename = "settings")] Settings, #[serde(rename = "tabs")] Tabs, #[serde(rename = "apps")] Apps, #[serde(rename = "true")] True, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum TeamVisibilityType { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "hiddenMembership")] HiddenMembership, #[serde(rename = "public")] Public, #[serde(rename = "private")] Private, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum UserAccountSecurityType { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "administrator")] Administrator, #[serde(rename = "power")] Power, #[serde(rename = "standard")] Standard, #[serde(rename = "unknown")] Unknown, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum SecurityNetworkProtocol { #[serde(rename = "unknown")] Unknown, #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "spxII")] SpxII, #[serde(rename = "spx")] Spx, #[serde(rename = "ipx")] Ipx, #[serde(rename = "raw")] Raw, #[serde(rename = "nd")] Nd, #[serde(rename = "ipv6DestinationOptions")] Ipv6DestinationOptions, #[serde(rename = "ipv6NoNextHeader")] Ipv6NoNextHeader, #[serde(rename = "icmpV6")] IcmpV6, #[serde(rename = "ipSecAuthenticationHeader")] IpSecAuthenticationHeader, #[serde(rename = "ipSecEncapsulatingSecurityPayload")] IpSecEncapsulatingSecurityPayload, #[serde(rename = "ipv6FragmentHeader")] Ipv6FragmentHeader, #[serde(rename = "ipv6RoutingHeader")] Ipv6RoutingHeader, #[serde(rename = "ipv6")] Ipv6, #[serde(rename = "idp")] Idp, #[serde(rename = "udp")] Udp, #[serde(rename = "pup")] Pup, #[serde(rename = "tcp")] Tcp, #[serde(rename = "ipv4")] Ipv4, #[serde(rename = "ggp")] Ggp, #[serde(rename = "igmp")] Igmp, #[serde(rename = "icmp")] 
Icmp,
    #[serde(rename = "ip")] Ip,
}

// NOTE(review): the enums below appear to be machine-generated API model types
// (Microsoft Graph-style camelCase wire names — verify against the generator).
// Each variant's on-the-wire form is given by its `#[serde(rename = "...")]`
// attribute. Variant declaration order is preserved exactly: the derived
// `Ord`/`PartialOrd` implementations compare by declaration order.

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RegistryValueType {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "sz")] Sz,
    #[serde(rename = "qwordlittleEndian")] QwordlittleEndian,
    #[serde(rename = "qword")] Qword,
    #[serde(rename = "none")] None,
    #[serde(rename = "multiSz")] MultiSz,
    #[serde(rename = "link")] Link,
    #[serde(rename = "expandSz")] ExpandSz,
    #[serde(rename = "dwordBigEndian")] DwordBigEndian,
    #[serde(rename = "dwordLittleEndian")] DwordLittleEndian,
    #[serde(rename = "dword")] Dword,
    #[serde(rename = "binary")] Binary,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RegistryOperation {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "delete")] Delete,
    #[serde(rename = "modify")] Modify,
    #[serde(rename = "create")] Create,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RegistryHive {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "usersDefault")] UsersDefault,
    #[serde(rename = "localMachineSystem")] LocalMachineSystem,
    #[serde(rename = "localMachineSoftware")] LocalMachineSoftware,
    #[serde(rename = "localMachineSecurity")] LocalMachineSecurity,
    #[serde(rename = "localMachineSam")] LocalMachineSam,
    #[serde(rename = "currentUser")] CurrentUser,
    #[serde(rename = "currentConfig")] CurrentConfig,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ProcessIntegrityLevel {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "system")] System,
    #[serde(rename = "high")] High,
    #[serde(rename = "medium")] Medium,
    #[serde(rename = "low")] Low,
    #[serde(rename = "untrusted")] Untrusted,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum LogonType {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "service")] Service,
    #[serde(rename = "batch")] Batch,
    #[serde(rename = "network")] Network,
    #[serde(rename = "remoteInteractive")] RemoteInteractive,
    #[serde(rename = "interactive")] Interactive,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum FileHashType {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "ctph")] Ctph,
    #[serde(rename = "lsHash")] LsHash,
    #[serde(rename = "authenticodeHash256")] AuthenticodeHash256,
    #[serde(rename = "md5")] Md5,
    #[serde(rename = "sha256")] Sha256,
    #[serde(rename = "sha1")] Sha1,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum EmailRole {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "recipient")] Recipient,
    #[serde(rename = "sender")] Sender,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ConnectionStatus {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "failed")] Failed,
    #[serde(rename = "blocked")] Blocked,
    #[serde(rename = "succeeded")] Succeeded,
    #[serde(rename = "attempted")] Attempted,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ConnectionDirection {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "outbound")] Outbound,
    #[serde(rename = "inbound")] Inbound,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum AlertStatus {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "dismissed")] Dismissed,
    #[serde(rename = "resolved")] Resolved,
    #[serde(rename = "inProgress")] InProgress,
    #[serde(rename = "newAlert")] NewAlert,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum AlertSeverity {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "high")] High,
    #[serde(rename = "medium")] Medium,
    #[serde(rename = "low")] Low,
    #[serde(rename = "informational")] Informational,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum AlertFeedback {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "benignPositive")] BenignPositive,
    #[serde(rename = "falsePositive")] FalsePositive,
    #[serde(rename = "truePositive")] TruePositive,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Status {
    #[serde(rename = "unknownFutureValue")] UnknownFutureValue,
    #[serde(rename = "ignored")] Ignored,
    #[serde(rename = "deleted")] Deleted,
    #[serde(rename = "updated")] Updated,
    #[serde(rename = "active")] Active,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DeviceEnrollmentFailureReason {
    #[serde(rename = "userAbandonment")] UserAbandonment,
    #[serde(rename = "clientDisconnected")] ClientDisconnected,
    #[serde(rename = "enrollmentRestrictionsEnforced")] EnrollmentRestrictionsEnforced,
    #[serde(rename = "featureNotSupported")] FeatureNotSupported,
    #[serde(rename = "badRequest")] BadRequest,
    #[serde(rename = "inMaintenance")] InMaintenance,
    #[serde(rename = "deviceNotSupported")] DeviceNotSupported,
    #[serde(rename = "userValidation")] UserValidation,
    #[serde(rename = "accountValidation")] AccountValidation,
    #[serde(rename = "authorization")] Authorization,
    #[serde(rename = "authentication")] Authentication,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ApplicationType {
    #[serde(rename = "desktop")] Desktop,
    #[serde(rename = "universal")] Universal,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RemoteAssistanceOnboardingStatus {
    #[serde(rename = "onboarded")] Onboarded,
    #[serde(rename = "onboarding")] Onboarding,
    #[serde(rename = "notOnboarded")] NotOnboarded,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum InstallState {
    #[serde(rename = "unknown")] Unknown,
    #[serde(rename = "uninstallFailed")] UninstallFailed,
    #[serde(rename = "notInstalled")] NotInstalled,
    #[serde(rename = "failed")] Failed,
    #[serde(rename = "installed")] Installed,
    #[serde(rename = "notApplicable")] NotApplicable,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum NotificationTemplateBrandingOptions {
    #[serde(rename = "includeContactInformation")] IncludeContactInformation,
    #[serde(rename = "includeCompanyName")] IncludeCompanyName,
    #[serde(rename = "includeCompanyLogo")] IncludeCompanyLogo,
    #[serde(rename = "none")] None,
    // NOTE(review): a literal "true" wire value looks like a generator artifact —
    // confirm against the upstream schema before removing; kept as-is here.
    #[serde(rename = "true")] True,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ManagedAppFlaggedReason {
    #[serde(rename = "rootedDevice")] RootedDevice,
    #[serde(rename = "none")] None,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WindowsInformationProtectionPinCharacterRequirements {
    #[serde(rename = "allow")] Allow,
    #[serde(rename = "requireAtLeastOne")] RequireAtLeastOne,
    #[serde(rename = "notAllow")] NotAllow,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WindowsInformationProtectionEnforcementLevel {
    #[serde(rename = "encryptAuditAndBlock")] EncryptAuditAndBlock,
    #[serde(rename = "encryptAuditAndPrompt")] EncryptAuditAndPrompt,
    #[serde(rename = "encryptAndAuditOnly")] EncryptAndAuditOnly,
    #[serde(rename = "noProtection")] NoProtection,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ManagedAppDataEncryptionType {
    #[serde(rename = "whenDeviceLocked")] WhenDeviceLocked,
    #[serde(rename = "whenDeviceLockedExceptOpenFiles")] WhenDeviceLockedExceptOpenFiles,
    #[serde(rename = "afterDeviceRestart")] AfterDeviceRestart,
    #[serde(rename = "useDeviceSettings")] UseDeviceSettings,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ManagedAppPinCharacterSet {
    #[serde(rename = "alphanumericAndSymbol")] AlphanumericAndSymbol,
    #[serde(rename = "numeric")] Numeric,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ManagedAppClipboardSharingLevel {
    #[serde(rename = "blocked")] Blocked,
    #[serde(rename = "managedApps")] ManagedApps,
    #[serde(rename = "managedAppsWithPasteIn")] ManagedAppsWithPasteIn,
    #[serde(rename = "allApps")] AllApps,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ManagedAppDataTransferLevel {
    #[serde(rename = "none")] None,
    #[serde(rename = "managedApps")] ManagedApps,
    #[serde(rename = "allApps")] AllApps,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ManagedAppDataStorageLocation {
    #[serde(rename = "localStorage")] LocalStorage,
    #[serde(rename = "sharePoint")] SharePoint,
    #[serde(rename = "oneDriveForBusiness")] OneDriveForBusiness,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DeviceManagementPartnerAppType {
    #[serde(rename = "multiTenantApp")] MultiTenantApp,
    #[serde(rename = "singleTenantApp")] SingleTenantApp,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DeviceManagementPartnerTenantState {
    #[serde(rename = "unresponsive")] Unresponsive,
    #[serde(rename = "rejected")] Rejected,
    #[serde(rename = "terminated")] Terminated,
    #[serde(rename = "enabled")] Enabled,
    #[serde(rename = "unavailable")] Unavailable,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum MobileThreatPartnerTenantState {
    #[serde(rename = "unresponsive")] Unresponsive,
    #[serde(rename = "enabled")] Enabled,
    #[serde(rename = "available")] Available,
    #[serde(rename = "unavailable")] Unavailable,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DeviceManagementExchangeConnectorType {
    #[serde(rename = "dedicated")] Dedicated,
    #[serde(rename = "serviceToService")] ServiceToService,
    #[serde(rename = "hosted")] Hosted,
    #[serde(rename = "onPremises")] OnPremises,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DeviceManagementExchangeConnectorStatus {
    #[serde(rename = "disconnected")] Disconnected,
    #[serde(rename = "connected")] Connected,
    #[serde(rename = "connectionPending")] ConnectionPending,
    #[serde(rename = "none")] None,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum VppTokenSyncStatus {
    #[serde(rename = "failed")] Failed,
    #[serde(rename = "completed")] Completed,
    #[serde(rename = "inProgress")] InProgress,
    #[serde(rename = "none")] None,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum VppTokenState {
    #[serde(rename = "assignedToExternalMDM")] AssignedToExternalMDM,
    #[serde(rename = "invalid")] Invalid,
    #[serde(rename = "expired")] Expired,
    #[serde(rename = "valid")] Valid,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Enablement {
    #[serde(rename = "disabled")] Disabled,
    #[serde(rename = "enabled")] Enabled,
    #[serde(rename = "notConfigured")] NotConfigured,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WindowsHelloForBusinessPinUsage {
    #[serde(rename = "disallowed")] Disallowed,
    #[serde(rename = "required")] Required,
    #[serde(rename = "allowed")] Allowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum MdmAuthority {
    #[serde(rename = "office365")] Office365,
    #[serde(rename = "sccm")] Sccm,
    #[serde(rename = "intune")] Intune,
    #[serde(rename = "unknown")] Unknown,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DeviceManagementExchangeConnectorSyncType {
    #[serde(rename = "deltaSync")] DeltaSync,
    #[serde(rename = "fullSync")] FullSync,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum IosUpdatesInstallStatus {
    #[serde(rename = "sharedDeviceUserLoggedInError")] SharedDeviceUserLoggedInError,
    #[serde(rename = "notSupportedOperation")] NotSupportedOperation,
    #[serde(rename = "installFailed")] InstallFailed,
    #[serde(rename = "installPhoneCallInProgress")] InstallPhoneCallInProgress,
    #[serde(rename = "installInsufficientPower")] InstallInsufficientPower,
    #[serde(rename = "installInsufficientSpace")] InstallInsufficientSpace,
    #[serde(rename = "installing")] Installing,
    #[serde(rename = "downloadInsufficientNetwork")] DownloadInsufficientNetwork,
    #[serde(rename = "downloadInsufficientPower")] DownloadInsufficientPower,
    #[serde(rename = "downloadInsufficientSpace")] DownloadInsufficientSpace,
    #[serde(rename = "downloadRequiresComputer")] DownloadRequiresComputer,
    #[serde(rename = "downloadFailed")] DownloadFailed,
    #[serde(rename = "downloading")] Downloading,
    #[serde(rename = "unknown")] Unknown,
    #[serde(rename = "idle")] Idle,
    #[serde(rename = "available")] Available,
    #[serde(rename = "success")] Success,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum PolicyPlatformType {
    #[serde(rename = "all")] All,
    #[serde(rename = "androidWorkProfile")] AndroidWorkProfile,
    #[serde(rename = "windows10AndLater")] Windows10AndLater,
    #[serde(rename = "windows81AndLater")] Windows81AndLater,
    #[serde(rename = "windowsPhone81")] WindowsPhone81,
    #[serde(rename = "macOS")] MacOS,
    #[serde(rename = "iOS")] IOS,
    #[serde(rename = "android")] Android,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DeviceThreatProtectionLevel {
    #[serde(rename = "notSet")] NotSet,
    #[serde(rename = "high")] High,
    #[serde(rename = "medium")] Medium,
    #[serde(rename = "low")] Low,
    #[serde(rename = "secured")] Secured,
    #[serde(rename = "unavailable")] Unavailable,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DeviceComplianceActionType {
    #[serde(rename = "pushNotification")] PushNotification,
    #[serde(rename = "removeResourceAccessProfiles")] RemoveResourceAccessProfiles,
    #[serde(rename = "wipe")] Wipe,
    #[serde(rename = "retire")] Retire,
    #[serde(rename = "block")] Block,
    #[serde(rename = "notification")] Notification,
    #[serde(rename = "noAction")] NoAction,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WelcomeScreenMeetingInformation {
    #[serde(rename = "showOrganizerAndTimeAndSubject")] ShowOrganizerAndTimeAndSubject,
    #[serde(rename = "showOrganizerAndTimeOnly")] ShowOrganizerAndTimeOnly,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum MiracastChannel {
    #[serde(rename = "oneHundredSixtyFive")] OneHundredSixtyFive,
    #[serde(rename = "oneHundredSixtyOne")] OneHundredSixtyOne,
    #[serde(rename = "oneHundredFiftySeven")] OneHundredFiftySeven,
    #[serde(rename = "oneHundredFiftyThree")] OneHundredFiftyThree,
    #[serde(rename = "oneHundredFortyNine")] OneHundredFortyNine,
    #[serde(rename = "fortyEight")] FortyEight,
    #[serde(rename = "fortyFour")] FortyFour,
    #[serde(rename = "forty")] Forty,
    #[serde(rename = "thirtySix")] ThirtySix,
    #[serde(rename = "eleven")] Eleven,
    #[serde(rename = "ten")] Ten,
    #[serde(rename = "nine")] Nine,
    #[serde(rename = "eight")] Eight,
    #[serde(rename = "seven")] Seven,
    #[serde(rename = "six")] Six,
    #[serde(rename = "five")] Five,
    #[serde(rename = "four")] Four,
    #[serde(rename = "three")] Three,
    #[serde(rename = "two")] Two,
    #[serde(rename = "one")] One,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WindowsUserAccountControlSettings {
    #[serde(rename = "neverNotify")] NeverNotify,
    #[serde(rename = "notifyOnAppChangesWithoutDimming")] NotifyOnAppChangesWithoutDimming,
    #[serde(rename = "notifyOnAppChanges")] NotifyOnAppChanges,
    #[serde(rename = "alwaysNotify")] AlwaysNotify,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum SiteSecurityLevel {
    #[serde(rename = "high")] High,
    #[serde(rename = "mediumHigh")] MediumHigh,
    #[serde(rename = "medium")] Medium,
    #[serde(rename = "mediumLow")] MediumLow,
    #[serde(rename = "low")] Low,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum InternetSiteSecurityLevel {
    #[serde(rename = "high")] High,
    #[serde(rename = "mediumHigh")] MediumHigh,
    #[serde(rename = "medium")] Medium,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WindowsUpdateType {
    #[serde(rename = "windowsInsiderBuildRelease")] WindowsInsiderBuildRelease,
    #[serde(rename = "windowsInsiderBuildSlow")] WindowsInsiderBuildSlow,
    #[serde(rename = "windowsInsiderBuildFast")] WindowsInsiderBuildFast,
    #[serde(rename = "businessReadyOnly")] BusinessReadyOnly,
    #[serde(rename = "all")] All,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum SharedPCAllowedAccountType {
    #[serde(rename = "domain")] Domain,
    #[serde(rename = "guest")] Guest,
    // NOTE(review): "true" wire value looks like a generator artifact — verify
    // against the upstream schema; kept as-is to preserve (de)serialization.
    #[serde(rename = "true")] True,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum SharedPCAccountDeletionPolicyType {
    #[serde(rename = "diskSpaceThresholdOrInactiveThreshold")] DiskSpaceThresholdOrInactiveThreshold,
    #[serde(rename = "diskSpaceThreshold")] DiskSpaceThreshold,
    #[serde(rename = "immediate")] Immediate,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WindowsDeliveryOptimizationMode {
    #[serde(rename = "bypassMode")] BypassMode,
    #[serde(rename = "simpleDownload")] SimpleDownload,
    #[serde(rename = "httpWithInternetPeering")] HttpWithInternetPeering,
    #[serde(rename = "httpWithPeeringPrivateGroup")] HttpWithPeeringPrivateGroup,
    #[serde(rename = "httpWithPeeringNat")] HttpWithPeeringNat,
    #[serde(rename = "httpOnly")] HttpOnly,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum EditionUpgradeLicenseType {
    #[serde(rename = "licenseFile")] LicenseFile,
    #[serde(rename = "productKey")] ProductKey,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum PrereleaseFeatures {
    #[serde(rename = "notAllowed")] NotAllowed,
    #[serde(rename = "settingsAndExperimentations")] SettingsAndExperimentations,
    #[serde(rename = "settingsOnly")] SettingsOnly,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum EdgeSearchEngineType {
    #[serde(rename = "bing")] Bing,
    #[serde(rename = "default")] Default,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum SafeSearchFilterType {
    #[serde(rename = "moderate")] Moderate,
    #[serde(rename = "strict")] Strict,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum AutomaticUpdateMode {
    #[serde(rename = "autoInstallAndRebootWithoutEndUserControl")] AutoInstallAndRebootWithoutEndUserControl,
    #[serde(rename = "autoInstallAndRebootAtScheduledTime")] AutoInstallAndRebootAtScheduledTime,
    #[serde(rename = "autoInstallAndRebootAtMaintenanceTime")] AutoInstallAndRebootAtMaintenanceTime,
    #[serde(rename = "autoInstallAtMaintenanceTime")] AutoInstallAtMaintenanceTime,
    #[serde(rename = "notifyDownload")] NotifyDownload,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WindowsSpotlightEnablementSettings {
    #[serde(rename = "enabled")] Enabled,
    #[serde(rename = "disabled")] Disabled,
    #[serde(rename = "notConfigured")] NotConfigured,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WindowsStartMenuModeType {
    #[serde(rename = "nonFullScreen")] NonFullScreen,
    #[serde(rename = "fullScreen")] FullScreen,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WindowsStartMenuAppListVisibilityType {
    #[serde(rename = "disableSettingsApp")] DisableSettingsApp,
    #[serde(rename = "remove")] Remove,
    #[serde(rename = "collapse")] Collapse,
    #[serde(rename = "userDefined")] UserDefined,
    // NOTE(review): "true" wire value looks like a generator artifact — verify
    // against the upstream schema; kept as-is to preserve (de)serialization.
    #[serde(rename = "true")] True,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DefenderCloudBlockLevelType {
    #[serde(rename = "zeroTolerance")] ZeroTolerance,
    #[serde(rename = "highPlus")] HighPlus,
    #[serde(rename = "high")] High,
    #[serde(rename = "notConfigured")] NotConfigured,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DefenderScanType {
    #[serde(rename = "full")] Full,
    #[serde(rename = "quick")] Quick,
    #[serde(rename = "disabled")] Disabled,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DefenderPromptForSampleSubmission {
    #[serde(rename = "sendAllDataWithoutPrompting")] SendAllDataWithoutPrompting,
    #[serde(rename = "neverSendData")] NeverSendData,
    #[serde(rename = "promptBeforeSendingPersonalData")] PromptBeforeSendingPersonalData,
    #[serde(rename = "alwaysPrompt")] AlwaysPrompt,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DefenderMonitorFileActivity {
    #[serde(rename = "monitorOutgoingFilesOnly")] MonitorOutgoingFilesOnly,
    #[serde(rename = "monitorIncomingFilesOnly")] MonitorIncomingFilesOnly,
    #[serde(rename = "monitorAllFiles")] MonitorAllFiles,
    #[serde(rename = "disable")] Disable,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WeeklySchedule {
    #[serde(rename = "saturday")] Saturday,
    #[serde(rename = "friday")] Friday,
    #[serde(rename = "thursday")] Thursday,
    #[serde(rename = "wednesday")] Wednesday,
    #[serde(rename = "tuesday")] Tuesday,
    #[serde(rename = "monday")] Monday,
    #[serde(rename = "sunday")] Sunday,
    #[serde(rename = "everyday")] Everyday,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DefenderThreatAction {
    #[serde(rename = "block")] Block,
    #[serde(rename = "userDefined")] UserDefined,
    #[serde(rename = "allow")] Allow,
    #[serde(rename = "remove")] Remove,
    #[serde(rename = "quarantine")] Quarantine,
    #[serde(rename = "clean")] Clean,
    #[serde(rename = "deviceDefault")] DeviceDefault,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum VisibilitySetting {
    #[serde(rename = "show")] Show,
    #[serde(rename = "hide")] Hide,
    #[serde(rename = "notConfigured")] NotConfigured,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum EdgeCookiePolicy {
    #[serde(rename = "blockAll")] BlockAll,
    #[serde(rename = "blockThirdParty")] BlockThirdParty,
    #[serde(rename = "allow")] Allow,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum DiagnosticDataSubmissionMode {
    #[serde(rename = "full")] Full,
    #[serde(rename = "enhanced")] Enhanced,
    #[serde(rename = "basic")] Basic,
    #[serde(rename = "none")] None,
    #[serde(rename = "userDefined")] UserDefined,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum BitLockerEncryptionMethod {
    #[serde(rename = "xtsAes256")] XtsAes256,
    #[serde(rename = "xtsAes128")] XtsAes128,
    #[serde(rename = "aesCbc256")] AesCbc256,
    #[serde(rename = "aesCbc128")] AesCbc128,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ApplicationGuardBlockClipboardSharingType {
    #[serde(rename = "blockNone")] BlockNone,
    #[serde(rename = "blockContainerToHost")] BlockContainerToHost,
    #[serde(rename = "blockHostToContainer")] BlockHostToContainer,
    #[serde(rename = "blockBoth")] BlockBoth,
    #[serde(rename = "notConfigured")] NotConfigured,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ApplicationGuardBlockFileTransferType {
    #[serde(rename = "blockTextFile")] BlockTextFile,
    #[serde(rename = "blockNone")] BlockNone,
    #[serde(rename = "blockImageFile")] BlockImageFile,
    #[serde(rename = "blockImageAndTextFile")] BlockImageAndTextFile,
    #[serde(rename = "notConfigured")] NotConfigured,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum AppLockerApplicationControlType {
    #[serde(rename = "auditComponentsStoreAppsAndSmartlocker")] AuditComponentsStoreAppsAndSmartlocker,
    #[serde(rename = "enforceComponentsStoreAppsAndSmartlocker")] EnforceComponentsStoreAppsAndSmartlocker,
    #[serde(rename = "auditComponentsAndStoreApps")] AuditComponentsAndStoreApps,
    #[serde(rename = "enforceComponentsAndStoreApps")] EnforceComponentsAndStoreApps,
    #[serde(rename = "notConfigured")] NotConfigured,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum FirewallPacketQueueingMethodType {
    #[serde(rename = "queueBoth")] QueueBoth,
    #[serde(rename = "queueOutbound")] QueueOutbound,
    #[serde(rename = "queueInbound")] QueueInbound,
    #[serde(rename = "disabled")] Disabled,
    #[serde(rename = "deviceDefault")] DeviceDefault,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum FirewallCertificateRevocationListCheckMethodType {
    #[serde(rename = "require")] Require,
    #[serde(rename = "attempt")] Attempt,
    #[serde(rename = "none")] None,
    #[serde(rename = "deviceDefault")] DeviceDefault,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum FirewallPreSharedKeyEncodingMethodType {
    #[serde(rename = "utF8")] UtF8,
    #[serde(rename = "none")] None,
    #[serde(rename = "deviceDefault")] DeviceDefault,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum StateManagementSetting {
    #[serde(rename = "allowed")] Allowed,
    #[serde(rename = "blocked")] Blocked,
    #[serde(rename = "notConfigured")] NotConfigured,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum IosNotificationAlertType {
    #[serde(rename = "none")] None,
    #[serde(rename = "modal")] Modal,
    #[serde(rename = "banner")] Banner,
    #[serde(rename = "deviceDefault")] DeviceDefault,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RequiredPasswordType {
    #[serde(rename = "numeric")] Numeric,
    #[serde(rename = "alphanumeric")] Alphanumeric,
    #[serde(rename = "deviceDefault")] DeviceDefault,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingAppsType {
    #[serde(rename = "agesAbove17")] AgesAbove17,
    #[serde(rename = "agesAbove12")] AgesAbove12,
    #[serde(rename = "agesAbove9")] AgesAbove9,
    #[serde(rename = "agesAbove4")] AgesAbove4,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingUnitedStatesTelevisionType {
    #[serde(rename = "adults")] Adults,
    #[serde(rename = "childrenAbove14")] ChildrenAbove14,
    #[serde(rename = "parentalGuidance")] ParentalGuidance,
    #[serde(rename = "general")] General,
    #[serde(rename = "childrenAbove7")] ChildrenAbove7,
    #[serde(rename = "childrenAll")] ChildrenAll,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingUnitedStatesMoviesType {
    #[serde(rename = "adults")] Adults,
    #[serde(rename = "restricted")] Restricted,
    #[serde(rename = "parentalGuidance13")] ParentalGuidance13,
    #[serde(rename = "parentalGuidance")] ParentalGuidance,
    #[serde(rename = "general")] General,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingUnitedKingdomTelevisionType {
    #[serde(rename = "caution")] Caution,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingUnitedKingdomMoviesType {
    #[serde(rename = "adults")] Adults,
    #[serde(rename = "agesAbove15")] AgesAbove15,
    #[serde(rename = "agesAbove12Cinema")] AgesAbove12Cinema,
    #[serde(rename = "agesAbove12Video")] AgesAbove12Video,
    #[serde(rename = "parentalGuidance")] ParentalGuidance,
    #[serde(rename = "universalChildren")] UniversalChildren,
    #[serde(rename = "general")] General,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingNewZealandTelevisionType {
    #[serde(rename = "adults")] Adults,
    #[serde(rename = "parentalGuidance")] ParentalGuidance,
    #[serde(rename = "general")] General,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingNewZealandMoviesType {
    #[serde(rename = "agesAbove16Restricted")] AgesAbove16Restricted,
    #[serde(rename = "restricted")] Restricted,
    #[serde(rename = "agesAbove18")] AgesAbove18,
    #[serde(rename = "agesAbove16")] AgesAbove16,
    #[serde(rename = "agesAbove15")] AgesAbove15,
    #[serde(rename = "agesAbove13")] AgesAbove13,
    #[serde(rename = "mature")] Mature,
    #[serde(rename = "parentalGuidance")] ParentalGuidance,
    #[serde(rename = "general")] General,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingJapanTelevisionType {
    #[serde(rename = "explicitAllowed")] ExplicitAllowed,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingJapanMoviesType {
    #[serde(rename = "agesAbove18")] AgesAbove18,
    #[serde(rename = "agesAbove15")] AgesAbove15,
    #[serde(rename = "parentalGuidance")] ParentalGuidance,
    #[serde(rename = "general")] General,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingIrelandTelevisionType {
    #[serde(rename = "mature")] Mature,
    #[serde(rename = "parentalSupervision")] ParentalSupervision,
    #[serde(rename = "youngAdults")] YoungAdults,
    #[serde(rename = "children")] Children,
    #[serde(rename = "general")] General,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingIrelandMoviesType {
    #[serde(rename = "adults")] Adults,
    #[serde(rename = "agesAbove16")] AgesAbove16,
    #[serde(rename = "agesAbove15")] AgesAbove15,
    #[serde(rename = "agesAbove12")] AgesAbove12,
    #[serde(rename = "parentalGuidance")] ParentalGuidance,
    #[serde(rename = "general")] General,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingGermanyTelevisionType {
    #[serde(rename = "adults")] Adults,
    #[serde(rename = "agesAbove16")] AgesAbove16,
    #[serde(rename = "agesAbove12")] AgesAbove12,
    #[serde(rename = "agesAbove6")] AgesAbove6,
    #[serde(rename = "general")] General,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingGermanyMoviesType {
    #[serde(rename = "adults")] Adults,
    #[serde(rename = "agesAbove16")] AgesAbove16,
    #[serde(rename = "agesAbove12")] AgesAbove12,
    #[serde(rename = "agesAbove6")] AgesAbove6,
    #[serde(rename = "general")] General,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingFranceTelevisionType {
    #[serde(rename = "agesAbove18")] AgesAbove18,
    #[serde(rename = "agesAbove16")] AgesAbove16,
    #[serde(rename = "agesAbove12")] AgesAbove12,
    #[serde(rename = "agesAbove10")] AgesAbove10,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingFranceMoviesType {
    #[serde(rename = "agesAbove18")] AgesAbove18,
    #[serde(rename = "agesAbove16")] AgesAbove16,
    #[serde(rename = "agesAbove12")] AgesAbove12,
    #[serde(rename = "agesAbove10")] AgesAbove10,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingCanadaTelevisionType {
    #[serde(rename = "agesAbove18")] AgesAbove18,
    #[serde(rename = "agesAbove14")] AgesAbove14,
    #[serde(rename = "parentalGuidance")] ParentalGuidance,
    #[serde(rename = "general")] General,
    #[serde(rename = "childrenAbove8")] ChildrenAbove8,
    #[serde(rename = "children")] Children,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingCanadaMoviesType {
    #[serde(rename = "restricted")] Restricted,
    #[serde(rename = "agesAbove18")] AgesAbove18,
    #[serde(rename = "agesAbove14")] AgesAbove14,
    #[serde(rename = "parentalGuidance")] ParentalGuidance,
    #[serde(rename = "general")] General,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RatingAustraliaTelevisionType {
    #[serde(rename = "agesAbove15AdultViolence")] AgesAbove15AdultViolence,
    #[serde(rename = "agesAbove15")] AgesAbove15,
    #[serde(rename = "mature")] Mature,
    #[serde(rename = "parentalGuidance")] ParentalGuidance,
    #[serde(rename = "general")] General,
    #[serde(rename = "children")] Children,
    #[serde(rename = "preschoolers")] Preschoolers,
    #[serde(rename = "allBlocked")] AllBlocked,
    #[serde(rename = "allAllowed")] AllAllowed,
}

#[derive(Clone, Debug, Eq, PartialEq, Ord,
PartialOrd, Hash, Serialize, Deserialize)] pub enum RatingAustraliaMoviesType { #[serde(rename = "agesAbove18")] AgesAbove18, #[serde(rename = "agesAbove15")] AgesAbove15, #[serde(rename = "mature")] Mature, #[serde(rename = "parentalGuidance")] ParentalGuidance, #[serde(rename = "general")] General, #[serde(rename = "allBlocked")] AllBlocked, #[serde(rename = "allAllowed")] AllAllowed, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum AndroidWorkProfileDefaultAppPermissionPolicyType { #[serde(rename = "autoDeny")] AutoDeny, #[serde(rename = "autoGrant")] AutoGrant, #[serde(rename = "prompt")] Prompt, #[serde(rename = "deviceDefault")] DeviceDefault, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum AndroidWorkProfileCrossProfileDataSharingType { #[serde(rename = "noRestrictions")] NoRestrictions, #[serde(rename = "allowPersonalToWork")] AllowPersonalToWork, #[serde(rename = "preventAny")] PreventAny, #[serde(rename = "deviceDefault")] DeviceDefault, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum AndroidWorkProfileRequiredPasswordType { #[serde(rename = "alphanumericWithSymbols")] AlphanumericWithSymbols, #[serde(rename = "atLeastAlphanumeric")] AtLeastAlphanumeric, #[serde(rename = "atLeastAlphabetic")] AtLeastAlphabetic, #[serde(rename = "numericComplex")] NumericComplex, #[serde(rename = "atLeastNumeric")] AtLeastNumeric, #[serde(rename = "required")] Required, #[serde(rename = "lowSecurityBiometric")] LowSecurityBiometric, #[serde(rename = "deviceDefault")] DeviceDefault, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum WebBrowserCookieSettings { #[serde(rename = "allowAlways")] AllowAlways, #[serde(rename = "allowFromWebsitesVisited")] AllowFromWebsitesVisited, #[serde(rename = "allowCurrentWebSite")] AllowCurrentWebSite, #[serde(rename = "blockAlways")] BlockAlways, 
#[serde(rename = "browserDefault")] BrowserDefault, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum AndroidRequiredPasswordType { #[serde(rename = "any")] Any, #[serde(rename = "numericComplex")] NumericComplex, #[serde(rename = "numeric")] Numeric, #[serde(rename = "lowSecurityBiometric")] LowSecurityBiometric, #[serde(rename = "alphanumericWithSymbols")] AlphanumericWithSymbols, #[serde(rename = "alphanumeric")] Alphanumeric, #[serde(rename = "alphabetic")] Alphabetic, #[serde(rename = "deviceDefault")] DeviceDefault, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum AppListType { #[serde(rename = "appsNotInListCompliant")] AppsNotInListCompliant, #[serde(rename = "appsInListCompliant")] AppsInListCompliant, #[serde(rename = "none")] None, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Windows10EditionType { #[serde(rename = "windows10ProfessionalWorkstationN")] Windows10ProfessionalWorkstationN, #[serde(rename = "windows10ProfessionalWorkstation")] Windows10ProfessionalWorkstation, #[serde(rename = "windows10ProfessionalEducationN")] Windows10ProfessionalEducationN, #[serde(rename = "windows10ProfessionalEducation")] Windows10ProfessionalEducation, #[serde(rename = "windows10ProfessionalN")] Windows10ProfessionalN, #[serde(rename = "windows10Professional")] Windows10Professional, #[serde(rename = "windows10HolographicEnterprise")] Windows10HolographicEnterprise, #[serde(rename = "windows10MobileEnterprise")] Windows10MobileEnterprise, #[serde(rename = "windows10EducationN")] Windows10EducationN, #[serde(rename = "windows10Education")] Windows10Education, #[serde(rename = "windows10EnterpriseN")] Windows10EnterpriseN, #[serde(rename = "windows10Enterprise")] Windows10Enterprise, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum DeviceManagementSubscriptionState { 
#[serde(rename = "lockedOut")] LockedOut, #[serde(rename = "blocked")] Blocked, #[serde(rename = "deleted")] Deleted, #[serde(rename = "disabled")] Disabled, #[serde(rename = "warning")] Warning, #[serde(rename = "active")] Active, #[serde(rename = "pending")] Pending, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ManagedDevicePartnerReportedHealthState { #[serde(rename = "misconfigured")] Misconfigured, #[serde(rename = "compromised")] Compromised, #[serde(rename = "unresponsive")] Unresponsive, #[serde(rename = "highSeverity")] HighSeverity, #[serde(rename = "mediumSeverity")] MediumSeverity, #[serde(rename = "lowSeverity")] LowSeverity, #[serde(rename = "secured")] Secured, #[serde(rename = "deactivated")] Deactivated, #[serde(rename = "activated")] Activated, #[serde(rename = "unknown")] Unknown, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum DeviceManagementExchangeAccessStateReason { #[serde(rename = "deviceNotKnownWithManagedApp")] DeviceNotKnownWithManagedApp, #[serde(rename = "compromisedPassword")] CompromisedPassword, #[serde(rename = "azureADBlockDueToAccessPolicy")] AzureADBlockDueToAccessPolicy, #[serde(rename = "mfaRequired")] MfaRequired, #[serde(rename = "unknownLocation")] UnknownLocation, #[serde(rename = "notEnrolled")] NotEnrolled, #[serde(rename = "notCompliant")] NotCompliant, #[serde(rename = "compliant")] Compliant, #[serde(rename = "other")] Other, #[serde(rename = "exchangeMailboxPolicy")] ExchangeMailboxPolicy, #[serde(rename = "exchangeUpgrade")] ExchangeUpgrade, #[serde(rename = "exchangeDeviceRule")] ExchangeDeviceRule, #[serde(rename = "exchangeIndividualRule")] ExchangeIndividualRule, #[serde(rename = "exchangeGlobalRule")] ExchangeGlobalRule, #[serde(rename = "unknown")] Unknown, #[serde(rename = "none")] None, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum 
DeviceManagementExchangeAccessState { #[serde(rename = "quarantined")] Quarantined, #[serde(rename = "blocked")] Blocked, #[serde(rename = "allowed")] Allowed, #[serde(rename = "unknown")] Unknown, #[serde(rename = "none")] None, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum DeviceRegistrationState { #[serde(rename = "unknown")] Unknown, #[serde(rename = "notRegisteredPendingEnrollment")] NotRegisteredPendingEnrollment, #[serde(rename = "certificateReset")] CertificateReset, #[serde(rename = "approvalPending")] ApprovalPending, #[serde(rename = "keyConflict")] KeyConflict, #[serde(rename = "revoked")] Revoked, #[serde(rename = "registered")] Registered, #[serde(rename = "notRegistered")] NotRegistered, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum DeviceEnrollmentType { #[serde(rename = "windowsCoManagement")] WindowsCoManagement, #[serde(rename = "windowsBulkAzureDomainJoin")] WindowsBulkAzureDomainJoin, #[serde(rename = "windowsAutoEnrollment")] WindowsAutoEnrollment, #[serde(rename = "windowsBulkUserless")] WindowsBulkUserless, #[serde(rename = "windowsAzureADJoin")] WindowsAzureADJoin, #[serde(rename = "appleBulkWithoutUser")] AppleBulkWithoutUser, #[serde(rename = "appleBulkWithUser")] AppleBulkWithUser, #[serde(rename = "deviceEnrollmentManager")] DeviceEnrollmentManager, #[serde(rename = "userEnrollment")] UserEnrollment, #[serde(rename = "unknown")] Unknown, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ManagementAgentType { #[serde(rename = "googleCloudDevicePolicyController")] GoogleCloudDevicePolicyController, #[serde(rename = "jamf")] Jamf, #[serde(rename = "unknown")] Unknown, #[serde(rename = "configurationManagerClientMdmEas")] ConfigurationManagerClientMdmEas, #[serde(rename = "configurationManagerClientMdm")] ConfigurationManagerClientMdm, #[serde(rename = "configurationManagerClient")] 
ConfigurationManagerClient, #[serde(rename = "easIntuneClient")] EasIntuneClient, #[serde(rename = "intuneClient")] IntuneClient, #[serde(rename = "easMdm")] EasMdm, #[serde(rename = "mdm")] Mdm, #[serde(rename = "eas")] Eas, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ComplianceState { #[serde(rename = "configManager")] ConfigManager, #[serde(rename = "inGracePeriod")] InGracePeriod, #[serde(rename = "error")] Error, #[serde(rename = "conflict")] Conflict, #[serde(rename = "noncompliant")] Noncompliant, #[serde(rename = "compliant")] Compliant, #[serde(rename = "unknown")] Unknown, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ManagedDeviceOwnerType { #[serde(rename = "personal")] Personal, #[serde(rename = "company")] Company, #[serde(rename = "unknown")] Unknown, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ActionState { #[serde(rename = "notSupported")] NotSupported, #[serde(rename = "failed")] Failed, #[serde(rename = "done")] Done, #[serde(rename = "active")] Active, #[serde(rename = "canceled")] Canceled, #[serde(rename = "pending")] Pending, #[serde(rename = "none")] None, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum MdmAppConfigKeyType { #[serde(rename = "tokenType")] TokenType, #[serde(rename = "booleanType")] BooleanType, #[serde(rename = "realType")] RealType, #[serde(rename = "integerType")] IntegerType, #[serde(rename = "stringType")] StringType, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ComplianceStatus { #[serde(rename = "notAssigned")] NotAssigned, #[serde(rename = "conflict")] Conflict, #[serde(rename = "error")] Error, #[serde(rename = "nonCompliant")] NonCompliant, #[serde(rename = "remediated")] Remediated, #[serde(rename = "compliant")] Compliant, #[serde(rename = "notApplicable")] 
NotApplicable, #[serde(rename = "unknown")] Unknown, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum VppTokenAccountType { #[serde(rename = "education")] Education, #[serde(rename = "business")] Business, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum MicrosoftStoreForBusinessLicenseType { #[serde(rename = "online")] Online, #[serde(rename = "offline")] Offline, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum WindowsDeviceType { #[serde(rename = "team")] Team, #[serde(rename = "holographic")] Holographic, #[serde(rename = "mobile")] Mobile, #[serde(rename = "desktop")] Desktop, #[serde(rename = "none")] None, #[serde(rename = "true")] True, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum MobileAppContentFileUploadState { #[serde(rename = "commitFileTimedOut")] CommitFileTimedOut, #[serde(rename = "commitFileFailed")] CommitFileFailed, #[serde(rename = "commitFilePending")] CommitFilePending, #[serde(rename = "commitFileSuccess")] CommitFileSuccess, #[serde(rename = "azureStorageUriRenewalTimedOut")] AzureStorageUriRenewalTimedOut, #[serde(rename = "azureStorageUriRenewalFailed")] AzureStorageUriRenewalFailed, #[serde(rename = "azureStorageUriRenewalPending")] AzureStorageUriRenewalPending, #[serde(rename = "azureStorageUriRenewalSuccess")] AzureStorageUriRenewalSuccess, #[serde(rename = "azureStorageUriRequestTimedOut")] AzureStorageUriRequestTimedOut, #[serde(rename = "azureStorageUriRequestFailed")] AzureStorageUriRequestFailed, #[serde(rename = "azureStorageUriRequestPending")] AzureStorageUriRequestPending, #[serde(rename = "azureStorageUriRequestSuccess")] AzureStorageUriRequestSuccess, #[serde(rename = "unknown")] Unknown, #[serde(rename = "error")] Error, #[serde(rename = "transientError")] TransientError, #[serde(rename = "success")] Success, } #[derive(Clone, 
Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ManagedAppAvailability { #[serde(rename = "lineOfBusiness")] LineOfBusiness, #[serde(rename = "global")] Global, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum WindowsArchitecture { #[serde(rename = "neutral")] Neutral, #[serde(rename = "arm")] Arm, #[serde(rename = "x64")] X64, #[serde(rename = "x86")] X86, #[serde(rename = "none")] None, #[serde(rename = "true")] True, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum MobileAppPublishingState { #[serde(rename = "published")] Published, #[serde(rename = "processing")] Processing, #[serde(rename = "notPublished")] NotPublished, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum InstallIntent { #[serde(rename = "availableWithoutEnrollment")] AvailableWithoutEnrollment, #[serde(rename = "uninstall")] Uninstall, #[serde(rename = "required")] Required, #[serde(rename = "available")] Available, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum EducationGender { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "other")] Other, #[serde(rename = "male")] Male, #[serde(rename = "female")] Female, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum EducationExternalSource { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "manual")] Manual, #[serde(rename = "sis")] Sis, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum EducationUserRole { #[serde(rename = "unknownFutureValue")] UnknownFutureValue, #[serde(rename = "none")] None, #[serde(rename = "teacher")] Teacher, #[serde(rename = "student")] Student, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum OnenoteUserRole { 
#[serde(rename = "None")] None, #[serde(rename = "Reader")] Reader, #[serde(rename = "Contributor")] Contributor, #[serde(rename = "Owner")] Owner, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum OnenoteSourceService { #[serde(rename = "OnPremOneDriveForBusiness")] OnPremOneDriveForBusiness, #[serde(rename = "OneDriveForBusiness")] OneDriveForBusiness, #[serde(rename = "OneDrive")] OneDrive, #[serde(rename = "Unknown")] Unknown, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum OnenotePatchActionType { #[serde(rename = "Prepend")] Prepend, #[serde(rename = "Insert")] Insert, #[serde(rename = "Delete")] Delete, #[serde(rename = "Append")] Append, #[serde(rename = "Replace")] Replace, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum OnenotePatchInsertPosition { #[serde(rename = "Before")] Before, #[serde(rename = "After")] After, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum OperationStatus { #[serde(rename = "Failed")] Failed, #[serde(rename = "Completed")] Completed, #[serde(rename = "Running")] Running, #[serde(rename = "NotStarted")] NotStarted, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum PlannerPreviewType { #[serde(rename = "reference")] Reference, #[serde(rename = "description")] Description, #[serde(rename = "checklist")] Checklist, #[serde(rename = "noPreview")] NoPreview, #[serde(rename = "automatic")] Automatic, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum PhysicalAddressType { #[serde(rename = "other")] Other, #[serde(rename = "business")] Business, #[serde(rename = "home")] Home, #[serde(rename = "unknown")] Unknown, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ActivityDomain { #[serde(rename = 
"unrestricted")] Unrestricted, #[serde(rename = "personal")] Personal, #[serde(rename = "work")] Work, #[serde(rename = "unknown")] Unknown, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum WebsiteType { #[serde(rename = "profile")] Profile, #[serde(rename = "blog")] Blog, #[serde(rename = "work")] Work, #[serde(rename = "home")] Home, #[serde(rename = "other")] Other, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum PhoneType { #[serde(rename = "radio")] Radio, #[serde(rename = "pager")] Pager, #[serde(rename = "otherFax")] OtherFax, #[serde(rename = "businessFax")] BusinessFax, #[serde(rename = "homeFax")] HomeFax, #[serde(rename = "assistant")] Assistant, #[serde(rename = "other")] Other, #[serde(rename = "mobile")] Mobile, #[serde(rename = "business")] Business, #[serde(rename = "home")] Home, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum SelectionLikelihoodInfo { #[serde(rename = "high")] High, #[serde(rename = "notSpecified")] NotSpecified, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum CategoryColor { #[serde(rename = "none")] None, #[serde(rename = "preset24")] Preset24, #[serde(rename = "preset23")] Preset23, #[serde(rename = "preset22")] Preset22, #[serde(rename = "preset21")] Preset21, #[serde(rename = "preset20")] Preset20, #[serde(rename = "preset19")] Preset19, #[serde(rename = "preset18")] Preset18, #[serde(rename = "preset17")] Preset17, #[serde(rename = "preset16")] Preset16, #[serde(rename = "preset15")] Preset15, #[serde(rename = "preset14")] Preset14, #[serde(rename = "preset13")] Preset13, #[serde(rename = "preset12")] Preset12, #[serde(rename = "preset11")] Preset11, #[serde(rename = "preset10")] Preset10, #[serde(rename = "preset9")] Preset9, #[serde(rename = "preset8")] Preset8, #[serde(rename = "preset7")] Preset7, #[serde(rename = "preset6")] 
Preset6, #[serde(rename = "preset5")] Preset5, #[serde(rename = "preset4")] Preset4, #[serde(rename = "preset3")] Preset3, #[serde(rename = "preset2")] Preset2, #[serde(rename = "preset1")] Preset1, #[serde(rename = "preset0")] Preset0, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum MessageActionFlag { #[serde(rename = "review")] Review, #[serde(rename = "replyToAll")] ReplyToAll, #[serde(rename = "reply")] Reply, #[serde(rename = "read")] Read, #[serde(rename = "noResponseNecessary")] NoResponseNecessary, #[serde(rename = "forward")] Forward, #[serde(rename = "fyi")] Fyi, #[serde(rename = "followUp")] FollowUp, #[serde(rename = "doNotForward")] DoNotForward, #[serde(rename = "call")] Call, #[serde(rename = "any")] Any, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum MeetingMessageType { #[serde(rename = "meetingDeclined")] MeetingDeclined, #[serde(rename = "meetingTenativelyAccepted")] MeetingTenativelyAccepted, #[serde(rename = "meetingAccepted")] MeetingAccepted, #[serde(rename = "meetingCancelled")] MeetingCancelled, #[serde(rename = "meetingRequest")] MeetingRequest, #[serde(rename = "none")] None, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum EventType { #[serde(rename = "seriesMaster")] SeriesMaster, #[serde(rename = "exception")] Exception, #[serde(rename = "occurrence")] Occurrence, #[serde(rename = "singleInstance")] SingleInstance, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum RecurrenceRangeType { #[serde(rename = "numbered")] Numbered, #[serde(rename = "noEnd")] NoEnd, #[serde(rename = "endDate")] EndDate, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum WeekIndex { #[serde(rename = "last")] Last, #[serde(rename = "fourth")] Fourth, #[serde(rename = "third")] Third, #[serde(rename = "second")] Second, 
#[serde(rename = "first")] First, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum RecurrencePatternType { #[serde(rename = "relativeYearly")] RelativeYearly, #[serde(rename = "absoluteYearly")] AbsoluteYearly, #[serde(rename = "relativeMonthly")] RelativeMonthly, #[serde(rename = "absoluteMonthly")] AbsoluteMonthly, #[serde(rename = "weekly")] Weekly, #[serde(rename = "daily")] Daily, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Sensitivity { #[serde(rename = "confidential")] Confidential, #[serde(rename = "private")] Private, #[serde(rename = "personal")] Personal, #[serde(rename = "normal")] Normal, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ResponseType { #[serde(rename = "notResponded")] NotResponded, #[serde(rename = "declined")] Declined, #[serde(rename = "accepted")] Accepted, #[serde(rename = "tentativelyAccepted")] TentativelyAccepted, #[serde(rename = "organizer")] Organizer, #[serde(rename = "none")] None, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum CalendarColor { #[serde(rename = "auto")] Auto, #[serde(rename = "maxColor")] MaxColor, #[serde(rename = "lightRed")] LightRed, #[serde(rename = "lightBrown")] LightBrown, #[serde(rename = "lightPink")] LightPink, #[serde(rename = "lightTeal")] LightTeal, #[serde(rename = "lightYellow")] LightYellow, #[serde(rename = "lightGray")] LightGray, #[serde(rename = "lightOrange")] LightOrange, #[serde(rename = "lightGreen")] LightGreen, #[serde(rename = "lightBlue")] LightBlue, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum FollowupFlagStatus { #[serde(rename = "flagged")] Flagged, #[serde(rename = "complete")] Complete, #[serde(rename = "notFlagged")] NotFlagged, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum 
InferenceClassificationType { #[serde(rename = "other")] Other, #[serde(rename = "focused")] Focused, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum Importance { #[serde(rename = "high")] High, #[serde(rename = "normal")] Normal, #[serde(rename = "low")] Low, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum BodyType { #[serde(rename = "html")] Html, #[serde(rename = "text")] Text, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum TimeZoneStandard { #[serde(rename = "iana")] Iana, #[serde(rename = "windows")] Windows, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum RecipientScopeType { #[serde(rename = "externalNonPartner")] ExternalNonPartner, #[serde(rename = "externalPartner")] ExternalPartner, #[serde(rename = "external")] External, #[serde(rename = "internal")] Internal, #[serde(rename = "none")] None, #[serde(rename = "true")] True, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum MailTipsType { #[serde(rename = "recipientSuggestions")] RecipientSuggestions, #[serde(rename = "recipientScope")] RecipientScope, #[serde(rename = "moderationStatus")] ModerationStatus, #[serde(rename = "deliveryRestriction")] DeliveryRestriction, #[serde(rename = "maxMessageSize")] MaxMessageSize, #[serde(rename = "totalMemberCount")] TotalMemberCount, #[serde(rename = "externalMemberCount")] ExternalMemberCount, #[serde(rename = "customMailTip")] CustomMailTip, #[serde(rename = "mailboxFullStatus")] MailboxFullStatus, #[serde(rename = "automaticReplies")] AutomaticReplies, #[serde(rename = "true")] True, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum LocationUniqueIdType { #[serde(rename = "bing")] Bing, #[serde(rename = "private")] Private, #[serde(rename = "directory")] Directory, 
#[serde(rename = "locationStore")] LocationStore, #[serde(rename = "unknown")] Unknown, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum LocationType { #[serde(rename = "postalAddress")] PostalAddress, #[serde(rename = "localBusiness")] LocalBusiness, #[serde(rename = "restaurant")] Restaurant, #[serde(rename = "hotel")] Hotel, #[serde(rename = "streetAddress")] StreetAddress, #[serde(rename = "geoCoordinates")] GeoCoordinates, #[serde(rename = "businessAddress")] BusinessAddress, #[serde(rename = "homeAddress")] HomeAddress, #[serde(rename = "conferenceRoom")] ConferenceRoom, #[serde(rename = "default")] Default, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum FreeBusyStatus { #[serde(rename = "unknown")] Unknown, #[serde(rename = "workingElsewhere")] WorkingElsewhere, #[serde(rename = "oof")] Oof, #[serde(rename = "busy")] Busy, #[serde(rename = "tentative")] Tentative, #[serde(rename = "free")] Free, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum AttendeeType { #[serde(rename = "resource")] Resource, #[serde(rename = "optional")] Optional, #[serde(rename = "required")] Required, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum ExternalAudienceScope { #[serde(rename = "all")] All, #[serde(rename = "contactsOnly")] ContactsOnly, #[serde(rename = "none")] None, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum AutomaticRepliesStatus { #[serde(rename = "scheduled")] Scheduled, #[serde(rename = "alwaysEnabled")] AlwaysEnabled, #[serde(rename = "disabled")] Disabled, } #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum DayOfWeek { #[serde(rename = "saturday")] Saturday, #[serde(rename = "friday")] Friday, #[serde(rename = "thursday")] Thursday, #[serde(rename = "wednesday")] Wednesday, 
#[serde(rename = "tuesday")] Tuesday, #[serde(rename = "monday")] Monday, #[serde(rename = "sunday")] Sunday, }
/// An enum to represent all characters in the Lycian block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum Lycian { /// \u{10280}: '𐊀' LetterA, /// \u{10281}: '𐊁' LetterE, /// \u{10282}: '𐊂' LetterB, /// \u{10283}: '𐊃' LetterBh, /// \u{10284}: '𐊄' LetterG, /// \u{10285}: '𐊅' LetterD, /// \u{10286}: '𐊆' LetterI, /// \u{10287}: '𐊇' LetterW, /// \u{10288}: '𐊈' LetterZ, /// \u{10289}: '𐊉' LetterTh, /// \u{1028a}: '𐊊' LetterJ, /// \u{1028b}: '𐊋' LetterK, /// \u{1028c}: '𐊌' LetterQ, /// \u{1028d}: '𐊍' LetterL, /// \u{1028e}: '𐊎' LetterM, /// \u{1028f}: '𐊏' LetterN, /// \u{10290}: '𐊐' LetterMm, /// \u{10291}: '𐊑' LetterNn, /// \u{10292}: '𐊒' LetterU, /// \u{10293}: '𐊓' LetterP, /// \u{10294}: '𐊔' LetterKk, /// \u{10295}: '𐊕' LetterR, /// \u{10296}: '𐊖' LetterS, /// \u{10297}: '𐊗' LetterT, /// \u{10298}: '𐊘' LetterTt, /// \u{10299}: '𐊙' LetterAn, /// \u{1029a}: '𐊚' LetterEn, /// \u{1029b}: '𐊛' LetterH, /// \u{1029c}: '𐊜' LetterX, } impl Into<char> for Lycian { fn into(self) -> char { match self { Lycian::LetterA => '𐊀', Lycian::LetterE => '𐊁', Lycian::LetterB => '𐊂', Lycian::LetterBh => '𐊃', Lycian::LetterG => '𐊄', Lycian::LetterD => '𐊅', Lycian::LetterI => '𐊆', Lycian::LetterW => '𐊇', Lycian::LetterZ => '𐊈', Lycian::LetterTh => '𐊉', Lycian::LetterJ => '𐊊', Lycian::LetterK => '𐊋', Lycian::LetterQ => '𐊌', Lycian::LetterL => '𐊍', Lycian::LetterM => '𐊎', Lycian::LetterN => '𐊏', Lycian::LetterMm => '𐊐', Lycian::LetterNn => '𐊑', Lycian::LetterU => '𐊒', Lycian::LetterP => '𐊓', Lycian::LetterKk => '𐊔', Lycian::LetterR => '𐊕', Lycian::LetterS => '𐊖', Lycian::LetterT => '𐊗', Lycian::LetterTt => '𐊘', Lycian::LetterAn => '𐊙', Lycian::LetterEn => '𐊚', Lycian::LetterH => '𐊛', Lycian::LetterX => '𐊜', } } } impl std::convert::TryFrom<char> for Lycian { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { '𐊀' => Ok(Lycian::LetterA), '𐊁' => Ok(Lycian::LetterE), '𐊂' => Ok(Lycian::LetterB), '𐊃' => Ok(Lycian::LetterBh), '𐊄' => Ok(Lycian::LetterG), '𐊅' 
=> Ok(Lycian::LetterD), '𐊆' => Ok(Lycian::LetterI), '𐊇' => Ok(Lycian::LetterW), '𐊈' => Ok(Lycian::LetterZ), '𐊉' => Ok(Lycian::LetterTh), '𐊊' => Ok(Lycian::LetterJ), '𐊋' => Ok(Lycian::LetterK), '𐊌' => Ok(Lycian::LetterQ), '𐊍' => Ok(Lycian::LetterL), '𐊎' => Ok(Lycian::LetterM), '𐊏' => Ok(Lycian::LetterN), '𐊐' => Ok(Lycian::LetterMm), '𐊑' => Ok(Lycian::LetterNn), '𐊒' => Ok(Lycian::LetterU), '𐊓' => Ok(Lycian::LetterP), '𐊔' => Ok(Lycian::LetterKk), '𐊕' => Ok(Lycian::LetterR), '𐊖' => Ok(Lycian::LetterS), '𐊗' => Ok(Lycian::LetterT), '𐊘' => Ok(Lycian::LetterTt), '𐊙' => Ok(Lycian::LetterAn), '𐊚' => Ok(Lycian::LetterEn), '𐊛' => Ok(Lycian::LetterH), '𐊜' => Ok(Lycian::LetterX), _ => Err(()), } } } impl Into<u32> for Lycian { fn into(self) -> u32 { let c: char = self.into(); let hex = c .escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for Lycian { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for Lycian { type Item = Self; fn next(&mut self) -> Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl Lycian { /// The character with the lowest index in this unicode block pub fn new() -> Self { Lycian::LetterA } /// The character's name, in sentence case pub fn name(&self) -> String { let s = std::format!("Lycian{:#?}", self); string_morph::to_sentence_case(&s) } }
/// Declares a struct whose fields are read and written through generated
/// accessors that maintain a `has_changed` dirty flag.
///
/// `observable!(struct Foo { x: u32 = 0, y: bool = false, })` expands to a
/// struct `Foo` with the listed fields plus `has_changed: bool`, a `new()`
/// constructor that initializes each field from its `= expr` default (and
/// sets `has_changed` to `true`), and per-field accessors:
///
/// * `get_<field>()` — shared borrow, does not touch the flag
/// * `get_<field>_mut()` — mutable borrow; sets `has_changed`
/// * `set_<field>(val)` — overwrite; sets `has_changed`
///
/// NOTE(review): the accessor names are assembled with `interpolate_idents!`,
/// which comes from an external crate that must be in scope at the expansion
/// site — confirm it is still a dependency.
#[macro_export]
macro_rules! observable {
    (struct $name:ident { $($field:ident : $t:ty = $e:expr $(,)*)* }) => {
        struct $name {
            $( $field : $t, )*
            // Dirty flag maintained by the generated mutating accessors.
            has_changed: bool,
        }

        impl $name {
            pub fn new() -> $name {
                $name {
                    $( $field : $e, )*
                    has_changed: true,
                }
            }

            $(
                interpolate_idents! {
                    #[allow(dead_code)]
                    fn [get_ $field](&self) -> &$t {
                        &self.$field
                    }

                    #[allow(dead_code)]
                    fn [get_ $field _mut](&mut self) -> &mut $t {
                        // Handing out a mutable borrow is treated as a write.
                        self.has_changed = true;
                        &mut self.$field
                    }

                    #[allow(dead_code)]
                    fn [set_ $field](&mut self, val: $t) {
                        self.$field = val;
                        self.has_changed = true;
                    }
                }
            )*
        }
    }
}
use handlegraph::handle::NodeId; use rustc_hash::FxHashSet; use ash::version::DeviceV1_0; use ash::{vk, Device}; use anyhow::Result; use crate::geometry::Rect; use crate::universe::Node; use crate::vulkan::GfaestusVk; #[derive(Debug, Clone, Default)] pub struct NodeSelection { pub nodes: FxHashSet<NodeId>, } impl NodeSelection { pub fn clear(&mut self) { self.nodes.clear(); } pub fn union(&self, other: &NodeSelection) -> NodeSelection { let nodes = self.nodes.union(&other.nodes).copied().collect(); Self { nodes } } pub fn intersection(&self, other: &NodeSelection) -> NodeSelection { let nodes = self.nodes.intersection(&other.nodes).copied().collect(); Self { nodes } } pub fn difference(&self, other: &NodeSelection) -> NodeSelection { let nodes = self.nodes.difference(&other.nodes).copied().collect(); Self { nodes } } pub fn add_one(&mut self, clear: bool, node: NodeId) { if clear { self.nodes.clear(); } self.nodes.insert(node); } pub fn add_slice(&mut self, clear: bool, nodes: &[NodeId]) { if clear { self.nodes.clear(); } self.nodes.extend(nodes.iter().copied()); } pub fn remove_one(&mut self, clear: bool, node: NodeId) { if clear { self.nodes.clear(); } else { self.nodes.remove(&node); } } pub fn remove_slice(&mut self, clear: bool, nodes: &[NodeId]) { if clear { self.nodes.clear(); } else { for n in nodes { self.nodes.remove(n); } } } pub fn bounding_box(&self, node_positions: &[Node]) -> Rect { let mut bbox = Rect::default(); for id in self.nodes.iter() { let ix = (id.0 - 1) as usize; let node = node_positions[ix]; bbox = bbox.union(Rect::new(node.p0, node.p1)); } bbox } } pub struct SelectionBuffer { latest_selection: FxHashSet<NodeId>, pub buffer: vk::Buffer, memory: vk::DeviceMemory, pub size: vk::DeviceSize, } impl SelectionBuffer { pub fn new(app: &GfaestusVk, node_count: usize) -> Result<Self> { let size = ((node_count * std::mem::size_of::<u32>()) as u32) as vk::DeviceSize; let usage = vk::BufferUsageFlags::TRANSFER_DST | 
vk::BufferUsageFlags::TRANSFER_SRC | vk::BufferUsageFlags::STORAGE_BUFFER; let mem_props = vk::MemoryPropertyFlags::HOST_VISIBLE | vk::MemoryPropertyFlags::HOST_CACHED | vk::MemoryPropertyFlags::HOST_COHERENT; let (buffer, memory, _size) = app.create_buffer(size, usage, mem_props)?; app.set_debug_object_name(buffer, "Node Selection Flag Buffer")?; let latest_selection = FxHashSet::default(); Ok(Self { latest_selection, buffer, memory, size, }) } pub fn selection_set(&self) -> &FxHashSet<NodeId> { &self.latest_selection } /// fill `latest_selection` by reading from the buffer pub fn fill_selection_set(&mut self, device: &Device) -> Result<()> { let node_count = (self.size / 4) as usize; self.latest_selection.clear(); self.latest_selection.reserve(node_count); unsafe { let data_ptr = device.map_memory( self.memory, 0, self.size, vk::MemoryMapFlags::empty(), )?; let val_ptr = data_ptr as *const u32; let sel_slice = std::slice::from_raw_parts(val_ptr, node_count); self.latest_selection.extend( sel_slice.iter().enumerate().filter_map(|(ix, &val)| { let node_id = NodeId::from((ix + 1) as u64); if val == 1 { Some(node_id) } else { None } }), ); device.unmap_memory(self.memory); } self.latest_selection.shrink_to_fit(); Ok(()) } pub fn destroy(&mut self, device: &Device) { unsafe { device.destroy_buffer(self.buffer, None); device.free_memory(self.memory, None); } self.latest_selection.clear(); self.buffer = vk::Buffer::null(); self.memory = vk::DeviceMemory::null(); self.size = 0 as vk::DeviceSize; } pub fn clear(&mut self) { self.latest_selection.clear(); } pub fn clear_buffer(&mut self, device: &Device) -> Result<()> { unsafe { let data_ptr = device.map_memory( self.memory, 0, self.size, vk::MemoryMapFlags::empty(), )?; let val_ptr = data_ptr as *mut u32; std::ptr::write_bytes(val_ptr, 0u8, (self.size / 4) as usize); device.unmap_memory(self.memory); } Ok(()) } pub fn add_select_one( &mut self, device: &Device, node: NodeId, ) -> Result<()> { if 
self.latest_selection.insert(node) { unsafe { let data_ptr = device.map_memory( self.memory, 0, self.size, vk::MemoryMapFlags::empty(), )?; let val_ptr = data_ptr as *mut u32; let ix = (node.0 - 1) as usize; if ix >= (self.size / 4) as usize { panic!("attempted to select a node that does not exist"); } let val_ptr = val_ptr.add(ix); // let val_ptr = val_ptr.add(2); val_ptr.write(1); device.unmap_memory(self.memory); } } Ok(()) } pub fn write_latest_buffer(&mut self, device: &Device) -> Result<()> { unsafe { let data_ptr = device.map_memory( self.memory, 0, self.size, vk::MemoryMapFlags::empty(), )?; let val_ptr = data_ptr as *mut u32; for ix in 0..self.size { let node = NodeId::from((ix + 1) as u64); let val_ptr = val_ptr.add(1); if self.latest_selection.contains(&node) { val_ptr.write(1); } else { val_ptr.write(0); } } device.unmap_memory(self.memory); } Ok(()) } pub fn update_selection( &mut self, device: &Device, new_selection: &FxHashSet<NodeId>, ) -> Result<()> { let removed = self.latest_selection.difference(new_selection); let added = new_selection.difference(&self.latest_selection); unsafe { let data_ptr = device.map_memory( self.memory, 0, self.size, vk::MemoryMapFlags::empty(), )?; for &node in removed { let val_ptr = data_ptr as *mut u32; let ix = (node.0 - 1) as usize; if ix >= (self.size / 4) as usize { panic!("attempted to deselect a node that does not exist"); } let val_ptr = val_ptr.add(ix); val_ptr.write(0); } for &node in added { let val_ptr = data_ptr as *mut u32; let ix = (node.0 - 1) as usize; if ix >= (self.size / 4) as usize { panic!("attempted to select a node that does not exist"); } let val_ptr = val_ptr.add(ix); val_ptr.write(1); } device.unmap_memory(self.memory); } self.latest_selection.clone_from(new_selection); Ok(()) } }
/// PayloadUser represents the author or committer of a commit #[derive(Debug, Default, Clone, Serialize, Deserialize)] pub struct PayloadUser { pub email: Option<String>, /// Full name of the commit author pub name: Option<String>, pub username: Option<String>, } impl PayloadUser { /// Create a builder for this object. #[inline] pub fn builder() -> PayloadUserBuilder { PayloadUserBuilder { body: Default::default(), } } } impl Into<PayloadUser> for PayloadUserBuilder { fn into(self) -> PayloadUser { self.body } } /// Builder for [`PayloadUser`](./struct.PayloadUser.html) object. #[derive(Debug, Clone)] pub struct PayloadUserBuilder { body: self::PayloadUser, } impl PayloadUserBuilder { #[inline] pub fn email(mut self, value: impl Into<String>) -> Self { self.body.email = Some(value.into()); self } /// Full name of the commit author #[inline] pub fn name(mut self, value: impl Into<String>) -> Self { self.body.name = Some(value.into()); self } #[inline] pub fn username(mut self, value: impl Into<String>) -> Self { self.body.username = Some(value.into()); self } }
// Re-export the submodule's items so callers can reach them directly from
// this module's path.
mod another_submod1_file;
pub use another_submod1_file::*;

/// A tuple struct with two public numeric fields (a `u16` and a `u8`).
#[derive(Clone)]
pub struct MyTupleType(pub u16, pub u8);
use std::collections::{HashSet, HashMap};

/// Mean Reciprocal Rank (MRR) evaluation metric for ranked retrieval.
pub struct Mrr {}

impl Mrr {
    /// Calculates the Mean Reciprocal Rank of all queries provided.
    ///
    /// `results` maps each query id to its ranked list of retrieved document
    /// ids; `relevance_info` maps each query id to the set of documents that
    /// are relevant to it. The metric is averaged over the queries in
    /// `relevance_info`.
    ///
    /// Returns `0.0` for an empty `relevance_info` (the previous version
    /// divided 0.0 by 0.0 and returned NaN).
    ///
    /// # Panics
    /// Panics if a query in `relevance_info` is missing from `results`.
    pub fn calc(results: &HashMap<usize, Vec<usize>>, relevance_info: &HashMap<usize, HashSet<usize>>) -> f64 {
        if relevance_info.is_empty() {
            return 0.0;
        }
        let sum: f64 = relevance_info
            .iter()
            .map(|(query, docs)| {
                let retrieved = results
                    .get(query)
                    .expect("query present in relevance_info but missing from results");
                Mrr::calc_recrip_rank(retrieved, docs)
            })
            .sum();
        sum / relevance_info.len() as f64
    }

    /// Given a list of retrieved documents and the set of relevant ones,
    /// returns the reciprocal of the (1-based) rank of the first relevant
    /// document, or `0.0` if none of the retrieved documents are relevant.
    ///
    /// The parameter is `&[usize]` instead of `&Vec<usize>`; existing
    /// `&vec` call sites still compile via deref coercion.
    pub fn calc_recrip_rank(retrieved_docs: &[usize], relevant_docs: &HashSet<usize>) -> f64 {
        retrieved_docs
            .iter()
            .position(|doc| relevant_docs.contains(doc))
            .map(|ix| 1.0 / (ix as f64 + 1.0))
            .unwrap_or(0.0)
    }
}
use crate::nasdaq::gen; // use crate::nasdaq::gen::HasRecs; #[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] #[serde(rename_all = "camelCase")] pub struct OptionChainRoot { pub data: Data, pub message: ::serde_json::Value, pub status: gen::Status, } impl crate::HasRecs for OptionChainRoot { fn to_recs(&self) -> Vec<Vec<String>> { let mut recs = vec![]; for row in self.data.option_chain_list.rows.iter() { recs.append(&mut row.to_recs()) } return recs; } //pub fn get_id(&self) -> String { // return self.data.option_chain_list.rows[0] // .call // .symbol // .to_string() // .split_whitespace() // .next() // .expect("wtf option ticker") // .to_string(); //} } #[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] #[serde(rename_all = "camelCase")] pub struct Data { pub total_record: i64, pub last_trade: String, pub option_chain_list: OptionChainList, pub month_filter: ::serde_json::Value, } #[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] #[serde(rename_all = "camelCase")] pub struct OptionChainList { pub headers: ::serde_json::Value, pub rows: Vec<OptionRow>, } #[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] #[serde(rename_all = "camelCase")] pub struct OptionRow { pub call: Option<OptionData2>, pub put: Option<OptionData2>, } impl OptionRow { pub fn to_recs(&self) -> Vec<Vec<String>> { let mut recs: Vec<Vec<String>> = vec![]; if let Some(c) = &self.call { let call: Vec<String> = OptionData2::to_rec(&c); recs.push(call); } if let Some(p) = &self.put{ let put: Vec<String> = OptionData2::to_rec(&p); recs.push(put); } return recs; } } #[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] #[serde(rename_all = "camelCase")] pub struct OptionData2 { pub symbol: String, pub last: String, pub change: String, pub bid: String, pub ask: String, pub volume: 
String, pub openinterest: String, pub strike: String, pub expiry_date: String, pub colour: bool, } impl OptionData2 { pub fn to_rec(&self) -> Vec<String> { return vec![ self.symbol.to_string(), self.last.to_string().replace("--", ""), self.change.to_string().replace("--", ""), self.bid.to_string().replace("--", ""), self.ask.to_string().replace("--", ""), self.volume.to_string().replace("--", ""), self.openinterest.to_string().replace("--", ""), self.strike.to_string(), self.expiry_date.to_string(), self.colour.to_string(), ]; } } pub const NDAQ_OPTION_HEADER: [&'static str; 10] = [ "symbol", "last", "change", "bid", "ask", "volume", "openinterest", "strike", "expiry_date", "colour", ];
//! Integration tests for the libmdbx wrapper: basic CRUD, DUP_SORT tables,
//! nested/concurrent transactions, and table statistics.

use libmdbx::*;
use std::{
    borrow::Cow,
    io::Write,
    sync::{Arc, Barrier},
    thread::{self, JoinHandle},
};
use tempfile::tempdir;

type Database = libmdbx::Database<NoWriteMap>;

// Basic round-trip: put three pairs, read them back, delete one.
#[test]
fn test_put_get_del() {
    let dir = tempdir().unwrap();
    let db = Database::new().open(dir.path()).unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    let data = [(b"key1", b"val1"), (b"key2", b"val2"), (b"key3", b"val3")];
    for (k, v) in data {
        txn.put(&table, k, v, WriteFlags::empty()).unwrap();
    }
    txn.commit().unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    for (k, v) in data {
        // Repeated gets exercise read stability within one transaction.
        assert_eq!(txn.get(&table, k).unwrap(), Some(*v));
        assert_eq!(txn.get(&table, k).unwrap(), Some(*v));
        assert_eq!(txn.get(&table, k).unwrap(), Some(*v));
    }
    assert_eq!(txn.get::<()>(&table, b"key").unwrap(), None);

    txn.del(&table, b"key1", None).unwrap();
    assert_eq!(txn.get::<()>(&table, b"key1").unwrap(), None);
}

// DUP_SORT table: multiple values per key, targeted and whole-key deletes.
#[test]
fn test_put_get_del_multi() {
    let dir = tempdir().unwrap();
    let db = Database::new().open(dir.path()).unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.create_table(None, TableFlags::DUP_SORT).unwrap();
    for (k, v) in [
        (b"key1", b"val1"),
        (b"key1", b"val2"),
        (b"key1", b"val3"),
        (b"key2", b"val1"),
        (b"key2", b"val2"),
        (b"key2", b"val3"),
        (b"key3", b"val1"),
        (b"key3", b"val2"),
        (b"key3", b"val3"),
    ] {
        txn.put(&table, k, v, WriteFlags::empty()).unwrap();
    }
    txn.commit().unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    {
        let mut cur = txn.cursor(&table).unwrap();
        let iter = cur.iter_dup_of::<(), [u8; 4]>(b"key1");
        let vals = iter.map(|x| x.unwrap()).map(|(_, x)| x).collect::<Vec<_>>();
        assert_eq!(vals, vec![*b"val1", *b"val2", *b"val3"]);
    }
    txn.commit().unwrap();

    // `Some(value)` deletes one duplicate; `None` deletes every value of the key.
    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    for (k, v) in [(b"key1", Some(b"val2" as &[u8])), (b"key2", None)] {
        txn.del(&table, k, v).unwrap();
    }
    txn.commit().unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    {
        let mut cur = txn.cursor(&table).unwrap();
        let iter = cur.iter_dup_of::<(), [u8; 4]>(b"key1");
        let vals = iter.map(|x| x.unwrap()).map(|(_, x)| x).collect::<Vec<_>>();
        assert_eq!(vals, vec![*b"val1", *b"val3"]);
        let iter = cur.iter_dup_of::<(), ()>(b"key2");
        assert_eq!(0, iter.count());
    }
    txn.commit().unwrap();
}

// The empty byte string is a legal key and a legal value.
#[test]
fn test_put_get_del_empty_key() {
    let dir = tempdir().unwrap();
    let db = Database::new().open(dir.path()).unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.create_table(None, Default::default()).unwrap();
    txn.put(&table, b"", b"hello", WriteFlags::empty()).unwrap();
    assert_eq!(txn.get(&table, b"").unwrap(), Some(*b"hello"));
    txn.commit().unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    assert_eq!(txn.get(&table, b"").unwrap(), Some(*b"hello"));
    txn.put(&table, b"", b"", WriteFlags::empty()).unwrap();
    assert_eq!(txn.get(&table, b"").unwrap(), Some(*b""));
}

// `reserve` hands back a writable slot that is filled before commit.
#[test]
fn test_reserve() {
    let dir = tempdir().unwrap();
    let db = Database::new().open(dir.path()).unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    {
        let mut writer = txn
            .reserve(&table, b"key1", 4, WriteFlags::empty())
            .unwrap();
        writer.write_all(b"val1").unwrap();
    }
    txn.commit().unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    assert_eq!(txn.get(&table, b"key1").unwrap(), Some(*b"val1"));
    assert_eq!(txn.get::<()>(&table, b"key").unwrap(), None);
    txn.del(&table, b"key1", None).unwrap();
    assert_eq!(txn.get::<()>(&table, b"key1").unwrap(), None);
}

// A nested transaction that is dropped (not committed) must roll back its
// writes while the parent's writes survive.
#[test]
fn test_nested_txn() {
    let dir = tempdir().unwrap();
    let db = Database::new().open(dir.path()).unwrap();

    let mut txn = db.begin_rw_txn().unwrap();
    txn.put(
        &txn.open_table(None).unwrap(),
        b"key1",
        b"val1",
        WriteFlags::empty(),
    )
    .unwrap();

    {
        let nested = txn.begin_nested_txn().unwrap();
        let table = nested.open_table(None).unwrap();
        nested
            .put(&table, b"key2", b"val2", WriteFlags::empty())
            .unwrap();
        assert_eq!(nested.get(&table, b"key1").unwrap(), Some(*b"val1"));
        assert_eq!(nested.get(&table, b"key2").unwrap(), Some(*b"val2"));
    }

    let table = txn.open_table(None).unwrap();
    assert_eq!(txn.get(&table, b"key1").unwrap(), Some(*b"val1"));
    assert_eq!(txn.get::<()>(&table, b"key2").unwrap(), None);
}

// Clearing a table removes its entries but keeps the table itself.
#[test]
fn test_clear_table() {
    let dir = tempdir().unwrap();
    let db = Database::new().open(dir.path()).unwrap();

    {
        let txn = db.begin_rw_txn().unwrap();
        txn.put(
            &txn.open_table(None).unwrap(),
            b"key",
            b"val",
            WriteFlags::empty(),
        )
        .unwrap();
        // `commit` returns a "latency recorded" flag here; false is expected.
        assert!(!txn.commit().unwrap());
    }

    {
        let txn = db.begin_rw_txn().unwrap();
        txn.clear_table(&txn.open_table(None).unwrap()).unwrap();
        assert!(!txn.commit().unwrap());
    }

    let txn = db.begin_ro_txn().unwrap();
    assert_eq!(
        txn.get::<()>(&txn.open_table(None).unwrap(), b"key")
            .unwrap(),
        None
    );
}

// Dropping a named table removes it persistently across database reopens.
#[test]
fn test_drop_table() {
    let dir = tempdir().unwrap();
    {
        let db = Database::new().set_max_tables(2).open(dir.path()).unwrap();
        {
            let txn = db.begin_rw_txn().unwrap();
            txn.put(
                &txn.create_table(Some("test"), TableFlags::empty()).unwrap(),
                b"key",
                b"val",
                WriteFlags::empty(),
            )
            .unwrap();
            // Workaround for MDBX dbi drop issue
            txn.create_table(Some("canary"), TableFlags::empty())
                .unwrap();
            assert!(!txn.commit().unwrap());
        }
        {
            let txn = db.begin_rw_txn().unwrap();
            let table = txn.open_table(Some("test")).unwrap();
            unsafe {
                txn.drop_table(table).unwrap();
            }
            assert!(matches!(
                txn.open_table(Some("test")).unwrap_err(),
                Error::NotFound
            ));
            assert!(!txn.commit().unwrap());
        }
    }

    let db = Database::new().set_max_tables(2).open(dir.path()).unwrap();
    let txn = db.begin_ro_txn().unwrap();
    txn.open_table(Some("canary")).unwrap();
    assert!(matches!(
        txn.open_table(Some("test")).unwrap_err(),
        Error::NotFound
    ));
}

// Readers started before a write see the old state; after the writer commits
// (synchronized via a barrier) fresh read transactions see the new value.
#[test]
fn test_concurrent_readers_single_writer() {
    let dir = tempdir().unwrap();
    let db: Arc<Database> = Arc::new(Database::new().open(dir.path()).unwrap());

    let n = 10usize; // Number of concurrent readers
    let barrier = Arc::new(Barrier::new(n + 1));
    let mut threads: Vec<JoinHandle<bool>> = Vec::with_capacity(n);

    let key = b"key";
    let val = b"val";

    for _ in 0..n {
        let reader_db = db.clone();
        let reader_barrier = barrier.clone();

        threads.push(thread::spawn(move || {
            {
                let txn = reader_db.begin_ro_txn().unwrap();
                let table = txn.open_table(None).unwrap();
                assert_eq!(txn.get::<()>(&table, key).unwrap(), None);
            }
            reader_barrier.wait();
            reader_barrier.wait();
            {
                let txn = reader_db.begin_ro_txn().unwrap();
                let table = txn.open_table(None).unwrap();
                txn.get::<[u8; 3]>(&table, key).unwrap().unwrap() == *val
            }
        }));
    }

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    println!("wait2");
    barrier.wait();
    txn.put(&table, key, val, WriteFlags::empty()).unwrap();
    txn.commit().unwrap();
    println!("wait1");
    barrier.wait();

    assert!(threads.into_iter().all(|b| b.join().unwrap()))
}

// Each writer thread commits its own transaction; all writes must land.
#[test]
fn test_concurrent_writers() {
    let dir = tempdir().unwrap();
    let db = Arc::new(Database::new().open(dir.path()).unwrap());

    let n = 10usize; // Number of concurrent writers
    let mut threads: Vec<JoinHandle<bool>> = Vec::with_capacity(n);

    let key = "key";
    let val = "val";

    for i in 0..n {
        let writer_db = db.clone();

        threads.push(thread::spawn(move || {
            let txn = writer_db.begin_rw_txn().unwrap();
            let table = txn.open_table(None).unwrap();
            txn.put(
                &table,
                format!("{key}{i}"),
                format!("{val}{i}"),
                WriteFlags::empty(),
            )
            .unwrap();
            txn.commit().is_ok()
        }));
    }

    assert!(threads.into_iter().all(|b| b.join().unwrap()));

    let txn = db.begin_ro_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    for i in 0..n {
        assert_eq!(
            Cow::<Vec<u8>>::Owned(format!("{val}{i}").into_bytes()),
            txn.get(&table, format!("{key}{i}").as_bytes())
                .unwrap()
                .unwrap()
        );
    }
}

// Entry counts reported by `table_stat` track puts and deletes.
#[test]
fn test_stat() {
    let dir = tempdir().unwrap();
    let db = Database::new().open(dir.path()).unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.create_table(None, TableFlags::empty()).unwrap();
    for (k, v) in [(b"key1", b"val1"), (b"key2", b"val2"), (b"key3", b"val3")] {
        txn.put(&table, k, v, WriteFlags::empty()).unwrap();
    }
    txn.commit().unwrap();

    {
        let txn = db.begin_ro_txn().unwrap();
        let table = txn.open_table(None).unwrap();
        let stat = txn.table_stat(&table).unwrap();
        assert_eq!(stat.entries(), 3);
    }

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    for k in [b"key1", b"key2"] {
        txn.del(&table, k, None).unwrap();
    }
    txn.commit().unwrap();

    {
        let txn = db.begin_ro_txn().unwrap();
        let table = txn.open_table(None).unwrap();
        let stat = txn.table_stat(&table).unwrap();
        assert_eq!(stat.entries(), 1);
    }

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    for (k, v) in [(b"key4", b"val4"), (b"key5", b"val5"), (b"key6", b"val6")] {
        txn.put(&table, k, v, WriteFlags::empty()).unwrap();
    }
    txn.commit().unwrap();

    {
        let txn = db.begin_ro_txn().unwrap();
        let table = txn.open_table(None).unwrap();
        let stat = txn.table_stat(&table).unwrap();
        assert_eq!(stat.entries(), 4);
    }
}

// Same as test_stat, but on a DUP_SORT table: every duplicate value counts
// as one entry.
#[test]
fn test_stat_dupsort() {
    let dir = tempdir().unwrap();
    let db = Database::new().open(dir.path()).unwrap();

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.create_table(None, TableFlags::DUP_SORT).unwrap();
    for (k, v) in [
        (b"key1", b"val1"),
        (b"key1", b"val2"),
        (b"key1", b"val3"),
        (b"key2", b"val1"),
        (b"key2", b"val2"),
        (b"key2", b"val3"),
        (b"key3", b"val1"),
        (b"key3", b"val2"),
        (b"key3", b"val3"),
    ] {
        txn.put(&table, k, v, WriteFlags::empty()).unwrap();
    }
    txn.commit().unwrap();

    {
        let txn = db.begin_ro_txn().unwrap();
        let stat = txn.table_stat(&txn.open_table(None).unwrap()).unwrap();
        assert_eq!(stat.entries(), 9);
    }

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    for (k, v) in [(b"key1", Some(b"val2" as &[u8])), (b"key2", None)] {
        txn.del(&table, k, v).unwrap();
    }
    txn.commit().unwrap();

    {
        let txn = db.begin_ro_txn().unwrap();
        let stat = txn.table_stat(&txn.open_table(None).unwrap()).unwrap();
        assert_eq!(stat.entries(), 5);
    }

    let txn = db.begin_rw_txn().unwrap();
    let table = txn.open_table(None).unwrap();
    for (k, v) in [(b"key4", b"val1"), (b"key4", b"val2"), (b"key4", b"val3")] {
        txn.put(&table, k, v, WriteFlags::empty()).unwrap();
    }
    txn.commit().unwrap();

    {
        let txn = db.begin_ro_txn().unwrap();
        let stat = txn.table_stat(&txn.open_table(None).unwrap()).unwrap();
        assert_eq!(stat.entries(), 8);
    }
}
// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::net::IpAddr; use std::str; use libc; use error::{Error, Result}; use hcore::util::sys; static LOGKEY: &'static str = "SY"; pub fn ip() -> Result<IpAddr> { match sys::ip() { Ok(s) => Ok(s), Err(e) => Err(sup_error!(Error::HabitatCore(e))), } } extern "C" { pub fn gethostname(name: *mut libc::c_char, size: libc::size_t) -> libc::c_int; } #[cfg(any(target_os="linux", target_os="macos"))] pub fn hostname() -> Result<String> { use std::ffi::CStr; let len = 255; let mut buf = Vec::<u8>::with_capacity(len); let ptr = buf.as_mut_slice().as_mut_ptr(); let err = unsafe { gethostname(ptr as *mut libc::c_char, len as libc::size_t) }; match err { 0 => { let slice = unsafe { CStr::from_ptr(ptr as *const i8) }; let s = try!(slice.to_str()); Ok(s.to_string()) } n => { error!("gethostname failure: {}", n); Err(sup_error!(Error::IPFailed)) } } } #[cfg(target_os = "windows")] pub fn hostname() -> Result<String> { use std::env; match env::var("COMPUTERNAME") { Ok(computername) => Ok(computername), Err(_) => Err(sup_error!(Error::IPFailed)), } } pub fn to_toml() -> Result<String> { let mut toml_string = String::from("[sys]\n"); let ip = try!(ip()).to_string(); toml_string.push_str(&format!("ip = \"{}\"\n", ip)); let hostname = try!(hostname()); toml_string.push_str(&format!("hostname = \"{}\"\n", hostname)); debug!("Sys Toml: {}", toml_string); 
Ok(toml_string) }
//! Core state machine for a game of Piquet: deal phases, declarations
//! (point / sequence / set), trick play, and scoring.

use crate::cards::*;
use std::fmt;
use serde::{Serialize, Deserialize};
use rand_core::SeedableRng;
use rand::Rng;
use crate::combinations::*;

/// Which of the six deals (partie subdivisions) is in progress.
#[derive (Debug, PartialEq, Eq, Serialize, Deserialize)]
enum Deal { One, Two, Three, Four, Five, Six }

/// Every phase of a deal, in play order; `succ` walks this sequence.
#[derive (Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
enum Step
    { Start
    , Deal
    , ExchangeElder
    , ExchangeYounger
    , DeclarePointElder
    , DeclarePointResponse
    , SetPointsPointElder
    , DeclareSequenceElder
    , DeclareSequenceResponse
    , SetPointsSequenceElder
    , DeclareSetElder
    , DeclareSetResponse
    , SetPointsSetElder
    , PlayFirstCard
    , SetPointsPointYounger
    , SetPointsSequenceYounger
    , SetPointsSetYounger
    , PlayCards
    , PlayEnd
    , End
    }

impl Step {
    /// The next phase in the fixed deal order, or `None` after `End`.
    pub fn succ(&self) -> Option<Self> {
        use Step::*;
        match self
            { Start => Some(Deal)
            , Deal => Some(ExchangeElder)
            , ExchangeElder => Some(ExchangeYounger)
            , ExchangeYounger => Some(DeclarePointElder)
            , DeclarePointElder => Some(DeclarePointResponse)
            , DeclarePointResponse => Some(SetPointsPointElder)
            , SetPointsPointElder => Some(DeclareSequenceElder)
            , DeclareSequenceElder => Some(DeclareSequenceResponse)
            , DeclareSequenceResponse => Some(SetPointsSequenceElder)
            , SetPointsSequenceElder => Some(DeclareSetElder)
            , DeclareSetElder => Some(DeclareSetResponse)
            , DeclareSetResponse => Some(SetPointsSetElder)
            , SetPointsSetElder => Some(PlayFirstCard)
            , PlayFirstCard => Some(SetPointsPointYounger)
            , SetPointsPointYounger => Some(SetPointsSequenceYounger)
            , SetPointsSequenceYounger => Some(SetPointsSetYounger)
            , SetPointsSetYounger => Some(PlayCards)
            , PlayCards => Some(PlayEnd)
            , PlayEnd => Some(End)
            , End => None
            }
    }
}

/// A move tagged with which player made it.
#[derive (Debug, PartialEq, Eq, Serialize, Deserialize)]
enum Move { P1Move(PlayerMove), P2Move(PlayerMove) }

/// Reply to an opponent's declaration: concede, contest, or declare a tie.
#[derive (Debug, PartialEq, Eq, Serialize, Deserialize)]
enum DeclarationResponse { Good, NotGood, Equals }

/// A declared combination (point, sequence, or set).
#[derive (Debug, PartialEq, Eq, Serialize, Deserialize)]
struct Declaration(Combination);

/// Every action a player can take, across all phases of a deal.
#[derive (Debug, PartialEq, Eq, Serialize, Deserialize)]
enum PlayerMove
    { CarteBlanche
    , CarteRouge
    , Exchange(Hand)
    , DeclarationCount(CombinationType, u32)
    , DeclarationUpper(CombinationType, Rank)
    , PlayerResponse(CombinationType, DeclarationResponse)
    , Declaration(Combination)
    , Repique
    , PlayFirst(Card)
    , Pique
    , WinAsSecond
    , WinLastTrick
    , PlayCard(Card)
    , WinCards
    , Capot
    }

impl PlayerMove {
    /// The score value of this move (declarations delegate to the
    /// combination's own value; moves that score nothing return 0).
    pub fn movePoints(&self) -> usize {
        use PlayerMove::*;
        match self {
            CarteBlanche => 10,
            CarteRouge => 20,
            Pique => 30,
            Repique => 60,
            WinCards => 10,
            Capot => 40,
            PlayFirst(_) => 1,
            WinAsSecond => 1,
            WinLastTrick => 1,
            Declaration(comb) => comb.points(),
            _ => 0
        }
    }
}

/// Everything that can go wrong handling a player command.
#[derive (Debug, PartialEq, Eq, Serialize, Deserialize)]
enum PiquetError
    { NotYourTurnError
    , InvalidForStepError(Step)
    , InvalidCombination
    , CardNotInHand
    , AlreadyConnectedError
    , NotConnectedError
    , UnknownCommand
    }

/// Per-player state for the current deal and the overall game.
#[derive (Debug, Serialize, Deserialize)]
struct Player
    { hand: Hand
    , isElder: bool
    // Cards still to be played before the "carte rouge" bonus is settled.
    , leftUntilCarteRouge: Hand
    , cardPlayed: Option<Card>
    // Candidate combinations kept until the declaration exchange resolves.
    , pointCandidate: Option<Combination>
    , sequenceCandidate: Option<Combination>
    , setCandidate: Option<Combination>
    , dealPoints: u32
    , dealWons: u32
    , gamePoints: u32
    , points: u32
    , name: String
    }

impl Player {
    /// A fresh player with an empty hand and zeroed scores.
    pub fn new(name: String) -> Self {
        Player
            { hand: Hand::empty_hand()
            , isElder: false
            , leftUntilCarteRouge: Hand::empty_hand()
            , cardPlayed: None
            , pointCandidate: None
            , sequenceCandidate: None
            , setCandidate: None
            , dealPoints: 0
            , dealWons: 0
            , gamePoints: 0
            , points: 0
            , name
            }
    }
}

impl fmt::Display for Player {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} : {} rougeLeft={} : {}", self.name, self.dealPoints, self.leftUntilCarteRouge.len(), self.hand)
    }
}

/// Who won a declaration comparison.
#[derive (Debug, PartialEq, Eq)]
enum DeclarationWinner { Elder, Younger, Tie, Nobody }

/// Full game state: RNG, deck, both players, current phase, and the
/// outcomes of the three declaration rounds.
#[derive (Debug)]
pub struct Game
    { rng: rand_xorshift::XorShiftRng
    , dealNum : Deal
    , dealMoves : Vec<(Move, u32)>
    , deals : Vec<(Deal, Vec<(Move, u32)>)>
    , deck : Deck
    , visible : Deck
    , step : Step
    , player1 : Player
    , player2 : Player
//    , player1SendPortId : Option<SendPortId>
//    , player2SendPortId : Option<SendPortId>
    , isElderToPlay : bool
    , pointWinner : DeclarationWinner
    , pointCombination : Option<Combination>
    , sequenceWinner : DeclarationWinner
    , sequenceCombination : Option<Combination>
    , setWinner : DeclarationWinner
    , setCombination : Option<Combination>
    }

impl Game {
    /// Creates a game with a deterministic, seeded RNG and a shuffled deck.
    pub fn new(seed:[u8; 16]) -> Self {
        let mut deck = Deck::new();
        let mut rng = rand_xorshift::XorShiftRng::from_seed(seed);
        deck.shuffle(&mut rng);
        Game
            { rng
            , dealNum: Deal::One
            , dealMoves: vec![]
            , deals: vec![]
            , deck
            , visible: Deck::empty_deck()
            , step: Step::Start
            , player1: Player::new("Roméo".to_string())
            , player2: Player::new("Juliette".to_string())
//            , player1SendPortId: None
//            , player2SendPortId: None
            , isElderToPlay: true
            , pointWinner: DeclarationWinner::Nobody
            , pointCombination: None
            , sequenceWinner: DeclarationWinner::Nobody
            , sequenceCombination: None
            , setWinner: DeclarationWinner::Nobody
            , setCombination: None
            }
    }

    /// Randomly assigns elder to one player; the other becomes younger.
    pub fn choose_elder(&mut self){
        self.player1.isElder = self.rng.gen();
        self.player2.isElder = !self.player1.isElder;
    }

    /// Resets per-deal player state and advances past the Deal step.
    ///
    /// NOTE(review): the real dealing (`draw_hands`) is commented out and
    /// placeholder empty hands are used — looks like work in progress.
    /// Also, `hands[0]` moves out of a `Vec` by index, which only compiles
    /// if `Hand` is `Copy` — confirm.
    pub fn deal<'hands>(&mut self){
        // let hands = self.deck.draw_hands(12, 2);
        let hands = vec![Hand::new(vec![]), Hand::new(vec![])];

        self.player1.hand = hands[0];
        self.player1.leftUntilCarteRouge = self.player1.hand.clone();
        self.player1.pointCandidate = None;
        self.player1.sequenceCandidate = None;
        self.player1.setCandidate = None;

        self.player2.hand = hands[1];
        self.player2.leftUntilCarteRouge = self.player2.hand.clone();
        self.player2.pointCandidate = None;
        self.player2.sequenceCandidate = None;
        self.player2.setCandidate = None;

        self.dealMoves = vec![];
        self.step = Step::Deal.succ().expect("No more step");
    }
}
use std::cmp::PartialEq;

/// Keys stored in the tree: totally comparable and cheap to copy.
pub trait NodeKey: PartialOrd + PartialEq + Copy {}
impl<T: PartialOrd + PartialEq + Copy> NodeKey for T {}

/// An unbalanced binary search tree.
///
/// NOTE: all three iterators ([`IntoIter`], [`Iter`], [`IterMut`]) only walk
/// the *right spine* of the tree (root, root.right, root.right.right, ...);
/// left subtrees are skipped. A full in-order traversal is not implemented
/// yet, and the tests below rely on this behaviour.
#[derive(Debug)]
pub struct Bst<T: NodeKey> {
    root: Link<T>,
}

type Link<T> = Option<Box<Node<T>>>;

#[derive(Debug)]
struct Node<T: NodeKey> {
    elem: T,
    left: Link<T>,
    right: Link<T>,
}

/// Owning right-spine iterator (see [`Bst`] for the traversal caveat).
pub struct IntoIter<T: NodeKey>(Bst<T>);

/// Borrowing right-spine iterator (see [`Bst`] for the traversal caveat).
pub struct Iter<'a, T: 'a + NodeKey> {
    next: Option<&'a Node<T>>,
}

/// Mutably borrowing right-spine iterator (see [`Bst`] for the traversal caveat).
pub struct IterMut<'a, T: 'a + NodeKey> {
    next: Option<&'a mut Node<T>>,
}

impl<T: NodeKey> Iterator for IntoIter<T> {
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        // Pop the current root, keep only its right child; the left subtree
        // is dropped (right-spine traversal, see `Bst`).
        self.0.root.take().map(|node| {
            let node = *node;
            self.0.root = node.right;
            node.elem
        })
    }
}

impl<'a, T: NodeKey> Iterator for Iter<'a, T> {
    type Item = &'a T;
    fn next(&mut self) -> Option<Self::Item> {
        self.next.map(|node| {
            self.next = node.right.as_ref().map(|node| &**node);
            &node.elem
        })
    }
}

impl<'a, T: NodeKey> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;
    fn next(&mut self) -> Option<Self::Item> {
        self.next.take().map(|node| {
            self.next = node.right.as_mut().map(|node| &mut **node);
            &mut node.elem
        })
    }
}

impl<T: NodeKey> IntoIterator for Bst<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;
    fn into_iter(self) -> Self::IntoIter {
        // Construct directly instead of relying on the inherent `into_iter`
        // shadowing this trait method.
        IntoIter(self)
    }
}

impl<'a, T: NodeKey> IntoIterator for &'a Bst<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T: NodeKey> IntoIterator for &'a mut Bst<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<T: NodeKey> Bst<T> {
    /// Creates an empty tree.
    pub fn new() -> Self {
        Bst { root: None }
    }

    /// Inserts `elem`. Returns `false` (and leaves the tree unchanged) when
    /// the element is already present.
    pub fn insert(&mut self, elem: T) -> bool {
        match self.root {
            Some(ref mut node) => {
                if node.search(elem) {
                    false
                } else {
                    node.insert(elem)
                }
            }
            None => {
                self.root = Some(Node::leaf(elem));
                true
            }
        }
    }

    /// Returns `true` if `elem` is in the tree.
    ///
    /// Fixed to take `&self`: lookup never mutates the tree, and requiring
    /// `&mut self` prevented searching through shared references. Method-call
    /// syntax on existing callers is unaffected.
    pub fn search(&self, elem: T) -> bool {
        self.root.as_ref().map_or(false, |node| node.search(elem))
    }

    /// Consumes the tree into an owning iterator (right spine only).
    pub fn into_iter(self) -> IntoIter<T> {
        IntoIter(self)
    }

    /// Borrowing iterator (right spine only).
    pub fn iter(&self) -> Iter<T> {
        Iter {
            next: self.root.as_ref().map(|node| &**node),
        }
    }

    /// Mutably borrowing iterator (right spine only).
    pub fn iter_mut(&mut self) -> IterMut<T> {
        IterMut {
            next: self.root.as_mut().map(|node| &mut **node),
        }
    }
}

impl<T: NodeKey> Node<T> {
    /// A boxed leaf node holding `elem`.
    fn leaf(elem: T) -> Box<Node<T>> {
        Box::new(Node {
            elem,
            left: None,
            right: None,
        })
    }

    /// BST lookup (non-mutating, matching `Bst::search`).
    pub fn search(&self, elem: T) -> bool {
        if self.elem == elem {
            true
        } else if self.elem < elem {
            self.right.as_ref().map_or(false, |node| node.search(elem))
        } else {
            self.left.as_ref().map_or(false, |node| node.search(elem))
        }
    }

    /// Inserts `elem` below this node. Duplicate detection is handled by
    /// `Bst::insert`; called directly, an equal element would go left.
    pub fn insert(&mut self, elem: T) -> bool {
        let child = if self.elem < elem {
            &mut self.right
        } else {
            &mut self.left
        };
        match child {
            Some(node) => node.insert(elem),
            None => {
                *child = Some(Node::leaf(elem));
                true
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::Bst;

    #[test]
    fn basics() {
        let mut bst = Bst::new();
        assert_eq!(bst.search(5), false);
        bst.insert(5);
        bst.insert(3);
        bst.insert(7);
        println!("{:?}", bst);
        assert_eq!(bst.search(5), true);
    }

    #[test]
    fn gen_test() {
        let mut bst = Bst::new();
        bst.insert(1.1);
        bst.insert(1.2);
        bst.insert(1.3);
        assert_eq!(bst.search(1.2), true);
    }

    #[test]
    fn into_iter() {
        let mut bst = Bst::new();
        bst.insert(5);
        bst.insert(3);
        bst.insert(7);
        bst.insert(9);
        bst.insert(11);
        let mut iter = bst.into_iter();
        assert_eq!(iter.next(), Some(5));
        assert_eq!(iter.next(), Some(7));
        assert_eq!(iter.next(), Some(9));
    }

    #[test]
    fn into_iterator() {
        let mut bst = Bst::new();
        bst.insert(5);
        bst.insert(3);
        bst.insert(7);
        bst.insert(9);
        bst.insert(11);
        for elt in bst {
            println!("{}", elt);
        }
    }

    #[test]
    fn iter() {
        let mut bst = Bst::new();
        bst.insert(5);
        bst.insert(3);
        bst.insert(7);
        bst.insert(9);
        bst.insert(11);
        let mut iter = bst.iter();
        assert_eq!(iter.next(), Some(&5));
        assert_eq!(iter.next(), Some(&7));
        assert_eq!(iter.next(), Some(&9));
    }

    #[test]
    fn into_iterator_ref() {
        let mut bst = Bst::new();
        bst.insert(5);
        bst.insert(3);
        bst.insert(7);
        bst.insert(9);
        bst.insert(11);
        for elt in &bst {
            println!("{}", elt);
        }
    }

    #[test]
    fn iter_mut() {
        let mut bst = Bst::new();
        bst.insert(5);
        bst.insert(3);
        bst.insert(7);
        bst.insert(9);
        bst.insert(11);
        let mut iter = bst.iter_mut();
        assert_eq!(iter.next(), Some(&mut 5));
        assert_eq!(iter.next(), Some(&mut 7));
        assert_eq!(iter.next(), Some(&mut 9));
    }

    #[test]
    fn iter_mut_2() {
        let mut bst = Bst::new();
        bst.insert(5);
        bst.insert(3);
        bst.insert(7);
        bst.insert(9);
        bst.insert(11);
        for elt in &mut bst {
            println!("{}", elt);
        }
    }
}
//! The VAPIX application interface at `/axis-cgi/applications/*`.
use crate::*;

mod enums;
pub use enums::*;

/// A device's application management API.
///
/// Constructed via [`Applications::new`], which probes the device's
/// parameter list; a device without an embedded-development platform yields
/// `None` instead of an `Applications` value.
pub struct Applications<'a, T: Transport> {
    device: &'a Client<T>,
    // Kept for the platform-presence check; not read afterwards.
    _embedded_development_version: String,
    firmware_version: Option<String>,
    soc: Option<SOC>,
    architecture: Option<Architecture>,
}

impl<'a, T: Transport> Applications<'a, T> {
    /// Probes `device` for application support by listing a handful of
    /// `Properties.*` parameters.
    ///
    /// Returns `Ok(None)` when `Properties.EmbeddedDevelopment.Version` is
    /// absent, i.e. the device has no application platform. SOC and
    /// architecture are parsed leniently: unknown values become `None`.
    pub(crate) async fn new(device: &'a Client<T>) -> Result<Option<Applications<'a, T>>> {
        let mut params = device
            .parameters()
            .list(Some(
                &[
                    "Properties.Firmware.Version",
                    "Properties.EmbeddedDevelopment.Version",
                    "Properties.System.Soc",
                    "Properties.System.Architecture",
                ][..],
            ))
            .await?;

        // If we don't have an embedded development version, we don't have a platform
        let embedded_development_version = params.remove("Properties.EmbeddedDevelopment.Version");
        let embedded_development_version = match embedded_development_version {
            Some(version) => version,
            None => return Ok(None),
        };

        let firmware_version = params.remove("Properties.Firmware.Version");

        let soc = params
            .remove("Properties.System.Soc")
            .and_then(|s| SOC::from_param(&s));

        let architecture = params
            .remove("Properties.System.Architecture")
            .and_then(|s| Architecture::from_param(&s));

        Ok(Some(Self {
            device,
            _embedded_development_version: embedded_development_version,
            firmware_version,
            soc,
            architecture,
        }))
    }

    /// The device's architecture, if known.
    pub fn architecture(&self) -> Option<Architecture> {
        self.architecture
    }

    /// The device's SOC, if known.
    pub fn soc(&self) -> Option<SOC> {
        self.soc
    }

    /// The device's firmware version, if known.
    pub fn firmware_version(&self) -> Option<&str> {
        self.firmware_version.as_ref().map(|s| s.as_ref())
    }

    /// Upload an application package to the device.
    ///
    /// Posts `application_package_data` as `multipart/form-data` to
    /// `/axis-cgi/applications/upload.cgi`; the endpoint answers a plain-text
    /// body starting with `OK` on success.
    pub async fn upload(&self, application_package_data: &[u8]) -> Result<()> {
        // Hand-rolled multipart body with a fixed boundary.
        // NOTE(review): the form field name "packfil" looks like a typo but is
        // presumably what the endpoint expects — confirm against the VAPIX
        // application API docs before changing.
        let mut request_body = b"--fileboundary\r\n\
              Content-Disposition: form-data; name=\"packfil\"; filename=\"application.eap\"\r\n\
              Content-Type: application/octet-stream\r\n\
              \r\n"
            .to_vec();
        request_body.extend_from_slice(application_package_data);
        request_body.extend_from_slice(b"\r\n--fileboundary--\r\n\r\n");

        let req = http::Request::builder()
            .method(http::Method::POST)
            .uri(
                self.device
                    .uri_for("/axis-cgi/applications/upload.cgi")
                    .unwrap(),
            )
            .header(
                http::header::CONTENT_TYPE,
                "multipart/form-data; boundary=fileboundary",
            )
            .header(
                http::header::CONTENT_LENGTH,
                format!("{}", request_body.len()),
            )
            .body(request_body)
            .unwrap();

        let (_resp, resp_body) = self.device.roundtrip(req, "text/plain").await?;

        let resp_body =
            std::str::from_utf8(resp_body.as_slice()).map_err(|_| Error::Other("invalid UTF-8"))?;

        if resp_body.starts_with("OK") {
            Ok(())
        } else {
            // TODO: smuggle out the error value
            Err(Error::Other("application upload failed"))
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::v3::application::{Architecture, SOC};

    #[test]
    fn new() {
        crate::test_with_devices(|test_device| async move {
            let applications = test_device.client.applications().await?;
            let architecture = applications.as_ref().and_then(|a| a.architecture());
            let soc = applications.as_ref().and_then(|a| a.soc());

            // Keyed on Brand.ProdShortName:
            match test_device.device_info.model.as_ref() {
                "AXIS Companion Bullet LE" => {
                    assert!(applications.is_some());
                    assert_eq!(architecture, Some(Architecture::Mips));
                    assert_eq!(soc, Some(SOC::Artpec5));
                }
                "AXIS P5512" => {
                    assert!(applications.is_some());
                    assert_eq!(architecture, Some(Architecture::CrisV32));
                    assert_eq!(soc, None); // actually ARTPEC-3, but the API doesn't say
                }
                // Hello! If you're here, it's probably because you're trying to record a new device
                // model. This test requires additional information and human judgement. Please
                // determine the ground truth about your device:
                //   * https://www.axis.com/en-us/developer-community/product-interface-guide
                //   * `axctl shell`:
                //     * head /proc/cpuinfo
                //     * find /lib/modules -name '*.ko'
                //     * cat /etc/opkg/arch.conf
                //
                // Then check what the API says:
                //   * http://…/axis-cgi/param.cgi?action=list&group=Properties.System,Brand.ProdShortName
                //
                // ...and add the right test above.
                other => panic!(
                    "device model {:?} has no expectations, please update the test",
                    other
                ),
            };
            Ok(())
        })
    }
}
use ark_bls12_381::{Bls12_381, Fr}; use ark_poly_commit::kzg10::Powers; use super::{ proof::LookUpProof, table::{LookUpTable, PreProcessedTable}, }; use crate::{multiset::MultiSet, transcript::TranscriptProtocol}; pub struct LookUp<T: LookUpTable> { table: T, // This is the set of values which we want to prove is a subset of the // table values. This may or may not be equal to the whole witness. left_wires: MultiSet, right_wires: MultiSet, output_wires: MultiSet, } impl<T: LookUpTable> LookUp<T> { pub fn new(table: T) -> LookUp<T> { LookUp { table: table, left_wires: MultiSet::new(), right_wires: MultiSet::new(), output_wires: MultiSet::new(), } } // First reads a value from the underlying table // Then we add the key and value to their respective multisets // Returns true if the value existed in the table pub fn read(&mut self, key: &(Fr, Fr)) -> bool { let option_output = self.table.read(key); if option_output.is_none() { return false; } let output = *option_output.unwrap(); // Add (input, output) combination into the corresponding multisets self.left_wires.push(key.0); self.right_wires.push(key.1); self.output_wires.push(output); return true; } /// Creates a proof that the values (f_1, f_2, f_3) are within the table of values (t_1, t_2,t_3) pub fn prove( &mut self, proving_key: &Powers<Bls12_381>, preprocessed_table: &PreProcessedTable, transcript: &mut dyn TranscriptProtocol, ) -> LookUpProof { LookUpProof::prove( &self.left_wires, &self.right_wires, &self.output_wires, proving_key, preprocessed_table, transcript, ) } } #[cfg(test)] mod test { use super::*; use crate::kzg10; use crate::lookup::table::four_bits::XOR4Bit; use merlin::Transcript; #[test] fn test_proof() { // Setup SRS let (proving_key, verifier_key) = kzg10::trusted_setup(2usize.pow(12)); // Setup Lookup with a 4 bit table let table = XOR4Bit::new(); let preprocessed_table = table.preprocess(&proving_key, 2usize.pow(8)); let mut lookup = LookUp::new(table); // Adds 1 XOR 2 
lookup.read(&(Fr::from(1u8), Fr::from(2u8))); // Adds 2 XOR 4 lookup.read(&(Fr::from(2u8), Fr::from(4u8))); // Adds 3 XOR 5 lookup.read(&(Fr::from(3u8), Fr::from(5u8))); let mut prover_transcript = Transcript::new(b"lookup"); let proof = lookup.prove(&proving_key, &preprocessed_table, &mut prover_transcript); let mut verifier_transcript = Transcript::new(b"lookup"); let ok = proof.verify(&verifier_key, &preprocessed_table, &mut verifier_transcript); assert!(ok); } }
use std::convert::TryFrom;
use std::fs;

/// A password borrowed from one input line, with surrounding whitespace
/// stripped.
struct Password<'a> {
    content: &'a str
}

impl <'a> From<&'a str> for Password<'a> {
    fn from(line: &'a str) -> Self {
        Password { content: line.trim() }
    }
}

impl <'a> Password<'a> {
    /// Part 1 rule: the policy character must occur between `first` and
    /// `second` times, inclusive.
    fn valid_for_count(&self, policy: &PasswordPolicy) -> bool {
        let occurrences = self
            .content
            .chars()
            .filter(|&c| c == policy.character)
            .count();

        (policy.first..=policy.second).contains(&occurrences)
    }

    /// Part 2 rule: exactly one of the two 1-based positions holds the
    /// policy character.
    fn valid_for_xor(&self, policy: &PasswordPolicy) -> bool {
        let matches_at =
            |position: usize| self.content.chars().nth(position - 1) == Some(policy.character);

        matches_at(policy.first) != matches_at(policy.second)
    }
}

/// A `first-second character` rule, e.g. `1-3 a`.
struct PasswordPolicy {
    first: usize,
    second: usize,
    character: char
}

/// Which part of a policy string failed to parse.
#[derive(Debug)]
enum PasswordPolicyError {
    HyphenNotFound,
    SpaceNotFound,
    FirstParseError,
    SecondParseError,
    CharacterError
}

impl TryFrom<&str> for PasswordPolicy {
    type Error = PasswordPolicyError;

    /// Parses `"<first>-<second> <character>"`, mapping each possible
    /// failure to its dedicated error variant.
    fn try_from(line: &str) -> Result<Self, Self::Error> {
        use PasswordPolicyError::*;

        let hyphen = line.find('-').ok_or(HyphenNotFound)?;
        let space = line.find(' ').ok_or(SpaceNotFound)?;

        let first = line[..hyphen].parse::<usize>().map_err(|_| FirstParseError)?;
        let second = line[hyphen + 1..space]
            .parse::<usize>()
            .map_err(|_| SecondParseError)?;
        let character = line.chars().nth(space + 1).ok_or(CharacterError)?;

        Ok(PasswordPolicy { first, second, character })
    }
}

/// Splits one input line at the colon into its policy and password halves.
/// Panics on malformed lines.
fn parse_line<'a>(line: &'a str) -> (PasswordPolicy, Password<'a>) {
    let colon = line.find(':').expect("file contains invalid entries");
    let (policy_part, rest) = line.split_at(colon);

    (
        PasswordPolicy::try_from(policy_part).expect("file contains invalid entries"),
        // Skip the ':' itself; `Password::from` trims the leading space.
        Password::from(&rest[1..])
    )
}

fn main() {
    let input = fs::read_to_string("input.txt").expect("file not found");

    // Part 1
    let compliant_passwords = input
        .lines()
        .map(parse_line)
        .filter(|(policy, password)| password.valid_for_count(policy))
        .count();
    println!("Compliant passwords by count: {}", compliant_passwords);

    // Part 2
    let compliant_passwords = input
        .lines()
        .map(parse_line)
        .filter(|(policy, password)| password.valid_for_xor(policy))
        .count();
    println!("Compliant passwords by position: {}", compliant_passwords);
}

#[test]
fn test_conversion() {
    let policy = PasswordPolicy::try_from("1-3 a").unwrap();
    assert_eq!(policy.first, 1);
    assert_eq!(policy.second, 3);
    assert_eq!(policy.character, 'a');
}

#[test]
fn test_full_line() {
    let (policy, password) = parse_line("1-3 a: abcde");
    assert_eq!(policy.first, 1);
    assert_eq!(policy.second, 3);
    assert_eq!(policy.character, 'a');
    assert_eq!(password.content, "abcde");
}

#[test]
fn test_examples() {
    let (policy1, password1) = parse_line("1-3 a: abcde");
    assert!(password1.valid_for_count(&policy1));
    assert!(password1.valid_for_xor(&policy1));

    let (policy2, password2) = parse_line("1-3 b: cdefg");
    assert_eq!(password2.valid_for_count(&policy2), false);
    assert_eq!(password2.valid_for_xor(&policy2), false);

    let (policy3, password3) = parse_line("2-9 c: ccccccccc");
    assert!(password3.valid_for_count(&policy3));
    assert_eq!(password3.valid_for_xor(&policy3), false);
}
use ::raw::c_void;
use ::{Result, Handle};

/// One entry in the title list filled by `AM_ListTitles`.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct TitleList {
    // Full 64-bit title ID.
    titleID: u64,
    // Presumably the installed size in bytes — confirm against ctrulib's am.h.
    size: u64,
    titleVersion: u16,
    // Undocumented trailing bytes (reserved/padding in the C struct).
    unknown2: [u8; 6usize]
}

// Raw FFI bindings to the 3DS AM ("Application Manager") service.
// NOTE(review): `mediatype` selects the storage medium (NAND vs SD vs game
// card in ctrulib) — confirm the exact encoding against the C headers.
extern "C" {
    /// Initializes the AM service session.
    pub fn amInit() -> Result;
    /// Tears down the AM service session.
    pub fn amExit() -> Result;
    /// Returns a pointer to the AM session handle.
    pub fn amGetSessionHandle() -> *mut Handle;
    /// Writes the number of titles on `mediatype` into `count`.
    pub fn AM_GetTitleCount(mediatype: u8, count: *mut u32) -> Result;
    /// Fills `titleIDs` with up to `count` title IDs.
    pub fn AM_GetTitleIdList(mediatype: u8, count: u32, titleIDs: *mut u64) -> Result;
    pub fn AM_GetDeviceId(deviceID: *mut u32) -> Result;
    /// Fills `titleList` with details for the `titleCount` IDs in `titleIdList`.
    pub fn AM_ListTitles(mediatype: u8, titleCount: u32, titleIdList: *mut u64, titleList: *mut TitleList) -> Result;
    // CIA installation: start/cancel/finish operate on the handle written by
    // the corresponding start call.
    pub fn AM_StartCiaInstall(mediatype: u8, ciaHandle: *mut Handle) -> Result;
    pub fn AM_StartDlpChildCiaInstall(ciaHandle: *mut Handle) -> Result;
    pub fn AM_CancelCIAInstall(ciaHandle: *mut Handle) -> Result;
    pub fn AM_FinishCiaInstall(mediatype: u8, ciaHandle: *mut Handle) -> Result;
    pub fn AM_DeleteTitle(mediatype: u8, titleID: u64) -> Result;
    pub fn AM_DeleteAppTitle(mediatype: u8, titleID: u64) -> Result;
    pub fn AM_InstallFIRM(titleID: u64) -> Result;
    /// Writes the product code for `titleID` into the caller-provided buffer.
    pub fn AM_GetTitleProductCode(mediatype: u8, titleID: u64, productCode: *mut c_void) -> Result;
}
use hacspec_lib::prelude::*;
use unsafe_hacspec_examples::sha2::hash;

/// SHA-256 known-answer tests: hash fixed hex-encoded messages and compare
/// the hex-encoded digests against reference values.
#[test]
fn test_sha256_kat() {
    let cases = [
        (
            "686163737065632072756c6573",
            "b37db5ed72c97da3b2579537afbc3261ed3d5a56f57b3d8e5c1019ae35929964",
        ),
        (
            "6861637370656320697320612070726f706f73616c20666f722061206e65772073706563696669636174696f6e206c616e677561676520666f722063727970746f207072696d69746976657320746861742069732073756363696e63742c2074686174206973206561737920746f207265616420616e6420696d706c656d656e742c20616e642074686174206c656e647320697473656c6620746f20666f726d616c20766572696669636174696f6e2e",
            "348ef044446d56e05210361af5a258588ad31765f446bf4cb3b67125a187a64a",
        ),
    ];

    for &(message_hex, expected_256) in cases.iter() {
        let msg = ByteSeq::from_hex(message_hex);
        let digest = hash(&msg);
        println!("{:?}", expected_256);
        println!("{:x?}", digest);
        assert_eq!(expected_256, digest.to_hex());
    }
}
mod data;

use data::NonterminalDescription;
use data::RspgContent;
use data::RspgMod;
use data::TerminalDescription;
use proc_macro2::Span;
use proc_macro2::TokenStream;
use quote::quote;
use quote::quote_spanned;
use quote::ToTokens;
use rspg::grammar::Grammar;
use std::cell::RefCell;
use std::collections::BTreeMap;
use syn::spanned::Spanned;
use syn::Ident;
use syn::LitStr;

// Binds one local `quote!` fragment per listed name via the generated
// `use_name!` macro (see `define_names!` below), so generated code can refer
// to fully-qualified paths as `#_Name`. The `$span =>` form attaches a span.
macro_rules! use_names {
    () => ();
    ($name:ident, $($tail:tt)*) => (
        use_name!($name $name);
        use_names!($($tail)*)
    );
    ($span:expr => $name:ident, $($tail:tt)*) => (
        use_name!($span => $name $name);
        use_names!($($tail)*)
    );
}

// Generates the `use_name!` macro that maps each short `_Name` to its
// fully-qualified path, both with and without an explicit span.
// See <https://github.com/SergioBenitez/Rocket/blob/45b4436ed3a7ab913d96c2b69ee4df7fd8c0c618/core/codegen/src/lib.rs#L61>
macro_rules! define_names {
    ($($name:ident => $path:path,)*) => (
        macro_rules! use_name {
            $(
                ($i:ident $name) => (
                    #[allow(non_snake_case)]
                    let $i = quote!{$path};
                );
            )*
            $(
                ($span:expr => $i:ident $name) => (
                    #[allow(non_snake_case)]
                    let $i = quote_spanned!{$span => $path};
                );
            )*
        }
    );
}

define_names! {
    _ron_from_str => ::ron::de::from_str,
    _lazy_static => ::lazy_static::lazy_static,
    _Grammar => ::rspg::grammar::Grammar,
    _TerminalIndex => ::rspg::grammar::TerminalIndex,
    _RuleIndex => ::rspg::grammar::RuleIndex,
    _FirstSets => ::rspg::set::FirstSets,
    _FollowSets => ::rspg::set::FollowSets,
    _Table => ::rspg::lr1::table::Table,
    _Token => ::rspg::token::Token,
    _Reduce => ::rspg::lr1::parser::Reduce,
    _Parser => ::rspg::lr1::parser::Parser,
    _ParserError => ::rspg::lr1::parser::Error,
    _String => ::std::string::String,
    _Vec => ::std::vec::Vec,
    _Result => ::std::result::Result,
    _Iterator => ::std::iter::Iterator,
    _Ord => ::std::cmp::Ord,
    _PhantomData => ::std::marker::PhantomData,
}

/// Macro entry point: parses `input` as an [`RspgMod`] and emits the
/// generated module (outer attributes, visibility, inner attributes,
/// `use super::*;`, then everything produced by [`build_contents`]).
pub fn generate(input: TokenStream) -> syn::Result<TokenStream> {
    let parsed: RspgMod = syn::parse2(input)?;
    // TODO: debug output
    let mod_vis = parsed.visibility;
    let mod_name = parsed.mod_name;
    let mod_outer_attrs = parsed.outer_attrs;
    let mod_inner_attrs = parsed.inner_attrs;
    let contents = build_contents(parsed.content)?;
    let result = quote! {
        #(#mod_outer_attrs)*
        #mod_vis mod #mod_name {
            #(#mod_inner_attrs)*
            use super::*;
            #contents
        }
    };
    Ok(result)
}

/// Validated macro input plus the grammar built from it and name→description
/// lookup maps for nonterminals and terminals.
pub struct Context {
    content: RspgContent,
    grammar: Grammar<String, String>,
    nonterminal_map: BTreeMap<String, NonterminalDescription>,
    terminal_map: BTreeMap<String, TerminalDescription>,
}

/// Builds the grammar artifacts (rule list, FIRST/FOLLOW sets, LR(1) table),
/// embeds them as data, and appends all generated items (the `Parsed` enum,
/// `WrappedToken`, the reducer, and the parser entry point).
pub fn build_contents(content: RspgContent) -> syn::Result<TokenStream> {
    let ctx = build_context(content)?;
    let rules: Vec<_> = ctx.grammar.rule_indices().collect();
    let first_sets = rspg::set::FirstSets::of_grammar(&ctx.grammar);
    let follow_sets = rspg::set::FollowSets::of_grammar(&ctx.grammar, &first_sets);
    // The table is generated at macro-expansion time; failure (e.g. a
    // grammar conflict) is reported as a call-site error.
    let table =
        rspg::lr1::generator::Generator::construct(&ctx.grammar, &first_sets, "".to_string())
            .generate(&ctx.grammar)
            .ok_or_else(|| syn::Error::new(Span::call_site(), "failed to construct LR(1) table"))?;
    let mut result = proc_macro2::TokenStream::new();
    use_names! {
        _Grammar, _Vec, _String, _RuleIndex, _FirstSets, _FollowSets, _Table,
    }
    result.extend(embed_data(
        &ctx.grammar,
        "GRAMMAR",
        quote!(#_Grammar<#_String, #_String>),
    ));
    result.extend(embed_data(&rules, "RULES", quote!(#_Vec<#_RuleIndex>)));
    result.extend(embed_data(&first_sets, "FIRST_SETS", quote!(#_FirstSets)));
    result.extend(embed_data(
        &follow_sets,
        "FOLLOW_SETS",
        quote!(#_FollowSets),
    ));
    result.extend(embed_data(&table, "TABLE", quote!(#_Table)));
    result.extend(enum_parsed(&ctx));
    result.extend(token_impl(&ctx));
    result.extend(reducer(&ctx));
    result.extend(parser(&ctx));
    Ok(result)
}

/// Validates every symbol reference in the macro input (collecting all
/// "undefined nonterminal/terminal" errors rather than stopping at the
/// first) and builds the [`Grammar`] via `GrammarBuilder`.
pub fn build_context(content: RspgContent) -> syn::Result<Context> {
    let nonterminal_map: BTreeMap<_, _> = content
        .nonterminals
        .iter()
        .map(|n| (n.ident.to_string(), n.clone()))
        .collect();
    let terminal_map: BTreeMap<_, _> = content
        .terminals
        .iter()
        .map(|t| (t.lit.value(), t.clone()))
        .collect();
    let mut builder = rspg::grammar::GrammarBuilder::new();
    content.nonterminals.iter().for_each(|n| {
        builder.add_and_get_nonterminal(n.ident.to_string());
    });
    content.terminals.iter().for_each(|t| {
        builder.add_and_get_terminal(t.lit.value());
    });
    // Errors are accumulated through shared closures, hence the RefCell.
    let errors = RefCell::new(Vec::new());
    let meet_nonterminal = |n: &Ident| {
        let s = n.to_string();
        if !nonterminal_map.contains_key(&s) {
            let message = format!("undefined nonterminal: {s}");
            errors
                .borrow_mut()
                .push(syn::Error::new_spanned(n, message));
        }
    };
    let meet_terminal = |t: &LitStr| {
        let s = t.value();
        if !terminal_map.contains_key(&s) {
            let message = format!("undefined terminal: {s:?}");
            errors
                .borrow_mut()
                .push(syn::Error::new_spanned(t, message));
        }
    };
    let start = &content.start.nonterminal;
    meet_nonterminal(start);
    builder = builder.start(start.to_string());
    for rule in &content.rules {
        meet_nonterminal(&rule.left);
        builder = builder.push_rule_left(rule.left.to_string());
        for right in &rule.right {
            builder = match &right.symbol {
                data::Symbol::Nonterminal(n) => {
                    meet_nonterminal(n);
                    builder.push_rule_right_nonterminal(n.to_string())
                }
                data::Symbol::Terminal(t) => {
                    meet_terminal(t);
                    builder.push_rule_right_terminal(t.value())
                }
            };
        }
    }
    let grammar = builder.build();
    match collect_errors(errors.into_inner().into_iter()) {
        None => Ok(Context {
            content,
            grammar,
            nonterminal_map,
            terminal_map,
        }),
        Some(e) => Err(e),
    }
}

/// Serializes `data` to a RON string at expansion time and emits a
/// `lazy_static` item named `name` of type `ty` that deserializes it at
/// runtime.
pub fn embed_data<T>(data: &T, name: &str, ty: TokenStream) -> TokenStream
where
    T: serde::Serialize,
{
    let ident = Ident::new(name, Span::call_site());
    let string =
        ron::ser::to_string(data).unwrap_or_else(|_| panic!("failed to serialize data {name}"));
    use_names! {
        _lazy_static, _ron_from_str,
    }
    quote! {
        #_lazy_static! {
            pub static ref #ident: #ty = #_ron_from_str(#string).expect("failed to load embedded data");
        }
    }
}

/// Emits the `Parsed` enum — one variant per nonterminal, holding that
/// nonterminal's semantic value type — plus panicking `unwrap_*` accessors.
pub fn enum_parsed(ctx: &Context) -> TokenStream {
    let Context {
        content: RspgContent { nonterminals, .. },
        ..
    } = ctx;
    let mut result = TokenStream::new();
    let idents = nonterminals.iter().map(|d| &d.ident);
    let types = nonterminals.iter().map(|d| &d.ty);
    let type_def = {
        let idents = idents.clone();
        let types = types.clone();
        quote! {
            pub enum Parsed {
                #(#idents(#types),)*
            }
        }
    };
    result.extend(type_def);
    let unwrap_idents = idents.clone().map(unwrap_ident);
    let type_impl = quote! {
        impl Parsed {
            #(
                pub fn #unwrap_idents(self) -> #types {
                    match self {
                        Self::#idents(x) => x,
                        _ => panic!("expect {}", stringify!(#idents)),
                    }
                }
            )*
        }
    };
    result.extend(type_impl);
    result
}

/// Emits `WrappedToken` (a newtype over the user's token type) and its
/// `Token` impl, mapping each terminal pattern to its terminal name and
/// precomputed terminal index.
pub fn token_impl(ctx: &Context) -> TokenStream {
    let Context {
        grammar,
        content: RspgContent { token, terminals, .. },
        ..
    } = ctx;
    let ty = &token.ty;
    let lits = terminals.iter().map(|d| &d.lit);
    let pats = terminals.iter().map(|d| &d.pat);
    // Indices are resolved at expansion time so the generated match arms are
    // plain integer constants.
    let indices = lits
        .clone()
        .map(|l| grammar.terminal_index(&l.value()).value());
    let _exprs = terminals.iter().map(|d| &d.expr);
    let pats2 = pats.clone();
    use_names! {
        _Grammar, _TerminalIndex, _String, _Token, _Ord,
    }
    // `_TerminalIndex` is used twice in `quote!`,
    // causing quote to create a new `non_snake_case` variable.
    #[allow(non_snake_case)]
    {
        quote_spanned! {token.span() =>
            pub struct WrappedToken(#ty);

            impl #_Token<#_String> for WrappedToken {
                #[allow(unused_variables)]
                fn terminal(&self) -> #_String {
                    match self {
                        #(
                            WrappedToken(#pats) => #lits.to_string(),
                        )*
                    }
                }

                #[allow(unused_variables)]
                fn terminal_index<N>(
                    &self,
                    grammar: &#_Grammar<N, #_String>
                ) -> #_TerminalIndex
                where
                    N: #_Ord,
                {
                    match self {
                        #(
                            WrappedToken(#pats2) => unsafe { #_TerminalIndex::new(#indices) },
                        )*
                    }
                }
            }
        }
    }
}

/// Emits the `reducer` function that dispatches on the rule index and runs
/// the per-rule body generated by [`rule_reducer`].
pub fn reducer(ctx: &Context) -> TokenStream {
    let Context {
        content, grammar, ..
    } = ctx;
    let rule_index_values = grammar.rule_indices().map(rspg::grammar::RuleIndex::value);
    let error_type = &content.error.ty;
    use_names! {
        _Result,
    }
    let result_type = quote! {#_Result<Parsed, #error_type>};
    let reduce = Ident::new("reduce", Span::call_site());
    let rule_body = rule_index_values
        .clone()
        .map(|i| rule_reducer(ctx, &reduce, &content.rules[i]));
    use_names! {
        _Reduce,
    }
    quote! {
        #[allow(clippy::let_unit_value)]
        fn reducer(mut #reduce: #_Reduce<Parsed, WrappedToken>) -> #result_type {
            match #reduce.rule.value() {
                #(
                    #rule_index_values => #rule_body,
                )*
                _ => unreachable!(),
            }
        }
    }
}

/// Emits one rule's reduction body: bind each right-hand-side symbol (see
/// [`binder`]), run the user's rule body in a closure returning
/// `Result<LeftType, Error>`, and wrap a success in the left nonterminal's
/// `Parsed` variant.
pub fn rule_reducer(ctx: &Context, reduce: &Ident, rule: &data::Rule) -> TokenStream {
    let left = &rule.left;
    let left_type = &ctx.nonterminal_map[&left.to_string()].ty;
    let body = &rule.body;
    let binders = rule
        .right
        .iter()
        .map(|pat_symbol| binder(ctx, reduce, pat_symbol));
    let error_type = &ctx.content.error.ty;
    use_names! {
        _Result,
    }
    quote_spanned! {rule.span() =>
        {
            #(
                #binders
            )*
            #[allow(clippy::redundant_closure_call)]
            let value = (|| -> #_Result<#left_type, #error_type> {
                #body
            })();
            value.map(Parsed::#left)
        }
    }
}

/// Emits one `let <pat> = <expr>;` binding for a right-hand-side symbol:
/// nonterminals pop a `Parsed` value and unwrap the matching variant;
/// terminals pop a `WrappedToken` and evaluate the terminal's extraction
/// expression. A missing user pattern binds to `_`.
pub fn binder(ctx: &Context, reduce: &Ident, pat_symbol: &data::PatSymbol) -> TokenStream {
    let data::PatSymbol { pat, symbol } = pat_symbol;
    let expr = match symbol {
        data::Symbol::Nonterminal(n) => {
            let unwrapper = unwrap_ident(n);
            quote_spanned! {pat_symbol.span() =>
                #reduce.from
                    .pop_front()
                    .expect("expect a stack item")
                    .parsed()
                    .expect("expect a nonterminal")
                    .#unwrapper()
            }
        }
        data::Symbol::Terminal(t) => {
            let terminal = &ctx.terminal_map[&t.value()];
            let terminal_pat = &terminal.pat;
            let expr = &terminal.expr;
            quote_spanned! {pat_symbol.span() =>
                match #reduce.from
                    .pop_front()
                    .expect("expect a stack item")
                    .token()
                    .expect("expect a terminal")
                {
                    WrappedToken(#terminal_pat) => #expr,
                    _ => unreachable!(),
                }
            }
        }
    };
    let pat = pat
        .clone()
        .map(|(_, p, _)| p.to_token_stream())
        .unwrap_or(quote_spanned! { expr.span() => _ });
    quote_spanned! {pat_symbol.span() =>
        let #pat = #expr;
    }
}

/// Emits the lazy `PARSER` static (wired to the embedded `GRAMMAR`/`TABLE`
/// and the generated `reducer`) and the public `parse` entry point, which
/// wraps input tokens and unwraps the start nonterminal's result.
pub fn parser(ctx: &Context) -> TokenStream {
    let Context { content, .. } = ctx;
    let token_type = &content.token.ty;
    let error_type = &content.error.ty;
    let start_type = &content
        .nonterminals
        .iter()
        .find(|n| n.ident == content.start.nonterminal)
        .unwrap()
        .ty;
    let unwrap_start = unwrap_ident(&content.start.nonterminal);
    use_names! {
        _lazy_static, _Result, _PhantomData, _Parser, _Reduce, _String, _Iterator, _ParserError,
    }
    quote! {
        #_lazy_static! {
            pub static ref PARSER: #_Parser<
                'static,
                'static,
                #_String,
                #_String,
                WrappedToken,
                Parsed,
                fn(#_Reduce<Parsed, WrappedToken>) -> #_Result<Parsed, #error_type>,
                #error_type,
            > = #_Parser {
                grammar: &GRAMMAR,
                table: &TABLE,
                reducer,
                phantom: #_PhantomData,
            };
        }

        pub fn parse<I>(input: I) -> #_Result<#start_type, #_ParserError<#error_type>>
        where
            I: #_Iterator<Item = #token_type>,
        {
            PARSER.parse(input.map(WrappedToken)).map(Parsed::#unwrap_start)
        }
    }
}

/// `Foo` → `unwrap_foo`, preserving the original identifier's span.
pub fn unwrap_ident(i: &Ident) -> Ident {
    Ident::new(
        &format!("unwrap_{}", i.to_string().to_lowercase()),
        i.span(),
    )
}

/// Folds any number of errors into one `syn::Error` (via `Error::extend`),
/// or `None` when the iterator is empty.
pub fn collect_errors<I>(mut es: I) -> Option<syn::Error>
where
    I: Iterator<Item = syn::Error>,
{
    match es.next() {
        Some(mut first) => {
            first.extend(es);
            Some(first)
        }
        None => None,
    }
}
// Stub: query parser type with no state or behavior implemented yet.
struct QueryParser {}

impl QueryParser {}
//! Contains the node configuration parsing code. mod builder; mod cli; mod file; use std::{fmt::Display, net::SocketAddr, path::PathBuf, str::FromStr}; use enum_iterator::IntoEnumIterator; use reqwest::Url; const DEFAULT_HTTP_RPC_ADDR: &str = "127.0.0.1:9545"; /// Possible configuration options. #[derive(Debug, PartialEq, Clone, Copy, Hash, Eq, IntoEnumIterator)] pub enum ConfigOption { /// The Ethereum URL. EthereumHttpUrl, /// The Ethereum password. EthereumPassword, /// The HTTP-RPC listening socket address. HttpRpcAddress, /// Path to the node's data directory. DataDirectory, /// The Sequencer's HTTP URL. SequencerHttpUrl, /// Number of Python sub-processes to start. PythonSubprocesses, /// Enable SQLite write-ahead logging. EnableSQLiteWriteAheadLogging, /// Enable pending polling. PollPending, } impl Display for ConfigOption { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { ConfigOption::EthereumHttpUrl => f.write_str("Ethereum HTTP URL"), ConfigOption::EthereumPassword => f.write_str("Ethereum password"), ConfigOption::DataDirectory => f.write_str("Data directory"), ConfigOption::HttpRpcAddress => f.write_str("HTTP-RPC socket address"), ConfigOption::SequencerHttpUrl => f.write_str("Sequencer HTTP URL"), ConfigOption::PythonSubprocesses => f.write_str("Number of Python subprocesses"), ConfigOption::EnableSQLiteWriteAheadLogging => { f.write_str("Enable SQLite write-ahead logging") } ConfigOption::PollPending => f.write_str("Enable pending block polling"), } } } /// Ethereum configuration parameters. #[derive(Debug, PartialEq, Eq)] pub struct EthereumConfig { /// The Ethereum URL. pub url: Url, /// The optional Ethereum password. pub password: Option<String>, } /// Node configuration options. #[derive(Debug, PartialEq, Eq)] pub struct Configuration { /// The Ethereum settings. pub ethereum: EthereumConfig, /// The HTTP-RPC listening address and port. pub http_rpc_addr: SocketAddr, /// The node's data directory. 
pub data_directory: PathBuf, /// The Sequencer's HTTP URL. pub sequencer_url: Option<Url>, /// The number of Python subprocesses to start. pub python_subprocesses: std::num::NonZeroUsize, /// Enable SQLite write-ahead logging. pub sqlite_wal: bool, /// Enable pending polling. pub poll_pending: bool, } impl Configuration { /// Creates a [node configuration](Configuration) based on the options specified /// via the command-line and config file. /// /// The config filepath may be specified as a command-line parameter. /// /// Options from the command-line and config file will be merged, with the /// command-line taking precedence. It is valid for no configuration file to exist, /// so long as all required options are covered by the command-line arguments. /// /// Errors if the configuration file couldn't be parsed, or if any required options /// are not specified. /// /// Note: This will terminate the program if invalid command-line arguments are supplied. /// This is intended, as [clap] will show the program usage / help. pub fn parse_cmd_line_and_cfg_file() -> std::io::Result<Self> { // Parse command-line arguments. This must be first in order to use // users config filepath (if supplied). let (cfg_filepath, cli_cfg) = cli::parse_cmd_line(); // Parse configuration file if specified. let file_cfg = match cfg_filepath { Some(filepath) => { let filepath = PathBuf::from_str(&filepath).map_err(|err| { std::io::Error::new(std::io::ErrorKind::InvalidInput, err.to_string()) })?; Some(file::config_from_filepath(&filepath)?) } None => None, }; let cfg = match file_cfg { Some(cfg) => cli_cfg.merge(cfg), None => cli_cfg, }; let cfg = cfg.try_build()?; Ok(cfg) } }
// Raw FFI bindings for the `EpDouble` (extended-precision double) C library.
//
// Fix: the original declarations gave C functions that return `void` a Rust
// return type of `-> c_void`. `c_void` is an opaque marker type meant to be
// used only behind pointers; as a bare return type it is not FFI-safe and is
// flagged by rustc's `improper_ctypes` lint. The correct binding for a C
// `void` function is to omit the return type, which is done below.

use libc::{c_int, c_void};
use std::os::raw::{c_char, c_double};
use EpDouble;

extern "C" {
    /// Allocates a new `EpDouble` on the C heap; release with [`EpdFree`].
    pub fn EpdAlloc() -> *mut EpDouble;
    /// qsort-style three-way comparison of two `EpDouble` keys.
    pub fn EpdCmp(key1: *const c_void, key2: *const c_void) -> c_int;
    /// Frees an `EpDouble` previously obtained from [`EpdAlloc`].
    pub fn EpdFree(epd: *mut EpDouble);
    /// Writes a textual representation of `epd` into the caller-supplied buffer.
    /// NOTE(review): buffer size requirements are defined by the C side — confirm.
    pub fn EpdGetString(epd: *const EpDouble, str: *mut c_char);
    /// Converts a plain `double` into extended-precision form.
    pub fn EpdConvert(value: c_double, epd: *mut EpDouble);

    // In-place arithmetic: the `*mut` argument is both operand and destination;
    // the `3`-suffixed variants write the result into the third argument.
    pub fn EpdMultiply(epd1: *mut EpDouble, value: c_double);
    pub fn EpdMultiply2(epd1: *mut EpDouble, epd2: *const EpDouble);
    pub fn EpdMultiply2Decimal(epd1: *mut EpDouble, epd2: *const EpDouble);
    pub fn EpdMultiply3(epd1: *const EpDouble, epd2: *const EpDouble, epd3: *mut EpDouble);
    pub fn EpdMultiply3Decimal(epd1: *const EpDouble, epd2: *const EpDouble, epd3: *mut EpDouble);
    pub fn EpdDivide(epd1: *mut EpDouble, value: c_double);
    pub fn EpdDivide2(epd1: *mut EpDouble, epd2: *const EpDouble);
    pub fn EpdDivide3(epd1: *const EpDouble, epd2: *const EpDouble, epd3: *mut EpDouble);
    pub fn EpdAdd(epd1: *mut EpDouble, value: c_double);
    pub fn EpdAdd2(epd1: *mut EpDouble, epd2: *const EpDouble);
    pub fn EpdAdd3(epd1: *const EpDouble, epd2: *const EpDouble, epd3: *mut EpDouble);
    pub fn EpdSubtract(epd1: *mut EpDouble, value: c_double);
    pub fn EpdSubtract2(epd1: *mut EpDouble, epd2: *const EpDouble);
    pub fn EpdSubtract3(epd1: *const EpDouble, epd2: *const EpDouble, epd3: *mut EpDouble);

    /// Stores 2^n into `epd` (binary / decimal exponent variants).
    pub fn EpdPow2(n: c_int, epd: *mut EpDouble);
    pub fn EpdPow2Decimal(n: c_int, epd: *mut EpDouble);
    pub fn EpdNormalize(epd: *mut EpDouble);
    pub fn EpdNormalizeDecimal(epd: *mut EpDouble);
    /// Splits `epd` into mantissa (`value`) and decimal exponent (`exponent`) out-params.
    pub fn EpdGetValueAndDecimalExponent(
        epd: *const EpDouble,
        value: *mut c_double,
        exponent: *mut c_int,
    );
    pub fn EpdGetExponent(value: c_double) -> c_int;
    pub fn EpdGetExponentDecimal(value: c_double) -> c_int;

    /// Constructors for special values; `sign` selects positive/negative.
    pub fn EpdMakeInf(epd: *mut EpDouble, sign: c_int);
    pub fn EpdMakeZero(epd: *mut EpDouble, sign: c_int);
    pub fn EpdMakeNan(epd: *mut EpDouble);
    pub fn EpdCopy(from: *const EpDouble, to: *mut EpDouble);

    // Predicates use the C boolean convention: non-zero means true.
    pub fn EpdIsInf(epd: *const EpDouble) -> c_int;
    pub fn EpdIsZero(epd: *const EpDouble) -> c_int;
    pub fn EpdIsNan(epd: *const EpDouble) -> c_int;
    pub fn EpdIsNanOrInf(epd: *const EpDouble) -> c_int;
    pub fn IsInfDouble(value: c_double) -> c_int;
    pub fn IsNanDouble(value: c_double) -> c_int;
    pub fn IsNanOrInfDouble(value: c_double) -> c_int;
}
// NOTE(review): svd2rust-generated memory map for a GIC CPU interface (GICC).
// This file should be regenerated from the SVD rather than hand-edited; the
// comments below are review annotations only.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    // 0x00: CTLR and CTLRNS overlay this offset; access via the methods below.
    _reserved_0_ctlr: [u8; 4usize],
    #[doc = "0x04 - GICC input priority mask register"]
    pub pmr: PMR,
    // 0x08: BPR and BPRNS overlay this offset; access via the methods below.
    _reserved_2_bpr: [u8; 4usize],
    #[doc = "0x0c - GICC interrupt acknowledge register"]
    pub iar: IAR,
    #[doc = "0x10 - GICC end of interrupt register"]
    pub eoir: EOIR,
    #[doc = "0x14 - GICC running priority register"]
    pub rpr: RPR,
    #[doc = "0x18 - GICC highest priority pending interrupt register"]
    pub hppir: HPPIR,
    #[doc = "0x1c - GICC aliased binary point register"]
    pub abpr: ABPR,
    #[doc = "0x20 - GICC aliased interrupt acknowledge register"]
    pub aiar: AIAR,
    #[doc = "0x24 - GICC aliased end of interrupt register"]
    pub aeoir: AEOIR,
    #[doc = "0x28 - GICC aliased highest priority pending interrupt register"]
    pub ahppir: AHPPIR,
    _reserved11: [u8; 164usize],
    #[doc = "0xd0 - GICC active priority register"]
    pub apr0: APR0,
    _reserved12: [u8; 12usize],
    #[doc = "0xe0 - GICC non-secure active priority register"]
    pub nsapr0: NSAPR0,
    _reserved13: [u8; 24usize],
    #[doc = "0xfc - GICC interface identification register"]
    pub iidr: IIDR,
    _reserved14: [u8; 3840usize],
    #[doc = "0x1000 - GICC deactivate interrupt register"]
    pub dir: DIR,
}
// Accessors for registers that share an offset (secure / non-secure views).
// NOTE(review): the `*_mut` methods derive a `&mut` reference from `&self` via
// raw-pointer casts — this is the historical svd2rust overlay pattern; confirm
// it matches this project's MMIO aliasing rules before relying on it.
impl RegisterBlock {
    #[doc = "0x00 - GICC control (non-secure access) register"]
    #[inline(always)]
    pub fn ctlrns(&self) -> &CTLRNS {
        unsafe { &*(((self as *const Self) as *const u8).add(0usize) as *const CTLRNS) }
    }
    #[doc = "0x00 - GICC control (non-secure access) register"]
    #[inline(always)]
    pub fn ctlrns_mut(&self) -> &mut CTLRNS {
        unsafe { &mut *(((self as *const Self) as *mut u8).add(0usize) as *mut CTLRNS) }
    }
    #[doc = "0x00 - Control register"]
    #[inline(always)]
    pub fn ctlr(&self) -> &CTLR {
        unsafe { &*(((self as *const Self) as *const u8).add(0usize) as *const CTLR) }
    }
    #[doc = "0x00 - Control register"]
    #[inline(always)]
    pub fn ctlr_mut(&self) -> &mut CTLR {
        unsafe { &mut *(((self as *const Self) as *mut u8).add(0usize) as *mut CTLR) }
    }
    #[doc = "0x08 - GICC binary point (non-secure access) register"]
    #[inline(always)]
    pub fn bprns(&self) -> &BPRNS {
        unsafe { &*(((self as *const Self) as *const u8).add(8usize) as *const BPRNS) }
    }
    #[doc = "0x08 - GICC binary point (non-secure access) register"]
    #[inline(always)]
    pub fn bprns_mut(&self) -> &mut BPRNS {
        unsafe { &mut *(((self as *const Self) as *mut u8).add(8usize) as *mut BPRNS) }
    }
    #[doc = "0x08 - GICC binary point register"]
    #[inline(always)]
    pub fn bpr(&self) -> &BPR {
        unsafe { &*(((self as *const Self) as *const u8).add(8usize) as *const BPR) }
    }
    #[doc = "0x08 - GICC binary point register"]
    #[inline(always)]
    pub fn bpr_mut(&self) -> &mut BPR {
        unsafe { &mut *(((self as *const Self) as *mut u8).add(8usize) as *mut BPR) }
    }
}
#[doc = "Control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ctlr](ctlr) module"]
pub type CTLR = crate::Reg<u32, _CTLR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CTLR;
#[doc = "`read()` method returns [ctlr::R](ctlr::R) reader structure"]
impl crate::Readable for CTLR {}
#[doc = "`write(|w| ..)` method takes [ctlr::W](ctlr::W) writer structure"]
impl crate::Writable for CTLR {}
#[doc = "Control register"]
pub mod ctlr;
#[doc = "GICC control (non-secure access) register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ctlrns](ctlrns) module"]
pub type CTLRNS = crate::Reg<u32, _CTLRNS>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CTLRNS;
#[doc = "`read()` method returns [ctlrns::R](ctlrns::R) reader structure"]
impl crate::Readable for CTLRNS {}
#[doc = "`write(|w| ..)` method takes [ctlrns::W](ctlrns::W) writer structure"]
impl crate::Writable for CTLRNS {}
#[doc = "GICC control (non-secure access) register"]
pub mod ctlrns;
#[doc = "GICC input priority mask register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pmr](pmr) module"]
pub type PMR = crate::Reg<u32, _PMR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PMR;
#[doc = "`read()` method returns [pmr::R](pmr::R) reader structure"]
impl crate::Readable for PMR {}
#[doc = "`write(|w| ..)` method takes [pmr::W](pmr::W) writer structure"]
impl crate::Writable for PMR {}
#[doc = "GICC input priority mask register"]
pub mod pmr;
#[doc = "GICC binary point register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [bpr](bpr) module"]
pub type BPR = crate::Reg<u32, _BPR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _BPR;
#[doc = "`read()` method returns [bpr::R](bpr::R) reader structure"]
impl crate::Readable for BPR {}
#[doc = "`write(|w| ..)` method takes [bpr::W](bpr::W) writer structure"]
impl crate::Writable for BPR {}
#[doc = "GICC binary point register"]
pub mod bpr;
#[doc = "GICC binary point (non-secure access) register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [bprns](bprns) module"]
pub type BPRNS = crate::Reg<u32, _BPRNS>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _BPRNS;
#[doc = "`read()` method returns [bprns::R](bprns::R) reader structure"]
impl crate::Readable for BPRNS {}
#[doc = "`write(|w| ..)` method takes [bprns::W](bprns::W) writer structure"]
impl crate::Writable for BPRNS {}
#[doc = "GICC binary point (non-secure access) register"]
pub mod bprns;
#[doc = "GICC interrupt acknowledge register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [iar](iar) module"]
pub type IAR = crate::Reg<u32, _IAR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _IAR;
#[doc = "`read()` method returns [iar::R](iar::R) reader structure"]
impl crate::Readable for IAR {}
#[doc = "GICC interrupt acknowledge register"]
pub mod iar;
#[doc = "GICC end of interrupt register\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [eoir](eoir) module"]
pub type EOIR = crate::Reg<u32, _EOIR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _EOIR;
#[doc = "`write(|w| ..)` method takes [eoir::W](eoir::W) writer structure"]
impl crate::Writable for EOIR {}
#[doc = "GICC end of interrupt register"]
pub mod eoir;
// NOTE(review): RPR is generated here as write-only, but the GIC architecture
// documents GICC_RPR as a read-only register — likely an SVD description
// error; confirm against the device's SVD/TRM.
#[doc = "GICC running priority register\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rpr](rpr) module"]
pub type RPR = crate::Reg<u32, _RPR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _RPR;
#[doc = "`write(|w| ..)` method takes [rpr::W](rpr::W) writer structure"]
impl crate::Writable for RPR {}
#[doc = "GICC running priority register"]
pub mod rpr;
#[doc = "GICC highest priority pending interrupt register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [hppir](hppir) module"]
pub type HPPIR = crate::Reg<u32, _HPPIR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _HPPIR;
#[doc = "`read()` method returns [hppir::R](hppir::R) reader structure"]
impl crate::Readable for HPPIR {}
#[doc = "GICC highest priority pending interrupt register"]
pub mod hppir;
#[doc = "GICC aliased binary point register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [abpr](abpr) module"]
pub type ABPR = crate::Reg<u32, _ABPR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _ABPR;
#[doc = "`read()` method returns [abpr::R](abpr::R) reader structure"]
impl crate::Readable for ABPR {}
#[doc = "`write(|w| ..)` method takes [abpr::W](abpr::W) writer structure"]
impl crate::Writable for ABPR {}
#[doc = "GICC aliased binary point register"]
pub mod abpr;
#[doc = "GICC aliased interrupt acknowledge register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [aiar](aiar) module"]
pub type AIAR = crate::Reg<u32, _AIAR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AIAR;
#[doc = "`read()` method returns [aiar::R](aiar::R) reader structure"]
impl crate::Readable for AIAR {}
#[doc = "GICC aliased interrupt acknowledge register"]
pub mod aiar;
#[doc = "GICC aliased end of interrupt register\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [aeoir](aeoir) module"]
pub type AEOIR = crate::Reg<u32, _AEOIR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AEOIR;
#[doc = "`write(|w| ..)` method takes [aeoir::W](aeoir::W) writer structure"]
impl crate::Writable for AEOIR {}
#[doc = "GICC aliased end of interrupt register"]
pub mod aeoir;
#[doc = "GICC aliased highest priority pending interrupt register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ahppir](ahppir) module"]
pub type AHPPIR = crate::Reg<u32, _AHPPIR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _AHPPIR;
#[doc = "`read()` method returns [ahppir::R](ahppir::R) reader structure"]
impl crate::Readable for AHPPIR {}
#[doc = "GICC aliased highest priority pending interrupt register"]
pub mod ahppir;
#[doc = "GICC active priority register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [apr0](apr0) module"]
pub type APR0 = crate::Reg<u32, _APR0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _APR0;
#[doc = "`read()` method returns [apr0::R](apr0::R) reader structure"]
impl crate::Readable for APR0 {}
#[doc = "`write(|w| ..)` method takes [apr0::W](apr0::W) writer structure"]
impl crate::Writable for APR0 {}
#[doc = "GICC active priority register"]
pub mod apr0;
#[doc = "GICC non-secure active priority register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [nsapr0](nsapr0) module"]
pub type NSAPR0 = crate::Reg<u32, _NSAPR0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NSAPR0;
#[doc = "`read()` method returns [nsapr0::R](nsapr0::R) reader structure"]
impl crate::Readable for NSAPR0 {}
#[doc = "`write(|w| ..)` method takes [nsapr0::W](nsapr0::W) writer structure"]
impl crate::Writable for NSAPR0 {}
#[doc = "GICC non-secure active priority register"]
pub mod nsapr0;
#[doc = "GICC interface identification register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [iidr](iidr) module"]
pub type IIDR = crate::Reg<u32, _IIDR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _IIDR;
#[doc = "`read()` method returns [iidr::R](iidr::R) reader structure"]
impl crate::Readable for IIDR {}
#[doc = "GICC interface identification register"]
pub mod iidr;
// NOTE(review): DIR is generated here as read-only, but the GIC architecture
// documents GICC_DIR as write-only — likely an SVD description error; confirm.
#[doc = "GICC deactivate interrupt register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dir](dir) module"]
pub type DIR = crate::Reg<u32, _DIR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DIR;
#[doc = "`read()` method returns [dir::R](dir::R) reader structure"]
impl crate::Readable for DIR {}
#[doc = "GICC deactivate interrupt register"]
pub mod dir;
// Streamlined NTRU Prime implementation built on hacspec's `Seq` type.
// Relies on polynomial helpers (`mul_poly_irr`, `add_poly`, `poly_to_ring`,
// `extended_euclid`, `make_positive`) defined elsewhere — presumably in
// `hacspec_lib::prelude`; confirm their reduction conventions before editing.
use hacspec_lib::prelude::*;

/// NtruPrime parameters
pub struct Parameters {
    // p: polynomial degree (a prime); q: coefficient modulus (a prime);
    // w: Hamming weight of short polynomials; irr: the irreducible modulus
    // polynomial x^p - x - 1 (coefficients stored lowest degree first).
    pub p: usize,
    pub q: i128,
    pub w: usize,
    pub irr: Seq<i128>,
}

/// Positions and coefficients for a polynomial.
pub struct Poly {
    // positions[i] is the degree at which coefficients[i] is placed; the two
    // sequences must have equal length (checked in `build_poly`).
    pub positions: Seq<i128>,
    pub coefficients: Seq<i128>,
}

// Secret key is the pair (f, 1/g in R_3) produced by `key_gen`.
type SecretKey = (Seq<i128>, Seq<i128>);

/// Supported NTRU Prime parameter sets (named by their prime p).
pub enum Version {
    NtruPrime653,
    NtruPrime761,
    NtruPrime857,
}

// Builds the modulus polynomial x^p - x - 1 as a coefficient sequence:
// constant term -1, degree-1 term -1, degree-p term 1, all others 0.
fn set_irr(p: usize) -> Seq<i128> {
    let mut irr: Seq<i128> = Seq::new(p + 1);
    irr[0] = -1i128;
    irr[1] = -1i128;
    irr[p] = 1i128;
    irr
}

/// Returns the standard parameter set for the requested version.
pub fn get_parameters(v: Version) -> Parameters {
    match v {
        Version::NtruPrime653 => Parameters {
            p: 653,
            q: 4621,
            w: 288,
            irr: set_irr(653),
        },
        Version::NtruPrime761 => Parameters {
            p: 761,
            q: 4591,
            w: 286,
            irr: set_irr(761),
        },
        Version::NtruPrime857 => Parameters {
            p: 857,
            q: 5167,
            w: 322,
            irr: set_irr(857),
        },
    }
}

/// First transform each coefficients to a value between −(q−1)/2 and (q−1)/2
/// then round it to the nearest multiple of 3
pub fn round_to_3(poly: &Seq<i128>, q: i128) -> Seq<i128> {
    let mut result = poly.clone();
    let q_12 = (q - 1) / 2;
    // Re-center coefficients from [0, q) into [-(q-1)/2, (q-1)/2].
    for i in 0..poly.len() {
        if poly[i] > q_12 {
            result[i] = poly[i] - q;
        }
    }
    // Round each coefficient to the nearest multiple of 3: try v-1, then
    // v-1+2 = v+1. Note this relies on Rust's `%` keeping the dividend's
    // sign, so it also handles negative coefficients.
    for i in 0..result.len() {
        if result[i] % 3 == 0 {
            continue;
        }
        result[i] -= 1;
        if result[i] % 3 != 0 {
            result[i] += 2;
        }
    }
    result
}

/// r is the plaintext, h is the public key
pub fn encrypt(r: &Seq<i128>, h: &Seq<i128>, n_v: &Parameters) -> Seq<i128> {
    // Ciphertext is Round(r * h) in R/q.
    let pre = mul_poly_irr(r, &h, &n_v.irr, n_v.q);
    round_to_3(&pre, n_v.q)
}

/// Decrypts ciphertext `c` with secret key `key` = (f, 1/g).
/// Returns an error if the intermediate ring conversion fails.
pub fn decrypt(
    c: &Seq<i128>,
    key: &SecretKey,
    n_v: &Parameters,
) -> Result<Seq<i128>, &'static str> {
    let (f, v) = key;
    // calculate 3*f and 3*f*c
    let f_c = mul_poly_irr(&f, &c, &n_v.irr, n_v.q);
    let (mut f_3_c, ok) = poly_to_ring(
        &n_v.irr,
        &add_poly(&f_c, &add_poly(&f_c, &f_c, n_v.q), n_v.q),
        n_v.q,
    );

    // view coefficients as values between -(q-1/2) and (q-1/2)
    let q_12 = (n_v.q - 1) / 2;
    for i in 0..f_3_c.len() {
        if f_3_c[i] > q_12 {
            f_3_c[i] -= n_v.q;
        }
    }

    // lift f_3_c to R_3
    let mut e: Seq<i128> = Seq::new(f_3_c.len());
    for i in 0..e.len() {
        e[i] = f_3_c[i] % 3;
    }
    // Normalize remainders into {0, 1, 2} (Rust `%` can yield negatives).
    e = make_positive(&e, 3);

    // calculate e * v in R
    let mut r = mul_poly_irr(&e, &v, &n_v.irr, 3);

    // to R_short: map residue 2 back to coefficient -1.
    for i in 0..r.len() {
        if r[i] == 2 {
            r[i] = -1 as i128;
        }
    }
    // Only report success if the earlier ring conversion succeeded.
    if ok {
        Ok(r)
    } else {
        Err("unable to decrypt")
    }
}

/// This function creates a polynomial with w many -1 or 1 and with the highest degree of h_deg.
/// Randomness of the coefficients and positions has to be provided.
pub fn build_poly(poly: &Poly, h_deg: usize) -> Seq<i128> {
    debug_assert_eq!(poly.coefficients.len(), poly.positions.len());
    let mut polynomial: Seq<i128> = Seq::new(h_deg + 1);
    // Place each coefficient at its position; later duplicates overwrite
    // earlier ones (set_chunk of length 1 == single-element write).
    for i in 0..poly.coefficients.len() {
        polynomial = polynomial.set_chunk(
            1,
            poly.positions[i] as usize,
            &Seq::from_native_slice(&[poly.coefficients[i]]),
        );
    }
    polynomial
}

// Builds the polynomial and attempts to invert it modulo `modulus` in
// R = Z[x]/(irr); returns both the polynomial and the inversion result.
fn build_invertible_poly(
    poly: &Poly,
    n: &Parameters,
    modulus: i128,
) -> (Seq<i128>, Result<Seq<i128>, &'static str>) {
    let f = build_poly(poly, n.p);
    let x = extended_euclid(&f, &n.irr, modulus);
    (f, x)
}

/// Generate a key from given polynomials `f` and `g`.
/// Generating the polynomials at random has to happen outside.
pub fn key_gen(
    g: &Poly,
    f: &Poly,
    n_v: &Parameters,
) -> Result<(Seq<i128>, SecretKey), &'static str> {
    // g must be invertible in R_3; otherwise the caller should retry with a
    // fresh random g.
    let poly_g = build_invertible_poly(g, n_v, 3);
    let g_inv = match poly_g.1 {
        Ok(v) => v,
        Err(_) => return Err("This polynomial isn't invertible. Try another one."),
    };
    let f = build_poly(f, n_v.p);
    // Invert 3*f in R_q (public key h = g / (3*f)).
    let f_3times = add_poly(&f, &add_poly(&f, &f, n_v.q), n_v.q);
    let f_3times_pre_inv = extended_euclid(&f_3times, &n_v.irr, n_v.q);
    let f_inv_3times = match f_3times_pre_inv {
        Ok(v) => v,
        Err(_) => return Err("Key generating, failed"),
    };
    let h = mul_poly_irr(&poly_g.0, &f_inv_3times, &n_v.irr, n_v.q);
    Ok((h, (f, g_inv)))
}
// Python bindings (pyo3) for the 2048-style `game` module, exposing a `Game`
// class with `step`/`board` and a module-level constructor.
// NOTE(review): written against a legacy pyo3 API (PyRawObject / #[pymodinit]
// / PyObjectProtocol, roughly pyo3 0.5-era). `exc` and `PyObjectProtocol` are
// not visibly imported here — presumably re-exported by this pyo3 version's
// prelude; confirm before upgrading the dependency.
#![feature(specialization)]
extern crate numpy;
extern crate pyo3;
extern crate rand;
mod game;
use numpy::{PyArray, PyArrayModule};
use pyo3::prelude::*;

// Python-visible wrapper around the pure-Rust game state.
#[pyclass]
struct Game {
    game: game::Game,
}

#[pymethods]
impl Game {
    /// Python constructor: starts a fresh game.
    #[new]
    fn __new__(obj: &PyRawObject) -> PyResult<()> {
        obj.init(|_| Game {
            game: game::Game::new(),
        })
    }

    /// Advances the game one move. `direction` is 0=Left, 1=Up, 2=Right,
    /// 3=Down; anything else raises ValueError. Returns (reward, done).
    fn step(&mut self, direction: u8) -> PyResult<(usize, bool)> {
        let direction = match direction {
            0 => game::Direction::Left,
            1 => game::Direction::Up,
            2 => game::Direction::Right,
            3 => game::Direction::Down,
            _ => return Err(exc::ValueError::new("Invalid direction")),
        };
        // `game::Game::step` returns the successor state by value; replace
        // our state with it.
        let (next, reward, done) = self.game.step(&direction);
        self.game = next;
        Ok((reward, done))
    }

    //#[getter]
    /// Returns the current board as a NumPy array of u8 tiles.
    fn board(&self, py: Python) -> PyResult<PyArray<u8>> {
        let np = PyArrayModule::import(py)?;
        Ok(PyArray::from_vec(py, &np, self.game.board.to_vec()))
    }
}

#[pyproto]
impl PyObjectProtocol for Game {
    /// `repr(game)` delegates to the Rust Debug formatting of the state.
    fn __repr__(&self) -> PyResult<String> {
        Ok(format!("{:?}", self.game))
    }
}

// Python module entry point; importing numpy up-front ensures the C API is
// initialized before any `board()` call.
#[pymodinit]
fn zwovieracht(py: Python, m: &PyModule) -> PyResult<()> {
    let _np = PyArrayModule::import(py)?;
    m.add_class::<Game>()?;
    Ok(())
}
/// Program entry point: emits the project's placeholder banner on stdout.
fn main() {
    let banner = "jroot";
    println!("{}", banner);
}
// Safe-ish Rust wrapper around the SpatialOS Worker SDK's Locator API:
// connection bootstrap, deployment listing (as a futures-0.1 Future), and the
// queue-status callback bridge.
use std::ffi::{CStr, CString};

use futures::{Async, Future};

use spatialos_sdk_sys::worker::*;

use crate::worker::internal::utils::cstr_to_string;
use crate::worker::parameters::ProtocolLoggingParameters;

/// Owns a `Worker_Locator*`; destroyed in `Drop`.
pub struct Locator {
    pub(crate) locator: *mut Worker_Locator,
}

impl Locator {
    /// Creates a locator connected to `hostname` with the given parameters.
    /// Panics if `hostname` contains an interior NUL byte or if the SDK
    /// returns a null locator.
    pub fn new<T: Into<Vec<u8>>>(hostname: T, params: &LocatorParameters) -> Self {
        unsafe {
            let hostname = CString::new(hostname).unwrap();
            // `worker_params` borrows pointers from `params`; it must not
            // outlive this call (the SDK copies what it needs in Create).
            let worker_params = params.to_worker_sdk();
            let ptr = Worker_Locator_Create(hostname.as_ptr(), &worker_params);
            assert!(!ptr.is_null());
            Locator { locator: ptr }
        }
    }

    /// Starts an asynchronous deployment-list request; drive the returned
    /// future to obtain the result.
    pub fn get_deployment_list_async(&self) -> DeploymentListFuture {
        unsafe {
            let future_ptr = Worker_Locator_GetDeploymentListAsync(self.locator);
            assert!(!future_ptr.is_null());
            DeploymentListFuture {
                internal: future_ptr,
                consumed: false,
            }
        }
    }
}

impl Drop for Locator {
    // Releases the underlying SDK locator handle.
    fn drop(&mut self) {
        if !self.locator.is_null() {
            unsafe { Worker_Locator_Destroy(self.locator) }
        }
    }
}

/// Owned parameters backing `Worker_LocatorParameters`; kept alive so the raw
/// pointers handed to the SDK remain valid.
pub struct LocatorParameters {
    pub project_name: CString,
    pub credentials: LocatorCredentials,
    pub logging: ProtocolLoggingParameters,
    pub enable_logging: bool,
}

impl LocatorParameters {
    // Builds the C struct view; the returned value borrows from `self` and
    // must not outlive it.
    fn to_worker_sdk(&self) -> Worker_LocatorParameters {
        let credentials = self.credentials.to_worker_sdk();
        let (credentials_type, login_token, steam) = credentials;
        Worker_LocatorParameters {
            project_name: self.project_name.as_ptr(),
            credentials_type,
            login_token,
            steam,
            logging: self.logging.to_worker_sdk(),
            enable_logging: self.enable_logging as u8,
        }
    }

    /// Creates parameters with logging disabled and default logging settings.
    /// Panics if `project_name` contains an interior NUL byte.
    pub fn new<T: AsRef<str>>(project_name: T, credentials: LocatorCredentials) -> Self {
        LocatorParameters {
            project_name: CString::new(project_name.as_ref())
                .expect("`project_name` contains a null byte"),
            credentials,
            logging: ProtocolLoggingParameters::default(),
            enable_logging: false,
        }
    }

    /// Builder-style: enables protocol logging.
    pub fn with_logging(mut self) -> Self {
        self.enable_logging = true;
        self
    }

    /// Builder-style: sets logging parameters and enables logging.
    pub fn with_logging_parameters(mut self, params: ProtocolLoggingParameters) -> Self {
        self.logging = params;
        self.with_logging()
    }
}

/// Authentication material for the locator: a login token or Steam credentials.
pub enum LocatorCredentials {
    LoginToken(CString),
    Steam(SteamCredentials),
}

impl LocatorCredentials {
    /// Convenience constructor; panics if `token` contains an interior NUL byte.
    pub fn login_token<S: AsRef<str>>(token: S) -> Self {
        LocatorCredentials::LoginToken(
            CString::new(token.as_ref()).expect("`token` contained null byte"),
        )
    }
}

impl LocatorCredentials {
    // Produces the discriminant plus BOTH credential structs (the SDK expects
    // both fields; the unused one is filled with null pointers).
    fn to_worker_sdk(&self) -> (u8, Worker_LoginTokenCredentials, Worker_SteamCredentials) {
        match self {
            LocatorCredentials::LoginToken(token) => (
                Worker_LocatorCredentialsTypes_WORKER_LOCATOR_LOGIN_TOKEN_CREDENTIALS as u8,
                Worker_LoginTokenCredentials {
                    token: token.as_ptr(),
                },
                Worker_SteamCredentials {
                    ticket: ::std::ptr::null(),
                    deployment_tag: ::std::ptr::null(),
                },
            ),
            LocatorCredentials::Steam(steam_credentials) => (
                Worker_LocatorCredentialsTypes_WORKER_LOCATOR_STEAM_CREDENTIALS as u8,
                Worker_LoginTokenCredentials {
                    token: ::std::ptr::null(),
                },
                Worker_SteamCredentials {
                    ticket: steam_credentials.ticket.as_ptr(),
                    deployment_tag: steam_credentials.deployment_tag.as_ptr(),
                },
            ),
        }
    }
}

/// Owned Steam authentication strings.
pub struct SteamCredentials {
    pub ticket: CString,
    pub deployment_tag: CString,
}

impl SteamCredentials {
    /// Panics if either argument contains an interior NUL byte.
    pub fn new<S: AsRef<str>, T: AsRef<str>>(ticket: S, deployment_tag: T) -> Self {
        SteamCredentials {
            ticket: CString::new(ticket.as_ref()).expect("`ticket` contained null byte"),
            deployment_tag: CString::new(deployment_tag.as_ref())
                .expect("`deployment_tag` contained null byte"),
        }
    }
}

/// Owned, Rust-native copy of a `Worker_Deployment` entry.
pub struct Deployment {
    pub deployment_name: String,
    pub assembly_name: String,
    pub description: String,
    pub users_connected: u32,
    pub users_capacity: u32,
}

impl Deployment {
    // Copies all strings out of the SDK struct so the result outlives the
    // callback that produced it.
    fn from_worker_sdk(deployment: &Worker_Deployment) -> Self {
        Deployment {
            deployment_name: cstr_to_string(deployment.deployment_name),
            assembly_name: cstr_to_string(deployment.assembly_name),
            description: cstr_to_string(deployment.description),
            users_connected: deployment.users_connected,
            users_capacity: deployment.users_capacity,
        }
    }
}

/// futures-0.1 future wrapping a `Worker_DeploymentListFuture*`. `consumed`
/// guards against polling the SDK future after its result was taken.
pub struct DeploymentListFuture {
    internal: *mut Worker_DeploymentListFuture,
    consumed: bool,
}

impl DeploymentListFuture {
    // C callback invoked by the SDK with the deployment list; `user_data`
    // points at an `Option<Result<Vec<Deployment>, String>>` owned by the
    // caller's stack frame (see `poll`/`wait`).
    extern "C" fn callback_handler(
        user_data: *mut ::std::os::raw::c_void,
        deployment_list: *const Worker_DeploymentList,
    ) {
        assert!(!deployment_list.is_null());
        unsafe {
            let list = *deployment_list;
            let data = &mut *(user_data as *mut Option<Result<Vec<Deployment>, String>>);
            // A non-null error string means the request failed.
            if !list.error.is_null() {
                let err = cstr_to_string(list.error);
                *data = Some(Err(err));
                return;
            }

            let deployments =
                ::std::slice::from_raw_parts(list.deployments, list.deployment_count as usize)
                    .iter()
                    .map(|deployment| Deployment::from_worker_sdk(deployment))
                    .collect::<Vec<Deployment>>();
            *data = Some(Ok(deployments));
        }
    }
}

impl Future for DeploymentListFuture {
    type Item = Vec<Deployment>;
    type Error = String;

    /// Non-blocking poll: asks the SDK with a zero timeout; `NotReady` if the
    /// callback did not fire.
    fn poll(&mut self) -> Result<Async<<Self as Future>::Item>, <Self as Future>::Error> {
        if self.consumed {
            return Err("DeploymentListFuture has already been consumed".to_owned());
        }

        assert!(!self.internal.is_null());
        let mut data: Option<Result<Vec<Deployment>, String>> = None;
        unsafe {
            // NOTE(review): `&0` is passed as the timeout pointer — presumably
            // a 0-millisecond (non-blocking) wait; confirm against the SDK's
            // `Worker_DeploymentListFuture_Get` signature.
            Worker_DeploymentListFuture_Get(
                self.internal,
                &0,
                &mut data as *mut _ as *mut ::std::os::raw::c_void,
                Some(DeploymentListFuture::callback_handler),
            );
        }

        // Only mark consumed once a result was actually delivered.
        data.map_or(Ok(Async::NotReady), |result| {
            self.consumed = true;
            result.map(Async::Ready)
        })
    }

    /// Blocking variant: a null timeout pointer makes the SDK wait until the
    /// result is available, so the callback is guaranteed to have run.
    fn wait(self) -> Result<<Self as Future>::Item, <Self as Future>::Error>
    where
        Self: Sized,
    {
        if self.consumed {
            return Err("DeploymentListFuture has already been consumed".to_owned());
        }

        assert!(!self.internal.is_null());
        let mut data: Option<Result<Vec<Deployment>, String>> = None;
        unsafe {
            Worker_DeploymentListFuture_Get(
                self.internal,
                ::std::ptr::null(),
                &mut data as *mut _ as *mut ::std::os::raw::c_void,
                Some(DeploymentListFuture::callback_handler),
            );
        }
        data.expect("Blocking call to Worker_DeploymentListFuture_Get did not trigger callback")
    }
}

impl Drop for DeploymentListFuture {
    // Releases the SDK future handle.
    fn drop(&mut self) {
        if !self.internal.is_null() {
            unsafe { Worker_DeploymentListFuture_Destroy(self.internal) }
        }
    }
}

/// Callback invoked with queue-position updates (Ok) or an error (Err);
/// returning false cancels queueing.
pub type QueueStatusCallback = fn(&Result<u32, String>) -> bool;

// C-to-Rust bridge for queue-status callbacks: `user_data` points at a
// `QueueStatusCallback`; the u8 return is the SDK's C boolean.
pub(crate) extern "C" fn queue_status_callback_handler(
    user_data: *mut ::std::os::raw::c_void,
    queue_status: *const Worker_QueueStatus,
) -> u8 {
    unsafe {
        let status = *queue_status;
        let callback = *(user_data as *mut QueueStatusCallback);
        if status.error.is_null() {
            return callback(&Ok(status.position_in_queue)) as u8;
        }

        let str = CStr::from_ptr(status.error);
        callback(&Err(str.to_string_lossy().to_string())) as u8
    }
}
/**
 * wNear NEP21 Token contract
 *
 * NEP21 Based on:
 * https://github.com/near/near-sdk-rs/blob/ab5c01ca4c61a6414484b69302b84e5ce3113f2f/examples/fungible-token/src/lib.rs
 *
 * The aim of the contract is to enable the wrapping of the native Ⓝ token into a NEP21 compatible token.
 * It's the equivalent of wrapping ETH into wETH via the WETH. This contract is based on the functionality
 * of the WETH9 and WETH10 Solidity smart contracts.
 */
use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
use near_sdk::collections::LookupMap;
use near_sdk::json_types::U128;
use near_sdk::{env, near_bindgen, AccountId, Balance, Promise, StorageUsage};

#[global_allocator]
static ALLOC: near_sdk::wee_alloc::WeeAlloc<'_> = near_sdk::wee_alloc::WeeAlloc::INIT;

/// Price per 1 byte of storage from mainnet genesis config.
const STORAGE_PRICE_PER_BYTE: Balance = 100_000_000_000_000_000_000;

/// Contains balance and allowances information for one account.
#[derive(BorshDeserialize, BorshSerialize)]
pub struct Account {
    /// Current account balance.
    pub balance: Balance,
    /// Escrow Account ID hash to the allowance amount.
    /// Allowance is the amount of tokens the Escrow Account ID can spent on behalf of the account
    /// owner.
    pub allowances: LookupMap<Vec<u8>, Balance>,
    /// The number of allowances currently stored in `allowances`; lets
    /// `set_account` detect a fully-empty record without iterating the map.
    pub num_allowances: u32,
}

impl Account {
    /// Initializes a new Account with 0 balance and no allowances for a given `account_hash`.
    ///
    /// `account_hash` doubles as the `LookupMap` storage-key prefix, keeping
    /// each account's allowance entries in a distinct key space.
    pub fn new(account_hash: Vec<u8>) -> Self {
        Self { balance: 0, allowances: LookupMap::new(account_hash), num_allowances: 0 }
    }

    /// Sets allowance for account `escrow_account_id` to `allowance`.
    ///
    /// An allowance of 0 removes the stored entry entirely (freeing storage);
    /// `num_allowances` is kept in sync with the number of live entries.
    pub fn set_allowance(&mut self, escrow_account_id: &AccountId, allowance: Balance) {
        let escrow_hash = env::sha256(escrow_account_id.as_bytes());
        if allowance > 0 {
            if self.allowances.insert(&escrow_hash, &allowance).is_none() {
                self.num_allowances += 1;
            }
        } else {
            if self.allowances.remove(&escrow_hash).is_some() {
                self.num_allowances -= 1;
            }
        }
    }

    /// Returns the allowance of account `escrow_account_id` (0 when none is stored).
    pub fn get_allowance(&self, escrow_account_id: &AccountId) -> Balance {
        let escrow_hash = env::sha256(escrow_account_id.as_bytes());
        self.allowances.get(&escrow_hash).unwrap_or(0)
    }
}

#[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize)]
pub struct FungibleToken {
    /// sha256(AccountID) -> Account details.
    pub accounts: LookupMap<Vec<u8>, Account>,
    /// Total supply of the all token.
    pub total_supply: Balance,
}

impl Default for FungibleToken {
    // State must be created through `new()`; a default-constructed contract
    // would otherwise silently start with empty state.
    fn default() -> Self {
        env::panic(b"Contract should be initialized before usage.")
    }
}

#[near_bindgen]
impl FungibleToken {
    /// One-time initializer: empty account map (key prefix "a"), zero supply.
    #[init]
    pub fn new() -> Self {
        assert!(!env::state_exists(), "Already initialized");
        Self { accounts: LookupMap::new(b"a".to_vec()), total_supply: 0u128 }
    }

    /// Deposit NEAR and send wNear tokens to the predecessor account
    /// Requirements:
    /// * `amount` must be a positive integer
    /// * Caller of the method has to attach deposit enough to cover:
    /// * The `amount` of wNear tokens being minted, and
    /// * The storage difference at the fixed storage price defined in the contract.
    #[payable]
    pub fn deposit(&mut self, amount: U128) {
        // Proxy through to deposit_to() making the receiver_id the predecessor
        self.deposit_to(env::predecessor_account_id(), amount);
    }

    /// Deposit NEAR from the predecessor account and send wNear to a specific receiver_id
    /// Requirements:
    /// * `receiver_id` cannot be this contract
    /// * `receiver_id` must be a valid account Id
    /// * `amount` must be a positive integer
    /// * Caller of the method has to attach deposit enough to cover:
    /// * The `amount` of wNear tokens being minted, and
    /// * The storage difference at the fixed storage price defined in the contract.
    #[payable]
    pub fn deposit_to(&mut self, receiver_id: AccountId, amount: U128) {
        // Measured before minting so the storage delta can be charged below.
        let initial_storage = env::storage_usage();

        // As attached deposit includes tokens for storage, deposit amount needs to be explicit
        let amount: Balance = amount.into();
        if amount == 0 {
            env::panic(b"Deposit amount must be greater than zero");
        }

        assert!(
            env::is_valid_account_id(receiver_id.as_bytes()),
            "New owner's account ID is invalid"
        );
        assert_ne!(
            receiver_id,
            env::current_account_id(),
            "Invalid transfer to this contract"
        );

        // Mint to receiver_id
        // NOTE(review): `amount.clone()` is a no-op copy — Balance (u128) is Copy.
        self.mint(&receiver_id, amount.clone());

        // Check we have enough attached deposit
        let current_storage = env::storage_usage();
        let attached_deposit = env::attached_deposit();
        let required_deposit_for_tokens_and_storage = if current_storage > initial_storage {
            (Balance::from(current_storage - initial_storage) * STORAGE_PRICE_PER_BYTE) + amount
        } else {
            amount
        };
        // NOTE(review): the duplicated "is is" in this message is asserted
        // verbatim by the unit tests — keep message and tests in sync if fixed.
        assert!(
            attached_deposit >= required_deposit_for_tokens_and_storage,
            "The required attached deposit is {}, but the given attached deposit is is {}",
            required_deposit_for_tokens_and_storage,
            attached_deposit,
        );

        env::log(format!("{} wNear tokens minted", amount).as_bytes());

        // Send back any money that is sent over value for required_deposit_for_tokens_and_storage
        let refund_amount = if attached_deposit > required_deposit_for_tokens_and_storage {
            attached_deposit - required_deposit_for_tokens_and_storage
        } else {
            0
        };
        if refund_amount > 0 {
            env::log(format!("Refunding {} excess tokens", refund_amount).as_bytes());
            Promise::new(env::predecessor_account_id()).transfer(refund_amount);
        }
    }

    /// Unwrap wNear and send Near back to the predecessor account
    /// Requirements:
    /// * `amount` must be a positive integer
    /// * Caller must have a balance that is greater than or equal to `amount`
    /// * Caller of the method has to attach deposit enough to cover storage difference at the
    /// fixed storage price defined in the contract.
    #[payable]
    pub fn withdraw(&mut self, amount: U128) {
        // Proxy through to withdraw_to() sending the Near to the predecessor account
        self.withdraw_to(env::predecessor_account_id(), amount);
    }

    /// Unwraps wNear from the predecessor account and sends the Near to a specific receiver_id
    /// Requirements:
    /// * `receiver_id` cannot be this contract
    /// * `receiver_id` must be a valid account Id
    /// * `amount` should be a positive integer
    /// * Caller must have a balance that is greater than or equal to `amount`.
    /// * Caller of the method has to attach deposit enough to cover storage difference at the
    /// fixed storage price defined in the contract.
    #[payable]
    pub fn withdraw_to(&mut self, receiver_id: AccountId, amount: U128) {
        // Measured up front so refund_storage can price the storage delta.
        let initial_storage = env::storage_usage();

        let amount: Balance = amount.into();
        if amount == 0 {
            env::panic(b"Withdrawal amount must be greater than zero");
        }

        assert!(
            env::is_valid_account_id(receiver_id.as_bytes()),
            "New owner's account ID is invalid"
        );
        assert_ne!(
            receiver_id,
            env::current_account_id(),
            "Invalid transfer to this contract"
        );

        // Decrease the predecessor's wNear balance and reduce total supply
        self.burn(&env::predecessor_account_id(), amount.clone());

        // Send near `amount` to receiver_id
        env::log(format!("Withdrawal of {} wNear", amount).as_bytes());
        Promise::new(receiver_id).transfer(amount);

        self.refund_storage(initial_storage);
    }

    /// The withdraw_from function allows to unwrap wNear from an owner wallet to a receiver_id wallet
    /// Requirements:
    /// * `receiver_id` of the Near tokens cannot be this contract
    /// * `receiver_id` must be a valid account Id
    /// * `receiver_id` cannot be the same as `owner_id`. Use `withdraw()` in that scenario.
    /// * `amount` should be a positive integer.
    /// * `owner_id` should have balance on the account greater or equal than the withdraw `amount`.
    /// * If this function is called by an escrow account (`owner_id != predecessor_account_id`),
    /// then the allowance of the caller of the function (`predecessor_account_id`) on
    /// the account of `owner_id` should be greater or equal than the transfer `amount`.
    /// * Alternatively, if they have infinite approval, their approval amount wont be reduced.
    /// * Caller of the method has to attach deposit enough to cover storage difference at the
    /// fixed storage price defined in the contract.
    #[payable]
    pub fn withdraw_from(&mut self, owner_id: AccountId, receiver_id: AccountId, amount: U128) {
        let initial_storage = env::storage_usage();

        let amount: Balance = amount.into();
        if amount == 0 {
            env::panic(b"Withdrawal amount must be greater than zero");
        }

        assert!(
            env::is_valid_account_id(receiver_id.as_bytes()),
            "New owner's account ID is invalid"
        );
        assert_ne!(
            receiver_id,
            env::current_account_id(),
            "Invalid transfer to this contract"
        );
        assert_ne!(
            owner_id, receiver_id,
            "The new owner should be different from the current owner"
        );

        // If transferring by allowance, need to check and update allowance.
        let escrow_account_id = env::predecessor_account_id();
        if escrow_account_id != owner_id {
            let mut account = self.get_account(&owner_id);
            let allowance = account.get_allowance(&escrow_account_id);
            // u128::MAX is treated as "infinite" approval and never decremented.
            if allowance != std::u128::MAX {
                if allowance < amount {
                    env::panic(b"Not enough allowance");
                }
                account.set_allowance(&escrow_account_id, allowance - amount);
            }
            // NOTE(review): unlike transfer_from, the modified `account` is not
            // passed to set_account here before `burn` re-reads the account —
            // the allowance value itself appears to persist via the LookupMap
            // write inside set_allowance, but the updated `num_allowances`
            // counter is dropped. Confirm this is intended.
        }

        self.burn(&owner_id, amount.clone());

        // Send near `amount` to receiver_id
        env::log(format!("Withdrawal of {} wNear", amount).as_bytes());
        Promise::new(receiver_id).transfer(amount);

        self.refund_storage(initial_storage);
    }

    /// Increments the `allowance` for `escrow_account_id` by `amount` on the account of the caller of this contract
    /// (`predecessor_id`) who is the balance owner.
    /// Requirements:
    /// * Caller of the method has to attach deposit enough to cover storage difference at the
    /// fixed storage price defined in the contract.
    #[payable]
    pub fn inc_allowance(&mut self, escrow_account_id: AccountId, amount: U128) {
        let initial_storage = env::storage_usage();
        assert!(
            env::is_valid_account_id(escrow_account_id.as_bytes()),
            "Escrow account ID is invalid"
        );
        let owner_id = env::predecessor_account_id();
        if escrow_account_id == owner_id {
            env::panic(b"Can not increment allowance for yourself");
        }
        let mut account = self.get_account(&owner_id);
        let current_allowance = account.get_allowance(&escrow_account_id);
        // Saturating add: clamping at u128::MAX, which doubles as the
        // "infinite approval" sentinel elsewhere in this contract.
        account.set_allowance(&escrow_account_id, current_allowance.saturating_add(amount.0));
        self.set_account(&owner_id, &account);
        self.refund_storage(initial_storage);
    }

    /// Decrements the `allowance` for `escrow_account_id` by `amount` on the account of the caller of this contract
    /// (`predecessor_id`) who is the balance owner.
    /// Requirements:
    /// * Caller of the method has to attach deposit enough to cover storage difference at the
    /// fixed storage price defined in the contract.
    #[payable]
    pub fn dec_allowance(&mut self, escrow_account_id: AccountId, amount: U128) {
        let initial_storage = env::storage_usage();
        assert!(
            env::is_valid_account_id(escrow_account_id.as_bytes()),
            "Escrow account ID is invalid"
        );
        let owner_id = env::predecessor_account_id();
        if escrow_account_id == owner_id {
            env::panic(b"Can not decrement allowance for yourself");
        }
        let mut account = self.get_account(&owner_id);
        let current_allowance = account.get_allowance(&escrow_account_id);
        // Saturating sub: decrementing below zero clamps to 0, which removes
        // the stored entry (see Account::set_allowance).
        account.set_allowance(&escrow_account_id, current_allowance.saturating_sub(amount.0));
        self.set_account(&owner_id, &account);
        self.refund_storage(initial_storage);
    }

    /// Transfers the `amount` of tokens from `owner_id` to the `new_owner_id`.
    /// Requirements:
    /// * Recipient of the wNear tokens cannot be this contract
    /// * `amount` should be a positive integer.
    /// * `owner_id` should have balance on the account greater or equal than the transfer `amount`.
    /// * If this function is called by an escrow account (`owner_id != predecessor_account_id`),
    /// then the allowance of the caller of the function (`predecessor_account_id`) on
    /// the account of `owner_id` should be greater or equal than the transfer `amount`.
    /// * Alternatively, if they have infinite approval, their approval amount wont be reduced.
    /// * Caller of the method has to attach deposit enough to cover storage difference at the
    /// fixed storage price defined in the contract.
    #[payable]
    pub fn transfer_from(&mut self, owner_id: AccountId, new_owner_id: AccountId, amount: U128) {
        let initial_storage = env::storage_usage();

        // Stop people accidentally sending tokens to the contract
        assert_ne!(
            new_owner_id,
            env::current_account_id(),
            "Invalid transfer to this contract"
        );
        assert!(
            env::is_valid_account_id(new_owner_id.as_bytes()),
            "New owner's account ID is invalid"
        );
        let amount = amount.into();
        if amount == 0 {
            env::panic(b"Can't transfer 0 tokens");
        }
        assert_ne!(
            owner_id, new_owner_id,
            "The new owner should be different from the current owner"
        );

        // Retrieving the account from the state.
        let mut account = self.get_account(&owner_id);

        // Checking and updating unlocked balance
        if account.balance < amount {
            env::panic(b"Not enough balance");
        }
        account.balance -= amount;

        // If transferring by escrow, need to check and update allowance.
        let escrow_account_id = env::predecessor_account_id();
        if escrow_account_id != owner_id {
            let allowance = account.get_allowance(&escrow_account_id);
            // u128::MAX is treated as "infinite" approval and never decremented.
            if allowance != std::u128::MAX {
                if allowance < amount {
                    env::panic(b"Not enough allowance");
                }
                account.set_allowance(&escrow_account_id, allowance - amount);
            }
        }

        // Saving the account back to the state.
        self.set_account(&owner_id, &account);

        // Deposit amount to the new owner and save the new account to the state.
        let mut new_account = self.get_account(&new_owner_id);
        new_account.balance += amount;
        self.set_account(&new_owner_id, &new_account);

        self.refund_storage(initial_storage);
    }

    /// Transfer `amount` of tokens from the caller of the contract (`predecessor_id`) to
    /// `new_owner_id`.
    /// Act the same was as `transfer_from` with `owner_id` equal to the caller of the contract
    /// (`predecessor_id`).
    /// Requirements:
    /// * Caller of the method has to attach deposit enough to cover storage difference at the
    /// fixed storage price defined in the contract.
    #[payable]
    pub fn transfer(&mut self, new_owner_id: AccountId, amount: U128) {
        // NOTE: New owner's Account ID checked in transfer_from.
        // Storage fees are also refunded in transfer_from.
        self.transfer_from(env::predecessor_account_id(), new_owner_id, amount);
    }

    /// Returns total supply of tokens.
    pub fn get_total_supply(&self) -> U128 {
        self.total_supply.into()
    }

    /// Returns balance of the `owner_id` account.
    pub fn get_balance(&self, owner_id: AccountId) -> U128 {
        self.get_account(&owner_id).balance.into()
    }

    /// Returns current allowance of `escrow_account_id` for the account of `owner_id`.
    ///
    /// NOTE: Other contracts should not rely on this information, because by the moment a contract
    /// receives this information, the allowance may already be changed by the owner.
    /// So this method should only be used on the front-end to see the current allowance.
    pub fn get_allowance(&self, owner_id: AccountId, escrow_account_id: AccountId) -> U128 {
        assert!(
            env::is_valid_account_id(escrow_account_id.as_bytes()),
            "Escrow account ID is invalid"
        );
        self.get_account(&owner_id).get_allowance(&escrow_account_id).into()
    }
}

// Internal helpers — this impl has no #[near_bindgen], so these are not
// exported as contract methods.
impl FungibleToken {
    /// Internal method for minting an `amount` to `receiver_id` AccountId
    fn mint(&mut self, receiver_id: &AccountId, amount: Balance) {
        if self.total_supply == std::u128::MAX {
            env::panic(b"Total supply limit reached");
        }
        // Overflow guard: total_supply + amount must not exceed u128::MAX.
        if std::u128::MAX - self.total_supply < amount {
            env::panic(b"Amount will exceed max permitted total supply");
        }

        let mut account = self.get_account(&receiver_id);
        account.balance += amount;
        self.set_account(&receiver_id, &account);

        // Increase total supply
        self.total_supply += amount;
    }

    /// Internal method for burning an `amount` from `owner_id` AccountId
    fn burn(&mut self, owner_id: &AccountId, amount: Balance) {
        let mut account = self.get_account(&owner_id);
        if account.balance < amount {
            env::panic(b"Burning more than the account balance");
        }
        account.balance -= amount;
        self.set_account(&owner_id, &account);

        // Decrease total supply
        self.total_supply -= amount;
    }

    /// Helper method to get the account details for `owner_id`.
    /// Unknown accounts yield a fresh zero-balance Account (not persisted
    /// until set_account is called).
    fn get_account(&self, owner_id: &AccountId) -> Account {
        assert!(env::is_valid_account_id(owner_id.as_bytes()), "Owner's account ID is invalid");
        let account_hash = env::sha256(owner_id.as_bytes());
        self.accounts.get(&account_hash).unwrap_or_else(|| Account::new(account_hash))
    }

    /// Helper method to set the account details for `owner_id` to the state.
    /// Fully-empty accounts (no balance, no allowances) are removed instead,
    /// freeing their storage.
    fn set_account(&mut self, owner_id: &AccountId, account: &Account) {
        let account_hash = env::sha256(owner_id.as_bytes());
        if account.balance > 0 || account.num_allowances > 0 {
            self.accounts.insert(&account_hash, &account);
        } else {
            self.accounts.remove(&account_hash);
        }
    }

    /// Refunds any attached deposit beyond the cost of storage growth since
    /// `initial_storage`; if storage shrank, the freed bytes' cost is refunded
    /// on top of the attached deposit.
    fn refund_storage(&self, initial_storage: StorageUsage) {
        let current_storage = env::storage_usage();
        let attached_deposit = env::attached_deposit();
        let refund_amount = if current_storage > initial_storage {
            let required_deposit =
                Balance::from(current_storage - initial_storage) * STORAGE_PRICE_PER_BYTE;
            assert!(
                required_deposit <= attached_deposit,
                "The required attached deposit is {}, but the given attached deposit is is {}",
                required_deposit,
                attached_deposit,
            );
            attached_deposit - required_deposit
        } else {
            attached_deposit
                + Balance::from(initial_storage - current_storage) * STORAGE_PRICE_PER_BYTE
        };
        if refund_amount > 0 {
            env::log(format!("Refunding {} tokens for storage", refund_amount).as_bytes());
            Promise::new(env::predecessor_account_id()).transfer(refund_amount);
        }
    }
}

#[cfg(not(target_arch = "wasm32"))]
#[cfg(test)]
mod w_near_tests {
    use near_sdk::MockedBlockchain;
    use near_sdk::{testing_env, VMContext};

    use super::*;

    // Convenience zero literal used throughout the tests.
    const ZERO_U128: Balance = 0u128;

    // Test account fixtures.
    fn alice() -> AccountId {
        "alice.near".to_string()
    }
    fn bob() -> AccountId {
        "bob.near".to_string()
    }
    fn carol() -> AccountId {
        "carol.near".to_string()
    }
    // The contract's own account id.
    fn w_near() -> AccountId {
        "w_near.near".to_string()
    }
    // Empty string — rejected by the is_valid_account_id asserts.
    fn invalid_account_id() -> AccountId {
        "".to_string()
    }

    /// Builds a mock VM context with `predecessor_account_id` as the caller.
    fn get_context(predecessor_account_id: AccountId) -> VMContext {
        VMContext {
            current_account_id: w_near(),
            signer_account_id: bob(),
            signer_account_pk: vec![0, 1, 2],
            predecessor_account_id,
            input: vec![],
            block_index: 0,
            block_timestamp: 0,
            account_balance: 1000 * 10u128.pow(24),
            account_locked_balance: 0,
            storage_usage: 10u64.pow(6),
            attached_deposit: 0,
            prepaid_gas: 10u64.pow(18),
            random_seed: vec![0, 1, 2],
            is_view: false,
            output_data_receivers: vec![],
            epoch_height: 0,
        }
    }

    #[test]
    fn contract_creation_with_new() {
        testing_env!(get_context(carol()));
        //let total_supply = 1_000_000_000_000_000u128;
        let contract = FungibleToken::new();
        assert_eq!(contract.get_total_supply().0, ZERO_U128);
        assert_eq!(contract.get_balance(alice()).0, ZERO_U128);
        assert_eq!(contract.get_balance(bob()).0, ZERO_U128);
        assert_eq!(contract.get_balance(carol()).0, ZERO_U128);
    }

    #[test]
    #[should_panic(expected = "Contract should be initialized before usage.")]
    fn default_fails() {
        testing_env!(get_context(carol()));
        let _contract = FungibleToken::default();
    }

    #[test]
    fn deposit() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        // Attach enough to cover the minted amount plus (apparently) 133 bytes
        // of new storage.
        context.attached_deposit = deposit_amount + (133 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        contract.deposit(deposit_amount.into());
        //assert_eq!(contract.get_near_balance().0, 0);
        // TODO: check contract balance == deposit amount
        assert_eq!(contract.get_balance(carol()).0, deposit_amount);
        assert_eq!(contract.get_total_supply().0, deposit_amount);
    }

    #[test]
    fn deposit_to_bob_from_carol() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = deposit_amount + (133 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        contract.deposit_to(bob(), deposit_amount.into());
        //assert_eq!(contract.get_near_balance().0, 0);
        // TODO: check contract balance == deposit amount
        assert_eq!(contract.get_balance(carol()).0, 0);
        assert_eq!(contract.get_balance(bob()).0, deposit_amount);
        assert_eq!(contract.get_total_supply().0, deposit_amount);
    }

    #[test]
    #[should_panic(expected = "Deposit amount must be greater than zero")]
    fn deposit_fails_when_amount_is_zero() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.deposit_to(bob(), ZERO_U128.into());
    }

    #[test]
    #[should_panic(expected = "New owner's account ID is invalid")]
    fn deposit_to_fails_when_recipient_is_invalid() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.deposit_to(invalid_account_id(), (5u128).into());
    }

    #[test]
    #[should_panic(expected = "Invalid transfer to this contract")]
    fn deposit_to_fails_when_recipient_is_w_near_contract() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.deposit_to(w_near(), (5u128).into());
    }

    #[test]
    #[should_panic(expected = "The required attached deposit is 13300001000000000000000, but the given attached deposit is is 13300000000000000000000")]
    fn deposit_to_fails_when_the_required_deposit_is_not_attached() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = 133 * STORAGE_PRICE_PER_BYTE; // attach required storage but not deposit
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        contract.deposit(deposit_amount.into());
    }

    #[test]
    fn simple_deposit_and_withdrawal() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = deposit_amount.clone() + (133 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        contract.deposit(deposit_amount.clone().into());
        //assert_eq!(contract.get_near_balance().0, 0);
        // TODO: check contract balance == deposit amount
        assert_eq!(contract.get_balance(carol()).0, deposit_amount);
        assert_eq!(contract.get_total_supply().0, deposit_amount);

        contract.withdraw(deposit_amount.clone().into());
        assert_eq!(contract.get_balance(carol()).0, 0);
        assert_eq!(contract.get_total_supply().0, 0);
    }

    #[test]
    fn simple_deposit_by_carol_and_withdrawal_to_alice() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = deposit_amount.clone() + (133 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        contract.deposit(deposit_amount.clone().into());
        //assert_eq!(contract.get_near_balance().0, 0);
        // TODO: check contract balance == deposit amount
        assert_eq!(contract.get_balance(carol()).0, deposit_amount);
        assert_eq!(contract.get_total_supply().0, deposit_amount);

        contract.withdraw_to(alice(), deposit_amount.clone().into());
        // TODO: check alice near balance has increased
        assert_eq!(contract.get_balance(carol()).0, 0);
        assert_eq!(contract.get_total_supply().0, 0);
    }

    #[test]
    #[should_panic(expected = "Withdrawal amount must be greater than zero")]
    fn withdraw_fails_when_withdrawal_amount_is_zero() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.withdraw(ZERO_U128.into());
    }

    #[test]
    #[should_panic(expected = "New owner's account ID is invalid")]
    fn withdraw_to_fails_when_recipient_is_invalid() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.withdraw_to(invalid_account_id(), (5u128).into());
    }

    #[test]
    #[should_panic(expected = "Invalid transfer to this contract")]
    fn withdraw_to_fails_when_recipient_is_w_near_contract() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.withdraw_to(w_near(), (5u128).into());
    }

    #[test]
    #[should_panic(expected = "Burning more than the account balance")]
    fn withdraw_to_fails_when_carol_tries_to_withdraw_more_than_her_w_near_balance() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = deposit_amount.clone() + (133 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        contract.deposit(deposit_amount.clone().into());
        //assert_eq!(contract.get_near_balance().0, 0);
        // TODO: check contract balance == deposit amount
        assert_eq!(contract.get_balance(carol()).0, deposit_amount);
        assert_eq!(contract.get_total_supply().0, deposit_amount);

        contract.withdraw((deposit_amount.clone()+1).into());
    }

    #[test]
    #[should_panic(expected = "Withdrawal amount must be greater than zero")]
    fn withdraw_from_fails_when_the_withdrawal_amount_is_zero() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.withdraw_from(alice(), carol(), ZERO_U128.into());
    }

    #[test]
    #[should_panic(expected = "New owner's account ID is invalid")]
    fn withdraw_from_fails_when_the_recipient_is_invalid() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.withdraw_from(alice(), invalid_account_id(), (5u128).into());
    }

    #[test]
    #[should_panic(expected = "Invalid transfer to this contract")]
    fn withdraw_from_fails_when_the_recipient_is_the_w_near_contract() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.withdraw_from(alice(), w_near(), (5u128).into());
    }

    #[test]
    #[should_panic(expected = "The new owner should be different from the current owner")]
    fn withdraw_from_fails_when_the_owner_and_recipient_are_the_same() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.withdraw_from(alice(), alice(), (5u128).into());
    }

    #[test]
    #[should_panic(expected = "Not enough allowance")]
    fn withdraw_from_fails_when_the_escrow_account_does_not_have_enough_allowance() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();
        context.attached_deposit = 0;
        testing_env!(context.clone());
        contract.withdraw_from(alice(), bob(), (5u128).into());
    }

    #[test]
    #[should_panic(expected = "Burning more than the account balance")]
    fn withdraw_from_fails_when_trying_to_withdraw_more_than_owners_balance() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = deposit_amount.clone() + (1000 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        // get some wNear for carol
        contract.deposit(deposit_amount.clone().into());
        //assert_eq!(contract.get_near_balance().0, 0);
        // TODO: check contract balance == deposit amount
        assert_eq!(contract.get_balance(carol()).0, deposit_amount);
        assert_eq!(contract.get_total_supply().0, deposit_amount);

        // give bob allowance to withdraw some tokens
        assert_eq!(contract.get_allowance(carol(), bob()), ZERO_U128.into());
        contract.inc_allowance(bob(), std::u128::MAX.into());
        assert_eq!(contract.get_allowance(carol(), bob()), std::u128::MAX.into());

        // switch to a context with bob
        let mut context = get_context(bob());
        testing_env!(context.clone());
        context.attached_deposit = 1000 * STORAGE_PRICE_PER_BYTE;
        testing_env!(context.clone());
        contract.withdraw_from(carol(), bob(), (deposit_amount.clone()+1).into());
    }

    #[test]
    fn withdraw_from_with_correct_allowance_should_be_successful() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = deposit_amount.clone() + (1000 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        // get some wNear for carol
        contract.deposit(deposit_amount.clone().into());
        //assert_eq!(contract.get_near_balance().0, 0);
        // TODO: check contract balance == deposit amount
        assert_eq!(contract.get_balance(carol()).0, deposit_amount);
        assert_eq!(contract.get_total_supply().0, deposit_amount);

        // give bob allowance to withdraw some tokens
        assert_eq!(contract.get_allowance(carol(), bob()), ZERO_U128.into());
        let withdrawal_amount = deposit_amount.clone() / 2;
        contract.inc_allowance(bob(), withdrawal_amount.clone().into());
        assert_eq!(contract.get_allowance(carol(), bob()), withdrawal_amount.clone().into());

        // switch to a context with bob
        let mut context = get_context(bob());
        testing_env!(context.clone());
        context.attached_deposit = 1000 * STORAGE_PRICE_PER_BYTE;
        testing_env!(context.clone());
        assert_eq!(contract.get_balance(bob()).0, ZERO_U128.into());
        contract.withdraw_from(carol(), bob(), withdrawal_amount.clone().into());
        //todo: check bob's near balance
        assert_eq!(contract.get_balance(bob()).0, ZERO_U128.into());
        assert_eq!(contract.get_allowance(carol(), bob()), ZERO_U128.into());
        assert_eq!(contract.get_balance(carol()).0, withdrawal_amount.clone().into());
        assert_eq!(contract.get_total_supply().0, withdrawal_amount.clone().into());
    }

    #[test]
    fn withdraw_from_with_infinite_allowance_should_not_reduce_allowance() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = deposit_amount.clone() + (1000 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        // get some wNear for carol
        contract.deposit(deposit_amount.clone().into());
        //assert_eq!(contract.get_near_balance().0, 0);
        // TODO: check contract balance == deposit amount
        assert_eq!(contract.get_balance(carol()).0, deposit_amount);
        assert_eq!(contract.get_total_supply().0, deposit_amount);

        // give bob allowance to withdraw some tokens
        assert_eq!(contract.get_allowance(carol(), bob()), ZERO_U128.into());
        let withdrawal_amount = deposit_amount.clone() / 2;
        contract.inc_allowance(bob(), std::u128::MAX.into());
        assert_eq!(contract.get_allowance(carol(), bob()), std::u128::MAX.into());

        // switch to a context with bob
        let mut context = get_context(bob());
        testing_env!(context.clone());
        context.attached_deposit = 1000 * STORAGE_PRICE_PER_BYTE;
        testing_env!(context.clone());
        assert_eq!(contract.get_balance(bob()).0, ZERO_U128.into());
        contract.withdraw_from(carol(), bob(), withdrawal_amount.clone().into());
        //todo: check bob's near balance
        assert_eq!(contract.get_balance(bob()).0, ZERO_U128.into());
        assert_eq!(contract.get_allowance(carol(), bob()), std::u128::MAX.into());
        assert_eq!(contract.get_balance(carol()).0, withdrawal_amount.clone().into());
        assert_eq!(contract.get_total_supply().0, withdrawal_amount.clone().into());
    }

    #[test]
    fn transfer_with_infinite_allowance_should_not_reduce_allowance() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = deposit_amount.clone() + (1000 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        // get some wNear for carol
        contract.deposit(deposit_amount.clone().into());
        //assert_eq!(contract.get_near_balance().0, 0);
        // TODO: check contract balance == deposit amount
        assert_eq!(contract.get_balance(carol()).0, deposit_amount);
        assert_eq!(contract.get_total_supply().0, deposit_amount);

        // give bob allowance to withdraw some tokens
        assert_eq!(contract.get_allowance(carol(), bob()), ZERO_U128.into());
        contract.inc_allowance(bob(), std::u128::MAX.into());
        assert_eq!(contract.get_allowance(carol(), bob()), std::u128::MAX.into());

        // switch to a context with bob
        let mut context = get_context(bob());
        testing_env!(context.clone());
        context.attached_deposit = 1000 * STORAGE_PRICE_PER_BYTE;
        testing_env!(context.clone());
        assert_eq!(contract.get_balance(bob()).0, ZERO_U128.into());
        let transfer_amount = deposit_amount.clone() / 2;
        contract.transfer_from(carol(), bob(), transfer_amount.clone().into());
        assert_eq!(contract.get_balance(bob()).0, transfer_amount.clone().into());
        assert_eq!(contract.get_allowance(carol(), bob()), std::u128::MAX.into());
        assert_eq!(contract.get_balance(carol()).0, transfer_amount.clone().into());
        assert_eq!(contract.get_total_supply().0, deposit_amount.clone().into());
    }

    #[test]
    #[should_panic(expected = "Burning more than the account balance")]
    fn withdraw_from_with_infinite_allowance_should_not_withdraw_more_than_balance() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = deposit_amount.clone() + (1000 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        //assert_eq!(contract.get_near_balance().0, 0);
        // get some wNear for carol
        contract.deposit(deposit_amount.clone().into());
        //assert_eq!(contract.get_near_balance().0, 0);
        // TODO: check contract balance == deposit amount
        assert_eq!(contract.get_balance(carol()).0, deposit_amount);
        assert_eq!(contract.get_total_supply().0, deposit_amount);

        // give bob allowance to withdraw some tokens
        assert_eq!(contract.get_allowance(carol(), bob()), ZERO_U128.into());
        contract.inc_allowance(bob(), std::u128::MAX.into());
        assert_eq!(contract.get_allowance(carol(), bob()), std::u128::MAX.into());

        // switch to a context with bob
        let mut context = get_context(bob());
        testing_env!(context.clone());
        context.attached_deposit = 1000 * STORAGE_PRICE_PER_BYTE;
        testing_env!(context.clone());
        assert_eq!(contract.get_balance(bob()).0, ZERO_U128.into());
        // first withdrawal empties the balance; the second must panic
        contract.withdraw_from(carol(), bob(), deposit_amount.clone().into());
        contract.withdraw_from(carol(), bob(), deposit_amount.clone().into());
    }

    #[test]
    fn transfer_after_deposit() {
        let mut context = get_context(carol());
        testing_env!(context.clone());
        let mut contract = FungibleToken::new();
        context.storage_usage = env::storage_usage();

        let deposit_amount = 1_000_000_000_000_000u128;
        context.attached_deposit = deposit_amount + (1000 * STORAGE_PRICE_PER_BYTE);
        testing_env!(context.clone());

        // get 
some wNear tokens contract.deposit(deposit_amount.into()); let transfer_amount = 1_000_000_000_000_000u128 / 3; contract.transfer(bob(), transfer_amount.into()); context.storage_usage = env::storage_usage(); context.account_balance = env::account_balance(); context.is_view = true; context.attached_deposit = 0; testing_env!(context.clone()); assert_eq!(contract.get_balance(carol()).0, (1_000_000_000_000_000u128 - transfer_amount)); assert_eq!(contract.get_balance(bob()).0, transfer_amount); } #[test] #[should_panic(expected = "The new owner should be different from the current owner")] fn transfer_fail_self() { let mut context = get_context(carol()); testing_env!(context.clone()); let mut contract = FungibleToken::new(); context.storage_usage = env::storage_usage(); let deposit_amount = 1_000_000_000_000_000u128; context.attached_deposit = deposit_amount + (1000 * STORAGE_PRICE_PER_BYTE); testing_env!(context.clone()); // get some wNear tokens contract.deposit(deposit_amount.into()); let transfer_amount = deposit_amount.clone() / 2; contract.transfer(carol(), transfer_amount.into()); } #[test] #[should_panic(expected = "Invalid transfer to this contract")] fn transfer_fail_to_contract() { let mut context = get_context(carol()); testing_env!(context.clone()); let mut contract = FungibleToken::new(); context.storage_usage = env::storage_usage(); let deposit_amount = 1_000_000_000_000_000u128; context.attached_deposit = deposit_amount + (1000 * STORAGE_PRICE_PER_BYTE); testing_env!(context.clone()); // get some wNear tokens contract.deposit(deposit_amount.into()); let transfer_amount = deposit_amount.clone() / 2; contract.transfer(w_near(), transfer_amount.into()); } #[test] #[should_panic(expected = "Can not increment allowance for yourself")] fn self_inc_allowance_fail() { let mut context = get_context(carol()); testing_env!(context.clone()); let mut contract = FungibleToken::new(); context.attached_deposit = 1000 * STORAGE_PRICE_PER_BYTE; 
testing_env!(context.clone()); contract.inc_allowance(carol(), (5).into()); } #[test] #[should_panic(expected = "Can not decrement allowance for yourself")] fn self_dec_allowance_fail() { let mut context = get_context(carol()); testing_env!(context.clone()); let mut contract = FungibleToken::new(); context.attached_deposit = 1000 * STORAGE_PRICE_PER_BYTE; testing_env!(context.clone()); contract.dec_allowance(carol(), (10).into()); } #[test] fn saturating_dec_allowance() { let mut context = get_context(carol()); testing_env!(context.clone()); let mut contract = FungibleToken::new(); context.attached_deposit = STORAGE_PRICE_PER_BYTE * 1000; testing_env!(context.clone()); contract.dec_allowance(bob(), (1_000_000_000_000_000u128 / 2).into()); assert_eq!(contract.get_allowance(carol(), bob()), 0.into()) } #[test] fn saturating_inc_allowance() { let mut context = get_context(carol()); testing_env!(context.clone()); let mut contract = FungibleToken::new(); context.attached_deposit = STORAGE_PRICE_PER_BYTE * 1000; testing_env!(context.clone()); let max_u128 = std::u128::MAX; contract.inc_allowance(bob(), max_u128.into()); contract.inc_allowance(bob(), max_u128.into()); assert_eq!(contract.get_allowance(carol(), bob()), std::u128::MAX.into()) } #[test] #[should_panic( expected = "The required attached deposit is 25700000000000000000000, but the given attached deposit is is 0" )] fn self_allowance_fail_no_deposit() { let mut context = get_context(carol()); testing_env!(context.clone()); let mut contract = FungibleToken::new(); context.attached_deposit = 0; testing_env!(context.clone()); contract.inc_allowance(bob(), 5.into()); } #[test] fn carol_escrows_to_bob_transfers_to_alice() { // Acting as carol let mut context = get_context(carol()); testing_env!(context.clone()); let mut contract = FungibleToken::new(); context.storage_usage = env::storage_usage(); context.is_view = true; testing_env!(context.clone()); assert_eq!(contract.get_total_supply().0, 0); let deposit_amount 
= 1_000_000_000_000_000u128; let allowance = deposit_amount.clone() / 3; let transfer_amount = allowance / 3; context.is_view = false; context.attached_deposit = deposit_amount + (1000 * STORAGE_PRICE_PER_BYTE); testing_env!(context.clone()); // get some wNear tokens contract.deposit(deposit_amount.into()); contract.inc_allowance(bob(), allowance.into()); context.storage_usage = env::storage_usage(); context.account_balance = env::account_balance(); context.is_view = true; context.attached_deposit = 0; testing_env!(context.clone()); assert_eq!(contract.get_allowance(carol(), bob()).0, allowance); // Acting as bob now context.is_view = false; context.attached_deposit = STORAGE_PRICE_PER_BYTE * 1000; context.predecessor_account_id = bob(); testing_env!(context.clone()); contract.transfer_from(carol(), alice(), transfer_amount.into()); context.storage_usage = env::storage_usage(); context.account_balance = env::account_balance(); context.is_view = true; context.attached_deposit = 0; testing_env!(context.clone()); assert_eq!(contract.get_balance(carol()).0, deposit_amount.clone() - transfer_amount); assert_eq!(contract.get_balance(alice()).0, transfer_amount); assert_eq!(contract.get_allowance(carol(), bob()).0, allowance - transfer_amount); } #[test] fn self_allowance_set_for_refund() { let mut context = get_context(carol()); testing_env!(context.clone()); let mut contract = FungibleToken::new(); context.storage_usage = env::storage_usage(); let initial_balance = context.account_balance; let initial_storage = context.storage_usage; context.attached_deposit = STORAGE_PRICE_PER_BYTE * 1000; testing_env!(context.clone()); let allowance_amount = 1_000_000_000_000_000u128; contract.inc_allowance(bob(), (allowance_amount.clone() / 2).into()); context.storage_usage = env::storage_usage(); context.account_balance = env::account_balance(); assert_eq!( context.account_balance, initial_balance + Balance::from(context.storage_usage - initial_storage) * STORAGE_PRICE_PER_BYTE ); 
let initial_balance = context.account_balance; let initial_storage = context.storage_usage; testing_env!(context.clone()); context.attached_deposit = 0; testing_env!(context.clone()); contract.dec_allowance(bob(), (allowance_amount.clone() / 2).into()); context.storage_usage = env::storage_usage(); context.account_balance = env::account_balance(); assert!(context.storage_usage < initial_storage); assert!(context.account_balance < initial_balance); assert_eq!( context.account_balance, initial_balance - Balance::from(initial_storage - context.storage_usage) * STORAGE_PRICE_PER_BYTE ); } #[test] fn carol_escrows_to_bob_locks_and_transfers_to_alice() { // Acting as carol let mut context = get_context(carol()); testing_env!(context.clone()); let mut contract = FungibleToken::new(); context.storage_usage = env::storage_usage(); context.is_view = true; testing_env!(context.clone()); assert_eq!(contract.get_total_supply().0, 0); let deposit_amount = 1_000_000_000_000_000u128; let allowance = deposit_amount.clone() / 3; let transfer_amount = allowance / 3; context.is_view = false; context.attached_deposit = deposit_amount + (1000 * STORAGE_PRICE_PER_BYTE); testing_env!(context.clone()); // get some wNear tokens contract.deposit(deposit_amount.into()); contract.inc_allowance(bob(), allowance.into()); context.storage_usage = env::storage_usage(); context.account_balance = env::account_balance(); context.is_view = true; context.attached_deposit = 0; testing_env!(context.clone()); assert_eq!(contract.get_allowance(carol(), bob()).0, allowance); assert_eq!(contract.get_balance(carol()).0, deposit_amount.clone()); // Acting as bob now context.is_view = false; context.attached_deposit = STORAGE_PRICE_PER_BYTE * 1000; context.predecessor_account_id = bob(); testing_env!(context.clone()); contract.transfer_from(carol(), alice(), transfer_amount.into()); context.storage_usage = env::storage_usage(); context.account_balance = env::account_balance(); context.is_view = true; 
context.attached_deposit = 0; testing_env!(context.clone()); assert_eq!(contract.get_balance(carol()).0, (deposit_amount.clone() - transfer_amount)); assert_eq!(contract.get_balance(alice()).0, transfer_amount); assert_eq!(contract.get_allowance(carol(), bob()).0, allowance - transfer_amount); } }
//! `cargo add`
#![warn(
    missing_docs,
    missing_debug_implementations,
    missing_copy_implementations,
    trivial_casts,
    trivial_numeric_casts,
    unsafe_code,
    unstable_features,
    unused_import_braces,
    unused_qualifications
)]

#[macro_use]
extern crate error_chain;
#[macro_use]
extern crate serde_derive;

use crate::args::Args;
use cargo_edit::{Dependency, Manifest};
use std::io::Write;
use std::process;
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

mod args;

// Crate-local error types generated by `error_chain!`; cargo_edit's errors
// are chained in and `std::io::Error` is wrapped as a foreign link.
mod errors {
    error_chain! {
        errors {
            /// Specified a dependency with both a git URL and a version.
            GitUrlWithVersion(git: String, version: String) {
                description("Specified git URL with version")
                display("Cannot specify a git URL (`{}`) with a version (`{}`).", git, version)
            }
            /// Specified a dependency with both a git URL and a path.
            GitUrlWithPath(git: String, path: String) {
                description("Specified git URL with path")
                display("Cannot specify a git URL (`{}`) with a path (`{}`).", git, path)
            }
        }
        links {
            CargoEditLib(::cargo_edit::Error, ::cargo_edit::ErrorKind);
        }
        foreign_links {
            Io(::std::io::Error);
        }
    }
}
use crate::errors::*;

// NOTE(review): this string is parsed by docopt at runtime — its layout IS
// the CLI grammar. Do not reflow or reindent it.
static USAGE: &'static str = r#"
Usage:
    cargo add <crate> [--dev|--build|--optional] [options]
    cargo add <crates>... [--dev|--build|--optional] [options]
    cargo add (-h|--help)
    cargo add --version

Specify what crate to add:
    --vers <ver>            Specify the version to grab from the registry (crates.io).
                            You can also specify versions as part of the name, e.g
                            `cargo add bitflags@0.3.2`.
    --git <uri>             Specify a git repository to download the crate from. This does not work
                            if either a version or path (or both) is specified.
    --path <uri>            Specify the path the crate should be loaded from.

Specify where to add the crate:
    -D --dev                Add crate as development dependency.
    -B --build              Add crate as build dependency.
    --optional              Add as an optional dependency (for use in features). This does not work
                            for `dev-dependencies` or `build-dependencies`.
    --target <target>       Add as dependency to the given target platform. This does not work
                            for `dev-dependencies` or `build-dependencies`.

Options:
    --upgrade=<method>      Choose method of semantic version upgrade.  Must be one of
                            "none" (exact version), "patch" (`~` modifier), "minor"
                            (`^` modifier, default), or "all" (`>=`).
    --manifest-path=<path>  Path to the manifest to add a dependency to.
    --allow-prerelease      Include prerelease versions when fetching from crates.io (e.g.
                            '0.6.0-alpha'). Defaults to false.
    --no-default-features   Set `default-features = false` for the added dependency.
    -q --quiet              Do not print any output in case of success.
    -h --help               Show this help page.
    -V --version            Show version.

This command allows you to add a dependency to a Cargo.toml manifest file. If <crate> is a github
or gitlab repository URL, or a local path, `cargo add` will try to automatically get the crate name
and set the appropriate `--git` or `--path` value.

Please note that Cargo treats versions like "1.2.3" as "^1.2.3" (and that "^1.2.3" is specified
as ">=1.2.3 and <2.0.0"). By default, `cargo add` will use this format, as it is the one that the
crates.io registry suggests. One goal of `cargo add` is to prevent you from using wildcard
dependencies (version set to "*").
"#;

// Print the colored "Adding <crate> v<ver> to <section>" status line.
// Color is only used when stdout is a TTY.
fn print_msg(dep: &Dependency, section: &[String], optional: bool) -> Result<()> {
    let colorchoice = if atty::is(atty::Stream::Stdout) {
        ColorChoice::Auto
    } else {
        ColorChoice::Never
    };
    let mut output = StandardStream::stdout(colorchoice);
    output.set_color(ColorSpec::new().set_fg(Some(Color::Green)).set_bold(true))?;
    write!(output, "{:>12}", "Adding")?;
    output.reset()?;
    write!(output, " {}", dep.name)?;
    if let Some(version) = dep.version() {
        write!(output, " v{}", version)?;
    } else {
        write!(output, " (unknown version)")?;
    }
    write!(output, " to")?;
    if optional {
        write!(output, " optional")?;
    }
    // NOTE(review): assumes `section` has either 1 element (plain section)
    // or 3 (["target", <triple>, <section>] from Args::get_section) — any
    // other length panics on the index below; verify against args.rs.
    let section = if section.len() == 1 {
        section[0].clone()
    } else {
        format!("{} for target `{}`", &section[2], &section[1])
    };
    writeln!(output, " {}", section)?;
    Ok(())
}

// Parse the requested dependencies, insert each into the manifest (printing
// a status line unless --quiet), then write the manifest back to disk.
fn handle_add(args: &Args) -> Result<()> {
    let manifest_path = args.flag_manifest_path.as_ref().map(From::from);
    let mut manifest = Manifest::open(&manifest_path)?;
    let deps = &args.parse_dependencies()?;

    // collect::<Result<...>> short-circuits on the first failed insert.
    deps.iter()
        .map(|dep| {
            if !args.flag_quiet {
                print_msg(dep, &args.get_section(), args.flag_optional)?;
            }
            manifest
                .insert_into_table(&args.get_section(), dep)
                .map_err(Into::into)
        })
        .collect::<Result<Vec<_>>>()
        .map_err(|err| {
            eprintln!("Could not edit `Cargo.toml`.\n\nERROR: {}", err);
            err
        })?;

    let mut file = Manifest::find_file(&manifest_path)?;
    manifest.write_to_file(&mut file)?;

    Ok(())
}

// Entry point: parse CLI args via docopt, handle --version, then run the
// add and report the error-chain cause list on failure.
fn main() {
    let args = docopt::Docopt::new(USAGE)
        .and_then(|d| d.deserialize::<Args>())
        .unwrap_or_else(|err| err.exit());

    if args.flag_version {
        println!("cargo-add version {}", env!("CARGO_PKG_VERSION"));
        process::exit(0);
    }

    if let Err(err) = handle_add(&args) {
        eprintln!("Command failed due to unhandled error: {}\n", err);
        for e in err.iter().skip(1) {
            eprintln!("Caused by: {}", e);
        }
        if let Some(backtrace) = err.backtrace() {
            eprintln!("Backtrace: {:?}", backtrace);
        }
        process::exit(1);
    }
}
use crate::blob::blob::{copy_status_from_headers, CopyStatus}; use crate::core::{copy_id_from_headers, CopyId}; use azure_core::headers::{ client_request_id_from_headers_optional, date_from_headers, etag_from_headers, last_modified_from_headers, request_id_from_headers, server_from_headers, version_from_headers, }; use azure_core::RequestId; use chrono::{DateTime, Utc}; use http::HeaderMap; use std::convert::TryFrom; #[derive(Debug, Clone, PartialEq)] pub struct CopyBlobResponse { pub etag: String, pub last_modified: DateTime<Utc>, pub request_id: RequestId, pub version: String, pub server: String, pub date: DateTime<Utc>, pub copy_id: CopyId, pub copy_status: CopyStatus, pub client_request_id: Option<String>, } impl TryFrom<&HeaderMap> for CopyBlobResponse { type Error = crate::Error; fn try_from(headers: &HeaderMap) -> Result<Self, Self::Error> { trace!("CopyBlobResponse headers == {:#?}", headers); Ok(Self { etag: etag_from_headers(headers)?, last_modified: last_modified_from_headers(headers)?, request_id: request_id_from_headers(headers)?, version: version_from_headers(headers)?.to_owned(), server: server_from_headers(headers)?.to_owned(), date: date_from_headers(headers)?, copy_id: copy_id_from_headers(headers)?, copy_status: copy_status_from_headers(headers)?, client_request_id: client_request_id_from_headers_optional(headers), }) } }
use super::{Concierge, WsError}; use concierge_api_rs::info; use serde::Serialize; use std::{borrow::Cow, collections::HashSet}; use uuid::Uuid; use warp::ws::Message; /// A struct containing group information. pub struct Service { pub name: String, pub nickname: Option<String>, pub owner_uuid: Uuid, pub clients: HashSet<Uuid>, } impl Service { /// Create a new group associated with an owner uuid. pub fn new(name: String, nickname: Option<String>, owner_uuid: Uuid) -> Self { Self { name, nickname, owner_uuid, clients: HashSet::new(), } } /// Utility method to construct an origin receipt on certain payloads. pub fn info(&self) -> info::Service<'_> { info::Service { name: Cow::Borrowed(&self.name), nickname: self.nickname.as_deref().map(Cow::Borrowed), owner_uuid: self.owner_uuid, subscribers: self.clients.iter().copied().collect::<Vec<_>>(), } } /// Add the client to the group. pub fn add_subscriber(&mut self, uuid: Uuid) -> bool { self.clients.insert(uuid) } /// Remove the client from the group. pub fn remove_subscriber(&mut self, uuid: Uuid) -> bool { self.clients.remove(&uuid) } /// Create a controller that allows for controls between the service /// and the concierge. pub fn hook<'a, 'b: 'a>(&'a self, concierge: &'b Concierge) -> ServiceController<'_, '_> { ServiceController { service: self, concierge, } } } /// This to isolate pure client behavior from service-server coupled behavior. pub struct ServiceController<'a, 'c: 'a> { service: &'a Service, concierge: &'c Concierge, } impl ServiceController<'_, '_> { /// Broadcast a payload to all connected client of a certain group. 
pub async fn broadcast(&self, payload: &impl Serialize, to_owner: bool) -> Result<(), WsError> { let message = Message::text(serde_json::to_string(&payload)?); let clients = self.concierge.clients.read().await; self.service .clients .iter() .filter(|client_uuid| to_owner || **client_uuid != self.service.owner_uuid) .filter_map(|client_uuid| clients.get(client_uuid)) .for_each(|client| { client.send_ws_msg(message.clone()).ok(); }); Ok(()) } }
use error::*; use serialization::ModuleMapperSerde; use std::collections::HashMap; #[derive(Clone, Debug)] pub struct ModuleMapper { module_map: HashMap<String, usize>, } impl ModuleMapper { pub fn new(module_map: HashMap<String, usize>) -> Self { ModuleMapper { module_map: module_map, } } pub fn get_module_index<TName>(&self, module_name: TName) -> Result<usize> where TName: AsRef<str> { let module_name = module_name.as_ref(); match self.module_map.get(module_name) { Some(some) => Ok(some.clone()), None => { Err(ErrorKind::PluginNotFoundInPluginMap(module_name.to_string()).into()) }, } } pub fn get_module_map(&self) -> &HashMap<String, usize> { &self.module_map } pub fn get_serde(&self) -> ModuleMapperSerde { ModuleMapperSerde::new(&self.module_map) } }
use crate::{ParseError, Token, TokenContents}; use nu_protocol::Span; #[derive(Debug)] pub struct LiteCommand { pub comments: Vec<Span>, pub parts: Vec<Span>, } impl Default for LiteCommand { fn default() -> Self { Self::new() } } impl LiteCommand { pub fn new() -> Self { Self { comments: vec![], parts: vec![], } } pub fn push(&mut self, span: Span) { self.parts.push(span); } pub fn is_empty(&self) -> bool { self.parts.is_empty() } } #[derive(Debug)] pub struct LitePipeline { pub commands: Vec<LiteCommand>, } impl Default for LitePipeline { fn default() -> Self { Self::new() } } impl LitePipeline { pub fn new() -> Self { Self { commands: vec![] } } pub fn push(&mut self, command: LiteCommand) { self.commands.push(command); } pub fn is_empty(&self) -> bool { self.commands.is_empty() } } #[derive(Debug)] pub struct LiteBlock { pub block: Vec<LitePipeline>, } impl Default for LiteBlock { fn default() -> Self { Self::new() } } impl LiteBlock { pub fn new() -> Self { Self { block: vec![] } } pub fn push(&mut self, pipeline: LitePipeline) { self.block.push(pipeline); } pub fn is_empty(&self) -> bool { self.block.is_empty() } } pub fn lite_parse(tokens: &[Token]) -> (LiteBlock, Option<ParseError>) { let mut block = LiteBlock::new(); let mut curr_pipeline = LitePipeline::new(); let mut curr_command = LiteCommand::new(); let mut last_token = TokenContents::Eol; let mut curr_comment: Option<Vec<Span>> = None; for token in tokens.iter() { match &token.contents { TokenContents::Item => { // If we have a comment, go ahead and attach it if let Some(curr_comment) = curr_comment.take() { curr_command.comments = curr_comment; } curr_command.push(token.span); last_token = TokenContents::Item; } TokenContents::Pipe => { if !curr_command.is_empty() { curr_pipeline.push(curr_command); curr_command = LiteCommand::new(); } last_token = TokenContents::Pipe; } TokenContents::Eol => { if last_token != TokenContents::Pipe { if !curr_command.is_empty() { curr_pipeline.push(curr_command); 
curr_command = LiteCommand::new(); } if !curr_pipeline.is_empty() { block.push(curr_pipeline); curr_pipeline = LitePipeline::new(); } } if last_token == TokenContents::Eol { // Clear out the comment as we're entering a new comment curr_comment = None; } last_token = TokenContents::Eol; } TokenContents::Semicolon => { if !curr_command.is_empty() { curr_pipeline.push(curr_command); curr_command = LiteCommand::new(); } if !curr_pipeline.is_empty() { block.push(curr_pipeline); curr_pipeline = LitePipeline::new(); } last_token = TokenContents::Semicolon; } TokenContents::Comment => { // Comment is beside something if last_token != TokenContents::Eol { curr_command.comments.push(token.span); curr_comment = None; } else { // Comment precedes something if let Some(curr_comment) = &mut curr_comment { curr_comment.push(token.span); } else { curr_comment = Some(vec![token.span]); } } last_token = TokenContents::Comment; } } } if !curr_command.is_empty() { curr_pipeline.push(curr_command); } if !curr_pipeline.is_empty() { block.push(curr_pipeline); } if last_token == TokenContents::Pipe { ( block, Some(ParseError::UnexpectedEof( "pipeline missing end".into(), tokens[tokens.len() - 1].span, )), ) } else { (block, None) } }
use std::cmp;

/// Memoized recursion for the 0/1 knapsack (exercise 2-3-1).
///
/// `dp[i][j]` caches the best value achievable using items `i..n` with
/// remaining capacity `j`; `None` marks an uncomputed cell. (The leftover
/// per-call debug `println!` has been removed.)
fn rec(
    i: usize,
    j: usize,
    n: usize,
    w_list: &Vec<usize>,
    v_list: &Vec<usize>,
    dp: &mut Vec<Vec<Option<usize>>>,
) -> usize {
    if let Some(cached) = dp[i][j] {
        return cached;
    }
    let res = if i == n {
        // No items left.
        0
    } else if j < w_list[i] {
        // Item i does not fit; skip it.
        rec(i + 1, j, n, w_list, v_list, dp)
    } else {
        // Best of skipping item i vs. taking it.
        cmp::max(
            rec(i + 1, j, n, w_list, v_list, dp),
            rec(i + 1, j - w_list[i], n, w_list, v_list, dp) + v_list[i],
        )
    };
    dp[i][j] = Some(res);
    res
}

/// 0/1 knapsack via memoized recursion: max value within capacity `w`.
fn act2_3_1(n: usize, w_list: &Vec<usize>, v_list: &Vec<usize>, w: usize) -> usize {
    // 1000x1000 memo table, all cells initially uncomputed.
    let mut dp = vec![vec![None; 1000]; 1000];
    rec(0, w, n, w_list, v_list, &mut dp)
}

/// Longest common subsequence of `s[..n]` and `t[..m]` (exercise 2-3-2).
fn act2_3_2(n: usize, m: usize, s: &str, t: &str) -> usize {
    // dp[i][j] = LCS length of s[..i] and t[..j].
    let mut dp = vec![vec![0usize; 1000]; 1000];
    let s_chrs: Vec<char> = s.chars().collect();
    let t_chrs: Vec<char> = t.chars().collect();
    for i in 0..n {
        for j in 0..m {
            dp[i + 1][j + 1] = if s_chrs[i] == t_chrs[j] {
                dp[i][j] + 1
            } else {
                cmp::max(dp[i][j + 1], dp[i + 1][j])
            };
        }
    }
    dp[n][m]
}

/// Unbounded knapsack with a 1-D table (exercise 2-3-3): items reusable.
fn act2_3_3(n: usize, w_list: &Vec<usize>, v_list: &Vec<usize>, w: usize) -> usize {
    let mut dp = vec![0usize; 100];
    for i in 0..n {
        // Forward iteration allows item i to be taken multiple times.
        for j in w_list[i]..w + 1 {
            dp[j] = cmp::max(dp[j], dp[j - w_list[i]] + v_list[i]);
        }
    }
    dp[w]
}

/// 0/1 knapsack with large capacities (exercise 2-3-4): DP over VALUE.
/// `dp[i][j]` = minimum weight needed to reach value `j` with items `..i`;
/// the answer is the largest value whose weight fits in `w`.
fn act2_3_4(n: usize, w_list: &Vec<usize>, v_list: &Vec<usize>, w: usize) -> usize {
    let max_n = 100;
    let max_m = 100;
    // usize::max_value() acts as "unreachable".
    let mut dp = vec![vec![usize::max_value(); max_n * max_m]; max_n];
    dp[0][0] = 0;
    for i in 0..n {
        for j in 0..(max_n * max_m) {
            if j < v_list[i] {
                dp[i + 1][j] = dp[i][j];
            } else {
                // Guard against overflowing the "unreachable" sentinel.
                let take = if dp[i][j - v_list[i]] == usize::max_value() {
                    usize::max_value()
                } else {
                    dp[i][j - v_list[i]] + w_list[i]
                };
                dp[i + 1][j] = cmp::min(dp[i][j], take);
            }
        }
    }
    let mut res = 0;
    for j in 0..(max_n * max_m) {
        if dp[n][j] <= w {
            res = j;
        }
    }
    res
}

/// Partial-sum feasibility with limited multiplicities (exercise 2-3-5).
/// `dp[j]` = max count of item i still usable after reaching sum `j`
/// (-1 = unreachable). Returns "Yes"/"No" for whether `k` is reachable.
/// (The leftover debug `println!` loop has been removed.)
fn act2_3_5(n: usize, a: &Vec<usize>, m: &Vec<usize>, k: usize) -> String {
    let mut dp = vec![-1i32; 1000];
    dp[0] = 0;
    for i in 0..n {
        for j in 0..k + 1 {
            if dp[j] >= 0 {
                // Reachable without item i: full supply of item i remains.
                dp[j] = m[i] as i32;
            } else if j < a[i] || dp[j - a[i]] <= 0 {
                // NOTE: the `j < a[i]` check must stay first — it also
                // guards the `j - a[i]` index from underflowing.
                dp[j] = -1;
            } else {
                dp[j] = dp[j - a[i]] - 1;
            }
        }
    }
    if dp[k] >= 0 {
        "Yes".to_string()
    } else {
        "No".to_string()
    }
}

/// Longest monotone subsequence, O(n^2) (exercise 2-3-6).
fn act2_3_6(n: usize, a: &Vec<usize>) -> usize {
    let mut res = 0;
    // dp[i] = longest chain ending at i.
    let mut dp = vec![1usize; 1000];
    for i in 0..n {
        for j in 0..i {
            // NOTE(review): `a[i] < a[j]` extends DECREASING runs; the book's
            // 2-3-6 is usually the increasing variant (`a[j] < a[i]`). The
            // existing test passes either way, so the original comparison is
            // preserved — confirm intent before changing.
            if a[i] < a[j] {
                dp[i] = cmp::max(dp[i], dp[j] + 1);
            }
        }
        res = cmp::max(res, dp[i]);
    }
    res
}

/// Partitions of `m` into at most `n` parts, modulo `bigM` (exercise 2-3-7).
fn act2_3_7(n: usize, m: usize, bigM: usize) -> usize {
    let max_n = 100;
    let max_m = 100;
    // dp[i][j] = number of partitions of j into at most i parts (mod bigM).
    let mut dp = vec![vec![0usize; max_n * max_m]; max_n];
    dp[0][0] = 1;
    for i in 1..m + 1 {
        for j in 0..n + 1 {
            // j >= i, written without the signed-cast dance of the original.
            if j >= i {
                dp[i][j] = (dp[i - 1][j] + dp[i][j - i]) % bigM;
            } else {
                dp[i][j] = dp[i - 1][j];
            }
        }
    }
    dp[m][n]
}

/// Multisets: ways to pick items summing to `m` when at most `a[i]` of item
/// `i` may be used, modulo `bigM` (exercise 2-3-8).
fn act2_3_8(n: usize, m: usize, a: &Vec<usize>, bigM: usize) -> usize {
    let max_n = 100;
    let max_m = 100;
    let mut dp = vec![vec![0usize; max_n * max_m]; max_n];
    for row in dp.iter_mut() {
        // One way to reach sum 0: take nothing.
        row[0] = 1;
    }
    for i in 0..n {
        for j in 1..m + 1 {
            if j >= 1 + a[i] {
                // Add bigM BEFORE subtracting: the values are mod-reduced, so
                // the original `x + y - z + bigM` order could underflow
                // `usize` (panic in debug builds) whenever x + y < z.
                dp[i + 1][j] =
                    (dp[i + 1][j - 1] + dp[i][j] + bigM - dp[i][j - 1 - a[i]]) % bigM;
            } else {
                dp[i + 1][j] = (dp[i + 1][j - 1] + dp[i][j]) % bigM;
            }
        }
    }
    dp[n][m]
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn act2_3_1_test() {
        assert_eq!(7, act2_3_1(4, &vec![2, 1, 3, 2], &vec![3, 2, 4, 2], 5));
    }

    #[test]
    fn act2_3_2_test() {
        assert_eq!(3, act2_3_2(4, 4, "abcd", "becd"));
    }

    #[test]
    fn act2_3_3_test() {
        assert_eq!(10, act2_3_3(3, &vec![3, 4, 2], &vec![4, 5, 3], 7));
    }

    #[test]
    fn act2_3_4_test() {
        assert_eq!(7, act2_3_4(4, &vec![2, 1, 3, 2], &vec![3, 2, 4, 2], 5));
    }

    #[test]
    fn act2_3_5_test() {
        assert_eq!("Yes", act2_3_5(3, &vec![3, 5, 8], &vec![3, 2, 2], 17));
    }

    #[test]
    fn act2_3_6_test() {
        assert_eq!(3, act2_3_6(5, &vec![4, 2, 3, 1, 5]));
    }

    #[test]
    fn act2_3_7_test() {
        assert_eq!(4, act2_3_7(4, 3, 1000));
    }

    #[test]
    fn act2_3_8_test() {
        assert_eq!(6, act2_3_8(3, 3, &vec![1, 2, 3], 10000));
    }
}
//! tzcnt /// Counts trailing zero bits pub trait Tzcnt { /// Counts the number of trailing least significant zero bits. /// /// When the source operand is 0, it returns its size in bits. /// /// This is equivalent to searching for the least significant set bit and /// returning its index. /// /// **Keywords**: Count trailing zeros, Bit scan forward, find first set. /// /// # Instructions /// /// - [`TZCNT`](http://www.felixcloutier.com/x86/TZCNT.html): /// - Description: Count the number of trailing zero bits. /// - Architecture: x86. /// - Instruction set: BMI. /// - Registers: 16/32/64 bit. /// /// # Example /// /// ``` /// # use bitintr::*; /// assert_eq!(0b1001_0000_u16.tzcnt(), 4_u16); /// assert_eq!(0b0000_0000_u32.tzcnt(), 32_u32); /// assert_eq!(0b0000_0001_u64.tzcnt(), 0_u64); /// ``` fn tzcnt(self) -> Self; } macro_rules! impl_tzcnt { ($id:ident) => { impl Tzcnt for $id { #[inline] fn tzcnt(self) -> Self { self.trailing_zeros() as Self } } }; } impl_all!(impl_tzcnt: u8, u16, u32, u64, i8, i16, i32, i64);
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under both the MIT license found in the
 * LICENSE-MIT file in the root directory of this source tree and the Apache
 * License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 * of this source tree.
 */

//! Define functions that can modify the global state of tracing.

#[cfg(not(test))]
use std::env;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};

use crate::context::TraceContext;
use lazy_static::lazy_static;

// Environment variable that turns tracing on at startup (non-test builds).
#[cfg(not(test))]
const ENABLE_TRACE_ENV: &str = "RUST_TRACE";

lazy_static! {
    // Global on/off switch; initialized lazily on first access.
    static ref ENABLED: AtomicBool = init_enabled();
    // Global sampling percentage (0-100).
    static ref SAMPLE_RATE: AtomicUsize = init_sample_rate();
    // Process-wide fallback context, see `get_global_context`.
    static ref TRACE_CONTEXT: TraceContext = TraceContext::default();
}

/// Check if the tracing is globally enabled
#[inline]
pub fn is_enabled() -> bool {
    ENABLED.load(Ordering::Relaxed)
}

/// Globally enable tracing
#[inline]
pub fn enable() {
    ENABLED.store(true, Ordering::Relaxed);
}

/// Globally disable tracing
#[inline]
pub fn disable() {
    ENABLED.store(false, Ordering::Relaxed);
}

/// Check the global sample rate for tracing, returned value is a percentage.
#[inline]
pub fn sample_rate() -> usize {
    SAMPLE_RATE.load(Ordering::Relaxed)
}

/// Set a sample rate for tracing where `rate` is the percentage of samples that
/// will be enabled.
///
/// # Panics
///
/// Panics if `rate` is over 100
#[inline]
pub fn set_sample_rate(rate: usize) {
    assert!(rate <= 100);
    SAMPLE_RATE.store(rate, Ordering::Relaxed);
}

// In test builds tracing is always on, so tests don't depend on env vars.
#[cfg(test)]
fn init_enabled() -> AtomicBool {
    AtomicBool::new(true)
}

// In production builds tracing starts enabled only if RUST_TRACE is set.
#[cfg(not(test))]
fn init_enabled() -> AtomicBool {
    AtomicBool::new(env::var_os(ENABLE_TRACE_ENV).is_some())
}

// Default sample rate: 100% (sample everything).
fn init_sample_rate() -> AtomicUsize {
    AtomicUsize::new(100)
}

/// Get a global context, useful when debugging a piece of code that does not
/// have an easily accessible TraceContext passed from callsites. It is not
/// recommended to use it in long running processes (like servers) in
/// production as the trace might easily grow too big to be opened by a trace
/// viewer after a while.
pub fn get_global_context() -> &'static TraceContext {
    &TRACE_CONTEXT
}
#![deny(warnings)] #![feature(question_mark)] use std::env; use std::fs::File; use std::io::{stdin, stdout, Result, Read, Write}; use std::process::{Command, ExitStatus}; fn login(user: &str) -> Result<ExitStatus> { let shell = "/bin/sh"; let home = "/home/"; env::set_current_dir(home)?; if let Ok(mut motd) = File::open("/etc/motd") { let mut motd_string = String::new(); if let Ok(_) = motd.read_to_string(&mut motd_string) { println!("{}", motd_string); } } Command::new("/bin/sh") .env("HOME", home) .env("SHELL", shell) .env("USER", user) .spawn()?.wait() } fn main() { loop { if let Ok(mut issue) = File::open("/etc/issue") { let mut issue_string = String::new(); if let Ok(_) = issue.read_to_string(&mut issue_string) { println!("{}", issue_string); } } print!("redox login: "); stdout().flush().unwrap(); let mut user = String::new(); stdin().read_line(&mut user).unwrap(); match login(&user) { Ok(_exit_status) => (), Err(err) => println!("login: failed to login as {}: {}", user, err) } } }
pub fn sum_of_squares(n: usize) -> usize { (1..(n+1)).map(|x| x*x) .fold(0, |sum, x| sum+x) } pub fn square_of_sum(n : usize) -> usize { let sum = (1..(n+1)).fold(0, |sum, x| sum+x); sum * sum } pub fn difference(n: usize) -> usize { square_of_sum(n) - sum_of_squares(n) }
use std::{
    cell::{Cell, RefCell},
    str::CharIndices,
};

use crate::iterator::sneakable::Sneakable;

use super::{Token, TokenKind};

/// Hand-written lexer producing `Token`s on demand via `scan`.
pub struct Scanner<'source> {
    // characters in the source string are iterated by codepoints.
    // the scanner needs to be able to peek the next two characters as it is
    // currently implemented, but 'core::iter::Peekable' only allows
    // peeking the character right next to the cursor. I implemented a
    // 'Sneakable' type that stores the previous, next and next next elements.
    // it is simple and there is a high chance that it is slow. we will
    // figure it out.
    //
    // `start` marks the first character of the lexeme currently being
    // scanned; `current` is the cursor. Both are RefCell-wrapped so that
    // `scan` can advance through the source while taking only `&self`.
    start: RefCell<Sneakable<CharIndices<'source>>>,
    current: RefCell<Sneakable<CharIndices<'source>>>,
    source: &'source str,
    // 1-based line counter, bumped on every '\n' seen.
    line: Cell<usize>,
}

impl<'source> Scanner<'source> {
    /// Builds a scanner positioned at the start of `source`.
    pub fn new(source: &'source str) -> Scanner<'source> {
        let iter = Sneakable::new(source.char_indices());
        Self {
            start: RefCell::new(iter.clone()),
            current: RefCell::new(iter),
            source,
            line: Cell::new(1),
        }
    }

    /// returns tokens until the stream is finished
    /// when it is finished it only returns Eof tokens
    /// if it finds something unexpected, it returns Error tokens
    /// with an error message in the lexeme field.
    pub fn scan(&'source self) -> Token<'source> {
        self.skip_whitespace();
        {
            // reset the lexeme start to the current cursor; scoped block so
            // the start borrow is released before make_token borrows again.
            *self.start.borrow_mut() = self.current.borrow().clone();
        }
        if self.is_at_end() {
            return self.make_token(TokenKind::Eof);
        }

        let c = self.advance();
        match c {
            '(' => self.make_token(TokenKind::LeftParen),
            ')' => self.make_token(TokenKind::RightParen),
            '{' => self.make_token(TokenKind::LeftBrace),
            '}' => self.make_token(TokenKind::RightBrace),
            ';' => self.make_token(TokenKind::Semicolon),
            ',' => self.make_token(TokenKind::Comma),
            '.' => self.make_token(TokenKind::Dot),
            '-' => self.make_token(TokenKind::Minus),
            '+' => self.make_token(TokenKind::Plus),
            '/' => self.make_token(TokenKind::Slash),
            '*' => self.make_token(TokenKind::Star),
            // two-character operators: consume the '=' only if present
            '!' => self.make_token(if self.check('=') {
                TokenKind::BangEqual
            } else {
                TokenKind::Bang
            }),
            '=' => self.make_token(if self.check('=') {
                TokenKind::EqualEqual
            } else {
                TokenKind::Equal
            }),
            '<' => self.make_token(if self.check('=') {
                TokenKind::LessEqual
            } else {
                TokenKind::Less
            }),
            '>' => self.make_token(if self.check('=') {
                TokenKind::GreaterEqual
            } else {
                TokenKind::Greater
            }),
            '"' => self.string(),
            _ if c.is_alphabetic() => self.identifier(),
            _ if c.is_ascii_digit() => self.number(),
            _ => self.make_error_token("Unexpected character."),
        }
    }

    // true when the cursor has no next character to yield
    fn is_at_end(&self) -> bool {
        self.current.borrow_mut().peek().is_none()
    }

    // panics if the current iterator is at the end
    fn advance(&self) -> char {
        self.current.borrow_mut().next();
        self.current.borrow_mut().previous().unwrap().1
    }

    // conditionally consumes the next char when it equals `expected`
    fn check(&self, expected: char) -> bool {
        if self.is_at_end() || self.peek() != Some(expected) {
            false
        } else {
            self.current.borrow_mut().next();
            true
        }
    }

    // one-character lookahead, without consuming
    fn peek(&self) -> Option<char> {
        self.current.borrow_mut().peek().map(|c| c.1)
    }

    // two-character lookahead, without consuming
    fn peek_next(&self) -> Option<char> {
        self.current.borrow_mut().peek_next().map(|c| c.1)
    }

    // scans the remainder of an identifier/keyword after its first character
    fn identifier(&'source self) -> Token<'source> {
        while self
            .peek()
            .map(|c| c.is_alphabetic() || c.is_ascii_digit())
            .unwrap_or(false)
        {
            self.advance();
        }
        self.make_token(self.identifier_type())
    }

    // Classifies the just-scanned identifier as a keyword or a plain
    // identifier using a hand-rolled trie keyed on the first one or two
    // characters.
    // TODO! measure performance against a simple 'match'
    fn identifier_type(&self) -> TokenKind {
        // interior mutability is weird
        let (start_peek, start_peek_next, current_peek) = {
            let mut start = self.start.borrow_mut();
            let mut current = self.current.borrow_mut();
            // NOTE(review): these unwraps assume `start`/`current` can still
            // peek here (i.e. the lexeme is not flush against EOF in a way
            // that exhausts the iterators) — TODO confirm with an
            // end-of-input identifier test.
            let start_peek = start.peek().unwrap().clone();
            let start_peek_next = start.peek_next().unwrap().clone();
            let current_peek = current.peek().unwrap().clone();
            (start_peek, start_peek_next, current_peek)
        };
        let check_keyword = |start: usize, length: usize, rest: &str, kind: TokenKind| {
            // if both slices of source are the same length, and the parts that have not
            // been checked are the same, then it is the token kind you think it is.
            // otherwise it is a simple identifier.
            if current_peek.0 - start_peek.0 == start + length
                && rest == &self.source[start_peek.0 + start..start_peek.0 + start + length]
            {
                kind
            } else {
                TokenKind::Identifier
            }
        };
        match start_peek.1 {
            'a' => check_keyword(1, 2, "nd", TokenKind::And),
            'c' => check_keyword(1, 4, "lass", TokenKind::Class),
            'e' => check_keyword(1, 3, "lse", TokenKind::Else),
            // 'f' and 't' branch again on the second character
            'f' if current_peek.0 - start_peek.0 > 1 => match start_peek_next.1 {
                'a' => check_keyword(2, 3, "lse", TokenKind::False),
                'o' => check_keyword(2, 1, "r", TokenKind::For),
                'u' => check_keyword(2, 1, "n", TokenKind::Fun),
                _ => TokenKind::Identifier,
            },
            'i' => check_keyword(1, 1, "f", TokenKind::If),
            'n' => check_keyword(1, 2, "il", TokenKind::Nil),
            'o' => check_keyword(1, 1, "r", TokenKind::Or),
            'p' => check_keyword(1, 4, "rint", TokenKind::Print),
            'r' => check_keyword(1, 5, "eturn", TokenKind::Return),
            's' => check_keyword(1, 4, "uper", TokenKind::Super),
            't' if current_peek.0 - start_peek.0 > 1 => match start_peek_next.1 {
                'h' => check_keyword(2, 2, "is", TokenKind::This),
                'r' => check_keyword(2, 2, "ue", TokenKind::True),
                _ => TokenKind::Identifier,
            },
            'v' => check_keyword(1, 2, "ar", TokenKind::Var),
            'w' => check_keyword(1, 4, "hile", TokenKind::While),
            _ => TokenKind::Identifier,
        }
    }

    // scans an integer or decimal literal; a trailing '.' without a digit
    // after it is left unconsumed
    fn number(&'source self) -> Token<'source> {
        while self
            .peek()
            .as_ref()
            .map(char::is_ascii_digit)
            .unwrap_or(false)
        {
            self.advance();
        }
        if self.peek() == Some('.')
            && self
                .peek_next()
                .as_ref()
                .map(char::is_ascii_digit)
                .unwrap_or(false)
        {
            // consume the '.' then the fractional digits
            self.advance();
            while self
                .peek()
                .as_ref()
                .map(char::is_ascii_digit)
                .unwrap_or(false)
            {
                self.advance();
            }
        }
        self.make_token(TokenKind::Number)
    }

    // scans a double-quoted string literal; newlines inside it are allowed
    // and bump the line counter
    fn string(&'source self) -> Token<'source> {
        while self.peek() != Some('"') && !self.is_at_end() {
            if self.peek() == Some('\n') {
                self.line.set(self.line.get() + 1);
            }
            self.advance();
        }
        if self.is_at_end() {
            self.make_error_token("Unterminated string.")
        } else {
            // consume the closing quote
            self.advance();
            self.make_token(TokenKind::String)
        }
    }

    // consumes spaces, tabs, carriage returns, newlines (counting lines) and
    // '//' line comments
    fn skip_whitespace(&self) {
        loop {
            let c = self.peek();
            match c {
                Some(' ') | Some('\r') | Some('\t') => {
                    self.advance();
                }
                Some('\n') => {
                    self.line.set(self.line.get() + 1);
                    self.advance();
                }
                Some('/') if self.peek_next() == Some('/') => {
                    // line comment: skip to (but not past) the newline
                    while self.peek() != Some('\n') && !self.is_at_end() {
                        self.advance();
                    }
                }
                _ => return,
            }
        }
    }

    fn make_token(&'source self, kind: TokenKind) -> Token<'source> {
        // in the case that kind == Eof, my .peek() calls will return None.
        // in that case, i want the lexeme string to be a 0-length one
        let start_index = self.start.borrow_mut().peek().map(|it| it.0).unwrap_or(0);
        let current_index = self.current.borrow_mut().peek().map(|it| it.0).unwrap_or(0);
        Token {
            kind,
            lexeme: &self.source[start_index..current_index],
            line: self.line.get(),
        }
    }

    // error tokens carry the message in the lexeme field instead of source text
    fn make_error_token(&'source self, message: &'static str) -> Token<'source> {
        Token {
            kind: TokenKind::Error,
            lexeme: message,
            line: self.line.get(),
        }
    }
}
impl Solution { pub fn str_str(haystack: String, needle: String) -> i32 { if needle.len() == 0 { return 0; } for i in 0..haystack.len() { if i+needle.len() > haystack.len(){ break; } if haystack[i..i+needle.len()] == needle { return i as i32; } } return -1; } }
use super::prelude::*;
use db::schema::executor_processor;

/// Registers every executor-processor endpoint on the actix-web service
/// configuration.
pub(crate) fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(show_executor_processors)
        .service(create_executor_processor)
        .service(update_executor_processor)
        .service(delete_executor_processor)
        .service(activate_executor_processor);
}

// Inserts a new executor-processor row. On success responds with the number
// of inserted rows wrapped in a UnifiedResponseMessages; when no pooled DB
// connection is available, responds with a generic error payload.
// NOTE(review): the fallback uses UnifiedResponseMessages::<()> while the
// other handlers in this file use ::<usize> — confirm whether that is
// intentional.
#[post("/api/executor_processor/create")]
async fn create_executor_processor(
    web::Json(executor_processor): web::Json<model::NewExecutorProcessor>,
    pool: ShareData<db::ConnectionPool>,
) -> HttpResponse {
    if let Ok(conn) = pool.get() {
        // Run the blocking diesel insert off the async executor.
        return HttpResponse::Ok().json(Into::<UnifiedResponseMessages<usize>>::into(
            web::block(move || {
                diesel::insert_into(executor_processor::table)
                    .values(&executor_processor)
                    .execute(&conn)
            })
            .await,
        ));
    }

    HttpResponse::Ok().json(UnifiedResponseMessages::<()>::error())
}

// Returns a filtered, paginated page of executor processors together with
// the total row count for the same filter.
#[post("/api/executor_processor/list")]
async fn show_executor_processors(
    web::Json(query_params): web::Json<model::QueryParamsExecutorProcessor>,
    pool: ShareData<db::ConnectionPool>,
) -> HttpResponse {
    if let Ok(conn) = pool.get() {
        return HttpResponse::Ok().json(Into::<
            UnifiedResponseMessages<PaginateData<model::ExecutorProcessor>>,
        >::into(
            web::block::<_, _, diesel::result::Error>(move || {
                // Page query: same filter applied to the full column set.
                let query_builder = model::ExecutorProcessorQueryBuilder::query_all_columns();
                let executor_processors = query_params
                    .clone()
                    .query_filter(query_builder)
                    .paginate(query_params.page)
                    .set_per_page(query_params.per_page)
                    .load::<model::ExecutorProcessor>(&conn)?;

                let per_page = query_params.per_page;

                // Count query: same filter, COUNT(*) instead of columns.
                let count_builder = model::ExecutorProcessorQueryBuilder::query_count();
                let count = query_params
                    .query_filter(count_builder)
                    .get_result::<i64>(&conn)?;

                Ok(PaginateData::<model::ExecutorProcessor>::default()
                    .set_data_source(executor_processors)
                    .set_page_size(per_page)
                    .set_total(count))
            })
            .await,
        ));
    }

    HttpResponse::Ok().json(UnifiedResponseMessages::<
        PaginateData<model::ExecutorProcessor>,
    >::error())
}

// Updates an existing executor-processor row; the payload identifies the row
// (diesel derives the primary-key lookup from `&executor_processor`).
#[post("/api/executor_processor/update")]
async fn update_executor_processor(
    web::Json(executor_processor): web::Json<model::UpdateExecutorProcessor>,
    pool: ShareData<db::ConnectionPool>,
) -> HttpResponse {
    if let Ok(conn) = pool.get() {
        return HttpResponse::Ok().json(Into::<UnifiedResponseMessages<usize>>::into(
            web::block(move || {
                diesel::update(&executor_processor)
                    .set(&executor_processor)
                    .execute(&conn)
            })
            .await,
        ));
    }

    HttpResponse::Ok().json(UnifiedResponseMessages::<usize>::error())
}

// Deletes the executor-processor row with the given id.
#[post("/api/executor_processor/delete")]
async fn delete_executor_processor(
    web::Json(model::ExecutorProcessorId {
        executor_processor_id,
    }): web::Json<model::ExecutorProcessorId>,
    pool: ShareData<db::ConnectionPool>,
) -> HttpResponse {
    use db::schema::executor_processor::dsl::*;

    if let Ok(conn) = pool.get() {
        return HttpResponse::Ok().json(Into::<UnifiedResponseMessages<usize>>::into(
            web::block(move || {
                diesel::delete(executor_processor.find(executor_processor_id)).execute(&conn)
            })
            .await,
        ));
    }

    HttpResponse::Ok().json(UnifiedResponseMessages::<usize>::error())
}

// Update `status` and `token`.
// Activation endpoint: binds the scheduler to the executor over HTTP and,
// on success, persists the returned token and the Enabled status.
#[post("/api/executor_processor/activate")]
async fn activate_executor_processor(
    web::Json(model::ExecutorProcessorId {
        executor_processor_id,
    }): web::Json<model::ExecutorProcessorId>,
    pool: ShareData<db::ConnectionPool>,
    scheduler: SharedSchedulerMetaInfo,
) -> HttpResponse {
    // Collapse the Result into the unified response envelope.
    let uniform_data: UnifiedResponseMessages<()> =
        do_activate(pool, executor_processor_id, scheduler)
            .await
            .into();
    HttpResponse::Ok().json(uniform_data)
}

// Two-step activation: (1) perform the remote bind handshake, (2) persist
// the resulting token/status. Each step takes its own pooled connection.
async fn do_activate(
    pool: ShareData<db::ConnectionPool>,
    executor_processor_id: i64,
    scheduler: SharedSchedulerMetaInfo,
) -> Result<(), CommonError> {
    let bind_info = activate_executor(pool.get()?, executor_processor_id, scheduler).await?;
    activate_executor_row(pool.get()?, executor_processor_id, bind_info).await?;
    Ok(())
}

// Loads the processor's connection details, signs a bind request with the
// scheduler's private key, POSTs it to the executor's /api/executor/bind
// endpoint, and decrypts the response.
async fn activate_executor(
    conn: db::PoolConnection,
    executor_processor_id: i64,
    scheduler: SharedSchedulerMetaInfo,
) -> Result<service_binding::BindResponse, CommonError> {
    // Blocking diesel lookup of the row's bind-relevant columns.
    let query = web::block::<_, model::UpdateExecutorProcessor, diesel::result::Error>(move || {
        executor_processor::table
            .find(executor_processor_id)
            .select((
                executor_processor::id,
                executor_processor::name,
                executor_processor::description,
                executor_processor::host,
                executor_processor::machine_id,
                executor_processor::tag,
            ))
            .first(&conn)
    })
    .await?;

    let model::UpdateExecutorProcessor {
        id,
        name,
        host,
        machine_id,
        ..
    }: model::UpdateExecutorProcessor = query;

    let client = RequestClient::default();
    let url = "http://".to_string() + &host + "/api/executor/bind";

    let private_key = scheduler.get_app_security_key();
    let scheduler_host = scheduler.get_app_host_name().clone();

    // Build and sign the bind request; `sign` may fail with a CommonError.
    let signed_scheduler = service_binding::BindRequest::default()
        .set_scheduler_host(scheduler_host)
        .set_executor_processor_id(id)
        .set_executor_processor_host(host)
        .set_executor_processor_name(name)
        .set_executor_machine_id(machine_id)
        .set_time(get_timestamp())
        .sign(private_key)?;

    // The remote responds with an encrypted payload inside the unified
    // envelope; convert, then unwrap and decrypt it.
    let response: Result<service_binding::EncryptedBindResponse, CommonError> = client
        .post(url)
        .send_json(&signed_scheduler)
        .await?
        .json::<UnifiedResponseMessages<service_binding::EncryptedBindResponse>>()
        .await?
        .into();

    Ok(response?.decrypt_self(private_key)?)
}

// Persists the bind result: stores the issued token (empty string when the
// response carried none) and flips the row's status to Enabled.
async fn activate_executor_row(
    conn: db::PoolConnection,
    executor_processor_id: i64,
    bind_info: service_binding::BindResponse,
) -> Result<(), CommonError> {
    use db::schema::executor_processor::dsl::{executor_processor, status, token};

    // TODO:
    // Consider caching tokens to be used when collecting executor-events, and health checks.
    // This will avoid querying the database.
    // However, cached record operations cannot be placed in the context of the operation db update token.

    web::block::<_, usize, diesel::result::Error>(move || {
        diesel::update(executor_processor.find(executor_processor_id))
            .set((
                token.eq(&bind_info.token.unwrap_or_default()),
                status.eq(state::executor_processor::State::Enabled as i16),
            ))
            .execute(&conn)
    })
    .await?;
    Ok(())
}
use virtual_machine_interpreter::VirtualMachineInterpreter; use std::env; use std::fs::File; use std::io::{stdin, stdout, BufRead, BufReader, BufWriter, Write}; fn main() { let mut reader: Box<dyn BufRead> = match env::args().nth(1) { None => Box::new(BufReader::new(stdin())), Some(filename) => Box::new(BufReader::new( File::open(filename).expect("cannot open file"), )), }; let mut writer: Box<dyn Write> = match env::args().nth(2) { None => Box::new(BufWriter::new(stdout())), Some(filename) => Box::new(BufWriter::new( File::create(filename).expect("cannot create file"), )), }; let mut str_in = String::new(); reader.read_to_string(&mut str_in).expect("read failed"); VirtualMachineInterpreter::interpret(str_in.lines(), &mut writer).unwrap(); }
/// Definitions for base known species use crate::species::Species; use std::fmt; /// Create a new Canine Species #[derive(Clone, Debug, PartialEq)] pub struct Canine; impl Species for Canine { fn name(&self) -> &str { "Canine" } } impl fmt::Display for Canine { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.name()) } } /// Create a new Feline Species #[derive(Clone, Debug, PartialEq)] pub struct Feline; impl Species for Feline { fn name(&self) -> &str { "Feline" } } impl fmt::Display for Feline { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.name()) } } #[cfg(test)] mod test_feline { use super::Feline; use crate::species::Species; #[test] fn feline_species_name() { assert_eq!("Feline", Feline {}.name()); } } #[cfg(test)] mod test_canine { use super::Canine; use crate::species::Species; #[test] fn canine_species_name() { assert_eq!("Canine", Canine {}.name()); } }
//! Space types that is used to define capacity /// Represent 1 * usize space pub struct S1 { _inner: [usize; 1], } /// Represent 2 * usize space pub struct S2 { _inner: [usize; 2], } /// Represent 4 * usize space pub struct S4 { _inner: [usize; 4], } /// Represent 8 * usize space pub struct S8 { _inner: [usize; 8], } /// Represent 16 * usize space pub struct S16 { _inner: [usize; 16], } /// Represent 32 * usize space pub struct S32 { _inner: [usize; 32], } /// Represent 64 * usize space pub struct S64 { _inner: [usize; 64], }
extern crate pkg_config; use std::path::PathBuf; use std::{env, fs, process}; macro_rules! cmd( ($name:expr) => (process::Command::new($name)); ); macro_rules! get( ($name:expr) => (env::var($name).unwrap()); ); macro_rules! ok( ($result:expr) => ($result.unwrap()); ); macro_rules! run( ($command:expr) => ( assert!($command.stdout(process::Stdio::inherit()) .stderr(process::Stdio::inherit()) .status().unwrap().success()); ); ); fn main() { if pkg_config::find_library("hdf5").is_ok() { return; } let source = PathBuf::from(&get!("CARGO_MANIFEST_DIR")).join("source"); let output = PathBuf::from(&get!("OUT_DIR")); let build = output.join("build"); let install = output.join("install"); if fs::metadata(&build).is_err() { ok!(fs::create_dir_all(&build)); run!(cmd!(source.join("configure")).current_dir(&build) .arg("--disable-hl") .arg("--enable-debug=no") .arg("--enable-production") .arg("--enable-threadsafe") .arg(&format!("--prefix={}", install.display()))); } if fs::metadata(&install).is_err() { ok!(fs::create_dir_all(&install)); run!(cmd!("make").current_dir(&build) .arg(&format!("-j{}", &get!("NUM_JOBS"))) .arg("install")); } println!("cargo:rustc-link-lib=dylib=hdf5"); println!("cargo:rustc-link-search={}", install.join("lib").display()); }