text
stringlengths
8
4.13M
use tracing::error;

use h3ron::collections::{HashMap, HashSet};
use h3ron::iter::change_resolution;
use h3ron::{H3Cell, Index};

use crate::clickhouse::compacted_tables::{TableSet, COL_NAME_H3INDEX};
use crate::Error;

/// How the SELECT statement for a [`TableSet`] gets created.
#[derive(Clone)]
pub enum TableSetQuery {
    /// autogenerate a query based on the available columns
    AutoGenerated,

    /// templated select statement
    ///
    /// The selected columns must include the h3indexes in a column named `h3index`
    ///
    /// The query must include these placeholders:
    /// * "<[table]>": will be filled with the table to be queried
    /// * "<[h3indexes]>": will be filled with an array of h3indexes used for the query. This should
    ///
    /// TODO: parsing and validating and injecting missing column into the query with https://github.com/ballista-compute/sqlparser-rs
    /// would be nice, but as the parser does not implement a clickhouse dialect, it is probably more
    /// error prone than it is beneficial.
    TemplatedSelect(String),
}

impl TableSetQuery {
    /// Check that a templated query contains all mandatory placeholders.
    ///
    /// Returns [`Error::MissingQueryPlaceholder`] naming the first missing
    /// placeholder; `AutoGenerated` queries always validate.
    pub fn validate(&self) -> Result<(), Error> {
        match self {
            TableSetQuery::AutoGenerated => Ok(()),
            TableSetQuery::TemplatedSelect(querystring) => {
                // validate only mandatory placeholders
                for placeholder in &["<[table]>", "<[h3indexes]>"] {
                    if !querystring.contains(placeholder) {
                        return Err(Error::MissingQueryPlaceholder(placeholder.to_string()));
                    }
                }
                Ok(())
            }
        }
    }
}

/// `None` maps to the autogenerated query, `Some(..)` to a templated one.
impl From<Option<String>> for TableSetQuery {
    fn from(instr: Option<String>) -> Self {
        match instr {
            Some(s) => Self::TemplatedSelect(s),
            None => Self::AutoGenerated,
        }
    }
}

impl Default for TableSetQuery {
    fn default() -> Self {
        Self::AutoGenerated
    }
}

pub trait BuildCellQueryString {
    /// build a select query for the given h3 cells.
    ///
    /// Will also fetch the parent, compacted indexes.
    fn build_cell_query_string(
        &self,
        tableset: &TableSet,
        h3_resolution: u8,
        h3cells: &[H3Cell],
    ) -> Result<String, Error>;
}

impl BuildCellQueryString for TableSetQuery {
    fn build_cell_query_string(
        &self,
        tableset: &TableSet,
        h3_resolution: u8,
        h3cells: &[H3Cell],
    ) -> Result<String, Error> {
        // the requested resolution must be backed by one of the base tables
        if !tableset.base_tables.contains_key(&h3_resolution) {
            error!(
                "Resolution {} is not a part of the base tables of tableset {}",
                h3_resolution, tableset.basename
            );
            return Err(Error::UnsupportedH3Resolution(h3_resolution));
        }
        if h3cells.is_empty() {
            return Err(Error::EmptyCells);
        };
        self.validate()?;

        // collect the indexes and the parents (where the tables exist)
        let queryable_h3indexes = collect_queryable_h3indexes(tableset, h3cells, h3_resolution)?;
        if queryable_h3indexes.is_empty() {
            return Err(Error::NoQueryableTables);
        }

        // all non-h3index columns, used for the autogenerated query
        let selectable_columns = itertools::join(
            tableset
                .columns
                .iter()
                .map(|(col_name, _)| col_name)
                .filter(|col_name| !col_name.starts_with(COL_NAME_H3INDEX)),
            ", ",
        );

        // one sub-query per table; the results get combined with UNION ALL below
        let mut query_string_parts = Vec::new();
        for table in tableset.tables_to_satisfy_query_at_resolution(h3_resolution)? {
            if let Some(query_h3indexes) = queryable_h3indexes.get(&table.spec.h3_resolution) {
                // clickhouse array literal of the indexes to query at this
                // table's resolution
                let query_h3indexesarray_string = format!(
                    "[{}]",
                    itertools::join(query_h3indexes.iter().map(|hi| hi.to_string()), ",",)
                );
                let tablename = table.to_table_name();
                let qs = match self {
                    TableSetQuery::AutoGenerated => {
                        format!(
                            "select {}, {} from {} where {} in {}",
                            COL_NAME_H3INDEX,
                            selectable_columns,
                            tablename,
                            COL_NAME_H3INDEX,
                            query_h3indexesarray_string
                        )
                    }
                    TableSetQuery::TemplatedSelect(query_string) => query_string
                        .replace("<[table]>", &tablename)
                        .replace("<[h3indexes]>", &query_h3indexesarray_string),
                };
                query_string_parts.push(qs);
            }
        }
        Ok(itertools::join(query_string_parts.iter(), " union all "))
    }
}

/// collect the indexes and the parents (where the tables exist)
fn collect_queryable_h3indexes(
    tableset: &TableSet,
    cells: &[H3Cell],
    query_h3_resolution: u8,
) -> Result<HashMap<u8, HashSet<u64>>, Error> {
    // one (initially empty) set per table resolution <= the queried resolution
    let mut queryable_h3indexes: HashMap<_, HashSet<_>> = tableset
        .base_tables
        .iter()
        .chain(tableset.compacted_tables.iter())
        .filter(|(r, _)| **r <= query_h3_resolution)
        .map(|(r, _)| (*r, HashSet::default()))
        .collect();
    for (resolution, queryable_h3indexes_set) in queryable_h3indexes.iter_mut() {
        // transform the input cells to this resolution; for coarser
        // resolutions this yields the parent cells of the inputs
        let mut h3indexes_at_resolution = change_resolution(cells, *resolution)
            .map(|cell_res| cell_res.map(|cell| cell.h3index()))
            .collect::<Result<Vec<_>, _>>()?;

        // deduplicate - multiple input cells may share the same parent
        h3indexes_at_resolution.sort_unstable();
        h3indexes_at_resolution.dedup();
        queryable_h3indexes_set.extend(h3indexes_at_resolution.iter());
    }
    Ok(queryable_h3indexes)
}
///! This module defines an assembly shim that dispatches calls with a ///! variable number of arguments as efficiently as possible, following the ///! System-V ABI calling convention, or other equivalent platform-standard ///! convention, depending on target. ///! ///! Currently, we have written the shim for x86_64 Linux and macOS. ///! ///! See the assembly files in `dynamic_apply/*.s` for details on their ///! implementation. use core::arch::global_asm; use cfg_if::cfg_if; use crate::function::ErlangResult; use crate::term::OpaqueTerm; use super::DynamicCallee; extern "C-unwind" { #[allow(improper_ctypes)] #[link_name = "__firefly_dynamic_apply"] pub fn apply(f: DynamicCallee, argv: *const OpaqueTerm, argc: usize) -> ErlangResult; } cfg_if! { if #[cfg(all(target_os = "macos", target_arch = "x86_64"))] { global_asm!(include_str!("asm/dynamic_apply_macos.s")); } else if #[cfg(all(target_os = "macos", target_arch = "aarch64"))] { global_asm!(include_str!("asm/dynamic_apply_macos_aarch64.s")); } else if #[cfg(target_arch = "x86_64")] { global_asm!(include_str!("asm/dynamic_apply_linux.s")); } else { compile_error!("dynamic calls have not been implemented for this platform!"); } } /* #[cfg(test)] mod tests { use core::mem; use super::*; use crate::term::OpaqueTerm; #[test] fn basic_apply_test() { // Transform a function reference to a generic void function pointer let callee = adder as *const (); // Transform the pointer to our DynamicCallee type alias, since that is what apply expects let callee = unsafe { mem::transmute::<*const (), DynamicCallee>(callee) }; // Build up the args and call the function let args = &[fixnum!(22), fixnum!(11)]; let argv = args.as_ptr(); let argc = args.len(); let result = unsafe { apply(callee, argv, argc) }; let expected = ErlangResult::ok(fixnum!(33)); assert_eq!(result, expected); } #[test] fn basic_apply_rustcc_test() { // Transform a function reference to a generic void function pointer let callee = adder_rust as *const (); // 
Transform the pointer to our DynamicCallee type alias, since that is what apply expects let callee = unsafe { mem::transmute::<*const (), DynamicCallee>(callee) }; // Build up the args and call the function let args = &[fixnum!(22), fixnum!(11)]; let argv = args.as_ptr(); let argc = args.len(); let result = unsafe { apply(callee, argv, argc) }; let expected = ErlangResult::ok(fixnum!(33)); assert_eq!(result, expected); } #[test] fn spilled_args_even_spills_apply_test() { // Transform a function reference to a generic void function pointer let callee = spilled_args_even as *const (); // Transform the pointer to our DynamicCallee type alias, since that is what apply expects let callee = unsafe { mem::transmute::<*const (), DynamicCallee>(callee) }; // Build up the args and call the function let mut args = vec![]; args.resize(10, fixnum!(1)); let (args, expected) = if cfg!(target_arch = "x86_64") { // On x86_64, we have 6 registers to use, so pass 8 arguments let slice = &args[0..7]; (slice, fixnum!(8)) } else if cfg!(target_arch = "aarch64") { // On aarch64, we have 8 registers to use, so pass 10 arguments (&args[0..], fixnum!(10)) } else { panic!("need to update test case for this target"); }; let argv = args.as_ptr(); let argc = args.len(); let result = unsafe { apply(callee, argv, argc) }; let expected = ErlangResult::ok(expected); assert_eq!(result, expected); } #[test] fn spilled_args_odd_spills_apply_test() { // Transform a function reference to a generic void function pointer let callee = spilled_args_odd as *const (); // Transform the pointer to our DynamicCallee type alias, since that is what apply expects let callee = unsafe { mem::transmute::<*const (), DynamicCallee>(callee) }; // Build up the args and call the function let mut args = vec![]; args.resize(9, fixnum!(1)); let (args, expected) = if cfg!(target_arch = "x86_64") { // On x86_64, we have 6 registers to use, so pass 7 arguments let slice = &args[0..6]; (slice, fixnum!(7)) } else if 
cfg!(target_arch = "aarch64") { // On aarch64, we have 8 registers to use, so pass 9 arguments (&args[0..], fixnum!(9)) } else { panic!("need to update test case for this target"); }; let argv = args.as_ptr(); let argc = args.len(); let result = unsafe { apply(callee, argv, argc) }; let expected = ErlangResult::ok(expected); assert_eq!(result, expected); } #[test] #[should_panic] fn panic_apply_test() { // Transform a function reference to a generic void function pointer let callee = panicky as *const (); // Transform the pointer to our DynamicCallee type alias, since that is what apply expects let callee = unsafe { mem::transmute::<*const (), DynamicCallee>(callee) }; // Build up the args and call the function let args = &[fixnum!(22), fixnum!(11)]; let argv = args.as_ptr(); let argc = args.len(); let _result = unsafe { apply(callee, argv, argc) }; } #[test] #[should_panic] fn panic_apply_spills_test() { // Transform a function reference to a generic void function pointer let callee = panicky_spilled as *const (); // Transform the pointer to our DynamicCallee type alias, since that is what apply expects let callee = unsafe { mem::transmute::<*const (), DynamicCallee>(callee) }; // Build up the args and call the function let mut args = vec![]; args.resize(8, fixnum!(1)); let argv = args.as_ptr(); let argc = args.len(); let _result = unsafe { apply(callee, argv, argc) }; } fn panicky(_x: usize, _y: usize) -> ErlangResult { panic!("panicky"); } extern "C-unwind" fn panicky_spilled( _a: Term, _b: Term, _c: Term, _d: Term, _e: Term, _f: Term, _g: Term, ) -> ErlangResult { panic!("panicky"); } extern "C" fn adder(x: Term, y: Term) -> ErlangResult { let result = x.decode_immediate() + y.decode_immediate(); ErlangResult::ok(fixnum!(result)) } fn adder_rust(x: Term, y: Term) -> ErlangResult { let result = x.decode_immediate() + y.decode_immediate(); ErlangResult::ok(fixnum!(result)) } #[cfg(target_arch = "x86_64")] extern "C" fn spilled_args_even( a: Term, b: Term, c: 
Term, d: Term, e: Term, f: Term, g: Term, h: Term, ) -> ErlangResult { let a = a.decode_immediate(); let b = b.decode_immediate(); let c = c.decode_immediate(); let d = d.decode_immediate(); let e = e.decode_immediate(); let f = f.decode_immediate(); let g = g.decode_immediate(); let h = h.decode_immediate(); let value = fixnum!(a + b + c + d + e + f + g + h); ErlangResult::ok(value) } #[cfg(target_arch = "aarch64")] extern "C" fn spilled_args_even( a: Term, b: Term, c: Term, d: Term, e: Term, f: Term, g: Term, h: Term, i: Term, j: Term, ) -> ErlangResult { let a = a.decode_immediate(); let b = b.decode_immediate(); let c = c.decode_immediate(); let d = d.decode_immediate(); let e = e.decode_immediate(); let f = f.decode_immediate(); let g = g.decode_immediate(); let h = h.decode_immediate(); let i = i.decode_immediate(); let j = j.decode_immediate(); let value = fixnum!(a + b + c + d + e + f + g + h + i + j); ErlangResult::ok(value) } #[cfg(target_arch = "x86_64")] extern "C" fn spilled_args_odd( a: Term, b: Term, c: Term, d: Term, e: Term, f: Term, g: Term, ) -> ErlangResult { let a = a.decode_immediate(); let b = b.decode_immediate(); let c = c.decode_immediate(); let d = d.decode_immediate(); let e = e.decode_immediate(); let f = f.decode_immediate(); let g = g.decode_immediate(); let value = fixnum!(a + b + c + d + e + f + g); ErlangResult::ok(value) } #[cfg(target_arch = "aarch64")] extern "C" fn spilled_args_odd( a: Term, b: Term, c: Term, d: Term, e: Term, f: Term, g: Term, h: Term, i: Term, ) -> ErlangResult { let a = a.decode_immediate(); let b = b.decode_immediate(); let c = c.decode_immediate(); let d = d.decode_immediate(); let e = e.decode_immediate(); let f = f.decode_immediate(); let g = g.decode_immediate(); let h = h.decode_immediate(); let i = i.decode_immediate(); let value = fixnum!(a + b + c + d + e + f + g + h + i); ErlangResult::ok(value) } } */
//! Proof-of-concept of a soundness hole: declaring the dependent of a
//! `self_cell` as covariant while the owner type (`fn(&'a ())`) is
//! *contravariant* over `'a` lets safe code extend a borrow's lifetime.
use self_cell::self_cell;

type X<'a> = &'a str;

self_cell! {
    pub struct Foo<'a> {
        // contravariant over 'a
        owner: fn(&'a ()),

        // declared covariant — the mismatch this PoC exploits
        #[covariant]
        dependent: X,
    }
}

// Returns the input `&'a str` with an arbitrary caller-chosen lifetime `'b`,
// without any `unsafe` — which must not be possible in sound code.
fn transmute_lifetime<'a, 'b>(x: &'a str) -> &'b str {
    // The closure smuggles `s: &'x str` out as the "dependent" value,
    // ignoring the owner reference it is handed.
    fn helper<'x>(s: &'x str) -> impl for<'z> FnOnce(&'z fn(&'x ())) -> &'z str {
        move |_| s
    }
    let x: Foo<'a> = Foo::new(|_| (), helper(x));
    let x: Foo<'static> = x; // coerce using variance
    let y = Box::leak(Box::new(x));
    y.borrow_dependent()
}

fn main() {
    let r;
    {
        let s = "Hello World".to_owned();
        r = transmute_lifetime(s.as_str());
        dbg!(r); // "Hello World"
    }
    // `s` has been dropped, yet `r` still points into its freed buffer.
    dbg!(r); // prints garbage :-)
}
// Copyright 2023 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::sync::Arc; use common_expression::TableSchemaRef; use crate::filters::xor8::xor8_filter::Xor8Filter; /// Filters of a given DataBlock /// `filter_schema.fields.len()` should equals `filters.len()` pub struct BlockFilter { // schema of index block, chosen columns only pub filter_schema: TableSchemaRef, // filters of index block, chosen columns only pub filters: Vec<Arc<Xor8Filter>>, }
//! Serde models for a GraphQL schema document — field names match the
//! camelCase / SCREAMING_SNAKE_CASE JSON produced by a schema
//! introspection query (mapped via `#[serde(rename)]` / `alias`).
use serde::Deserialize;

/// Root of the schema document.
#[derive(Deserialize)]
pub struct Schema {
    pub types: Vec<Type>,
    #[serde(rename = "queryType")]
    pub query_type: TypeReference,
    #[serde(rename = "mutationType")]
    pub mutation_type: Option<TypeReference>,
    #[serde(rename = "subscriptionType")]
    pub subscription_type: Option<TypeReference>,
    pub directives: Vec<Directive>,
}

/// Reference to a type by name only.
#[derive(Deserialize)]
pub struct TypeReference {
    pub name: String,
}

/// A type definition. Which of the optional members are populated
/// depends on `kind` (see the per-field comments).
#[derive(Deserialize)]
pub struct Type {
    pub kind: TypeKind,
    pub name: Option<String>,
    pub description: Option<String>,
    // Object and Interface only
    pub fields: Option<Vec<Field>>,
    // Object only
    pub interfaces: Option<Vec<Type>>,
    // Interface and Union only
    #[serde(rename = "possibleTypes")]
    pub possible_types: Option<Vec<Type>>,
    // Enum only
    #[serde(rename = "enumValues")]
    pub enum_values: Option<Vec<EnumValue>>,
    // InputObject only
    #[serde(rename = "inputFields")]
    pub input_fields: Option<Vec<InputValue>>,
    // NonNull and List only
    // NOTE(review): `Option<Box<Type>>` would avoid an allocation for the
    // `None` case, but changing the field's type would break callers.
    #[serde(rename = "ofType")]
    pub of_type: Box<Option<Type>>,
}

/// A field of an Object or Interface type.
#[derive(Deserialize)]
pub struct Field {
    pub name: Option<String>,
    pub description: Option<String>,
    pub args: Vec<InputValue>,
    #[serde(rename = "type")]
    pub schema_type: Type,
    #[serde(rename = "isDeprecated")]
    pub is_deprecated: bool,
    #[serde(rename = "deprecationReason")]
    pub deprecation_reason: Option<String>,
}

/// A field argument or an InputObject field.
#[derive(Deserialize)]
pub struct InputValue {
    pub name: String,
    pub description: Option<String>,
    #[serde(rename = "type")]
    pub schema_type: Type,
    // rendered as a GraphQL literal string, when present
    #[serde(rename = "defaultValue")]
    pub default_value: Option<String>,
}

/// One value of an Enum type.
#[derive(Deserialize)]
pub struct EnumValue {
    pub name: String,
    pub description: Option<String>,
    #[serde(rename = "isDeprecated")]
    pub is_deprecated: bool,
    #[serde(rename = "deprecationReason")]
    pub deprecation_reason: Option<String>,
}

/// Category of a `Type`; the JSON uses SCREAMING_SNAKE_CASE names.
#[derive(Deserialize, PartialEq, Debug)]
pub enum TypeKind {
    #[serde(alias = "SCALAR")]
    Scalar,
    #[serde(alias = "OBJECT")]
    Object,
    #[serde(alias = "INTERFACE")]
    Interface,
    #[serde(alias = "UNION")]
    Union,
    #[serde(alias = "ENUM")]
    Enum,
    #[serde(alias = "INPUT_OBJECT")]
    InputObject,
    #[serde(alias = "LIST")]
    List,
    #[serde(alias = "NON_NULL")]
    NonNull,
}

/// A directive definition.
#[derive(Deserialize)]
pub struct Directive {
    pub name: String,
    pub description: Option<String>,
    pub locations: Vec<DirectiveLocation>,
    pub args: Vec<InputValue>,
}

/// Where a directive may be applied.
#[derive(Deserialize)]
pub enum DirectiveLocation {
    #[serde(rename = "QUERY")]
    Query,
    #[serde(rename = "MUTATION")]
    Mutation,
    #[serde(rename = "SUBSCRIPTION")]
    Subscription,
    #[serde(rename = "FIELD")]
    Field,
    #[serde(rename = "FRAGMENT_DEFINITION")]
    FragmentDefinition,
    #[serde(rename = "FRAGMENT_SPREAD")]
    FragmentSpread,
    #[serde(rename = "INLINE_FRAGMENT")]
    InlineFragment,
    #[serde(rename = "SCHEMA")]
    Schema,
    #[serde(rename = "SCALAR")]
    Scalar,
    #[serde(rename = "OBJECT")]
    Object,
    #[serde(rename = "FIELD_DEFINITION")]
    FieldDefinition,
    #[serde(rename = "ARGUMENT_DEFINITION")]
    ArgumentDefinition,
    #[serde(rename = "INTERFACE")]
    Interface,
    #[serde(rename = "UNION")]
    Union,
    #[serde(rename = "ENUM")]
    Enum,
    #[serde(rename = "ENUM_VALUE")]
    EnumValue,
    #[serde(rename = "INPUT_OBJECT")]
    InputObject,
    #[serde(rename = "INPUT_FIELD_DEFINITION")]
    InputFieldDefinition,
}
use prettytable::{format, Table};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;

// NOTE(review): the `row!` macro used throughout comes from `prettytable`,
// presumably imported with `#[macro_use]` in the crate root — confirm.

/// Stored credentials for the intranet auto-login.
#[derive(Deserialize, Serialize, Debug)]
pub struct Pass {
    pub autologin: String,
    pub login: String,
    pub passwd: String,
}

#[derive(Deserialize, Debug)]
pub struct Gpa {
    // GPA value as a string, exactly as the API returns it
    gpa: String,
}

/// User profile as returned by the API.
#[derive(Deserialize, Debug)]
pub struct User {
    pub login: String,
    title: String,
    scolaryear: String,
    promo: i32,
    location: String,
    gpa: Vec<Gpa>,
}

#[derive(Deserialize, Debug)]
pub struct Home {
    ip: String,
    pub board: Board,
}

/// Dashboard contents: current projects and activities.
#[derive(Deserialize, Debug)]
pub struct Board {
    pub projets: Vec<Projet>,
    pub activites: Vec<Activites>,
}

#[derive(Deserialize, Debug)]
pub struct Projet {
    pub title: String,
    pub title_link: String,
    pub timeline_start: String,
    pub timeline_end: String,
    // progress percentage, rendered by the API as a number-like value
    pub timeline_barre: String,
    pub date_inscription: Value,
    pub id_activite: String,
}

#[derive(Deserialize, Debug)]
pub struct Activites {
    pub title: String,
    module: String,
    module_link: String,
    module_code: String,
    pub title_link: String,
    timeline_start: String,
    timeline_end: String,
    timeline_barre: String,
    salle: String,
    token: Option<String>,
    pub token_link: String,
}

#[derive(Deserialize, Debug)]
pub struct Event {
    pub code: String,
    num_event: String,
    seats: String,
    nb_inscrits: String,
    begin: String,
    end: String,
    id_activite: String,
    allow_token: String,
}

#[derive(Deserialize, Debug)]
pub struct Activite {
    pub scolaryear: String,
    codemodule: String,
    codeinstance: String,
    codeacti: String,
    call_ihk: Option<String>,
    instance_location: String,
    module_title: String,
    pub title: String,
    begin: String,
    start: String,
    nb_hours: String,
    pub events: Vec<Event>,
}

#[derive(Deserialize, Debug)]
pub struct Document {
    pub title: String,
    pub fullpath: String,
}

#[derive(Deserialize, Serialize, Debug)]
pub struct TokenData {
    pub token: String,
    pub rate: i32,
    pub comment: String,
}

#[derive(Deserialize, Debug)]
pub struct Module {
    title: String,
    date_ins: String,
    scolaryear: i32,
    grade: String,
    credits: i32,
}

#[derive(Deserialize, Debug)]
pub struct Note {
    title: String,
    titlemodule: String,
    date: String,
    scolaryear: i32,
    final_note: f32,
}

#[derive(Deserialize, Debug)]
pub struct ModulesNotes {
    modules: Vec<Module>,
    notes: Vec<Note>,
}

#[derive(Deserialize, Debug)]
pub struct Repo {
    url: String,
    uuid: String,
}

#[derive(Deserialize, Debug)]
pub struct Repos {
    message: String,
    repositories: HashMap<String, Repo>,
}

impl Repos {
    /// Print every repository name with a sequential numeric id.
    pub fn print_repos(&self) {
        let mut table = Table::new();
        table.set_format(format_display_table());
        table.add_row(row!["ID", "REPO_NAME"]);
        table.add_row(row!["--", "---------"]);
        for (idx, repo) in self.repositories.keys().enumerate() {
            table.add_row(row![idx, repo]);
        }
        table.printstd();
    }
}

/// Request body carrying only the signed user name.
#[derive(Serialize, Debug)]
pub struct Blih {
    pub user: String,
    pub signature: String,
}

/// Request body with an additional JSON payload.
#[derive(Serialize, Debug)]
pub struct BlihData {
    pub user: String,
    pub signature: String,
    pub data: serde_json::Value,
}

#[derive(Deserialize, Debug)]
pub struct BlihResponse {
    pub message: Option<String>,
    pub error: Option<String>,
}

#[derive(Deserialize, Debug)]
pub struct IntaResponse {
    pub message: Option<String>,
    pub error: Option<String>,
}

impl User {
    /// Print the profile as a borderless two-column table.
    pub fn print(&self) {
        let mut table = Table::new();
        table.set_format(format_display());
        table.add_row(row!["Name: ", self.title]);
        table.add_row(row!["Login: ", self.login]);
        table.add_row(row!["Promo: ", self.promo]);
        // NOTE(review): panics if `gpa` is empty — confirm the API always
        // returns at least one entry.
        table.add_row(row!["Gpa: ", self.gpa[0].gpa]);
        table.add_row(row!["Scolaryear: ", self.scolaryear]);
        table.add_row(row!["Location: ", self.location]);
        table.printstd();
    }
}

/// Keep only the integral part of a number rendered as a string
/// (e.g. "42.5" -> "42").
// NOTE(review): "parce" is presumably a typo for "parse"; kept since the
// name is referenced throughout this file.
fn parce_json_float_to_string(str: &String) -> String {
    let all_elements: Vec<&str> = str.split(".").collect();
    format!("{}", all_elements[0])
}

impl Board {
    /// List all projects with id, name and a textual progress bar.
    pub fn print_projects(&self) {
        let mut table = Table::new();
        table.set_format(format_display_table());
        table.add_row(row!["ID", "PROJECT_NAME", "TIMELINE_BARRE"]);
        table.add_row(row!["--", "------------", "--------------"]);
        for (idx, project) in self.projets.iter().enumerate() {
            let nbr: String = parce_json_float_to_string(&project.timeline_barre);
            table.add_row(row![
                idx,
                project.title,
                format!("|{}|{}%", parce_timeline(&nbr), &nbr)
            ]);
        }
        print!("\n");
        table.printstd();
    }

    /// Print the details of the project selected by `idx` (the id shown by
    /// `print_projects`). Panics when the id does not exist.
    pub fn print_project_detail(&self, idx: i32, autologin_url: &String) {
        match self.projets.get(idx as usize) {
            Some(proj) => {
                let mut table = Table::new();
                let nbr: String = parce_json_float_to_string(&proj.timeline_barre);
                table.set_format(format_display());
                table.add_row(row!["Title: ", proj.title]);
                table.add_row(row![
                    "Link: ",
                    format!("{}{}project/", autologin_url, proj.title_link)
                ]);
                table.add_row(row!["Start_Time: ", proj.timeline_start]);
                table.add_row(row!["End_Time: ", proj.timeline_end]);
                table.add_row(row![
                    "Time_Barre: ",
                    format!("|{}|{}%", parce_timeline(&nbr), &nbr)
                ]);
                table.add_row(row!["Date_inscription: ", proj.date_inscription]);
                table.printstd();
            }
            None => panic!("there is no project with this id"),
        }
    }

    /// List all activities with id, name and a textual progress bar.
    pub fn print_activity(&self) {
        let mut table = Table::new();
        table.set_format(format_display_table());
        table.add_row(row!["ID", "ACTIVITY_NAME", "TIMELINE_BARRE"]);
        table.add_row(row!["--", "-------------", "--------------"]);
        for (idx, activite) in self.activites.iter().enumerate() {
            let nbr: String = parce_json_float_to_string(&activite.timeline_barre);
            table.add_row(row![
                idx,
                activite.title,
                format!("|{}|{}%", parce_timeline(&nbr), &nbr)
            ]);
        }
        print!("\n");
        table.printstd();
    }

    /// Print the details of the activity selected by `idx` (the id shown by
    /// `print_activity`). Panics when the id does not exist.
    pub fn print_activity_detail(&self, idx: i32, autologin_url: &String) {
        match self.activites.get(idx as usize) {
            Some(activite) => {
                let mut table = Table::new();
                let nbr: String = parce_json_float_to_string(&activite.timeline_barre);
                table.set_format(format_display());
                table.add_row(row!["Title: ", activite.title]);
                table.add_row(row![
                    "Link: ",
                    format!("{}{}project/", autologin_url, activite.title_link)
                ]);
                table.add_row(row!["Start_Time: ", activite.timeline_start]);
                table.add_row(row!["End_Time: ", activite.timeline_end]);
                table.add_row(row![
                    "Time_Barre: ",
                    format!("|{}|{}%", parce_timeline(&nbr), &nbr)
                ]);
                table.add_row(row!["Salle: ", activite.salle]);
                match &activite.token {
                    Some(token) => {
                        table.add_row(row!["token: ", &token]);
                        if &token[..] == "1" {
                            // NOTE(review): this adds a *second* "token: " row
                            // and also prints the token to stdout — looks like
                            // leftover debugging; confirm intent.
                            table.add_row(row!["token: ", token]);
                            println!("{}", token);
                        }
                    }
                    None => {
                        table.add_row(row!["token: ", "null"]);
                    }
                };
                table.printstd();
            }
            None => panic!("there is no project with this id"),
        }
    }
}

impl ModulesNotes {
    /// Print all grades, one row per note.
    pub fn print_notes(&self) {
        let mut table = Table::new();
        table.set_format(format_display_table());
        table.add_row(row!["TITLE", "MODULES", "DATE", "SCOLARYEAR", "NOTE"]);
        table.add_row(row!["-----", "-------", "----", "----------", "----"]);
        for note in self.notes.iter() {
            table.add_row(row![
                note.title,
                note.titlemodule,
                note.date,
                note.scolaryear,
                note.final_note
            ]);
        }
        print!("\n");
        table.printstd();
    }

    /// Print all modules, one row per module.
    pub fn print_modules(&self) {
        let mut table = Table::new();
        table.set_format(format_display_table());
        table.add_row(row!["TITLE", "DATE", "SCOLARYEAR", "GRADE", "CREDIT"]);
        table.add_row(row!["-----", "----", "----------", "-----", "------"]);
        for mode in self.modules.iter() {
            table.add_row(row![
                mode.title,
                mode.date_ins,
                mode.scolaryear,
                mode.grade,
                mode.credits
            ]);
        }
        print!("\n");
        table.printstd();
    }
}

/// Borderless format used for key/value detail views.
fn format_display() -> format::TableFormat {
    format::FormatBuilder::new()
        .column_separator(' ')
        .borders(' ')
        .separators(
            &[format::LinePosition::Top, format::LinePosition::Bottom],
            format::LineSeparator::new(' ', ' ', ' ', ' '),
        )
        .padding(0, 0)
        .build()
}

/// Bordered format used for tabular listings.
fn format_display_table() -> format::TableFormat {
    format::FormatBuilder::new()
        .column_separator('|')
        .borders('|')
        .separators(
            &[format::LinePosition::Top, format::LinePosition::Bottom],
            format::LineSeparator::new('-', '+', '+', '+'),
        )
        .padding(2, 2)
        .build()
}

/// Render a 10-step progress bar from a percentage string: one cell per
/// 10%, filled below the value, blank above it.
/// Panics if `nbr` does not parse as an integer.
fn parce_timeline(nbr: &String) -> String {
    let max: i32 = nbr.parse().unwrap();
    (0..10)
        .map(|i| match i * 10 < max {
            true => 35 as char,  // ASCII 35 == '#'
            false => 32 as char, // ASCII 32 == ' '
        })
        .collect()
}
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // run-pass #![feature(generators)] enum Enum { A(String), B } fn main() { || { loop { if let true = true { match Enum::A(String::new()) { Enum::A(_var) => {} Enum::B => {} } } yield; } }; }
// belajar menggabung dua string yg berbeda tipe // String dan &str // untuk tipe 'str' menyusul fn main() { // String dan &str let mut name: String = "Agus ".to_owned(); let last_name: &str = "Susilo"; name.push_str(last_name); println!("{}", name); // String dan String let mut lang_name: String = "Rust ".to_owned(); let feature: String = "awesome".to_owned(); // perhatikan pemakaian & pada saat push_str lang_name.push_str(&feature); println!("{}", lang_name); // String dan &str // no muttable var let lang: String = "Rust ".to_owned(); let comment: &str = "is fast."; let lang_comment = lang+comment; println!("{}", lang_comment); // jika ingin menghasilkan string baru tanpa merubah keduanya let borrowed_str: &str = "Hello "; let an_borrowed_str: &str = "Rust!"; let bersama = format!("{}{}", borrowed_str, an_borrowed_str); println!("{}", bersama); both_string(); string_clone(); } fn both_string() { let a: String = "Hello ".to_owned(); let b: String = "world!".to_owned(); let c = format!("{}{}", a, b); println!("{}", c); } fn string_clone() { let a: String = "Hello ".to_owned(); let b: &str = "world clone."; let c = a.clone() + b; println!("{}", c); }
/// Entry point: set up console logging, then mount the Yew application's
/// root `Model` component on the page.
fn main() {
    web_logger::init();
    yew::start_app::<multi_thread_std_web::Model>();
}
//! A command-line executable for generating the chain's genesis block.

#[macro_use]
extern crate morgan_vote_controller;
#[macro_use]
extern crate morgan_stake_controller;
#[macro_use]
extern crate morgan_budget_controller;
#[macro_use]
extern crate morgan_token_controller;
#[macro_use]
extern crate morgan_config_controller;
#[macro_use]
extern crate morgan_exchange_controller;

use clap::{crate_description, crate_name, crate_version, value_t_or_exit, App, Arg};
use morgan::blockBufferPool::create_new_ledger;
use morgan_interface::account::Account;
use morgan_interface::fee_calculator::FeeCalculator;
use morgan_interface::genesis_block::GenesisBlock;
use morgan_interface::hash::{hash, Hash};
use morgan_interface::poh_config::PohConfig;
use morgan_interface::signature::{read_keypair, KeypairUtil};
use morgan_interface::system_program;
use morgan_interface::timing;
use morgan_stake_api::stake_state;
use morgan_storage_controller::genesis_block_util::GenesisBlockUtil;
use morgan_vote_api::vote_state;
use std::error;
use std::time::{Duration, Instant};

/// Default number of difs granted to the bootstrap leader's node account.
pub const BOOTSTRAP_LEADER_DIFS: u64 = 42;

fn main() -> Result<(), Box<dyn error::Error>> {
    // String forms of defaults, since clap default values are &str.
    let default_bootstrap_leader_difs = &BOOTSTRAP_LEADER_DIFS.to_string();
    let default_difs_per_signature = &FeeCalculator::default().difs_per_signature.to_string();
    let default_target_tick_duration =
        &timing::duration_as_ms(&PohConfig::default().target_tick_duration).to_string();
    let default_ticks_per_slot = &timing::DEFAULT_TICKS_PER_SLOT.to_string();
    let default_slots_per_epoch = &timing::DEFAULT_SLOTS_PER_EPOCH.to_string();

    // Command line definition and parsing.
    let matches = App::new(crate_name!())
        .about(crate_description!())
        .version(crate_version!())
        .arg(
            Arg::with_name("bootstrap_leader_keypair_file")
                .short("b")
                .long("bootstrap-leader-keypair")
                .value_name("BOOTSTRAP LEADER KEYPAIR")
                .takes_value(true)
                .required(true)
                .help("Path to file containing the bootstrap leader's keypair"),
        )
        .arg(
            Arg::with_name("ledger_path")
                .short("l")
                .long("ledger")
                .value_name("DIR")
                .takes_value(true)
                .required(true)
                .help("Use directory as persistent ledger location"),
        )
        .arg(
            Arg::with_name("difs")
                .short("t")
                .long("difs")
                .value_name("DIFS")
                .takes_value(true)
                .required(true)
                .help("Number of difs to create in the mint"),
        )
        .arg(
            Arg::with_name("mint_keypair_file")
                .short("m")
                .long("mint")
                .value_name("MINT")
                .takes_value(true)
                .required(true)
                .help("Path to file containing keys of the mint"),
        )
        .arg(
            Arg::with_name("bootstrap_vote_keypair_file")
                .short("s")
                .long("bootstrap-vote-keypair")
                .value_name("BOOTSTRAP VOTE KEYPAIR")
                .takes_value(true)
                .required(true)
                .help("Path to file containing the bootstrap leader's voting keypair"),
        )
        .arg(
            Arg::with_name("bootstrap_stake_keypair_file")
                .short("k")
                .long("bootstrap-stake-keypair")
                .value_name("BOOTSTRAP STAKE KEYPAIR")
                .takes_value(true)
                .required(true)
                .help("Path to file containing the bootstrap leader's staking keypair"),
        )
        .arg(
            Arg::with_name("bootstrap_storage_keypair_file")
                .long("bootstrap-storage-keypair")
                .value_name("BOOTSTRAP STORAGE KEYPAIR")
                .takes_value(true)
                .required(true)
                .help("Path to file containing the bootstrap leader's storage keypair"),
        )
        .arg(
            Arg::with_name("bootstrap_leader_difs")
                .long("bootstrap-leader-difs")
                .value_name("DIFS")
                .takes_value(true)
                .default_value(default_bootstrap_leader_difs)
                .required(true)
                .help("Number of difs to assign to the bootstrap leader"),
        )
        .arg(
            Arg::with_name("difs_per_signature")
                .long("difs-per-signature")
                .value_name("DIFS")
                .takes_value(true)
                .default_value(default_difs_per_signature)
                .help("Number of difs the cluster will charge for signature verification"),
        )
        .arg(
            Arg::with_name("target_tick_duration")
                .long("target-tick-duration")
                .value_name("MILLIS")
                .takes_value(true)
                .default_value(default_target_tick_duration)
                .help("The target tick rate of the cluster in milliseconds"),
        )
        .arg(
            Arg::with_name("hashes_per_tick")
                .long("hashes-per-tick")
                .value_name("NUM_HASHES|\"auto\"|\"sleep\"")
                .takes_value(true)
                .default_value("auto")
                .help(
                    "How many PoH hashes to roll before emitting the next tick. \
                     If \"auto\", determine based on --target-tick-duration \
                     and the hash rate of this computer. If \"sleep\", for development \
                     sleep for --target-tick-duration instead of hashing",
                ),
        )
        .arg(
            Arg::with_name("ticks_per_slot")
                .long("ticks-per-slot")
                .value_name("TICKS")
                .takes_value(true)
                .default_value(default_ticks_per_slot)
                .help("The number of ticks in a slot"),
        )
        .arg(
            Arg::with_name("slots_per_epoch")
                .long("slots-per-epoch")
                .value_name("SLOTS")
                .takes_value(true)
                .default_value(default_slots_per_epoch)
                .help("The number of slots in an epoch"),
        )
        .get_matches();

    // All of these are `required(true)` above, so unwrap cannot fail here.
    let bootstrap_leader_keypair_file = matches.value_of("bootstrap_leader_keypair_file").unwrap();
    let bootstrap_vote_keypair_file = matches.value_of("bootstrap_vote_keypair_file").unwrap();
    let bootstrap_stake_keypair_file = matches.value_of("bootstrap_stake_keypair_file").unwrap();
    let bootstrap_storage_keypair_file =
        matches.value_of("bootstrap_storage_keypair_file").unwrap();
    let mint_keypair_file = matches.value_of("mint_keypair_file").unwrap();
    let ledger_path = matches.value_of("ledger_path").unwrap();
    let difs = value_t_or_exit!(matches, "difs", u64);
    let bootstrap_leader_stake_difs = value_t_or_exit!(matches, "bootstrap_leader_difs", u64);

    // Load all keypairs from disk.
    let bootstrap_leader_keypair = read_keypair(bootstrap_leader_keypair_file)?;
    let bootstrap_vote_keypair = read_keypair(bootstrap_vote_keypair_file)?;
    let bootstrap_stake_keypair = read_keypair(bootstrap_stake_keypair_file)?;
    let bootstrap_storage_keypair = read_keypair(bootstrap_storage_keypair_file)?;
    let mint_keypair = read_keypair(mint_keypair_file)?;

    // TODO: de-duplicate the stake once passive staking
    // is fully implemented
    // https://github.com/morgan-labs/morgan/issues/4213
    let (vote_account, vote_state) = vote_state::create_bootstrap_leader_account(
        &bootstrap_vote_keypair.pubkey(),
        &bootstrap_leader_keypair.pubkey(),
        0,
        bootstrap_leader_stake_difs,
    );

    // Assemble the genesis accounts and the set of built-in controllers.
    let mut genesis_block = GenesisBlock::new(
        &bootstrap_leader_keypair.pubkey(),
        &[
            // the mint
            (
                mint_keypair.pubkey(),
                Account::new(difs, 0, 0, &system_program::id()),
            ),
            // node needs an account to issue votes from
            (
                bootstrap_leader_keypair.pubkey(),
                Account::new(1, 0, 0, &system_program::id()),
            ),
            // where votes go to
            (bootstrap_vote_keypair.pubkey(), vote_account),
            // passive bootstrap leader stake, duplicates above temporarily
            (
                bootstrap_stake_keypair.pubkey(),
                stake_state::create_delegate_stake_account(
                    &bootstrap_vote_keypair.pubkey(),
                    &vote_state,
                    bootstrap_leader_stake_difs,
                ),
            ),
        ],
        &[
            morgan_vote_controller!(),
            morgan_stake_controller!(),
            morgan_budget_controller!(),
            morgan_token_controller!(),
            morgan_config_controller!(),
            morgan_exchange_controller!(),
        ],
    );
    genesis_block.add_storage_controller(&bootstrap_storage_keypair.pubkey());

    // Apply the remaining tunables from the command line.
    genesis_block.fee_calculator.difs_per_signature =
        value_t_or_exit!(matches, "difs_per_signature", u64);
    genesis_block.ticks_per_slot = value_t_or_exit!(matches, "ticks_per_slot", u64);
    genesis_block.slots_per_epoch = value_t_or_exit!(matches, "slots_per_epoch", u64);
    genesis_block.poh_config.target_tick_duration =
        Duration::from_millis(value_t_or_exit!(matches, "target_tick_duration", u64));

    match matches.value_of("hashes_per_tick").unwrap() {
        "auto" => {
            // Benchmark this machine's hash rate and derive hashes-per-tick
            // from the target tick duration.
            let mut v = Hash::default();
            println!("Running 1 million hashes...");
            let start = Instant::now();
            for _ in 0..1_000_000 {
                v = hash(&v.as_ref());
            }
            let end = Instant::now();
            let elapsed = end.duration_since(start).as_millis();

            // NOTE(review): if the benchmark finished in under 1ms, `elapsed`
            // is 0 and this division panics — confirm that is acceptable.
            let hashes_per_tick = (genesis_block.poh_config.target_tick_duration.as_millis()
                * 1_000_000
                / elapsed) as u64;
            println!("Hashes per tick: {}", hashes_per_tick);
            genesis_block.poh_config.hashes_per_tick = Some(hashes_per_tick);
        }
        "sleep" => {
            // Development mode: sleep for the tick duration instead of hashing.
            genesis_block.poh_config.hashes_per_tick = None;
        }
        _ => {
            // Explicit numeric value.
            genesis_block.poh_config.hashes_per_tick =
                Some(value_t_or_exit!(matches, "hashes_per_tick", u64));
        }
    }

    // Persist the genesis block as a fresh ledger.
    create_new_ledger(ledger_path, &genesis_block)?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use hashbrown::HashSet;

    /// Verifies that every native program id round-trips to its well-known
    /// base-58 string form, keeping the hard-coded addresses in sync with
    /// the program crates.
    #[test]
    fn test_program_ids() {
        // NOTE(review): the Stake program was listed twice in the original
        // table; the duplicate entry added no coverage and was removed.
        let ids = [
            (
                "11111111111111111111111111111111",
                morgan_interface::system_program::id(),
            ),
            (
                "NativeLoader1111111111111111111111111111111",
                morgan_interface::native_loader::id(),
            ),
            (
                "BPFLoader1111111111111111111111111111111111",
                morgan_interface::bpf_loader::id(),
            ),
            (
                "Budget1111111111111111111111111111111111111",
                morgan_budget_api::id(),
            ),
            (
                "Stake11111111111111111111111111111111111111",
                morgan_stake_api::id(),
            ),
            (
                "Storage111111111111111111111111111111111111",
                morgan_storage_api::id(),
            ),
            (
                "Token11111111111111111111111111111111111111",
                morgan_token_api::id(),
            ),
            (
                "Vote111111111111111111111111111111111111111",
                morgan_vote_api::id(),
            ),
            (
                "Config1111111111111111111111111111111111111",
                morgan_config_api::id(),
            ),
            (
                "Exchange11111111111111111111111111111111111",
                morgan_exchange_api::id(),
            ),
        ];
        assert!(ids.iter().all(|(name, id)| *name == id.to_string()));
    }

    /// Ensures no two native programs share a program id; a collision would
    /// make program routing ambiguous.
    #[test]
    fn test_program_id_uniqueness() {
        let mut unique = HashSet::new();
        let ids = vec![
            morgan_interface::system_program::id(),
            morgan_interface::native_loader::id(),
            morgan_interface::bpf_loader::id(),
            morgan_budget_api::id(),
            morgan_storage_api::id(),
            morgan_token_api::id(),
            morgan_vote_api::id(),
            morgan_stake_api::id(),
            morgan_config_api::id(),
            morgan_exchange_api::id(),
        ];
        // `HashSet::insert` returns false on a repeated id, failing the assert.
        assert!(ids.into_iter().all(move |id| unique.insert(id)));
    }
}
use std::path::Path;
use std::fs::File;
use std::io::*;

/// Start of the program area in CHIP-8 memory.
const PROGRAM_START: usize = 0x200;
/// Total size of the CHIP-8 address space.
const MEMORY_SIZE: usize = 4096;

/// Built-in 5-byte font sprites for the hex digits 0-F, stored at the
/// beginning of memory.
const SPRITES: [[u8; 5]; 16] = [
    [0xF0, 0x90, 0x90, 0x90, 0xF0], // 0
    [0x20, 0x60, 0x20, 0x20, 0x70], // 1
    [0xF0, 0x10, 0xF0, 0x80, 0xF0], // 2
    [0xF0, 0x10, 0xF0, 0x10, 0xF0], // 3
    [0x90, 0x90, 0xF0, 0x10, 0x10], // 4
    [0xF0, 0x80, 0xF0, 0x10, 0xF0], // 5
    [0xF0, 0x80, 0xF0, 0x90, 0xF0], // 6
    [0xF0, 0x10, 0x20, 0x40, 0x40], // 7
    [0xF0, 0x90, 0xF0, 0x90, 0xF0], // 8
    [0xF0, 0x90, 0xF0, 0x10, 0xF0], // 9
    [0xF0, 0x90, 0xF0, 0x90, 0x90], // A
    [0xE0, 0x90, 0xE0, 0x90, 0xE0], // B
    [0xF0, 0x80, 0x80, 0x80, 0xF0], // C
    [0xE0, 0x90, 0x90, 0x90, 0xE0], // D
    [0xF0, 0x80, 0xF0, 0x80, 0xF0], // E
    [0xF0, 0x80, 0xF0, 0x80, 0x80], // F
];

/// CHIP-8 memory image: 4 KiB holding the built-in font sprites at 0x000
/// and the loaded program starting at 0x200.
pub struct Rom {
    data: [u8; 4096],
}

impl Rom {
    /// Reads the ROM file at `path` and builds the memory image with the
    /// font sprites at 0x000 and the program at 0x200.
    ///
    /// # Errors
    ///
    /// Returns any I/O error from opening/reading the file, and
    /// `ErrorKind::InvalidData` if the image does not fit in the
    /// 0x200..0x1000 program area (the original code panicked on an
    /// out-of-bounds index instead).
    pub fn new<P: AsRef<Path>>(path: P) -> Result<Rom> {
        let mut file = File::open(&path)?;
        let mut buf = Vec::new();
        file.read_to_end(&mut buf)?;

        if buf.len() > MEMORY_SIZE - PROGRAM_START {
            return Err(Error::new(
                ErrorKind::InvalidData,
                "ROM image too large for CHIP-8 memory",
            ));
        }

        let mut data = [0u8; MEMORY_SIZE];
        // Font sprites occupy the first 16 * 5 = 80 bytes.
        for (i, sprite) in SPRITES.iter().enumerate() {
            data[i * 5..(i + 1) * 5].copy_from_slice(sprite);
        }
        // Program bytes go at the conventional load address.
        data[PROGRAM_START..PROGRAM_START + buf.len()].copy_from_slice(&buf);

        Ok(Rom { data })
    }

    /// Returns the byte at `address`.
    ///
    /// # Panics
    ///
    /// Panics if `address >= 4096` (unchanged from the original contract).
    pub fn load(&self, address: u16) -> u8 {
        self.data[address as usize]
    }

    /// Writes `value` at `address`.
    ///
    /// # Panics
    ///
    /// Panics if `address >= 4096` (unchanged from the original contract).
    pub fn store(&mut self, address: u16, value: u8) {
        self.data[address as usize] = value;
    }
}
use x86_64::{
    structures::paging::{FrameAllocator, PhysFrame, Mapper, OffsetPageTable, Page, PageTable, Size4KiB},
    PhysAddr, VirtAddr,
};

/// Returns a mutable reference to the active level-4 page table.
///
/// # Safety
///
/// The caller must guarantee that all physical memory is mapped at
/// `physical_memory_offset`, and must not call this twice while a previous
/// `&mut` reference is alive (that would alias mutable references).
unsafe fn active_level_4_table(physical_memory_offset: VirtAddr) -> &'static mut PageTable {
    use x86_64::registers::control::Cr3;

    // Cr3 points to level 4 page table
    let (level_4_table_frame, _) = Cr3::read(); // skipping flags on second field of tuple

    let phys = level_4_table_frame.start_address();
    // add physical_memory_offset we got from `bootloader` to calculate virtual memory where phys memory is 1-1 mapped to
    let virt = physical_memory_offset + phys.as_u64();
    let page_table_ptr: *mut PageTable = virt.as_mut_ptr(); // get a raw pointer to PageTable out of the mem address

    &mut *page_table_ptr // as mutable since we'll need to modify PageTable
}

/// Builds an `OffsetPageTable` wrapping the active level-4 table.
///
/// # Safety
///
/// Same contract as `active_level_4_table`: complete physical-memory
/// mapping at `physical_memory_offset`, and call at most once to avoid
/// aliasing `&mut` references.
pub unsafe fn init(physical_memory_offset: VirtAddr) -> OffsetPageTable<'static> {
    let level_4_table = active_level_4_table(physical_memory_offset);
    OffsetPageTable::new(level_4_table, physical_memory_offset)
}

pub struct EmptyFrameAllocator; // allocator that always returns None

// SAFETY comment (review): trivially sound — it never hands out a frame at all.
unsafe impl FrameAllocator<Size4KiB> for EmptyFrameAllocator {
    fn allocate_frame(&mut self) -> Option<PhysFrame> {
        None
    }
}

use bootloader::bootinfo::{MemoryMap, MemoryRegionType};

//frame allocator that returns usable frame using info from bootloader's memory_map
pub struct BootInfoFrameAllocator {
    memory_map: &'static MemoryMap, // provided by the bootloader
    next: usize,                    // index of the next frame to hand out
}

impl BootInfoFrameAllocator {
    /// Creates an allocator over the bootloader-provided memory map.
    ///
    /// # Safety
    ///
    /// The caller must guarantee the memory map is accurate and that every
    /// frame marked `Usable` is really unused — otherwise frames may be
    /// handed out twice.
    pub unsafe fn init(memory_map: &'static MemoryMap) -> Self {
        BootInfoFrameAllocator { memory_map, next: 0 }
    }

    /// Iterator over every usable 4 KiB frame described by the memory map.
    fn usable_frames(&self) -> impl Iterator<Item = PhysFrame> {
        // get mem regions from memory_map
        let regions = self.memory_map.iter();
        let usable_regions = regions.filter(|r| r.region_type == MemoryRegionType::Usable);
        //map each region to it's address range
        let addr_ranges = usable_regions.map(|r| r.range.start_addr()..r.range.end_addr());
        // we pick one address every 4096 from the range, since we want 4Kb page frames
        let frame_addresses = addr_ranges.flat_map(|r| r.step_by(4096));
        // create physframe
        frame_addresses.map(|addr| PhysFrame::containing_address(PhysAddr::new(addr)))
    }
}

unsafe impl FrameAllocator<Size4KiB> for BootInfoFrameAllocator {
    fn allocate_frame(&mut self) -> Option<PhysFrame> {
        // NOTE(review): `usable_frames()` is rebuilt and re-walked via
        // `nth(self.next)` on every call, so N allocations cost O(N^2)
        // iterator steps. Fine for a toy kernel; cache the iterator state
        // if allocation ever becomes hot.
        let frame = self.usable_frames().nth(self.next);
        self.next += 1;
        frame
    }
}

/// Maps `page` to the VGA text buffer frame (0xb8000), writable.
/// Panics if the mapping fails (e.g. the page is already mapped).
pub fn create_example_mapping(
    page: Page,
    mapper: &mut OffsetPageTable,
    frame_allocator: &mut impl FrameAllocator<Size4KiB>,
) {
    use x86_64::structures::paging::PageTableFlags as Flags;

    let frame = PhysFrame::containing_address(PhysAddr::new(0xb8000)); // VGA buffer address
    let flags = Flags::PRESENT | Flags::WRITABLE;

    // unsafe: caller of this fn relies on `frame` not being mapped elsewhere;
    // here it deliberately aliases the VGA buffer for demonstration.
    let map_to_result = unsafe { mapper.map_to(page, frame, flags, frame_allocator) };
    map_to_result.expect("map_to failed").flush(); // flush this page from TLB!
}
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::source::snippet;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;

declare_clippy_lint! {
    /// ### What it does
    /// Detects `().hash(_)`.
    ///
    /// ### Why is this bad?
    /// Hashing a unit value doesn't do anything as the implementation of `Hash` for `()` is a no-op.
    ///
    /// ### Example
    /// ```rust
    /// # use std::hash::Hash;
    /// # use std::collections::hash_map::DefaultHasher;
    /// # enum Foo { Empty, WithValue(u8) }
    /// # use Foo::*;
    /// # let mut state = DefaultHasher::new();
    /// # let my_enum = Foo::Empty;
    /// match my_enum {
    ///     Empty => ().hash(&mut state),
    ///     WithValue(x) => x.hash(&mut state),
    /// }
    /// ```
    /// Use instead:
    /// ```rust
    /// # use std::hash::Hash;
    /// # use std::collections::hash_map::DefaultHasher;
    /// # enum Foo { Empty, WithValue(u8) }
    /// # use Foo::*;
    /// # let mut state = DefaultHasher::new();
    /// # let my_enum = Foo::Empty;
    /// match my_enum {
    ///     Empty => 0_u8.hash(&mut state),
    ///     WithValue(x) => x.hash(&mut state),
    /// }
    /// ```
    #[clippy::version = "1.58.0"]
    pub UNIT_HASH,
    correctness,
    "hashing a unit value, which does nothing"
}

declare_lint_pass!(UnitHash => [UNIT_HASH]);

// NOTE(review): `'tcx` is used without being declared on the impl; this
// presumably relies on the in-band-lifetimes feature enabled at crate
// level — confirm against the crate's lib.rs.
impl LateLintPass<'tcx> for UnitHash {
    // Fires on `().hash(state)` and suggests hashing a dummy `0_u8` instead.
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
        // `if_chain!` is a macro from the `if_chain` crate, imported
        // elsewhere in this crate (not visible in this fragment).
        if_chain! {
            // a method call named `hash` ...
            if let ExprKind::MethodCall(name_ident, _, args, _) = &expr.kind;
            if name_ident.ident.name == sym::hash;
            // ... with exactly a receiver and one hasher argument ...
            if let [recv, state_param] = args;
            // ... where the receiver's type is the unit type `()`.
            if cx.typeck_results().expr_ty(recv).is_unit();
            then {
                span_lint_and_then(
                    cx,
                    UNIT_HASH,
                    expr.span,
                    "this call to `hash` on the unit type will do nothing",
                    |diag| {
                        // Suggest a replacement that feeds *something* to the
                        // hasher, keeping the original state expression.
                        diag.span_suggestion(
                            expr.span,
                            "remove the call to `hash` or consider using",
                            format!(
                                "0_u8.hash({})",
                                snippet(cx, state_param.span, ".."),
                            ),
                            Applicability::MaybeIncorrect,
                        );
                        diag.note("the implementation of `Hash` for `()` is a no-op");
                    }
                );
            }
        }
    }
}
use onig::{Regex, RegexOptions}; use serde::{Deserialize, Serialize, Serializer}; use std::ops::Deref; use std::{ convert::TryFrom, hash::{Hash, Hasher}, }; impl TryFrom<String> for SerializeRegex { type Error = onig::Error; fn try_from(string: String) -> Result<Self, onig::Error> { Ok(SerializeRegex { regex: SerializeRegex::compile(&string)?, string, }) } } #[derive(Deserialize, Debug)] #[serde(try_from = "String")] pub struct SerializeRegex { pub(crate) string: String, pub(crate) regex: Regex, } impl Serialize for SerializeRegex { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&self.string) } } impl Hash for SerializeRegex { fn hash<H: Hasher>(&self, state: &mut H) { self.string.hash(state); } } impl SerializeRegex { pub fn compile(regex_str: &str) -> Result<Regex, onig::Error> { let mut case_sensitive = true; let regex_str = if let Some(stripped) = regex_str.strip_suffix("(?i)") { case_sensitive = false; stripped } else { regex_str }; Regex::with_options( regex_str, if case_sensitive { RegexOptions::REGEX_OPTION_NONE } else { RegexOptions::REGEX_OPTION_IGNORECASE }, onig::Syntax::java(), ) } } impl Deref for SerializeRegex { type Target = Regex; fn deref(&self) -> &Self::Target { &self.regex } }
// Experimental driver that watches the mission registry twin and prints
// state changes.
//
// NOTE(review): this fragment does not compile as-is — `service` is not in
// scope in any of these functions, `launchpad_1_state` is used but never
// defined (presumably `mission_registry_1_state` was meant), and `?` is
// used in a function returning `()`. Left byte-identical; flagging only.
async fn wip() {
    let mission_registry = MissionRegistryTwin {};
    let mission_registry_1_state = twin::execute_twin(service.clone(), mission_registry)?;

    // NOTE(review): rebinds `mission_registry` to a JoinHandle and then
    // never awaits it — the spawned task is detached.
    let mission_registry = tokio::spawn(launchpad_1_state.map(move |state| {
        println!("{:?}", state);
    }));

    // NOTE(review): `match x { Ok(..) => .., _ => () }` could be `if let Ok`.
    match launchpad_1_state.next().await {
        Ok(state) => {
            println!("{:?}", state);
            if let Some(mission) = state.mission {
                println!("{:?}", mission);
                match *(twin_current_state(service.clone(), MissionTwin { id: mission }).await) {
                    Ok(mission_state) => println!("{:?}", mission_state),
                    Err(e) => println!("{:?}", e),
                }
            }
        }
        _ => (),
    }

    match mission_registry_1_state.try_recv() {
        Ok(state) => {
            println!("{:?}", state);
        }
        _ => (),
    }

    // NOTE(review): std::thread::sleep blocks the executor thread inside an
    // async fn — tokio::time::sleep would be the non-blocking equivalent.
    std::thread::sleep(Duration::from_secs_f32(0.33f32));
}

// Observes a single launchpad twin and prints each state update.
async fn observe_twin() {
    let launchpad_thread = observe(
        twin::execute_twin(
            service.clone(),
            LaunchpadTwin {
                id: "Launchpad-01".to_string(),
            },
        ),
        |state| println!("launchpad state {:?}", state),
    );
    let _ = join!(launchpad_thread);
}

// Observes the mission registry twin and prints each state update.
// NOTE(review): this zero-arg `observe` calls a two-arg `observe` — either a
// helper with the same name exists elsewhere, or this shadows/recurses
// incorrectly; confirm against the rest of the crate.
async fn observe() {
    let missions_thread = observe(
        twin::execute_twin(service.clone(), MissionRegistryTwin),
        |state| println!("Missions state {:?}", state),
    );
    let _ = join!(missions_thread);
}

// For every mission id in the registry, follows that mission twin and
// prints the combined latest states.
async fn test_switch_map_combine_latest () {
    let mut current_mission = switch_map(
        twin::execute_twin(service.clone(), MissionRegistryTwin),
        |state| {
            Some(combine_latest(
                state
                    .into_iter()
                    .map(|id| twin::execute_twin(service.clone(), MissionTwin { id }))
                    .collect(),
            ))
        },
    );
    observe(
        current_mission,
        |states| println!("all_mission {:?}", states),
    );
}

// Same idea via the higher-level `observe_registry` helper.
async fn test_observe_registry () {
    let current_mission = observe_registry(
        service.clone(),
        MissionRegistryTwin,
        |s| s.into_iter().map(|id| MissionTwin { id }).collect(),
        |states| println!("all_mission {:?}", states),
    );
}
#![allow(clippy::many_single_char_names)] use druid::piet::Color; pub struct ColorUtil; impl ColorUtil { pub fn hsl(h: f64, s: f64, l: f64) -> Color { Self::rbg8t(Self::hsl_to_rgb(h, s, l)) } pub const fn rbg8t((r, g, b): (u8, u8, u8)) -> Color { Color::rgb8(r, g, b) } // https://pauljmiller.com/posts/druid-widget-tutorial.html fn hue_to_rgb(p: f64, q: f64, t: f64) -> f64 { let mut t = t; if t < 0. { t += 1. } if t > 1. { t -= 1. }; if t < 1. / 6. { p + (q - p) * 6. * t } else if t < 1. / 2. { q } else if t < 2. / 3. { p + (q - p) * (2. / 3. - t) * 6. } else { p } } fn hsl_to_rgb(h: f64, s: f64, l: f64) -> (u8, u8, u8) { let r; let g; let b; if s == 0.0 { // achromatic r = l; g = l; b = l; } else { let q = if l < 0.5 { l * (1. + s) } else { l + s - l * s }; let p = 2. * l - q; r = Self::hue_to_rgb(p, q, h + 1. / 3.); g = Self::hue_to_rgb(p, q, h); b = Self::hue_to_rgb(p, q, h - 1. / 3.); } ( (r * 255.).round() as u8, (g * 255.).round() as u8, (b * 255.).round() as u8, ) } }
// NOTE(review): svd2rust-style generated register accessor for ACCTL0
// (analog comparator control). Prefer regenerating from the SVD over
// hand-editing. Field offsets below match the shift/mask constants in
// each proxy and in `impl R`.

#[doc = r"Value read from the register"]
pub struct R {
    bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
    bits: u32,
}
impl super::ACCTL0 {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // read-modify-write: seed W with the current hardware value
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // write starts from the reset value, not the current contents
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}

// ---- CINV (bit 1): Comparator Output Invert ----
#[doc = r"Value of the field"]
pub struct COMP_ACCTL0_CINVR {
    bits: bool,
}
impl COMP_ACCTL0_CINVR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _COMP_ACCTL0_CINVW<'a> {
    w: &'a mut W,
}
impl<'a> _COMP_ACCTL0_CINVW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 1);
        self.w.bits |= ((value as u32) & 1) << 1;
        self.w
    }
}

// ---- ISEN (bits 2:3): Interrupt Sense ----
#[doc = "Possible values of the field `COMP_ACCTL0_ISEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum COMP_ACCTL0_ISENR {
    #[doc = "Level sense, see ISLVAL"]
    COMP_ACCTL0_ISEN_LEVEL,
    #[doc = "Falling edge"]
    COMP_ACCTL0_ISEN_FALL,
    #[doc = "Rising edge"]
    COMP_ACCTL0_ISEN_RISE,
    #[doc = "Either edge"]
    COMP_ACCTL0_ISEN_BOTH,
}
impl COMP_ACCTL0_ISENR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        match *self {
            COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_LEVEL => 0,
            COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_FALL => 1,
            COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_RISE => 2,
            COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_BOTH => 3,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> COMP_ACCTL0_ISENR {
        match value {
            0 => COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_LEVEL,
            1 => COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_FALL,
            2 => COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_RISE,
            3 => COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_BOTH,
            // caller always masks to 2 bits, so 0..=3 is exhaustive
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_ISEN_LEVEL`"]
    #[inline(always)]
    pub fn is_comp_acctl0_isen_level(&self) -> bool {
        *self == COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_LEVEL
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_ISEN_FALL`"]
    #[inline(always)]
    pub fn is_comp_acctl0_isen_fall(&self) -> bool {
        *self == COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_FALL
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_ISEN_RISE`"]
    #[inline(always)]
    pub fn is_comp_acctl0_isen_rise(&self) -> bool {
        *self == COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_RISE
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_ISEN_BOTH`"]
    #[inline(always)]
    pub fn is_comp_acctl0_isen_both(&self) -> bool {
        *self == COMP_ACCTL0_ISENR::COMP_ACCTL0_ISEN_BOTH
    }
}
#[doc = "Values that can be written to the field `COMP_ACCTL0_ISEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum COMP_ACCTL0_ISENW {
    #[doc = "Level sense, see ISLVAL"]
    COMP_ACCTL0_ISEN_LEVEL,
    #[doc = "Falling edge"]
    COMP_ACCTL0_ISEN_FALL,
    #[doc = "Rising edge"]
    COMP_ACCTL0_ISEN_RISE,
    #[doc = "Either edge"]
    COMP_ACCTL0_ISEN_BOTH,
}
impl COMP_ACCTL0_ISENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        match *self {
            COMP_ACCTL0_ISENW::COMP_ACCTL0_ISEN_LEVEL => 0,
            COMP_ACCTL0_ISENW::COMP_ACCTL0_ISEN_FALL => 1,
            COMP_ACCTL0_ISENW::COMP_ACCTL0_ISEN_RISE => 2,
            COMP_ACCTL0_ISENW::COMP_ACCTL0_ISEN_BOTH => 3,
        }
    }
}
#[doc = r"Proxy"]
pub struct _COMP_ACCTL0_ISENW<'a> {
    w: &'a mut W,
}
impl<'a> _COMP_ACCTL0_ISENW<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: COMP_ACCTL0_ISENW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "Level sense, see ISLVAL"]
    #[inline(always)]
    pub fn comp_acctl0_isen_level(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_ISENW::COMP_ACCTL0_ISEN_LEVEL)
    }
    #[doc = "Falling edge"]
    #[inline(always)]
    pub fn comp_acctl0_isen_fall(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_ISENW::COMP_ACCTL0_ISEN_FALL)
    }
    #[doc = "Rising edge"]
    #[inline(always)]
    pub fn comp_acctl0_isen_rise(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_ISENW::COMP_ACCTL0_ISEN_RISE)
    }
    #[doc = "Either edge"]
    #[inline(always)]
    pub fn comp_acctl0_isen_both(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_ISENW::COMP_ACCTL0_ISEN_BOTH)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        self.w.bits &= !(3 << 2);
        self.w.bits |= ((value as u32) & 3) << 2;
        self.w
    }
}

// ---- ISLVAL (bit 4): Interrupt Sense Level Value ----
#[doc = r"Value of the field"]
pub struct COMP_ACCTL0_ISLVALR {
    bits: bool,
}
impl COMP_ACCTL0_ISLVALR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _COMP_ACCTL0_ISLVALW<'a> {
    w: &'a mut W,
}
impl<'a> _COMP_ACCTL0_ISLVALW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 4);
        self.w.bits |= ((value as u32) & 1) << 4;
        self.w
    }
}

// ---- TSEN (bits 5:6): Trigger Sense ----
#[doc = "Possible values of the field `COMP_ACCTL0_TSEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum COMP_ACCTL0_TSENR {
    #[doc = "Level sense, see TSLVAL"]
    COMP_ACCTL0_TSEN_LEVEL,
    #[doc = "Falling edge"]
    COMP_ACCTL0_TSEN_FALL,
    #[doc = "Rising edge"]
    COMP_ACCTL0_TSEN_RISE,
    #[doc = "Either edge"]
    COMP_ACCTL0_TSEN_BOTH,
}
impl COMP_ACCTL0_TSENR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        match *self {
            COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_LEVEL => 0,
            COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_FALL => 1,
            COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_RISE => 2,
            COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_BOTH => 3,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> COMP_ACCTL0_TSENR {
        match value {
            0 => COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_LEVEL,
            1 => COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_FALL,
            2 => COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_RISE,
            3 => COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_BOTH,
            // caller always masks to 2 bits, so 0..=3 is exhaustive
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_TSEN_LEVEL`"]
    #[inline(always)]
    pub fn is_comp_acctl0_tsen_level(&self) -> bool {
        *self == COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_LEVEL
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_TSEN_FALL`"]
    #[inline(always)]
    pub fn is_comp_acctl0_tsen_fall(&self) -> bool {
        *self == COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_FALL
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_TSEN_RISE`"]
    #[inline(always)]
    pub fn is_comp_acctl0_tsen_rise(&self) -> bool {
        *self == COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_RISE
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_TSEN_BOTH`"]
    #[inline(always)]
    pub fn is_comp_acctl0_tsen_both(&self) -> bool {
        *self == COMP_ACCTL0_TSENR::COMP_ACCTL0_TSEN_BOTH
    }
}
#[doc = "Values that can be written to the field `COMP_ACCTL0_TSEN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum COMP_ACCTL0_TSENW {
    #[doc = "Level sense, see TSLVAL"]
    COMP_ACCTL0_TSEN_LEVEL,
    #[doc = "Falling edge"]
    COMP_ACCTL0_TSEN_FALL,
    #[doc = "Rising edge"]
    COMP_ACCTL0_TSEN_RISE,
    #[doc = "Either edge"]
    COMP_ACCTL0_TSEN_BOTH,
}
impl COMP_ACCTL0_TSENW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        match *self {
            COMP_ACCTL0_TSENW::COMP_ACCTL0_TSEN_LEVEL => 0,
            COMP_ACCTL0_TSENW::COMP_ACCTL0_TSEN_FALL => 1,
            COMP_ACCTL0_TSENW::COMP_ACCTL0_TSEN_RISE => 2,
            COMP_ACCTL0_TSENW::COMP_ACCTL0_TSEN_BOTH => 3,
        }
    }
}
#[doc = r"Proxy"]
pub struct _COMP_ACCTL0_TSENW<'a> {
    w: &'a mut W,
}
impl<'a> _COMP_ACCTL0_TSENW<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: COMP_ACCTL0_TSENW) -> &'a mut W {
        {
            self.bits(variant._bits())
        }
    }
    #[doc = "Level sense, see TSLVAL"]
    #[inline(always)]
    pub fn comp_acctl0_tsen_level(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_TSENW::COMP_ACCTL0_TSEN_LEVEL)
    }
    #[doc = "Falling edge"]
    #[inline(always)]
    pub fn comp_acctl0_tsen_fall(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_TSENW::COMP_ACCTL0_TSEN_FALL)
    }
    #[doc = "Rising edge"]
    #[inline(always)]
    pub fn comp_acctl0_tsen_rise(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_TSENW::COMP_ACCTL0_TSEN_RISE)
    }
    #[doc = "Either edge"]
    #[inline(always)]
    pub fn comp_acctl0_tsen_both(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_TSENW::COMP_ACCTL0_TSEN_BOTH)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        self.w.bits &= !(3 << 5);
        self.w.bits |= ((value as u32) & 3) << 5;
        self.w
    }
}

// ---- TSLVAL (bit 7): Trigger Sense Level Value ----
#[doc = r"Value of the field"]
pub struct COMP_ACCTL0_TSLVALR {
    bits: bool,
}
impl COMP_ACCTL0_TSLVALR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _COMP_ACCTL0_TSLVALW<'a> {
    w: &'a mut W,
}
impl<'a> _COMP_ACCTL0_TSLVALW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 7);
        self.w.bits |= ((value as u32) & 1) << 7;
        self.w
    }
}

// ---- ASRCP (bits 9:10): Analog Source Positive ----
#[doc = "Possible values of the field `COMP_ACCTL0_ASRCP`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum COMP_ACCTL0_ASRCPR {
    #[doc = "Pin value of Cn+"]
    COMP_ACCTL0_ASRCP_PIN,
    #[doc = "Pin value of C0+"]
    COMP_ACCTL0_ASRCP_PIN0,
    #[doc = "Internal voltage reference"]
    COMP_ACCTL0_ASRCP_REF,
    #[doc = r"Reserved"]
    _Reserved(u8),
}
impl COMP_ACCTL0_ASRCPR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        match *self {
            COMP_ACCTL0_ASRCPR::COMP_ACCTL0_ASRCP_PIN => 0,
            COMP_ACCTL0_ASRCPR::COMP_ACCTL0_ASRCP_PIN0 => 1,
            COMP_ACCTL0_ASRCPR::COMP_ACCTL0_ASRCP_REF => 2,
            COMP_ACCTL0_ASRCPR::_Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> COMP_ACCTL0_ASRCPR {
        match value {
            0 => COMP_ACCTL0_ASRCPR::COMP_ACCTL0_ASRCP_PIN,
            1 => COMP_ACCTL0_ASRCPR::COMP_ACCTL0_ASRCP_PIN0,
            2 => COMP_ACCTL0_ASRCPR::COMP_ACCTL0_ASRCP_REF,
            // value 3 is reserved in hardware, kept round-trippable
            i => COMP_ACCTL0_ASRCPR::_Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_ASRCP_PIN`"]
    #[inline(always)]
    pub fn is_comp_acctl0_asrcp_pin(&self) -> bool {
        *self == COMP_ACCTL0_ASRCPR::COMP_ACCTL0_ASRCP_PIN
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_ASRCP_PIN0`"]
    #[inline(always)]
    pub fn is_comp_acctl0_asrcp_pin0(&self) -> bool {
        *self == COMP_ACCTL0_ASRCPR::COMP_ACCTL0_ASRCP_PIN0
    }
    #[doc = "Checks if the value of the field is `COMP_ACCTL0_ASRCP_REF`"]
    #[inline(always)]
    pub fn is_comp_acctl0_asrcp_ref(&self) -> bool {
        *self == COMP_ACCTL0_ASRCPR::COMP_ACCTL0_ASRCP_REF
    }
}
#[doc = "Values that can be written to the field `COMP_ACCTL0_ASRCP`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum COMP_ACCTL0_ASRCPW {
    #[doc = "Pin value of Cn+"]
    COMP_ACCTL0_ASRCP_PIN,
    #[doc = "Pin value of C0+"]
    COMP_ACCTL0_ASRCP_PIN0,
    #[doc = "Internal voltage reference"]
    COMP_ACCTL0_ASRCP_REF,
}
impl COMP_ACCTL0_ASRCPW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        match *self {
            COMP_ACCTL0_ASRCPW::COMP_ACCTL0_ASRCP_PIN => 0,
            COMP_ACCTL0_ASRCPW::COMP_ACCTL0_ASRCP_PIN0 => 1,
            COMP_ACCTL0_ASRCPW::COMP_ACCTL0_ASRCP_REF => 2,
        }
    }
}
#[doc = r"Proxy"]
pub struct _COMP_ACCTL0_ASRCPW<'a> {
    w: &'a mut W,
}
impl<'a> _COMP_ACCTL0_ASRCPW<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: COMP_ACCTL0_ASRCPW) -> &'a mut W {
        // SAFETY note (review): `bits` is unsafe here (reserved encodings
        // exist); `_bits()` only yields the enumerated values 0-2.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "Pin value of Cn+"]
    #[inline(always)]
    pub fn comp_acctl0_asrcp_pin(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_ASRCPW::COMP_ACCTL0_ASRCP_PIN)
    }
    #[doc = "Pin value of C0+"]
    #[inline(always)]
    pub fn comp_acctl0_asrcp_pin0(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_ASRCPW::COMP_ACCTL0_ASRCP_PIN0)
    }
    #[doc = "Internal voltage reference"]
    #[inline(always)]
    pub fn comp_acctl0_asrcp_ref(self) -> &'a mut W {
        self.variant(COMP_ACCTL0_ASRCPW::COMP_ACCTL0_ASRCP_REF)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits &= !(3 << 9);
        self.w.bits |= ((value as u32) & 3) << 9;
        self.w
    }
}

// ---- TOEN (bit 11): Trigger Output Enable ----
#[doc = r"Value of the field"]
pub struct COMP_ACCTL0_TOENR {
    bits: bool,
}
impl COMP_ACCTL0_TOENR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _COMP_ACCTL0_TOENW<'a> {
    w: &'a mut W,
}
impl<'a> _COMP_ACCTL0_TOENW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 11);
        self.w.bits |= ((value as u32) & 1) << 11;
        self.w
    }
}

impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bit 1 - Comparator Output Invert"]
    #[inline(always)]
    pub fn comp_acctl0_cinv(&self) -> COMP_ACCTL0_CINVR {
        let bits = ((self.bits >> 1) & 1) != 0;
        COMP_ACCTL0_CINVR { bits }
    }
    #[doc = "Bits 2:3 - Interrupt Sense"]
    #[inline(always)]
    pub fn comp_acctl0_isen(&self) -> COMP_ACCTL0_ISENR {
        COMP_ACCTL0_ISENR::_from(((self.bits >> 2) & 3) as u8)
    }
    #[doc = "Bit 4 - Interrupt Sense Level Value"]
    #[inline(always)]
    pub fn comp_acctl0_islval(&self) -> COMP_ACCTL0_ISLVALR {
        let bits = ((self.bits >> 4) & 1) != 0;
        COMP_ACCTL0_ISLVALR { bits }
    }
    #[doc = "Bits 5:6 - Trigger Sense"]
    #[inline(always)]
    pub fn comp_acctl0_tsen(&self) -> COMP_ACCTL0_TSENR {
        COMP_ACCTL0_TSENR::_from(((self.bits >> 5) & 3) as u8)
    }
    #[doc = "Bit 7 - Trigger Sense Level Value"]
    #[inline(always)]
    pub fn comp_acctl0_tslval(&self) -> COMP_ACCTL0_TSLVALR {
        let bits = ((self.bits >> 7) & 1) != 0;
        COMP_ACCTL0_TSLVALR { bits }
    }
    #[doc = "Bits 9:10 - Analog Source Positive"]
    #[inline(always)]
    pub fn comp_acctl0_asrcp(&self) -> COMP_ACCTL0_ASRCPR {
        COMP_ACCTL0_ASRCPR::_from(((self.bits >> 9) & 3) as u8)
    }
    #[doc = "Bit 11 - Trigger Output Enable"]
    #[inline(always)]
    pub fn comp_acctl0_toen(&self) -> COMP_ACCTL0_TOENR {
        let bits = ((self.bits >> 11) & 1) != 0;
        COMP_ACCTL0_TOENR { bits }
    }
}
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 1 - Comparator Output Invert"]
    #[inline(always)]
    pub fn comp_acctl0_cinv(&mut self) -> _COMP_ACCTL0_CINVW {
        _COMP_ACCTL0_CINVW { w: self }
    }
    #[doc = "Bits 2:3 - Interrupt Sense"]
    #[inline(always)]
    pub fn comp_acctl0_isen(&mut self) -> _COMP_ACCTL0_ISENW {
        _COMP_ACCTL0_ISENW { w: self }
    }
    #[doc = "Bit 4 - Interrupt Sense Level Value"]
    #[inline(always)]
    pub fn comp_acctl0_islval(&mut self) -> _COMP_ACCTL0_ISLVALW {
        _COMP_ACCTL0_ISLVALW { w: self }
    }
    #[doc = "Bits 5:6 - Trigger Sense"]
    #[inline(always)]
    pub fn comp_acctl0_tsen(&mut self) -> _COMP_ACCTL0_TSENW {
        _COMP_ACCTL0_TSENW { w: self }
    }
    #[doc = "Bit 7 - Trigger Sense Level Value"]
    #[inline(always)]
    pub fn comp_acctl0_tslval(&mut self) -> _COMP_ACCTL0_TSLVALW {
        _COMP_ACCTL0_TSLVALW { w: self }
    }
    #[doc = "Bits 9:10 - Analog Source Positive"]
    #[inline(always)]
    pub fn comp_acctl0_asrcp(&mut self) -> _COMP_ACCTL0_ASRCPW {
        _COMP_ACCTL0_ASRCPW { w: self }
    }
    #[doc = "Bit 11 - Trigger Output Enable"]
    #[inline(always)]
    pub fn comp_acctl0_toen(&mut self) -> _COMP_ACCTL0_TOENW {
        _COMP_ACCTL0_TOENW { w: self }
    }
}
use crate::smb2::requests::close::Close; use super::create_random_byte_array_of_predefined_length; use super::create_random_byte_array_with_random_length; pub fn fuzz_close_with_predefined_values() -> Close { let mut close_request = Close::default(); close_request.file_id = create_random_byte_array_of_predefined_length(16); close_request } /// Fuzzes the close request with random values that comply to the size restrictions of certain fields. pub fn fuzz_close_with_random_fields() -> Close { let mut close_request = Close::default(); close_request.structure_size = create_random_byte_array_of_predefined_length(2); close_request.flags = create_random_byte_array_of_predefined_length(2); close_request.reserved = create_random_byte_array_of_predefined_length(4); close_request.file_id = create_random_byte_array_of_predefined_length(16); close_request } /// Fuzzes the close request with random values with random length. pub fn fuzz_close_completely_random() -> Close { let mut close_request = Close::default(); close_request.structure_size = create_random_byte_array_with_random_length(); close_request.flags = create_random_byte_array_with_random_length(); close_request.reserved = create_random_byte_array_with_random_length(); close_request.file_id = create_random_byte_array_with_random_length(); close_request }
mod renderer; use renderer::Renderer; mod automaton; use automaton::Automaton; mod events; use events::Event; mod console_renderer; use console_renderer::ConsoleRenderer; mod sdl2_renderer; use sdl2_renderer::Sdl2Renderer; mod rule110; use rule110::Rule110; mod rule30; use rule30::Rule30; mod rule90; use rule90::Rule90; mod rule184; use rule184::Rule184; mod gol; use gol::Gol; mod seeds; use seeds::Seeds; mod lant; use lant::Lant; mod brians_brain; use brians_brain::BriansBrain; mod wireworld; use wireworld::Wireworld; mod cell; mod grid; mod row; use std::{thread, time}; use std::env; const CYCLE_TIME_MS: u64 = 100; const COLS: usize = 100; const ROWS: usize = 100; const SDL_WINDOW_HEIGHT: u32 = 800; const SDL_WINDOW_WIDTH: u32 = 800; const SDL_CELL_HEIGHT: u32 = SDL_WINDOW_HEIGHT / (ROWS as u32); const SDL_CELL_WIDTH: u32 = SDL_WINDOW_WIDTH / (COLS as u32); pub fn main() -> Result<(), String> { let args: Vec<String> = env::args().collect(); if args.len() < 2 { panic!("./rucomato automaton (rule110) output (console/sdl2)") } let automaton_select = &args[1]; let output_select = &args[2]; println!("Automaton: {}", automaton_select); println!("Output: {}", output_select); let mut renderer: Box<dyn Renderer>; let mut automaton: Box<dyn Automaton>; let mut grid = false; match automaton_select.as_str() { "rule110" => automaton = Box::new(Rule110::new(COLS)), "rule30" => automaton = Box::new(Rule30::new(COLS)), "rule90" => automaton = Box::new(Rule90::new(COLS)), "rule184" => automaton = Box::new(Rule184::new(COLS)), "gol" => { automaton = Box::new(Gol::new(ROWS, COLS)); grid = true; } "seeds" => { automaton = Box::new(Seeds::new(ROWS, COLS)); grid = true; } "lant" => { automaton = Box::new(Lant::new(ROWS, COLS)); grid = true; } "bb" => { automaton = Box::new(BriansBrain::new(ROWS, COLS)); grid = true; } "wireworld" => { automaton = Box::new(Wireworld::new(ROWS, COLS)); grid = true; } _ => panic!("Unknown automaton {} selected.", automaton_select), } match 
output_select.as_str() { "console" => renderer = Box::new(ConsoleRenderer::new(grid)), "sdl2" => { renderer = Box::new(Sdl2Renderer::new( automaton_select, SDL_WINDOW_WIDTH, SDL_WINDOW_HEIGHT, ROWS, SDL_CELL_WIDTH, SDL_CELL_HEIGHT, )?) } _ => panic!("Unknown output type {} selected.", output_select), } renderer.begin_render(); automaton.render(&mut *renderer); renderer.end_render(); let mut quit = false; let mut pause = false; while !quit { let now = time::Instant::now(); for event in renderer.get_events().iter() { match event { Event::QUIT => quit = true, Event::PAUSE => pause = !pause, _ => {} } } if !pause { renderer.begin_render(); automaton.render(&mut *renderer); renderer.end_render(); automaton.next(); } let elapsed = now.elapsed().as_millis() as u64; if output_select != "console" { println!("Iteration time: {} ms", elapsed); } if elapsed < CYCLE_TIME_MS { thread::sleep(time::Duration::from_millis(CYCLE_TIME_MS - elapsed)); } } Ok(()) }
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use {
    crate::{
        mod_manager::ModManager,
        models::AddModInfo,
        story_context_store::{ContextReader, ContextWriter, Contributor},
        utils,
    },
    failure::{format_err, Error, ResultExt},
    fidl_fuchsia_app_discover::{
        ModuleIdentifier, StoryDiscoverError, StoryModuleRequest, StoryModuleRequestStream,
    },
    fidl_fuchsia_mem::Buffer,
    fidl_fuchsia_modular::Intent,
    fuchsia_async as fasync,
    fuchsia_syslog::macros::*,
    futures::prelude::*,
    parking_lot::Mutex,
    std::sync::Arc,
};

/// The StoryModule protocol implementation.
pub struct StoryModuleService<T> {
    /// The story id to which the module belongs.
    story_id: String,
    /// The module id in story |story_id| to which the output belongs.
    module_id: String,
    /// Reference to the context store.
    story_context_store: Arc<Mutex<T>>,
    /// Reference to the intent re-issuing.
    mod_manager: Arc<Mutex<ModManager<T>>>,
}

impl<T: ContextReader + ContextWriter + 'static> StoryModuleService<T> {
    /// Create a new module writer instance from an identifier.
    ///
    /// Fails if either the story id or the module path is absent from
    /// `module`; the module path is flattened into a single encoded id.
    pub fn new(
        story_context_store: Arc<Mutex<T>>,
        mod_manager: Arc<Mutex<ModManager<T>>>,
        module: ModuleIdentifier,
    ) -> Result<Self, Error> {
        Ok(StoryModuleService {
            story_id: module.story_id.ok_or(format_err!("expected story id"))?,
            module_id: utils::encoded_module_path(
                module.module_path.ok_or(format_err!("expected mod path"))?,
            ),
            story_context_store,
            mod_manager,
        })
    }

    /// Handle a stream of StoryModule requests.
    ///
    /// Consumes `self` and serves the stream on the local executor until it
    /// closes or errors; on clean termination every context contribution made
    /// by this module is withdrawn.
    pub fn spawn(self, mut stream: StoryModuleRequestStream) {
        fasync::spawn_local(
            async move {
                while let Some(request) = stream.try_next().await.context(format!(
                    "Error running module output for {:?} {:?}",
                    self.story_id, self.module_id,
                ))?
                {
                    match request {
                        StoryModuleRequest::WriteOutput {
                            output_name,
                            entity_reference,
                            responder,
                        } => {
                            self.handle_write(output_name, entity_reference).await?;
                            responder.send(&mut Ok(()))?;
                        }
                        StoryModuleRequest::IssueIntent { intent, mod_name, responder } => {
                            self.handle_add_to_story(intent, mod_name).await?;
                            responder.send()?;
                            // TODO: bind controller.
                        }
                        StoryModuleRequest::WriteInstanceState { key, value, responder } => {
                            let mut result = self.handle_write_instance_state(&key, value).await;
                            responder.send(&mut result)?;
                        }
                        StoryModuleRequest::ReadInstanceState { key, responder } => {
                            let mut result = self.handle_read_instance_state(&key).await;
                            responder.send(&mut result)?;
                        }
                    }
                }
                // Stream closed: remove everything this module contributed.
                self.story_context_store.lock().withdraw_all(&self.story_id, &self.module_id);
                Ok(())
            }
            .unwrap_or_else(|e: Error| fx_log_err!("error serving module output {}", e)),
        )
    }

    // Re-issues `intent` as a focused AddMod action in this service's story.
    async fn handle_add_to_story(&self, intent: Intent, mod_name: String) -> Result<(), Error> {
        // NOTE(review): the mod_manager lock is held across the await below;
        // parking_lot::Mutex is not await-aware — presumably safe because the
        // server runs on a single-threaded local executor (spawn_local), but
        // confirm before moving this to a multithreaded executor.
        let mut mod_manager = self.mod_manager.lock();
        let action = AddModInfo::new(intent.into(), Some(self.story_id.clone()), Some(mod_name));
        mod_manager.issue_action(&action, /*focus=*/ true).await
    }

    /// Write the given |entity_reference| to the context store and associate
    /// it to this module output |output_name|. If no entity reference is given,
    /// clear that output.
    async fn handle_write(
        &self,
        output_name: String,
        entity_reference: Option<String>,
    ) -> Result<(), Error> {
        // TODO: verify the output_name matches an output in
        // the manifest.
        fx_log_info!(
            "Got write for parameter name:{}, story:{}, mod:{:?} reference:{:?}",
            output_name, self.story_id, self.module_id, entity_reference,
        );
        // NOTE(review): both the context-store and mod-manager locks are held
        // across awaits here (same single-threaded-executor caveat as above).
        let mut context_store_lock = self.story_context_store.lock();
        match entity_reference {
            Some(reference) => {
                // If this output already pointed at an entity, tell the mod
                // manager to re-issue affected actions with the new reference
                // before recording the contribution.
                if let Some(old_reference) =
                    context_store_lock.get_reference(&self.story_id, &self.module_id, &output_name)
                {
                    let mut issuer_lock = self.mod_manager.lock();
                    issuer_lock
                        .replace(
                            old_reference,
                            &reference,
                            Contributor::module_new(&self.story_id, &self.module_id, &output_name),
                        )
                        .await?;
                }
                context_store_lock
                    .contribute(&self.story_id, &self.module_id, &output_name, &reference)
                    .await?;
            }
            None => context_store_lock.withdraw(&self.story_id, &self.module_id, &output_name),
        }
        Ok(())
    }

    // Persists `value` (a VMO-backed string) under `key` for this module.
    async fn handle_write_instance_state(
        &self,
        key: &str,
        value: Buffer,
    ) -> Result<(), StoryDiscoverError> {
        let mod_manager = self.mod_manager.lock();
        let mut story_manager = mod_manager.story_manager.lock();
        story_manager
            .set_instance_state(
                &self.story_id,
                &self.module_id,
                key,
                utils::vmo_buffer_to_string(Box::new(value))
                    .map_err(|_| StoryDiscoverError::VmoStringConversion)?,
            )
            .await
    }

    // Loads the state stored under `key` and returns it as a VMO buffer.
    async fn handle_read_instance_state(&self, key: &str) -> Result<Buffer, StoryDiscoverError> {
        let mod_manager = self.mod_manager.lock();
        let story_manager = mod_manager.story_manager.lock();
        let state_string =
            story_manager.get_instance_state(&self.story_id, &self.module_id, &key).await?;
        utils::string_to_vmo_buffer(state_string)
            .map_err(|_| StoryDiscoverError::VmoStringConversion)
    }
}

#[cfg(test)]
mod tests {
    use {
        super::*,
        crate::{
            models::{AddModInfo, Intent},
            story_context_store::{ContextEntity, Contributor},
            testing::{init_state, FakeEntityData, FakeEntityResolver, PuppetMasterFake},
        },
        fidl_fuchsia_app_discover::StoryModuleMarker,
        fidl_fuchsia_modular::{
            EntityResolverMarker, IntentParameter as FidlIntentParameter, IntentParameterData,
            PuppetMasterMarker, StoryCommand,
        },
        maplit::{hashmap, hashset},
    };

    // WriteOutput stores entities in the context store and a `None` write
    // withdraws the previous contribution.
    #[fasync::run_singlethreaded(test)]
    async fn test_write() {
        let (entity_resolver, request_stream) =
            fidl::endpoints::create_proxy_and_stream::<EntityResolverMarker>().unwrap();
        let mut fake_entity_resolver = FakeEntityResolver::new();
        fake_entity_resolver
            .register_entity("foo", FakeEntityData::new(vec!["some-type".into()], ""));
        fake_entity_resolver
            .register_entity("bar", FakeEntityData::new(vec!["some-type-bar".into()], ""));
        fake_entity_resolver.spawn(request_stream);
        let (puppet_master_client, _) =
            fidl::endpoints::create_proxy_and_stream::<PuppetMasterMarker>().unwrap();
        let (state, _, mod_manager) = init_state(puppet_master_client, entity_resolver);

        // Initialize service client and server.
        let (client, request_stream) =
            fidl::endpoints::create_proxy_and_stream::<StoryModuleMarker>().unwrap();
        let module = ModuleIdentifier {
            story_id: Some("story1".to_string()),
            module_path: Some(vec!["mod-a".to_string()]),
        };
        StoryModuleService::new(state.clone(), mod_manager, module).unwrap().spawn(request_stream);

        // Write a module output.
        assert!(client.write_output("param-foo", Some("foo")).await.is_ok());
        assert!(client.write_output("param-bar", Some("bar")).await.is_ok());

        // Verify we have two entities with the right contributor.
        {
            let context_store = state.lock();
            let result = context_store.current().collect::<Vec<&ContextEntity>>();
            let expected_entities = vec![
                ContextEntity::new_test(
                    "bar",
                    hashset!("some-type-bar".into()),
                    hashset!(Contributor::module_new("story1", "mod-a", "param-bar",)),
                ),
                ContextEntity::new_test(
                    "foo",
                    hashset!("some-type".into()),
                    hashset!(Contributor::module_new("story1", "mod-a", "param-foo",)),
                ),
            ];
            assert_eq!(result.len(), 2);
            assert!(result.iter().all(|r| expected_entities.iter().any(|e| e == *r)))
        }

        // Write no entity to the same output. This should withdraw the entity.
        assert!(client.write_output("param-foo", None).await.is_ok());

        // Verify we have only one entity.
        let context_store = state.lock();
        let result = context_store.current().collect::<Vec<&ContextEntity>>();
        assert_eq!(result.len(), 1);
    }

    // Overwriting an output re-issues the stored actions with the new
    // entity reference.
    #[fasync::run_singlethreaded(test)]
    async fn reissue_intents() -> Result<(), Error> {
        // Setup puppet master fake.
        let (puppet_master_client, puppet_master_request_stream) =
            fidl::endpoints::create_proxy_and_stream::<PuppetMasterMarker>().unwrap();
        let mut puppet_master_fake = PuppetMasterFake::new();
        let (entity_resolver, request_stream) =
            fidl::endpoints::create_proxy_and_stream::<EntityResolverMarker>().unwrap();
        let mut fake_entity_resolver = FakeEntityResolver::new();
        fake_entity_resolver
            .register_entity("garnet-ref", FakeEntityData::new(vec!["some-type".into()], ""));
        fake_entity_resolver
            .register_entity("peridot-ref", FakeEntityData::new(vec!["some-type".into()], ""));
        fake_entity_resolver.spawn(request_stream);

        // This will be called with the action of the old reference but with
        // the replaced entity reference.
        puppet_master_fake.set_on_execute("story1", |commands| {
            assert_eq!(commands.len(), 1);
            if let StoryCommand::AddMod(add_mod) = &commands[0] {
                assert_eq!(add_mod.intent.action, Some("PLAY_MUSIC".to_string()));
                assert_eq!(add_mod.mod_name_transitional, Some("mod-a".to_string()));
                assert_eq!(
                    add_mod.intent.parameters,
                    Some(vec![FidlIntentParameter {
                        name: Some("artist".to_string()),
                        data: IntentParameterData::EntityReference("garnet-ref".to_string()),
                    },])
                );
            } else {
                assert!(false);
            }
        });
        puppet_master_fake.spawn(puppet_master_request_stream);

        // Set initial state of connected mods. The actions here will be executed with the new
        // entity reference in the parameter.
        let (context_store_ref, _, mod_manager_ref) =
            init_state(puppet_master_client, entity_resolver);
        {
            let mut context_store = context_store_ref.lock();
            context_store.contribute("story1", "mod-a", "artist", "peridot-ref").await?;
            let mut mod_manager = mod_manager_ref.lock();
            let intent =
                Intent::new().with_action("PLAY_MUSIC").add_parameter("artist", "peridot-ref");
            let action =
                AddModInfo::new(intent, Some("story1".to_string()), Some("mod-a".to_string()));
            mod_manager.actions = hashmap!("peridot-ref".to_string() => hashset!(action));
        }

        // Initialize service client and server.
        let (client, request_stream) =
            fidl::endpoints::create_proxy_and_stream::<StoryModuleMarker>().unwrap();
        let module = ModuleIdentifier {
            story_id: Some("story1".to_string()),
            module_path: Some(vec!["mod-a".to_string()]),
        };
        StoryModuleService::new(context_store_ref, mod_manager_ref, module)
            .unwrap()
            .spawn(request_stream);

        // Write a module output.
        assert!(client.write_output("artist", Some("garnet-ref")).await.is_ok());
        Ok(())
    }

    // IssueIntent results in AddMod + SetFocusState + FocusMod commands.
    #[fasync::run_singlethreaded(test)]
    async fn issue_intent() -> Result<(), Error> {
        // Setup puppet master fake.
        let (puppet_master_client, puppet_master_request_stream) =
            fidl::endpoints::create_proxy_and_stream::<PuppetMasterMarker>().unwrap();
        let mut puppet_master_fake = PuppetMasterFake::new();
        let (entity_resolver, request_stream) =
            fidl::endpoints::create_proxy_and_stream::<EntityResolverMarker>().unwrap();
        let mut fake_entity_resolver = FakeEntityResolver::new();
        fake_entity_resolver
            .register_entity("garnet-ref", FakeEntityData::new(vec!["some-type".into()], ""));
        fake_entity_resolver.spawn(request_stream);

        // This will be called with the action of the old reference but with
        // the replaced entity reference.
        puppet_master_fake.set_on_execute("story1", |commands| {
            assert_eq!(commands.len(), 3);
            if let (
                StoryCommand::AddMod(add_mod),
                StoryCommand::SetFocusState(set_focus),
                StoryCommand::FocusMod(focus_mod),
            ) = (&commands[0], &commands[1], &commands[2])
            {
                assert_eq!(add_mod.intent.action, Some("PLAY_MUSIC".to_string()));
                assert_eq!(add_mod.mod_name_transitional, Some("mod-b".to_string()));
                assert_eq!(
                    add_mod.intent.parameters,
                    Some(vec![FidlIntentParameter {
                        name: Some("artist".to_string()),
                        data: IntentParameterData::EntityReference("garnet-ref".to_string()),
                    },])
                );
                assert!(set_focus.focused);
                assert_eq!(add_mod.mod_name_transitional, focus_mod.mod_name_transitional);
            } else {
                assert!(false);
            }
        });
        puppet_master_fake.spawn(puppet_master_request_stream);

        // Initialize service client and server.
        let (context_store, _, mod_manager) = init_state(puppet_master_client, entity_resolver);
        let (client, request_stream) =
            fidl::endpoints::create_proxy_and_stream::<StoryModuleMarker>().unwrap();
        let module = ModuleIdentifier {
            story_id: Some("story1".to_string()),
            module_path: Some(vec!["mod-a".to_string()]),
        };
        StoryModuleService::new(context_store, mod_manager, module).unwrap().spawn(request_stream);

        // Write a module output.
        let intent = Intent::new().with_action("PLAY_MUSIC").add_parameter("artist", "garnet-ref");
        assert!(client.issue_intent(&mut intent.into(), "mod-b").await.is_ok());
        Ok(())
    }

    // Instance state round-trips through write/read; a missing key errors.
    #[fasync::run_singlethreaded(test)]
    async fn handle_instance_state() -> Result<(), Error> {
        let (puppet_master_client, _) =
            fidl::endpoints::create_proxy_and_stream::<PuppetMasterMarker>().unwrap();
        let (entity_resolver, _) =
            fidl::endpoints::create_proxy_and_stream::<EntityResolverMarker>().unwrap();

        // Initialize service client and server.
        let (client, request_stream) =
            fidl::endpoints::create_proxy_and_stream::<StoryModuleMarker>().unwrap();
        let module = ModuleIdentifier {
            story_id: Some("story1".to_string()),
            module_path: Some(vec!["mod-a".to_string()]),
        };
        let (state, _, mod_manager) = init_state(puppet_master_client, entity_resolver);
        StoryModuleService::new(state.clone(), mod_manager, module).unwrap().spawn(request_stream);

        // Write instance state.
        assert!(client
            .write_instance_state("query", &mut utils::string_to_vmo_buffer("cities in spain")?)
            .await
            .is_ok());

        // Read instance state.
        let state_string = utils::vmo_buffer_to_string(Box::new(
            client.read_instance_state("query").await?.unwrap(),
        ))?;
        assert_eq!(state_string, "cities in spain".to_string());
        assert!(client.read_instance_state("other_state_key").await?.is_err());
        Ok(())
    }
}
// Auto-generated (svd2rust-style) read accessors for the tamper/internal-tamper
// masked-interrupt status register MISR. Each field reader extracts a single
// flag bit from the raw 32-bit register value.
#[doc = "Reader of register MISR"]
pub type R = crate::R<u32, super::MISR>;
#[doc = "Reader of field `TAMP1MF`"]
pub type TAMP1MF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TAMP2MF`"]
pub type TAMP2MF_R = crate::R<bool, bool>;
#[doc = "Reader of field `TAMP3MF`"]
pub type TAMP3MF_R = crate::R<bool, bool>;
#[doc = "Reader of field `ITAMP3MF`"]
pub type ITAMP3MF_R = crate::R<bool, bool>;
#[doc = "Reader of field `ITAMP4MF`"]
pub type ITAMP4MF_R = crate::R<bool, bool>;
#[doc = "Reader of field `ITAMP5MF`"]
pub type ITAMP5MF_R = crate::R<bool, bool>;
#[doc = "Reader of field `ITAMP6MF`"]
pub type ITAMP6MF_R = crate::R<bool, bool>;
impl R {
    #[doc = "Bit 0 - TAMP1MF:"]
    #[inline(always)]
    pub fn tamp1mf(&self) -> TAMP1MF_R {
        TAMP1MF_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - TAMP2MF"]
    #[inline(always)]
    pub fn tamp2mf(&self) -> TAMP2MF_R {
        TAMP2MF_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - TAMP3MF"]
    #[inline(always)]
    pub fn tamp3mf(&self) -> TAMP3MF_R {
        TAMP3MF_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    // Internal-tamper flags occupy bits 18..=21 (bits 3..=17 are reserved).
    #[doc = "Bit 18 - ITAMP3MF"]
    #[inline(always)]
    pub fn itamp3mf(&self) -> ITAMP3MF_R {
        ITAMP3MF_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 19 - ITAMP4MF"]
    #[inline(always)]
    pub fn itamp4mf(&self) -> ITAMP4MF_R {
        ITAMP4MF_R::new(((self.bits >> 19) & 0x01) != 0)
    }
    #[doc = "Bit 20 - ITAMP5MF"]
    #[inline(always)]
    pub fn itamp5mf(&self) -> ITAMP5MF_R {
        ITAMP5MF_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 21 - ITAMP6MF"]
    #[inline(always)]
    pub fn itamp6mf(&self) -> ITAMP6MF_R {
        ITAMP6MF_R::new(((self.bits >> 21) & 0x01) != 0)
    }
}
// Auto-generated (svd2rust-style) accessors for the DCKCFGR2 peripheral clock
// configuration register: I2C4 kernel clock select (bits 22:23) and LPTIM1
// kernel clock select (bits 30:31).
#[doc = "Reader of register DCKCFGR2"]
pub type R = crate::R<u32, super::DCKCFGR2>;
#[doc = "Writer for register DCKCFGR2"]
pub type W = crate::W<u32, super::DCKCFGR2>;
#[doc = "Register DCKCFGR2 `reset()`'s with value 0"]
impl crate::ResetValue for super::DCKCFGR2 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "I2C4SEL\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum I2C4SEL_A {
    #[doc = "0: APB1 clock (PCLK1) is selected as I2C clock"]
    APB1 = 0,
    #[doc = "1: System clock is selected as I2C clock"]
    SYSCLK = 1,
    #[doc = "2: HSI clock is selected as I2C clock"]
    HSI = 2,
}
impl From<I2C4SEL_A> for u8 {
    #[inline(always)]
    fn from(variant: I2C4SEL_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `I2C4SEL`"]
pub type I2C4SEL_R = crate::R<u8, I2C4SEL_A>;
impl I2C4SEL_R {
    // I2C4SEL only enumerates 0..=2, so value 3 is reported as `Res` (reserved).
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, I2C4SEL_A> {
        use crate::Variant::*;
        match self.bits {
            0 => Val(I2C4SEL_A::APB1),
            1 => Val(I2C4SEL_A::SYSCLK),
            2 => Val(I2C4SEL_A::HSI),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `APB1`"]
    #[inline(always)]
    pub fn is_apb1(&self) -> bool {
        *self == I2C4SEL_A::APB1
    }
    #[doc = "Checks if the value of the field is `SYSCLK`"]
    #[inline(always)]
    pub fn is_sysclk(&self) -> bool {
        *self == I2C4SEL_A::SYSCLK
    }
    #[doc = "Checks if the value of the field is `HSI`"]
    #[inline(always)]
    pub fn is_hsi(&self) -> bool {
        *self == I2C4SEL_A::HSI
    }
}
#[doc = "Write proxy for field `I2C4SEL`"]
pub struct I2C4SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> I2C4SEL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: I2C4SEL_A) -> &'a mut W {
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "APB1 clock (PCLK1) is selected as I2C clock"]
    #[inline(always)]
    pub fn apb1(self) -> &'a mut W {
        self.variant(I2C4SEL_A::APB1)
    }
    #[doc = "System clock is selected as I2C clock"]
    #[inline(always)]
    pub fn sysclk(self) -> &'a mut W {
        self.variant(I2C4SEL_A::SYSCLK)
    }
    #[doc = "HSI clock is selected as I2C clock"]
    #[inline(always)]
    pub fn hsi(self) -> &'a mut W {
        self.variant(I2C4SEL_A::HSI)
    }
    // `bits` is unsafe here because the enum does not cover value 3; the
    // safe `variant`/helper methods are the checked entry points.
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 22)) | (((value as u32) & 0x03) << 22);
        self.w
    }
}
#[doc = "LPTIM1SEL\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum LPTIM1SEL_A {
    #[doc = "0: APB1 clock (PCLK1) selected as LPTILM1 clock"]
    APB1 = 0,
    #[doc = "1: LSI clock is selected as LPTILM1 clock"]
    LSI = 1,
    #[doc = "2: HSI clock is selected as LPTILM1 clock"]
    HSI = 2,
    #[doc = "3: LSE clock is selected as LPTILM1 clock"]
    LSE = 3,
}
impl From<LPTIM1SEL_A> for u8 {
    #[inline(always)]
    fn from(variant: LPTIM1SEL_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `LPTIM1SEL`"]
pub type LPTIM1SEL_R = crate::R<u8, LPTIM1SEL_A>;
impl LPTIM1SEL_R {
    // LPTIM1SEL enumerates all four 2-bit values, so `variant` is total and
    // the masked match can mark the remaining arm unreachable.
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> LPTIM1SEL_A {
        match self.bits {
            0 => LPTIM1SEL_A::APB1,
            1 => LPTIM1SEL_A::LSI,
            2 => LPTIM1SEL_A::HSI,
            3 => LPTIM1SEL_A::LSE,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `APB1`"]
    #[inline(always)]
    pub fn is_apb1(&self) -> bool {
        *self == LPTIM1SEL_A::APB1
    }
    #[doc = "Checks if the value of the field is `LSI`"]
    #[inline(always)]
    pub fn is_lsi(&self) -> bool {
        *self == LPTIM1SEL_A::LSI
    }
    #[doc = "Checks if the value of the field is `HSI`"]
    #[inline(always)]
    pub fn is_hsi(&self) -> bool {
        *self == LPTIM1SEL_A::HSI
    }
    #[doc = "Checks if the value of the field is `LSE`"]
    #[inline(always)]
    pub fn is_lse(&self) -> bool {
        *self == LPTIM1SEL_A::LSE
    }
}
#[doc = "Write proxy for field `LPTIM1SEL`"]
pub struct LPTIM1SEL_W<'a> {
    w: &'a mut W,
}
impl<'a> LPTIM1SEL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: LPTIM1SEL_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "APB1 clock (PCLK1) selected as LPTILM1 clock"]
    #[inline(always)]
    pub fn apb1(self) -> &'a mut W {
        self.variant(LPTIM1SEL_A::APB1)
    }
    #[doc = "LSI clock is selected as LPTILM1 clock"]
    #[inline(always)]
    pub fn lsi(self) -> &'a mut W {
        self.variant(LPTIM1SEL_A::LSI)
    }
    #[doc = "HSI clock is selected as LPTILM1 clock"]
    #[inline(always)]
    pub fn hsi(self) -> &'a mut W {
        self.variant(LPTIM1SEL_A::HSI)
    }
    #[doc = "LSE clock is selected as LPTILM1 clock"]
    #[inline(always)]
    pub fn lse(self) -> &'a mut W {
        self.variant(LPTIM1SEL_A::LSE)
    }
    // Safe (non-`unsafe`) because every 2-bit value is a valid variant.
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 30)) | (((value as u32) & 0x03) << 30);
        self.w
    }
}
impl R {
    #[doc = "Bits 22:23 - I2C4SEL"]
    #[inline(always)]
    pub fn i2c4sel(&self) -> I2C4SEL_R {
        I2C4SEL_R::new(((self.bits >> 22) & 0x03) as u8)
    }
    #[doc = "Bits 30:31 - LPTIM1SEL"]
    #[inline(always)]
    pub fn lptim1sel(&self) -> LPTIM1SEL_R {
        LPTIM1SEL_R::new(((self.bits >> 30) & 0x03) as u8)
    }
}
impl W {
    #[doc = "Bits 22:23 - I2C4SEL"]
    #[inline(always)]
    pub fn i2c4sel(&mut self) -> I2C4SEL_W {
        I2C4SEL_W { w: self }
    }
    #[doc = "Bits 30:31 - LPTIM1SEL"]
    #[inline(always)]
    pub fn lptim1sel(&mut self) -> LPTIM1SEL_W {
        LPTIM1SEL_W { w: self }
    }
}
use super::{inner_most_type_name, Result};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use syn::{
    parse::{Parse, ParseStream},
    punctuated::Punctuated,
    Ident, Token, TypePath,
};

/// Macro entry point: parses `path.to.field -> Type [as scalar]` and emits a
/// juniper resolver method for it, reporting parse/expansion failures as
/// compile errors.
pub fn expand(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let parsed = or_return_compile_error!(syn::parse::<Input>(input));
    or_return_compile_error!(parsed.expand()).into()
}

/// Parsed form of the macro invocation.
struct Input {
    // Dotted access path to the field being resolved.
    key_path: Punctuated<Ident, Token![.]>,
    // Declared GraphQL-facing type of the field.
    ty: TypePath,
    // `as scalar` suffix forces the scalar resolver shape.
    force_as_scalar: bool,
}

impl Input {
    /// Chooses between the scalar and the object resolver shape.
    fn expand(&self) -> Result<TokenStream> {
        // `is_scalar` is evaluated first so its error propagates even when
        // the caller forced scalar treatment.
        if self.is_scalar()? || self.force_as_scalar {
            return Ok(self.expand_to_scalar());
        }
        self.expand_to_type()
    }

    /// Resolver for a scalar leaf: no `QueryTrail` parameter is needed.
    fn expand_to_scalar(&self) -> TokenStream {
        let method = self.resolver_method_name();
        let field_ty = &self.ty;
        let path = &self.key_path;
        quote! {
            fn #method(
                &self,
                _: &juniper::Executor<'_, Context>,
            ) -> juniper::FieldResult<&#field_ty> {
                Ok(&self.#path)
            }
        }
    }

    /// Resolver for an object type: takes a walked `QueryTrail` of the
    /// innermost type so nested selections can be inspected.
    fn expand_to_type(&self) -> Result<TokenStream> {
        let method = self.resolver_method_name();
        let field_ty = &self.ty;
        let path = &self.key_path;
        let trail_ty = inner_most_type_name(&self.ty)?;
        let tokens = quote! {
            fn #method(
                &self,
                _: &juniper::Executor<'_, Context>,
                _: &juniper_from_schema::QueryTrail<'_, #trail_ty, juniper_from_schema::Walked>,
            ) -> juniper::FieldResult<&#field_ty> {
                Ok(&self.#path)
            }
        };
        Ok(tokens)
    }

    /// Resolver name follows juniper's `field_<name>` convention, built from
    /// the last segment of the key path.
    fn resolver_method_name(&self) -> Ident {
        format_ident!("field_{}", self.key_path.last().unwrap())
    }

    /// True when the innermost type name is one of the built-in GraphQL
    /// scalars.
    fn is_scalar(&self) -> Result<bool> {
        let inner = inner_most_type_name(&self.ty)?;
        Ok(matches!(
            inner.to_string().as_str(),
            "String" | "i32" | "f64" | "bool" | "ID"
        ))
    }
}

mod kw {
    syn::custom_keyword!(scalar);
}

impl Parse for Input {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        // `a.b.c -> Type` — the arrow is consumed as its two component
        // punctuation tokens.
        let key_path = Punctuated::parse_separated_nonempty(input)?;
        input.parse::<Token![-]>()?;
        input.parse::<Token![>]>()?;
        let ty = input.parse::<TypePath>()?;
        // Optional trailing `as scalar` marker.
        let force_as_scalar = if input.peek(Token![as]) {
            input.parse::<Token![as]>()?;
            input.parse::<kw::scalar>()?;
            true
        } else {
            false
        };
        Ok(Input { key_path, ty, force_as_scalar })
    }
}
//! ANSI Text Styling //! //! *A portmanteau of "ansi style"* //! //! `anstyle` provides core types describing [ANSI styling escape //! codes](https://en.wikipedia.org/wiki/ANSI_escape_code) for interoperability //! between crates. //! //! Example use cases: //! - An argument parser allowing callers to define the colors used in the help-output without //! putting the text formatting crate in the public API //! - A style description parser that can work with any text formatting crate //! //! Priorities: //! 1. API stability //! 2. Low compile-time and binary-size overhead //! 3. `const` friendly API for callers to statically define their stylesheet //! //! For integration with text styling crate, see: //! - [anstyle-ansi-term](https://docs.rs/anstyle-ansi-term) //! - [anstyle-crossterm](https://docs.rs/anstyle-crossterm) //! - [anstyle-owo-colors](https://docs.rs/anstyle-owo-colors) //! - [anstyle-termcolor](https://docs.rs/anstyle-termcolor) //! - [anstyle-yansi](https://docs.rs/anstyle-yansi) //! //! User-styling parsers: //! - [anstyle-git](https://docs.rs/anstyle-git): Parse Git style descriptions //! - [anstyle-ls](https://docs.rs/anstyle-ls): Parse LS_COLORS style descriptions //! //! Convert to other formats //! - [anstream](https://docs.rs/anstream): A simple cross platform library for writing colored text to a terminal //! - [anstyle-roff](https://docs.rs/anstyle-roff): For converting to ROFF //! //! Utilities //! - [anstyle-lossy](https://docs.rs/anstyle-lossy): Convert between `anstyle::Color` types //! - [anstyle-parse](https://docs.rs/anstyle-parse): Parsing ANSI Style Escapes //! - [anstyle-wincon](https://docs.rs/anstyle-wincon): Styling legacy Microsoft terminals //! //! # Examples //! //! The core type is [`Style`]: //! ```rust //! let style = anstyle::Style::new().bold(); //! 
``` #![cfg_attr(not(feature = "std"), no_std)] #[macro_use] mod macros; mod color; mod effect; mod reset; mod style; pub use color::*; pub use effect::*; pub use reset::*; pub use style::*;
pub(crate) mod addr; pub(crate) mod msghdr; pub(crate) mod read_sockaddr; pub(crate) mod send_recv; pub(crate) mod syscalls; pub(crate) mod write_sockaddr;
use bootstrap::window::Window;

/// Opaque placeholder rendering context for builds without a real backend.
#[derive(Debug, Clone, Copy)]
pub struct Context;

/// No-op platform initialization; present only to satisfy the interface.
pub fn init(_window: &Window) {}

/// Returns the unit placeholder context; nothing is allocated.
pub fn create_context(_window: &Window) -> Context {
    Context
}

/// No-op teardown — the placeholder context owns no resources.
pub fn destroy_context(_context: Context) {}

/// Always resolves to `None`: no GL procedures exist in this stub backend.
pub fn proc_loader(_proc_name: &str) -> Option<extern "C" fn()> {
    None
}

/// No-op buffer swap for the stub backend.
pub fn swap_buffers(_window: &Window) {}
#![allow(unused_variables)]
use std::thread::sleep;
use std::time::Duration;
use futures::future::join4;
use git2::{Oid, Repository, BranchType};

// -> Open & Select Your Repository
// Opens the repository at the path given as the first CLI argument, or the
// current directory when no argument is supplied. Panics if opening fails.
fn open_repo() -> Repository {
    let path = std::env::args().nth(1).unwrap_or(".".to_string());
    Repository::open(path.as_str()).expect("Couldn't open repo")
}

// -> Initial Head & Target
// Returns the Oid the repository HEAD currently points at.
// NOTE(review): re-opens the repository on every call — callers below each
// open it again too; a shared handle would avoid repeated opens.
fn initial_head() -> Oid {
    let repo = open_repo();
    let initial_head = repo.head().unwrap();
    initial_head.target().unwrap()
}

// -> Convert from String to str
// Unwraps an optional str, substituting the literal "add" when absent.
// NOTE(review): "add" is an odd fallback for names/emails/messages — confirm
// it is intentional rather than a leftover placeholder.
fn convert_to_str(target: Option<&str>) -> &str {
    target.unwrap_or("add")
}

// -> Commit Logging
// Prints details of the commit at the initial HEAD after a 1 s delay.
// NOTE(review): std::thread::sleep BLOCKS the executor thread inside an async
// fn; the staggered sleeps (1/3/5/7 s) effectively serialize the join4'd
// futures. An async timer would be the fix, but none is available from the
// crates this file already imports — confirm runtime before changing.
async fn get_fullcommit_log() {
    let timeout = Duration::from_secs(1);
    sleep(timeout);
    let repo = open_repo();
    let commit = repo.find_commit(initial_head()).unwrap();
    let timestamp = commit.time().seconds();
    // Legacy `time` 0.1-style API — presumably the old `time` crate; verify
    // against Cargo.toml before porting to `time` 0.3 or `chrono`.
    let tm = time::at(time::Timespec::new(timestamp, 0));
    let time = tm.rfc822();
    let commit_message = convert_to_str(commit.message());
    let commit_message_bytes = commit.message_bytes();
    let commit_raw_header = convert_to_str(commit.raw_header());
    let commit_summary = commit.summary();
    let commit_id = commit.id();
    let commit_author = commit.author();
    let commit_author_name = convert_to_str(commit_author.name());
    let commit_author_email = convert_to_str(commit_author.email());
    let commiter = commit.committer();
    let commiter_name = convert_to_str(commiter.name());
    let commiter_email = convert_to_str(commiter.email());
    println!(
        "\n Last Commit:\n Id -> {} \n Message -> {} Committer Name -> {} \n Committer Email -> {} \n Time -> {} \n Raw -> {:#?} \n",
        commit_id, commit_message, commiter_name, commiter_email, time, commit_raw_header,
    )
}

// Creates (or force-recreates) `test_branch` at the initial HEAD commit and
// points its upstream at "master". Delayed 3 s so it runs after the log step.
async fn create_new_branch() {
    let timeout = Duration::from_secs(3);
    sleep(timeout);
    let repo = open_repo();
    let commit = repo.find_commit(initial_head()).unwrap();
    let mut branch = repo.branch("test_branch", &commit, true).unwrap();
    // -> sets the default remote branch
    branch.set_upstream(Some("master")).unwrap();
    branch.upstream().unwrap();
    println!("Branch has been succesfully created! If you wanna check type command git branch -a");
}

// Renames `test_branch` to `test_branch_1` (force) and re-sets its upstream.
// Delayed 5 s so it runs after the branch has been created.
async fn rename_branch() {
    let timeout = Duration::from_secs(5);
    sleep(timeout);
    let repo = open_repo();
    let commit = repo.find_commit(initial_head()).unwrap();
    let mut find_branch = repo.find_branch("test_branch", BranchType::Local).unwrap();
    let mut renamed_branch = find_branch.rename("test_branch_1", true).unwrap();
    // -> sets the default remote branch
    renamed_branch.set_upstream(Some("master")).unwrap();
    renamed_branch.upstream().unwrap();
    println!("Branch has been succesfully renamed! If you wanna check type command git branch -a");
}

// Deletes `test_branch_1`. Delayed 7 s so it runs last of the four hooks.
async fn delete_branch() {
    let timeout = Duration::from_secs(7);
    sleep(timeout);
    let repo = open_repo();
    let commit = repo.find_commit(initial_head()).unwrap();
    let mut find_branch = repo.find_branch("test_branch_1", BranchType::Local).unwrap();
    find_branch.delete().unwrap();
    println!("Branch has been succesfully deleted! If you wanna check type command git branch -a");
}

// Runs all four demo hooks concurrently; the blocking sleeps above order them
// log -> create -> rename -> delete in practice.
pub async fn merge_all_hooks() {
    let commit_log = get_fullcommit_log();
    let new_branch = create_new_branch();
    let rename_branch = rename_branch();
    let delete_branch = delete_branch();
    let race_all = join4(commit_log, new_branch, rename_branch, delete_branch);
    race_all.await;
}
use serde::{Deserialize, Serialize}; use serde_json; #[derive(Serialize, Deserialize, Debug)] pub struct Message { message_string: String, timestamp: String, is_test: bool, } #[derive(Serialize, Deserialize, Debug)] pub struct MessageString { client_code: String, alarm_event_code: String, handshake_code: String, } pub fn message_from_data(data: &[u8]) -> Message { let v: Message = serde_json::from_slice(data).unwrap(); return v; } pub fn parse_message_string(msg: &Message) -> MessageString { let mut msg_split: Vec<&str> = msg.message_string.split("-").collect(); let msgstr = MessageString { client_code: msg_split[0].to_string(), alarm_event_code: msg_split[1].to_string(), handshake_code: msg_split[2].to_string() }; return msgstr; } pub fn validate_handshake(msgstr: &MessageString, handshake: &str) -> bool { if msgstr.handshake_code != handshake { return false; } else { return true; } } pub fn validate_alarm_event_code(msgstr: &MessageString) -> bool { if msgstr.alarm_event_code.chars().count() != 5 { return false; } else { return true; } } pub fn validate_client_code(msgstr: &MessageString) -> bool { if msgstr.client_code.chars().count() != 16 { return false; } else { return true; } }
/** The StableAbi derive macro allows one to implement the [`StableAbi trait`] to : - Assert that the type has a stable representation across Rust version/compiles. - Produce the layout of the type at runtime to check it against the loaded library. # Caveats Due to how this macro is implemented, using `Self` in bounds doesn't work, you must use the full type name and generic arguments. # Container Attributes These helper attributes are applied on the type declaration. ### `#[sabi(phantom_field(name: type))]` Adds a virtual field to the type layout constant, which is checked against the phantom field that was declared in the same order for compatibility. ### `#[sabi(phantom_type_param = type)]` Adds a virtual type parameter to the type layout constant, which is checked for compatibility. ### `#[sabi(phantom_const_param = constant expression)]` Adds a virtual const parameter to the type layout constant, which is checked for equality with the virtual const parameter declared in the same order. The parameter must implement `StableAbi + Eq + Debug`. <span id = "not_stableabi_attr"></span> ### `#[sabi(not_stableabi(TypeParameter))]` Replaces the implicit `TypeParameter: `[`StableAbi`](trait@StableAbi) constraint with a `TypeParameter: `[`GetStaticEquivalent_`] constraint. ### `#[sabi(unsafe_unconstrained(TypeParameter))]` Removes the implicit `TypeParameter: `[`StableAbi`](trait@StableAbi) constraint. The type parameter will be ignored when determining whether the type has already been checked, when loading a dynamic library, Don't use this if transmuting this type to have different type parameters, only changing the `#[sabi(unsafe_unconstrained())]` one, would cause Undefined Behavior. This is only necessary if you are passing `TypeParameter` to [`UnsafeIgnoredType`] ### `#[sabi(bound(Type: ATrait))]` Adds a bound to the [`StableAbi`](trait@StableAbi) impl. 
### `#[sabi(bounds(Type: ATrait, Type2: OtherTrait))]`

Adds many bounds to the [`StableAbi`](trait@StableAbi) impl.

<span id = "prefix_bound_attr"></span>
### `#[sabi(prefix_bound(Type: ATrait))]`

This is only valid for Prefix types, declared with
[`#[sabi(kind(Prefix(..)))]`](#sabi_kind_prefix_attr).

Adds a bound to the [`PrefixTypeTrait`] impl (for the deriving type).

### `#[sabi(prefix_bounds(Type: ATrait, Type2: OtherTrait))]`

This is only valid for Prefix types, declared with
[`#[sabi(kind(Prefix(..)))]`](#sabi_kind_prefix_attr).

Adds many bounds to the [`PrefixTypeTrait`] impl (for the deriving type).

### `#[sabi(unsafe_allow_type_macros)]`

This allows type macros to be used alongside the StableAbi derive macro.

The reason this is unsafe to enable is that StableAbi cannot currently
analyze the lifetimes within macros, which means that if any lifetime
argument inside the macro invocation changes it won't be checked by the
runtime type checker.

A type macro is any macro that evaluates to a type.

### `#[sabi(tag = some_expr)]`

Adds a [`Tag`](crate::type_layout::Tag) associated with the type,
a dynamically typed data structure used to encode extra properties about a type.

This can only be done once; to add multiple properties you must use any of
a map, an array, or a set.

You can only rely on tags for safety if the specific tags were present since
the first compatible version of the library,
otherwise this only guarantees compatibility between parent and child
libraries, not sibling libraries.

Parent means the library/binary that loaded a library,
or the parents of that one.

Sibling means libraries loaded at runtime by the same library/binary
(or a parent of that one).
For more information about tags, [look here](./type_layout/tagging/index.html)

<span id = "sabi_extra_checks_attr"></span>
### `#[sabi(extra_checks = <some_constant_expression>)]`

Adds an `ExtraChecks` trait object associated with the type,
which allows encoding and checking extra properties about a type.

`<some_constant_expression>` must be a constant that implements `ExtraChecks`.

For examples of using this attribute
[look here](./abi_stability/extra_checks/index.html#examples)

### `#[sabi(debug_print)]`

Prints the generated code, stopping compilation.

<span id = "sabi_kind_prefix_attr"></span>
### `#[sabi(kind(Prefix( .. )))]`

Declares the struct as being a prefix-type.

Arguments (what goes inside `#[sabi(kind(Prefix( <here> )))]`):

- `prefix_ref = <Identifier>` (optional: defaults to `<DerivingType>_Ref`):
Declares an ffi-safe pointer to a vtable/module,
that can be extended in semver compatible versions.<br>
Uses `<Identifier>` as the name of the prefix struct.<br>
For more details on prefix-types [look here](./docs/prefix_types/index.html)

- `prefix_fields = <Identifier>` (optional: defaults to `<DerivingType>_Prefix`):<br>
Declares a struct with all the fields in the deriving type up to
(and including) the field with the
[`#[sabi(last_prefix_field)]`](#sabilast_prefix_field) attribute,
named `<Identifier>`.

- `prefix_ref_docs = <expression>` (optional, allows multiple):<br>
Replaces the default documentation for `<DerivingType>_Ref` with the
passed-in expression.<br>
If this is passed multiple times, then multiple `#[doc = ...]` attributes
are emitted.

<span id = "kind_with_non_exhaustive_attr"></span>
### `#[sabi(kind(WithNonExhaustive( .. )))]`

Declares this enum as being nonexhaustive,
generating items and impls necessary to wrap this enum in the
[`NonExhaustive`] type to pass it through ffi.
For more details on nonexhaustive enums
[look here](./docs/sabi_nonexhaustive/index.html)

### `#[sabi(module_reflection(...))]`

Determines how this type is accessed when treated as a module for reflection.

`#[sabi(module_reflection( Module ))]`<br>
The default reflection mode, treats the public fields as module items.

`#[sabi(module_reflection( Opaque ))]`<br>
Treats this as an empty module.

`#[sabi(module_reflection( Deref ))]`<br>
Delegates the treatment of this type as a module to the type it
dereferences to.

### `#[sabi(impl_InterfaceType(...))]`

Implements the [`InterfaceType`] trait for a type,
defining the usable/required traits when creating a
[`DynTrait`]`<_, ThisType>`/[`NonExhaustive`]`<_, _, ThisType>`.

Syntax: `#[sabi(impl_InterfaceType(Trait0, Trait1, ..., TraitN))]`

If a trait is not specified,
it will not be required when constructing [`DynTrait`]/[`NonExhaustive`],
and won't be usable afterwards.

<span id = "InterfaceType_traits"> These are the traits you can specify: </span>

- `Send`: Changing this to require/unrequire in minor versions is an abi breaking change.

- `Sync`: Changing this to require/unrequire in minor versions is an abi breaking change.

- `Unpin`: Changing this to require/unrequire in minor versions is an abi breaking change.

- `Clone`

- `Default`

- `Display`

- `Debug`

- `Eq`

- `PartialEq`

- `Ord`

- `PartialOrd`

- `Hash`

- `Deserialize`: corresponds to `serde::Deserialize`

- `Serialize`: corresponds to `serde::Serialize`

- `Iterator`: this type will also have to implement
[`abi_stable::erased_types::IteratorItem`].

- `DoubleEndedIterator`: this type will also have to implement
[`abi_stable::erased_types::IteratorItem`].

- `FmtWrite`: corresponds to `std::fmt::Write`.

- `IoWrite`: corresponds to `std::io::Write`.

- `IoSeek`: corresponds to `std::io::Seek`.

- `IoRead`: corresponds to `std::io::Read`.

- `IoBufRead`: corresponds to `std::io::BufRead`.

- `Error`: corresponds to `std::error::Error`.
<br>
Examples:

- `#[sabi(impl_InterfaceType(Send, Sync))]`

- `#[sabi(impl_InterfaceType(Send, Sync, Iterator, DoubleEndedIterator))]`

- `#[sabi(impl_InterfaceType(Clone, Debug, FmtWrite))]`

- `#[sabi(impl_InterfaceType(Clone, Debug, IoWrite, IoRead))]`

### `#[sabi(unsafe_opaque_fields)]`

Does not require any field to implement [`StableAbi`](trait@StableAbi),
and instead uses the [`StableAbi`](trait@StableAbi) impl of
[`UnsafeOpaqueField`]`<FieldType>`.

This is unsafe because the layout of their type won't be verified when
loading the library,
which causes Undefined Behavior if the type has a different layout.

### `#[sabi(unsafe_sabi_opaque_fields)]`

Requires every field to implement [`StableAbi`](trait@StableAbi)
(unless overridden), but doesn't check their layout.

This is unsafe because the layout of their type won't be verified when
loading the library,
which causes Undefined Behavior if the type has a different layout.

# Field attributes

These helper attributes are applied to fields.

### `#[sabi(rename = ident)]`

Renames the field in the generated layout information.
Use this when renaming private fields.

### `#[sabi(unsafe_change_type = SomeType)]`

Changes the type of this field in the generated type layout constant to
SomeType.

This has the `unsafe` prefix because SomeType is relied on being correct by
[`StableAbi`](trait@StableAbi).

### `#[sabi(unsafe_opaque_field)]`

Does not require the field to implement [`StableAbi`],
and instead uses the StableAbi impl of [`UnsafeOpaqueField`]`<FieldType>`.

This is unsafe because the layout of the type won't be verified when loading
the library,
which causes Undefined Behavior if the type has a different layout.

### `#[sabi(unsafe_sabi_opaque_field)]`

Requires the field to implement [`StableAbi`] (unless overridden),
but doesn't check its layout.

This is unsafe because the layout of the type won't be verified when loading
the library,
which causes Undefined Behavior if the type has a different layout.
### `#[sabi(bound = SomeBound)]`

Adds a `TheFieldType: SomeBound` constraint to the
[`StableAbi`](trait@StableAbi) impl.

Eg:

```ignore
#[sabi(bound = Debug)]
name: RStr<'static>,
```

adds the `RStr<'static>: Debug` bound to the [`StableAbi`](trait@StableAbi) impl

### `#[sabi(with_field_indices)]`

This is only valid for Prefix types,
declared with [`#[sabi(kind(Prefix(..)))]`](#sabi_kind_prefix_attr).

Generates associated constants named `field_index_for_<field_name>` with the
index of each field in the prefix type.
Those indices can then be passed to the
`abi_stable::prefix_types::panic_on_missing_*` functions to panic on a
missing field.

### `#[sabi(accessor_bound = ATrait)]`

This is only valid for Prefix types,
declared with [`#[sabi(kind(Prefix(..)))]`](#sabi_kind_prefix_attr).

Adds the bound to the field type in the accessor method.

### `#[sabi(last_prefix_field)]`

This is only valid for Prefix types,
declared with [`#[sabi(kind(Prefix(..)))]`](#sabi_kind_prefix_attr).

Declares that the field it is applied to is the last field in the prefix,
where every field up to it is guaranteed to exist.

### `#[sabi(accessible_if = expression)]`

This is only valid for Prefix types,
declared with [`#[sabi(kind(Prefix(..)))]`](#sabi_kind_prefix_attr).

This attribute makes a field conditional,
based on the const boolean expression
(which must be a valid `bool` constant).

Whether this attribute is applied to any given prefix field must not change
in minor versions.

If `expression` is false, the field won't be accessible,
and the type of the field can be anything so long as its size and alignment
is compatible.

If `expression` is true,
the type of the field must be compatible when checking layout.

If this attribute is applied to prefix fields,
it will only be compatible with other types if they agree on which accessors
are conditional for prefix fields.
Prefix fields with this attribute are made private in the generated `<DerivingType>_Prefix` struct, without this attribute they keep the visibility. To do `#[sabi(accessible_if = <TypeParameter as Trait>::CONSTANT)]` you can use the [`#[sabi(prefix_bound(TypeParameter: Trait))]`](#prefix_bound_attr) helper attribute. ### `#[sabi(refl(pub_getter = function_name))]` Determines the public getter for a field used by reflection. The function can return either a reference or a value. # Field and/or Container attributes ### `#[sabi(missing_field( .. ))]` This is only valid for Prefix types, declared with [`#[sabi(kind(Prefix(..)))]`](#sabi_kind_prefix_attr). Determines what happens in the accessor method for a field, when the field is missing. The default is that it returns an `Option<FieldType>`, returning None if the field is absent, Some(field_value) if it's present. If the attribute is on the struct, it's applied to all fields(this is overridable) after the [`#[sabi(last_prefix_field)]`](#sabilast_prefix_field) attribute. If the attribute is on a field, it's applied to that field only, overriding the setting on the struct. `#[sabi(missing_field(panic))]`<br> Panics if the field doesn't exist, with an informative error message. `#[sabi(missing_field(option))]`<br> Returns None if the field doesn't exist, Some(fieldvalue) if it does. This is the default. `#[sabi(missing_field(with = somefunction))]`<br> Returns `somefunction()` if the field doesn't exist. `#[sabi(missing_field(value = some_expression))]`<br> Returns `some_expression` if the field doesn't exist. `#[sabi(missing_field(default))]`<br> Returns `Default::default()` if the field doesn't exist. # Variant and/or Container attributes ### `#[sabi(with_constructor)]` This is only valid for nonexhaustive enums, declared with [`#[sabi(kind(WithNonExhaustive(..)))]`](#kind_with_non_exhaustive_attr). Creates constructors for enum variant(s), named the same as the variant(s) with an `_NE` suffix. 
This attribute can be overridden on variants
(when it was also applied to the Container itself).

For a variant like this:
`VariantNamed{foo: RString, bar: RBox<Struct>}`
it would generate an associated function like this
(the exact generated code might differ a bit):

```ignore
fn VariantNamed_NE(foo: RString, bar: RBox<Struct>) -> Enum_NE {
    let x = Enum::VariantNamed { foo, bar };
    NonExhaustive::new(x)
}
```

### `#[sabi(with_boxed_constructor)]`

This is only valid for nonexhaustive enums,
declared with
[`#[sabi(kind(WithNonExhaustive(..)))]`](#kind_with_non_exhaustive_attr).

Creates constructors for enum variant(s) which only contain a pointer,
named the same as the variant(s) with an `_NE` suffix.

This attribute can be overridden on variants
(when it was also applied to the Container itself).

All constructor functions are declared inside a single impl block with `Self`
bounded by the traits that are necessary to construct [`NonExhaustive`]
from it.

For a variant like this:
`VariantNamed(RBox<T>)`
it would generate an associated function like this
(the exact generated code might differ a bit):

```ignore
fn VariantNamed_NE(value: T) -> Enum_NE<T> {
    let x = RBox::new(value);
    let x = Enum::VariantNamed(x);
    NonExhaustive::new(x)
}
```

<br>

For a variant like this:
`VariantNamed{ptr_: MyPointer<T>}`
it would generate an associated function like this
(the exact generated code might differ a bit):

```ignore
fn VariantNamed_NE(value: T) -> Enum_NE<T> {
    let x = MyPointer::new(value);
    let x = Enum::VariantNamed { ptr_: x };
    NonExhaustive::new(x)
}
```

For a variant like this:
`VariantNamed(BoxedStruct)`
it would generate an associated function like this
(the exact generated code might differ a bit):

```ignore
fn VariantNamed_NE(
    value: <BoxedStruct as ::std::ops::Deref>::Target,
) -> Enum_NE<T> {
    let x = BoxedStruct::new(value);
    let x = Enum::VariantNamed(x);
    NonExhaustive::new(x)
}
```

# Supported repr attributes

Because repr attributes can cause the type to change layout, the
StableAbi derive macro has to know about every repr attribute applied to the type, since it might invalidate layout stability. ### `repr(C)` This is the representation that most StableAbi types will have. ### `repr(transparent)` `repr(transparent)` types are supported, though their layout is not considered equivalent to their only non-zero-sized field, since this library considers all types as being meaningful even if zero-sized. ### `repr(i8|u8|i16|u16|i32|u32|i64|u64|isize|usize)` These repr attributes are only supported for enums. ### `repr(align(...))` `repr(align(...))` is supported, so long as it is used in combination with the other supported repr attributes. # Examples ### Basic example ``` use abi_stable::StableAbi; #[repr(C)] #[derive(StableAbi)] struct Point2D { x: u32, y: u32, } ``` ### On a `#[repr(transparent)]` newtype ``` use abi_stable::StableAbi; #[repr(transparent)] #[derive(StableAbi)] pub struct Wrapper<T> { pub inner: T, } ``` ### On a `#[repr(u8)]` enum. This enum cannot add variants in minor versions, for that you have to use [nonexhaustive enums](./docs/sabi_nonexhaustive/index.html). ``` use abi_stable::{std_types::RString, StableAbi}; #[repr(u8)] #[derive(StableAbi)] pub enum Command { LaunchRockets, EatLaundry, WakeTheDragon { using: RString }, } ``` ### Prefix-types For examples of Prefix-types [look here](./docs/prefix_types/index.html#examples). ### Nonexhaustive-enums For examples of nonexhaustive enums [look here for the first example ](./docs/sabi_nonexhaustive/index.html#defining-a-deserializable-nonexhaustive-enum). ### Examples of `#[sabi(not_stableabi())]` For examples of using both [`#[derive(GetStaticEquivalent)]`][derive@GetStaticEquivalent] and [`#[sabi(not_stableabi())]`](#not_stableabi_attr) [look here](derive@GetStaticEquivalent#examples). 
[`NonExhaustive`]: ./nonexhaustive_enum/struct.NonExhaustive.html [`GetStaticEquivalent_`]: crate::abi_stability::get_static_equivalent::GetStaticEquivalent_ [`StableAbi trait`]: trait@StableAbi [`UnsafeOpaqueField`]: crate::abi_stability::stable_abi_trait::UnsafeOpaqueField [`UnsafeIgnoredType`]: crate::marker_type::UnsafeIgnoredType [`PrefixTypeTrait`]: crate::prefix_type::PrefixTypeTrait [`ExtraChecks`]: crate::abi_stability::extra_checks::ExtraChecks [`InterfaceType`]: crate::InterfaceType */ #[doc(inline)] pub use abi_stable_derive::StableAbi;
use super::{Model, Var, Domain, IntervalDomain, IntervalDom, BitDomain}; use std::cell::RefCell; #[test] fn creates_new_var() { let m = Model::new(); let x = Var::new(m.clone(), -2, 255, "x"); assert_eq!((x.id, x.min(), x.max()), (0, -2, 255)); assert_eq!(m.clone().vars.borrow().len(), 1); let y = Var::new(m.clone(), -2, 255, "y"); assert_eq!(y.id, 1); assert_eq!(m.clone().vars.borrow().len(), 2); } fn min_is_min(d: &IntervalDomain) -> bool { match d.dom.borrow().intervals.get(0) { &(x, _) => x == d.get_min() } } fn max_is_max(d: &IntervalDomain) -> bool { match d.dom.borrow().intervals.last() { Some(&(_, y)) => y == d.get_max(), _ => false } } fn setup_domain_simple() -> IntervalDomain { IntervalDomain { dom: RefCell::new(IntervalDom { min: -3, max: 64, intervals: vec![(-3, 2), (4, 42), (54, 64)] }) } } fn intervals_bounds_are_coherent(d: &IntervalDomain) { assert!(min_is_min(d)); assert!(max_is_max(d)); } #[test] fn sets_min_lower() { let d = setup_domain_simple(); d.set_min(-4); assert_eq!(d.get_min(), -3); intervals_bounds_are_coherent(&d); } #[test] fn sets_min_middle() { let d = setup_domain_simple(); let values = [-2, 8, 42, 54, 64]; let lengths = [3, 2, 2, 1, 1]; let mut v : int; for i in range(0, values.len()) { v = values[i]; d.set_min(v); assert_eq!(d.get_min(), v); assert_eq!(d.dom.borrow().intervals.len(), lengths[i]) } intervals_bounds_are_coherent(&d); } #[test] fn sets_min_in_hole() { let d = setup_domain_simple(); d.set_min(43); assert_eq!(d.get_min(), 54); intervals_bounds_are_coherent(&d); } #[test] // #[should_fail] fn sets_min_too_high() { let d = setup_domain_simple(); d.set_min(65); assert_eq!(d.get_min(), -3); intervals_bounds_are_coherent(&d); } #[test] fn sets_max_higher() { let d = setup_domain_simple(); d.set_max(65); assert_eq!(d.get_max(), 64); intervals_bounds_are_coherent(&d); } #[test] fn sets_max_middle() { let d = setup_domain_simple(); let values = [63, 54, 42, 8, -3]; let lengths = [3, 3, 2, 2, 1]; let mut v : int; for i in 
range(0, values.len()) { v = values[i]; d.set_max(v); assert_eq!(d.get_max(), v); assert_eq!(d.dom.borrow().intervals.len(), lengths[i]) } intervals_bounds_are_coherent(&d); } #[test] fn sets_max_in_hole() { let d = setup_domain_simple(); d.set_max(43); assert_eq!(d.get_max(), 42); intervals_bounds_are_coherent(&d); } #[test] // #[should_fail] fn sets_max_too_low() { let d = setup_domain_simple(); d.set_max(-4); assert_eq!(d.get_max(), 64); intervals_bounds_are_coherent(&d); } fn setup_domain_holy() -> IntervalDomain { IntervalDomain { dom: RefCell::new(IntervalDom { min: -3, max: 64, intervals: vec![(-3, 2), (4, 18), (20, 24), (30, 30), (32, 34), (36, 38), (40, 42), (54, 64)] }) } } #[test] fn remove_outside() { let d = setup_domain_holy(); let e = setup_domain_holy(); d.remove(-8); d.remove(3); d.remove(19); d.remove(31); d.remove(35); d.remove(48); d.remove(128); assert_eq!(d.dom.borrow().intervals.len(), e.dom.borrow().intervals.len()); for i in range(0, d.dom.borrow().intervals.len()) { assert_eq!(d.dom.borrow().intervals.get(i), e.dom.borrow().intervals.get(i)); } intervals_bounds_are_coherent(&d); } #[test] fn remove_inside() { let d = setup_domain_holy(); let values = [-3, -1, 30, 36, 64]; for &v in values.iter() { d.remove(v) } for &v in values.iter() { for &(x, y) in d.dom.borrow().intervals.iter() { assert!(v < x || v > y, format!("{} is not outside [{}..{}]", v, x, y)); } } assert_eq!(d.dom.borrow().intervals.len(), 8); intervals_bounds_are_coherent(&d); } #[test] #[should_fail] #[allow(unused_variable)] fn bitdomain_is_small() { let d : BitDomain = Domain::new(-5, 59); } fn setup_bitdomain_simple() -> BitDomain { return Domain::new(-4, 59); } fn assert_bitdomain_has_same_bounds_as_simple(d: BitDomain, min: Option<int>, max: Option<int>) { let s = setup_bitdomain_simple(); let test_min = match min { Some(m) => m, _ => s.get_min() }; let test_max = match max { Some(m) => m, _ => s.get_max() }; assert_eq!((d.get_min(), d.get_max()), (test_min, test_max)); 
} #[test] fn bitdomain_is_consistent() { let d = setup_bitdomain_simple(); assert_bitdomain_has_same_bounds_as_simple(d, None, None); } #[test] fn bitdomain_set_min_lower() { let d = setup_bitdomain_simple(); d.set_min(-8); assert_bitdomain_has_same_bounds_as_simple(d, None, None); } #[test] fn bitdomain_set_min_middle() { let d = setup_bitdomain_simple(); d.set_min(8); assert_bitdomain_has_same_bounds_as_simple(d, Some(8), None); } #[test] // #[should_fail] fn bitdomain_set_min_above() { let d = setup_bitdomain_simple(); d.set_min(68); assert_bitdomain_has_same_bounds_as_simple(d, None, None); } #[test] fn bitdomain_set_max_above() { let d = setup_bitdomain_simple(); d.set_max(68); assert_bitdomain_has_same_bounds_as_simple(d, None, None); } #[test] fn bitdomain_set_max_middle() { let d = setup_bitdomain_simple(); d.set_max(8); assert_bitdomain_has_same_bounds_as_simple(d, None, Some(8)); } #[test] // #[should_fail] fn bitdomain_set_max_below() { let d = setup_bitdomain_simple(); d.set_max(-8); assert_bitdomain_has_same_bounds_as_simple(d, None, None); }
#[doc = r"Value read from the register"] pub struct R { bits: u32, } #[doc = r"Value to write to the register"] pub struct W { bits: u32, } impl super::PPGPIO { #[doc = r"Modifies the contents of the register"] #[inline(always)] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); self.register.set(f(&R { bits }, &mut W { bits }).bits); } #[doc = r"Reads the contents of the register"] #[inline(always)] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r"Writes to the register"] #[inline(always)] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { self.register.set( f(&mut W { bits: Self::reset_value(), }) .bits, ); } #[doc = r"Reset value of the register"] #[inline(always)] pub const fn reset_value() -> u32 { 0 } #[doc = r"Writes the reset value to the register"] #[inline(always)] pub fn reset(&self) { self.register.set(Self::reset_value()) } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P0R { bits: bool, } impl SYSCTL_PPGPIO_P0R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P0W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P0W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 0); self.w.bits |= ((value as u32) & 1) << 0; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P1R { bits: bool, } impl SYSCTL_PPGPIO_P1R { #[doc = 
r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P1W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P1W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 1); self.w.bits |= ((value as u32) & 1) << 1; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P2R { bits: bool, } impl SYSCTL_PPGPIO_P2R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P2W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P2W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 2); self.w.bits |= ((value as u32) & 1) << 2; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P3R { bits: bool, } impl SYSCTL_PPGPIO_P3R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit 
is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P3W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P3W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 3); self.w.bits |= ((value as u32) & 1) << 3; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P4R { bits: bool, } impl SYSCTL_PPGPIO_P4R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P4W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P4W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 4); self.w.bits |= ((value as u32) & 1) << 4; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P5R { bits: bool, } impl SYSCTL_PPGPIO_P5R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set 
(1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P5W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P5W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 5); self.w.bits |= ((value as u32) & 1) << 5; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P6R { bits: bool, } impl SYSCTL_PPGPIO_P6R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P6W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P6W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 6); self.w.bits |= ((value as u32) & 1) << 6; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P7R { bits: bool, } impl SYSCTL_PPGPIO_P7R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P7W<'a> { 
w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P7W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 7); self.w.bits |= ((value as u32) & 1) << 7; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P8R { bits: bool, } impl SYSCTL_PPGPIO_P8R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P8W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P8W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 8); self.w.bits |= ((value as u32) & 1) << 8; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P9R { bits: bool, } impl SYSCTL_PPGPIO_P9R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P9W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P9W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a 
mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 9); self.w.bits |= ((value as u32) & 1) << 9; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P10R { bits: bool, } impl SYSCTL_PPGPIO_P10R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P10W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P10W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 10); self.w.bits |= ((value as u32) & 1) << 10; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P11R { bits: bool, } impl SYSCTL_PPGPIO_P11R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P11W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P11W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { 
self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 11); self.w.bits |= ((value as u32) & 1) << 11; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P12R { bits: bool, } impl SYSCTL_PPGPIO_P12R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P12W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P12W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 12); self.w.bits |= ((value as u32) & 1) << 12; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P13R { bits: bool, } impl SYSCTL_PPGPIO_P13R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P13W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P13W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut 
W { self.w.bits &= !(1 << 13); self.w.bits |= ((value as u32) & 1) << 13; self.w } } #[doc = r"Value of the field"] pub struct SYSCTL_PPGPIO_P14R { bits: bool, } impl SYSCTL_PPGPIO_P14R { #[doc = r"Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r"Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r"Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r"Proxy"] pub struct _SYSCTL_PPGPIO_P14W<'a> { w: &'a mut W, } impl<'a> _SYSCTL_PPGPIO_P14W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits &= !(1 << 14); self.w.bits |= ((value as u32) & 1) << 14; self.w } } impl R { #[doc = r"Value of the register as raw bits"] #[inline(always)] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 0 - GPIO Port A Present"] #[inline(always)] pub fn sysctl_ppgpio_p0(&self) -> SYSCTL_PPGPIO_P0R { let bits = ((self.bits >> 0) & 1) != 0; SYSCTL_PPGPIO_P0R { bits } } #[doc = "Bit 1 - GPIO Port B Present"] #[inline(always)] pub fn sysctl_ppgpio_p1(&self) -> SYSCTL_PPGPIO_P1R { let bits = ((self.bits >> 1) & 1) != 0; SYSCTL_PPGPIO_P1R { bits } } #[doc = "Bit 2 - GPIO Port C Present"] #[inline(always)] pub fn sysctl_ppgpio_p2(&self) -> SYSCTL_PPGPIO_P2R { let bits = ((self.bits >> 2) & 1) != 0; SYSCTL_PPGPIO_P2R { bits } } #[doc = "Bit 3 - GPIO Port D Present"] #[inline(always)] pub fn sysctl_ppgpio_p3(&self) -> SYSCTL_PPGPIO_P3R { let bits = ((self.bits >> 3) & 1) != 0; SYSCTL_PPGPIO_P3R { bits } } #[doc = "Bit 4 - GPIO Port E Present"] #[inline(always)] pub fn sysctl_ppgpio_p4(&self) -> SYSCTL_PPGPIO_P4R { let bits = ((self.bits >> 4) 
& 1) != 0; SYSCTL_PPGPIO_P4R { bits } } #[doc = "Bit 5 - GPIO Port F Present"] #[inline(always)] pub fn sysctl_ppgpio_p5(&self) -> SYSCTL_PPGPIO_P5R { let bits = ((self.bits >> 5) & 1) != 0; SYSCTL_PPGPIO_P5R { bits } } #[doc = "Bit 6 - GPIO Port G Present"] #[inline(always)] pub fn sysctl_ppgpio_p6(&self) -> SYSCTL_PPGPIO_P6R { let bits = ((self.bits >> 6) & 1) != 0; SYSCTL_PPGPIO_P6R { bits } } #[doc = "Bit 7 - GPIO Port H Present"] #[inline(always)] pub fn sysctl_ppgpio_p7(&self) -> SYSCTL_PPGPIO_P7R { let bits = ((self.bits >> 7) & 1) != 0; SYSCTL_PPGPIO_P7R { bits } } #[doc = "Bit 8 - GPIO Port J Present"] #[inline(always)] pub fn sysctl_ppgpio_p8(&self) -> SYSCTL_PPGPIO_P8R { let bits = ((self.bits >> 8) & 1) != 0; SYSCTL_PPGPIO_P8R { bits } } #[doc = "Bit 9 - GPIO Port K Present"] #[inline(always)] pub fn sysctl_ppgpio_p9(&self) -> SYSCTL_PPGPIO_P9R { let bits = ((self.bits >> 9) & 1) != 0; SYSCTL_PPGPIO_P9R { bits } } #[doc = "Bit 10 - GPIO Port L Present"] #[inline(always)] pub fn sysctl_ppgpio_p10(&self) -> SYSCTL_PPGPIO_P10R { let bits = ((self.bits >> 10) & 1) != 0; SYSCTL_PPGPIO_P10R { bits } } #[doc = "Bit 11 - GPIO Port M Present"] #[inline(always)] pub fn sysctl_ppgpio_p11(&self) -> SYSCTL_PPGPIO_P11R { let bits = ((self.bits >> 11) & 1) != 0; SYSCTL_PPGPIO_P11R { bits } } #[doc = "Bit 12 - GPIO Port N Present"] #[inline(always)] pub fn sysctl_ppgpio_p12(&self) -> SYSCTL_PPGPIO_P12R { let bits = ((self.bits >> 12) & 1) != 0; SYSCTL_PPGPIO_P12R { bits } } #[doc = "Bit 13 - GPIO Port P Present"] #[inline(always)] pub fn sysctl_ppgpio_p13(&self) -> SYSCTL_PPGPIO_P13R { let bits = ((self.bits >> 13) & 1) != 0; SYSCTL_PPGPIO_P13R { bits } } #[doc = "Bit 14 - GPIO Port Q Present"] #[inline(always)] pub fn sysctl_ppgpio_p14(&self) -> SYSCTL_PPGPIO_P14R { let bits = ((self.bits >> 14) & 1) != 0; SYSCTL_PPGPIO_P14R { bits } } } impl W { #[doc = r"Writes raw bits to the register"] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { 
self.bits = bits; self } #[doc = "Bit 0 - GPIO Port A Present"] #[inline(always)] pub fn sysctl_ppgpio_p0(&mut self) -> _SYSCTL_PPGPIO_P0W { _SYSCTL_PPGPIO_P0W { w: self } } #[doc = "Bit 1 - GPIO Port B Present"] #[inline(always)] pub fn sysctl_ppgpio_p1(&mut self) -> _SYSCTL_PPGPIO_P1W { _SYSCTL_PPGPIO_P1W { w: self } } #[doc = "Bit 2 - GPIO Port C Present"] #[inline(always)] pub fn sysctl_ppgpio_p2(&mut self) -> _SYSCTL_PPGPIO_P2W { _SYSCTL_PPGPIO_P2W { w: self } } #[doc = "Bit 3 - GPIO Port D Present"] #[inline(always)] pub fn sysctl_ppgpio_p3(&mut self) -> _SYSCTL_PPGPIO_P3W { _SYSCTL_PPGPIO_P3W { w: self } } #[doc = "Bit 4 - GPIO Port E Present"] #[inline(always)] pub fn sysctl_ppgpio_p4(&mut self) -> _SYSCTL_PPGPIO_P4W { _SYSCTL_PPGPIO_P4W { w: self } } #[doc = "Bit 5 - GPIO Port F Present"] #[inline(always)] pub fn sysctl_ppgpio_p5(&mut self) -> _SYSCTL_PPGPIO_P5W { _SYSCTL_PPGPIO_P5W { w: self } } #[doc = "Bit 6 - GPIO Port G Present"] #[inline(always)] pub fn sysctl_ppgpio_p6(&mut self) -> _SYSCTL_PPGPIO_P6W { _SYSCTL_PPGPIO_P6W { w: self } } #[doc = "Bit 7 - GPIO Port H Present"] #[inline(always)] pub fn sysctl_ppgpio_p7(&mut self) -> _SYSCTL_PPGPIO_P7W { _SYSCTL_PPGPIO_P7W { w: self } } #[doc = "Bit 8 - GPIO Port J Present"] #[inline(always)] pub fn sysctl_ppgpio_p8(&mut self) -> _SYSCTL_PPGPIO_P8W { _SYSCTL_PPGPIO_P8W { w: self } } #[doc = "Bit 9 - GPIO Port K Present"] #[inline(always)] pub fn sysctl_ppgpio_p9(&mut self) -> _SYSCTL_PPGPIO_P9W { _SYSCTL_PPGPIO_P9W { w: self } } #[doc = "Bit 10 - GPIO Port L Present"] #[inline(always)] pub fn sysctl_ppgpio_p10(&mut self) -> _SYSCTL_PPGPIO_P10W { _SYSCTL_PPGPIO_P10W { w: self } } #[doc = "Bit 11 - GPIO Port M Present"] #[inline(always)] pub fn sysctl_ppgpio_p11(&mut self) -> _SYSCTL_PPGPIO_P11W { _SYSCTL_PPGPIO_P11W { w: self } } #[doc = "Bit 12 - GPIO Port N Present"] #[inline(always)] pub fn sysctl_ppgpio_p12(&mut self) -> _SYSCTL_PPGPIO_P12W { _SYSCTL_PPGPIO_P12W { w: self } } #[doc = "Bit 13 - GPIO 
Port P Present"] #[inline(always)] pub fn sysctl_ppgpio_p13(&mut self) -> _SYSCTL_PPGPIO_P13W { _SYSCTL_PPGPIO_P13W { w: self } } #[doc = "Bit 14 - GPIO Port Q Present"] #[inline(always)] pub fn sysctl_ppgpio_p14(&mut self) -> _SYSCTL_PPGPIO_P14W { _SYSCTL_PPGPIO_P14W { w: self } } }
use rand::Rng;

/// Returns a uniformly distributed random value in `[0, 1)`.
pub fn random_double() -> f64 {
    rand::thread_rng().gen()
}

/// Returns a uniformly distributed random value in `[min, max)`.
pub fn random_double_range(min: f64, max: f64) -> f64 {
    rand::thread_rng().gen_range(min, max)
}

/// Restricts `x` to `[min, max]`: values below `min` become `min`,
/// values above `max` become `max`, everything else passes through.
pub fn clamp(x: f64, min: f64, max: f64) -> f64 {
    match x {
        v if v < min => min,
        v if v > max => max,
        v => v,
    }
}
use similar::TextDiff;

/// Diffs two small documents line-by-line and prints every change as pretty JSON.
fn main() {
    let old = "Hello World\nThis is the second line.\nThis is the third.";
    let new = "Hallo Welt\nThis is the second line.\nThis is life.\nMoar and more";
    let diff = TextDiff::from_lines(old, new);

    // Flatten the per-op change iterators into one list of changes.
    let mut all_changes = Vec::new();
    for op in diff.ops() {
        all_changes.extend(diff.iter_changes(op));
    }

    let json = serde_json::to_string_pretty(&all_changes).unwrap();
    println!("{}", json);
}
// Data-model submodules. The names suggest a spaceflight/launch-tracking API
// (APOD, crews, expeditions, launches, rockets, spacecraft) — confirm against
// the upstream service this crate wraps.
pub mod apod;
pub mod common;
pub mod crew;
pub mod event;
pub mod expedition;
pub mod launch;
pub mod launcher;
pub mod manufacturer;
pub mod mission;
pub mod rocket;
pub mod spacecraft;
pub mod traits;
pub mod url;
use criterion::{
    criterion_group, criterion_main, measurement::Measurement, BatchSize, Bencher, BenchmarkId,
    Criterion,
};
use criterion_cycles_per_byte::CyclesPerByte;

use utils::{compile, run, setup, setup_and_compile};

mod utils;

/// Benchmarks the compiler alone: source text in, compiled program out.
fn benchmark_compiler<M: Measurement>(c: &mut Criterion<M>, prefix: &str) {
    let mut group = c.benchmark_group(format!("{prefix}: Compiler"));

    // The trivial program is cheap to compile, so many samples are affordable.
    group.sample_size(100);
    group.bench_function("hello_world", |b| {
        b.compile(r#"main _ := "Hello, world!""#);
    });

    // The fibonacci program is more expensive; fewer samples keep runtime bounded.
    group.sample_size(20);
    let fibonacci_code = create_fibonacci_code(15);
    group.bench_function("fibonacci", |b| b.compile(&fibonacci_code));

    group.finish();
}

/// Benchmarks end-to-end VM execution of pre-compiled programs.
fn benchmark_vm_runtime<M: Measurement>(c: &mut Criterion<M>, prefix: &str) {
    let mut group = c.benchmark_group(format!("{prefix}: VM Runtime"));

    // This is a macro so that we can accept a string or `BenchmarkId`.
    macro_rules! benchmark {
        // Direct form: id + source code + sample size.
        ($id:expr, $source_code:expr, $sample_size:expr $(,)?) => {
            group.sample_size($sample_size);
            group.bench_function($id, |b| b.run_vm($source_code));
        };
        // Parameterized form: the id is tagged with the parameter and the
        // source code is produced by a factory function taking that parameter.
        ($id:expr, $parameter:expr, $source_code_factory:expr, $sample_size:expr $(,)?) => {
            benchmark!(
                BenchmarkId::new($id, $parameter),
                &$source_code_factory($parameter),
                $sample_size,
            );
        };
    }

    benchmark!("hello_world", r#"main _ := "Hello, world!""#, 100);
    benchmark!("fibonacci", 15, create_fibonacci_code, 20);
    benchmark!("PLB/binarytrees", 6, create_binary_trees_code, 10);

    group.finish();
}

/// Generates (benchmark-target language) source computing `fib(n)` via
/// self-passing recursion.
fn create_fibonacci_code(n: usize) -> String {
    format!(
        r#"[ifElse, int] = use "Core" fibRec = {{ fibRec n -> ifElse (n | int.isLessThan 2) {{ n }} {{ fibRec fibRec (n | int.subtract 1) | int.add (fibRec fibRec (n | int.subtract 2)) }} }} fib n = needs (int.is n) fibRec fibRec n main _ := fib {n}"#,
    )
}

/// https://programming-language-benchmarks.vercel.app/problem/binarytrees
fn create_binary_trees_code(n: usize) -> String {
    format!(
        r#" [channel, equals, if, ifElse, int, iterable, recursive, result, struct, text] = use "Core" createTree n := needs (int.is n) needs (int.isNonNegative n) recursive n {{ recurse n -> ifElse (n | equals 0) {{ [] }} {{ nextSize = n | int.subtract 1 [Left: recurse nextSize, Right: recurse nextSize] }} }} checkTree tree := needs (struct.is tree) recursive tree {{ recurse tree -> left = tree | struct.get Left | result.mapOr {{ it -> recurse it }} 0 right = tree | struct.get Right | result.mapOr {{ it -> recurse it }} 0 1 | int.add left | int.add right }} main _ := n = {n} minDepth = 4 maxDepth = n | int.coerceAtLeast (minDepth | int.add 2) _ = depth = maxDepth | int.add 1 tree = createTree depth longLivedTree = createTree maxDepth recursive minDepth {{ recurse depth -> if (depth | int.isLessThanOrEqualTo maxDepth) {{ iterations = 1 | int.shiftLeft (maxDepth | int.subtract depth | int.add minDepth) check = iterable.generate iterations {{ _ -> createTree depth | checkTree }} | iterable.sum recurse (depth | int.add 2) }} }} "#,
    )
}

/// Convenience methods so benchmarks read as `b.compile(...)` / `b.run_vm(...)`.
trait BencherExtension {
    fn compile(&mut self, source_code: &str);
    fn run_vm(&mut self, source_code: &str);
}

impl<'a, M: Measurement> BencherExtension for Bencher<'a, M> {
    // Fresh database per iteration so compilation caches don't skew timings.
    fn compile(&mut self, source_code: &str) {
        self.iter_batched(
            setup,
            |mut db| compile(&mut db, source_code),
            BatchSize::SmallInput,
        )
    }

    // Compilation happens in the (untimed) setup closure; only `run` is measured.
    fn run_vm(&mut self, source_code: &str) {
        self.iter_batched(
            || setup_and_compile(source_code),
            run,
            BatchSize::SmallInput,
        )
    }
}

fn run_benchmarks<M: Measurement>(c: &mut Criterion<M>, prefix: &str) {
    benchmark_compiler(c, prefix);
    benchmark_vm_runtime(c, prefix);
}

fn run_cycle_benchmarks(c: &mut Criterion<CyclesPerByte>) {
    run_benchmarks(c, "Cycles");
}

criterion_group!(
    name = cycle_benchmarks;
    config = Criterion::default().with_measurement(CyclesPerByte);
    targets = run_cycle_benchmarks,
);

fn run_time_benchmarks(c: &mut Criterion) {
    run_benchmarks(c, "Time");
}

criterion_group!(time_benchmarks, run_time_benchmarks);

// Cycle-based measurement is defined above but currently disabled here.
// criterion_main!(cycle_benchmarks, time_benchmarks);
criterion_main!(time_benchmarks);
/* * Synchronize flashing of a firefly swarm * * Fireflies are stored as a swarm in FireflyWorld. * The whole swarm is a hashmap, mapping an ID to the firefly object. * This allows us to access specific firefly members during the recieve * command. * * The traditional Firefly Algorithm is an optimization metaheuristic which * optimizes over all fireflies, not those just in the sight range, against some * cost or evaluation function. * * We propose using a sight range to do synchronization and movement instead of * a straight optimization. * */ extern crate sekai; use sekai::world::World; use sekai::entity::Entity; use std::collections::HashMap; #[derive(Debug)] struct FireflyWorld { firefly_swarm: Vec<Firefly>, } impl World<Color> for FireflyWorld { // todo: figure out if a firefly can see another firefly fn update(&mut self) { // Iterate through all fireflies in a specifc range, // average color // determine which firefles are near the current iteration // of the firefly and then average the message, then pass the message /* for (id, firefly) in &mut self.firefly_swarm { println!("Updating firefly {}", id); }*/ /* to be implemented for each firefly loop through the rest of the fire flys check all x,y cordinates and calculate the coordinate */ // if a firefly is in sync for a long enough time, add a new firefly // TODO: how to implement: // if fireflies flash at the same time, then they move closer // if they are within a close enough radius, birth new firefly // if a firefly's life is <= 0, remove it // TODO: Execute this in the checking loop } // returns the number of fireflies in the swarm fn num_entities(&self) -> usize { self.firefly_swarm.len() } // calls receive message on every firefly fn receive_message(&mut self, message: Color) { for firefly in &mut self.firefly_swarm { firefly.receive_message(message.clone()); } } } impl FireflyWorld { // birth of new entity fn add_entity(&mut self, firefly: Firefly) { self.firefly_swarm.push(firefly); } // death of 
some entity fn remove_entity(&mut self, idx: usize) { self.firefly_swarm.swap_remove(idx); } } #[derive(Clone, Debug)] struct Color { red: f32, green: f32, blue: f32, pos: Vec<f32>, } impl Color { fn new(num_dimensions: usize) -> Self { Color{ red: 50_f32, green: 50_f32, blue: 50_f32, pos: Vec::with_capacity(num_dimensions), } } } impl std::ops::Mul<f32> for Color { type Output = Color; fn mul(self, rhs: f32) -> Self { Color { red: self.red * rhs, green: self.green * rhs, blue: self.blue * rhs, pos: self.pos, } } } #[derive(Debug)] struct Firefly { pos: Vec<f32>, color: Color, // RGB flash_cooldown: u8, // initial flash cooldown cur_flash_cooldown: u8, // number of ticks to wait before next flash flash_rate: u8, // the amt by which flash cooldown decreases lifetime: u8, // the number of ticks a firefly lives for sight_range: f32, // how far a firefly can see, radius reproduction_range: f32, // for far a firefly must be to reproduce } impl Firefly { // constructor fn new(num_dimensions: usize) -> Self { Firefly { pos: Vec::with_capacity(num_dimensions), color: Color::new(num_dimensions), flash_cooldown: 100_u8, // TODO: placeholder cur_flash_cooldown: 100_u8, // TODO: placeholder flash_rate: 1_u8, // TODO: placeholder lifetime: 500_u8, // TODO: placeholder sight_range: 50_f32, // TODO: placeholder reproduction_range: 5_f32, // TODO: placeholder } } } /// Fireflies communicate with lights, represented in the /// tuple (RGB) impl Entity<Color> for Firefly { // todo: receive message, send message, fn update(&mut self, world: &World<Color>) { // tick down life self.lifetime -= 1; // tick down flash cooldown self.cur_flash_cooldown -= self.flash_rate; if self.cur_flash_cooldown == 0 { self.cur_flash_cooldown = self.flash_cooldown; } } fn receive_message(&mut self, message: Color) { // If a firefly sees some color, it must by some logic // Placeholder logic for now // for now, alpha will be some scalar adjustment let alpha: f32 = 1e-2; // If all message lights that were 
received were averaged by the world: // Scale the averaged message by some alpha step size self.color = message * alpha; // If received, reset cur_flash_cooldown self.cur_flash_cooldown = self.flash_cooldown; // how to update flash rate? should this even be parameterized? // TODO: update position based on the message } } fn main() { println!("This is the main function"); } #[test] fn test_world_update() { let mut world = FireflyWorld { firefly_swarm: Vec::new(), }; for _ in 0..10 { world.add_entity(Firefly::new(2)); world.update(); } println!("{:?}", world.firefly_swarm); assert_eq!(world.firefly_swarm.len(), 10); }
// Copyright (c) 2016 The Rouille developers // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT // license <LICENSE-MIT or http://opensource.org/licenses/MIT>, // at your option. All files in the project carrying such // notice may not be copied, modified, or distributed except // according to those terms. //! Apply content encodings (such as gzip compression) to the response. //! //! This module provides access to the content encodings supported by a request as well as //! a function to automatically apply common content encodings to a response. //! # Basic example //! //! Here is a basic example showing how to use content encodings: //! //! ``` //! use rouille::Request; //! use rouille::Response; //! use rouille::content_encoding; //! //! fn handle_request(request: &Request) -> Response { //! let response = Response::text("Hello world"); //! content_encoding::apply(&request, response) //! } //! ``` use input; use Request; use Response; /// Applies content encoding to the response. /// /// Analyzes the `Accept-Encoding` header of the request. If one of the encodings is recognized and /// supported by rouille, it adds a `Content-Encoding` header to the `Response` and encodes its /// body. /// /// If the response already has a `Content-Encoding` header, this function is a no-op. /// If the response has a `Content-Type` header that isn't textual content, this function is a /// no-op. /// /// The gzip encoding is supported only if you enable the `gzip` feature of rouille (which is /// enabled by default). /// /// # Example /// /// ```rust /// use rouille::content_encoding; /// use rouille::Request; /// use rouille::Response; /// /// fn handle(request: &Request) -> Response { /// content_encoding::apply(request, Response::text("hello world")) /// } /// ``` pub fn apply(request: &Request, mut response: Response) -> Response { // Only text should be encoded. Otherwise just return. 
if !response_is_text(&response) { return response; } // If any of the response's headers is equal to `Content-Encoding`, ignore the function // call and return immediately. if response .headers .iter() .any(|&(ref key, _)| key.eq_ignore_ascii_case("Content-Encoding")) { return response; } // Now let's get the list of content encodings accepted by the request. // The list should be ordered from the most desired to the least desired. let encoding_preference = ["br", "gzip", "x-gzip", "identity"]; let accept_encoding_header = request.header("Accept-Encoding").unwrap_or(""); if let Some(preferred_index) = input::priority_header_preferred( accept_encoding_header, encoding_preference.iter().cloned(), ) { match encoding_preference[preferred_index] { "br" => brotli(&mut response), "gzip" | "x-gzip" => gzip(&mut response), _ => (), } } response } // Returns true if the Content-Type of the response is a type that should be encoded. // Since encoding is purely an optimization, it's not a problem if the function sometimes has // false positives or false negatives. 
fn response_is_text(response: &Response) -> bool {
    // A response counts as "text" if any Content-Type header matches one of
    // the known textual/compressible media types.
    response.headers.iter().any(|&(ref key, ref value)| {
        if !key.eq_ignore_ascii_case("Content-Type") {
            return false;
        }

        let content_type = value.to_lowercase();
        content_type.starts_with("text/")
            || content_type.contains("javascript")
            || content_type.contains("json")
            || content_type.contains("xml")
            || content_type.contains("font")
    })
}

// Compresses the response body with gzip and tags it with a matching
// Content-Encoding header. The whole body is buffered in memory first.
#[cfg(feature = "gzip")]
fn gzip(response: &mut Response) {
    use deflate::deflate_bytes_gzip;
    use std::io;
    use std::mem;
    use ResponseBody;

    response
        .headers
        .push(("Content-Encoding".into(), "gzip".into()));
    // Swap the body out so we can consume it as a reader.
    let previous_body = mem::replace(&mut response.data, ResponseBody::empty());
    let (mut raw_data, size) = previous_body.into_reader_and_size();
    let mut src = match size {
        Some(size) => Vec::with_capacity(size),
        None => Vec::new(),
    };
    io::copy(&mut raw_data, &mut src).expect("Failed reading response body while gzipping");
    let zipped = deflate_bytes_gzip(&src);
    response.data = ResponseBody::from_data(zipped);
}

// No-op fallback when the `gzip` feature is disabled.
#[cfg(not(feature = "gzip"))]
#[inline]
fn gzip(response: &mut Response) {}

// Compresses the response body with Brotli (streaming, not buffered) and tags
// it with a matching Content-Encoding header.
#[cfg(feature = "brotli")]
fn brotli(response: &mut Response) {
    use brotli::enc::reader::CompressorReader;
    use std::mem;
    use ResponseBody;

    response
        .headers
        .push(("Content-Encoding".into(), "br".into()));
    let previous_body = mem::replace(&mut response.data, ResponseBody::empty());
    let (raw_data, _) = previous_body.into_reader_and_size();
    // Using default Brotli parameters: 0 buffer_size == 4096, compression level 6, lgwin == 22
    response.data = ResponseBody::from_reader(CompressorReader::new(raw_data, 0, 6, 22));
}

// No-op fallback when the `brotli` feature is disabled.
#[cfg(not(feature = "brotli"))]
#[inline]
fn brotli(response: &mut Response) {}

#[cfg(test)]
mod tests {
    use content_encoding;
    use Request;
    use Response;

    // TODO: more tests for encoding stuff

    #[test]
    fn text_response() {
        assert!(content_encoding::response_is_text(&Response::text("")));
    }

    #[test]
    fn non_text_response() {
        assert!(!content_encoding::response_is_text(&Response::from_data(
            "image/jpeg", ""
        )));
    }

    // A request without Accept-Encoding must pass through unencoded.
    #[test]
    fn no_req_encodings() {
        let request = Request::fake_http("GET", "/", vec![], vec![]);
        let response = Response::html("<p>Hello world</p>");
        let encoded_response = content_encoding::apply(&request, response);
        assert!(!encoded_response
            .headers
            .iter()
            .any(|(header_name, _)| header_name == "Content-Encoding")); // No Content-Encoding header
        let mut encoded_content = vec![];
        encoded_response
            .data
            .into_reader_and_size()
            .0
            .read_to_end(&mut encoded_content)
            .unwrap();
        assert_eq!(
            String::from_utf8(encoded_content).unwrap(),
            "<p>Hello world</p>"
        ); // No encoding applied
    }

    // An empty Accept-Encoding header must also pass through unencoded.
    #[test]
    fn empty_req_encodings() {
        let request = {
            let h = vec![("Accept-Encoding".to_owned(), "".to_owned())];
            Request::fake_http("GET", "/", h, vec![])
        };
        let response = Response::html("<p>Hello world</p>");
        let encoded_response = content_encoding::apply(&request, response);
        assert!(!encoded_response
            .headers
            .iter()
            .any(|(header_name, _)| header_name == "Content-Encoding")); // No Content-Encoding header
        let mut encoded_content = vec![];
        encoded_response
            .data
            .into_reader_and_size()
            .0
            .read_to_end(&mut encoded_content)
            .unwrap();
        assert_eq!(
            String::from_utf8(encoded_content).unwrap(),
            "<p>Hello world</p>"
        ); // No encoding applied
    }

    // An unrecognized encoding must pass through unencoded.
    #[test]
    fn multi_req_encoding() {
        let request = {
            let h = vec![("Accept-Encoding".to_owned(), "foo".to_owned())];
            Request::fake_http("GET", "/", h, vec![])
        };
        let response = Response::html("<p>Hello world</p>");
        let encoded_response = content_encoding::apply(&request, response);
        assert!(!encoded_response
            .headers
            .iter()
            .any(|(header_name, _)| header_name == "Content-Encoding")); // No Content-Encoding header
        let mut encoded_content = vec![];
        encoded_response
            .data
            .into_reader_and_size()
            .0
            .read_to_end(&mut encoded_content)
            .unwrap();
        assert_eq!(
            String::from_utf8(encoded_content).unwrap(),
            "<p>Hello world</p>"
        ); // No encoding applied
    }

    // With both x-gzip and br offered, br wins (it is first in the preference list).
    #[test]
    fn unknown_req_encoding() {
        let request = {
            let h = vec![("Accept-Encoding".to_owned(), "x-gzip, br".to_owned())];
            Request::fake_http("GET", "/", h, vec![])
        };
        let response = Response::html("<p>Hello world</p>");
        let encoded_response = content_encoding::apply(&request, response);
        assert!(encoded_response
            .headers
            .contains(&("Content-Encoding".into(), "br".into()))); // Brotli Content-Encoding header
    }

    #[test]
    fn brotli_encoding() {
        let request = {
            let h = vec![("Accept-Encoding".to_owned(), "br".to_owned())];
            Request::fake_http("GET", "/", h, vec![])
        };
        let response = Response::html(
            "<html><head><title>Hello world</title><body><p>Hello world</p></body></html>",
        );
        let encoded_response = content_encoding::apply(&request, response);
        assert!(encoded_response
            .headers
            .contains(&("Content-Encoding".into(), "br".into()))); // Brotli Content-Encoding header
        let mut encoded_content = vec![];
        encoded_response
            .data
            .into_reader_and_size()
            .0
            .read_to_end(&mut encoded_content)
            .unwrap();
        assert_eq!(
            encoded_content,
            vec![
                27, 75, 0, 0, 4, 28, 114, 164, 129, 5, 210, 206, 25, 30, 90, 114, 224, 114, 73,
                109, 45, 196, 23, 126, 240, 144, 77, 40, 26, 211, 228, 67, 73, 40, 236, 55, 101,
                254, 127, 147, 194, 129, 132, 65, 130, 120, 152, 249, 68, 56, 93, 2
            ]
        ); // Applied proper Brotli encoding
    }

    #[test]
    fn gzip_encoding() {
        let request = {
            let h = vec![("Accept-Encoding".to_owned(), "gzip".to_owned())];
            Request::fake_http("GET", "/", h, vec![])
        };
        let response = Response::html(
            "<html><head><title>Hello world</title><body><p>Hello world</p></body></html>",
        );
        let encoded_response = content_encoding::apply(&request, response);
        assert!(encoded_response
            .headers
            .contains(&("Content-Encoding".into(), "gzip".into()))); // gzip Content-Encoding header
        let mut encoded_content = vec![];
        encoded_response
            .data
            .into_reader_and_size()
            .0
            .read_to_end(&mut encoded_content)
            .unwrap();
        // The 10-byte Gzip header contains an OS ID and a 4 byte timestamp
        // which are not stable, so we skip them in this comparison. Doing a
        // literal compare here is slightly silly, but the `deflate` crate has
        // no public decompressor functions for us to test a round-trip
        assert_eq!(
            encoded_content[10..],
            vec![
                179, 201, 40, 201, 205, 177, 179, 201, 72, 77, 76, 177, 179, 41, 201, 44, 201, 73,
                181, 243, 72, 205, 201, 201, 87, 40, 207, 47, 202, 73, 177, 209, 135, 8, 217, 36,
                229, 167, 84, 218, 217, 20, 160, 202, 21, 216, 217, 232, 67, 36, 244, 193, 166, 0,
                0, 202, 239, 44, 120, 76, 0, 0, 0
            ]
        ); // Applied proper gzip encoding
    }
}
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. /// A gateway associated with an IPsec public key. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum Gateway<'message> { /// As an Internet Protocol version 6 address. InternetProtocolVersion4(Ipv4Addr), /// As an Internet Protocol version 6 address. InternetProtocolVersion6(Ipv6Addr), /// As a domain name. DomainName(WithoutCompressionParsedName<'message>) }
use std::sync::Mutex;

use sourcerenderer_core::input::Key;
use sourcerenderer_core::platform::Event;
use sourcerenderer_core::{
  Platform,
  Vec2,
  Vec2I,
};

use crate::bitset_core::BitSet;

/// Thread-safe input front-end: folds platform events into a shared
/// state snapshot that can be copied out with [`Input::poll`].
pub struct Input {
  // All mutable input data sits behind one mutex so the event producer
  // and pollers may run on different threads.
  state: Mutex<InputState>,
}

impl Input {
  /// Creates the input handler; mouse locking is enabled by default.
  pub fn new() -> Self {
    let input_state = InputState {
      lock_mouse: true,
      ..Default::default()
    };

    Self {
      state: Mutex::new(input_state),
    }
  }

  /// Applies a single platform event to the shared input state.
  ///
  /// NOTE(review): any `Event` variant not matched below panics via
  /// `unreachable!()` — confirm the caller filters the remaining
  /// variants before dispatching here.
  pub fn process_input_event<P: Platform>(&self, event: Event<P>) {
    let mut input_guard = self.state.lock().unwrap();
    match event {
      Event::KeyDown(key) => {
        input_guard.keyboard_keys.bit_set(key as usize);
      }
      Event::KeyUp(key) => {
        input_guard.keyboard_keys.bit_reset(key as usize);
      }
      Event::MouseMoved(position) => {
        input_guard.mouse_pos = position;
      }
      Event::FingerDown(finger_index) => {
        // One bit per finger in `fingers_down`.
        input_guard.fingers_down |= 1 << finger_index;
      }
      Event::FingerUp(finger_index) => {
        input_guard.fingers_down &= !(1 << finger_index);
      }
      Event::FingerMoved { index, position } => {
        // Fingers beyond the fixed-size slot array are silently ignored.
        if (index as usize) < input_guard.finger_pos.len() {
          input_guard.finger_pos[index as usize] = position;
        }
      }
      _ => unreachable!(),
    }
  }

  /// Returns a copy of the current input state.
  pub fn poll(&self) -> InputState {
    self.state.lock().unwrap().clone()
  }
}

/// Plain-data snapshot of keyboard, mouse and touch state.
#[derive(Clone, Default)]
pub struct InputState {
  // One bit per key: 4 * 32 = 128 key slots, indexed by `Key as usize`.
  keyboard_keys: [u32; 4],
  mouse_pos: Vec2I,
  // One bit per mouse button.
  mouse_buttons: u32,
  // One bit per finger.
  fingers_down: u32,
  // Positions for up to 6 concurrent fingers.
  finger_pos: [Vec2; 6],
  lock_mouse: bool,
}

impl InputState {
  pub fn new() -> Self {
    Self::default()
  }

  /*pub fn set_mouse_lock(&mut self, is_locked: bool) {
    self.lock_mouse = is_locked;
  }*/

  /// Whether the mouse cursor is currently locked to the window.
  pub fn mouse_locked(&self) -> bool {
    self.lock_mouse
  }

  /*pub fn set_key_down(&mut self, key: Key, is_down: bool) {
    if is_down {
      self.keyboard_keys.bit_set(key as usize);
    } else {
      self.keyboard_keys.bit_reset(key as usize);
    }
  }

  pub fn set_finger_down(&mut self, finger_index: u32, is_down: bool) {
    if is_down {
      self.fingers_down.bit_set(finger_index as usize);
    } else {
      self.fingers_down.bit_reset(finger_index as usize);
    }
  }

  pub fn set_mouse_button_down(&mut self, mouse_button: u32, is_down: bool) {
    if is_down {
      self.mouse_buttons.bit_set(mouse_button as usize);
    } else {
      self.mouse_buttons.bit_reset(mouse_button as usize);
    }
  }

  pub fn set_mouse_pos(&mut self, position: Vec2I) {
    self.mouse_pos = position;
  }

  pub fn set_finger_position(&mut self, finger_index: u32, position: Vec2) {
    self.finger_pos[finger_index as usize] = position;
  }*/

  /// True if `key`'s bit is set in the keyboard bitset.
  pub fn is_key_down(&self, key: Key) -> bool {
    self.keyboard_keys.bit_test(key as usize)
  }

  /// True if the given finger is currently touching.
  pub fn is_finger_down(&self, finger_index: u32) -> bool {
    self.fingers_down.bit_test(finger_index as usize)
  }

  /// True if the given mouse button is currently pressed.
  pub fn is_mouse_down(&self, mouse_button: u32) -> bool {
    self.mouse_buttons.bit_test(mouse_button as usize)
  }

  pub fn mouse_position(&self) -> Vec2I {
    self.mouse_pos
  }

  // NOTE(review): indexes without a bounds check, unlike the guarded
  // write in `process_input_event` — panics for finger_index >= 6.
  pub fn finger_position(&self, finger_index: u32) -> Vec2 {
    self.finger_pos[finger_index as usize]
  }
}
/// Defines a fixed-size secret byte-array newtype `$name` holding `$len`
/// bits (`$len / 8` bytes), with secure-random construction, a zeroizing
/// `Drop`, equality, and a truncated `Debug`.
#[doc(hidden)]
#[macro_export]
macro_rules! named_array {
    ($name:ident, $len:expr) => {
        #[derive(Clone)]
        pub struct $name(pub [u8; $len / 8]);

        #[allow(dead_code)]
        impl $name {
            /// Size of the array in bytes.
            pub const BYTES: usize = $len / 8;
            /// Size of the array in bits.
            pub const BITS: usize = $len;

            /// Create from a secure random source.
            pub fn random() -> Self {
                use rand::Rng as _;
                let mut ret = [0u8; Self::BYTES];
                rand::thread_rng().fill(&mut ret);
                Self(ret)
            }

            /// Create from a slice of exactly the right length, without additional processing.
            ///
            /// Panics (inside `copy_from_slice`) if `data.len() != Self::BYTES`.
            pub fn from_slice(data: &[u8]) -> Self {
                let mut ret = [0u8; Self::BYTES];
                ret.copy_from_slice(data);
                Self(ret)
            }
        }

        impl Drop for $name {
            // Wipe the secret bytes from memory when the value is dropped.
            fn drop(&mut self) {
                use zeroize::Zeroize;
                (&mut self.0[..]).zeroize()
            }
        }

        impl PartialEq for $name {
            // NOTE(review): plain slice comparison, not constant-time —
            // confirm timing side channels are acceptable for these secrets.
            fn eq(&self, other: &Self) -> bool {
                &self.0[..] == &other.0[..]
            }
        }

        impl std::fmt::Debug for $name {
            // Shows only the first 8 bytes so the full secret never lands in logs.
            // NOTE(review): assumes $len >= 64 — smaller arrays would panic here.
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, "{}({:?}...)", stringify!($name), &self.0[..8])
            }
        }
    };
}
mod utils;

use wasm_bindgen::prelude::*;
use serde::{Serialize,Deserialize};

// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
// allocator.
#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;

// JavaScript functions imported into the wasm module.
#[wasm_bindgen]
extern {
    // Browser `window.alert`.
    fn alert(s: &str);

    // Browser `console.log`.
    #[wasm_bindgen(js_namespace = console)]
    pub fn log(s: &str);
}

/// Demo export: logs and alerts a greeting.
#[wasm_bindgen]
pub fn greet() {
    log("Hello, wasm!");
    alert("Hello, wasm!");
}

/// Returns a `Duck`, which crosses the wasm boundary as a bare `u32`
/// via the hand-written ABI impls at the bottom of this file.
#[wasm_bindgen]
pub fn get_version() -> Duck {
    Duck { index: 4 }
}

/// Returns a `Dog` serialized into a JS object through serde.
#[wasm_bindgen]
pub fn get_dog() -> JsValue {
    let dog = Dog { index: 10 };
    JsValue::from_serde(&dog).unwrap()
}

// #[wasm_bindgen]
// pub fn set_dom_inner(dom: HtmlElement) {
//     dom.set_inner_html("This is from Rust");
// }

#[derive(Serialize, Deserialize)]
pub struct Dog {
    index: i32
}

pub struct Duck {
    index: i32
}

// Hand-rolled wasm-bindgen ABI: a Duck is described and passed as a u32.
impl wasm_bindgen::describe::WasmDescribe for Duck {
    fn describe() {
        u32::describe()
    }
}

impl wasm_bindgen::convert::IntoWasmAbi for Duck {
    type Abi = u32;

    // Only `index` survives the boundary; the JS side receives a number.
    fn into_abi(self) -> u32 {
        self.index as u32
    }
}
// NOTE(review): this file is written in pre-1.0 (circa 2011) Rust:
// `import`, `vec[mutable int]`, `v.(0)` indexing and statement-form
// `assert` are all long-removed syntax. Presumably a historical
// compiler test for `vec::reverse`/`vec::reversed`; kept verbatim —
// it will not compile on any modern rustc.
use std;
import std::vec;

fn main() {
    let v: vec[mutable int] = [mutable 10, 20];
    assert (v.(0) == 10);
    assert (v.(1) == 20);
    // In-place reversal of a mutable vector.
    vec::reverse(v);
    assert (v.(0) == 20);
    assert (v.(1) == 10);
    // `reversed` returns a fresh reversed copy.
    let v2 = vec::reversed[int]([10, 20]);
    assert (v2.(0) == 20);
    assert (v2.(1) == 10);
    // Mutating the original must not affect the copy.
    v.(0) = 30;
    assert (v2.(0) == 20);

    // Make sure they work with 0-length vectors too.
    let v4 = vec::reversed[int]([]);
    let v3: vec[mutable int] = vec::empty_mut();
    vec::reverse[int](v3);
}
use proconio::{fastout, input};
use std::collections::{BTreeSet, HashMap};

// Simulate a walk on an `h` x `w` grid (1-based coordinates) containing
// `n` wall cells, answering `q` moves of the form "go `len` cells in
// direction `d`, stopping early at a wall or the grid edge".
//
// Walls are indexed twice — per row and per column — as sorted sets, so
// the nearest blocking wall for each move is a single BTreeSet range query.
#[fastout]
fn main() {
    input! {
        h: usize,
        w: usize,
        r_s: usize,
        c_s: usize,
        n: usize,
        walls: [(usize, usize); n],
        q: usize,
    };

    // rows[r] = columns of walls in row r; cols[c] = rows of walls in column c.
    let mut rows = HashMap::<usize, BTreeSet<usize>>::new();
    let mut cols = HashMap::<usize, BTreeSet<usize>>::new();
    for (r, c) in walls {
        rows.entry(r).or_default().insert(c);
        cols.entry(c).or_default().insert(r);
    }

    let mut ans = Vec::new();
    // Current position, starting at (r_s, c_s).
    let (mut r, mut c) = (r_s, c_s);
    for _ in 0..q {
        input! {
            d: char,
            len: usize,
        };
        match d {
            'L' => {
                // Stop one cell right of the nearest wall to the left,
                // or at column 1, or after `len` steps — whichever is nearest.
                if let Some(row) = rows.get(&r) {
                    if let Some(left) = row.range(..c).last() {
                        // c - len
                        c = (left + 1).max(c.saturating_sub(len));
                    } else {
                        c = 1.max(c.saturating_sub(len));
                    }
                } else {
                    c = 1.max(c.saturating_sub(len));
                }
            }
            'R' => {
                // Mirror of 'L': nearest wall to the right, clamped to column w.
                if let Some(row) = rows.get(&r) {
                    if let Some(right) = row.range((c + 1)..).next() {
                        // c + len
                        c = (right - 1).min(c + len);
                    } else {
                        c = w.min(c + len);
                    }
                } else {
                    c = w.min(c + len);
                }
            }
            'U' => {
                // Same as 'L' but along the column, moving towards row 1.
                if let Some(col) = cols.get(&c) {
                    if let Some(up) = col.range(..r).last() {
                        // r - len
                        r = (up + 1).max(r.saturating_sub(len));
                    } else {
                        r = 1.max(r.saturating_sub(len));
                    }
                } else {
                    r = 1.max(r.saturating_sub(len));
                }
            }
            'D' => {
                // Same as 'R' but along the column, moving towards row h.
                if let Some(col) = cols.get(&c) {
                    if let Some(down) = col.range((r + 1)..).next() {
                        // r + len
                        r = (down - 1).min(r + len);
                    } else {
                        r = h.min(r + len);
                    }
                } else {
                    r = h.min(r + len);
                }
            }
            _ => {
                unreachable!();
            }
        }
        ans.push((r, c));
    }

    // Report the position after every move.
    for (r, c) in ans {
        println!("{} {}", r, c);
    }
}
use noria::SyncControllerHandle;
use std::collections::BTreeMap;
use std::thread;
use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};

/// Minimal Noria vote benchmark: installs an article/vote recipe via
/// ZooKeeper, ensures one article exists, then loops forever inserting
/// votes and periodically printing how many inserts landed in each
/// elapsed-second bucket.
fn main() {
    // inline recipe definition
    let sql1 = "Article: CREATE TABLE Article (aid int, title varchar(255), \
                url text, PRIMARY KEY(aid));";
    // two internal views: you cannot query these directly from clients, but you can write
    // other view definitions that use them.
    let sql2 = "Vote: CREATE TABLE Vote (aid int, uid int);";
    let sql3 = "VoteCount: SELECT Vote.aid, COUNT(uid) AS votes \
                FROM Vote GROUP BY Vote.aid;";
    // externally queryable materialized view
    let sql4 = "QUERY ArticleWithVoteCount: \
                SELECT Article.aid, title, url, VoteCount.votes AS votes \
                FROM Article, VoteCount \
                WHERE Article.aid = VoteCount.aid AND Article.aid = ?;";

    // set up Noria via recipe
    let rt = tokio::runtime::Runtime::new().unwrap();
    let executor = rt.executor();
    let mut db = SyncControllerHandle::from_zk("127.0.0.1:2181/basicdist", executor).unwrap();
    db.extend_recipe(sql1).unwrap();
    db.extend_recipe(sql2).unwrap();
    db.extend_recipe(sql3).unwrap();
    db.extend_recipe(sql4).unwrap();
    println!("{}", db.graphviz().unwrap());

    // Reconnect-and-retry helper: fetch a view handle, rebuilding the
    // controller handle from ZooKeeper on every failure.
    let executor = rt.executor();
    let get_view = move |b: &mut SyncControllerHandle<_, _>, n| loop {
        match b.view(n) {
            Ok(v) => return v.into_sync(),
            Err(_) => {
                thread::sleep(Duration::from_millis(50));
                *b = SyncControllerHandle::from_zk("127.0.0.1:2181/basicdist", executor.clone())
                    .unwrap();
            }
        }
    };

    // Same retry strategy for base-table handles.
    let executor = rt.executor();
    let get_table = move |b: &mut SyncControllerHandle<_, _>, n| loop {
        match b.table(n) {
            Ok(v) => return v.into_sync(),
            Err(_) => {
                thread::sleep(Duration::from_millis(50));
                *b = SyncControllerHandle::from_zk("127.0.0.1:2181/basicdist", executor.clone())
                    .unwrap();
            }
        }
    };

    // Get mutators and getter.
    let mut vote = get_table(&mut db, "Vote");
    let mut article = get_table(&mut db, "Article");
    let mut awvc = get_view(&mut db, "ArticleWithVoteCount");

    println!("Creating article...");
    let aid = 1;
    // Make sure the article exists:
    if awvc.lookup(&[aid.into()], true).unwrap().is_empty() {
        println!("Creating new article...");
        let title = "test title";
        let url = "http://pdos.csail.mit.edu";
        article
            .insert(vec![aid.into(), title.into(), url.into()])
            .unwrap();
    }

    let start = Instant::now();
    let mut last_print = Instant::now();
    // Each entry records, for one insert, the whole seconds elapsed since start.
    let mut times = Vec::new();
    loop {
        // Every ~5 seconds, print a histogram of inserts per elapsed-second bucket.
        let elapsed = last_print.elapsed();
        if elapsed >= Duration::from_secs(5) {
            last_print += elapsed;
            let mut counts = BTreeMap::new();
            for t in &times {
                *counts.entry(t).or_insert(0) += 1;
            }
            for (k, c) in counts {
                println!("{}: {}", k, c);
            }
            println!("---------")
        }

        // Use the current unix timestamp as a (non-unique) voter id.
        let uid = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs() as i64;
        // Retry the insert with a fresh table handle until it succeeds.
        while let Err(_) = vote.insert(vec![aid.into(), uid.into()]) {
            vote = get_table(&mut db, "Vote");
        }
        times.push(start.elapsed().as_secs());
        // thread::sleep(Duration::from_millis(1000));
        // while let Err(_) = awvc.lookup(&[1.into()], false) {
        //     awvc = get_view(&mut db, "ArticleWithVoteCount");
        // }
        // println!("  Done");
    }
}
//! Stress diagrams: forces and moments.
//! (Original module doc: "Эпюры напряжений, силы, моменты".)

use crate::sig::HasWrite;
use nom::{
    bytes::complete::take,
    number::complete::{le_f32, le_i16, le_u8},
    IResult,
};
use std::fmt;

/// One fragment of a load diagram, a fixed-size little-endian record.
#[derive(Debug)]
pub struct Diagram {
    load_time: u8,       // Load duration: 0=permanent, 1=long-term, 2=short-term, 200=wind1, 201=wind2
    force_type: u8,      // Load type: 1=concentrated, 4=distributed (per unit length), 5=moment
    force_val_1: f32,    // Load value at the first point
    force_pos_1: f32,    // Position of the first value along the line, from 0 to 1
    force_val_2: f32,    // Load value at the second point
    force_pos_2: f32,    // Position of the second value along the line, from 0 to 1
    diagram_next: i16,   // Index of the next diagram fragment; -1 = this node is the last
    diagram_prev: i16,   // Index of the previous diagram fragment; -1 = this node is the first
    force_direction: u8, // Direction of the applied force: 0=vertical, 1=moment, 2=horizontal
    cons_1: i16,         // Always -1
    //10b WS
    ws: Vec<u8>, // 10 bytes of raw workspace/padding, preserved verbatim
}

impl HasWrite for Diagram {
    // Serializes the record back to its on-disk little-endian layout —
    // the exact inverse of `read_diagram` (round-trip checked in tests).
    fn write(&self) -> Vec<u8> {
        let mut out: Vec<u8> = vec![];
        out.extend(&self.load_time.to_le_bytes());
        out.extend(&self.force_type.to_le_bytes());
        out.extend(&self.force_val_1.to_le_bytes());
        out.extend(&self.force_pos_1.to_le_bytes());
        out.extend(&self.force_val_2.to_le_bytes());
        out.extend(&self.force_pos_2.to_le_bytes());
        out.extend(&self.diagram_next.to_le_bytes());
        out.extend(&self.diagram_prev.to_le_bytes());
        out.extend(&self.force_direction.to_le_bytes());
        out.extend(&self.cons_1.to_le_bytes());
        // Only the first 10 workspace bytes are written back.
        out.extend(&self.ws[0..10]);
        out
    }
    fn name(&self) -> &str {
        ""
    }
}

impl fmt::Display for Diagram {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "time {}, type {}, dir: {}, val: |{} - {}|, pos: |{} - {}|, |{} <-> {}|",
            &self.load_time,
            &self.force_type,
            &self.force_direction,
            &self.force_val_1,
            &self.force_val_2,
            &self.force_pos_1,
            &self.force_pos_2,
            &self.diagram_prev,
            &self.diagram_next
        )?;
        // No-op; kept for uniformity with sibling Display impls.
        write!(f, "")
    }
}

/// Parses one `Diagram` record from a little-endian byte stream.
pub fn read_diagram(i: &[u8]) -> IResult<&[u8], Diagram> {
    let (i, load_time) = le_u8(i)?;
    let (i, force_type) = le_u8(i)?;
    let (i, force_val_1) = le_f32(i)?;
    let (i, force_pos_1) = le_f32(i)?;
    let (i, force_val_2) = le_f32(i)?;
    let (i, force_pos_2) = le_f32(i)?;
    let (i, diagram_next) = le_i16(i)?;
    let (i, diagram_prev) = le_i16(i)?;
    let (i, force_direction) = le_u8(i)?;
    let (i, cons_1) = le_i16(i)?;
    let (i, ws) = take(10u8)(i)?; //10b WS
    let ws = ws.to_vec();
    Ok((
        i,
        Diagram {
            load_time,
            force_type,
            force_val_1,
            force_pos_1,
            force_val_2,
            force_pos_2,
            diagram_next,
            diagram_prev,
            force_direction,
            cons_1,
            ws,
        },
    ))
}

// Round-trip helper: parse a fixture file and assert `write` reproduces it.
#[cfg(test)]
fn test_diagram(path_str: &str) {
    use crate::tests::rab_e_sig_test::read_test_sig;
    let original_in = read_test_sig(path_str);
    let (_, diagram) = read_diagram(&original_in).expect("couldn't read_diagram");
    assert_eq!(original_in, diagram.write());
}

#[test]
fn diagram_test() {
    test_diagram("test_sig/diagrams/diagram.test");
}

#[test]
fn diagram_wind_test() {
    test_diagram("test_sig/diagrams/diagram_wind.test");
}

#[test]
fn diagram_wind2_test() {
    test_diagram("test_sig/diagrams/diagram_wind2.test");
}

// Field-by-field check against a hand-constructed record.
#[test]
fn s_diagram_full_value_test() {
    use crate::tests::rab_e_sig_test::read_test_sig;
    let original_in = read_test_sig("test_sig/diagrams/S_diagram.test");
    let (_, diagram) = read_diagram(&original_in).expect("couldn't read_diagram");
    let mut ws = vec![];
    for i in 1..=10 {
        ws.push(i);
    }
    let c_diagram = Diagram {
        load_time: 200u8,
        force_type: 5u8,
        force_direction: 1u8,
        force_val_1: 0.831_617_5,
        force_val_2: 0.0f32,
        force_pos_1: 0.5f32,
        force_pos_2: 0.0f32,
        diagram_prev: 3i16,
        diagram_next: 13i16,
        cons_1: -1i16,
        ws,
    };
    assert_eq!(diagram.write(), c_diagram.write())
}
use byteorder::NetworkEndian;

use crate::io::BufMut;
use crate::postgres::protocol::Write;

/// The PostgreSQL `SSLRequest` startup message, which asks the server
/// to switch the connection to TLS before any other traffic.
#[derive(Debug)]
pub struct SslRequest;

impl Write for SslRequest {
    fn write(&self, buf: &mut Vec<u8>) {
        // packet length: 8 bytes including self
        buf.put_u32::<NetworkEndian>(8);

        // The SSLRequest code defined by the wire protocol: 1234 in the
        // high 16 bits, 5679 in the low 16 bits (i.e. 80877103).
        buf.put_u32::<NetworkEndian>((1234 << 16) | 5679);
    }
}

#[test]
fn test_ssl_request() {
    // 00 00 00 08 = length 8; 04 d2 = 1234; 16 2f = 5679 ('/' is 0x2f).
    let mut buf = Vec::new();
    SslRequest.write(&mut buf);
    assert_eq!(&buf, b"\x00\x00\x00\x08\x04\xd2\x16/");
}
// Public submodules (contents live in `py.rs` / `xml.rs`).
// NOTE(review): names suggest Python- and XML-targeting backends —
// confirm against the submodule sources.
pub mod py;
pub mod xml;
#![windows_subsystem = "windows"]
#![allow(unused_imports, unreachable_code, unused_variables, dead_code)]

use crate::ui::Ui;
use std::cell::RefCell;

mod controller;
mod model;
mod model_config;
mod model_gamemode;
#[path = "ui_apiw.rs"]
mod ui;
mod view;
mod view_assets;

/// The model-view-controller system specialised to this game's concrete types.
type GameMVC = domino::mvc::MVCSystem<model::Model, view::View, controller::Controller>;

/// Owns the game's MVC system for the lifetime of the process.
struct Game {
    pub mvc: GameMVC,
}

impl Game {
    /// Builds the model first, then the view and controller that observe it.
    fn new() -> Self {
        let model = model::Model::new();
        let view = view::View::new(&model);
        let controller = controller::Controller::new(&model);
        let mvc = GameMVC::new(model, view, controller);
        Game { mvc }
    }
}

thread_local! {
    // One game instance per (UI) thread, created lazily on first access.
    static THE_GAME: RefCell<Game> = RefCell::new(Game::new());
}

/// Entry point: initialises logging and the UI, then blocks in the
/// Windows event loop until the application exits.
fn main() -> apiw::Result<()> {
    env_logger::init();

    Ui::initialization()?;
    Ui::run_event_loop()?;

    Ok(())
}
#[macro_use]
extern crate cql;

// Panics if a CQL response is an error frame, printing the server's
// error message (or a placeholder when the body is not an error body).
macro_rules! assert_response(
    ($resp:expr) => (
        if match $resp.opcode { cql::OpcodeResponse::OpcodeError => true, _ => false } {
            panic!("Test failed at assertion: {}",
                match $resp.body { cql::CqlResponseBody::ResponseError(_, message) => message, _ => Cow::Borrowed("Ooops!")});
        }
    );
);

// Unwraps a library call, panicking with the error's description on failure.
// NOTE(review): `Error::description()` is deprecated in modern Rust — prefer
// `Display` formatting when this is next touched. `$msg` is accepted but
// currently unused.
macro_rules! try_test(
    ($call: expr, $msg: expr) => {
        match $call {
            Ok(val) => val,
            Err(ref err) => panic!("Test failed at library call: {}", err.description())
        };
    }
);

mod test_cql;
pub type IMILBitmapEffect = *mut ::core::ffi::c_void; pub type IMILBitmapEffectConnections = *mut ::core::ffi::c_void; pub type IMILBitmapEffectConnectionsInfo = *mut ::core::ffi::c_void; pub type IMILBitmapEffectConnector = *mut ::core::ffi::c_void; pub type IMILBitmapEffectConnectorInfo = *mut ::core::ffi::c_void; pub type IMILBitmapEffectEvents = *mut ::core::ffi::c_void; pub type IMILBitmapEffectFactory = *mut ::core::ffi::c_void; pub type IMILBitmapEffectGroup = *mut ::core::ffi::c_void; pub type IMILBitmapEffectGroupImpl = *mut ::core::ffi::c_void; pub type IMILBitmapEffectImpl = *mut ::core::ffi::c_void; pub type IMILBitmapEffectInputConnector = *mut ::core::ffi::c_void; pub type IMILBitmapEffectInteriorInputConnector = *mut ::core::ffi::c_void; pub type IMILBitmapEffectInteriorOutputConnector = *mut ::core::ffi::c_void; pub type IMILBitmapEffectOutputConnector = *mut ::core::ffi::c_void; pub type IMILBitmapEffectOutputConnectorImpl = *mut ::core::ffi::c_void; pub type IMILBitmapEffectPrimitive = *mut ::core::ffi::c_void; pub type IMILBitmapEffectPrimitiveImpl = *mut ::core::ffi::c_void; pub type IMILBitmapEffectRenderContext = *mut ::core::ffi::c_void; pub type IMILBitmapEffectRenderContextImpl = *mut ::core::ffi::c_void; pub type IMILBitmapEffects = *mut ::core::ffi::c_void; #[doc = "*Required features: `\"Win32_UI_Wpf\"`*"] pub const CLSID_MILBitmapEffectBevel: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xfd361dbe_6c9b_4de0_8290_f6400c2737ed); #[doc = "*Required features: `\"Win32_UI_Wpf\"`*"] pub const CLSID_MILBitmapEffectBlur: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xa924df87_225d_4373_8f5b_b90ec85ae3de); #[doc = "*Required features: `\"Win32_UI_Wpf\"`*"] pub const CLSID_MILBitmapEffectDropShadow: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0x459a3fbe_d8ac_4692_874b_7a265715aa16); #[doc = "*Required features: `\"Win32_UI_Wpf\"`*"] pub const CLSID_MILBitmapEffectEmboss: 
::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xcd299846_824f_47ec_a007_12aa767f2816); #[doc = "*Required features: `\"Win32_UI_Wpf\"`*"] pub const CLSID_MILBitmapEffectGroup: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xac9c1a9a_7e18_4f64_ac7e_47cf7f051e95); #[doc = "*Required features: `\"Win32_UI_Wpf\"`*"] pub const CLSID_MILBitmapEffectOuterGlow: ::windows_sys::core::GUID = ::windows_sys::core::GUID::from_u128(0xe2161bdd_7eb6_4725_9c0b_8a2a1b4f0667); #[doc = "*Required features: `\"Win32_UI_Wpf\"`*"] pub const MILBITMAPEFFECT_SDK_VERSION: u32 = 16777216u32; #[repr(C)] #[doc = "*Required features: `\"Win32_UI_Wpf\"`*"] pub struct MILMatrixF { pub _11: f64, pub _12: f64, pub _13: f64, pub _14: f64, pub _21: f64, pub _22: f64, pub _23: f64, pub _24: f64, pub _31: f64, pub _32: f64, pub _33: f64, pub _34: f64, pub _41: f64, pub _42: f64, pub _43: f64, pub _44: f64, } impl ::core::marker::Copy for MILMatrixF {} impl ::core::clone::Clone for MILMatrixF { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_UI_Wpf\"`*"] pub struct MilPoint2D { pub X: f64, pub Y: f64, } impl ::core::marker::Copy for MilPoint2D {} impl ::core::clone::Clone for MilPoint2D { fn clone(&self) -> Self { *self } } #[repr(C)] #[doc = "*Required features: `\"Win32_UI_Wpf\"`*"] pub struct MilRectD { pub left: f64, pub top: f64, pub right: f64, pub bottom: f64, } impl ::core::marker::Copy for MilRectD {} impl ::core::clone::Clone for MilRectD { fn clone(&self) -> Self { *self } }
extern crate std;
extern crate libc;

use libkvm::mem::MemorySlot;

use ::utils::memory;

/// A KVM guest-memory slot backed by a host allocation obtained from
/// `memory::vm_memory_alloc`; the allocation is owned by this struct
/// and released in `Drop`.
pub struct MmapMemorySlot {
    memory_size: usize,
    guest_address: u64,
    // Raw pointer to the host mapping; valid for `memory_size` bytes.
    host_address: *mut libc::c_void,
    slot: u32,
    flags: u32,
}

impl MmapMemorySlot {
    /// Allocates `memory_size` bytes of host memory and wraps it as a slot.
    pub fn new(memory_size: usize, guest_address: u64, slot: u32, flags: u32) -> MmapMemorySlot {
        let host_address = memory::vm_memory_alloc(memory_size);

        MmapMemorySlot {
            memory_size: memory_size,
            guest_address: guest_address,
            host_address,
            slot,
            flags,
        }
    }

    /// Views the whole host allocation as a mutable byte slice.
    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: `host_address` points to a live allocation of exactly
        // `memory_size` bytes, established in `new` and freed only in Drop.
        unsafe { std::slice::from_raw_parts_mut(self.host_address as *mut u8, self.memory_size) }
    }
}

// Plain accessors required by the libkvm `MemorySlot` trait.
impl MemorySlot for MmapMemorySlot {
    fn slot_id(&self) -> u32 {
        self.slot
    }

    fn flags(&self) -> u32 {
        self.flags
    }

    fn memory_size(&self) -> usize {
        self.memory_size
    }

    fn guest_address(&self) -> u64 {
        self.guest_address
    }

    fn host_address(&self) -> u64 {
        self.host_address as u64
    }
}

impl Drop for MmapMemorySlot {
    // Returns the host mapping to the allocator when the slot goes away.
    fn drop(&mut self) {
        memory::anon_ram_free(self.host_address, self.memory_size);
    }
}
use cstree::build::GreenNodeBuilder;
use cstree::{green::GreenNode, interning::Resolver};

use crate::lexer::{Lexer, Token, TokenKind};

/// Syntax kinds for the arenas-info grammar: the lexer's token kinds
/// followed by the composite node kinds produced by the parser.
#[derive(cstree::Syntax, Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[repr(u32)]
pub enum SyntaxKind {
    Whitespace,
    #[static_text("\n")]
    Newline,
    LineComment,
    BlockComment,
    String,
    QuotedString,
    #[static_text("{")]
    LeftBrace,
    #[static_text("}")]
    RightBrace,
    Error,
    // Composite (node) kinds below; never produced by the lexer.
    Root,
    Arenas,
    Arena,
    KeyValuePair,
    Key,
    Value,
}

impl ::core::convert::From<TokenKind> for SyntaxKind {
    // Lexer tokens map 1:1 onto the leading (token) syntax kinds.
    fn from(kind: TokenKind) -> Self {
        match kind {
            TokenKind::Whitespace => Self::Whitespace,
            TokenKind::Newline => Self::Newline,
            TokenKind::LineComment => Self::LineComment,
            TokenKind::BlockComment => Self::BlockComment,
            TokenKind::String => Self::String,
            TokenKind::QuotedString => Self::QuotedString,
            TokenKind::LeftBrace => Self::LeftBrace,
            TokenKind::RightBrace => Self::RightBrace,
            TokenKind::Error => Self::Error,
        }
    }
}

pub type ArenasInfoSyntax = SyntaxKind;

/// Result of a parse: the green tree, the interner needed to resolve
/// token text, and any errors collected along the way.
#[derive(Debug)]
pub struct ParseResult<I> {
    green_node: GreenNode,
    resolver: I,
    errors: Vec<String>,
}

/// Parses `text` into a green tree plus its resolver.
pub fn parse(text: &str) -> ParseResult<impl Resolver> {
    let parser = Parser::new(text);
    parser.parse()
}

#[derive(Debug)]
struct Parser<'input> {
    lexer: Lexer<'input>,
    builder: GreenNodeBuilder<'static, 'static, ArenasInfoSyntax>,
    errors: Vec<String>,
}

impl<'input> Parser<'input> {
    fn new(text: &'input str) -> Self {
        Self {
            lexer: Lexer::new(text),
            builder: GreenNodeBuilder::new(),
            errors: Vec::new(),
        }
    }

    // Consumes one token from the lexer into the tree and returns it so
    // the caller can drive the loop; tokens with fixed text go through
    // the cheaper `static_token` path.
    fn bump(&mut self) -> Option<Token> {
        let token = self.lexer.next();
        if let Some(token) = token {
            match token.kind() {
                TokenKind::Whitespace => self.token(token),
                TokenKind::Newline => self.static_token(SyntaxKind::Newline),
                TokenKind::LineComment => self.token(token),
                TokenKind::BlockComment => self.token(token),
                TokenKind::String => self.token(token),
                TokenKind::QuotedString => self.token(token),
                TokenKind::LeftBrace => self.static_token(SyntaxKind::LeftBrace),
                TokenKind::RightBrace => self.static_token(SyntaxKind::RightBrace),
                TokenKind::Error => self.token(token),
            }
        }
        token
    }

    // Adds a token together with its source text.
    fn token(&mut self, token: Token) {
        self.builder.token(token.kind().into(), token.text());
    }

    // Adds a token whose text is statically known (e.g. "{", "\n").
    fn static_token(&mut self, syntax_kind: SyntaxKind) {
        self.builder.static_token(syntax_kind);
    }

    // Currently a trivial parser: wraps the whole token stream in a
    // single `Root` node without building any inner structure.
    fn parse(mut self) -> ParseResult<impl Resolver> {
        self.builder.start_node(SyntaxKind::Root);
        //self.builder.start_node(SyntaxKind::Error);
        while let Some(_token) = self.bump() {}
        //self.builder.finish_node();
        self.builder.finish_node();
        let (tree, cache) = self.builder.finish();
        ParseResult {
            green_node: tree,
            resolver: cache.unwrap().into_interner().unwrap(),
            errors: self.errors,
        }
    }
}

pub type SyntaxNode = cstree::syntax::SyntaxNode<ArenasInfoSyntax>;
pub type SyntaxToken = cstree::syntax::SyntaxToken<ArenasInfoSyntax>;
pub type SyntaxElement = cstree::syntax::SyntaxElement<ArenasInfoSyntax>;

#[cfg(test)]
mod tests {
    use super::*;

    // Debug helper, intentionally left disabled: it always fails via
    // `assert!(false)` so the dbg!-printed tree is shown when enabled.
    //#[test]
    fn test_parse() {
        let parse = parse(" \t\n//foo\n/*bar*/{hurz\"hurz\"_");
        let root = SyntaxNode::new_root_with_resolver(parse.green_node, parse.resolver);
        dbg!(root);
        assert!(false);
    }
}
// Copyright 2018 Austin Bonander <austin.bonander@gmail.com>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(missing_docs)]

mod std_buf;

#[cfg(feature = "slice-deque")]
mod slice_deque_buf;

use self::std_buf::StdBuf;

#[cfg(feature = "slice-deque")]
use self::slice_deque_buf::SliceDequeBuf;

/// Buffer backend: either a plain `Vec`-style buffer, or (with the
/// `slice-deque` feature) a ring buffer.
pub enum BufImpl {
    Std(StdBuf),
    #[cfg(feature = "slice-deque")]
    Ringbuf(SliceDequeBuf),
}

// Generates one method on `BufImpl` that forwards to whichever variant is
// active. Four arms cover the &self / &mut self and safe / unsafe
// combinations; `[$($passargs)*]` repeats the argument names for the
// inner call, since macros cannot split them out of the signature.
macro_rules! forward_method {
    (pub fn $fnname:ident(&self $($args:tt)*) [$($passargs:tt)*] $(-> $ret:ty)*) => {
        pub fn $fnname(&self $($args)*) $(-> $ret)* {
            match *self {
                BufImpl::Std(ref buf) => buf.$fnname($($passargs)*),
                #[cfg(feature = "slice-deque")]
                BufImpl::Ringbuf(ref buf) => buf.$fnname($($passargs)*),
            }
        }
    };

    (pub fn $fnname:ident(&mut self $($args:tt)*) [$($passargs:tt)*] $(-> $ret:ty)*) => {
        pub fn $fnname(&mut self $($args)*) $(-> $ret)* {
            match *self {
                BufImpl::Std(ref mut buf) => buf.$fnname($($passargs)*),
                #[cfg(feature = "slice-deque")]
                BufImpl::Ringbuf(ref mut buf) => buf.$fnname($($passargs)*),
            }
        }
    };

    (pub unsafe fn $fnname:ident(&self $($args:tt)*) [$($passargs:tt)*] $(-> $ret:ty)*) => {
        pub unsafe fn $fnname(&self $($args)*) $(-> $ret)* {
            match *self {
                BufImpl::Std(ref buf) => buf.$fnname($($passargs)*),
                #[cfg(feature = "slice-deque")]
                BufImpl::Ringbuf(ref buf) => buf.$fnname($($passargs)*),
            }
        }
    };

    (pub unsafe fn $fnname:ident(&mut self $($args:tt)*) [$($passargs:tt)*] $(-> $ret:ty)*) => {
        pub unsafe fn $fnname(&mut self $($args)*) $(-> $ret)* {
            match *self {
                BufImpl::Std(ref mut buf) => buf.$fnname($($passargs)*),
                #[cfg(feature = "slice-deque")]
                BufImpl::Ringbuf(ref mut buf) => buf.$fnname($($passargs)*),
            }
        }
    };
}

// Expands a semicolon-separated list of signatures through `forward_method!`.
macro_rules! forward_methods {
    ($($($qualifiers:ident)+ ($($args:tt)*) [$($passargs:tt)*] $(-> $ret:ty)*);+;) => (
        $(forward_method! {
            $($qualifiers)+ ($($args)*) [$($passargs)*] $(-> $ret)*
        })*
    )
}

impl BufImpl {
    /// Creates a `Vec`-backed buffer.
    pub fn with_capacity(cap: usize) -> Self {
        BufImpl::Std(StdBuf::with_capacity(cap))
    }

    /// Creates a ring-buffer-backed buffer (feature-gated).
    #[cfg(feature = "slice-deque")]
    pub fn with_capacity_ringbuf(cap: usize) -> Self {
        BufImpl::Ringbuf(SliceDequeBuf::with_capacity(cap))
    }

    /// True if this buffer uses the ring-buffer backend.
    pub fn is_ringbuf(&self) -> bool {
        match *self {
            #[cfg(feature = "slice-deque")]
            BufImpl::Ringbuf(_) => true,
            _ => false,
        }
    }

    // The shared buffer API, forwarded verbatim to the active backend.
    forward_methods! {
        pub fn capacity(&self)[] -> usize;
        pub fn len(&self)[] -> usize;
        pub fn usable_space(&self)[] -> usize;
        pub fn reserve(&mut self, additional: usize)[additional] -> bool;
        pub fn make_room(&mut self)[];
        pub fn buf(&self)[] -> &[u8];
        pub fn buf_mut(&mut self)[] -> &mut [u8];
        pub unsafe fn write_buf(&mut self)[] -> &mut [u8];
        pub unsafe fn bytes_written(&mut self, add: usize)[add];
        pub fn consume(&mut self, amt: usize)[amt];
    }
}
/// Prints each search term on its own line.
fn main() {
    let initial = ["sunrise", "hiking"];
    let mut terms: Vec<&str> = initial.to_vec();
    terms.push("abc");

    terms.iter().for_each(|term| println!("{}", term));
}
use std::error::Error as StdErr;
use std::fmt;
use std::io;
use std::result::Result as StdRes;
use std::sync::mpsc::{RecvError, SendError, TryRecvError};

/// Crate-wide result alias.
pub type Result<T> = StdRes<T, Error>;

/// All errors the ledger store can produce.
#[derive(Debug)]
pub enum Error {
    /// Underlying I/O failure.
    Io(io::Error),
    /// (De)serialization failure — the on-disk store may be corrupted.
    Corrupted(bincode::Error),
    /// Failure on an internal channel (send or receive side).
    Channel(Box<dyn StdErr + Sync + Send>),
    /// The requested item is not present in the ledger.
    Missing,
    /// A write batch reached its fixed capacity (payload: the capacity).
    WriteBatchFull(usize),
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Error::Corrupted(_) => write!(f, "Serialization error: Store may be corrupted"),
            Error::Channel(e) => write!(f, "Internal communication error: {}", e),
            Error::Io(e) => write!(f, "I/O error: {}", e),
            Error::Missing => write!(f, "Item not present in ledger"),
            Error::WriteBatchFull(capacity) => write!(f, "WriteBatch capacity {} full", capacity),
        }
    }
}

impl StdErr for Error {
    // Exposes the wrapped cause where one exists.
    fn source(&self) -> Option<&(dyn StdErr + 'static)> {
        match self {
            Error::Io(e) => Some(e),
            Error::Corrupted(ref e) => Some(e),
            Error::Channel(e) => Some(e.as_ref()),
            Error::Missing => None,
            Error::WriteBatchFull(_) => None,
        }
    }
}

impl From<io::Error> for Error {
    fn from(e: io::Error) -> Self {
        Error::Io(e)
    }
}

// `BufWriter::into_inner` failures collapse into the underlying I/O error.
impl<W> From<io::IntoInnerError<W>> for Error {
    fn from(e: io::IntoInnerError<W>) -> Self {
        Error::Io(e.into())
    }
}

impl From<bincode::Error> for Error {
    fn from(e: bincode::Error) -> Self {
        Error::Corrupted(e)
    }
}

// mpsc failures (send, blocking receive, non-blocking receive) are all
// boxed into the generic `Channel` variant.
impl<T> From<SendError<T>> for Error
where
    T: Send + Sync + 'static,
{
    fn from(e: SendError<T>) -> Self {
        Error::Channel(Box::new(e))
    }
}

impl From<RecvError> for Error {
    fn from(e: RecvError) -> Self {
        Error::Channel(Box::new(e))
    }
}

impl From<TryRecvError> for Error {
    fn from(e: TryRecvError) -> Self {
        Error::Channel(Box::new(e))
    }
}
use std::str::FromStr; use std::sync::Arc; use hashbrown::HashMap; use iox_time::TimeProvider; use super::{TaskRegistration, TaskTracker}; /// Every future registered with a `TaskRegistry` is assigned a unique /// `TaskId` #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct TaskId(pub usize); impl FromStr for TaskId { type Err = std::num::ParseIntError; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(Self(FromStr::from_str(s)?)) } } impl ToString for TaskId { fn to_string(&self) -> String { self.0.to_string() } } pub trait AbstractTaskRegistry<T> where T: std::fmt::Debug + Send + Sync, { /// Register a new tracker in the registry fn register(&mut self, metadata: T) -> (TaskTracker<T>, TaskRegistration); /// Returns a complete tracker fn complete(&mut self, metadata: T) -> TaskTracker<T> { self.register(metadata).0 } /// Get the tracker associated with a given id fn get(&self, id: TaskId) -> Option<TaskTracker<T>>; /// Returns the number of tracked tasks fn tracked_len(&self) -> usize; /// Returns a list of trackers, including those that are no longer running fn tracked(&self) -> Vec<TaskTracker<T>>; /// Returns a list of active trackers fn running(&self) -> Vec<TaskTracker<T>>; /// Removes completed tasks from the registry and returns a vector of /// those removed. /// /// Should be called periodically. 
fn reclaim(&mut self) -> Vec<TaskTracker<T>>; } /// Allows tracking the lifecycle of futures registered by /// `TrackedFutureExt::track` with an accompanying metadata payload of type T /// /// Additionally can trigger graceful cancellation of registered futures #[derive(Debug)] pub struct TaskRegistry<T> where T: Send + Sync, { next_id: usize, tasks: HashMap<TaskId, TaskTracker<T>>, time_provider: Arc<dyn TimeProvider>, } impl<T> TaskRegistry<T> where T: Send + Sync, { pub fn new(time_provider: Arc<dyn TimeProvider>) -> Self { Self { next_id: 0, tasks: Default::default(), time_provider, } } } impl<T> AbstractTaskRegistry<T> for TaskRegistry<T> where T: std::fmt::Debug + Send + Sync, { fn register(&mut self, metadata: T) -> (TaskTracker<T>, TaskRegistration) { let id = TaskId(self.next_id); self.next_id += 1; let registration = TaskRegistration::new(Arc::clone(&self.time_provider)); let tracker = TaskTracker::new(id, &registration, metadata); self.tasks.insert(id, tracker.clone()); (tracker, registration) } fn get(&self, id: TaskId) -> Option<TaskTracker<T>> { self.tasks.get(&id).cloned() } fn tracked_len(&self) -> usize { self.tasks.len() } fn tracked(&self) -> Vec<TaskTracker<T>> { self.tasks.values().cloned().collect() } fn running(&self) -> Vec<TaskTracker<T>> { self.tasks .values() .filter_map(|v| { if !v.is_complete() { return Some(v.clone()); } None }) .collect() } /// Removes completed tasks from the registry and returns an iterator of /// those removed fn reclaim(&mut self) -> Vec<TaskTracker<T>> { self.tasks .extract_if(|_, v| v.is_complete()) .map(|(_, v)| v) .collect() } }
use crate::util::{self, color};
use crate::{config, err, git};
use anyhow::Result;
use std::env;
use std::fs::File;
use std::process::Command;

/// Edit portions of the `config.json`, depending on `mode`.
///
/// Opens the relevant file in `$EDITOR`, then commits the result with the
/// original command line as the commit message.
///
/// # Arguments
///
/// * `mode` - Optional specifier of which portion of the config should be edited.
///            Valid values are:
///            - `"me"`: edit this machine's specific overrides.
///            Any other value is rejected with an error; `None` edits the
///            whole config file.
pub fn edit(mode: Option<&str>) -> Result<()> {
    match mode {
        None => {
            // Edit the full config file in place.
            Command::new(editor()?)
                .arg(config::tittle_config_file())
                .status()?;
        }
        Some("me") => {
            edit_machine()?;
        }
        Some(mode) => {
            return err::err(format!("Invalid edit mode {}", color::emphasis(mode)))
        }
    };
    // Commit with the invoking command line (minus argv[0]) as the message.
    git::commit(&env::args().collect::<Vec<String>>()[1..].join(" "))?;
    Ok(())
}

/// Edit this machine's specific overrides.
///
/// Round-trips the overrides through a temp file: serialize -> edit in
/// `$EDITOR` -> parse back -> store -> write the config to disk.
fn edit_machine() -> Result<()> {
    use std::io::prelude::*;
    let machine_id = util::machine_id()?;
    // Temp file name is keyed by machine id, e.g. /tmp/overrides-<id>.
    let mut temp_override_path = env::temp_dir();
    temp_override_path.push(format!("overrides-{}", machine_id));

    let mut config = config::get_config()?;
    let old_overrides = serde_json::to_string_pretty(&config.my_overrides())?;
    let mut temp_override_file = File::create(&temp_override_path)?;
    temp_override_file.write_all(&old_overrides.as_bytes())?;
    temp_override_file.flush()?;

    Command::new(editor()?).arg(&temp_override_path).status()?;

    // Re-read whatever the user saved; invalid JSON fails here via `?`.
    let mut new_overrides = String::new();
    File::open(temp_override_path)?.read_to_string(&mut new_overrides)?;
    config.set_my_overrides(serde_json::from_str(&new_overrides)?)?;
    config::write_config(&config)
}

/// Returns this machine's `$EDITOR`, or an error asking the user to set one.
pub fn editor() -> Result<String> {
    match env::var("EDITOR") {
        Ok(editor) => Ok(editor),
        Err(_) => return err::err("Please set an $EDITOR to edit the tittle config."),
    }
}
/// Small Vec demo: typed, inferred-float, and &str vectors.
fn main() {
    let v1: Vec<i32> = vec![1, 2, 3];
    // Element type is inferred as f64 from the float literals.
    let mut v2 = vec![1.2, 2.3, 4.5];
    let v3 = vec!["cpp", "python", "perl"];
    // BUGFIX: was `v2.push(5)` — an integer literal cannot unify with the
    // f64 element type inferred above, so the original did not compile.
    v2.push(5.0);
    println!("v1 = {:?}, v2 = {:?}, v3 = {:?}", v1, v2, v3);
}
extern crate hlua;

// Verifies that a userdata value can be pushed into the Lua state and read
// back out through the `Push`/`LuaRead` trait pair.
#[test]
fn readwrite() {
    #[derive(Clone)]
    struct Foo;
    impl<L> hlua::Push<L> for Foo where L: hlua::AsMutLua {
        fn push_to_lua(self, lua: L) -> hlua::PushGuard<L> {
            // No metatable customization: the closure ignores the table it receives.
            hlua::userdata::push_userdata(self, lua, |_|{})
        }
    }
    impl<L> hlua::LuaRead<L> for Foo where L: hlua::AsMutLua {
        fn lua_read_at_position(lua: L, index: i32) -> Result<Foo, L> {
            // Read the raw userdata at `index`, then clone the Rust value out of it.
            let val: Result<hlua::userdata::UserdataOnStack<Foo, _>, _> = hlua::LuaRead::lua_read_at_position(lua, index);
            val.map(|d| d.clone())
        }
    }

    let mut lua = hlua::Lua::new();

    lua.set("a", Foo);
    let _: Foo = lua.get("a").unwrap();
}

// Ensures a userdata's Drop impl runs when the owning Lua state is dropped.
#[test]
fn destructor_called() {
    use std::sync::{Arc, Mutex};

    // Shared flag flipped by Foo's destructor.
    let called = Arc::new(Mutex::new(false));

    struct Foo {
        called: Arc<Mutex<bool>>
    }

    impl Drop for Foo {
        fn drop(&mut self) {
            let mut called = self.called.lock().unwrap();
            (*called) = true;
        }
    }

    impl<L> hlua::Push<L> for Foo where L: hlua::AsMutLua {
        fn push_to_lua(self, lua: L) -> hlua::PushGuard<L> {
            hlua::userdata::push_userdata(self, lua, |_|{})
        }
    }

    {
        // Scope the Lua state so it (and the Foo it owns) is dropped here.
        let mut lua = hlua::Lua::new();
        lua.set("a", Foo{called: called.clone()});
    }

    let locked = called.lock().unwrap();
    assert!(*locked);
}

// Checks that reading a userdata back as a *different* userdata type yields
// None instead of reinterpreting the stored value.
#[test]
fn type_check() {
    #[derive(Clone)]
    struct Foo;
    impl<L> hlua::Push<L> for Foo where L: hlua::AsMutLua {
        fn push_to_lua(self, lua: L) -> hlua::PushGuard<L> {
            hlua::userdata::push_userdata(self, lua, |_|{})
        }
    }
    impl<L> hlua::LuaRead<L> for Foo where L: hlua::AsMutLua {
        fn lua_read_at_position(lua: L, index: i32) -> Result<Foo, L> {
            let val: Result<hlua::userdata::UserdataOnStack<Foo, _>, _> = hlua::LuaRead::lua_read_at_position(lua, index);
            val.map(|d| d.clone())
        }
    }

    // Bar has the same shape as Foo but is a distinct userdata type.
    #[derive(Clone)]
    struct Bar;
    impl<L> hlua::Push<L> for Bar where L: hlua::AsMutLua {
        fn push_to_lua(self, lua: L) -> hlua::PushGuard<L> {
            hlua::userdata::push_userdata(self, lua, |_|{})
        }
    }
    impl<L> hlua::LuaRead<L> for Bar where L: hlua::AsMutLua {
        fn lua_read_at_position(lua: L, index: i32) -> Result<Bar, L> {
            let val: Result<hlua::userdata::UserdataOnStack<Bar, _>, _> = hlua::LuaRead::lua_read_at_position(lua, index);
            val.map(|d| d.clone())
        }
    }

    let mut lua = hlua::Lua::new();

    lua.set("a", Foo);

    // Stored as Foo, requested as Bar: the read must fail.
    let x: Option<Bar> = lua.get("a");
    assert!(x.is_none())
}

// Exercises metatable customization: `__index` gains a `test` function that
// is then callable from Lua code.
#[test]
fn metatables() {
    #[derive(Clone)]
    struct Foo;
    impl<L> hlua::Push<L> for Foo where L: hlua::AsMutLua {
        fn push_to_lua(self, lua: L) -> hlua::PushGuard<L> {
            hlua::userdata::push_userdata(self, lua, |mut table| {
                table.set("__index".to_string(), vec![
                    ("test".to_string(), hlua::function(|| 5))
                ]);
            })
        }
    }

    let mut lua = hlua::Lua::new();

    lua.set("a", Foo);

    let x: i32 = lua.execute("return a.test()").unwrap();
    assert_eq!(x, 5);
}
use std::cmp::Ordering; use std::collections::BTreeMap; use std::fmt; use std::hash::{Hash, Hasher}; use firefly_binary::{BinaryEntrySpecifier, BitVec, Bitstring}; use firefly_diagnostics::{SourceSpan, Spanned}; use firefly_intern::{symbols, Ident, Symbol}; use firefly_number::{Float, Integer, Number}; use firefly_syntax_base::{self as syntax_base, Annotations, BinaryOp, FunctionName, UnaryOp}; use super::{Arity, Fun, FunctionVar, Guard, Name, Type}; use crate::evaluator::{self, EvalError}; use crate::lexer::DelayedSubstitution; /// The set of all possible expressions #[derive(Debug, Clone, PartialEq, Spanned)] pub enum Expr { // An identifier/variable/function reference Var(Var), // Literal values Literal(Literal), FunctionVar(FunctionVar), // Delayed substitution of macro DelayedSubstitution(#[span] SourceSpan, DelayedSubstitution), // The various list forms Cons(Cons), // Other data structures Tuple(Tuple), Map(Map), MapUpdate(MapUpdate), Binary(Binary), Record(Record), RecordAccess(RecordAccess), RecordIndex(RecordIndex), RecordUpdate(RecordUpdate), // Comprehensions ListComprehension(ListComprehension), BinaryComprehension(BinaryComprehension), Generator(Generator), // Complex expressions Begin(Begin), Apply(Apply), Remote(Remote), BinaryExpr(BinaryExpr), UnaryExpr(UnaryExpr), Match(Match), If(If), Catch(Catch), Case(Case), Receive(Receive), Try(Try), Fun(Fun), Protect(Protect), } impl Expr { pub fn try_resolve_apply(span: SourceSpan, callee: Expr, args: Vec<Expr>) -> Self { let arity = args.len().try_into().unwrap(); match callee { Expr::Remote(remote) => match (remote.module.as_ref(), remote.function.as_ref()) { (Expr::Literal(Literal::Atom(m)), Expr::Literal(Literal::Atom(f))) => { let name = FunctionVar::new(remote.span, m.name, f.name, arity); Expr::Apply(Apply { span, callee: Box::new(Expr::FunctionVar(name)), args, }) } _ => Expr::Apply(Apply { span, callee: Box::new(Expr::Remote(remote)), args, }), }, callee => Expr::Apply(Apply { span, callee: 
Box::new(callee), args, }), } } pub fn is_safe(&self) -> bool { match self { Self::Var(_) | Self::Literal(_) | Self::Cons(_) | Self::Tuple(_) => true, _ => false, } } pub fn is_generator(&self) -> bool { match self { Self::Generator(_) => true, _ => false, } } /// Returns true if this expression is one that is sensitive to imperative assignment pub fn is_block_like(&self) -> bool { match self { Self::Match(Match { ref expr, .. }) => expr.is_block_like(), Self::Begin(_) | Self::If(_) | Self::Case(_) => true, _ => false, } } pub fn is_literal(&self) -> bool { match self { Self::Literal(_) => true, Self::Cons(ref cons) => cons.is_literal(), Self::Tuple(ref tuple) => tuple.is_literal(), _ => false, } } pub fn as_literal(&self) -> Option<&Literal> { match self { Self::Literal(ref lit) => Some(lit), _ => None, } } /// If this expression is an atom, this function returns the Ident /// backing the atom value. This is a common request in the compiler, /// hence its presence here pub fn as_atom(&self) -> Option<Ident> { match self { Self::Literal(Literal::Atom(a)) => Some(*a), _ => None, } } /// Same as `as_atom`, but unwraps the inner Symbol #[inline] pub fn as_atom_symbol(&self) -> Option<Symbol> { self.as_atom().map(|id| id.name) } /// Returns `Some(bool)` if the expression represents a literal boolean, otherwise None pub fn as_boolean(&self) -> Option<bool> { match self { Self::Literal(lit) => lit.as_boolean(), _ => None, } } pub fn as_var(&self) -> Option<Var> { match self { Self::Var(v) => Some(*v), _ => None, } } pub fn is_lc(&self) -> bool { match self { Self::ListComprehension(_) => true, _ => false, } } pub fn to_lc(self) -> ListComprehension { match self { Self::ListComprehension(lc) => lc, _ => panic!("not a list comprehension"), } } pub fn is_data_constructor(&self) -> bool { match self { Self::Literal(_) | Self::Cons(_) | Self::Tuple(_) => true, _ => false, } } pub fn coerce_to_float(self) -> Expr { match self { expr @ Expr::Literal(Literal::Float(_, _)) => 
expr, Expr::Literal(Literal::Integer(span, i)) => { Expr::Literal(Literal::Float(span, Float::new(i.to_float()).unwrap())) } Expr::Literal(Literal::Char(span, c)) => { Expr::Literal(Literal::Float(span, Float::new(c as i64 as f64).unwrap())) } expr => expr, } } } impl From<Name> for Expr { fn from(name: Name) -> Self { match name { Name::Atom(ident) => Self::Literal(Literal::Atom(ident)), Name::Var(ident) => Self::Var(Var(ident)), } } } impl From<Arity> for Expr { fn from(arity: Arity) -> Self { match arity { Arity::Int(i) => Self::Literal(Literal::Integer(SourceSpan::UNKNOWN, i.into())), Arity::Var(ident) => Self::Var(Var(ident)), } } } impl From<FunctionName> for Expr { fn from(name: FunctionName) -> Self { Self::FunctionVar(name.into()) } } impl TryInto<Literal> for Expr { type Error = Expr; fn try_into(self) -> Result<Literal, Self::Error> { match self { Self::Literal(lit) => Ok(lit), Self::Tuple(tuple) => tuple.try_into().map_err(Expr::Tuple), Self::Cons(cons) => cons.try_into().map_err(Expr::Cons), Self::Map(map) => map.try_into().map_err(Expr::Map), other => Err(other), } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Spanned)] pub struct Var(pub Ident); impl Var { #[inline] pub fn sym(&self) -> Symbol { self.0.name } #[inline] pub fn is_wildcard(&self) -> bool { self.0.name == symbols::Underscore } #[inline] pub fn is_wanted(&self) -> bool { self.0.as_str().get().starts_with('_') == false } #[inline] pub fn is_compiler_generated(&self) -> bool { self.0.as_str().get().starts_with('$') } } impl From<Ident> for Var { fn from(i: Ident) -> Self { Self(i) } } impl PartialOrd for Var { #[inline] fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Ord for Var { #[inline] fn cmp(&self, other: &Self) -> Ordering { self.0.as_str().get().cmp(other.0.as_str().get()) } } #[derive(Debug, Clone, Spanned)] pub struct Cons { #[span] pub span: SourceSpan, pub head: Box<Expr>, pub tail: Box<Expr>, } impl Cons { pub fn is_literal(&self) -> 
bool { let mut current = Some(self); while let Some(Self { head, tail, .. }) = current.take() { if !head.is_literal() { return false; } match tail.as_ref() { Expr::Literal(_) => (), Expr::Tuple(ref tuple) if tuple.is_literal() => (), Expr::Map(ref map) if map.is_literal() => (), Expr::Cons(ref cons) => current = Some(cons), _ => return false, } } true } } impl TryInto<Literal> for Cons { type Error = Cons; fn try_into(self) -> Result<Literal, Self::Error> { let Cons { span, head, tail } = self; match (*head).try_into() { Ok(hd) => match (*tail).try_into() { Ok(tl) => Ok(Literal::Cons(span, Box::new(hd), Box::new(tl))), Err(tl) => Err(Cons { span, head: Box::new(Expr::Literal(hd)), tail: Box::new(tl), }), }, Err(hd) => Err(Cons { span, head: Box::new(hd), tail, }), } } } impl PartialEq for Cons { fn eq(&self, other: &Self) -> bool { self.head == other.head && self.tail == other.tail } } #[derive(Debug, Clone, Spanned)] pub struct Tuple { #[span] pub span: SourceSpan, pub elements: Vec<Expr>, } impl Tuple { pub fn is_literal(&self) -> bool { self.elements.iter().all(|expr| expr.is_literal()) } } impl TryInto<Literal> for Tuple { type Error = Tuple; fn try_into(mut self) -> Result<Literal, Self::Error> { if self.is_literal() { let elements = self .elements .drain(..) 
.map(|e| e.try_into().unwrap()) .collect(); Ok(Literal::Tuple(self.span, elements)) } else { Err(self) } } } impl PartialEq for Tuple { fn eq(&self, other: &Self) -> bool { self.elements == other.elements } } #[derive(Debug, Clone, Spanned)] pub struct Map { #[span] pub span: SourceSpan, pub fields: Vec<MapField>, } impl Map { pub fn is_literal(&self) -> bool { for field in self.fields.iter() { if !field.key_ref().is_literal() { return false; } if !field.value_ref().is_literal() { return false; } } true } } impl TryInto<Literal> for Map { type Error = Map; fn try_into(mut self) -> Result<Literal, Self::Error> { if self.is_literal() { let mut map: BTreeMap<Literal, Literal> = BTreeMap::new(); for field in self.fields.drain(..) { let key = field.key().try_into().unwrap(); let value = field.value().try_into().unwrap(); map.insert(key, value); } Ok(Literal::Map(self.span, map)) } else { Err(self) } } } impl PartialEq for Map { fn eq(&self, other: &Self) -> bool { self.fields == other.fields } } // Updating fields on an existing map, e.g. `Map#{field1 = value1}.` #[derive(Debug, Clone, Spanned)] pub struct MapUpdate { #[span] pub span: SourceSpan, pub map: Box<Expr>, pub updates: Vec<MapField>, } impl PartialEq for MapUpdate { fn eq(&self, other: &Self) -> bool { self.map == other.map && self.updates == other.updates } } /// Maps can have two different types of field assignment: /// /// * assoc - inserts or updates the given key with the given value /// * exact - updates the given key with the given value, or produces an error #[derive(Debug, Clone, Spanned)] pub enum MapField { Assoc { #[span] span: SourceSpan, key: Expr, value: Expr, }, Exact { #[span] span: SourceSpan, key: Expr, value: Expr, }, } impl MapField { pub fn key(&self) -> Expr { self.key_ref().clone() } pub fn key_ref(&self) -> &Expr { match self { Self::Assoc { ref key, .. } => key, Self::Exact { ref key, .. 
} => key, } } pub fn value(&self) -> Expr { self.value_ref().clone() } pub fn value_ref(&self) -> &Expr { match self { Self::Assoc { ref value, .. } => value, Self::Exact { ref value, .. } => value, } } } impl PartialEq for MapField { fn eq(&self, other: &Self) -> bool { (self.key() == other.key()) && (self.value() == other.value()) } } /// The set of literal values #[derive(Debug, Clone, Spanned)] pub enum Literal { Atom(Ident), String(Ident), Char(#[span] SourceSpan, char), Integer(#[span] SourceSpan, Integer), Float(#[span] SourceSpan, Float), Nil(#[span] SourceSpan), Cons(#[span] SourceSpan, Box<Literal>, Box<Literal>), Tuple(#[span] SourceSpan, Vec<Literal>), Map(#[span] SourceSpan, BTreeMap<Literal, Literal>), Binary(#[span] SourceSpan, BitVec), } impl fmt::Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use std::fmt::Write; match self { Self::Atom(id) => write!(f, "'{}'", id.name), Self::String(id) => write!(f, "\"{}\"", id.as_str().get().escape_debug()), Self::Char(_, c) => write!(f, "${}", c.escape_debug()), Self::Integer(_, i) => write!(f, "{}", i), Self::Float(_, flt) => write!(f, "{}", flt), Self::Nil(_) => write!(f, "[]"), Self::Cons(_, h, t) => { if let Ok(elements) = self.as_proper_list() { f.write_char('[')?; for (i, elem) in elements.iter().enumerate() { if i > 0 { f.write_str(", ")?; } write!(f, "{}", elem)?; } f.write_char(']') } else { write!(f, "[{} | {}]", h, t) } } Self::Tuple(_, elements) => { f.write_char('{')?; for (i, elem) in elements.iter().enumerate() { if i > 0 { f.write_str(", ")?; } write!(f, "{}", elem)?; } f.write_char('}') } Self::Map(_, map) => { f.write_str("#{")?; for (i, (k, v)) in map.iter().enumerate() { if i > 0 { f.write_str(", ")?; } write!(f, "{} => {}", k, v)?; } f.write_char('}') } Self::Binary(_, bin) => write!(f, "{}", bin.display()), } } } impl Literal { pub fn from_proper_list(span: SourceSpan, mut elements: Vec<Literal>) -> Self { elements.drain(..).rfold(Self::Nil(span), |lit, tail| 
{ Self::Cons(lit.span(), Box::new(lit), Box::new(tail)) }) } pub fn as_boolean(&self) -> Option<bool> { match self { Self::Atom(id) => match id.name { symbols::True => Some(true), symbols::False => Some(false), _ => None, }, _ => None, } } pub fn as_integer(&self) -> Option<Integer> { match self { Self::Integer(_, i) => Some(i.clone()), Self::Char(_, c) => Some(Integer::Small(*c as i64)), _ => None, } } /// Converts this literal into a vector of elements representing a proper list pub fn as_proper_list(&self) -> Result<Vec<Literal>, ()> { match self { Self::String(s) => { let span = s.span; Ok(s.as_str() .get() .chars() .map(|c| Literal::Integer(span, Integer::Small(c as i64))) .collect()) } Self::Cons(_, head, tail) => { let mut elements = vec![]; elements.push(head.as_ref().clone()); let mut current = Some(tail.as_ref()); while let Some(next) = current.take() { match next { // [H | "string"] =:= [H, $s, $t, $r, $i, $n, $g] Self::String(s) => { let span = s.span; for c in s.as_str().get().chars() { elements.push(Literal::Integer(span, Integer::Small(c as i64))); } } // End of list Self::Nil(_) => { break; } // [H | T] Self::Cons(_, head, tail) => { elements.push(head.as_ref().clone()); current = Some(tail.as_ref()); } // Not a proper list _ => return Err(()), } } Ok(elements) } Self::Nil(_) => Ok(vec![]), // Not a list _ => Err(()), } } pub fn try_from_tuple(tuple: &Tuple) -> Option<Self> { if !tuple.is_literal() { return None; } let elements = tuple .elements .iter() .map(|expr| expr.as_literal().unwrap().clone()) .collect(); Some(Self::Tuple(tuple.span, elements)) } } impl From<Number> for Literal { fn from(n: Number) -> Self { let span = SourceSpan::default(); match n { Number::Integer(i) => Self::Integer(span, i), Number::Float(f) => Self::Float(span, f), } } } impl TryInto<Number> for Literal { type Error = Literal; fn try_into(self) -> Result<Number, Self::Error> { match self { Self::Integer(_, i) => Ok(i.into()), Self::Float(_, f) => Ok(f.into()), 
Self::Char(_, c) => Ok(Number::Integer(Integer::Small(c as i64))), other => Err(other), } } } impl From<bool> for Literal { fn from(b: bool) -> Self { let span = SourceSpan::default(); if b { Self::Atom(Ident::new(symbols::True, span)) } else { Self::Atom(Ident::new(symbols::False, span)) } } } impl Into<syntax_base::Literal> for Literal { fn into(self) -> syntax_base::Literal { match self { Self::Atom(id) => syntax_base::Literal::atom(id.span, id.name), Self::Char(span, c) => syntax_base::Literal::integer(span, c as i64), Self::Integer(span, i) => syntax_base::Literal::integer(span, i), Self::Float(span, f) => syntax_base::Literal::float(span, f), Self::Nil(span) => syntax_base::Literal::nil(span), Self::String(id) => { let span = id.span; id.as_str() .get() .chars() .map(|c| syntax_base::Literal::integer(span, c as i64)) .rfold(syntax_base::Literal::nil(span), |tl, c| { syntax_base::Literal::cons(span, c, tl) }) } Self::Cons(span, head, tail) => { syntax_base::Literal::cons(span, (*head).into(), (*tail).into()) } Self::Tuple(span, mut elements) => { syntax_base::Literal::tuple(span, elements.drain(..).map(Self::into).collect()) } Self::Map(span, mut map) => { let mut new_map: BTreeMap<syntax_base::Literal, syntax_base::Literal> = BTreeMap::new(); while let Some((k, v)) = map.pop_first() { new_map.insert(k.into(), v.into()); } syntax_base::Literal { span, annotations: Annotations::default(), value: syntax_base::Lit::Map(new_map), } } Self::Binary(span, bin) => syntax_base::Literal { span, annotations: Annotations::default(), value: syntax_base::Lit::Binary(bin), }, } } } impl PartialEq for Literal { #[inline] fn eq(&self, other: &Self) -> bool { match (self, other) { (Self::Atom(x), Self::Atom(y)) => x.name == y.name, (Self::Atom(_), _) => false, (Self::String(x), Self::String(y)) => x.name == y.name, (x @ Self::String(_), y @ Self::Cons(_, _, _)) => { let xs = x.as_proper_list().unwrap(); match y.as_proper_list() { Ok(ys) => xs == ys, Err(_) => false, } } 
(Self::String(_), _) => false, (Self::Char(_, x), Self::Char(_, y)) => x == y, (Self::Char(_, x), Self::Integer(_, y)) => { let x = Integer::Small(*x as u32 as i64); x.eq(y) } (Self::Char(_, _), _) => false, (Self::Integer(_, x), Self::Integer(_, y)) => x == y, (Self::Integer(_, x), Self::Char(_, y)) => { let y = Integer::Small(*y as u32 as i64); x.eq(&y) } (Self::Integer(_, _), _) => false, (Self::Float(_, x), Self::Float(_, y)) => x == y, (Self::Float(_, _), _) => false, (Self::Nil(_), Self::Nil(_)) => true, (Self::Nil(_), Self::String(s)) if s.name == symbols::Empty => true, (Self::Nil(_), _) => false, (Self::Cons(_, h1, t1), Self::Cons(_, h2, t2)) => h1 == h2 && t1 == t2, (x @ Self::Cons(_, _, _), y @ Self::String(_)) => { let ys = y.as_proper_list().unwrap(); match x.as_proper_list() { Ok(xs) => xs == ys, Err(_) => false, } } (Self::Cons(_, _, _), _) => false, (Self::Tuple(_, xs), Self::Tuple(_, ys)) => xs == ys, (Self::Tuple(_, _), _) => false, (Self::Map(_, x), Self::Map(_, y)) => x == y, (Self::Map(_, _), _) => false, (Self::Binary(_, x), Self::Binary(_, y)) => x.eq(y), (Self::Binary(_, _), _) => false, } } } impl Eq for Literal {} impl PartialOrd for Literal { // number < atom < reference < fun < port < pid < tuple < map < nil < list < bit string fn partial_cmp(&self, other: &Self) -> Option<Ordering> { match (self, other) { (Self::Float(_, x), Self::Float(_, y)) => x.partial_cmp(y), (Self::Float(_, x), Self::Integer(_, y)) => x.partial_cmp(y), (Self::Float(_, x), Self::Char(_, y)) => { x.partial_cmp(&Integer::Small(*y as u32 as i64)) } (Self::Float(_, _), _) => Some(Ordering::Less), (Self::Integer(_, x), Self::Integer(_, y)) => x.partial_cmp(y), (Self::Integer(_, x), Self::Float(_, y)) => x.partial_cmp(y), (Self::Integer(_, x), Self::Char(_, y)) => { x.partial_cmp(&Integer::Small(*y as u32 as i64)) } (Self::Integer(_, _), _) => Some(Ordering::Less), (Self::Char(_, x), Self::Integer(_, y)) => y.partial_cmp(x).map(|o| o.reverse()), (Self::Char(_, x), 
Self::Float(_, y)) => { let x = *x as u32 as i64; y.partial_cmp(&x).map(|o| o.reverse()) } (Self::Char(_, x), Self::Char(_, y)) => y .partial_cmp(&Integer::Small(*x as u32 as i64)) .map(|o| o.reverse()), (Self::Char(_, _), _) => Some(Ordering::Less), (Self::Atom(_), Self::Float(_, _)) | (Self::Atom(_), Self::Integer(_, _)) | (Self::Atom(_), Self::Char(_, _)) => Some(Ordering::Greater), (Self::Atom(x), Self::Atom(y)) => x.partial_cmp(y), (Self::Atom(_), _) => Some(Ordering::Less), (Self::Tuple(_, _), Self::Float(_, _)) | (Self::Tuple(_, _), Self::Integer(_, _)) | (Self::Tuple(_, _), Self::Char(_, _)) | (Self::Tuple(_, _), Self::Atom(_)) => Some(Ordering::Greater), (Self::Tuple(_, xs), Self::Tuple(_, ys)) => xs.partial_cmp(ys), (Self::Tuple(_, _), _) => Some(Ordering::Less), (Self::Map(_, _), Self::Float(_, _)) | (Self::Map(_, _), Self::Integer(_, _)) | (Self::Map(_, _), Self::Char(_, _)) | (Self::Map(_, _), Self::Atom(_)) | (Self::Map(_, _), Self::Tuple(_, _)) => Some(Ordering::Greater), (Self::Map(_, x), Self::Map(_, y)) => x.partial_cmp(y), (Self::Map(_, _), _) => Some(Ordering::Less), (Self::Nil(_), Self::Nil(_)) => Some(Ordering::Equal), (Self::Nil(_), Self::String(_)) | (Self::Nil(_), Self::Cons(_, _, _)) => { Some(Ordering::Less) } (Self::Nil(_), _) => Some(Ordering::Greater), (Self::String(s), Self::Nil(_)) if s.name == symbols::Empty => Some(Ordering::Equal), (Self::String(_), Self::Nil(_)) => Some(Ordering::Greater), (Self::String(x), Self::String(y)) => x.partial_cmp(y), (x @ Self::String(_), y @ Self::Cons(_, _, _)) => match y.as_proper_list() { Ok(ys) => { let xs = x.as_proper_list().unwrap(); xs.partial_cmp(&ys) } Err(_) => Some(Ordering::Less), }, (Self::String(_), _) => Some(Ordering::Greater), (Self::Cons(_, h1, t1), Self::Cons(_, h2, t2)) => match h1.partial_cmp(h2) { Some(Ordering::Equal) => t1.partial_cmp(t2), other => other, }, (x @ Self::Cons(_, _, _), y @ Self::String(_)) => match x.as_proper_list() { Ok(xs) => { let ys = 
y.as_proper_list().unwrap(); xs.partial_cmp(&ys) } Err(_) => Some(Ordering::Greater), }, (Self::Cons(_, _, _), Self::Binary(_, _)) => Some(Ordering::Less), (Self::Cons(_, _, _), _) => Some(Ordering::Greater), (Self::Binary(_, x), Self::Binary(_, y)) => x.partial_cmp(y), (Self::Binary(_, _), _) => Some(Ordering::Greater), } } } impl Hash for Literal { fn hash<H: Hasher>(&self, state: &mut H) { core::mem::discriminant(self).hash(state); match self { Self::Atom(x) => x.name.hash(state), Self::String(s) => s.name.hash(state), Self::Float(_, f) => f.hash(state), Self::Integer(_, i) => i.hash(state), Self::Char(_, c) => c.hash(state), Self::Nil(_) => (), Self::Cons(_, h, t) => { h.hash(state); t.hash(state); } Self::Tuple(_, elements) => Hash::hash_slice(elements.as_slice(), state), Self::Map(_, map) => map.hash(state), Self::Binary(_, bin) => bin.hash(state), } } } impl Ord for Literal { #[inline] fn cmp(&self, other: &Self) -> Ordering { self.partial_cmp(other).unwrap() } } #[derive(Debug, Clone, Spanned)] pub struct Record { #[span] pub span: SourceSpan, pub name: Ident, pub fields: Vec<RecordField>, } impl PartialEq for Record { fn eq(&self, other: &Self) -> bool { self.name == other.name && self.fields == other.fields } } // Accessing a record field value, e.g. Expr#myrec.field1 #[derive(Debug, Clone, Spanned)] pub struct RecordAccess { #[span] pub span: SourceSpan, pub record: Box<Expr>, pub name: Ident, pub field: Ident, } impl PartialEq for RecordAccess { fn eq(&self, other: &Self) -> bool { self.name == other.name && self.field == other.field && self.record == other.record } } // Referencing a record fields index, e.g. #myrec.field1 #[derive(Debug, Clone, Spanned)] pub struct RecordIndex { #[span] pub span: SourceSpan, pub name: Ident, pub field: Ident, } impl PartialEq for RecordIndex { fn eq(&self, other: &Self) -> bool { self.name == other.name && self.field == other.field } } // Update a record field value, e.g. 
Expr#myrec{field1=ValueExpr} #[derive(Debug, Clone, Spanned)] pub struct RecordUpdate { #[span] pub span: SourceSpan, pub record: Box<Expr>, pub name: Ident, pub updates: Vec<RecordField>, } impl PartialEq for RecordUpdate { fn eq(&self, other: &Self) -> bool { self.name == other.name && self.record == other.record && self.updates == other.updates } } /// Record fields always have a name, but both default value and type /// are optional in a record definition. When instantiating a record, /// if no value is given for a field, and no default is given, /// then `undefined` is the default. #[derive(Debug, Clone, Spanned)] pub struct RecordField { #[span] pub span: SourceSpan, pub name: Ident, pub value: Option<Expr>, pub ty: Option<Type>, } impl PartialEq for RecordField { fn eq(&self, other: &Self) -> bool { (self.name == other.name) && (self.value == other.value) && (self.ty == other.ty) } } #[derive(Debug, Clone, Spanned)] pub struct Binary { #[span] pub span: SourceSpan, pub elements: Vec<BinaryElement>, } impl PartialEq for Binary { fn eq(&self, other: &Self) -> bool { self.elements == other.elements } } /// Used to represent a specific segment in a binary constructor, to /// produce a binary, all segments must be evaluated, and then assembled #[derive(Debug, Clone, Spanned)] pub struct BinaryElement { #[span] pub span: SourceSpan, pub bit_expr: Expr, pub bit_size: Option<Expr>, pub specifier: Option<BinaryEntrySpecifier>, } impl PartialEq for BinaryElement { fn eq(&self, other: &Self) -> bool { (self.bit_expr == other.bit_expr) && (self.bit_size == other.bit_size) && (self.specifier == other.specifier) } } /// A bit type can come in the form `Type` or `Type:Size` #[derive(Debug, Clone, Spanned)] pub enum BitType { Name(#[span] SourceSpan, Ident), Sized(#[span] SourceSpan, Ident, usize), } impl PartialEq for BitType { fn eq(&self, other: &Self) -> bool { match (self, other) { (&BitType::Name(_, ref x1), &BitType::Name(_, ref y1)) => x1 == y1, (&BitType::Sized(_, 
ref x1, ref x2), &BitType::Sized(_, ref y1, ref y2)) => { (x1 == y1) && (x2 == y2) } _ => false, } } } #[derive(Debug, Clone, Spanned)] pub struct ListComprehension { #[span] pub span: SourceSpan, pub body: Box<Expr>, pub qualifiers: Vec<Expr>, } impl PartialEq for ListComprehension { fn eq(&self, other: &Self) -> bool { self.body == other.body && self.qualifiers == other.qualifiers } } #[derive(Debug, Clone, Spanned)] pub struct BinaryComprehension { #[span] pub span: SourceSpan, pub body: Box<Expr>, pub qualifiers: Vec<Expr>, } impl PartialEq for BinaryComprehension { fn eq(&self, other: &Self) -> bool { self.body == other.body && self.qualifiers == other.qualifiers } } /// A generator is one of two types of expressions that act as qualifiers in a commprehension, the other is a filter #[derive(Debug, Clone, Spanned)] pub struct Generator { #[span] pub span: SourceSpan, pub ty: GeneratorType, pub pattern: Box<Expr>, pub expr: Box<Expr>, } impl PartialEq for Generator { fn eq(&self, other: &Self) -> bool { self.pattern == other.pattern && self.expr == other.expr } } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum GeneratorType { Default, Bitstring, } impl Default for GeneratorType { fn default() -> Self { Self::Default } } // A sequence of expressions, e.g. begin expr1, .., exprN end #[derive(Debug, Clone, Spanned)] pub struct Begin { #[span] pub span: SourceSpan, pub body: Vec<Expr>, } impl PartialEq for Begin { fn eq(&self, other: &Self) -> bool { self.body == other.body } } // Function application, e.g. 
foo(expr1, .., exprN) #[derive(Debug, Clone, Spanned)] pub struct Apply { #[span] pub span: SourceSpan, pub callee: Box<Expr>, pub args: Vec<Expr>, } impl Apply { pub fn new(span: SourceSpan, callee: Expr, args: Vec<Expr>) -> Self { Self { span, callee: Box::new(callee), args, } } pub fn remote(span: SourceSpan, module: Symbol, function: Symbol, args: Vec<Expr>) -> Self { Self { span, callee: Box::new(Expr::FunctionVar(FunctionVar::new( span, module, function, args.len().try_into().unwrap(), ))), args, } } pub fn local(span: SourceSpan, function: Symbol, args: Vec<Expr>) -> Self { Self { span, callee: Box::new(Expr::FunctionVar(FunctionVar::new_local( span, function, args.len().try_into().unwrap(), ))), args, } } } impl PartialEq for Apply { fn eq(&self, other: &Self) -> bool { self.callee == other.callee && self.args == other.args } } // Remote, e.g. Foo:Bar #[derive(Debug, Clone, Spanned)] pub struct Remote { #[span] pub span: SourceSpan, pub module: Box<Expr>, pub function: Box<Expr>, } impl Remote { pub fn new(span: SourceSpan, module: Expr, function: Expr) -> Self { Self { span, module: Box::new(module), function: Box::new(function), } } pub fn new_literal(span: SourceSpan, module: Symbol, function: Symbol) -> Self { Self { span, module: Box::new(Expr::Literal(Literal::Atom(Ident::new(module, span)))), function: Box::new(Expr::Literal(Literal::Atom(Ident::new(function, span)))), } } /// Try to resolve this remote expression to a constant function reference of the given arity pub fn try_eval(&self, arity: u8) -> Result<FunctionName, EvalError> { let span = self.span; let module = evaluator::eval_expr(self.module.as_ref(), None)?; let function = evaluator::eval_expr(self.function.as_ref(), None)?; match (module, function) { (Literal::Atom(m), Literal::Atom(f)) => Ok(FunctionName::new(m.name, f.name, arity)), _ => Err(EvalError::InvalidConstExpression { span }), } } } impl PartialEq for Remote { fn eq(&self, other: &Self) -> bool { self.module == other.module && 
// NOTE(review): the two closing braces below terminate a PartialEq impl whose
// opening lines precede this chunk; equality there is decided by the
// `function` field alone. Every hand-written PartialEq in this file follows
// the same convention: `span` is deliberately excluded so that structurally
// equal AST nodes compare equal regardless of source location — which is why
// PartialEq is not derived anywhere below.
self.function == other.function
    }
}

/// A binary operator application: `lhs op rhs`.
#[derive(Debug, Clone, Spanned)]
pub struct BinaryExpr {
    #[span]
    pub span: SourceSpan,
    pub lhs: Box<Expr>,
    pub op: BinaryOp,
    pub rhs: Box<Expr>,
}
impl BinaryExpr {
    /// Boxes both operands; `span` should cover the whole expression.
    pub fn new(span: SourceSpan, op: BinaryOp, lhs: Expr, rhs: Expr) -> Self {
        Self {
            span,
            op,
            lhs: Box::new(lhs),
            rhs: Box::new(rhs),
        }
    }
}
impl PartialEq for BinaryExpr {
    fn eq(&self, other: &Self) -> bool {
        self.op == other.op && self.lhs == other.lhs && self.rhs == other.rhs
    }
}

/// A unary operator application: `op operand`.
#[derive(Debug, Clone, Spanned)]
pub struct UnaryExpr {
    #[span]
    pub span: SourceSpan,
    pub op: UnaryOp,
    pub operand: Box<Expr>,
}
impl PartialEq for UnaryExpr {
    fn eq(&self, other: &Self) -> bool {
        self.op == other.op && self.operand == other.operand
    }
}

/// A match of `expr` against `pattern` (Erlang `Pattern = Expr`).
#[derive(Debug, Clone, Spanned)]
pub struct Match {
    #[span]
    pub span: SourceSpan,
    pub pattern: Box<Expr>,
    pub expr: Box<Expr>,
}
impl PartialEq for Match {
    fn eq(&self, other: &Self) -> bool {
        self.pattern == other.pattern && self.expr == other.expr
    }
}

/// An `if` expression: a sequence of guarded clauses.
#[derive(Debug, Clone, Spanned)]
pub struct If {
    #[span]
    pub span: SourceSpan,
    pub clauses: Vec<Clause>,
}
impl If {
    /// Returns true if the last clause of the `if` is the literal boolean `true`
    pub fn has_wildcard_clause(&self) -> bool {
        self.clauses
            .last()
            .map(|clause| clause.is_wildcard())
            .unwrap_or(false)
    }
}
impl PartialEq for If {
    fn eq(&self, other: &Self) -> bool {
        self.clauses == other.clauses
    }
}

/// A `catch Expr` expression.
#[derive(Debug, Clone, Spanned)]
pub struct Catch {
    #[span]
    pub span: SourceSpan,
    pub expr: Box<Expr>,
}
impl PartialEq for Catch {
    fn eq(&self, other: &Self) -> bool {
        self.expr == other.expr
    }
}

/// A `case Expr of ... end` expression.
#[derive(Debug, Clone, Spanned)]
pub struct Case {
    #[span]
    pub span: SourceSpan,
    pub expr: Box<Expr>,
    pub clauses: Vec<Clause>,
}
impl PartialEq for Case {
    fn eq(&self, other: &Self) -> bool {
        self.expr == other.expr && self.clauses == other.clauses
    }
}

/// A `receive ... end` expression; both the clause list and the `after`
/// section are optional.
#[derive(Debug, Clone, Spanned)]
pub struct Receive {
    #[span]
    pub span: SourceSpan,
    pub clauses: Option<Vec<Clause>>,
    pub after: Option<After>,
}
impl PartialEq for Receive {
    fn eq(&self, other: &Self) -> bool {
        self.clauses == other.clauses && self.after == other.after
    }
}

/// A `try ... of ... catch ... after ... end` expression; every section
/// other than the protected body is optional.
#[derive(Debug, Clone, Spanned)]
pub struct Try {
    #[span]
    pub span: SourceSpan,
    pub exprs: Vec<Expr>,
    pub clauses: Option<Vec<Clause>>,
    pub catch_clauses: Option<Vec<Clause>>,
    pub after: Option<Vec<Expr>>,
}
impl PartialEq for Try {
    fn eq(&self, other: &Self) -> bool {
        self.exprs == other.exprs
            && self.clauses == other.clauses
            && self.catch_clauses == other.catch_clauses
            && self.after == other.after
    }
}

/// Represents the `after` clause of a `receive` expression
#[derive(Debug, Clone, Spanned)]
pub struct After {
    #[span]
    pub span: SourceSpan,
    pub timeout: Box<Expr>,
    pub body: Vec<Expr>,
}
impl PartialEq for After {
    fn eq(&self, other: &Self) -> bool {
        self.timeout == other.timeout && self.body == other.body
    }
}

/// Represents a single match clause in a `case`, `try`, or `receive` expression
#[derive(Debug, Clone, Spanned)]
pub struct Clause {
    #[span]
    pub span: SourceSpan,
    pub patterns: Vec<Expr>,
    pub guards: Vec<Guard>,
    pub body: Vec<Expr>,
    // true when this clause was synthesized by the compiler rather than
    // written by the user
    pub compiler_generated: bool,
}
impl Clause {
    pub fn new(
        span: SourceSpan,
        patterns: Vec<Expr>,
        guards: Vec<Guard>,
        body: Vec<Expr>,
        compiler_generated: bool,
    ) -> Self {
        Self {
            span,
            patterns,
            guards,
            body,
            compiler_generated,
        }
    }

    /// Builds an `if` clause: the single pattern is a wildcard variable,
    /// so matching is decided entirely by the guards.
    pub fn for_if(
        span: SourceSpan,
        guards: Vec<Guard>,
        body: Vec<Expr>,
        compiler_generated: bool,
    ) -> Self {
        Self {
            span,
            patterns: vec![Expr::Var(Var(Ident::new(symbols::Underscore, span)))],
            guards,
            body,
            compiler_generated,
        }
    }

    /// Builds a `catch` clause over the `(kind, error, trace)` triple; a
    /// missing trace pattern defaults to the wildcard `_`.
    pub fn for_catch(
        span: SourceSpan,
        kind: Expr,
        error: Expr,
        trace: Option<Expr>,
        guards: Vec<Guard>,
        body: Vec<Expr>,
    ) -> Self {
        let trace = trace.unwrap_or_else(|| Expr::Var(Var(Ident::from_str("_"))));
        Self {
            span,
            patterns: vec![kind, error, trace],
            guards,
            body,
            compiler_generated: false,
        }
    }

    /// True when this clause matches anything: every pattern is a wildcard
    /// variable and the guard is absent or the single literal `true`.
    pub fn is_wildcard(&self) -> bool {
        let is_wild = self.patterns.iter().all(|p| {
            if let Expr::Var(v) = p {
                v.is_wildcard()
            } else {
                false
            }
        });
        if is_wild {
            match self.guards.len() {
                0 => true,
                1 => self
                    .guards
                    .first()
                    .and_then(|g| g.as_boolean())
                    .unwrap_or_default(),
                _ => false,
            }
        } else {
            false
        }
    }
}
impl PartialEq for Clause {
    fn eq(&self, other: &Self) -> bool {
        self.patterns == other.patterns && self.guards == other.guards && self.body == other.body
    }
}

/// A guard-protected expression wrapper.
#[derive(Debug, Clone, Spanned)]
pub struct Protect {
    #[span]
    pub span: SourceSpan,
    pub body: Box<Expr>,
}
impl PartialEq for Protect {
    fn eq(&self, other: &Self) -> bool {
        self.body.eq(&other.body)
    }
}
use std::{
    net::{IpAddr, Ipv4Addr, SocketAddr},
    sync::Arc,
};

use generated_types::i_ox_testing_client::IOxTestingClient;
use influxdb_storage_client::{
    connection::{Builder as ConnectionBuilder, Connection, GrpcConnection},
    Client as StorageClient,
};
use service_common::test_util::TestDatabaseStore;
use snafu::{ResultExt, Snafu};
use tokio::task::JoinHandle;
use tokio_stream::wrappers::TcpListenerStream;

use crate::service::ErrorLogger;

/// Errors that can occur while standing up the in-process test server.
#[derive(Debug, Snafu)]
pub enum FixtureError {
    #[snafu(display("Error binding fixture server: {}", source))]
    Bind { source: std::io::Error },

    #[snafu(display("Error creating fixture: {}", source))]
    Tonic { source: tonic::transport::Error },
}

/// Wrapper around raw clients and test database
#[derive(Debug)]
pub struct Fixture {
    pub client_connection: Connection,
    pub iox_client: IOxTestingClient<GrpcConnection>,
    pub storage_client: StorageClient,
    pub test_storage: Arc<TestDatabaseStore>,
    // handle to the spawned server task; aborted on Drop so the server does
    // not outlive the fixture
    pub join_handle: JoinHandle<()>,
}

impl Fixture {
    /// Start up a test storage server listening on `port`, returning
    /// a fixture with the test server and clients
    pub async fn new() -> Result<Self, FixtureError> {
        Self::new_with_semaphore_size(u16::MAX as usize).await
    }

    /// Same as [`Fixture::new`], but with an explicit request-concurrency
    /// semaphore size for the backing `TestDatabaseStore`.
    ///
    /// NOTE(review): the client `ConnectionBuilder::build` result is
    /// `unwrap()`ed rather than mapped into `FixtureError` — presumably
    /// acceptable in a test fixture, but worth confirming.
    pub async fn new_with_semaphore_size(semaphore_size: usize) -> Result<Self, FixtureError> {
        let test_storage = Arc::new(TestDatabaseStore::new_with_semaphore_size(semaphore_size));

        // Get a random port from the kernel by asking for port 0.
        let bind_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 0);
        let socket = tokio::net::TcpListener::bind(bind_addr)
            .await
            .context(BindSnafu)?;

        // Pull the assigned port out of the socket
        let bind_addr = socket.local_addr().unwrap();

        println!("Starting InfluxDB IOx storage test server on {bind_addr:?}");

        let trace_header_parser = trace_http::ctx::TraceHeaderParser::new();

        // Build the gRPC router: tracing middleware plus the testing and
        // storage services under test.
        let router = tonic::transport::Server::builder()
            .layer(trace_http::tower::TraceLayer::new(
                trace_header_parser,
                Arc::clone(&test_storage.metric_registry),
                None,
                true,
                "test server",
            ))
            .add_service(service_grpc_testing::make_server())
            .add_service(crate::make_server(Arc::clone(&test_storage)));

        // Serve on the already-bound listener; errors are logged and
        // swallowed so a failed server just ends the background task.
        let server = async move {
            let stream = TcpListenerStream::new(socket);
            router
                .serve_with_incoming(stream)
                .await
                .log_if_error("Running Tonic Server")
                .ok();
        };

        let join_handle = tokio::task::spawn(server);

        let client_connection = ConnectionBuilder::default()
            .connect_timeout(std::time::Duration::from_secs(30))
            .build(format!("http://{bind_addr}"))
            .await
            .unwrap();

        let iox_client = IOxTestingClient::new(client_connection.clone().into_grpc_connection());

        let storage_client = StorageClient::new(client_connection.clone());

        Ok(Self {
            client_connection,
            iox_client,
            storage_client,
            test_storage,
            join_handle,
        })
    }
}

impl Drop for Fixture {
    fn drop(&mut self) {
        // Tear down the background server task when the fixture goes away.
        self.join_handle.abort();
    }
}
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! This mod is the key point about compatibility.
//! Everytime update anything in this file, update the `VER` and let the tests pass.

use std::collections::BTreeMap;
use std::collections::BTreeSet;

use chrono::DateTime;
use chrono::Utc;
use common_meta_app::share as mt;
use common_protos::pb;
use enumflags2::BitFlags;

use crate::reader_check_msg;
use crate::FromToProto;
use crate::Incompatible;
use crate::MIN_READER_VER;
use crate::VER;

// Each impl below converts between the in-memory meta type (`mt::*`) and its
// protobuf form (`pb::*`). `from_pb` first verifies the writer/reader version
// pair via `reader_check_msg`; `to_pb` always stamps the current `VER` and
// `MIN_READER_VER`.

impl FromToProto for mt::ObjectSharedByShareIds {
    type PB = pb::ObjectSharedByShareIds;
    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.ver
    }
    fn from_pb(p: pb::ObjectSharedByShareIds) -> Result<Self, Incompatible> {
        reader_check_msg(p.ver, p.min_reader_ver)?;
        let v = Self {
            share_ids: BTreeSet::from_iter(p.share_ids.iter().copied()),
        };
        Ok(v)
    }

    fn to_pb(&self) -> Result<pb::ObjectSharedByShareIds, Incompatible> {
        let p = pb::ObjectSharedByShareIds {
            ver: VER,
            min_reader_ver: MIN_READER_VER,
            share_ids: Vec::from_iter(self.share_ids.iter().copied()),
        };
        Ok(p)
    }
}

impl FromToProto for mt::ShareNameIdent {
    type PB = pb::ShareNameIdent;
    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.ver
    }
    fn from_pb(p: pb::ShareNameIdent) -> Result<Self, Incompatible> {
        reader_check_msg(p.ver, p.min_reader_ver)?;
        let v = Self {
            tenant: p.tenant,
            share_name: p.share_name,
        };
        Ok(v)
    }

    fn to_pb(&self) -> Result<pb::ShareNameIdent, Incompatible> {
        let p = pb::ShareNameIdent {
            ver: VER,
            min_reader_ver: MIN_READER_VER,
            tenant: self.tenant.clone(),
            share_name: self.share_name.clone(),
        };
        Ok(p)
    }
}

impl FromToProto for mt::ShareGrantObject {
    type PB = pb::ShareGrantObject;
    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.ver
    }
    fn from_pb(p: pb::ShareGrantObject) -> Result<Self, Incompatible> {
        reader_check_msg(p.ver, p.min_reader_ver)?;
        // The protobuf oneof is optional on the wire; an absent object is a
        // hard incompatibility rather than a default.
        match p.object {
            Some(pb::share_grant_object::Object::DbId(db_id)) => {
                Ok(mt::ShareGrantObject::Database(db_id))
            }
            Some(pb::share_grant_object::Object::TableId(table_id)) => {
                Ok(mt::ShareGrantObject::Table(table_id))
            }
            None => Err(Incompatible {
                reason: "ShareGrantObject cannot be None".to_string(),
            }),
        }
    }

    fn to_pb(&self) -> Result<pb::ShareGrantObject, Incompatible> {
        let object = match self {
            mt::ShareGrantObject::Database(db_id) => {
                Some(pb::share_grant_object::Object::DbId(*db_id))
            }
            mt::ShareGrantObject::Table(table_id) => {
                Some(pb::share_grant_object::Object::TableId(*table_id))
            }
        };
        let p = pb::ShareGrantObject {
            ver: VER,
            min_reader_ver: MIN_READER_VER,
            object,
        };
        Ok(p)
    }
}

impl FromToProto for mt::ShareGrantEntry {
    type PB = pb::ShareGrantEntry;
    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.ver
    }
    fn from_pb(p: pb::ShareGrantEntry) -> Result<Self, Incompatible>
    where Self: Sized {
        reader_check_msg(p.ver, p.min_reader_ver)?;

        // Privileges travel as a raw u64 bitset; unknown bits make the
        // message incompatible.
        let privileges = BitFlags::<mt::ShareGrantObjectPrivilege, u64>::from_bits(p.privileges);
        match privileges {
            Ok(privileges) => Ok(mt::ShareGrantEntry {
                object: mt::ShareGrantObject::from_pb(p.object.ok_or_else(|| Incompatible {
                    reason: "ShareGrantEntry.object can not be None".to_string(),
                })?)?,
                privileges,
                grant_on: DateTime::<Utc>::from_pb(p.grant_on)?,
                update_on: match p.update_on {
                    Some(t) => Some(DateTime::<Utc>::from_pb(t)?),
                    None => None,
                },
            }),
            Err(e) => Err(Incompatible {
                reason: format!("UserPrivilegeType error: {}", e),
            }),
        }
    }

    fn to_pb(&self) -> Result<pb::ShareGrantEntry, Incompatible> {
        Ok(pb::ShareGrantEntry {
            ver: VER,
            min_reader_ver: MIN_READER_VER,
            object: Some(self.object().to_pb()?),
            privileges: self.privileges().bits(),
            grant_on: self.grant_on.to_pb()?,
            update_on: match &self.update_on {
                Some(t) => Some(t.to_pb()?),
                None => None,
            },
        })
    }
}

impl FromToProto for mt::ShareMeta {
    type PB = pb::ShareMeta;
    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.ver
    }
    fn from_pb(p: pb::ShareMeta) -> Result<Self, Incompatible>
    where Self: Sized {
        reader_check_msg(p.ver, p.min_reader_ver)?;

        // Entries are keyed by their Display form in the in-memory map.
        let mut entries = BTreeMap::new();
        for entry in p.entries {
            let entry = mt::ShareGrantEntry::from_pb(entry)?;
            entries.insert(entry.to_string(), entry.clone());
        }
        Ok(mt::ShareMeta {
            database: match p.database {
                Some(db) => Some(mt::ShareGrantEntry::from_pb(db)?),
                None => None,
            },
            entries,
            comment: p.comment.clone(),
            accounts: BTreeSet::from_iter(p.accounts.clone().into_iter()),
            share_from_db_ids: BTreeSet::from_iter(p.share_from_db_ids.clone().into_iter()),
            share_on: DateTime::<Utc>::from_pb(p.share_on)?,
            update_on: match p.update_on {
                Some(t) => Some(DateTime::<Utc>::from_pb(t)?),
                None => None,
            },
        })
    }

    fn to_pb(&self) -> Result<pb::ShareMeta, Incompatible> {
        let mut entries = Vec::new();
        for entry in self.entries.iter() {
            entries.push(entry.1.to_pb()?);
        }

        Ok(pb::ShareMeta {
            ver: VER,
            min_reader_ver: MIN_READER_VER,
            database: match &self.database {
                Some(db) => Some(mt::ShareGrantEntry::to_pb(db)?),
                None => None,
            },
            entries,
            accounts: Vec::from_iter(self.accounts.clone().into_iter()),
            share_from_db_ids: Vec::from_iter(self.share_from_db_ids.clone().into_iter()),
            comment: self.comment.clone(),
            share_on: self.share_on.to_pb()?,
            update_on: match &self.update_on {
                Some(t) => Some(t.to_pb()?),
                None => None,
            },
        })
    }
}

impl FromToProto for mt::ShareAccountMeta {
    type PB = pb::ShareAccountMeta;
    fn get_pb_ver(p: &Self::PB) -> u64 {
        p.ver
    }
    fn from_pb(p: pb::ShareAccountMeta) -> Result<Self, Incompatible>
    where Self: Sized {
        reader_check_msg(p.ver, p.min_reader_ver)?;

        Ok(mt::ShareAccountMeta {
            account: p.account.clone(),
            share_id: p.share_id,
            share_on: DateTime::<Utc>::from_pb(p.share_on)?,
            accept_on: match p.accept_on {
                Some(t) => Some(DateTime::<Utc>::from_pb(t)?),
                None => None,
            },
        })
    }

    fn to_pb(&self) -> Result<pb::ShareAccountMeta, Incompatible> {
        Ok(pb::ShareAccountMeta {
            ver: VER,
            min_reader_ver: MIN_READER_VER,
            account: self.account.clone(),
            share_id: self.share_id,
            share_on: self.share_on.to_pb()?,
            accept_on: match &self.accept_on {
                Some(t) => Some(t.to_pb()?),
                None => None,
            },
        })
    }
}
use curve25519_dalek::scalar::Scalar; use rand::{CryptoRng, RngCore}; use rand::prelude::ThreadRng; use crate::toolbox::secrets; use crate::toolbox::util::random_scalar; use log::{trace, debug, info}; /// A SecretSharing implementation that is based on the all-shares XOR method. In particular, you must have every outstanding share /// of a secret in order to reconstruct that secret. Because it is the simplest method, it has no interesting parameters. pub struct Xor<R: CryptoRng + RngCore> { rng: R, } impl<R> Xor<R> where R: RngCore + CryptoRng { /// Create a new Xor object with the provided RNG implementation. pub fn new(rng: R) -> Self { Xor { rng } } } impl Xor<ThreadRng> { /// Create a new Xor object with a default RNG. pub fn new_without_rng() -> Self { Xor { rng: rand::thread_rng() } } } impl<R> secrets::SecretSharing for Xor<R> where R: RngCore + CryptoRng { fn share(&mut self, secret: &Scalar, nr_of_shares: usize) -> Result<Vec<Scalar>, String> { info!("Sharing {} pieces of {:?}", nr_of_shares, secret); let mut shares: Vec<Scalar> = Vec::new(); // generate n - 1 random shares; the zero-th share is the secret for _ in 1..nr_of_shares { shares.push(random_scalar(&mut self.rng)); } // calculate the final share as a function of the random shares and the secret let val = xor_many_scalars(secret, shares.iter()); shares.push(val); return Ok(shares); } /// For Xor, sparse_shares should include a None value for each share you want us to generate fn complete(&mut self, secret: &Scalar, sparse_shares: &Vec<Option<Scalar>>) -> Result<Vec<Scalar>, String> { let mut empties = 0; let mut shares: Vec<Scalar> = Vec::new(); let mut new_shares: Vec<Scalar> = Vec::new(); for share in sparse_shares.iter() { if share.is_some() { shares.push(share.unwrap()); } else { if empties != 0 { // the "zero-th" empty will be the one we have to XOR-calculate trace!("Pushing a random Scalar to extra empty"); new_shares.push(random_scalar(&mut self.rng)); } empties += 1; } } if empties < 1 { 
return Err(String::from("No empty shares in which to complete!")); } info!("Completing {} empties with {} known and secret = {:?}", empties, shares.len(), secret); let intermediate = xor_many_scalars(secret, shares.iter()); let val = xor_many_scalars(&intermediate, new_shares.iter()); debug!("The XOR'd share is {:?}", val); new_shares.push(val); let ret_shares: Vec<Scalar> = sparse_shares.iter().map(|x| { if x.is_some() { x.unwrap() } else { // it's OK to panic if we get a None here, since that should *never* happen new_shares.pop().unwrap() } }).collect(); return Ok(ret_shares); } fn reconstruct(&mut self, shares: &Vec<Scalar>) -> Result<Scalar, String> { if shares.len() < 1 { return Err(String::from("No shares provided, impossible to reconstruct!")); } let secret = xor_many_scalars(&Scalar::zero(), shares.iter()); if secret.is_canonical() { debug!("Used {} shares to reconstruct {:?}", shares.len(), secret); return Ok(secret); } else { debug!("Value from XOR isn't within the group! {:?}", secret); return Err(String::from("The reconstructed secret is outside the group")); } } } /// Given a list of scalars, XOR them all together. We split out the `first` separately for efficiency's sake; /// there are use cases where the initial value you want to XOR with isn't in your Vector, so you don't need to /// push it. /// /// The resulting value is **not** guaranteed to be modulo group order. If this is important for your particular use /// case, you'll need to do those checks after calling this function. /// /// TODO is there a special way we need to write this function, to achieve some sort of SIMD benefit? 
fn xor_many_scalars<'a, T>(first: &Scalar, others: T) -> Scalar where T: Iterator<Item=&'a Scalar>, { let mut the_bytes = first.clone().to_bytes(); for scalar in others { let scal_bytes = scalar.to_bytes(); for i in 0..the_bytes.len() { the_bytes[i] ^= scal_bytes[i]; } } Scalar::from_bits(the_bytes) } #[allow(unused_imports)] mod tests { use super::*; use crate::toolbox::secrets::SecretSharing; use curve25519_dalek::constants::BASEPOINT_ORDER; #[test] fn xor_many_easy() { let zeroes = vec![Scalar::zero(); 5]; let res1 = xor_many_scalars(&Scalar::zero(), zeroes.iter()); assert_eq!(Scalar::zero(), res1); let res2 = xor_many_scalars(&Scalar::one(), zeroes.iter()); assert_eq!(Scalar::one(), res2); } #[test] fn xor_many_medium() { let scalars = vec![Scalar::from(8675309u32), Scalar::from(5551212u32), Scalar::from(4561414u32)]; let res1 = xor_many_scalars(&Scalar::zero(), scalars.iter()); assert_eq!(Scalar::from(9793927u32), res1); let res2 = xor_many_scalars(&Scalar::from(9743901u32), scalars.iter()); assert_eq!(Scalar::from(122778u32), res2); } #[test] fn xor_share() { let mut xor = Xor::new_without_rng(); let shares = xor.share(&Scalar::one(), 3); assert!(shares.is_ok()); let new_shares = shares.unwrap().clone(); info!("Shares: {:?}", new_shares); match xor.reconstruct(&new_shares) { Ok(val) => assert_eq!(Scalar::one(), val), Err(e) => assert!(false, format!("Error reconstructing secret: {}", e)), } } #[test] fn xor_reconstruct() { let mut xor = Xor::new_without_rng(); let shares = vec![Scalar::from(8675309u32), Scalar::from(5551212u32), Scalar::from(4561414u32)]; match xor.reconstruct(&shares) { Ok(val) => assert_eq!(Scalar::from(9793927u32), val), Err(e) => assert!(false, format!("Error reconstructing secret: {}", e)), } } #[test] fn xor_complete_easy() { let mut xor = Xor::new_without_rng(); let sparse_shares = vec![Some(Scalar::from(123456789u32)), None]; match xor.complete(&Scalar::one(), &sparse_shares) { Ok(shares) => { assert_eq!(Scalar::from(123456789u32), 
shares[0]); assert_eq!(Scalar::from(123456788u32), shares[1]); }, Err(e) => assert!(false, format!("Error completing shares: {}", e)) } } #[test] fn xor_complete_medium() { let mut xor = Xor::new_without_rng(); let sparse_shares = vec![Some(Scalar::from(8675309u32)), Some(Scalar::from(5551212u32)), Some(Scalar::from(4561414u32)), None]; match xor.complete(&Scalar::from(122778u32), &sparse_shares) { Ok(shares) => { assert_eq!(Scalar::from(8675309u32), shares[0]); assert_eq!(Scalar::from(5551212u32), shares[1]); assert_eq!(Scalar::from(4561414u32), shares[2]); assert_eq!(Scalar::from(9743901u32), shares[3]); }, Err(e) => assert!(false, format!("Error completing shares: {}", e)), } } #[test] fn xor_complete_hard() { let mut xor = Xor::new_without_rng(); let sparse_shares = vec![None, None, None, None, None, Some(Scalar::from(711117u32)), None, None, None, None]; match xor.complete(&Scalar::one(), &sparse_shares) { Ok(shares) => { assert_eq!(sparse_shares.len(), shares.len()); let mut found = false; for s in shares.iter() { // make sure our one original share is in there somewhere if sparse_shares[5].unwrap() == *s { found = true; } } assert!(found, "Didn't find original share in the completed list"); let val = xor.reconstruct(&shares); assert!(val.is_ok()); assert_eq!(Scalar::one(), val.unwrap()); }, Err(e) => assert!(false, format!("Unable to complete: {}", e)), } } #[test] fn xor_reconstruct_out_of_group() { // 0x10 = 0000 1010 largest byte of \ell // 0x5 = 0000 0101 // XOR = 0000 1111 let share1 = Scalar::from_canonical_bytes([12, 227, 105, 171, 173, 162, 96, 141, 241, 244, 32, 246, 255, 9, 210, 32, 110, 245, 179, 133, 8, 34, 83, 32, 220, 162, 102, 9, 189, 38, 231, 5]).unwrap(); let share2 = BASEPOINT_ORDER; let share3 = Scalar::from(74u8); let mut xor = Xor::new_without_rng(); let shares = vec![share1, share2, share3]; assert!(xor.reconstruct(&shares).is_err(), "Shouldn't have been able to reconstruct!"); } }
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
// NOTE(review): every serializer below follows the same generated pattern —
// write the input struct into a JSON object and wrap the text in an SdkBody.
// The one exception is `serialize_operation_get_snowball_usage`, whose input
// carries no members, so it serializes the constant empty object "{}".
// Do not hand-edit; regenerate from the Smithy model instead.
pub fn serialize_operation_cancel_cluster(
    input: &crate::input::CancelClusterInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_cancel_cluster_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_cancel_job(
    input: &crate::input::CancelJobInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_cancel_job_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_create_address(
    input: &crate::input::CreateAddressInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_address_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_create_cluster(
    input: &crate::input::CreateClusterInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_cluster_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_create_job(
    input: &crate::input::CreateJobInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_job_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_create_long_term_pricing(
    input: &crate::input::CreateLongTermPricingInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_long_term_pricing_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_create_return_shipping_label(
    input: &crate::input::CreateReturnShippingLabelInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_create_return_shipping_label_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_describe_address(
    input: &crate::input::DescribeAddressInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_address_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_describe_addresses(
    input: &crate::input::DescribeAddressesInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_addresses_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_describe_cluster(
    input: &crate::input::DescribeClusterInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_cluster_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_describe_job(
    input: &crate::input::DescribeJobInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_job_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_describe_return_shipping_label(
    input: &crate::input::DescribeReturnShippingLabelInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_describe_return_shipping_label_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_get_job_manifest(
    input: &crate::input::GetJobManifestInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_get_job_manifest_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_get_job_unlock_code(
    input: &crate::input::GetJobUnlockCodeInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_get_job_unlock_code_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_get_snowball_usage(
    _input: &crate::input::GetSnowballUsageInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    Ok(smithy_http::body::SdkBody::from("{}"))
}

pub fn serialize_operation_get_software_updates(
    input: &crate::input::GetSoftwareUpdatesInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_get_software_updates_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_list_cluster_jobs(
    input: &crate::input::ListClusterJobsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_cluster_jobs_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_list_clusters(
    input: &crate::input::ListClustersInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_clusters_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_list_compatible_images(
    input: &crate::input::ListCompatibleImagesInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_compatible_images_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_list_jobs(
    input: &crate::input::ListJobsInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_jobs_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_list_long_term_pricing(
    input: &crate::input::ListLongTermPricingInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_list_long_term_pricing_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_update_cluster(
    input: &crate::input::UpdateClusterInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_update_cluster_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_update_job(
    input: &crate::input::UpdateJobInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_update_job_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_update_job_shipment_state(
    input: &crate::input::UpdateJobShipmentStateInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_update_job_shipment_state_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}

pub fn serialize_operation_update_long_term_pricing(
    input: &crate::input::UpdateLongTermPricingInput,
) -> Result<smithy_http::body::SdkBody, smithy_types::Error> {
    let mut out = String::new();
    let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
    crate::json_ser::serialize_structure_update_long_term_pricing_input(&mut object, input);
    object.finish();
    Ok(smithy_http::body::SdkBody::from(out))
}
use crate::{NodeId, PortIndex};

/// A directed edge in the node graph: ties one node's output port to
/// another node's input port.
pub struct Connection {
    pub output_id: NodeId,
    pub output_port: PortIndex,
    pub input_id: NodeId,
    pub input_port: PortIndex,
}

impl Connection {
    /// Builds the edge `(output_id, output_port) -> (input_id, input_port)`.
    pub fn new(
        output_id: NodeId,
        output_port: PortIndex,
        input_id: NodeId,
        input_port: PortIndex,
    ) -> Connection {
        Self {
            input_id,
            input_port,
            output_id,
            output_port,
        }
    }
}
use std::ffi::{OsStr, OsString};
use test_pwstr::Windows::Win32::Foundation::PWSTR;
use windows::core::*;

/// Round-trips "hello" into a PWSTR via every string-ish type that
/// implements `IntoParam<PWSTR>` and checks the UTF-16 buffer contents.
#[test]
fn test() {
    unsafe {
        // `len` counts the trailing NUL too, so reading `len` code units
        // back should reproduce "hello\0" exactly.
        let expected = "hello\0";
        let len = expected.chars().count();

        let p = get_pwstr_from("hello");
        assert_eq!(from_parts(p.abi().0, len), expected);

        let p = get_pwstr_from(String::from("hello"));
        assert_eq!(from_parts(p.abi().0, len), expected);

        let p = get_pwstr_from(OsStr::new("hello"));
        assert_eq!(from_parts(p.abi().0, len), expected);

        let p = get_pwstr_from(OsString::from("hello"));
        assert_eq!(from_parts(p.abi().0, len), expected);
    }
}

// Generic funnel: forces the conversion through the IntoParam machinery
// under test. The returned Param owns the backing buffer.
fn get_pwstr_from<'a>(t: impl IntoParam<'a, PWSTR>) -> Param<'a, PWSTR> {
    t.into_param()
}

// Reads `len` UTF-16 code units from `p` and decodes them lossily.
// Caller must guarantee `p` points at at least `len` valid u16s.
fn from_parts(p: *const u16, len: usize) -> String {
    let buf = unsafe { core::slice::from_raw_parts(p, len) };
    String::from_utf16_lossy(buf)
}
use backend::x86::X86Platform;
use backend::{regalloc, Platform};
use intermediate;
use minijava;
use std::fs::{DirBuilder, File};
use std::io::prelude::*;
use std::process::Command;

/// End-to-end compiler test: compiles `dir/file_name.java` through the whole
/// MiniJava pipeline (parse -> typecheck -> translate -> canonize -> trace ->
/// codegen -> regalloc), runs the generated x86 assembly under the `symb386`
/// simulator, runs the same source under a real JVM (`javac` + `java`), and
/// asserts that both agree: same success/failure status and, on success,
/// identical stdout.
fn test_translate_file(dir: &str, file_name: &str) {
    // Front end: parse and type-check the source file.
    let mut file = File::open(format!("{}/{}.java", dir, file_name)).unwrap();
    let mut source = String::new();
    file.read_to_string(&mut source).unwrap();
    let ast = minijava::parser::parse_prg(&source).expect("Parse error");
    let symbol_table = minijava::symbols::SymbolTable::new(&ast).unwrap();
    minijava::typecheck::verify_prg(&symbol_table, &ast).unwrap();

    // Back end: lower to intermediate representation, then to x86 assembly.
    let translated = intermediate::Translator::<X86Platform>::new(&symbol_table).process(&ast);
    let canonized = intermediate::Canonizer::new().process(translated);
    let traced = intermediate::Tracer::new().process(canonized);
    let mut code = X86Platform::code_gen(&traced);
    regalloc::alloc::<X86Platform>(&mut code);

    // Emit the assembly into a scratch directory under /tmp.
    let path = &format!("/tmp/{}", file_name);
    DirBuilder::new()
        .recursive(true)
        .create(path)
        .expect("Cannot create test directory");
    let test_s = format!("{}/{}.s", path, file_name);
    let mut code_file = File::create(&test_s).unwrap();
    let _ = write!(code_file, "{}", code);

    // Run the generated assembly in the simulator.
    let sim = Command::new("symb386")
        .arg(&test_s)
        .output()
        .expect("failed to execute process");

    // Reference run: compile and execute the same program on a real JVM.
    let _cp = Command::new("cp")
        .arg(format!("{}/{}.java", dir, file_name))
        .arg(format!("{}/{}.java", path, file_name))
        .output()
        .expect("failed to execute cp");
    let _javac = Command::new("javac")
        .current_dir(path)
        .arg(format!("{}.java", file_name))
        .output()
        .expect("failed to execute javac");
    let java = Command::new("java")
        .current_dir(path)
        .arg(file_name)
        .output()
        .expect("failed to execute java");

    // The two asserts together say: sim succeeds iff java succeeds.
    assert!(!java.status.success() || sim.status.success());
    assert!(!sim.status.success() || java.status.success());
    if java.status.success() {
        assert_eq!(
            String::from_utf8_lossy(&java.stdout),
            String::from_utf8_lossy(&sim.stdout)
        )
    }
}

static TEST_DIR: &'static str = "test";

/// Generates one `#[test]` per listed program name; each test runs
/// `test_translate_file` on `test/<Name>.java`.
macro_rules! minijava_tests {
    ($($name:ident,)*) => {
        $(
            #[test]
            #[allow(non_snake_case)]
            fn $name() {
                test_translate_file(TEST_DIR, stringify!($name))
            }
        )*
    }
}

// Commented-out entries are presumably known-failing or slow cases — confirm
// before re-enabling.
minijava_tests! {
    ArrayAccess,
    ArrayBounds,
    BinarySearch,
    BinaryTree,
    Binding,
    // Brainfuck,
    BubbleSort,
    Div,
    Effects,
    // E,
    EulerTest,
    Factorial,
    FactorialMem,
    // FibInteger,
    Fib,
    FibL,
    // GameOfLife,
    Graph,
    // Hanoi,
    LinearSearch,
    LinkedList,
    // Mandelbrot,
    ManyArgs,
    Newton,
    Primes,
    QuickSort,
    ShortCutAnd,
    Stck,
    Sum,
    TestEq,
    TrivialClass,
    While,
}
use crate::{ errors::*, fastfile::{BackingReader, FastFileReader, FastFileReaderBuilder}, os, strategy::ReaderStrategy, }; use failure::Fail; use std::{fs::File, os::unix::io::AsRawFd}; pub struct DefaultMacOsReaderStrategy {} impl ReaderStrategy for DefaultMacOsReaderStrategy { fn get_reader(&self, ffrb: FastFileReaderBuilder) -> Result<FastFileReader> { let size = get_file_size(&ffrb)?; let file = ffrb.file; let inner = create_backing_reader(file, size)?; Ok(FastFileReader::new(inner, size)) } } fn get_file_size(ffrb: &FastFileReaderBuilder) -> Result<usize> { let size = if let Some(size) = ffrb.size { size } else if let Some(size_hint) = ffrb.size_hint { size_hint } else { let file = &ffrb.file; let meta = file.metadata().map_err(|e| e.context(ErrorKind::FileOpFailed))?; meta.len() as usize }; Ok(size) } fn create_backing_reader(file: File, file_size: usize) -> Result<BackingReader> { prepare_file_for_reading(&file, file_size)?; BackingReader::file(file) } #[allow(clippy::collapsible_if)] fn prepare_file_for_reading<T: AsRawFd>(fd: &T, file_size: usize) -> Result<()> { if file_size >= 8 * 1024 { let fd = fd.as_raw_fd(); if file_size <= 268_435_456 { os::read_ahead(fd)?; } else { os::read_advise(fd, file_size)?; } } Ok(()) }
// NOTE: this is very early (pre-1.0, ~2011) Rust syntax: `tag` = enum,
// `alt` = match, `ret` = return, `fail` = panic, `obj`/`iter` are long-removed
// language features. Builds an id -> AST-node map for a crate.
import std::smallintmap;
import std::option;
import syntax::ast::*;
import syntax::visit;
import visit::vt;

// The kinds of AST nodes the map can point at.
tag ast_node {
    node_item(@item);
    node_obj_ctor(@item);
    node_native_item(@native_item);
    node_expr(@expr);
}

// Maps node ids to their AST nodes.
type map = std::map::hashmap[node_id, ast_node];

// Walks the whole crate with a visitor, recording every item, native item,
// and expression under its node id.
fn map_crate(c: &crate) -> map {
    // FIXME: This is using an adapter to convert the smallintmap
    // interface to the hashmap interface. It would be better to just
    // convert everything to use the smallintmap.
    let map = new_smallintmap_int_adapter[ast_node]();

    let v_map = @{visit_item: bind map_item(map, _, _, _),
                  visit_native_item: bind map_native_item(map, _, _, _),
                  visit_expr: bind map_expr(map, _, _, _)
                     with *visit::default_visitor[()]()};
    visit::visit_crate(c, (), visit::mk_vt(v_map));
    ret map;
}

// Records an item; object items additionally register their constructor id.
fn map_item(map: &map, i: &@item, e: &(), v: &vt[()]) {
    map.insert(i.id, node_item(i));
    alt i.node {
      item_obj(_, _, ctor_id) { map.insert(ctor_id, node_obj_ctor(i)); }
      _ { }
    }
    visit::visit_item(i, e, v);
}

// Records a native (foreign) item.
fn map_native_item(map: &map, i: &@native_item, e: &(), v: &vt[()]) {
    map.insert(i.id, node_native_item(i));
    visit::visit_native_item(i, e, v);
}

// Records an expression.
fn map_expr(map: &map, ex: &@expr, e: &(), v: &vt[()]) {
    map.insert(ex.id, node_expr(ex));
    visit::visit_expr(ex, e, v);
}

// Specialization of the adapter below for int keys (the identity mapping
// between int keys and uint smallintmap indices).
fn new_smallintmap_int_adapter[@V]() -> std::map::hashmap[int, V] {
    let key_idx = fn (key: &int) -> uint { key as uint };
    let idx_key = fn (idx: &uint) -> int { idx as int };
    ret new_smallintmap_adapter(key_idx, idx_key);
}

// This creates an object with the hashmap interface backed
// by the smallintmap type, because I don't want to go through
// the entire codebase adapting all the callsites to the different
// interface.
// FIXME: hashmap and smallintmap should support the same interface.
fn new_smallintmap_adapter[@K, @V](key_idx: fn(&K) -> uint ,
                                   idx_key: fn(&uint) -> K )
   -> std::map::hashmap[K, V] {

    obj adapter[@K, @V](map: smallintmap::smallintmap[V],
                        key_idx: fn(&K) -> uint ,
                        idx_key: fn(&uint) -> K ) {

        // Unused by the callers; deliberately unimplemented.
        fn size() -> uint { fail }

        // Returns true when the key was not already present.
        fn insert(key: &K, value: &V) -> bool {
            let exists = smallintmap::contains_key(map, key_idx(key));
            smallintmap::insert(map, key_idx(key), value);
            ret !exists;
        }

        fn contains_key(key: &K) -> bool {
            ret smallintmap::contains_key(map, key_idx(key));
        }

        fn get(key: &K) -> V { ret smallintmap::get(map, key_idx(key)); }

        fn find(key: &K) -> option::t[V] {
            ret smallintmap::find(map, key_idx(key));
        }

        // Unused by the callers; deliberately unimplemented.
        fn remove(key: &K) -> option::t[V] { fail }

        fn rehash() { fail }

        // Iterates the dense backing vector, yielding only occupied slots.
        iter items() -> @{key: K, val: V} {
            let idx = 0u;
            for item: option::t[V] in map.v {
                alt item {
                  option::some(elt) {
                    let value = elt;
                    let key = idx_key(idx);
                    put @{key: key, val: value};
                  }
                  option::none. { }
                }
                idx += 1u;
            }
        }
        iter keys() -> K {
            for each p: @{key: K, val: V} in self.items() { put p.key; }
        }
    }

    let map = smallintmap::mk[V]();
    ret adapter(map, key_idx, idx_key);
}

// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C $RBUILD 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:
#[macro_use]
extern crate log;
extern crate env_logger;
extern crate libssh;

use libssh::constants::{SSHRequest,SSHAuthMethod};
use libssh::native::libssh::{SSH_LOG_NOLOG};
use libssh::ssh_bind::SSHBind;
use libssh::ssh_session::{SSHSession,SSHMessage};

/// Address and logging level for the example server (listens on port 2222).
const HOST: &'static str = "127.0.0.1";
const SSH_LOG_LEVEL: i32 = SSH_LOG_NOLOG;

/// Returns true when `msg` is anything other than a password-authentication
/// request (i.e. a message this server does not handle specially).
fn unhandled_preauth_req(msg: &SSHMessage) -> bool {
    let msg_type = msg.get_type();
    let msg_subtype = msg.get_subtype();
    println!("Message before auth: {:?}, {}", msg_type, msg_subtype);
    msg_type != SSHRequest::Auth || msg_subtype != SSHAuthMethod::Password as i32
}

/// Advertises password as the only accepted auth method and sends the default
/// reply. Errors from both calls are deliberately ignored via `.ok()`.
fn ans_pass_auth(msg: &SSHMessage) {
    msg.auth_set_methods(&[SSHAuthMethod::Password]).ok();
    msg.reply_default().ok();
}

/// Message loop for one session: prints any credentials the client offers.
///
/// NOTE(review): even after receiving a username/password this sends the same
/// default reply as the unhandled case, so clients are never actually granted
/// access — presumably intentional for this credential-printing example, but
/// confirm against libssh's reply-default semantics.
fn handle_session(session: &SSHSession) {
    loop {
        match session.get_message() {
            Ok(msg) => {
                if unhandled_preauth_req(&msg) {
                    ans_pass_auth(&msg);
                } else {
                    let user = msg.get_auth_user().unwrap();
                    let pass = msg.get_auth_password().unwrap();
                    println!("Authenticated user: {}", user);
                    println!("Password: {}", pass);
                    ans_pass_auth(&msg);
                }
            },
            // get_message fails when the client disconnects; leave the loop.
            Err(err_msg) => {
                println!("End of session: {}", err_msg);
                break;
            }
        }
    }
    session.disconnect();
}

/// Binds to HOST:2222 with the bundled RSA host key and serves sessions
/// sequentially, forever.
fn server() {
    let bind = SSHBind::new("./keys/id_rsa", Some(HOST), Some("2222")).unwrap();
    bind.set_log_level(SSH_LOG_LEVEL).unwrap();
    bind.listen().unwrap();
    println!("server: listening");
    loop {
        match bind.get_session() {
            Ok(session) => handle_session(&session),
            Err(err) => println!("Error while opening session {}", err)
        }
    }
}

fn main() {
    env_logger::init().unwrap();
    libssh::with_ssh(|| {
        server();
    })
}
// NOTE: machine-generated (svd2rust-style) register accessors for the DTR2
// (dead-time register 2) peripheral register: DTGF (bits 0..7), DTAE (bit 16),
// DTPE (bit 17). Do not hand-edit.
#[doc = "Reader of register DTR2"]
pub type R = crate::R<u32, super::DTR2>;
#[doc = "Writer for register DTR2"]
pub type W = crate::W<u32, super::DTR2>;
#[doc = "Register DTR2 `reset()`'s with value 0"]
impl crate::ResetValue for super::DTR2 {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `DTGF`"]
pub type DTGF_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DTGF`"]
pub struct DTGF_W<'a> {
    w: &'a mut W,
}
impl<'a> DTGF_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
#[doc = "Reader of field `DTAE`"]
pub type DTAE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DTAE`"]
pub struct DTAE_W<'a> {
    w: &'a mut W,
}
impl<'a> DTAE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
#[doc = "Reader of field `DTPE`"]
pub type DTPE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DTPE`"]
pub struct DTPE_W<'a> {
    w: &'a mut W,
}
impl<'a> DTPE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:7 - Dead-time generator setup"]
    #[inline(always)]
    pub fn dtgf(&self) -> DTGF_R {
        DTGF_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bit 16 - Deadtime Asymmetric Enable"]
    #[inline(always)]
    pub fn dtae(&self) -> DTAE_R {
        DTAE_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - Deadtime Preload Enable"]
    #[inline(always)]
    pub fn dtpe(&self) -> DTPE_R {
        DTPE_R::new(((self.bits >> 17) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bits 0:7 - Dead-time generator setup"]
    #[inline(always)]
    pub fn dtgf(&mut self) -> DTGF_W {
        DTGF_W { w: self }
    }
    #[doc = "Bit 16 - Deadtime Asymmetric Enable"]
    #[inline(always)]
    pub fn dtae(&mut self) -> DTAE_W {
        DTAE_W { w: self }
    }
    #[doc = "Bit 17 - Deadtime Preload Enable"]
    #[inline(always)]
    pub fn dtpe(&mut self) -> DTPE_W {
        DTPE_W { w: self }
    }
}
use mrusty::*;

use protocol::{WorldState, Entity};

/// Minimum radius difference for one entity to count as strictly bigger or
/// smaller than another (avoids flapping on tiny float differences).
/// Renamed from lowercase `threshold`, which triggered the
/// `non_upper_case_globals` warning.
const THRESHOLD: f64 = 0.1;

// Exposes `WorldState` to mruby scripts as the `WorldState` class.
mrusty_class!(WorldState, "WorldState", {
    // Current tick, truncated to an mruby fixnum.
    def!("time", |mruby, slf: (&WorldState)| {
        mruby.fixnum(slf.time as i32)
    });

    // All entities, wrapped as mruby objects.
    def!("entities", |mruby, slf: (&WorldState)| {
        let entities = slf.entities.iter()
            .map(|e| mruby.obj(e.clone()))
            .collect();
        mruby.array(entities)
    });
});

// Exposes `Entity` to mruby scripts as the `Entity` class.
mrusty_class!(Entity, "Entity", {
    def!("id", |mruby, slf: (&Entity)| {
        mruby.fixnum(slf.id as i32)
    });

    def!("x", |mruby, slf: (&Entity)| {
        mruby.float(slf.x)
    });

    def!("y", |mruby, slf: (&Entity)| {
        mruby.float(slf.y)
    });

    def!("radius", |mruby, slf: (&Entity)| {
        mruby.float(slf.radius)
    });

    // entity_type discriminants: 0 = creature, 1 = resource, 2 = spike.
    def!("creature?", |mruby, slf: (&Entity)| {
        mruby.bool(slf.entity_type == 0)
    });

    def!("resource?", |mruby, slf: (&Entity)| {
        mruby.bool(slf.entity_type == 1)
    });

    def!("spike?", |mruby, slf: (&Entity)| {
        mruby.bool(slf.entity_type == 2)
    });

    // NOTE(review): returns the *squared* distance (no sqrt). Scripts appear
    // to use it only for relative comparisons; confirm before "fixing".
    def!("distance", |mruby, slf: (&Entity), other: (&Entity)| {
        let dx = slf.x - other.x;
        let dy = slf.y - other.y;
        return mruby.float(dx * dx + dy * dy);
    });

    // Strict size ordering with a dead zone of THRESHOLD.
    def!("<", |mruby, slf: (&Entity), other: (&Entity)| {
        return mruby.bool(other.radius - slf.radius > THRESHOLD);
    });

    def!(">", |mruby, slf: (&Entity), other: (&Entity)| {
        return mruby.bool(slf.radius - other.radius > THRESHOLD);
    });
});
#[allow(dead_code)] use anyhow::{Context, Result}; use clap::Parser; /// Search for a pattern in a file and display the lines that contain it. #[derive(Parser)] struct Cli { /// The pattern to look for pattern: String, /// The path to the file to read #[clap(parse(from_os_str))] path: std::path::PathBuf, } fn main() -> Result<()> { let args = Cli::parse(); let content = std::fs::read_to_string(&args.path).expect("Couldn't read this file"); diru_grrs::find_matches(&content, &args.pattern, std::io::stdout()).with_context(|| "error finding matches")?; Ok(()) } #[test] fn find_a_match() { let mut writer = Vec::new(); let result = diru_grrs::find_matches("lorem ipsum\ndolor sit amet", "lorem", &mut writer).with_context(|| "error writing matches"); match result { Ok(_) => assert_eq!(writer, b"lorem ipsum\n"), Err(err) => assert_eq!("error writing matches", err.to_string()), } }
// Copyright (c) 2017 FaultyRAM
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT
// or http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.

//! A helper library for Google Code Jam solutions.
//!
//! In the Google Code Jam, solving a problem typically requires the following steps:
//!
//! 1. Open an input file containing a series of test cases.
//! 2. Open an output file where solutions will be written.
//! 3. Read the first line from the input file, which consists solely of an unsigned integer
//!    specifying the number of test cases in the input file.
//! 4. For each test case, perform the following steps:
//!    1. Obtain the corresponding test data by reading one or more lines from the input file (it
//!       may be a fixed number, or specified within the test data itself).
//!    2. Perform some logic using the test data, in order to obtain a set of results.
//!    3. Write the string `"Case #N:"` (where `N` is the number of completed test cases) followed
//!       by the results obtained in the previous step, formatted as the problem requires.
//!
//! Writing code to handle all of the above is tedious and time-consuming, in a situation where
//! every microsecond counts. `gcj-helper` is designed to handle the boilerplate, so you can focus
//! on writing solutions instead.
//!
//! # The `TestEngine` type
//!
//! To execute test cases, you need to create a `TestEngine` and call `TestEngine::run()`.
//! `TestEngine::run()` accepts two closures:
//!
//! 1. A *parser* that reads from an input file and returns the data for one test case.
//! 2. A *solver* that performs logic on the data for one test case and returns a result, encoded
//!    as a `Display` type.
//!
//! This two-step process to writing solutions is useful for two reasons:
//!
* It separates parsing from the solution itself, making your code easier to read; //! * It enables test case parallelisation if the `parallel` feature is enabled, improving //! run-time performance at the cost of increased build times. //! //! # The `InputReader` type //! //! `gcj-helper` provides parsers with access to an `InputReader`, which obtains data from the //! input file in a `io::Read`-like fashion. The `InputReader::read_next_line()` method reads a //! line of text from the input file, consuming the end-of-line marker, and returns a `&str` //! containing the result. //! //! # Formatting test results //! //! Before each test case, the `TestEngine` writes the string `"Case #N:"`, where `N` is the //! current test case. This does not prepend or append any whitespace. This means that if the //! colon must be followed by a space, your result should begin with one, and that the result must //! end with a newline. #![cfg_attr(feature = "clippy", feature(plugin))] #![cfg_attr(feature = "clippy", plugin(clippy))] #![cfg_attr(feature = "clippy", forbid(clippy))] #![cfg_attr(feature = "clippy", forbid(clippy_internal))] #![cfg_attr(feature = "clippy", forbid(clippy_pedantic))] #![forbid(warnings)] #![forbid(box_pointers)] #![forbid(fat_ptr_transmutes)] #![forbid(missing_copy_implementations)] #![forbid(missing_debug_implementations)] #![forbid(missing_docs)] #![forbid(trivial_casts)] #![forbid(trivial_numeric_casts)] #![forbid(unsafe_code)] #![forbid(unused_extern_crates)] #![forbid(unused_import_braces)] #![deny(unused_qualifications)] #![forbid(unused_results)] #![forbid(variant_size_differences)] #[cfg(feature = "parallel")] extern crate rayon; #[cfg(feature = "parallel")] use rayon::prelude::*; use std::{env, io}; use std::ffi::OsString; use std::fmt::{Arguments, Display}; use std::fs::{File, OpenOptions}; use std::io::{LineWriter, Read, Write}; use std::path::Path; /// Facilitates the execution of problem solving code. 
#[derive(Debug)]
pub struct TestEngine<I: AsRef<Path>, O: AsRef<Path>> {
    /// A path to an input file.
    input_file_path: I,
    /// A path to an output file.
    output_file_path: O,
}

/// Supports reading from an input file.
#[derive(Debug)]
pub struct InputReader {
    /// A string representing the input file.
    input: String,
    /// The current position within the input file.
    offset: usize,
}

/// Supports writing to an output file.
struct OutputWriter(LineWriter<File>);

impl<I: AsRef<Path>, O: AsRef<Path>> TestEngine<I, O> {
    /// Creates a new test engine using the specified input and output file paths.
    ///
    /// Calling this method is cheap; no files are opened until `TestEngine::run()` is called.
    pub fn new(input_file_path: I, output_file_path: O) -> TestEngine<I, O> {
        TestEngine {
            input_file_path: input_file_path,
            output_file_path: output_file_path,
        }
    }

    #[cfg(not(feature = "parallel"))]
    /// Consumes the test engine, executing a parser and solver once per test case.
    ///
    /// # Panics
    ///
    /// This method panics in the event of an I/O error.
    pub fn run<
        D: Sized + Send + Sync,
        R: Display + Sized + Send,
        P: Fn(&mut InputReader) -> D,
        S: Fn(&D) -> R + Sync
    > (
        self,
        p: P,
        s: S,
    ) {
        let mut reader = InputReader::new(self.input_file_path);
        let mut writer = OutputWriter::new(self.output_file_path);
        // Cases are numbered from 1 in the output.
        let mut current_case: usize = 1;
        let case_count = reader.get_case_count();
        while current_case <= case_count {
            // Parse one case, solve it, and write the result immediately.
            writer.write_test_result(current_case, (s)(&(p)(&mut reader)));
            current_case += 1;
        }
    }

    /// Consumes the test engine, executing a parser and solver once per test case.
    ///
    /// # Panics
    ///
    /// This method panics in the event of an I/O error.
    #[cfg(feature = "parallel")]
    pub fn run<
        D: Sized + Send + Sync,
        R: Display + Sized + Send,
        P: Fn(&mut InputReader) -> D,
        S: Fn(&D) -> R + Sync
    > (
        self,
        p: P,
        s: S,
    ) {
        let mut reader = InputReader::new(self.input_file_path);
        let mut writer = OutputWriter::new(self.output_file_path);
        let case_count = reader.get_case_count();
        // with_capacity(0) + reserve_exact requests exactly case_count slots,
        // avoiding the usual growth over-allocation.
        let mut data = Vec::with_capacity(0);
        data.reserve_exact(case_count);
        // Phase 1: parse sequentially (the parser mutably borrows the reader).
        for _ in 0..case_count {
            data.push((p(&mut reader), None));
        }
        // Phase 2: solve all cases in parallel via rayon.
        data.par_iter_mut().for_each(|d| d.1 = Some(s(&d.0)));
        // Phase 3: write results sequentially, preserving case order.
        for (i, &(_, ref r)) in data.iter().enumerate() {
            writer.write_test_result(
                i + 1,
                match *r {
                    Some(ref x) => x,
                    // Every slot was filled in phase 2.
                    None => unreachable!(),
                },
            );
        }
    }
}

impl TestEngine<OsString, OsString> {
    /// Creates a new test engine using input and output file paths obtained from command line
    /// arguments.
    ///
    /// Calling this method is cheap; no files are opened until `TestEngine::run()` is called.
    ///
    /// # Panics
    ///
    /// This method panics if either the input file path or output file path is missing.
    pub fn from_args() -> TestEngine<OsString, OsString> {
        let mut args = env::args_os();
        // nth(1) skips argv[0] (the executable name).
        let input_file_path = args.nth(1).expect("input file path not specified");
        let output_file_path = args.next().expect("output file path not specified");
        Self::new(input_file_path, output_file_path)
    }
}

impl Default for TestEngine<OsString, OsString> {
    fn default() -> TestEngine<OsString, OsString> {
        Self::from_args()
    }
}

impl InputReader {
    /// Reads a line of text from the input file, consuming the end-of-line marker if one is
    /// present.
    pub fn read_next_line(&mut self) -> &str {
        if self.offset >= self.input.len() {
            panic!("could not read line from input file: reached end of file");
        }
        let start = self.offset;
        // NOTE(review): `start` is used both as a byte offset (for slicing
        // below) and as a *character* count (`skip(start)`). These only
        // coincide for pure-ASCII input — fine for typical GCJ files, but
        // confirm before reusing this reader elsewhere.
        let end = self.input
            .char_indices()
            .skip(start)
            .take_while(|&(_, c)| c != '\n')
            .last()
            .map_or(start, |(i, _)| i + 1);
        let s = &self.input[start..end];
        // NOTE(review): an empty slice means the cursor sat directly on a line
        // terminator; skipping two bytes presumably steps over an empty line's
        // terminator pair — confirm against the expected line-ending format.
        if s.is_empty() {
            self.offset = end + 2;
        } else {
            self.offset = end + 1;
        }
        s
    }

    /// Creates a new input reader over the given input file.
    ///
    /// Slurps the whole file into memory up front; all subsequent reads are
    /// in-memory slicing.
    fn new<P: AsRef<Path>>(path: P) -> InputReader {
        let mut file = OpenOptions::new()
            .read(true)
            .open(path)
            .expect("could not open input file for reading");
        let mut s = String::with_capacity(0);
        let _ = file.read_to_string(&mut s)
            .expect("could not read input file into string");
        InputReader {
            input: s,
            offset: 0,
        }
    }

    /// Reads the number of test cases from the input file.
    fn get_case_count(&mut self) -> usize {
        usize::from_str_radix(self.read_next_line(), 10).expect("could not parse test case count")
    }
}

impl OutputWriter {
    /// Creates a new output writer over the given output file.
    ///
    /// Truncates any existing file at that path.
    fn new<P: AsRef<Path>>(path: P) -> OutputWriter {
        OutputWriter(
            LineWriter::new(
                OpenOptions::new()
                    .write(true)
                    .truncate(true)
                    .create(true)
                    .open(path)
                    .expect("could not open output file for writing"),
            ),
        )
    }

    /// Writes a test result to the output file.
    ///
    /// Assembles `"Case #<case>:<result>"` in one exactly-sized buffer and
    /// emits it with a single `write_all`.
    fn write_test_result<R: Display>(&mut self, case: usize, result: R) {
        let case_prefix = "Case #";
        let case_number = case.to_string();
        let case_colon = ":";
        let r = result.to_string();
        let mut output = String::with_capacity(0);
        output.reserve_exact(case_prefix.len() + case_number.len() + case_colon.len() + r.len(),);
        output.push_str(case_prefix);
        output.push_str(&case_number);
        output.push_str(case_colon);
        output.push_str(&r);
        self.write_all(output.as_bytes())
            .expect("could not write test result to output file");
    }
}

impl Write for OutputWriter {
    // Plain delegation to the underlying LineWriter.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.0.write(buf)
    }

    fn flush(&mut self) -> io::Result<()> {
        self.0.flush()
    }

    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.0.write_all(buf)
    }

    fn write_fmt(&mut self, fmt: Arguments) -> io::Result<()> {
        self.0.write_fmt(fmt)
    }
}
#![allow(dead_code)]

use anyhow::{anyhow, Error};
use std::fmt;
use std::fs::File;
use std::io::{BufRead, BufReader};
// NOTE(review): `std::option::NoneError` was a nightly-only type (the old
// `try_trait` feature) and was later removed from the standard library —
// this module pins an old nightly toolchain.
use std::option::NoneError;

/// Common error type for Advent-of-Code solutions; wraps an `anyhow::Error`.
#[derive(Debug)]
pub struct AOCError {
    pub e: Error,
}

/// Convenience alias used by solution functions.
pub type AOCResult<T> = Result<T, AOCError>;

impl std::fmt::Display for AOCError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.e)
    }
}

// This type of implementation allows me to use '?' to
// return early from an Optional that resolves to None in
// a function that returns Result<T, E>.
//
// This special logic -- creating a new struct -- is required because
// 1. NoneError does not implement std::error::Error and therefore can't
//    be given straight to an anyhow::Error
// 2. I cannot implement std's Error for NoneError -- I've seen this referred
//    to as orphan implementation. I own neither type nor trait; it's illegal.
//
//
impl From<NoneError> for AOCError {
    fn from(_: NoneError) -> Self {
        AOCError {
            e: anyhow!("Got None, expected Some(.)"),
        }
    }
}

// Oh gosh what a nightmare. I can't implement both a generic impl for
// From<T: std::error::Error> AND From<NoneError> because the two seem
// to overlap! This is bizarre because NoneError doesn't implement
// std::error::Error -- if it did, specialization should fix it -- but
// I think it's because NoneError quacks like a std::error::Error, which
// confuses the type system.
//
// To get around this, I'm just manually implementing per error as I
// encounter them.
//
// I hate this so much.

/// Generates a `From<$type> for AOCError` impl so `?` works on that error.
macro_rules! aocerror {
    ($type:path) => {
        impl From<$type> for AOCError {
            fn from(e: $type) -> Self {
                AOCError { e: Error::from(e) }
            }
        }
    };
}

aocerror!(std::num::ParseIntError);
aocerror!(regex::Error);
aocerror!(strum::ParseError);

//
// WHAT DO YOU MEAN MACROS HAVE DIFFERENT EXPORT RULES?
// Because modules are secretly slurped together into one giant file at the
// end of the day, and macros are metaprogramming widgets that are applied
// during a preprocessing, it doesn't make sense to have them namespaced.
// So we have the super ugly custom bail! macro to replace the fact that
// I can't just use anyhow::bail!()

/// Early-return with an `AOCError` built from a format string (like
/// `anyhow::bail!`, but producing this module's error type).
#[macro_export]
macro_rules! aocbail {
    ($($args:tt)*) => {
        return Err(AOCError {
            e: anyhow::anyhow!($($args)*),
        });
    };
}

/// Compiles a regex literal, panicking on an invalid pattern.
#[macro_export]
macro_rules! regex {
    ($regex:literal) => {
        regex::Regex::new($regex).unwrap()
    };
}

/// Streams the lines of `input/<filename>` as owned strings.
/// Panics on a missing file or non-UTF-8 content — acceptable for puzzle
/// inputs that are known to exist.
pub fn get_input(filename: &str) -> Box<dyn Iterator<Item = String>> {
    Box::new(
        BufReader::new(File::open("input/".to_owned() + filename).unwrap())
            .lines()
            .map(|l| l.unwrap()),
    )
}
use std::path::Path;
use std::fs::File;
use std::io::BufWriter;
use png::HasParameters;

/// An RGBA color with f64 channels nominally in [0.0, 1.0].
#[derive(Clone, Debug)]
pub struct Color {
    r: f64,
    g: f64,
    b: f64,
    a: f64
}

impl Color {
    /// Creates an opaque color (alpha fixed at 1.0).
    pub fn new(r: f64, g: f64, b: f64) -> Color {
        Color {r, g, b, a: 1.0}
    }

    /// Opaque black.
    pub fn black() -> Color {
        Color {r: 0.0, g: 0.0, b: 0.0, a: 1.0}
    }

    /// Component-wise product (alpha reset to 1.0 by `new`).
    pub fn mul(&self, other: &Color) -> Color {
        Color::new(self.r * other.r, self.g * other.g, self.b * other.b)
    }

    /// Multiplies each RGB channel by `scaler`.
    pub fn scale(&self, scaler: f64) -> Color {
        Color::new(self.r * scaler, self.g * scaler, self.b * scaler)
    }

    /// Component-wise sum (alpha reset to 1.0 by `new`).
    pub fn add(&self, other: &Color) -> Color {
        Color::new(self.r + other.r, self.g + other.g, self.b + other.b)
    }

    /// True when green strictly dominates both red and blue.
    pub fn is_green(&self) -> bool {
        self.g > self.r && self.g > self.b
    }
}

/// A row-major image: `h` rows of `w` pixels each, indexed as (row i, col j).
pub struct Image {
    img: Vec<Color>,
    pub w: usize,
    pub h: usize
}

impl Image {
    /// Creates a w-by-h image filled with opaque black.
    pub fn new_rgba(w: usize, h: usize) -> Image {
        let img = vec![Color::black(); w * h];
        Image {img, w, h}
    }

    /// Sets the pixel at row `i`, column `j`.
    pub fn set_rgb(&mut self, i: usize, j: usize, c: &Color) {
        // BUG FIX: was `i * self.h + j`, which disagreed with `at()`'s
        // `i * self.w + j` row-major indexing and corrupted any non-square
        // image.
        self.img[i * self.w + j] = c.clone();
    }

    /// Returns a copy of the pixel at row `i`, column `j`.
    pub fn at(&self, i: usize, j: usize) -> Color {
        self.img[i * self.w + j].clone()
    }

    /// Clamps a 0..255 float into a byte (NaN falls through to 0 via the
    /// saturating `as` cast).
    fn clip2byte(x: f64) -> u8 {
        if x > 255.0 { 255 } else if x < 0.0 { 0 } else { x as u8 }
    }

    /// Flattens the image into RGBA8 bytes, rows emitted top to bottom.
    pub fn flat(&self) -> Vec<u8> {
        let mut bytes = vec![];
        for i in 0 .. self.h {
            for j in 0 .. self.w {
                let c = self.at(i, j);
                let ur = Image::clip2byte(c.r * 255.0);
                let ug = Image::clip2byte(c.g * 255.0);
                let ub = Image::clip2byte(c.b * 255.0);
                let ua = Image::clip2byte(c.a * 255.0);
                bytes.push(ur);
                bytes.push(ug);
                bytes.push(ub);
                bytes.push(ua);
            }
        }
        bytes
    }

    /// Writes the image as a PNG to `file` (the name is historical; the
    /// output format is PNG, not PPM).
    pub fn write_ppm(&self, file: String) {
        let path = Path::new(&file);
        let file = File::create(path).unwrap();
        let w = &mut BufWriter::new(file);
        // BUG FIX: the dimensions were passed as (h, w), contradicting the
        // row-major layout `flat()` emits (h rows of w pixels); non-square
        // images came out garbled. PNG wants (width, height) = (w, h).
        let mut encoder = png::Encoder::new(w, self.w as u32, self.h as u32);
        encoder.set(png::ColorType::RGBA).set(png::BitDepth::Eight);
        let mut writer = encoder.write_header().unwrap();
        writer.write_image_data(&self.flat()).unwrap();
    }
}
use crate::lexer::*; use crate::parsers::expression::argument::argument_with_parenthesis; use crate::parsers::expression::argument::argument_without_parenthesis; use crate::parsers::expression::block::block; use crate::parsers::expression::block::do_block; /// `super` ( [ no ⏎ ] [ no ⎵ ] *argument_with_parenthesis* )? *block*? pub(crate) fn super_with_optional_argument(i: Input) -> NodeResult { map( tuple((tag("super"), opt(argument_with_parenthesis), opt(block))), |_| Node::Placeholder, )(i) } /// `super` *argument_without_parenthesis* pub(crate) fn super_with_argument(i: Input) -> NodeResult { map(tuple((tag("super"), argument_without_parenthesis)), |_| { Node::Placeholder })(i) } /// `super` *argument_without_parenthesis* *do_block* pub(crate) fn super_with_argument_and_do_block(i: Input) -> NodeResult { map( tuple((tag("super"), argument_without_parenthesis, do_block)), |_| Node::Placeholder, )(i) }
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// NOTE: machine-generated Windows Runtime bindings (windows-rs style) for the
// Windows.Management.Update preview-builds types. The `_abi` tuple structs
// list vtable slots in ABI order; instance methods are invoked below via
// `vtable(this).6` and up, so the first six slots carry the
// IUnknown/IInspectable machinery. Do not hand-edit.
#[repr(transparent)]
#[doc(hidden)]
pub struct IPreviewBuildsManager(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IPreviewBuildsManager {
    type Vtable = IPreviewBuildsManager_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xfa07dd61_7e4f_59f7_7c9f_def9051c5f62);
}
#[repr(C)]
#[doc(hidden)]
pub struct IPreviewBuildsManager_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut bool) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: bool) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(feature = "Foundation")]
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
);
#[repr(transparent)]
#[doc(hidden)]
pub struct IPreviewBuildsManagerStatics(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IPreviewBuildsManagerStatics {
    type Vtable = IPreviewBuildsManagerStatics_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x3e422887_b112_5a70_7da1_97d78d32aa29);
}
#[repr(C)]
#[doc(hidden)]
pub struct IPreviewBuildsManagerStatics_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut bool) -> ::windows::core::HRESULT,
);
#[repr(transparent)]
#[doc(hidden)]
pub struct IPreviewBuildsState(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IPreviewBuildsState {
    type Vtable = IPreviewBuildsState_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xa2f2903e_b223_5f63_7546_3e8eac070a2e);
}
#[repr(C)]
#[doc(hidden)]
pub struct IPreviewBuildsState_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(feature = "Foundation_Collections")]
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation_Collections"))] usize,
);
// Public runtime-class wrapper around IPreviewBuildsManager.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct PreviewBuildsManager(pub ::windows::core::IInspectable);
impl PreviewBuildsManager {
    pub fn ArePreviewBuildsAllowed(&self) -> ::windows::core::Result<bool> {
        let this = self;
        unsafe {
            let mut result__: bool = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<bool>(result__)
        }
    }
    pub fn SetArePreviewBuildsAllowed(&self, value: bool) -> ::windows::core::Result<()> {
        let this = self;
        unsafe { (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), value).ok() }
    }
    pub fn GetCurrentState(&self) -> ::windows::core::Result<PreviewBuildsState> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), &mut result__).from_abi::<PreviewBuildsState>(result__)
        }
    }
    #[cfg(feature = "Foundation")]
    pub fn SyncAsync(&self) -> ::windows::core::Result<super::super::Foundation::IAsyncOperation<bool>> {
        let this = self;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).9)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::Foundation::IAsyncOperation<bool>>(result__)
        }
    }
    // Static (factory) methods go through the statics interface below.
    pub fn GetDefault() -> ::windows::core::Result<PreviewBuildsManager> {
        Self::IPreviewBuildsManagerStatics(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<PreviewBuildsManager>(result__)
        })
    }
    pub fn IsSupported() -> ::windows::core::Result<bool> {
        Self::IPreviewBuildsManagerStatics(|this| unsafe {
            let mut result__: bool = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), &mut result__).from_abi::<bool>(result__)
        })
    }
    // Runs `callback` against the cached activation factory for this class.
    pub fn IPreviewBuildsManagerStatics<R, F: FnOnce(&IPreviewBuildsManagerStatics) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        static mut SHARED: ::windows::core::FactoryCache<PreviewBuildsManager, IPreviewBuildsManagerStatics> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
}
unsafe impl ::windows::core::RuntimeType for PreviewBuildsManager {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.Management.Update.PreviewBuildsManager;{fa07dd61-7e4f-59f7-7c9f-def9051c5f62})");
}
unsafe impl ::windows::core::Interface for PreviewBuildsManager {
    type Vtable = IPreviewBuildsManager_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xfa07dd61_7e4f_59f7_7c9f_def9051c5f62);
}
impl ::windows::core::RuntimeName for PreviewBuildsManager {
    const NAME: &'static str = "Windows.Management.Update.PreviewBuildsManager";
}
impl ::core::convert::From<PreviewBuildsManager> for ::windows::core::IUnknown {
    fn from(value: PreviewBuildsManager) -> Self {
        value.0 .0
    }
}
impl ::core::convert::From<&PreviewBuildsManager> for ::windows::core::IUnknown {
    fn from(value: &PreviewBuildsManager) -> Self {
        value.0 .0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for PreviewBuildsManager {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0 .0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a PreviewBuildsManager {
    fn
into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> { ::windows::core::Param::Borrowed(&self.0 .0) } } impl ::core::convert::From<PreviewBuildsManager> for ::windows::core::IInspectable { fn from(value: PreviewBuildsManager) -> Self { value.0 } } impl ::core::convert::From<&PreviewBuildsManager> for ::windows::core::IInspectable { fn from(value: &PreviewBuildsManager) -> Self { value.0.clone() } } impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for PreviewBuildsManager { fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> { ::windows::core::Param::Owned(self.0) } } impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a PreviewBuildsManager { fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> { ::windows::core::Param::Borrowed(&self.0) } } unsafe impl ::core::marker::Send for PreviewBuildsManager {} unsafe impl ::core::marker::Sync for PreviewBuildsManager {} #[repr(transparent)] #[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)] pub struct PreviewBuildsState(pub ::windows::core::IInspectable); impl PreviewBuildsState { #[cfg(feature = "Foundation_Collections")] pub fn Properties(&self) -> ::windows::core::Result<super::super::Foundation::Collections::ValueSet> { let this = self; unsafe { let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed(); (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::Foundation::Collections::ValueSet>(result__) } } } unsafe impl ::windows::core::RuntimeType for PreviewBuildsState { const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.Management.Update.PreviewBuildsState;{a2f2903e-b223-5f63-7546-3e8eac070a2e})"); } unsafe impl ::windows::core::Interface for PreviewBuildsState { type Vtable = IPreviewBuildsState_abi; const IID: 
::windows::core::GUID = ::windows::core::GUID::from_u128(0xa2f2903e_b223_5f63_7546_3e8eac070a2e); } impl ::windows::core::RuntimeName for PreviewBuildsState { const NAME: &'static str = "Windows.Management.Update.PreviewBuildsState"; } impl ::core::convert::From<PreviewBuildsState> for ::windows::core::IUnknown { fn from(value: PreviewBuildsState) -> Self { value.0 .0 } } impl ::core::convert::From<&PreviewBuildsState> for ::windows::core::IUnknown { fn from(value: &PreviewBuildsState) -> Self { value.0 .0.clone() } } impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for PreviewBuildsState { fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> { ::windows::core::Param::Owned(self.0 .0) } } impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a PreviewBuildsState { fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> { ::windows::core::Param::Borrowed(&self.0 .0) } } impl ::core::convert::From<PreviewBuildsState> for ::windows::core::IInspectable { fn from(value: PreviewBuildsState) -> Self { value.0 } } impl ::core::convert::From<&PreviewBuildsState> for ::windows::core::IInspectable { fn from(value: &PreviewBuildsState) -> Self { value.0.clone() } } impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for PreviewBuildsState { fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> { ::windows::core::Param::Owned(self.0) } } impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a PreviewBuildsState { fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> { ::windows::core::Param::Borrowed(&self.0) } } unsafe impl ::core::marker::Send for PreviewBuildsState {} unsafe impl ::core::marker::Sync for PreviewBuildsState {}
// Copyright 2019-2020 PolkaX. Licensed under MIT or Apache-2.0. mod filter_test; mod order_test; mod query_test; // use super::*; // use crate::query::Entry; // common utils // TODO
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::alloc::Allocator;
use std::intrinsics::unlikely;
use std::iter::TrustedLen;
use std::mem::MaybeUninit;

use common_base::mem_allocator::MmapAllocator;

use super::container::HeapContainer;
use super::table0::Entry;
use super::table0::Table0;
use super::table0::Table0Iter;
use super::table0::Table0IterMut;
use super::traits::HashtableLike;
use super::traits::Keyable;
use super::utils::ZeroEntry;
use crate::FastHash;

/// Hash table from keys `K` to values `V`, allocating through `A`.
///
/// The key that compares equal to "zero" (`K::equals_zero`) is special-cased:
/// every lookup/insert routes it to the out-of-line `zero` slot instead of
/// `table` — presumably because `Table0` reserves the zero key as its
/// empty-slot marker (TODO confirm against `table0`).
pub struct Hashtable<K, V, A = MmapAllocator>
where
    K: Keyable,
    A: Allocator + Clone,
{
    // Slot for the single possible entry with the zero key.
    pub(crate) zero: ZeroEntry<K, V>,
    // Storage for all entries with non-zero keys.
    pub(crate) table: Table0<K, V, HeapContainer<Entry<K, V>, A>, A>,
}

// SAFETY: the table owns its data; sendability/syncness reduces to that of
// `K`, `V` and the allocator.
unsafe impl<K: Keyable + Send, V: Send, A: Allocator + Clone + Send> Send for Hashtable<K, V, A> {}
unsafe impl<K: Keyable + Sync, V: Sync, A: Allocator + Clone + Sync> Sync for Hashtable<K, V, A> {}

impl<K, V, A> Hashtable<K, V, A>
where
    K: Keyable,
    A: Allocator + Clone + Default,
{
    /// Creates an empty table with the default allocator and default capacity.
    pub fn new() -> Self {
        Self::new_in(Default::default())
    }

    /// Creates an empty table sized for `capacity` entries, default allocator.
    pub fn with_capacity(capacity: usize) -> Self {
        Self::with_capacity_in(capacity, Default::default())
    }
}

impl<K, V, A> Default for Hashtable<K, V, A>
where
    K: Keyable,
    A: Allocator + Clone + Default,
{
    fn default() -> Self {
        Self::new()
    }
}

impl<K, V, A> Hashtable<K, V, A>
where
    K: Keyable,
    A: Allocator + Clone,
{
    /// Creates an empty table in `allocator` with a default capacity of 256.
    pub fn new_in(allocator: A) -> Self {
        Self::with_capacity_in(256, allocator)
    }

    /// Creates an empty table in `allocator` sized for `capacity` entries.
    pub fn with_capacity_in(capacity: usize, allocator: A) -> Self {
        Self {
            table: Table0::with_capacity_in(capacity, allocator),
            zero: ZeroEntry(None),
        }
    }

    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Number of entries, counting the out-of-line zero-key entry if present.
    #[inline(always)]
    pub fn len(&self) -> usize {
        self.zero.is_some() as usize + self.table.len()
    }

    #[inline(always)]
    pub fn capacity(&self) -> usize {
        self.zero.is_some() as usize + self.table.capacity()
    }

    /// Looks up the entry for `key`; the zero key hits the `zero` slot.
    #[inline(always)]
    pub fn entry(&self, key: &K) -> Option<&Entry<K, V>> {
        if unlikely(K::equals_zero(key)) {
            if let Some(entry) = self.zero.as_ref() {
                return Some(entry);
            } else {
                return None;
            }
        }
        unsafe { self.table.get(key) }
    }

    /// Looks up the value for `key`.
    #[inline(always)]
    pub fn get(&self, key: &K) -> Option<&V> {
        // SAFETY: relies on the invariant that every stored entry's value was
        // initialized by the caller of `insert*` (see `insert_and_entry` docs).
        unsafe { self.entry(key).map(|e| e.val.assume_init_ref()) }
    }

    /// Mutable variant of [`entry`](Self::entry).
    #[inline(always)]
    pub fn entry_mut(&mut self, key: &K) -> Option<&mut Entry<K, V>> {
        if unlikely(K::equals_zero(key)) {
            if let Some(entry) = self.zero.as_mut() {
                return Some(entry);
            } else {
                return None;
            }
        }
        unsafe { self.table.get_mut(key) }
    }

    /// Mutable variant of [`get`](Self::get).
    #[inline(always)]
    pub fn get_mut(&mut self, key: &K) -> Option<&mut V> {
        // SAFETY: same initialized-value invariant as `get`.
        unsafe { self.entry_mut(key).map(|e| e.val.assume_init_mut()) }
    }

    #[inline(always)]
    pub fn contains(&self, key: &K) -> bool {
        self.get(key).is_some()
    }

    /// # Safety
    ///
    /// The uninitialized value of returned entry should be written immediately.
    ///
    /// Returns `Ok(entry)` when `key` was newly inserted (value uninitialized),
    /// `Err(entry)` when `key` was already present.
    #[inline(always)]
    pub unsafe fn insert_and_entry(
        &mut self,
        key: K,
    ) -> Result<&mut Entry<K, V>, &mut Entry<K, V>> {
        if unlikely(K::equals_zero(&key)) {
            // `res` remembers whether the zero slot was already occupied so the
            // Ok/Err distinction survives the unconditional `as_mut` below.
            let res = self.zero.is_some();
            if !res {
                *self.zero = Some(MaybeUninit::zeroed().assume_init());
            }
            let zero = self.zero.as_mut().unwrap();
            if res {
                return Err(zero);
            } else {
                return Ok(zero);
            }
        }
        // Grow before inserting so `insert` always finds a free slot.
        self.table.check_grow();
        self.table.insert(key)
    }

    /// # Safety
    ///
    /// The returned uninitialized value should be written immediately.
    #[inline(always)]
    pub unsafe fn insert(&mut self, key: K) -> Result<&mut MaybeUninit<V>, &mut V> {
        match self.insert_and_entry(key) {
            Ok(e) => Ok(&mut e.val),
            Err(e) => Err(e.val.assume_init_mut()),
        }
    }

    /// Iterates all entries: the zero-key entry (if any) first, then the table.
    pub fn iter(&self) -> HashtableIter<'_, K, V> {
        HashtableIter {
            inner: self.zero.iter().chain(self.table.iter()),
        }
    }
}

// Set-like operations, available when the value type is `()`.
impl<K, A> Hashtable<K, (), A>
where
    K: Keyable,
    A: Allocator + Clone,
{
    /// Inserts `key`; `Ok` if newly inserted, `Err` if already present.
    #[inline(always)]
    pub fn set_insert(&mut self, key: K) -> Result<&mut MaybeUninit<()>, &mut ()> {
        unsafe { self.insert(key) }
    }

    /// Merges the key set of `other` into `self` (union).
    #[inline(always)]
    pub fn set_merge(&mut self, other: &Self) {
        if let Some(entry) = other.zero.0.as_ref() {
            self.zero = ZeroEntry(Some(Entry {
                key: entry.key,
                val: MaybeUninit::uninit(),
                _alignment: [0; 0],
            }));
        }
        unsafe {
            self.table.set_merge(&other.table);
        }
    }
}

/// Immutable entry iterator: zero entry (if any) first, then table entries.
pub struct HashtableIter<'a, K, V> {
    pub inner: std::iter::Chain<std::option::Iter<'a, Entry<K, V>>, Table0Iter<'a, K, V>>,
}

impl<'a, K, V> Iterator for HashtableIter<'a, K, V>
where K: Keyable
{
    type Item = &'a Entry<K, V>;
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

// SAFETY: the chained `Option` iterator has an exact length; presumably
// `Table0Iter` reports an exact `size_hint` as well (TODO confirm in `table0`),
// which is what `TrustedLen` requires here.
unsafe impl<'a, K, V> TrustedLen for HashtableIter<'a, K, V> where K: Keyable {}

/// Mutable entry iterator, same order as [`HashtableIter`].
pub struct HashtableIterMut<'a, K, V> {
    inner: std::iter::Chain<std::option::IterMut<'a, Entry<K, V>>, Table0IterMut<'a, K, V>>,
}

impl<'a, K, V> Iterator for HashtableIterMut<'a, K, V>
where K: Keyable
{
    type Item = &'a mut Entry<K, V>;
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }
}

impl<K, V, A> HashtableLike for Hashtable<K, V, A>
where
    K: Keyable + FastHash,
    A: Allocator + Clone + 'static,
{
    type Key = K;
    type Value = V;

    type EntryRef<'a> = &'a Entry<K, V> where Self: 'a, K: 'a, V: 'a;
    type EntryMutRef<'a> = &'a mut Entry<K, V> where Self: 'a, K: 'a, V: 'a;
    type Iterator<'a> = HashtableIter<'a, K, V> where Self: 'a, K: 'a, V: 'a;
    type IteratorMut<'a> = HashtableIterMut<'a, K, V> where Self: 'a, K: 'a, V: 'a;

    fn len(&self) -> usize {
        // Dispatches to the inherent `len` (inherent methods shadow trait methods).
        self.len()
    }

    // Approximate heap footprint: the struct itself plus the table's heap bytes.
    fn bytes_len(&self) -> usize {
        std::mem::size_of::<Self>() + self.table.heap_bytes()
    }

    fn entry(&self, key_ref: &Self::Key) -> Option<Self::EntryRef<'_>> {
        self.entry(key_ref)
    }
    fn entry_mut(&mut self, key_ref: &Self::Key) -> Option<Self::EntryMutRef<'_>> {
        self.entry_mut(key_ref)
    }

    fn get(&self, key_ref: &Self::Key) -> Option<&Self::Value> {
        self.get(key_ref)
    }
    fn get_mut(&mut self, key_ref: &Self::Key) -> Option<&mut Self::Value> {
        self.get_mut(key_ref)
    }

    unsafe fn insert(
        &mut self,
        key: &Self::Key,
    ) -> Result<&mut MaybeUninit<Self::Value>, &mut Self::Value> {
        self.insert(*key)
    }

    // NOTE: duplicates the inherent `insert_and_entry` logic (the trait takes
    // `&Self::Key` rather than `K` by value).
    #[inline(always)]
    unsafe fn insert_and_entry(
        &mut self,
        key: &Self::Key,
    ) -> Result<Self::EntryMutRef<'_>, Self::EntryMutRef<'_>> {
        if unlikely(K::equals_zero(key)) {
            let res = self.zero.is_some();
            if !res {
                *self.zero = Some(MaybeUninit::zeroed().assume_init());
            }
            let zero = self.zero.as_mut().unwrap();
            if res {
                return Err(zero);
            } else {
                return Ok(zero);
            }
        }
        self.table.check_grow();
        self.table.insert(*key)
    }

    // Same as above, but reuses a caller-computed hash to avoid rehashing.
    #[inline(always)]
    unsafe fn insert_and_entry_with_hash(
        &mut self,
        key: &Self::Key,
        hash: u64,
    ) -> Result<Self::EntryMutRef<'_>, Self::EntryMutRef<'_>> {
        if unlikely(K::equals_zero(key)) {
            let res = self.zero.is_some();
            if !res {
                *self.zero = Some(MaybeUninit::zeroed().assume_init());
            }
            let zero = self.zero.as_mut().unwrap();
            if res {
                return Err(zero);
            } else {
                return Ok(zero);
            }
        }
        self.table.check_grow();
        self.table.insert_with_hash(*key, hash)
    }

    fn iter(&self) -> Self::Iterator<'_> {
        HashtableIter {
            inner: self.zero.iter().chain(self.table.iter()),
        }
    }

    fn clear(&mut self) {
        // NOTE(review): dropping an `Entry` does not drop the `V` inside its
        // `MaybeUninit` — presumably values are trivially droppable or freed
        // elsewhere; confirm before storing owning `V` types.
        self.zero.0.take();
        self.table.clear();
    }
}
extern crate cgl;

mod demo;

/// Entry point: loads demo image number 7 and saves it under number 8.
fn main() {
    let loaded = demo::load(7);
    demo::save(&loaded, 8);
}
use bintree::Tree;
use std::fmt;

/// Inserts `value` into the binary search tree rooted at `node`:
/// smaller values descend left, everything else descends right; an empty
/// (`end`) node is replaced by a new leaf.
///
/// NOTE(review): if a leaf's child accessor returns `None`, the value is
/// silently dropped — confirm `bintree::Tree` leaves always expose both
/// child slots.
pub fn add_value<T: fmt::Display + Copy + PartialOrd>(node: &mut Tree<T>, value: T) {
    if let Some(v) = node.get_value() {
        if value < *v {
            if let Some(left) = node.get_left_mut() {
                add_value(left, value);
            }
        } else if let Some(right) = node.get_right_mut() {
            add_value(right, value);
        }
    } else {
        *node = Tree::leaf(value);
    }
}

/// Builds a binary search tree by inserting each element of `values` in order.
///
/// Generalized to accept any slice; existing callers passing `&Vec<T>` still
/// work through deref coercion.
pub fn from_list<T: fmt::Display + Copy + PartialOrd>(values: &[T]) -> Tree<T> {
    let mut tree = Tree::<T>::end();
    for &value in values {
        add_value(&mut tree, value);
    }
    tree
}

#[cfg(test)]
mod tests {
    use super::*;
    use bintree::Tree;

    #[test]
    fn test_add_value() {
        let mut tree = Tree::end();
        add_value(&mut tree, 2);
        assert_eq!(tree.to_string(), "T(2 . .)");
        add_value(&mut tree, 3);
        assert_eq!(tree.to_string(), "T(2 . T(3 . .))");
        add_value(&mut tree, 0);
        assert_eq!(tree.to_string(), "T(2 T(0 . .) T(3 . .))");
    }

    #[test]
    fn test_from_list() {
        let tree = from_list(&vec![3, 2, 5, 7, 1]);
        assert_eq!(tree.to_string(), "T(3 T(2 T(1 . .) .) T(5 . T(7 . .)))")
    }
}
/*
 * @lc app=leetcode.cn id=226 lang=rust
 *
 * [226] Invert Binary Tree
 *
 * https://leetcode-cn.com/problems/invert-binary-tree/description/
 *
 * algorithms / Easy (66.54%)
 * Testcase Example: '[4,2,7,1,3,6,9]'
 *
 * Invert a binary tree: swap the left and right children of every node.
 *
 * Example:
 *   input:          output:
 *        4               4
 *       / \             / \
 *      2   7           7   2
 *     / \ / \         / \ / \
 *    1  3 6  9       9  6 3  1
 */
use std::cell::RefCell;
use std::rc::Rc;

// Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
    pub val: i32,
    pub left: Option<Rc<RefCell<TreeNode>>>,
    pub right: Option<Rc<RefCell<TreeNode>>>,
}

impl TreeNode {
    /// Creates a node holding `val` with no children.
    #[inline]
    pub fn new(val: i32) -> Self {
        TreeNode {
            val,
            left: None,
            right: None,
        }
    }
}

impl Solution {
    /// Returns the tree with every node's children swapped, recursively.
    pub fn invert_tree(root: Option<Rc<RefCell<TreeNode>>>) -> Option<Rc<RefCell<TreeNode>>> {
        let mut root = root;
        Self::invert_tree1(&mut root);
        root
    }

    /// Inverts the subtree rooted at `node` in place.
    fn invert_tree1(node: &mut Option<Rc<RefCell<TreeNode>>>) {
        if let Some(node) = node {
            // `Option::take` moves each child out (leaving `None`) so no
            // `borrow_mut` guard is held across the recursive calls.
            let mut left = node.borrow_mut().left.take();
            let mut right = node.borrow_mut().right.take();
            Self::invert_tree1(&mut left);
            Self::invert_tree1(&mut right);
            node.borrow_mut().left = right;
            node.borrow_mut().right = left;
        }
    }
}

fn main() {
    let mut root = TreeNode::new(10);
    root.left = Some(Rc::new(RefCell::new(TreeNode::new(9))));
    root.right = Some(Rc::new(RefCell::new(TreeNode::new(11))));
    let root = Solution::invert_tree(Some(Rc::new(RefCell::new(root))));
    println!("{:#?}", root);
}

struct Solution {}
// Copyright (c) 2016 DWANGO Co., Ltd. All Rights Reserved. // See the LICENSE file at the top-level directory of this distribution. //! The `Executor` trait and its implementations. use std::io; use futures::{Async, Future, IntoFuture}; pub use self::in_place::{InPlaceExecutor, InPlaceExecutorHandle}; pub use self::thread_pool::{ThreadPoolExecutor, ThreadPoolExecutorHandle}; use fiber::Spawn; use sync::oneshot::{Monitor, MonitorError}; mod in_place; mod thread_pool; /// The `Executor` trait allows for spawning and executing fibers. pub trait Executor: Sized { /// The handle type of the executor. type Handle: Spawn + Clone + Send + 'static; /// Returns the handle of the executor. fn handle(&self) -> Self::Handle; /// Spawns a fiber which will execute given future. fn spawn<F>(&self, future: F) where F: Future<Item = (), Error = ()> + Send + 'static { self.handle().spawn(future) } /// Equivalent to `self.spawn(futures::lazy(|| f()))`. fn spawn_fn<F, T>(&self, f: F) where F: FnOnce() -> T + Send + 'static, T: IntoFuture<Item = (), Error = ()> + Send + 'static, T::Future: Send { self.handle().spawn_fn(f) } /// Spawns a fiber and returns a future to monitor it's execution result. fn spawn_monitor<F, T, E>(&self, f: F) -> Monitor<T, E> where F: Future<Item = T, Error = E> + Send + 'static, T: Send + 'static, E: Send + 'static { self.handle().spawn_monitor(f) } /// Runs one one unit of works. fn run_once(&mut self) -> io::Result<()>; /// Runs until the monitored fiber exits. fn run_fiber<T, E>(&mut self, mut monitor: Monitor<T, E>) -> io::Result<Result<T, MonitorError<E>>> { loop { match monitor.poll() { Err(e) => return Ok(Err(e)), Ok(Async::Ready(v)) => return Ok(Ok(v)), Ok(Async::NotReady) => {} } self.run_once()?; } } /// Runs infinitely until an error happens. fn run(mut self) -> io::Result<()> { loop { self.run_once()? } } }
extern crate gpioctrl; extern crate structopt; #[macro_use] extern crate structopt_derive; use structopt::StructOpt; use gpioctrl::*; const PIN_LED1:i32 = 0; const PIN_LED2:i32 = 2; const PIN_INPUT:i32 = 3; fn led1_ctrl(status: &str) { match status { "on" => , "off" => , _ => (), } } fn led2_ctrl(status: &str) { match status { "on" => , "off" => , _ => (), } } fn init_ports() { wiringPiSetup(); pinMode(PIN_LED1, cffi::OUTPUT); pinMode(PIN_LED2, cffi::OUTPUT); pinMode(PIN_INPUT, cffi::INPUT); } #[derive(StructOpt, Debug)] struct Opt { #[structopt(long = "led1", help = "turn led1 ON/OFF")] led1: Option<String>, #[structopt(long = "led2", help = "turn led2 ON/OFF")] led2: Option<String>, #[structopt(long="readinput", help = "read status of input pin")] readinput: bool, } fn main() { let opt = Opt::from_args(); println!("{:?}", opt); init_ports(); if let Some(status) = opt.led1 { led1_ctrl(&status); } if let Some(status) = opt.led2 { led2_ctrl(&status); } if opt.readinput { println!("{}", digitalRead(PIN_INPUT)); } }
// Copyright 2020-2021, The Tremor Team // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::errors::Error; use http_types::{headers, StatusCode}; use serde::{Deserialize, Serialize}; use tide::Response; use tremor_runtime::system::World; use tremor_runtime::url::TremorUrl; pub mod binding; pub mod offramp; pub mod onramp; pub mod pipeline; pub mod prelude; pub mod version; pub type Request = tide::Request<State>; pub type Result<T> = std::result::Result<T, Error>; #[derive(Clone)] pub struct State { pub world: World, } #[derive(Clone, Copy, Debug)] pub enum ResourceType { Json, Yaml, Trickle, } impl ResourceType { #[must_use] pub fn as_str(self) -> &'static str { match self { Self::Yaml => "application/yaml", Self::Json => "application/json", Self::Trickle => "application/vnd.trickle", } } } #[must_use] pub fn content_type(req: &Request) -> Option<ResourceType> { match req .header(&headers::CONTENT_TYPE) .map(headers::HeaderValues::last) .map(headers::HeaderValue::as_str) { Some("application/yaml") => Some(ResourceType::Yaml), Some("application/json") => Some(ResourceType::Json), Some("application/vnd.trickle") => Some(ResourceType::Trickle), _ => None, } } #[must_use] pub fn accept(req: &Request) -> ResourceType { // TODO implement correctly / RFC compliance match req .header(headers::ACCEPT) .map(headers::HeaderValues::last) .map(headers::HeaderValue::as_str) { Some("application/yaml") => ResourceType::Yaml, Some("application/vnd.trickle") => 
ResourceType::Trickle, _ => ResourceType::Json, } } pub fn serialize<T: Serialize>(t: ResourceType, d: &T, code: StatusCode) -> Result<Response> { match t { ResourceType::Yaml => Ok(Response::builder(code) .header(headers::CONTENT_TYPE, t.as_str()) .body(serde_yaml::to_string(d)?) .build()), ResourceType::Json => Ok(Response::builder(code) .header(headers::CONTENT_TYPE, t.as_str()) .body(simd_json::to_string(d)?) .build()), ResourceType::Trickle => Err(Error::new( StatusCode::InternalServerError, "Unsuported formatting as trickle".into(), )), } } pub fn serialize_error(t: ResourceType, d: Error) -> Result<Response> { match t { ResourceType::Json | ResourceType::Yaml => serialize(t, &d, d.code), // formatting errors as trickle does not make sense so for this // fall back to the error's conversion into tide response ResourceType::Trickle => Ok(d.into()), } } pub async fn reply<T: Serialize + Send + Sync + 'static>( req: Request, result_in: T, persist: bool, ok_code: StatusCode, ) -> Result<Response> { if persist { let world = &req.state().world; world.save_config().await?; } serialize(accept(&req), &result_in, ok_code) } async fn decode<T>(mut req: Request) -> Result<(Request, T)> where for<'de> T: Deserialize<'de>, { let mut body = req.body_bytes().await?; match content_type(&req) { Some(ResourceType::Yaml) => serde_yaml::from_slice(body.as_slice()) .map_err(|e| { Error::new( StatusCode::BadRequest, format!("Could not decode YAML: {}", e), ) }) .map(|data| (req, data)), Some(ResourceType::Json) => simd_json::from_slice(body.as_mut_slice()) .map_err(|e| { Error::new( StatusCode::BadRequest, format!("Could not decode JSON: {}", e), ) }) .map(|data| (req, data)), Some(ResourceType::Trickle) | None => Err(Error::new( StatusCode::UnsupportedMediaType, "No content type provided".into(), )), } } fn build_url(path: &[&str]) -> Result<TremorUrl> { let url = format!("/{}", path.join("/")); TremorUrl::parse(&url).map_err(|_e| { Error::new( StatusCode::InternalServerError, 
format!("Could not decode Tremor URL: {}", url), ) }) }
use std::collections::HashMap;
use std::collections::LinkedList;
use std::collections::BTreeSet;

/// Demo exercising several std collections (`HashMap`, `LinkedList`,
/// `BTreeSet`); prints a message and builds each collection without
/// returning anything.
pub fn linklistboi() {
    let rust_boi = true;
    // Idiom fix: test the bool directly instead of `== true`.
    if rust_boi {
        println!("Rust is pretty cool");
    }

    let mut cars = HashMap::new();
    cars.insert("BMW", 1);
    cars.insert("Audi", 2);
    cars.insert("Ferrari", 3);

    let mut linkstuff = LinkedList::new();
    linkstuff.push_front(3);

    let mut btreeboi = BTreeSet::new();
    btreeboi.insert(43);
}
mod lib;
use lib::spdbify_proteomes;
extern crate clap;
use clap::{App, Arg};

/// CLI entry point: parses `--map` and `--out` and hands them to
/// `spdbify_proteomes`.
fn main() {
    let matches = App::new("spdbify")
        .version("1.0")
        .author("Kevin Amses <amsesk@umich.edu>")
        .about("This crate simply prepares protein FASTAs and lineage information for incorporation into swissprot-style databases used by SCGid.")
        .arg(Arg::with_name("mappings")
            .long("map")
            .value_name("PATH")
            .help("Path to three-columned map (tsv) mapping organism tags (1) to the full paths to proteome fastas (2), and organism lineage information (3).")
            // Required so the unwrap below cannot panic; clap prints a
            // proper usage error instead when the flag is missing.
            .required(true)
            .takes_value(true))
        .arg(Arg::with_name("output")
            .long("out")
            .value_name("PATH")
            .help("Path to output concatenated, renamed protein FASTA. Directories must exist.")
            .required(true)
            .takes_value(true))
        .get_matches();

    spdbify_proteomes(
        matches.value_of("mappings").unwrap(),
        matches.value_of("output").unwrap(),
    );
}
//! The `madvise` function. //! //! # Safety //! //! `madvise` operates on a raw pointer. Some forms of `madvise` may //! mutate the memory or have other side effects. #![allow(unsafe_code)] use crate::{backend, io}; use core::ffi::c_void; pub use backend::mm::types::Advice; /// `posix_madvise(addr, len, advice)`—Declares an expected access pattern /// for a memory-mapped file. /// /// # Safety /// /// `addr` must be a valid pointer to memory that is appropriate to /// call `posix_madvise` on. Some forms of `advice` may mutate the memory /// or evoke a variety of side-effects on the mapping and/or the file. /// /// # References /// - [POSIX] /// - [Linux `madvise`] /// - [Linux `posix_madvise`] /// - [Apple] /// - [FreeBSD] /// - [NetBSD] /// - [OpenBSD] /// - [DragonFly BSD] /// - [illumos] /// - [glibc] /// /// [POSIX]: https://pubs.opengroup.org/onlinepubs/9699919799/functions/posix_madvise.html /// [Linux `madvise`]: https://man7.org/linux/man-pages/man2/madvise.2.html /// [Linux `posix_madvise`]: https://man7.org/linux/man-pages/man3/posix_madvise.3.html /// [Apple]: https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man2/madvise.2.html /// [FreeBSD]: https://man.freebsd.org/cgi/man.cgi?query=madvise&sektion=2 /// [NetBSD]: https://man.netbsd.org/madvise.2 /// [OpenBSD]: https://man.openbsd.org/madvise.2 /// [DragonFly BSD]: https://man.dragonflybsd.org/?command=madvise&section=2 /// [illumos]: https://illumos.org/man/3C/madvise /// [glibc]: https://www.gnu.org/software/libc/manual/html_node/Memory_002dmapped-I_002fO.html#index-madvise #[inline] #[doc(alias = "posix_madvise")] pub unsafe fn madvise(addr: *mut c_void, len: usize, advice: Advice) -> io::Result<()> { backend::mm::syscalls::madvise(addr, len, advice) }
use std::io::Read; use hilbert_curve::BytewiseCached; use typedrw::TypedMemoryMap; pub trait EdgeMapper { fn map_edges(&self, action: impl FnMut(u32, u32)); } pub struct DeltaCompressedReaderMapper<R: Read, F: Fn()->R> { reader: F, } impl<R: Read, F: Fn()->R> DeltaCompressedReaderMapper<R, F> { pub fn new(reader: F) -> DeltaCompressedReaderMapper<R, F> { DeltaCompressedReaderMapper { reader: reader, } } } impl<R: Read, F: Fn()->R> EdgeMapper for DeltaCompressedReaderMapper<R, F> { fn map_edges(&self, mut action: impl FnMut(u32, u32)) { let mut hilbert = BytewiseCached::new(); let mut current = 0u64; let mut reader = (self.reader)(); let mut delta = 0u64; // for accumulating a delta let mut depth = 0u8; // for counting number of zeros let mut buffer = vec![0u8; 1 << 16]; while let Ok(read) = reader.read(&mut buffer[..]) { if read == 0 { // Reached EOF. break; } for &byte in &buffer[..read] { if byte == 0 && delta == 0 { depth += 1; } else { delta = (delta << 8) + (byte as u64); if depth == 0 { current += delta; delta = 0; let (x,y) = hilbert.detangle(current); action(x,y); } else { depth -= 1; } } } } } } pub struct DeltaCompressedSliceMapper<'a> { slice: &'a [u8], } impl<'a> DeltaCompressedSliceMapper<'a> { pub fn new(slice: &'a [u8]) -> DeltaCompressedSliceMapper<'a> { DeltaCompressedSliceMapper { slice: slice, } } } impl<'a> EdgeMapper for DeltaCompressedSliceMapper<'a> { fn map_edges(&self, mut action: impl FnMut(u32, u32)) { let mut hilbert = BytewiseCached::new(); let mut current = 0u64; let mut cursor = 0; while cursor < self.slice.len() { let byte = unsafe { *self.slice.get_unchecked(cursor) }; cursor += 1; if byte > 0 { current += byte as u64; let (x,y) = hilbert.detangle(current); action(x,y); } else { let mut depth = 2; while unsafe { *self.slice.get_unchecked(cursor) } == 0 { cursor += 1; depth += 1; } let mut delta = 0u64; while depth > 0 { delta = (delta << 8) + (unsafe { *self.slice.get_unchecked(cursor) } as u64); cursor += 1; depth -= 1; } current 
+= delta; let (x,y) = hilbert.detangle(current); action(x,y); } } } } // // naughty method using unsafe transmute to read a filled binary buffer as a typed buffer // fn read_as_typed<'a, R: Read, T: Copy>(reader: &mut R, buffer: &'a mut[u8]) -> Result<&'a[T]> { // if mem::size_of::<T>() * (buffer.len() / mem::size_of::<T>()) < buffer.len() { // panic!("buffer size must be a multiple of mem::size_of::<T>() = {:?}", mem::size_of::<T>()); // } // // let mut read = try!(reader.read(buffer)); // while mem::size_of::<T>() * (read / mem::size_of::<T>()) < read { // read += try!(reader.read(&mut buffer[read..])); // } // // Ok(unsafe { mem::transmute(RawSlice { // data: buffer.as_mut_ptr() as *const T, // len: read / mem::size_of::<T>(), // }) }) // } // pub struct UpperLowerMapper<R1: Read, R2: Read, F1: Fn()->R1, F2: Fn()->R2> { // pub upper: F1, // pub lower: F2, // } // // impl<R1: Read, R2: Read, F1: Fn()->R1, F2: Fn()->R2> EdgeMapper for UpperLowerMapper<R1, R2, F1, F2> { // fn map_edges<F: FnMut(u32, u32) -> ()>(&self, mut action: F) -> () { // let mut upper_reader = (self.upper)(); // let mut lower_reader = (self.lower)(); // let mut upper_buffer = vec![0u8; 1 << 20]; // let mut lower_buffer = vec![0u8; 1 << 20]; // while let Ok(upper) = read_as_typed::<_,((u16,u16),u32)>(&mut upper_reader, &mut upper_buffer[..]) { // for &((ux, uy), mut count) in upper { // let ux = (ux as u32) << 16; // let uy = (uy as u32) << 16; // while count > 0 { // let size = min(lower_buffer.len(), 4 * count as usize); // if let Ok(lower) = read_as_typed::<_,(u16,u16)>(&mut lower_reader, &mut lower_buffer[..size]) { // for &(lx, ly) in lower { // action(ux + lx as u32, uy + ly as u32); // } // count -= lower.len() as u32; // } // } // } // } // } // } // pub struct NodesEdgesMapper<R1: Read, R2: Read, F1: Fn()->R1, F2: Fn()->R2> { // pub nodes: F1, // pub edges: F2, // } // // impl<R1: Read, R2: Read, F1: Fn()->R1, F2: Fn()->R2> EdgeMapper for NodesEdgesMapper<R1, R2, F1, F2> { // fn 
map_edges<F: FnMut(u32, u32) -> ()>(&self, mut action: F) -> () { // let mut nodes_reader = (self.nodes)(); // let mut edges_reader = (self.edges)(); // let mut nodes_buffer = vec![0u8; 1 << 20]; // let mut edges_buffer = vec![0u8; 1 << 20]; // while let Ok(nodes) = read_as_typed::<_,(u32,u32)>(&mut nodes_reader, &mut nodes_buffer[..]) { // for &(source, mut count) in nodes { // while count > 0 { // let size = min(edges_buffer.len(), 4 * count as usize); // if let Ok(edges) = read_as_typed::<_,u32>(&mut edges_reader, &mut edges_buffer[..size]) { // for &target in edges { // action(source, target); // } // count -= edges.len() as u32; // } // } // } // } // } // } pub struct UpperLowerMemMapper { upper: TypedMemoryMap<((u16,u16), u32)>, lower: TypedMemoryMap<(u16, u16)>, } impl UpperLowerMemMapper { pub fn new(graph_name: &str) -> UpperLowerMemMapper { UpperLowerMemMapper { upper: TypedMemoryMap::new(format!("{}.upper", graph_name)), lower: TypedMemoryMap::new(format!("{}.lower", graph_name)), } } } impl EdgeMapper for UpperLowerMemMapper { fn map_edges(&self, mut action: impl FnMut(u32, u32)) { let mut slice = &self.lower[..]; for &((u16_x, u16_y), count) in &self.upper[..] { let u16_x = (u16_x as u32) << 16; let u16_y = (u16_y as u32) << 16; for &(l16_x, l16_y) in &slice[.. count as usize] { action(u16_x | l16_x as u32, u16_y | l16_y as u32); } slice = &slice[count as usize ..]; } } } pub struct NodesEdgesMemMapper { nodes: TypedMemoryMap<(u32, u32)>, edges: TypedMemoryMap<u32>, } impl NodesEdgesMemMapper { pub fn new(graph_name: &str) -> NodesEdgesMemMapper { NodesEdgesMemMapper { nodes: TypedMemoryMap::new(format!("{}.nodes", graph_name)), edges: TypedMemoryMap::new(format!("{}.edges", graph_name)), } } } impl EdgeMapper for NodesEdgesMemMapper { fn map_edges(&self, mut action: impl FnMut(u32, u32)) { let mut slice = &self.edges[..]; for &(node, count) in &self.nodes[..] { for &edge in &slice[.. 
count as usize] { action(node, edge); } slice = &slice[count as usize ..]; } } } pub struct ReaderMapper<B: ::std::io::BufRead, F: Fn() -> B> { pub reader: F, } impl<R: ::std::io::BufRead, RF: Fn() -> R> EdgeMapper for ReaderMapper<R, RF> { fn map_edges(&self, mut action: impl FnMut(u32, u32)) { let reader = (self.reader)(); for readline in reader.lines() { let line = readline.ok().expect("read error"); if !line.starts_with('#') { let mut elts = line[..].split_whitespace(); let src: u32 = elts.next().unwrap().parse().ok().expect("malformed src"); let dst: u32 = elts.next().unwrap().parse().ok().expect("malformed dst"); action(src, dst); } } } }
/// Platform independent SIMD vector types and operations. #[unstable(feature = "stdsimd", issue = "0")] pub mod simd { pub use coresimd::v128::*; pub use coresimd::v256::*; pub use coresimd::v512::*; pub use coresimd::v64::*; } /// Platform dependent vendor intrinsics. #[unstable(feature = "stdsimd", issue = "0")] pub mod arch { #[cfg(target_arch = "x86")] pub mod x86 { pub use coresimd::x86::*; } #[cfg(target_arch = "x86_64")] pub mod x86_64 { pub use coresimd::x86::*; } #[cfg(target_arch = "arm")] pub mod arm { pub use coresimd::arm::*; } #[cfg(target_arch = "aarch64")] pub mod aarch64 { pub use coresimd::arm::*; pub use coresimd::aarch64::*; } } #[macro_use] mod macros; mod simd_llvm; mod v128; mod v256; mod v512; mod v64; /// 32-bit wide vector tpyes mod v32 { #[cfg(not(test))] use prelude::v1::*; use coresimd::simd_llvm::*; define_ty! { i16x2, i16, i16 } define_impl! { i16x2, i16, 2, i16x2, x0, x1 } define_ty! { u16x2, u16, u16 } define_impl! { u16x2, u16, 2, i16x2, x0, x1 } define_ty! { i8x4, i8, i8, i8, i8 } define_impl! { i8x4, i8, 4, i8x4, x0, x1, x2, x3 } define_ty! { u8x4, u8, u8, u8, u8 } define_impl! { u8x4, u8, 4, i8x4, x0, x1, x2, x3 } define_casts!( (i16x2, i64x2, as_i64x2), (u16x2, i64x2, as_i64x2), (i8x4, i32x4, as_i32x4), (u8x4, i32x4, as_i32x4) ); } /// 16-bit wide vector tpyes mod v16 { #[cfg(not(test))] use prelude::v1::*; use coresimd::simd_llvm::*; define_ty! { i8x2, i8, i8 } define_impl! { i8x2, i8, 2, i8x2, x0, x1 } define_ty! { u8x2, u8, u8 } define_impl! { u8x2, u8, 2, i8x2, x0, x1 } define_casts!((i8x2, i64x2, as_i64x2), (u8x2, i64x2, as_i64x2)); } #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] mod x86; #[cfg(any(target_arch = "arm", target_arch = "aarch64"))] mod arm; #[cfg(target_arch = "aarch64")] mod aarch64; mod nvptx;
fn main() -> ::std::result::Result<(), &'static str> { Err("main function returns result") }
#![feature(llvm_asm)] fn test() -> () { unsafe { let test: i32; // using `str r0, [%0]` or any other instruction, it still crashes. llvm_asm!("mov %0, r0" : "=m"(test) ::); } } fn main() { test(); }
//! Tic-tac-toe //! //! A simple implementation of tic-tac-toe (a.k.a. naughts and crosses) in rust. //! //! This is a work-in-progress and was written as a pedagogical project. //! //! # TODO //! Game statistics. //! Multidimensional board. //! Optimise victory check. //! Coloured output. //! Redraw the board, rather than printing a new one. //! Add benchmarks. //! Increase unit test coverage & measure coverage. extern crate clap; use std::fmt; use std::io; use std::process::exit; use std::io::prelude::*; use std::str::FromStr; use std::num::ParseIntError; //////////////////////////////////////////////////////////////////////////////// // Types //////////////////////////////////////////////////////////////////////////////// /// A single coordinate in two-dimensional space. #[derive(Debug, PartialEq)] struct Point { x: usize, y: usize, } /// A single square in the Tic-tac-toe board. #[derive(Copy, Clone, PartialEq)] enum Square { None, // TODO: replace with Some() Circle, Cross, } /// The Tic-tac-toe board. struct Board { squares: [Square; 9], } impl FromStr for Point { type Err = ParseIntError; fn from_str(s: &str) -> Result<Self, Self::Err> { let mut i = s.split(","); let x_str = i.next().unwrap_or(""); let y_str = i.next().unwrap_or(""); let x = x_str.trim().parse::<usize>()?; // Todo: could trim ( here let y = y_str.trim().parse::<usize>()?; // and trim ) here. 
Ok(Point { x: x, y: y }) } } impl fmt::Display for Square { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Square::None => write!(f, " "), Square::Circle => write!(f, "O"), Square::Cross => write!(f, "X"), } } } impl fmt::Display for Board { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, " {} | {} | {} -- + - + -- {} | {} | {} -- + - + -- {} | {} | {} ", self.squares[0], self.squares[1], self.squares[2], self.squares[3], self.squares[4], self.squares[5], self.squares[6], self.squares[7], self.squares[8], ) } } impl Board { fn point_to_index(&self, p: &Point) -> std::result::Result<usize, usize> { return if p.x <= 2 && p.y <= 2 { Ok(p.x + 3 * p.y) } else { Err(1) }; } fn set_square(&mut self, p: &Point, square: &Square) -> std::result::Result<usize, usize> { let index = self.point_to_index(p)?; if self.squares[index] == Square::None { self.squares[index] = *square; return Ok(0); } else { return Err(2); } } } //////////////////////////////////////////////////////////////////////////////// // Control flow //////////////////////////////////////////////////////////////////////////////// fn check_for_victory(board: &Board, player: &Square, _point: &Point) -> bool { // The winning patterns in the 1D array are: // // 0 1 2 3 4 5 6 7 8 index // X X X - - - - - - top row // - - - X X X - - - mid row // - - - - - - X X X bot row // X - - X - - X - - lef col // - X - - X - - X - mid col // - - X - - X - - X rig col // X - - - X - - - X lef diagonal // - - X - X - X - - rig diagonal // // TODO: Optimise this by only checking for combinations involving point. // TODO: Make a method of the board type? 
(board.squares[0] == *player && board.squares[1] == *player && board.squares[2] == *player) || (board.squares[3] == *player && board.squares[4] == *player && board.squares[5] == *player) || (board.squares[6] == *player && board.squares[7] == *player && board.squares[8] == *player) || (board.squares[0] == *player && board.squares[3] == *player && board.squares[6] == *player) || (board.squares[1] == *player && board.squares[4] == *player && board.squares[7] == *player) || (board.squares[2] == *player && board.squares[5] == *player && board.squares[8] == *player) || (board.squares[0] == *player && board.squares[4] == *player && board.squares[8] == *player) || (board.squares[2] == *player && board.squares[4] == *player && board.squares[6] == *player) } fn turn(board:&mut Board, player: &Square) -> bool { let mut point: Point; println!("{}", board); loop { // Ask for input. print!("Player {}: ", player); io::stdout().flush().ok().expect("Could not flush stdout."); // Read stdin for input. // TODO: enforce a maximum size here and allocate on the stack. let mut input = String::with_capacity(4); match io::stdin().read_line(&mut input) { Ok(_r) => (), Err(e) => panic!("Cannot read stdin: {}", e) } // Parse the input to a point. match input.trim().as_ref() { "q" | "quit" | "e" | "exit" => exit(0), "h" | "help" | "i" | "info" => { help(); continue; }, _ => match Point::from_str(&input) { Ok (p) => point = p, Err(_) => { println!("Please enter a valid point, e.g. 0, 1."); continue; }, } } // Try to set the square on the board. 
match board.set_square(&point, &player) { Ok(_r) => break, Err(1) => println!("That square is not on the board."), Err(2) => println!("That square is already taken."), Err(e) => panic!("Unexpected error when setting a square: {}", e) } } check_for_victory(&board, &player, &point) } fn game() { let players = [Square::Circle, Square::Cross]; let mut board = Board { squares: [Square::None; 9] }; let mut winner: std::option::Option<Square> = None; println!("Welcome to rusty tictactoe! :-)"); for i in 0..board.squares.len() { let player = players[i % 2]; if turn(&mut board, &player) { winner = Some(player); break; } } match winner { None => println!("Stalemate, game over."), Some(player) => println!("Player {} has won! Congratulations!", player), } println!("Thank you for playing."); } fn help() { println!(" To win, place three circles (or crosses) in a line. Specify a square to place a circle or cross by entering 'X, Y', where X and Y are the x-coordinate (column) and y-coordinate (row) respectively. Options: q/quit Exit the game. h/help Display this text. 
"); } fn main() { use clap::App; App::new(env!("CARGO_PKG_NAME")) .version(env!("CARGO_PKG_VERSION")) .author(env!("CARGO_PKG_AUTHORS")) .about(env!("CARGO_PKG_DESCRIPTION")) .get_matches(); game(); } //////////////////////////////////////////////////////////////////////////////// // Tests //////////////////////////////////////////////////////////////////////////////// #[cfg(test)] mod tests { use super::*; #[test] fn test_point_from_str() { assert_eq!(Point::from_str("0, 0"), Ok(Point { x: 0, y: 0 }), "A string of '0, 0' should be parsed to a Point {{ x:0, y:0 }}."); assert_eq!(Point::from_str(" 1, 2 "), Ok(Point { x: 1, y: 2 }), "Leading and trailing whitespace should be stripped."); assert!(Point::from_str("1,").is_err(), "A missing Y value should produce an error."); assert!(Point::from_str(",4").is_err(), "A missing X value should produce an error."); assert!(Point::from_str("").is_err(), "An empty string should produce an error."); } #[test] fn test_board_point_to_index() { let board = Board { squares: [Square::None; 9] }; assert_eq!(board.point_to_index(&Point {x:0, y:0}), Ok(0), "A Point {{x:0, y:0}} should index the first element."); assert_eq!(board.point_to_index(&Point {x:2, y:0}), Ok(2), "A Point {{x:2, y:0}} should index the third element."); assert_eq!(board.point_to_index(&Point {x:2, y:2}), Ok(8), "A Point {{x:2, y:2}} should index the last element."); assert_eq!(board.point_to_index(&Point {x:0, y:3}), Err(1), "An out-of-bounds y-coordinate should be an error."); assert_eq!(board.point_to_index(&Point {x:3, y:0}), Err(1), "An out-of-bounds x-coordinate should be an error."); assert_eq!(board.point_to_index(&Point {x:3, y:3}), Err(1), "Two out-of-bounds coordinates should be an error."); } #[test] fn test_board_set_square() { let mut board = Board { squares: [Square::None; 9] }; assert_eq!(board.set_square(&Point {x:0, y:0}, &Square::Circle), Ok(0), "Setting an empty square inside the board be OK."); assert_eq!(board.set_square(&Point {x:3, y:3}, 
&Square::Circle), Err(1), "Setting a square outside the board should fail."); } #[test] fn test_check_for_victory() { assert_eq!(check_for_victory( &Board { squares: [Square::None; 9] }, &Square::Circle, &Point { x:0, y:0 } ), false, "No player can be victorious with an empty board."); assert_eq!(check_for_victory( &Board { squares: [ Square::Circle, Square::Circle, Square::Circle, Square::None, Square::None, Square::None, Square::None, Square::None, Square::None, ] }, &Square::Circle, &Point { x: 0, y: 0 } ), true, "Three in the top row should be victory."); assert_eq!(check_for_victory( &Board { squares: [ Square::Circle, Square::None, Square::None, Square::Circle, Square::None, Square::None, Square::Circle, Square::None, Square::None, ] }, &Square::Circle, &Point { x:0, y:0 } ), true, "Three in the left column should be victory."); assert_eq!(check_for_victory( &Board { squares: [ Square::Circle, Square::None, Square::None, Square::None, Square::Circle, Square::None, Square::None, Square::None, Square::Circle, ] }, &Square::Circle, &Point { x:0, y:0 } ), true, "Three in the left diagonal should be victory."); assert_eq!(check_for_victory( &Board { squares: [ Square::Circle, Square::None, Square::None, Square::None, Square::Circle, Square::None, Square::None, Square::None, Square::Circle, ] }, &Square::Cross, &Point { x:0, y:0 } ), false, "A victory for circle is not a victory for Cross."); } }
use std::path::PathBuf; use ethers::types::{U256, U64}; use gumdrop::Options; #[derive(Debug, Options, Clone)] pub struct Opts { pub help: bool, #[options(help = "Path to json file with the contract addresses")] pub config: PathBuf, #[options(help = "Database file to be used for persistence", default = "data.json")] pub db_file: PathBuf, #[options(help = "Polling interval in milliseconds", default = "1000")] pub interval: u64, #[options(help = "Minimum desired profit per liquidation", default = "0")] pub min_profit: U256, #[options(help = "Block at which to begin monitoring")] pub start_block: Option<U64>, #[options(help = "Ethereum node endpoint (HTTP or WS)", default = "http://localhost:8545")] pub url: String, }
use proc_macro; #[macro_export] macro_rules! vec { ( $( $x:expr ),* ) => { { let mut temp_vec = Vec::new(); $( temp_vec.push($x); )* temp_vec } }; } #[some_attribute] pub fn some_name(input: TokenStream) -> TokenStream { }
//! The tags which can be applied to a stream. use std::collections::{HashMap, HashSet}; use uuid::Uuid; /// A unique identifier for a tag. pub type TagId = Uuid; /// A unique identifier for a tag category. pub type TagCategoryId = Uuid; /// Describes the tags (applied to Streams) in a manifest in a centralized, /// de-duplicated way. Language tags are special; they should have the special, /// pre-defined category, and their text should be a BCP-47 tag. #[derive(Serialize, Deserialize, Debug)] pub struct TagData { // De-duplicated user-defined tags (on streams). Each tag belongs to a category. // For example, a tag might be "voice actor: Barack Obama". In that example, the // category is "voice actor" and the value is "Barack Obama". Other examples could // include "genre: anime" or "rating: PG-13". Tag values are arbitrary strings. tags: HashMap<TagId, (TagCategoryId, String)>, // Categories of tags. tag_categories: HashMap<TagCategoryId, String>, // There are a few "special" categories and tags that are created by default. language_category_id: TagCategoryId, // the category for language tagging default_language_tag: TagId, // applied to name streams to determine which should be displayed // Sets of mutually exclusive tags. // TODO: actually use this tag_sets: Vec<HashSet<TagId>>, } impl Default for TagData { fn default() -> Self { let language_category_id = TagCategoryId::new_v4(); let default_language_tag = TagId::new_v4(); TagData { tags: { let mut tags = HashMap::new(); tags.insert(default_language_tag, (language_category_id, "default".to_string())); tags }, tag_categories: { let mut categories = HashMap::new(); categories.insert(language_category_id, "language".to_string()); categories }, tag_sets: Default::default(), language_category_id, default_language_tag, } } } impl TagData { /// Finds the ID of the special "language" category. 
pub fn get_language_category(&self) -> TagCategoryId { self.language_category_id } /// Finds the ID of the special "language: default" tag. pub fn get_default_language_tag(&self) -> TagId { self.default_language_tag } /// Looks up a tag's category. O(1). pub fn get_tag_category(&self, id: &TagId) -> Option<TagCategoryId> { self.tags.get(id).map(|(category_id, _)| category_id.clone()) } /// Looks up a tag's text. O(1). pub fn get_tag_value<'a, 'b>(&'a self, id: &'b TagId) -> Option<&'a str> { self.tags.get(id).map(|(_, text)| text.as_str()) } /// Looks up the display text for a tag. O(1). pub fn get_tag_display_text(&self, id: &TagId) -> Option<String> { if let Some((ref category_id, ref text)) = self.tags.get(id) { Some(format!("{}: {}", self.tag_categories.get(category_id).expect("tag has invalid category_id"), text )) } else { None } } /// Gets all tags with the given category. O(# tags). pub fn get_tags_in_category(&self, category: &TagCategoryId) -> HashSet<TagId> { self.tags.iter() .filter(|(_, (c_id, _))| c_id == category) .map(|(id, _)| id.clone()) .collect() } /// Adds a new category of tags. O(1). pub fn add_tag_category(&mut self, category: String) -> TagCategoryId { info!("Adding tag category \"{}\"", category); let new_id = TagCategoryId::new_v4(); self.tag_categories.insert(new_id, category); new_id } /// Adds a new tag. Fails if the category did not exist. If you're adding the tag "actor: Barack Obama", /// then the text should be "Barack Obama". O(1). pub fn add_tag(&mut self, category_id: &TagCategoryId, text: String) -> Option<TagId> { if self.tag_categories.contains_key(category_id) { let new_id = TagId::new_v4(); self.tags.insert(new_id, (category_id.clone(), text)); Some(new_id) } else { None } } /// Given a list of tags and a new tag, determines if the given tag can be added to the /// list. 
pub fn can_tag_join_collection(&self, _collection: &HashSet<TagId>, _new_tag: &TagId) -> bool { // TODO: implement this info!("can_tag_join_collection unimplemented; returning true"); true } /// Gets the tag with the given text, creating it if it does not exist. O(# tags). pub fn get_or_create_tag(&mut self, category_id: &TagCategoryId, text: &str) -> Option<TagId> { if !self.tag_categories.contains_key(category_id) { warn!("Tried to get or create tag in non-existent category"); return None; } if let Some(tag_id) = self.tags.iter().filter_map(|(id, (s_category_id, ref s_text))| { if (s_category_id == category_id) && (s_text == text) { Some(id) } else { None } }).next() { info!("get_or_create_tag found tag {} in category {} with text \"{}\"", tag_id, category_id, text); return Some(*tag_id); } let new_id = TagId::new_v4(); self.tags.insert(new_id, (category_id.clone(), text.to_string())); info!("get_or_create_tag created tag {} in category {} with text \"{}\"", new_id, category_id, text); Some(new_id) } }
use proconio::input; fn main() { input! { n: usize, words: [String; n], }; let dict = vec!["and", "not", "that", "the", "you"]; for w in words { if dict.contains(&w.as_str()) { println!("Yes"); return; } } println!("No"); }
#[macro_use] extern crate lazy_static; extern crate regex; use regex::Regex; use std::io; fn main() { println!("Celcius <=> Farenheit temperature converter"); lazy_static! { static ref TEMPERATURE_REGEXP: Regex = Regex::new(r"^(\-?\d+\.*\d*)([CF])$").unwrap(); } loop { println!("Please enter the temperature to convert,\nfollowed by C or F (for Celcius or Farenheit)"); let mut input_temperature = String::new(); io::stdin().read_line(&mut input_temperature) .expect("Failed to read line"); let input_temperature = input_temperature.trim(); let is_input_valid = TEMPERATURE_REGEXP.is_match(&input_temperature); if !is_input_valid { println!("Invalid input given: {}", input_temperature); continue; } let cap = TEMPERATURE_REGEXP.captures(&input_temperature).unwrap(); let temperature: f32 = match cap[1].trim().parse() { Ok(num) => num, Err(_) => continue, }; let unit = &cap[2]; let result_temperature = match unit { "C" => { let mut result = format!("{}", ((temperature * 1.8) + 32.0).to_string()); result.push('F'); result }, "F" => { let mut result = format!("{}", ((temperature - 32.0) * 0.5556).to_string()); result.push('C'); result } _ => { continue } }; println!("Converted temperature: {} -> {}", &input_temperature, &result_temperature); } }
//! Sending messages to the RTR client.

use std::io;
use std::sync::Arc;
use futures::{Async, Future};
use tokio::io::{AsyncWrite, WriteAll};
use crate::config::Config;
use crate::origins::{AddressOrigins, OriginsDiff};
use super::pdu;
use super::serial::Serial;


//------------ Sender --------------------------------------------------------

/// A future that writes one complete RTR response to a socket and, on
/// success, resolves into the socket for reuse. Each variant corresponds
/// to one kind of response.
pub enum Sender<A> {
    Notify(WriteAll<A, pdu::SerialNotify>),
    Reset(WriteAll<A, pdu::CacheReset>),
    Diff(Wrapped<A, SendDiff>),
    Full(Wrapped<A, SendFull>),
    Error(WriteAll<A, pdu::BoxedError>),
}

impl<A: AsyncWrite> Sender<A> {
    /// Creates a sender for a Serial Notify PDU.
    pub fn notify(sock: A, version: u8, session: u16, serial: Serial) -> Self {
        Sender::Notify(
            pdu::SerialNotify::new(version, session, serial).write(sock)
        )
    }

    /// Creates a sender for a Cache Reset PDU.
    pub fn reset(sock: A, version: u8) -> Self {
        Sender::Reset(pdu::CacheReset::new(version).write(sock))
    }

    /// Creates a sender streaming the prefixes of `diff` between a cache
    /// response header and an end-of-data trailer.
    pub fn diff(
        sock: A, version: u8, session: u16, diff: Arc<OriginsDiff>,
        timing: Timing,
    ) -> Self {
        Sender::Diff(Wrapped::new(
            sock, version, session, diff.serial(),
            SendDiff::new(version, diff), timing
        ))
    }

    /// Creates a sender streaming the full set of `current` origins between
    /// a cache response header and an end-of-data trailer.
    pub fn full(
        sock: A, version: u8, session: u16, serial: Serial,
        current: Arc<AddressOrigins>, timing: Timing,
    ) -> Self {
        Sender::Full(Wrapped::new(
            sock, version, session, serial,
            SendFull::new(version, current), timing
        ))
    }

    /// Creates a sender for an error PDU.
    pub fn error(sock: A, error: pdu::BoxedError) -> Self {
        Sender::Error(error.write(sock))
    }

    /// Polls the inner write future, resolving into the socket on completion.
    /// The `Error` variant deliberately fails after writing so the connection
    /// gets dropped.
    fn real_poll(&mut self) -> Result<Async<A>, io::Error> {
        match *self {
            Sender::Notify(ref mut fut) => {
                let (sock, _) = try_ready!(fut.poll());
                Ok(Async::Ready(sock))
            }
            Sender::Reset(ref mut fut) => {
                let (sock, _) = try_ready!(fut.poll());
                Ok(Async::Ready(sock))
            }
            Sender::Diff(ref mut fut) => {
                Ok(Async::Ready(try_ready!(fut.poll())))
            }
            Sender::Full(ref mut fut) => {
                Ok(Async::Ready(try_ready!(fut.poll())))
            }
            Sender::Error(ref mut fut) => {
                try_ready!(fut.poll());
                // Force the connection to close.
                Err(io::Error::new(io::ErrorKind::Other, ""))
            }
        }
    }
}

impl<A: AsyncWrite> Future for Sender<A> {
    type Item = A;
    type Error = ();

    /// Write errors are logged and swallowed into `()` — the caller only
    /// needs to know the connection is done.
    fn poll(&mut self) -> Result<Async<A>, ()> {
        self.real_poll().map_err(|err| {
            debug!("RTR write error: {}", err);
        })
    }
}


//------------ Wrapped ---------------------------------------------------

/// State machine writing a cache response header, then every prefix PDU
/// produced by the iterator `D`, then an end-of-data PDU. The socket is
/// threaded through the states via the inner `WriteAll` futures.
pub enum Wrapped<A, D> {
    /// Writing the header; the iterator and trailer wait in the `Option`
    /// so they can be `take`n out when the header completes.
    Head(WriteAll<A, pdu::CacheResponse>, Option<(D, pdu::EndOfData)>),
    /// Writing one prefix PDU; the iterator supplies the rest.
    Middle(WriteAll<A, pdu::Prefix>, D, Option<pdu::EndOfData>),
    /// Writing the end-of-data trailer.
    Tail(WriteAll<A, pdu::EndOfData>),
}

impl<A: AsyncWrite, D> Wrapped<A, D> {
    /// Starts the machine in the `Head` state with the trailer pre-built
    /// from the session parameters and timing values.
    fn new(
        sock: A, version: u8, session: u16, serial: Serial, iter: D,
        timing: Timing,
    ) -> Self {
        Wrapped::Head(
            pdu::CacheResponse::new(version, session).write(sock),
            Some((
                iter,
                pdu::EndOfData::new(
                    version, session, serial, timing.refresh, timing.retry,
                    timing.expire
                )
            ))
        )
    }
}

impl<A, D> Future for Wrapped<A, D>
where A: AsyncWrite, D: Iterator<Item=pdu::Prefix> {
    type Item = A;
    type Error = io::Error;

    /// Drives the state machine: each completed write either advances to the
    /// next prefix (staying in `Middle`), or moves `Head`/`Middle` on to
    /// `Tail` once the iterator is exhausted.
    fn poll(&mut self) -> Result<Async<Self::Item>, Self::Error> {
        loop {
            *self = match *self {
                Wrapped::Head(ref mut fut, ref mut next) => {
                    let (sock, _) = try_ready!(fut.poll());
                    let (mut iter, tail) = next.take().unwrap();
                    match iter.next() {
                        Some(pdu) => {
                            Wrapped::Middle(
                                pdu.write(sock), iter, Some(tail)
                            )
                        }
                        None => Wrapped::Tail(tail.write(sock))
                    }
                }
                Wrapped::Middle(ref mut fut, ref mut iter, ref mut nx) => {
                    let (sock, _) = try_ready!(fut.poll());
                    match iter.next() {
                        Some(pdu) => {
                            // More prefixes: replace the write future in
                            // place instead of constructing a new state.
                            *fut = pdu.write(sock);
                            continue;
                        }
                        None => {
                            let tail = nx.take().unwrap();
                            Wrapped::Tail(tail.write(sock))
                        }
                    }
                }
                Wrapped::Tail(ref mut fut) => {
                    let (sock, _) = try_ready!(fut.poll());
                    return Ok(Async::Ready(sock))
                }
            }
        }
    }
}


//------------ SendDiff ------------------------------------------------------

/// Iterator over the prefix PDUs of a diff: first every announced origin
/// (flags value 1), then every withdrawn origin (flags value 0).
pub struct SendDiff {
    version: u8,
    diff: Arc<OriginsDiff>,
    /// True while we are still in the announce phase.
    announce: bool,
    /// Index of the next element in the current phase's slice.
    next_idx: usize,
}

impl SendDiff {
    fn new(version: u8, diff: Arc<OriginsDiff>) -> Self {
        SendDiff {
            version,
            diff,
            announce: true,
            next_idx: 0
        }
    }
}

impl Iterator for SendDiff {
    type Item = pdu::Prefix;

    fn next(&mut self) -> Option<Self::Item> {
        if self.announce {
            if self.next_idx >= self.diff.announce().len() {
                // Announce phase done: switch to withdrawals, emitting the
                // first withdrawal (if any) immediately.
                self.announce = false;
                self.next_idx = 1; // We return the 0th item right away.
                self.diff.withdraw().first().map(|orig| {
                    pdu::Prefix::new(self.version, 0, orig)
                })
            }
            else {
                let res = &self.diff.announce()[self.next_idx];
                self.next_idx += 1;
                Some(pdu::Prefix::new(self.version, 1, res))
            }
        }
        else if self.next_idx >= self.diff.withdraw().len() {
            None
        }
        else {
            let res = &self.diff.withdraw()[self.next_idx];
            self.next_idx += 1;
            Some(pdu::Prefix::new(self.version, 0, res))
        }
    }
}


//------------ SendFull ------------------------------------------------------

/// Iterator over the prefix PDUs for a full data set: every current origin
/// as an announcement (flags value 1).
pub struct SendFull {
    version: u8,
    origins: Arc<AddressOrigins>,
    next_idx: usize,
}

impl SendFull {
    pub fn new(version: u8, origins: Arc<AddressOrigins>) -> Self {
        SendFull {
            version,
            origins,
            next_idx: 0
        }
    }
}

impl Iterator for SendFull {
    type Item = pdu::Prefix;

    fn next(&mut self) -> Option<Self::Item> {
        if let Some(res) = self.origins.get(self.next_idx) {
            self.next_idx += 1;
            Some(pdu::Prefix::new(self.version, 1, res))
        }
        else {
            None
        }
    }
}


//------------ Timing --------------------------------------------------------

/// The refresh/retry/expire intervals, in seconds, advertised in the
/// end-of-data PDU.
#[derive(Clone, Copy, Debug)]
pub struct Timing {
    refresh: u32,
    retry: u32,
    expire: u32
}

impl Timing {
    /// Extracts the three intervals from the configuration, truncating each
    /// duration to whole seconds.
    pub fn new(config: &Config) -> Self {
        Timing {
            refresh: config.refresh.as_secs() as u32,
            retry: config.retry.as_secs() as u32,
            expire: config.expire.as_secs() as u32
        }
    }
}