text
stringlengths
8
4.13M
// Copyright Jeron Aldaron Lau 2017 - 2020.
// Distributed under either the Apache License, Version 2.0
// (See accompanying file LICENSE_APACHE_2_0.txt or copy at
// https://apache.org/licenses/LICENSE-2.0),
// or the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_BOOST_1_0.txt or copy at
// https://www.boost.org/LICENSE_1_0.txt)
// at your option. This file may not be copied, modified, or distributed except
// according to those terms.

/// An event from a [`Controller`](crate::Controller).
///
/// # Gamepad Types
/// ## Standard Gamepad
/// A video game controller similar to w3c's "standard gamepad":
///
/// ## Flightstick
/// A joystick typically used in flight simulations and robotics:
///
#[derive(Debug)]
#[non_exhaustive]
pub enum Event {
    /*
     * Connecting and disconnecting (common to all controllers)
     */
    /// A new controller has just been plugged in.
    Connect(Box<crate::Controller>),
    /// Controller unplugged.
    Disconnect,

    /*
     * Events based on the w3 Standard Gamepad (may appear on other gamepads as
     * well)
     */

    /* Center buttons */
    /// Home button (Exit gameplay, usually into a console menu)
    Home(bool),
    /// Back / Select / Minus / Stop Button (Escape)
    Prev(bool),
    /// Forward / Start / Plus / Play Button (Tab)
    Next(bool),

    /* Action pad - action button cluster */
    /// A / 1 / 4 / Circle / Return / Left Click. Action A (Main action).
    ActionA(bool),
    /// B / 2 / 3 / Cross / Shift. Action B (Secondary action).
    ActionB(bool),
    /// C
    ActionC(bool),
    /// Y / X / Square / Right Click / H. Horizontal action.
    ActionH(bool),
    /// X / Y / Triangle / Space / V. Vertical action (Topmost action button).
    ActionV(bool),
    /// Numbered or unlabeled programmable action buttons (If unlabelled,
    /// numbered from left to right, upper to lower)
    Action(u16, bool),

    /* D-PAD */
    /// D-pad Up
    DpadUp(bool),
    /// D-pad Down
    DpadDown(bool),
    /// D-pad Left
    DpadLeft(bool),
    /// D-pad Right
    DpadRight(bool),

    /* Bumper Triggers (LZ, RZ - 2) */
    /// Range(0.0, 1.0) - Left Bumper Trigger (far button if no trigger) -
    /// "Sneak" (Ctrl)
    TriggerL(f64),
    /// Range(0.0, 1.0) - Right Bumper Trigger (far button if no trigger) -
    /// "Precision Action" (Alt)
    TriggerR(f64),

    /* Bumper Buttons (L, R, Z - 1) */
    /// Left shoulder button (near button if no trigger) - "Inventory" (E)
    BumperL(bool),
    /// Right shoulder button (near button if no trigger) - "Use" (R)
    BumperR(bool),

    /* Joystick */
    /// Range(-1.0, 1.0) - Main stick horizontal axis (A / D)
    JoyX(f64),
    /// Range(-1.0, 1.0) - Main stick vertical / depth axis (W / S)
    JoyY(f64),
    /// Range(-1.0, 1.0) - Main stick rotation / yaw axis
    JoyZ(f64),
    /// Range(-1.0, 1.0) - Secondary stick X axis (Mouse X Position)
    CamX(f64),
    /// Range(-1.0, 1.0) - Secondary stick Y axis (Mouse Y Position)
    CamY(f64),
    /// Range(-1.0, 1.0) - Secondary stick Z axis
    CamZ(f64),

    /* Joystick Buttons */
    /// Left Joystick Button (Middle Click)
    JoyPush(bool),
    /// Right Joystick Button (F)
    CamPush(bool),

    /*
     * Special XBox/Steam Controllers Extra Buttons
     */

    /* Paddles */
    /// Back right grip button (upper if there are two)
    PaddleRight(bool),
    /// Back left grip button (upper if there are two)
    PaddleLeft(bool),
    /// Back lower right grip button
    PaddleRightPinky(bool),
    /// Back lower left grip button
    PaddleLeftPinky(bool),

    /*
     * Realistic flight simulation stick extra buttons, switches, etc.
     */

    /* Buttons */
    /// Autopilot Toggle Button
    AutopilotToggle(bool),
    /// Landing Gear Horn Silence Button
    LandingGearSilence(bool),

    /* 8-way POV Hat */
    /// POV Hat Up
    PovUp(bool),
    /// POV Hat Down
    PovDown(bool),
    /// POV Hat Left
    PovLeft(bool),
    /// POV Hat Right
    PovRight(bool),

    /* 4-way Mic Switch */
    /// Mic Hat Up
    MicUp(bool),
    /// Mic Hat Down
    MicDown(bool),
    /// Mic Hat Left
    MicLeft(bool),
    /// Mic Hat Right
    MicRight(bool),
    /// Mic Hat Push Button
    MicPush(bool),

    /// Range(0.0, 1.0) - Slew Control
    Slew(f64),
    /// Range(0.0, 1.0) - Stationary throttle (1.0 is forward, 0.0 is backward)
    Throttle(f64),
    /// Range(0.0, 1.0) - Left stationary throttle (1.0 is forward,
    /// 0.0 is backward)
    ThrottleL(f64),
    /// Range(0.0, 1.0) - Right stationary throttle (1.0 is forward, 0.0 is
    /// backward)
    ThrottleR(f64),
    /// Left throttle button
    ThrottleButtonL(bool),
    /// Engine Fuel Flow Left two-way switch
    /// - `true` - Normal
    /// - `false` - Override
    EngineFuelFlowL(bool),
    /// Engine Fuel Flow Right two-way switch
    /// - `true` - Normal
    /// - `false` - Override
    EngineFuelFlowR(bool),
    /// EAC two-way switch
    /// - `true` - Arm
    /// - `false` - Off
    Eac(bool),
    /// Radar Altimeter two-way switch
    /// - `true` - Normal
    /// - `false` - Disabled
    RadarAltimeter(bool),
    /// APU two-way switch
    /// - `true` - Start
    /// - `false` - Off
    Apu(bool),
    /// Autopilot three-way switch Forward.
    /// - `true` - Forward (Path)
    /// - `false` - Neutral (Altitude / Heading)
    AutopilotPath(bool),
    /// Autopilot three-way switch Backward.
    /// - `true` - Backward (Alt)
    /// - `false` - Neutral (Altitude / Heading)
    AutopilotAlt(bool),
    /// Flaps three-way switch Forward.
    /// - `true` - Forward (Up)
    /// - `false` - Neutral (Maneuver)
    FlapsUp(bool),
    /// Flaps three-way switch Backward.
    /// - `true` - Backward (Down)
    /// - `false` - Neutral (Maneuver)
    FlapsDown(bool),
    /// Left Engine Operate three-way switch Forward.
    /// - `true` - Forward (Ignition)
    /// - `false` - Neutral (Normal)
    EngineLIgnition(bool),
    /// Left Engine Operate three-way switch Backward.
    /// - `true` - Backward (Motor)
    /// - `false` - Neutral (Normal)
    EngineLMotor(bool),
    /// Right Engine Operate three-way switch Forward.
    /// - `true` - Forward (Ignition)
    /// - `false` - Neutral (Normal)
    EngineRIgnition(bool),
    /// Right Engine Operate three-way switch Backward.
    /// - `true` - Backward (Motor)
    /// - `false` - Neutral (Normal)
    EngineRMotor(bool),
    /// Pinky three-way switch Forward.
    PinkyForward(bool),
    /// Pinky three-way switch Backward.
    PinkyBackward(bool),
    /// Speedbrake three-way switch Forward.
    SpeedbrakeForward(bool),
    /// Speedbrake three-way switch Backward.
    SpeedbrakeBackward(bool),
    /// Boat three-way switch Forward.
    BoatForward(bool),
    /// Boat three-way switch Backward.
    BoatBackward(bool),
    /// China hat three-way switch Forward.
    ChinaForward(bool),
    /// China hat three-way switch Backward.
    ChinaBackward(bool),

    /*
     * Mice-like controllers extra buttons, scroll wheel
     */

    /* Extra Mouse buttons */
    /// DPI Switch
    Dpi(bool),

    /* Mouse Main */
    /// Range(-1.0, 1.0) - Mouse delta position horizontal
    MouseX(f64),
    /// Range(-1.0, 1.0) - Mouse delta position vertical
    MouseY(f64),
    /// Left click (main click, push button)
    MousePush(bool),
    /// Right click (secondary click, push button 2)
    MouseMenu(bool),

    /* Mouse Wheel */
    /// Range(-1.0, 1.0) - Scroll wheel horizontal
    WheelX(f64),
    /// Range(-1.0, 1.0) - Scroll wheel vertical
    WheelY(f64),
    /// Middle click (scroll wheel push button)
    WheelPush(bool),

    /*
     * Ignore Events
     */
    #[doc(hidden)]
    Nil(bool),
}

impl std::fmt::Display for Event {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use Event::*;

        // Human-readable labels for the three kinds of binary inputs:
        // momentary buttons, two-way switches, and three-way switch halves.
        let pushed = |pushd: &bool| if *pushd { "Pushed" } else { "Released" };
        let two = |two: &bool| match two {
            true => "Forward",
            false => "Backward",
        };
        let sw = |three: &bool| match three {
            true => "Enter",
            false => "Leave",
        };

        match self {
            Connect(_) => write!(f, "Controller Connected"),
            Disconnect => write!(f, "Controller Disconnected"),
            ActionA(p) => write!(f, "ActionA {}", pushed(p)),
            ActionB(p) => write!(f, "ActionB {}", pushed(p)),
            ActionC(p) => write!(f, "ActionC {}", pushed(p)),
            ActionH(p) => write!(f, "ActionH {}", pushed(p)),
            ActionV(p) => write!(f, "ActionV {}", pushed(p)),
            DpadUp(p) => write!(f, "DpadUp {}", pushed(p)),
            DpadDown(p) => write!(f, "DpadDown {}", pushed(p)),
            DpadLeft(p) => write!(f, "DpadLeft {}", pushed(p)),
            DpadRight(p) => write!(f, "DpadRight {}", pushed(p)),
            Prev(p) => write!(f, "Prev {}", pushed(p)),
            Next(p) => write!(f, "Next {}", pushed(p)),
            BumperL(p) => write!(f, "BumperL {}", pushed(p)),
            BumperR(p) => write!(f, "BumperR {}", pushed(p)),
            TriggerL(v) => write!(f, "TriggerL {}", v),
            TriggerR(v) => write!(f, "TriggerR {}", v),
            JoyX(v) => write!(f, "JoyX {}", v),
            JoyY(v) => write!(f, "JoyY {}", v),
            JoyZ(v) => write!(f, "JoyZ {}", v),
            CamX(v) => write!(f, "CamX {}", v),
            CamY(v) => write!(f, "CamY {}", v),
            CamZ(v) => write!(f, "CamZ {}", v),
            JoyPush(p) => write!(f, "JoyPush {}", pushed(p)),
            CamPush(p) => write!(f, "CamPush {}", pushed(p)),
            PaddleRight(p) => write!(f, "PaddleRight {}", pushed(p)),
            PaddleLeft(p) => write!(f, "PaddleLeft {}", pushed(p)),
            PaddleRightPinky(p) => write!(f, "PaddleRightPinky {}", pushed(p)),
            PaddleLeftPinky(p) => write!(f, "PaddleLeftPinky {}", pushed(p)),
            Home(p) => write!(f, "Home {}", pushed(p)),
            Action(l, p) => write!(f, "Action{} {}", l, pushed(p)),
            AutopilotToggle(p) => write!(f, "AutopilotToggle {}", pushed(p)),
            LandingGearSilence(p) => {
                write!(f, "LandingGearSilence {}", pushed(p))
            }
            PovUp(p) => write!(f, "PovUp {}", pushed(p)),
            PovDown(p) => write!(f, "PovDown {}", pushed(p)),
            PovLeft(p) => write!(f, "PovLeft {}", pushed(p)),
            PovRight(p) => write!(f, "PovRight {}", pushed(p)),
            MicUp(p) => write!(f, "MicUp {}", pushed(p)),
            MicDown(p) => write!(f, "MicDown {}", pushed(p)),
            MicLeft(p) => write!(f, "MicLeft {}", pushed(p)),
            MicRight(p) => write!(f, "MicRight {}", pushed(p)),
            MicPush(p) => write!(f, "MicPush {}", pushed(p)),
            Slew(v) => write!(f, "Slew {}", v),
            Throttle(v) => write!(f, "Throttle {}", v),
            ThrottleL(v) => write!(f, "ThrottleL {}", v),
            ThrottleR(v) => write!(f, "ThrottleR {}", v),
            ThrottleButtonL(p) => write!(f, "ThrottleButtonL {}", pushed(p)),
            EngineFuelFlowL(t) => write!(f, "EngineFuelFlowL {}", two(t)),
            EngineFuelFlowR(t) => write!(f, "EngineFuelFlowR {}", two(t)),
            Eac(t) => write!(f, "Eac {}", two(t)),
            RadarAltimeter(t) => write!(f, "RadarAltimeter {}", two(t)),
            Apu(t) => write!(f, "Apu {}", two(t)),
            AutopilotPath(p) => write!(f, "AutopilotPath {}", sw(p)),
            AutopilotAlt(p) => write!(f, "AutopilotAlt {}", sw(p)),
            FlapsUp(p) => write!(f, "FlapsUp {}", sw(p)),
            FlapsDown(p) => write!(f, "FlapsDown {}", sw(p)),
            EngineLIgnition(p) => write!(f, "EngineLIgnition {}", sw(p)),
            EngineLMotor(p) => write!(f, "EngineLMotor {}", sw(p)),
            EngineRIgnition(p) => write!(f, "EngineRIgnition {}", sw(p)),
            EngineRMotor(p) => write!(f, "EngineRMotor {}", sw(p)),
            PinkyForward(p) => write!(f, "PinkyForward {}", sw(p)),
            PinkyBackward(p) => write!(f, "PinkyBackward {}", sw(p)),
            SpeedbrakeForward(p) => write!(f, "SpeedbrakeForward {}", sw(p)),
            SpeedbrakeBackward(p) => {
                write!(f, "SpeedbrakeBackward {}", sw(p))
            }
            BoatForward(p) => write!(f, "BoatForward {}", sw(p)),
            BoatBackward(p) => write!(f, "BoatBackward {}", sw(p)),
            ChinaForward(p) => write!(f, "ChinaForward {}", sw(p)),
            ChinaBackward(p) => write!(f, "ChinaBackward {}", sw(p)),
            Dpi(p) => write!(f, "Dpi {}", pushed(p)),
            MouseX(v) => write!(f, "MouseX {}", v),
            MouseY(v) => write!(f, "MouseY {}", v),
            MousePush(p) => write!(f, "MousePush {}", pushed(p)),
            MouseMenu(p) => write!(f, "MouseMenu {}", pushed(p)),
            WheelX(v) => write!(f, "WheelX {}", v),
            WheelY(v) => write!(f, "WheelY {}", v),
            WheelPush(p) => write!(f, "WheelPush {}", pushed(p)),
            Nil(p) => write!(f, "Nil {}", pushed(p)),
        }
    }
}
mod deparse; mod errors; mod flatten; mod known_external; mod lex; mod lite_parse; mod parse_keywords; mod parser; mod type_check; pub use deparse::{escape_for_script_arg, escape_quote_string}; pub use errors::ParseError; pub use flatten::{flatten_block, flatten_expression, flatten_pipeline, FlatShape}; pub use known_external::KnownExternal; pub use lex::{lex, Token, TokenContents}; pub use lite_parse::{lite_parse, LiteBlock}; pub use parse_keywords::*; pub use parser::{ is_math_expression_like, parse, parse_block, parse_duration_bytes, parse_expression, parse_external_call, trim_quotes, trim_quotes_str, unescape_unquote_string, Import, }; #[cfg(feature = "plugin")] pub use parse_keywords::parse_register;
/// A `(x, y)` position on screen.
pub type Pos = (usize, usize);

/// A `(cols, rows)` size.
pub type Size = (usize, usize);

/// Types that can report their size in character cells.
pub trait HasSize {
    fn size(&self) -> Size;
}

/// Types that have a movable origin position.
pub trait HasPosition {
    fn origin(&self) -> Pos;
    fn set_origin(&mut self, new_origin: Pos);
}

/// A cursor position.
///
/// Remembers the previously set position so that two consecutive
/// positions on the same row can be detected (see [`Cursor::is_seq`]).
pub struct Cursor {
    /// Current position, if any.
    pos: Option<Pos>,
    /// Position before the most recent `set_pos` call.
    last_pos: Option<Pos>,
}

impl Cursor {
    /// Creates a cursor with no current or previous position.
    pub fn new() -> Cursor {
        Cursor {
            pos: None,
            last_pos: None,
        }
    }

    /// Checks whether the current and last coordinates are sequential and returns `true` if they
    /// are and `false` otherwise.
    ///
    /// "Sequential" means the current position is exactly one column to the
    /// right of the last position, on the same row. Returns `false` when
    /// either position is unset.
    pub fn is_seq(&self) -> bool {
        // Tuple match replaces the nested `if let` pyramid; behavior is
        // unchanged.
        match (self.pos, self.last_pos) {
            (Some((cx, cy)), Some((lx, ly))) => (lx + 1, ly) == (cx, cy),
            _ => false,
        }
    }

    /// Returns the current position, if set.
    pub fn pos(&self) -> Option<Pos> {
        self.pos
    }

    /// Sets a new position, remembering the old one as `last_pos`.
    pub fn set_pos(&mut self, newpos: Option<Pos>) {
        self.last_pos = self.pos;
        self.pos = newpos;
    }

    /// Forgets the remembered previous position, so `is_seq` returns `false`
    /// until the next `set_pos`.
    pub fn invalidate_last_pos(&mut self) {
        self.last_pos = None;
    }
}

// Clippy `new_without_default`: a zero-argument `new` should be reachable
// through the standard `Default` trait as well.
impl Default for Cursor {
    fn default() -> Self {
        Cursor::new()
    }
}
extern crate clap; mod block; mod pow; use clap::{Arg, App}; use block::BlockChain; fn main() { let matches = App::new("Blkchain_3am") .version("1.0.0") .author("Boot-Error <booterror99@gmail.com>") .about("Tiny blockchain implementation") .arg(Arg::with_name("add") .short("a") .long("add") .takes_value(true) .help("Add new block, input some text")) .get_matches(); let new_data = matches.value_of("add").unwrap(); // blockchain let mut bc = BlockChain::new(); bc.add_block(new_data.to_string()); bc.show_blocks(); }
/// Reads one byte at `cursor`, returning it together with the advanced cursor.
fn consume_u8(b: &Vec<u8>, cursor: usize) -> (u8, usize) {
    (b[cursor], cursor + 1)
}

/// Reads a big-endian `u16` at `cursor`.
fn consume_u16(b: &Vec<u8>, cursor: usize) -> (u16, usize) {
    let (left, cursor) = consume_u8(b, cursor);
    let (right, cursor) = consume_u8(b, cursor);
    (((left as u16) << 8) | (right as u16), cursor)
}

/// Reads a big-endian `u32` at `cursor`.
fn consume_u32(b: &Vec<u8>, cursor: usize) -> (u32, usize) {
    let (left, cursor) = consume_u16(b, cursor);
    let (right, cursor) = consume_u16(b, cursor);
    (((left as u32) << 16) | (right as u32), cursor)
}

/// Reads a big-endian `u64` at `cursor`.
fn consume_u64(b: &Vec<u8>, cursor: usize) -> (u64, usize) {
    let (left, cursor) = consume_u32(b, cursor);
    let (right, cursor) = consume_u32(b, cursor);
    (((left as u64) << 32) | (right as u64), cursor)
}

/// Class access and property flags (JVM spec, Table 4.1-B).
/// NOTE(review): currently unused by the parser; kept for future decoding of
/// the `access_flags` fields.
#[derive(Debug)]
enum AccessFlag {
    Public = 0x0001,
    Final = 0x0010,
    Super = 0x0020,
    Interface = 0x0200,
    Abstract = 0x0400,
    Synthetic = 0x1000,
    Annotation = 0x2000,
    Enum = 0x4000,
}

/// One entry of the class-file constant pool (JVM spec §4.4).
#[derive(Debug)]
enum Const {
    Class {
        name_index: u16,
    },
    Fieldref {
        class_index: u16,
        nameandtype_index: u16,
    },
    Methodref {
        class_index: u16,
        nameandtype_index: u16,
    },
    InterfaceMethodref {
        class_index: u16,
        nameandtype_index: u16,
    },
    String {
        string_index: u16,
    },
    Integer(u32),
    Float(f32),
    Long(u64),
    Double(f64),
    NameAndType {
        name_index: u16,
        descriptor_index: u16,
    },
    Utf8(String),
    MethodHandle {
        reference_kind: u8,
        reference_index: u16,
    },
    MethodType {
        descriptor_index: u16,
    },
    InvokeDynamic {
        bootstrap_method_attr_index: u16,
        name_and_type_index: u16,
    },
    /// Filler for the second slot occupied by 8-byte constants: the JVM spec
    /// (§4.4.5) makes `Long` and `Double` take *two* constant-pool slots, and
    /// the placeholder keeps `Vec` indices aligned with (1-based) pool
    /// indices minus one.
    Unusable,
}

impl Const {
    /// Prints a human-readable line for value-carrying constants; structural
    /// constants (references, names, placeholders) print nothing.
    pub fn print(&self) {
        match self {
            Const::Integer(i) => println!("Integer: {}", i),
            Const::Float(x) => println!("Float: {}", x),
            Const::Long(l) => println!("Long: {}", l),
            Const::Double(d) => println!("Double: {}", d),
            Const::Utf8(s) => println!("Utf8: {}", s),
            _ => (),
        }
    }
}

/// Constant-pool tag bytes (JVM spec, Table 4.4-A).
#[derive(Debug)]
enum ConstType {
    Class = 7,
    Fieldref = 9,
    Methodref = 10,
    InterfaceMethodref = 11,
    String = 8,
    Integer = 3,
    Float = 4,
    Long = 5,
    Double = 6,
    NameAndType = 12,
    Utf8 = 1,
    MethodHandle = 15,
    MethodType = 16,
    InvokeDynamic = 18,
}

fn consume_class_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (name_index, cursor) = consume_u16(b, cursor);
    (Const::Class { name_index }, cursor)
}

fn consume_fieldref_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (class_index, cursor) = consume_u16(b, cursor);
    let (nameandtype_index, cursor) = consume_u16(b, cursor);
    (
        Const::Fieldref {
            class_index,
            nameandtype_index,
        },
        cursor,
    )
}

fn consume_methodref_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (class_index, cursor) = consume_u16(b, cursor);
    let (nameandtype_index, cursor) = consume_u16(b, cursor);
    (
        Const::Methodref {
            class_index,
            nameandtype_index,
        },
        cursor,
    )
}

fn consume_interfacemethodref_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (class_index, cursor) = consume_u16(b, cursor);
    let (nameandtype_index, cursor) = consume_u16(b, cursor);
    (
        Const::InterfaceMethodref {
            class_index,
            nameandtype_index,
        },
        cursor,
    )
}

fn consume_string_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (string_index, cursor) = consume_u16(b, cursor);
    (Const::String { string_index }, cursor)
}

fn consume_integer_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (bytes, cursor) = consume_u32(b, cursor);
    (Const::Integer(bytes), cursor)
}

fn consume_float_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (bits, cursor) = consume_u32(b, cursor);
    // BUG FIX: previously produced `Const::Integer` with the raw bits. A
    // CONSTANT_Float entry stores an IEEE-754 bit pattern, so reinterpret it.
    (Const::Float(f32::from_bits(bits)), cursor)
}

fn consume_long_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (bytes, cursor) = consume_u64(b, cursor);
    (Const::Long(bytes), cursor)
}

fn consume_double_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (bits, cursor) = consume_u64(b, cursor);
    // BUG FIX: `bits as f64` numerically converts the integer; the class file
    // stores the raw IEEE-754 bit pattern, so reinterpret instead.
    (Const::Double(f64::from_bits(bits)), cursor)
}

fn consume_nameandtype_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (name_index, cursor) = consume_u16(b, cursor);
    let (descriptor_index, cursor) = consume_u16(b, cursor);
    (
        Const::NameAndType {
            name_index,
            descriptor_index,
        },
        cursor,
    )
}

fn consume_utf8_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (length, cursor) = consume_u16(b, cursor);
    let offset = cursor + (length as usize);
    // NOTE(review): class files use "modified UTF-8"; strict UTF-8 decoding
    // panics on embedded NULs / surrogate encodings — confirm inputs.
    let c = Const::Utf8(std::str::from_utf8(&b[cursor..offset]).unwrap().to_string());
    (c, offset)
}

fn consume_methodhandle_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (reference_kind, cursor) = consume_u8(b, cursor);
    let (reference_index, cursor) = consume_u16(b, cursor);
    (
        Const::MethodHandle {
            reference_kind,
            reference_index,
        },
        cursor,
    )
}

fn consume_methodtype_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (descriptor_index, cursor) = consume_u16(b, cursor);
    (Const::MethodType { descriptor_index }, cursor)
}

fn consume_invokedynamic_constant(b: &Vec<u8>, cursor: usize) -> (Const, usize) {
    let (bootstrap_method_attr_index, cursor) = consume_u16(b, cursor);
    let (name_and_type_index, cursor) = consume_u16(b, cursor);
    (
        Const::InvokeDynamic {
            bootstrap_method_attr_index,
            name_and_type_index,
        },
        cursor,
    )
}

impl ConstType {
    /// Converts a raw tag byte into a `ConstType`.
    ///
    /// BUG FIX: replaces an unchecked `std::mem::transmute` of an arbitrary
    /// byte into a fieldless enum, which is undefined behavior for any byte
    /// that is not a declared discriminant.
    fn from_tag(tag: u8) -> ConstType {
        match tag {
            7 => ConstType::Class,
            9 => ConstType::Fieldref,
            10 => ConstType::Methodref,
            11 => ConstType::InterfaceMethodref,
            8 => ConstType::String,
            3 => ConstType::Integer,
            4 => ConstType::Float,
            5 => ConstType::Long,
            6 => ConstType::Double,
            12 => ConstType::NameAndType,
            1 => ConstType::Utf8,
            15 => ConstType::MethodHandle,
            16 => ConstType::MethodType,
            18 => ConstType::InvokeDynamic,
            _ => panic!("Unknown constant pool tag: {}", tag),
        }
    }

    /// Parses the constant body that follows this tag byte.
    fn consume_constant(&self, b: &Vec<u8>, cursor: usize) -> (Const, usize) {
        match self {
            ConstType::Class => consume_class_constant(b, cursor),
            ConstType::Fieldref => consume_fieldref_constant(b, cursor),
            ConstType::Methodref => consume_methodref_constant(b, cursor),
            ConstType::InterfaceMethodref => consume_interfacemethodref_constant(b, cursor),
            ConstType::String => consume_string_constant(b, cursor),
            ConstType::Integer => consume_integer_constant(b, cursor),
            ConstType::Float => consume_float_constant(b, cursor),
            ConstType::Long => consume_long_constant(b, cursor),
            ConstType::Double => consume_double_constant(b, cursor),
            ConstType::NameAndType => consume_nameandtype_constant(b, cursor),
            ConstType::Utf8 => consume_utf8_constant(b, cursor),
            ConstType::MethodHandle => consume_methodhandle_constant(b, cursor),
            ConstType::MethodType => consume_methodtype_constant(b, cursor),
            ConstType::InvokeDynamic => consume_invokedynamic_constant(b, cursor),
        }
    }
}

/// A `u16` index into the constant pool naming a direct superinterface.
#[derive(Debug)]
struct Interface {
    index: u16,
}

#[derive(Debug)]
struct Field {
    access_flags: u16,
    name_index: u16,
    descriptor_index: u16,
    attributes: Vec<Attribute>,
}

impl Field {
    // Stub: field printing is not implemented yet.
    fn print(&self) {}
}

#[derive(Debug)]
struct Method {
    access_flags: u16,
    name_index: u16,
    descriptor_index: u16,
    attributes: Vec<Attribute>,
}

impl Method {
    // Stub: method printing is not implemented yet.
    fn print(&self) {}
}

/// An attribute: a name index plus its raw, unparsed payload bytes.
#[derive(Debug)]
struct Attribute {
    name_index: u16,
    info: Vec<u8>,
}

/// Every class file starts with these four bytes.
const MAGIC: u32 = 0xCAFEBABE;

/// Parses `constant_pool_count` (`n`) minus one pool slots.
fn consume_constant_pool(b: &Vec<u8>, n: usize, cursor: usize) -> (Vec<Const>, usize) {
    let mut v: Vec<Const> = Vec::new();
    let mut c = cursor;
    // `n` is the raw constant_pool_count; the pool itself has `n - 1` slots.
    // (`v.len() + 1 < n` avoids underflow for a malformed count of zero.)
    while v.len() + 1 < n {
        let tag = ConstType::from_tag(b[c]);
        let (constant, next) = tag.consume_constant(b, c + 1);
        let wide = match constant {
            // BUG FIX: 8-byte constants occupy two pool slots (JVM spec
            // §4.4.5); the old loop counted them as one, over-reading the
            // pool and corrupting every later index.
            Const::Long(_) | Const::Double(_) => true,
            _ => false,
        };
        v.push(constant);
        if wide {
            v.push(Const::Unusable);
        }
        c = next;
    }
    (v, c)
}

fn consume_interfaces(b: &Vec<u8>, n: usize, cursor: usize) -> (Vec<Interface>, usize) {
    let mut v: Vec<Interface> = Vec::new();
    let mut c = cursor;
    while v.len() < n {
        // BUG FIX: the cursor previously never advanced inside the loop, so
        // every entry re-read the first interface index.
        let (index, next) = consume_u16(b, c);
        v.push(Interface { index });
        c = next;
    }
    (v, c)
}

fn consume_fields(b: &Vec<u8>, n: usize, cursor: usize) -> (Vec<Field>, usize) {
    let mut v: Vec<Field> = Vec::new();
    let mut c = cursor;
    while v.len() < n {
        let (access_flags, cursor) = consume_u16(b, c);
        let (name_index, cursor) = consume_u16(b, cursor);
        let (descriptor_index, cursor) = consume_u16(b, cursor);
        let (attributes_count, cursor) = consume_u16(b, cursor);
        let (attributes, cursor) = consume_attributes(b, attributes_count as usize, cursor);
        v.push(Field {
            access_flags,
            name_index,
            descriptor_index,
            attributes,
        });
        c = cursor;
    }
    (v, c)
}

fn consume_methods(b: &Vec<u8>, n: usize, cursor: usize) -> (Vec<Method>, usize) {
    let mut v: Vec<Method> = Vec::new();
    let mut c = cursor;
    while v.len() < n {
        let (access_flags, cursor) = consume_u16(b, c);
        let (name_index, cursor) = consume_u16(b, cursor);
        let (descriptor_index, cursor) = consume_u16(b, cursor);
        let (attributes_count, cursor) = consume_u16(b, cursor);
        let (attributes, cursor) = consume_attributes(b, attributes_count as usize, cursor);
        v.push(Method {
            access_flags,
            name_index,
            descriptor_index,
            attributes,
        });
        c = cursor;
    }
    (v, c)
}

fn consume_attributes(b: &Vec<u8>, n: usize, cursor: usize) -> (Vec<Attribute>, usize) {
    let mut v: Vec<Attribute> = Vec::new();
    let mut c = cursor;
    while v.len() < n {
        let (name_index, cursor) = consume_u16(b, c);
        let (length, cursor) = consume_u32(b, cursor);
        v.push(Attribute {
            name_index,
            info: b[cursor..cursor + (length as usize)]
                .iter()
                .cloned()
                .collect(),
        });
        c = cursor + (length as usize);
    }
    (v, c)
}

/// A parsed `.class` file.
#[derive(Debug)]
pub struct Class {
    minor_version: u16,
    major_version: u16,
    constant_pool: Vec<Const>,
    access_flags: u16,
    this_class: u16,
    super_class: u16,
    interfaces: Vec<Interface>,
    fields: Vec<Field>,
    methods: Vec<Method>,
    attributes: Vec<Attribute>,
}

impl Class {
    /// Dumps a summary of the class to stdout.
    pub fn print(&self) {
        println!("Major: {}", self.major_version);
        println!("Minor: {}", self.minor_version);
        println!("Access Flags: {}", self.access_flags);
        // BUG FIX: `this_class`/`super_class` are 1-based constant-pool
        // indices (JVM spec §4.1); translate before indexing the 0-based Vec.
        // `super_class` may legitimately be 0 (for java/lang/Object).
        if let Some(i) = (self.this_class as usize).checked_sub(1) {
            self.constant_pool[i].print();
        }
        if let Some(i) = (self.super_class as usize).checked_sub(1) {
            self.constant_pool[i].print();
        }
        for f in self.fields.iter() {
            f.print();
        }
        for m in self.methods.iter() {
            m.print();
        }
    }
}

/// Parses a complete class file from `data`.
///
/// # Panics
///
/// Panics when the magic number is wrong, the buffer is truncated, or an
/// unknown constant-pool tag is encountered.
pub fn parse(data: &Vec<u8>) -> Class {
    let (magic, cursor) = consume_u32(data, 0);
    if magic != MAGIC {
        panic!("Expected Java class file, got {:#?}", magic);
    }
    let (minor_version, cursor) = consume_u16(data, cursor);
    let (major_version, cursor) = consume_u16(data, cursor);
    let (constant_pool_count, cursor) = consume_u16(data, cursor);
    let (constants, cursor) = consume_constant_pool(data, constant_pool_count as usize, cursor);
    let (access_flags, cursor) = consume_u16(data, cursor);
    let (this_class, cursor) = consume_u16(data, cursor);
    let (super_class, cursor) = consume_u16(data, cursor);
    let (interfaces_count, cursor) = consume_u16(data, cursor);
    let (interfaces, cursor) = consume_interfaces(data, interfaces_count as usize, cursor);
    let (fields_count, cursor) = consume_u16(data, cursor);
    let (fields, cursor) = consume_fields(data, fields_count as usize, cursor);
    let (methods_count, cursor) = consume_u16(data, cursor);
    let (methods, cursor) = consume_methods(data, methods_count as usize, cursor);
    let (attributes_count, cursor) = consume_u16(data, cursor);
    let (attributes, _) = consume_attributes(data, attributes_count as usize, cursor);
    Class {
        minor_version,
        major_version,
        constant_pool: constants,
        access_flags,
        this_class,
        super_class,
        interfaces,
        fields,
        methods,
        attributes,
    }
}
//! Raw FFI conversion helpers: each wrapper type gains `from_raw` /
//! `into_raw` so callers can cross the C boundary explicitly.
//!
//! Every `into_raw` below wraps `self` in `ManuallyDrop` so the wrapper's
//! destructor does not run; ownership of the underlying C structure passes
//! to the caller, who must eventually hand it back via `from_raw` (or free
//! it through the C API) to avoid a leak.

#![allow(dead_code)]
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]

// Bindings are generated at build time when the "bindgen" feature is on;
// otherwise a pre-generated checked-in copy is included.
#[cfg(feature = "bindgen")]
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));

#[cfg(not(feature = "bindgen"))]
include!("./bindings.rs");

extern "C" {
    // POSIX dup(2): duplicates a file descriptor.
    pub(crate) fn dup(fd: std::os::raw::c_int) -> std::os::raw::c_int;
}

use crate::{
    Language, LookaheadIterator, Node, Parser, Query, QueryCursor, QueryError, Tree, TreeCursor,
};
use std::{marker::PhantomData, mem::ManuallyDrop, ptr::NonNull, str};

impl Language {
    /// Reconstructs a [`Language`] from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    pub unsafe fn from_raw(ptr: *const TSLanguage) -> Language {
        Language(ptr)
    }

    /// Consumes the [`Language`], returning a raw pointer to the underlying C structure.
    pub fn into_raw(self) -> *const TSLanguage {
        // ManuallyDrop suppresses Drop; the caller now owns the pointer.
        ManuallyDrop::new(self).0
    }
}

impl Parser {
    /// Reconstructs a [`Parser`] from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    pub unsafe fn from_raw(ptr: *mut TSParser) -> Parser {
        Parser(NonNull::new_unchecked(ptr))
    }

    /// Consumes the [`Parser`], returning a raw pointer to the underlying C structure.
    ///
    /// # Safety
    ///
    /// It's a caller responsibility to adjust parser's state
    /// like disable logging or dot graphs printing if this
    /// may cause issues like use after free.
    pub fn into_raw(self) -> *mut TSParser {
        ManuallyDrop::new(self).0.as_ptr()
    }
}

impl Tree {
    /// Reconstructs a [`Tree`] from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    pub unsafe fn from_raw(ptr: *mut TSTree) -> Tree {
        Tree(NonNull::new_unchecked(ptr))
    }

    /// Consumes the [`Tree`], returning a raw pointer to the underlying C structure.
    pub fn into_raw(self) -> *mut TSTree {
        ManuallyDrop::new(self).0.as_ptr()
    }
}

impl<'tree> Node<'tree> {
    /// Reconstructs a [`Node`] from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    pub unsafe fn from_raw(raw: TSNode) -> Node<'tree> {
        // PhantomData carries the `'tree` lifetime parameter the raw C
        // struct cannot express.
        Node(raw, PhantomData)
    }

    /// Consumes the [`Node`], returning a raw pointer to the underlying C structure.
    pub fn into_raw(self) -> TSNode {
        ManuallyDrop::new(self).0
    }
}

impl<'a> TreeCursor<'a> {
    /// Reconstructs a [`TreeCursor`] from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    pub unsafe fn from_raw(raw: TSTreeCursor) -> TreeCursor<'a> {
        TreeCursor(raw, PhantomData)
    }

    /// Consumes the [`TreeCursor`], returning a raw pointer to the underlying C structure.
    pub fn into_raw(self) -> TSTreeCursor {
        ManuallyDrop::new(self).0
    }
}

impl Query {
    /// Reconstructs a [`Query`] from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    pub unsafe fn from_raw(ptr: *mut TSQuery, source: &str) -> Result<Query, QueryError> {
        // Unlike the other wrappers, rebuilding a Query re-validates it
        // against `source`, so this can fail.
        Query::from_raw_parts(ptr, source)
    }

    /// Consumes the [`Query`], returning a raw pointer to the underlying C structure.
    pub fn into_raw(self) -> *mut TSQuery {
        ManuallyDrop::new(self).ptr.as_ptr()
    }
}

impl QueryCursor {
    /// Reconstructs a [`QueryCursor`] from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    pub unsafe fn from_raw(ptr: *mut TSQueryCursor) -> QueryCursor {
        QueryCursor {
            ptr: NonNull::new_unchecked(ptr),
        }
    }

    /// Consumes the [`QueryCursor`], returning a raw pointer to the underlying C structure.
    pub fn into_raw(self) -> *mut TSQueryCursor {
        ManuallyDrop::new(self).ptr.as_ptr()
    }
}

impl LookaheadIterator {
    /// Reconstructs a [`LookaheadIterator`] from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null.
    pub unsafe fn from_raw(ptr: *mut TSLookaheadIterator) -> LookaheadIterator {
        LookaheadIterator(NonNull::new_unchecked(ptr))
    }

    /// Consumes the [`LookaheadIterator`], returning a raw pointer to the underlying C structure.
    pub fn into_raw(self) -> *mut TSLookaheadIterator {
        ManuallyDrop::new(self).0.as_ptr()
    }
}
use azure_core::prelude::*; use azure_storage::core::prelude::*; use azure_storage_queues::prelude::*; use futures::stream::StreamExt; use std::error::Error; use std::num::NonZeroU32; #[tokio::main] async fn main() -> Result<(), Box<dyn Error + Send + Sync>> { // First we retrieve the account name and master key from environment variables. let account = std::env::var("STORAGE_ACCOUNT").expect("Set env variable STORAGE_ACCOUNT first!"); let master_key = std::env::var("STORAGE_MASTER_KEY").expect("Set env variable STORAGE_MASTER_KEY first!"); let http_client = new_http_client(); let storage_account = StorageAccountClient::new_access_key(http_client.clone(), &account, &master_key); let queue_service = storage_account.as_queue_service_client(); println!("getting service stats"); let response = queue_service.get_queue_service_stats().execute().await?; println!("get_queue_service_properties.response == {:#?}", response); println!("getting service properties"); let response = queue_service .get_queue_service_properties() .execute() .await?; println!("get_queue_service_stats.response == {:#?}", response); println!("enumerating queues starting with a"); let response = queue_service .list_queues() .prefix("a") .include_metadata(true) .max_results(NonZeroU32::new(2u32).unwrap()) .execute() .await?; println!("response == {:#?}", response); println!("streaming queues"); let mut stream = Box::pin( queue_service .list_queues() .max_results(NonZeroU32::new(3u32).unwrap()) .stream(), ); while let Some(value) = stream.next().await { let value = value?; let len = value.queues.len(); println!("received {} queues", len); value .queues .iter() .for_each(|queue| println!("{:#?}", queue)); } Ok(()) }
// Vicfred
// https://atcoder.jp/contests/abc157/tasks/abc157_b
// simulation
use std::collections::HashSet;
use std::io;

/// Returns `true` when any row, column, or diagonal of the 3x3 bingo
/// card `card` is fully contained in `marked`.
///
/// Extracted from `main` so the bingo check is testable; replaces eight
/// near-duplicate `HashSet` constructions.
fn has_bingo(card: &[Vec<i64>], marked: &HashSet<i64>) -> bool {
    let mut lines: Vec<Vec<i64>> = Vec::new();
    for i in 0..3 {
        // Row i and column i.
        lines.push(card[i].clone());
        lines.push((0..3).map(|j| card[j][i]).collect());
    }
    // The two diagonals.
    lines.push((0..3).map(|i| card[i][i]).collect());
    lines.push((0..3).map(|i| card[i][2 - i]).collect());
    lines
        .iter()
        .any(|line| line.iter().all(|x| marked.contains(x)))
}

fn main() {
    // Read the 3x3 card (renamed from `A`/`N` to silence non_snake_case
    // warnings; behavior is unchanged).
    let mut card = Vec::<Vec<i64>>::new();
    for _ in 0..3 {
        let mut line = String::new();
        io::stdin().read_line(&mut line).unwrap();
        let row: Vec<i64> = line
            .split_whitespace()
            .map(|x| x.parse().unwrap())
            .collect();
        card.push(row);
    }

    // Number of called values.
    let mut n = String::new();
    io::stdin().read_line(&mut n).unwrap();
    let n: i64 = n.trim().parse().unwrap();

    // The set of called values.
    let mut marked = HashSet::new();
    for _ in 0..n {
        let mut b = String::new();
        io::stdin().read_line(&mut b).unwrap();
        let b: i64 = b.trim().parse().unwrap();
        marked.insert(b);
    }

    println!("{}", if has_bingo(&card, &marked) { "Yes" } else { "No" });
}
// send command to renderengine use gfx; use glutin; use types::*; use camera::Camera; use std::io::Cursor; use std::sync::{Arc, Mutex, Once, ONCE_INIT}; use std::{mem, thread}; use std::time::Duration; pub enum RenderSystemState{ UnInited, Inited, Exited, } pub struct RenderSystem{ // Since we will be used in many threads, we need to protect // concurrent access pub inner: Arc<Mutex<u8>>, pub sleep_time: Arc<Mutex<u64>>, pub state: Arc<Mutex<RenderSystemState>>, device: Device, factory: Factory, encoder: Encoder, output_color: OutputColor, output_depth: OutputDepth, } impl RenderSystem { pub fn new(device:Device, mut factory: Factory, main_color: OutputColor, main_depth: OutputDepth ) -> Self { let encoder = factory.create_command_buffer().into(); RenderSystem { inner: Arc::new(Mutex::new(0)), sleep_time: Arc::new(Mutex::new(0u64)), state: Arc::new(Mutex::new(RenderSystemState::Inited)), device: device, factory: factory, encoder: encoder, output_color: main_color, output_depth: main_depth, } } fn on_update_begin(&self){ } fn on_update_end(&self){ } fn on_render_begin(&self){ } fn on_render_end(&self) { } fn main_loop(&mut self) { } fn process_input(&mut self, event: &glutin::Event) { // self.scene_system.process_input(event); } pub fn start_up(&mut self) { self.main_loop(); } fn shout_down(&mut self) { *self.state.lock().unwrap() = RenderSystemState::Exited; return; } fn update(&mut self) { self.on_update_begin(); self.on_update_end(); } pub fn render(&mut self, elapsed_time:u64) { match *self.state.lock().unwrap() { RenderSystemState::Exited => return, _ => {}, } self.on_render_begin(); // *self.sleep_time.lock().unwrap() = (1000.0 / 60.0) as u64 - elapsed_time; self.on_render_end(); } #[allow(dead_code)] fn pause(&self) { } // render engine fn get_renderengine(&self) { } } #[cfg(test)] mod tests { use super::*; fn test_load_obj() { } }
extern crate wm_daemons; use wm_daemons::config::{load_config, load_config_path}; use wm_daemons::dbus_listen::{CallbackMap, match_method}; #[macro_use] extern crate clap; use clap::{Arg, App}; extern crate config; use self::config::types::Config; extern crate dbus; use self::dbus::{Connection, BusType}; use std::error::Error; use std::path::Path; struct Context { conf: &Config, } fn try_main() -> Result<(), Box<Error>> { let matches = App::new("ruskey") .version(&crate_version!()) .author("Ben Boeckel <mathstuf@gmail.com>") .about("Implement the SecretService API for keepass databases") .arg(Arg::with_name("CONFIG") .short("c") .long("config") .help("Path to the configuration file") .takes_value(true)) .arg(Arg::with_name("DATABASE_NAME") .short("n") .long("name") .help("Name of the database") .takes_value(true)) .arg(Arg::with_name("DATABASE") .short("d") .long("database") .help("Path to the database") .takes_value(true)) .get_matches(); let conf = try!(if matches.is_present("CONFIG") { load_config_path(Path::new(matches.value_of("CONFIG").unwrap())) } else { load_config("ruskey", "config") }); let conn = try!(Connection::get_private(BusType::Session)); let cbs: CallbackMap<Context> = vec![ // TODO ]; let ctx = Context { conf: &conf, }; let match_str = ""; try!(conn.add_match(match_str)); conn.iter(100).fold(ctx, |inner_ctx, item| { match_method(inner_ctx, &cbs, item) }); Ok(()) } fn main() { if let Err(err) = try_main() { panic!("{}", err.description()); } }
// Cargo build script: instructs rustc to link the system `rtlsdr`
// shared library into the produced binary.
fn main() {
    println!("cargo:rustc-link-lib=rtlsdr");
}
pub mod dijkstra {
    use std::cmp::Ordering;
    use std::collections::BTreeSet;
    use std::u32;

    /// A tentative shortest-path distance (`cost`) to `node`.
    #[derive(Copy, Clone, Eq, PartialEq)]
    pub struct Distance {
        pub node: usize,
        pub cost: u32,
    }

    /// A directed edge to `node` with non-negative weight `cost`.
    pub struct Edge {
        pub node: usize,
        pub cost: u32,
    }

    impl Ord for Distance {
        fn cmp(&self, other: &Distance) -> Ordering {
            // BUG FIX: `Ord` must be consistent with the derived `Eq`.
            // Comparing only `cost` made two *different* nodes with equal
            // cost compare as "equal", so `BTreeSet::insert` silently
            // dropped one of them and whole subgraphs were never explored.
            // Break ties on `node` so distinct entries never collide.
            self.cost
                .cmp(&other.cost)
                .then_with(|| self.node.cmp(&other.node))
        }
    }

    impl PartialOrd for Distance {
        fn partial_cmp(&self, other: &Distance) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }

    /// Returns the cost of the shortest path from `start` to `target`, or
    /// `u32::MAX` when `target` is unreachable.
    pub fn dijkstra(adj_list: &Vec<Vec<Edge>>, start: usize, target: usize) -> u32 {
        let vertices_num = adj_list.len();
        let mut distances: Vec<_> = (0..vertices_num).map(|_| u32::MAX).collect();
        // A BTreeSet doubles as a priority queue that also supports
        // removing a stale entry when a node's distance improves.
        let mut heap = BTreeSet::new();

        distances[start] = 0;
        heap.insert(Distance { cost: 0, node: start });

        while !heap.is_empty() {
            // The smallest element of a BTreeSet is the first one.
            let min = *heap.iter().next().unwrap();
            heap.remove(&min);
            let Distance { node, cost } = min;
            if node == target {
                return cost;
            }
            for edge in adj_list[node].iter() {
                if distances[node] + edge.cost < distances[edge.node] {
                    // Replace the stale entry (if any) with the improved one.
                    heap.remove(&Distance { node: edge.node, cost: distances[edge.node] });
                    distances[edge.node] = distances[node] + edge.cost;
                    heap.insert(Distance { node: edge.node, cost: distances[edge.node] });
                }
            }
        }
        u32::MAX
    }
}
#[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> { // TODO: Handle this issue in the sdk // Introduce delay so that dapr grpc port is assigned before app tries to connect std::thread::sleep(std::time::Duration::new(2, 0)); // Get the Dapr port and create a connection let port: u16 = std::env::var("DAPR_GRPC_PORT")?.parse()?; let addr = format!("https://127.0.0.1:{}", port); // Create the client let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr).await?; let key = String::from("hello"); let val = String::from("world").into_bytes(); let store_name = String::from("statestore"); // save key-value pair in the state store client.save_state(store_name, vec![(key, val)]).await?; println!("Successfully saved!"); let get_response = client.get_state("statestore", "hello", None).await?; println!("Value is {:?}", String::from_utf8_lossy(&get_response.data)); // delete a value from the state store client.delete_state("statestore", "hello", None).await?; // validate if the value was successfully deleted let del_result = client.get_state("statestore", "hello", None).await?; // should print "[]" upon successful delete println!("Deleted value: {:?}", del_result.data); Ok(()) }
// Private submodules; their items are re-exported below.
mod blend_mode;
mod composite;
mod line_cap;
mod line_join;
mod mask;
mod text_based;
mod text_grouping;
mod text_shape;
mod transform;

// Flatten every submodule's public items into this module's namespace so
// callers import from here without knowing the internal file layout.
pub use self::{
    blend_mode::*, composite::*, line_cap::*, line_join::*, mask::*, text_based::*,
    text_grouping::*, text_shape::*, transform::*,
};
use objc::Encode;
use objc::Encoding;

// https://developer.apple.com/documentation/foundation/nssize
/// FFI mirror of Foundation's `NSSize` (an alias of `CGSize`).
#[derive(Clone, Copy, Debug, Default)]
#[repr(C)]
pub struct NSSize {
    // BUG FIX: CGSize is declared as `{ CGFloat width; CGFloat height; }`.
    // With `#[repr(C)]` the Rust field order defines the memory layout, so
    // `width` must come first — declaring `height` first swapped the two
    // fields of every value crossing the Objective-C boundary.
    pub width: f64,
    pub height: f64,
}

impl NSSize {
    /// Creates a size from `width` and `height`.
    pub fn new(width: f64, height: f64) -> Self {
        Self { width, height }
    }
}

unsafe impl Encode for NSSize {
    fn encode() -> Encoding {
        // Objective-C type encoding for a struct of two doubles, matching
        // what the runtime reports for CGSize.
        let encoding = format!(
            "{{CGSize={}{}}}",
            f64::encode().as_str(),
            f64::encode().as_str(),
        );
        unsafe { Encoding::from_str(&encoding) }
    }
}
#![allow(unused, unused_mut)]
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use chrono::prelude::*;
use num_traits::ToPrimitive;
use num_traits::cast::FromPrimitive;
use rust_decimal::Decimal;
use serde_json;
use uuid::Uuid;

use postgres;
use postgres::rows::{Row, Rows};
use postgres::stmt::Statement;
use postgres::transaction::Transaction;
use postgres::types::{IsNull, ToSql, Type};
use postgres::{Connection, Result as PsqlResult, TlsMode};

use std::boxed::Box;
use std::cell;
use std::cell::RefCell;
use std::error::Error as StdErr;
use std::result;

use driver::DriverError;
use driver::Result;

/// A JDBC-style parameter value decoded from JSON, ready to be bound to a
/// Postgres statement via the `ToSql` impl below.
#[derive(Debug, PartialEq)]
pub enum JdbcParameter {
    Int(MagicInt),
    String(String),
    Boolean(bool),
    Null,
    Double(MagicFloat),
    DateTime(DateTime<Utc>),
    Long(i64),
    UUID(Uuid),
}

/// Discriminator tags mirroring `JdbcParameter` variants (plus extra tags —
/// presumably used elsewhere in the wire protocol; not constructed here).
#[derive(Serialize, Debug)]
pub enum JdbcParameterType {
    Int,
    String,
    Boolean,
    Null,
    Double,
    DateTime,
    Long,
    UUID,
    VOID,
    StringArray
}

/// JSON shape of a DateTime parameter: broken-down UTC calendar fields.
#[derive(Serialize, Deserialize)]
pub struct MagicDateTime {
    pub year: i32,
    pub month: u32,
    pub day: u32,
    pub hour: u32,
    pub minute: u32,
    pub seconds: u32,
    pub millis: u32,
}

impl JdbcParameter {
    /// Maps a list of parameters to `ToSql` trait objects for binding.
    pub fn paramsToSql(params: Vec<&JdbcParameter>) -> Vec<&ToSql> {
        params.into_iter().map(|p| JdbcParameter::paramToSql(p)).collect()
    }

    /// For numeric/date/null variants the enum itself implements `ToSql`
    /// (see below); the remaining variants delegate to their inner value's
    /// stock `ToSql` impl.
    pub fn paramToSql(param: &JdbcParameter) -> &ToSql {
        match param {
            JdbcParameter::Int(_) => param,
            JdbcParameter::Null => param,
            JdbcParameter::Double(_) => param,
            JdbcParameter::DateTime(_) => param,
            JdbcParameter::String(ref s) => s,
            JdbcParameter::Boolean(ref b) => b,
            JdbcParameter::Long(ref d) => d,
            JdbcParameter::UUID(ref uid) => uid,
        }
    }
}

/// A float whose concrete Postgres column type is only learned at bind
/// time; `underlying` is filled in by `to_sql_checked`.
#[derive(Debug, PartialEq)]
pub struct MagicFloat {
    value: f64,
    underlying: RefCell<Option<Type>>,
}

/// An integer whose concrete Postgres column type is only learned at bind
/// time; `underlying` is filled in by `to_sql_checked`.
#[derive(Debug, PartialEq)]
pub struct MagicInt {
    value: i64,
    underlying: RefCell<Option<Type>>,
}

impl MagicFloat {
    // Records the column type through interior mutability (&self only).
    fn setType(&self, ty: Type) {
        self.underlying.replace(Some(ty));
    }
}

impl MagicInt {
    // Records the column type through interior mutability (&self only).
    fn setType(&self, ty: Type) {
        self.underlying.replace(Some(ty));
    }
}

impl ToSql for JdbcParameter {
    /// Serializes the parameter into Postgres' binary wire format.
    /// For `Int`/`Double` the width is chosen from the `underlying` type
    /// stashed by `to_sql_checked` (taken out via `replace(None)`).
    fn to_sql(
        &self,
        ty: &Type,
        out: &mut Vec<u8>,
    ) -> result::Result<IsNull, Box<StdErr + Sync + Send>>
    where
        Self: Sized,
    {
        match self {
            JdbcParameter::Null => Ok(IsNull::Yes),
            JdbcParameter::DateTime(ref dt) => {
                // chrono's DateTime already knows how to encode itself.
                dt.to_sql(ty, out)
            }
            JdbcParameter::Int(ref magic) => {
                let val = magic.underlying.replace(None);
                match val {
                    Some(t) => {
                        // Write the integer at the width the column expects.
                        match t {
                            postgres::types::INT2 => out.write_i16::<BigEndian>(magic.value as i16).unwrap(),
                            postgres::types::INT4 => out.write_i32::<BigEndian>(magic.value as i32).unwrap(),
                            postgres::types::INT8 => out.write_i64::<BigEndian>(magic.value).unwrap(),
                            x => error!("[Rust] Unhandled MagicInt OID: {}", x),
                        }
                    }
                    x => panic!("[Rust] No underlying type present for MagicInt."),
                }
                Ok(IsNull::No)
            }
            JdbcParameter::Double(ref magic) => {
                let val = magic.underlying.replace(None);
                match val {
                    Some(t) => {
                        match t {
                            postgres::types::FLOAT8 => out.write_f64::<BigEndian>(magic.value).unwrap(), // Float8
                            postgres::types::NUMERIC => {
                                // NUMERIC columns go through rust_decimal.
                                Decimal::from_f64(magic.value).unwrap().to_sql(ty, out);
                            },
                            x => error!("[Rust] Unhandled MagicFloat OID: {}", x),
                        }
                    }
                    x => panic!("[Rust] No underlying type present for MagicFloat."),
                }
                Ok(IsNull::No)
            }
            // String/Boolean/Long/UUID are bound via their inner values in
            // `paramToSql`, so reaching here with them is a logic error.
            x => panic!("ToSql no match {:?}", x),
        }
    }

    // Accept any column type; actual compatibility is decided in to_sql.
    fn accepts(ty: &Type) -> bool
    where
        Self: Sized,
    {
        true
    }

    /// Entry point used by the driver: stashes the column type on the
    /// Magic wrappers before delegating to `to_sql`.
    fn to_sql_checked(
        &self,
        ty: &Type,
        out: &mut Vec<u8>,
    ) -> result::Result<IsNull, Box<StdErr + Sync + Send>> {
        match self {
            // Todo: Cloning is inefficient. Alternative?
            JdbcParameter::Int(ref magic) => magic.setType(ty.clone()),
            JdbcParameter::Double(ref magic) => magic.setType(ty.clone()),
            _ => (),
        };
        self.to_sql(ty, out)
    }
}

/// Parses a JSON array of arrays into a batch of parameter lists.
pub fn toJdbcParameterList(str: &String) -> Result<Vec<Vec<JdbcParameter>>> {
    match serde_json::from_str::<serde_json::Value>(&*str) {
        Ok(serde_json::Value::Array(elements)) => elements.iter().map(toJdbcParametersInner).collect(),
        Ok(json) => Err(DriverError::GenericError(String::from(format!(
            "provided json was not an array of arrays: {}",
            json
        )))),
        Err(e) => Err(DriverError::GenericError(String::from(format!(
            "json parsing failed: {}",
            e
        )))),
    }
}

/// Parses a JSON array into a single parameter list.
pub fn toJdbcParameters(str: &String) -> Result<Vec<JdbcParameter>> {
    let json = serde_json::from_str::<serde_json::Value>(&*str)?;
    toJdbcParametersInner(&json)
}

// Shared helper: each array element becomes one JdbcParameter.
fn toJdbcParametersInner(json: &serde_json::Value) -> Result<Vec<JdbcParameter>> {
    match json {
        serde_json::Value::Array(elements) => {
            let x: Result<Vec<JdbcParameter>> = elements.iter().map(jsonToJdbcParameter).collect();
            x
        },
        json => Err(DriverError::GenericError(String::from(format!(
            "provided json was not an array: {}",
            json
        )))),
    }
}

// A parameter must be a JSON object ({"discriminator": ..., "value": ...}).
fn jsonToJdbcParameter(json: &serde_json::Value) -> Result<JdbcParameter> {
    match json {
        &serde_json::Value::Object(ref map) => jsonObjectToJdbcParameter(map),
        x => Err(DriverError::GenericError(format!(
            "{} is not a valid value for a JdbcParameter",
            x
        ))),
    }
}

// Maps the textual discriminator to its tag; rejects unknown tags.
fn parseDiscriminator(d: &str) -> Result<JdbcParameterType> {
    match d {
        "Int" => Ok(JdbcParameterType::Int),
        "String" => Ok(JdbcParameterType::String),
        "Boolean" => Ok(JdbcParameterType::Boolean),
        "Null" => Ok(JdbcParameterType::Null),
        "Double" => Ok(JdbcParameterType::Double),
        "DateTime" => Ok(JdbcParameterType::DateTime),
        "Long" => Ok(JdbcParameterType::Long),
        "UUID" => Ok(JdbcParameterType::UUID),
        x => Err(DriverError::GenericError(format!("discriminator {} is unhandled",d)))
    }
}

/// Decodes one {discriminator, value} object into a JdbcParameter,
/// validating that the JSON value kind matches the discriminator.
fn jsonObjectToJdbcParameter(map: &serde_json::Map<String, serde_json::Value>) -> Result<JdbcParameter> {
    // NOTE(review): missing "discriminator"/"value" keys panic via unwrap
    // rather than returning a driver error.
    let discriminator = parseDiscriminator(map.get("discriminator").unwrap().as_str().unwrap())?;
    let value = map.get("value").unwrap();

    match (discriminator, value) {
        (JdbcParameterType::Int, &serde_json::Value::Number(ref n)) => Ok(JdbcParameter::Int(MagicInt {
            value: n.as_i64().unwrap(),
            underlying: RefCell::new(None),
        })),
        (JdbcParameterType::String, &serde_json::Value::String(ref s)) => Ok(JdbcParameter::String(s.to_string())),
        (JdbcParameterType::Boolean, &serde_json::Value::Bool(b)) => Ok(JdbcParameter::Boolean(b)),
        (JdbcParameterType::Null, &serde_json::Value::Null) => Ok(JdbcParameter::Null),
        (JdbcParameterType::Double, &serde_json::Value::Number(ref n)) => Ok(JdbcParameter::Double(MagicFloat {
            value: n.as_f64().unwrap(),
            underlying: RefCell::new(None),
        })),
        (JdbcParameterType::DateTime, x @ &serde_json::Value::Object(_)) => {
            // Rebuild a chrono DateTime<Utc> from the broken-down fields.
            let date: MagicDateTime = serde_json::from_value(x.clone())?;
            let dateTime = Utc.ymd(date.year, date.month, date.day).and_hms_milli(date.hour, date.minute, date.seconds, date.millis);
            Ok(JdbcParameter::DateTime(dateTime))
        },
        (JdbcParameterType::Long, &serde_json::Value::Number(ref n)) => Ok(JdbcParameter::Long(n.as_i64().unwrap())),
        (JdbcParameterType::UUID, &serde_json::Value::String(ref uuid)) => Ok(JdbcParameter::UUID(Uuid::parse_str(uuid)?)),
        (d, v) => Err(DriverError::GenericError(format!("Invalid combination: {:?} value {}", d, v)))
    }
}
use std::env;
use std::fs;

/// AoC 2020 day 3: descend the grid one row per step, moving `slope`
/// columns right per row (the pattern wraps horizontally), and count the
/// trees (`#`) hit along the way.
fn main() {
    let argv: Vec<String> = env::args().collect();
    let slope: f64 = argv[1].parse().unwrap();
    let grid_text = fs::read_to_string(&argv[2])
        .expect("Something went wrong reading the file");
    let grid: Vec<&str> = grid_text.lines().collect();

    let mut hits = 0;
    let mut col: f64 = 0.0;
    for row in 0..grid.len() {
        // Only whole columns count; fractional positions skip this row.
        if col % 1.0 == 0.0 && hit_tree(col as usize, row, &grid) {
            hits += 1;
        }
        col += slope;
        // Wrap around — the pattern repeats to the right indefinitely.
        let row_width = grid[0].len() as f64;
        if col >= row_width {
            col -= row_width;
        }
    }
    println!("trees hit: {}", hits);
}

/// True when the grid cell at column `x` of row `y` holds a tree (`#`).
fn hit_tree(x: usize, y: usize, lines: &Vec<&str>) -> bool {
    &lines[y][x..=x] == "#"
}
use std::sync::Arc;

use crossbeam::atomic::AtomicCell;
use futures::{
    executor::ThreadPool,
    future::{Future, RemoteHandle},
    task::SpawnExt,
};

/// A future spawned on a thread pool plus a non-blocking way to ask
/// whether it finished and to fetch its output once it has.
pub struct AsyncResult<T: Send> {
    // Handle to the spawned future; `take()`n when the result is first
    // materialized, leaving `None`.
    future: Option<RemoteHandle<T>>,
    // Cached output once the future has been resolved.
    result: Option<T>,
    // Flipped to `true` by the spawned task just before it yields output.
    ready: Arc<AtomicCell<bool>>,
}

impl<T: Send> AsyncResult<T> {
    /// Spawns `future` on `thread_pool`, wrapping it so that completion
    /// sets the shared `ready` flag.
    pub fn new<Fut>(thread_pool: &ThreadPool, future: Fut) -> Self
    where
        Fut: Future<Output = T> + Send + 'static,
    {
        let is_ready = Arc::new(AtomicCell::new(false));
        let inner_is_ready = is_ready.clone();
        let future = async move {
            let output = future.await;
            // Flag completion before handing the output to the handle.
            inner_is_ready.store(true);
            output
        };
        let handle = thread_pool.spawn_with_handle(future).unwrap();
        Self {
            future: Some(handle),
            result: None,
            ready: is_ready,
        }
    }

    /// True once the spawned future has run to completion.
    pub fn is_ready(&self) -> bool {
        self.ready.load()
    }
}

impl<T: Send + 'static> AsyncResult<T> {
    /// Returns a reference to the output if the future has completed,
    /// resolving and caching it on first call; `None` while running.
    pub fn get_result_if_ready(&mut self) -> Option<&T> {
        if !self.is_ready() {
            return None;
        }
        if self.result.is_some() {
            return self.result.as_ref();
        }
        if let Some(future) = self.future.take() {
            // The future is already complete, so blocking here only
            // collects the finished value.
            let value = futures::executor::block_on(future);
            self.result = Some(value);
        }
        self.result.as_ref()
    }

    /// Resolves and caches the output if ready; no-op otherwise.
    pub fn move_result_if_ready(&mut self) {
        if !self.is_ready() || self.result.is_some() {
            return;
        }
        if let Some(future) = self.future.take() {
            let value = futures::executor::block_on(future);
            self.result = Some(value);
        }
    }

    /// Returns the cached output — only populated after one of the
    /// `*_if_ready` methods has materialized it.
    pub fn get_result(&self) -> Option<&T> {
        self.result.as_ref()
    }

    /// Takes ownership of the output if the future has completed.
    pub fn take_result_if_ready(&mut self) -> Option<T> {
        if !self.is_ready() {
            return None;
        }
        self.move_result_if_ready();
        self.result.take()
    }
}
use lsp_types;

use jrsonnet_evaluator;
use jrsonnet_parser;
use jrsonnet_parser::peg::str::LineCol;

/// Converts a peg `LineCol` (absolute byte `offset` into `code`) into an
/// LSP `Position` (zero-based line/character).
pub fn location_to_position(code: &str, line_col: &LineCol) -> lsp_types::Position {
    let lines = code.split('\n');
    let mut offset = line_col.offset;
    // Reduce the absolute offset to an offset within its containing line
    // (the extra +1 accounts for each '\n' separator).
    for line in lines {
        if offset <= line.len() {
            break;
        }
        offset -= line.len() + 1;
    }
    // NOTE(review): both `- 1`s underflow (panicking in debug builds) if
    // `line` or the remaining `offset` is 0 — confirm LineCol is 1-based.
    return lsp_types::Position {
        line: line_col.line as u32 - 1,
        character: offset as u32 - 1,
    };
}

/// Parses (and, on success, evaluates) `text`, returning LSP diagnostics
/// for any parse error found. Evaluation errors are currently ignored.
pub fn parse(text: &str) -> Vec<lsp_types::Diagnostic> {
    let settings = jrsonnet_parser::ParserSettings::default();
    let parsed = jrsonnet_parser::parse(&text, &settings);
    let mut diagnostics = vec![];
    match parsed {
        Ok(ast) => {
            // Evaluate for side effects only; the result is discarded.
            let context = jrsonnet_evaluator::Context::new();
            let _result = jrsonnet_evaluator::evaluate(context, &ast);
        }
        Err(err) => {
            let position_start = location_to_position(text, &err.location);
            // Highlight a single character at the error location.
            let position_end = lsp_types::Position {
                line: position_start.line,
                character: position_start.character + 1,
            };
            diagnostics.push(lsp_types::Diagnostic {
                range: lsp_types::Range {
                    start: position_start,
                    end: position_end,
                },
                severity: Some(lsp_types::DiagnosticSeverity::Error),
                message: err.to_string(),
                ..lsp_types::Diagnostic::default()
            });
        }
    };
    return diagnostics;
}

#[cfg(test)]
mod tests {
    #[test]
    fn parse_simple_jsonnet() {
        let code = r#"
        {
            test1: 1,
            test2: 2.0,
            test3: 3,
        }
        "#;
        let res = super::parse(&code);
        assert_eq!(res, vec![]);
    }

    #[test]
    fn parse_simple_jsonnet_parse_error() {
        // NOTE(review): the input below is missing a comma, yet the test
        // expects zero diagnostics — confirm this assertion is intended.
        let code = r#"
        {
            test1: 1,
            test2: 2.0
            test3: 3,
        }
        "#;
        let res = super::parse(&code);
        assert_eq!(res, vec![]);
    }
}
use wasm_bindgen::prelude::*;

/// Returns the hypotenuse of a right triangle with legs `a` and `b`.
#[wasm_bindgen]
pub fn solve(a: f64, b: f64) -> f64 {
    let sum_of_squares = a.powi(2) + b.powi(2);
    sum_of_squares.sqrt()
}
impl Solution { pub fn two_sum(nums: Vec<i32>, target: i32) -> Vec<i32> { let mut end_vec = vec![0 as i32, 1 as i32]; for (i, num1) in nums.iter().enumerate() { for (j, suposed_num2) in nums.iter().enumerate() { if (i != j) { if (num1 + suposed_num2 == target) { end_vec = vec![i as i32, j as i32].to_vec(); } } }; }; return end_vec } }
// Auto-generated (svd2rust-style) accessors for the RX_FIFO_CTRL register.
// NOTE(review): generated code — prefer regenerating over hand-editing.
#[doc = "Reader of register RX_FIFO_CTRL"]
pub type R = crate::R<u32, super::RX_FIFO_CTRL>;
#[doc = "Writer for register RX_FIFO_CTRL"]
pub type W = crate::W<u32, super::RX_FIFO_CTRL>;
#[doc = "Register RX_FIFO_CTRL `reset()`'s with value 0"]
impl crate::ResetValue for super::RX_FIFO_CTRL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `TRIGGER_LEVEL`"]
pub type TRIGGER_LEVEL_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `TRIGGER_LEVEL`"]
pub struct TRIGGER_LEVEL_W<'a> {
    w: &'a mut W,
}
impl<'a> TRIGGER_LEVEL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits [7:0] of the register.
        self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff);
        self.w
    }
}
#[doc = "Reader of field `CLEAR`"]
pub type CLEAR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CLEAR`"]
pub struct CLEAR_W<'a> {
    w: &'a mut W,
}
impl<'a> CLEAR_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 16 of the register.
        self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
        self.w
    }
}
#[doc = "Reader of field `FREEZE`"]
pub type FREEZE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FREEZE`"]
pub struct FREEZE_W<'a> {
    w: &'a mut W,
}
impl<'a> FREEZE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 17 of the register.
        self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
        self.w
    }
}
// Read-side accessors: extract each field from the raw register value.
impl R {
    #[doc = "Bits 0:7 - Trigger level. When the receiver FIFO has more entries than the number of this field, a receiver trigger event is generated."]
    #[inline(always)]
    pub fn trigger_level(&self) -> TRIGGER_LEVEL_R {
        TRIGGER_LEVEL_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bit 16 - When '1', the receiver FIFO and receiver shift register are cleared/invalidated. Invalidation will last for as long as this field is '1'. If a quick clear/invalidation is required, the field should be set to '1' and be followed by a set to '0'. If a clear/invalidation is required for an extended time period, the field should be set to '1' during the complete time period."]
    #[inline(always)]
    pub fn clear(&self) -> CLEAR_R {
        CLEAR_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - When '1', hardware writes to the receiver FIFO have no effect. Freeze will not advance the RX FIFO write pointer."]
    #[inline(always)]
    pub fn freeze(&self) -> FREEZE_R {
        FREEZE_R::new(((self.bits >> 17) & 0x01) != 0)
    }
}
// Write-side accessors: each returns a proxy that mutates the writer.
impl W {
    #[doc = "Bits 0:7 - Trigger level. When the receiver FIFO has more entries than the number of this field, a receiver trigger event is generated."]
    #[inline(always)]
    pub fn trigger_level(&mut self) -> TRIGGER_LEVEL_W {
        TRIGGER_LEVEL_W { w: self }
    }
    #[doc = "Bit 16 - When '1', the receiver FIFO and receiver shift register are cleared/invalidated. Invalidation will last for as long as this field is '1'. If a quick clear/invalidation is required, the field should be set to '1' and be followed by a set to '0'. If a clear/invalidation is required for an extended time period, the field should be set to '1' during the complete time period."]
    #[inline(always)]
    pub fn clear(&mut self) -> CLEAR_W {
        CLEAR_W { w: self }
    }
    #[doc = "Bit 17 - When '1', hardware writes to the receiver FIFO have no effect. Freeze will not advance the RX FIFO write pointer."]
    #[inline(always)]
    pub fn freeze(&mut self) -> FREEZE_W {
        FREEZE_W { w: self }
    }
}
// Submodule declarations. `search` and `help` are flattened into this
// namespace via glob re-exports; `servers` stays addressable as its own
// public module.
mod search;
pub use self::search::*;

mod help;
pub use self::help::*;

pub mod servers;
use std::env;
use std::ffi::OsString;
use std::fs;
use std::io::{self, Write};
use std::path::PathBuf;

use crate::directories::PROJECT_DIRS;

/// Returns the path of bat's config file: `$BAT_CONFIG_PATH` when it
/// points at an existing file, otherwise `<config_dir>/config`.
pub fn config_file() -> PathBuf {
    env::var("BAT_CONFIG_PATH")
        .ok()
        .map(PathBuf::from)
        .filter(|config_path| config_path.is_file())
        .unwrap_or_else(|| PROJECT_DIRS.config_dir().join("config"))
}

/// Writes a commented default config file, interactively asking before
/// overwriting an existing one.
pub fn generate_config_file() -> bat::errors::Result<()> {
    let config_file = config_file();
    if config_file.exists() {
        println!(
            "A config file already exists at: {}",
            config_file.to_string_lossy()
        );

        print!("Overwrite? (y/N): ");
        io::stdout().flush()?;
        let mut decision = String::new();
        io::stdin().read_line(&mut decision)?;

        // Anything other than "y"/"Y" aborts without touching the file.
        if !decision.trim().eq_ignore_ascii_case("Y") {
            return Ok(());
        }
    } else {
        let config_dir = config_file.parent();
        match config_dir {
            Some(path) => fs::create_dir_all(path)?,
            None => {
                // `?` converts the String into the crate's error type.
                return Ok(Err(format!(
                    "Unable to write config file to: {}",
                    config_file.to_string_lossy()
                ))?)
            }
        }
    }

    let default_config = r#"# bat config

# Specify desired theme (e.g. "TwoDark"). Issue `bat --list-themes` for a list of all available themes
#--theme="TwoDark"

# Enable this to use italic text on the terminal (not supported on all terminals):
#--italic-text=always

# Uncomment the following line to disable automatic paging:
#--paging=never

# Use C++ syntax for .ino files
#--map-syntax "*.ino:C++"

# Use ".gitignore"-style highlighting for ".ignore" files
#--map-syntax ".ignore:Git Ignore"
"#;

    fs::write(&config_file, default_config)?;
    println!(
        "Success! Config file written to {}",
        config_file.to_string_lossy()
    );

    return Ok(());
}

/// Reads CLI arguments from the config file; a missing/unreadable file
/// yields an empty argument list rather than an error.
pub fn get_args_from_config_file() -> Result<Vec<OsString>, shell_words::ParseError> {
    Ok(fs::read_to_string(config_file())
        .ok()
        .map(|content| get_args_from_str(&content))
        .transpose()?
        .unwrap_or_else(|| vec![]))
}

/// Reads additional CLI arguments from the `BAT_OPTS` env variable.
pub fn get_args_from_env_var() -> Option<Result<Vec<OsString>, shell_words::ParseError>> {
    env::var("BAT_OPTS").ok().map(|s| get_args_from_str(&s))
}

// Splits config text into arguments: each non-empty, non-comment line is
// shell-word-split, then all lines are concatenated.
fn get_args_from_str(content: &str) -> Result<Vec<OsString>, shell_words::ParseError> {
    let args_per_line = content
        .split('\n')
        .map(|line| line.trim())
        .filter(|line| !line.is_empty())
        .filter(|line| !line.starts_with('#'))
        .map(|line| shell_words::split(line))
        .collect::<Result<Vec<_>, _>>()?;

    Ok(args_per_line
        .iter()
        .flatten()
        .map(|line| line.into())
        .collect())
}

#[test]
fn empty() {
    let args = get_args_from_str("").unwrap();
    assert!(args.is_empty());
}

#[test]
fn single() {
    assert_eq!(vec!["--plain"], get_args_from_str("--plain").unwrap());
}

#[test]
fn multiple() {
    assert_eq!(
        vec!["--plain", "--language=cpp"],
        get_args_from_str("--plain --language=cpp").unwrap()
    );
}

// Quoted values survive as single arguments.
#[test]
fn quotes() {
    assert_eq!(
        vec!["--theme", "Sublime Snazzy"],
        get_args_from_str("--theme \"Sublime Snazzy\"").unwrap()
    );
}

#[test]
fn multi_line() {
    let config = "
    -p
    --style numbers,changes

    --color=always
    ";
    assert_eq!(
        vec!["-p", "--style", "numbers,changes", "--color=always"],
        get_args_from_str(config).unwrap()
    );
}

#[test]
fn comments() {
    let config = "
    # plain style
    -p

    # show line numbers and Git modifications
    --style numbers,changes

    # Always show ANSI colors
    --color=always
    ";
    assert_eq!(
        vec!["-p", "--style", "numbers,changes", "--color=always"],
        get_args_from_str(config).unwrap()
    );
}
// Public module declarations for this crate.
pub mod api;
pub mod handlers;
pub mod schema;
pub mod storage;
/*!
Contains structural aliases for tuples, with shared, mutable, and by-value
access to every field of the tuple.

`Tuple*` traits can be used with any tuple at least as large as the size
indicated by the trait.
You can use `Tuple3` with any tuple type starting with `(A,B,C`.
E.g.: `(A,B,C)`, `(A,B,C,D)`, `(A,B,C,D,E)`, etcetera.

# Example

```
use structural::field_traits::for_tuples::Tuple4;
use structural::{GetFieldExt,Structural, fp};

fn sum_tuple_4(tuple: impl Tuple4<u8, u16, u32, u64>) -> u64 {
    let (a, b, c, d) = tuple.fields(fp!(0, 1, 2, 3));
    *a as u64 + *b as u64 + *c as u64 + *d
}

assert_eq!(sum_tuple_4((3, 5, 8, 13)), 29);
assert_eq!(sum_tuple_4((1, 2, 4, 8, "what?")), 15);
assert_eq!(sum_tuple_4((1, 3, 9, 27, "Noooooo", "Impossible!")), 40);
assert_eq!(sum_tuple_4(MyTuple4(1, 1, 1, 1)), 4);
assert_eq!(sum_tuple_4(MyTuple5(2, 2, 2, 2, "foo".into())), 8);

#[derive(Structural)]
struct MyTuple4(pub u8,pub u16,pub u32,pub u64);

#[derive(Structural)]
struct MyTuple5(pub u8,pub u16,pub u32,pub u64, String);

```
*/

// Re-export the tuple trait aliases generated in `tuple_impls`.
pub use super::tuple_impls::{
    Tuple1, Tuple2, Tuple3, Tuple4, Tuple5, Tuple6, Tuple7, Tuple8, Tuple9, Tuple10, Tuple11,
    Tuple12,
};
#![allow(non_snake_case)]
#[allow(unused_imports)]
use std::io::{self, Write};
#[allow(unused_imports)]
use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque};
#[allow(unused_imports)]
use std::cmp::{max, min, Ordering};

// Competitive-programming input macro: reads whitespace-separated tokens
// from stdin (or from `source = <expr>`) and binds them per the pattern,
// e.g. `input! { n: usize, v: [i64; n] }`.
macro_rules! input {
    (source = $s:expr, $($r:tt)*) => {
        let mut iter = $s.split_whitespace();
        let mut next = || { iter.next().unwrap() };
        input_inner!{next, $($r)*}
    };
    ($($r:tt)*) => {
        let stdin = std::io::stdin();
        let mut bytes = std::io::Read::bytes(std::io::BufReader::new(stdin.lock()));
        let mut next = move || -> String{
            bytes
                .by_ref()
                .map(|r|r.unwrap() as char)
                .skip_while(|c|c.is_whitespace())
                .take_while(|c|!c.is_whitespace())
                .collect()
        };
        input_inner!{next, $($r)*}
    };
}

// Recursively binds each `name: type` pair from the token stream.
macro_rules! input_inner {
    ($next:expr) => {};
    ($next:expr, ) => {};
    ($next:expr, $var:ident : $t:tt $($r:tt)*) => {
        let $var = read_value!($next, $t);
        input_inner!{$next $($r)*}
    };
}

// Parses one value; supports tuples, sized vectors, `chars`/`char`, and
// 1-based indices (`usize1`/`isize1` subtract one on read).
macro_rules! read_value {
    ($next:expr, ( $($t:tt),* )) => {
        ( $(read_value!($next, $t)),* )
    };
    ($next:expr, [ $t:tt ; $len:expr ]) => {
        (0..$len).map(|_| read_value!($next, $t)).collect::<Vec<_>>()
    };
    ($next:expr, chars) => {
        read_value!($next, String).chars().collect::<Vec<char>>()
    };
    ($next:expr, char) => {
        read_value!($next, String).chars().collect::<Vec<char>>()[0]
    };
    ($next:expr, usize1) => {
        read_value!($next, usize) - 1
    };
    ($next:expr, isize1) => {
        read_value!($next, isize) - 1
    };
    ($next:expr, $t:ty) => {
        $next().parse::<$t>().expect("Parse error")
    };
}

// Prints a slice's elements space-separated, then a newline.
macro_rules! printvec {
    ( $item:expr ) => {
        for &i in &$item {
            print!("{} ", i);
        }
        println!("");
    }
}

// Debug-prints each expression as `name = value, `.
macro_rules! debug {
    ($($a:expr),*) => {
        println!(concat!($(stringify!($a), " = {:?}, "),*), $($a),*);
    }
}

/// Wrapper reversing the ordering of `T` — makes `BinaryHeap` a min-heap.
#[derive(Eq, PartialEq, Clone, Debug)]
pub struct Rev<T>(pub T);

impl<T: PartialOrd> PartialOrd for Rev<T> {
    fn partial_cmp(&self, other: &Rev<T>) -> Option<Ordering> {
        other.0.partial_cmp(&self.0)
    }
}

impl<T: Ord> Ord for Rev<T> {
    fn cmp(&self, other: &Rev<T>) -> Ordering {
        other.0.cmp(&self.0)
    }
}

/// Wrapper giving a total order to `PartialOrd` types (e.g. floats).
/// Panics when compared against an incomparable value such as NaN.
#[derive(PartialEq, PartialOrd, Clone, Debug)]
pub struct Total<T>(pub T);

impl<T: PartialEq> Eq for Total<T> {}

impl<T: PartialOrd> Ord for Total<T> {
    fn cmp(&self, other: &Total<T>) -> Ordering {
        self.0.partial_cmp(&other.0).unwrap()
    }
}

#[allow(dead_code)]
const MOD: usize = 1000000007;

// n! computed naively — overflows for large n.
#[allow(dead_code)]
fn factorial(n: usize) -> usize {
    (1..n+1).into_iter().fold(1, |acc, i| acc * i)
}

// Binomial coefficient C(n, r). Each partial product C(n-r+i, i) is an
// integer, so the stepwise division stays exact.
#[allow(dead_code)]
fn comb(n: usize, r: usize) -> usize {
    if n - r < r {
        comb(n, n - r)
    } else {
        (1..r+1).into_iter().fold(1, |acc, i| acc * (n - r + i) / i)
    }
}

/// Demo of `Total` and `Rev` with sorting and binary heaps.
fn main() {
    // Sort floats via the Total wrapper.
    let mut fv: Vec<Total<f64>> = vec![];
    fv.push(Total(1.5));
    fv.push(Total(-0.7));
    fv.push(Total(5.0));
    fv.push(Total(1.001));
    fv.sort();
    println!("{:?}", fv);
    let Total(first) = fv[0];
    println!("{}", first);

    // Max-heap of floats.
    let mut heap = std::collections::BinaryHeap::new();
    heap.push(Total(1.5));
    heap.push(Total(-0.7));
    heap.push(Total(5.0));
    heap.push(Total(1.001));
    println!("{:?}", heap);
    println!("{:?}", heap.pop().unwrap());
    let Total(second) = heap.pop().unwrap();
    println!("{}", second);

    // Min-heap of ints via Rev.
    let mut heap = std::collections::BinaryHeap::new();
    heap.push(Rev(18));
    heap.push(Rev(-4));
    heap.push(Rev(10));
    heap.push(Rev(5));
    println!("{:?}", heap);
    let Rev(first) = heap.pop().unwrap();
    println!("{}", first);

    // Min-heap of floats via Rev<Total<f64>>.
    let mut heap = std::collections::BinaryHeap::new();
    heap.push(Rev(Total(1.5)));
    heap.push(Rev(Total(-0.7)));
    heap.push(Rev(Total(5.0)));
    heap.push(Rev(Total(1.001)));
    println!("{:?}", heap);
    println!("{:?}", heap.pop().unwrap());
    let Rev(Total(second)) = heap.pop().unwrap();
    println!("{}", second);
}
use std::sync::mpsc;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;

/// A fixed-size pool of worker threads fed through a single mpsc channel.
pub struct ThreadPool {
    workers: Vec<Worker>,
    sender: mpsc::Sender<Message>,
}

/// The unit of work: any one-shot closure that can cross thread boundaries.
type Job = Box<dyn FnOnce() + Send + 'static>;

/// Control messages flowing from the pool to its workers.
enum Message {
    /// Run this job on whichever worker receives it first.
    NewJob(Job),
    /// Stop looping and let the worker thread exit.
    Terminate,
}

/// One worker owns one OS thread. The join handle lives inside an `Option`
/// so that `Drop for ThreadPool` can `take()` it out and join it.
struct Worker {
    id: usize,
    thread: Option<thread::JoinHandle<()>>,
}

impl Worker {
    /// Spawns the worker thread. All workers share the receiving end of
    /// the channel behind `Arc<Mutex<..>>`, so exactly one worker pops
    /// each message; `recv()` blocks while the queue is empty.
    fn new(id: usize, receiver: Arc<Mutex<mpsc::Receiver<Message>>>) -> Worker {
        let handle = thread::spawn(move || loop {
            // The mutex guard is released as soon as `recv()` returns, so
            // a long-running job never blocks the other workers.
            let message = receiver.lock().unwrap().recv().unwrap();

            match message {
                Message::NewJob(job) => {
                    println!("Worker {} got a job; executing.", id);
                    job();
                }
                Message::Terminate => {
                    println!("Worker {} was told to terminate.", id);
                    break;
                }
            }
        });

        Worker {
            id,
            thread: Some(handle),
        }
    }
}

impl ThreadPool {
    /// Creates a pool running `size` worker threads.
    ///
    /// # Panics
    ///
    /// Panics if `size` is zero.
    pub fn new(size: usize) -> ThreadPool {
        assert!(size > 0);

        let (sender, receiver) = mpsc::channel();
        let receiver = Arc::new(Mutex::new(receiver));

        // Each worker gets its own clone of the shared receiver.
        let workers = (0..size)
            .map(|id| Worker::new(id, Arc::clone(&receiver)))
            .collect();

        ThreadPool { workers, sender }
    }

    /// Queues `f` to run on one of the pool's threads.
    pub fn execute<F>(&self, f: F)
    where
        F: FnOnce() + Send + 'static,
    {
        self.sender.send(Message::NewJob(Box::new(f))).unwrap();
    }
}

/// Graceful shutdown: first tell every worker to terminate, then join
/// each thread so no job is cut off mid-run.
impl Drop for ThreadPool {
    fn drop(&mut self) {
        println!("Sending terminate message to all workers.");

        // One Terminate per worker; any worker may consume any of them.
        for _ in &self.workers {
            self.sender.send(Message::Terminate).unwrap();
        }

        println!("Shutting down all workers.");

        for worker in &mut self.workers {
            println!("Shutting down worker {}", worker.id);

            // `take()` moves the handle out and leaves `None`, so a
            // second drop cannot attempt a second join.
            if let Some(handle) = worker.thread.take() {
                handle.join().unwrap();
            }
        }
    }
}
use iced::{scrollable, Element, Length, Sandbox, Scrollable, Settings}; use std::cell::RefCell; use std::io::prelude::*; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::{env, fs, io}; use crate::cssom::{Origin, Stylesheet}; use crate::layout::{font, layout_tree, Dimensions}; use crate::painter; use crate::parser::{css, html}; use crate::style::create_style_tree; use css::CSSParser; use html::HTMLParser; use painter::wrapper::Wrapper; use painter::{build_display_list, DisplayCommand, DisplayList}; #[derive(Debug)] pub enum Message {} pub struct Window { items: DisplayList, height: f32, width: f32, scroll: scrollable::State, } impl<'a> Sandbox for Window { type Message = Message; fn new() -> Self { let (items, height, width) = prepare(); Window { items, height, width, scroll: scrollable::State::new(), } } fn title(&self) -> String { String::from("kamaitachi") } fn update(&mut self, message: Message) { match message {} } fn view(&mut self) -> Element<Message> { let mut wrapper = Wrapper::new(self.height, self.width); font::with_thread_local_font_context(|font_context| { for item in &self.items { wrapper.items.push(match item { DisplayCommand::SolidColor(color, rect) => { painter::create_block(color.clone(), rect.clone()) } DisplayCommand::Text(text, color, rect, font) => painter::create_text( text.into(), color.clone(), rect.clone(), font.clone(), font_context, ), }); } }); Scrollable::new(&mut self.scroll) .width(Length::Fill) .height(Length::Fill) .push(wrapper) .into() } } fn prepare() -> (DisplayList, f32, f32) { let args: Vec<String> = env::args().collect(); if args.is_empty() { panic!("You need to specify entry path."); } let path = Path::new(&args[1]); let mut paths = vec![]; visit_dirs(path, &mut paths).unwrap(); let mut html = String::new(); let mut css_list = vec![]; for path in paths { let ext = path.extension().unwrap(); if html.is_empty() && ext == "html" { fs::File::open(path) .unwrap() .read_to_string(&mut html) .unwrap(); continue; } if ext 
== "css" { let mut css = String::new(); fs::File::open(path) .unwrap() .read_to_string(&mut css) .unwrap(); css_list.push(css); continue; } } paint(html, css_list) } fn paint(html: String, css_list: Vec<String>) -> (DisplayList, f32, f32) { let dom = HTMLParser::new(html).run(); let mut author_rules = vec![]; for css in css_list { author_rules.extend(CSSParser::new(css).parse_rules(Origin::Author)); } let cssom = Stylesheet::new(author_rules); let styled_node = create_style_tree(&dom, &cssom, None); let mut viewport: Dimensions = Default::default(); viewport.content.width = 1200.0; viewport.content.height = 800.0; let layout_root = layout_tree(&styled_node, Rc::new(RefCell::new(viewport))); build_display_list(&layout_root) } // one possible implementation of walking a directory only visiting files fn visit_dirs(dir: &Path, paths: &mut Vec<PathBuf>) -> io::Result<()> { if dir.is_dir() { for entry in fs::read_dir(dir)? { let entry = entry?; let path = entry.path(); if path.is_dir() { visit_dirs(&path, paths)?; } else { paths.push(path); } } } else { paths.push(dir.to_path_buf()); } Ok(()) } pub fn main() -> iced::Result { let mut settings = Settings::default(); settings.window.size = (1200, 800); Window::run(settings) }
//! A crate exposing common helper functions for doing proper error handling in a
//! FFI context.
//!
//!
//! ## Error Handling Theory
//!
//! This employs a thread-local variable which holds the most recent error as
//! well as some convenience functions for getting/clearing this variable.
//!
//! The theory is if a function fails then it should return an "obviously
//! invalid" value, this is commonly `-1` when returning an integer or `null`
//! when returning a pointer. The user can then check for this and consult the
//! most recent error for more information... Of course that means all fallible
//! operations must update the most recent error if they fail.
//!
//! > **Note:** This error handling strategy is strongly influenced by [libgit2]'s
//! > error handling docs, ported to Rust. As such, it is **strongly recommended**
//! > to skim the [error handling docs][docs] themselves.
//!
//! [docs]: https://github.com/libgit2/libgit2/blob/master/docs/error-handling.md
//! [libgit2]: https://github.com/libgit2/libgit2

extern crate libc;

use std::error::Error;
use std::cell::RefCell;
use std::ptr;
use std::slice;
use std::panic::{self, UnwindSafe};
use std::any::Any;

use libc::{c_char, c_int};

thread_local!(
    // The most recent error raised on this thread, if any. Thread-local so
    // concurrent FFI callers never observe each other's errors.
    static LAST_ERROR: RefCell<Option<Box<Error>>> = RefCell::new(None);
);

/// Set the thread-local `LAST_ERROR` variable.
pub fn update_last_error<E: Into<Box<Error>> + 'static>(e: E) {
    let boxed = e.into();

    LAST_ERROR.with(|last| {
        *last.borrow_mut() = Some(boxed);
    });
}

/// Get the last error, clearing the variable in the process.
pub fn get_last_error() -> Option<Box<Error>> {
    LAST_ERROR.with(|last| last.borrow_mut().take())
}

/// Write the latest error message to a buffer.
///
/// # Returns
///
/// This returns the number of bytes written to the buffer. If no bytes were
/// written (i.e. there is no last error) then it returns `0`. If the buffer
/// isn't big enough or a `null` pointer was passed in, you'll get a `-1`.
#[no_mangle]
pub unsafe extern "C" fn error_message(buffer: *mut c_char, length: c_int) -> c_int {
    if buffer.is_null() {
        return -1;
    }

    // NOTE(review): a negative `length` wraps to a huge value when cast to
    // usize here — callers must pass a non-negative length.
    let buffer = slice::from_raw_parts_mut(buffer as *mut u8, length as usize);

    // Take the last error, if there isn't one then there's no error message to
    // display.
    let err = match get_last_error() {
        Some(e) => e,
        None => return 0,
    };

    let error_message = format!("{}", err);
    // The +1 reserves room for a trailing NUL terminator.
    let bytes_required = error_message.len() + 1;

    if buffer.len() < bytes_required {
        // We don't have enough room. Make sure to return the error so it
        // isn't accidentally consumed
        update_last_error(err);
        return -1;
    }

    let data = error_message.as_bytes();
    ptr::copy_nonoverlapping(data.as_ptr(), buffer.as_mut_ptr(), data.len());

    // zero out the rest of the buffer just in case (this also supplies the
    // NUL terminator that `bytes_required` reserved space for)
    let rest = &mut buffer[data.len()..];
    ptr::write_bytes(rest.as_mut_ptr(), 0, rest.len());

    // Returned length excludes the NUL terminator.
    data.len() as c_int
}

/// Execute some closure, catching any panics and converting them into errors
/// so you don't accidentally unwind across the FFI boundary.
///
/// # Note
///
/// It will need to be possible to convert an opaque `Box<Any + Send + 'static>`
/// received from [`std::panic::catch_unwind()`][cu] back into your error type.
///
/// If you are using [error-chain] then you can leverage the `error_chain!()`
/// macro to generate some of this for you.
///
/// ```ignore
/// error_chain!{
///     ...
///     errors {
///         Panic(inner: Box<::std::any::Any + Send + 'static>) {
///             description("Thread Panicked")
///             display("{}",
///                 if let Some(s) = inner.downcast_ref::<String>() {
///                     s.clone()
///                 } else if let Some(s) = inner.downcast_ref::<&str>() {
///                     s.to_string()
///                 } else {
///                     String::from("Thread Panicked")
///                 })
///         }
///     }
/// }
/// ```
///
/// When converting from a `Box<Any + Send + 'static>`, the best way to try and
/// recover the panic message is to use `Any::downcast_ref()` to try various
/// "common" panic message types. Falling back to some sane default if we can't
/// figure it out. Luckily almost all panic messages are either `&str` or
/// `String`.
///
///
/// # Examples
///
/// This is a basic example of how you may use `catch_panic()`. It looks a
/// little long because you need to define a way to convert a panic message into
/// your error type, but that's a one-time cost and the `catch_panic()` call
/// itself is trivial.
///
/// ```
/// use std::any::Any;
/// extern crate ffi_helpers;
///
/// fn main() {
///     let got: Result<u32, Error> = ffi_helpers::catch_panic(|| {
///         let something = None;
///         something.unwrap()
///     });
///
///     let message = format!("{:?}", got);
///     assert_eq!(message, r#"Err(Message("called `Option::unwrap()` on a `None` value"))"#);
/// }
///
/// #[derive(Debug)]
/// enum Error {
///     Message(String),
///     Unknown,
/// }
///
/// impl From<Box<Any + Send + 'static>> for Error {
///     fn from(other: Box<Any + Send + 'static>) -> Error {
///         if let Some(owned) = other.downcast_ref::<String>() {
///             Error::Message(owned.clone())
///         } else if let Some(owned) = other.downcast_ref::<&str>() {
///             Error::Message(owned.to_string())
///         } else {
///             Error::Unknown
///         }
///     }
/// }
/// ```
///
/// [cu]: https://doc.rust-lang.org/std/panic/fn.catch_unwind.html
/// [error-chain]: https://crates.io/crates/error-chain
pub fn catch_panic<T, E, F>(func: F) -> Result<T, E>
where
    F: FnOnce() -> Result<T, E> + UnwindSafe,
    E: From<Box<Any + Send + 'static>>,
{
    // catch_unwind yields Result<Result<T, E>, Box<Any + Send>>:
    // map the opaque panic payload into E, then flatten the nesting.
    panic::catch_unwind(func)
        .map_err(Into::into)
        .and_then(|t| t)
}
use crate::{ChainConstants, HashOf, HeaderExt, NumberOf, Storage};
use codec::{Decode, Encode};
use frame_support::sp_io::TestExternalities;
use scale_info::TypeInfo;
use sp_arithmetic::traits::Zero;
use sp_consensus_subspace::{KzgExtension, PosExtension};
use sp_runtime::traits::{BlakeTwo256, Header as HeaderT};
use std::collections::{BTreeMap, HashMap};
use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg};
use subspace_core_primitives::{BlockWeight, SegmentCommitment, SegmentIndex, SolutionRange};
use subspace_proof_of_space::shim::ShimTable;

// Lightweight proof-of-space table used only in tests.
pub(crate) type PosTable = ShimTable;

pub(crate) type Header = sp_runtime::generic::Header<u32, BlakeTwo256>;

// Smaller value for testing purposes
const MAX_PIECES_IN_SECTOR: u16 = 32;

/// Backing state for [`MockStorage`].
#[derive(Debug)]
struct StorageData {
    // Chain constants used by the importer under test.
    constants: ChainConstants<Header>,
    // All known headers, keyed by hash.
    headers: HashMap<HashOf<Header>, HeaderExt<Header>>,
    // Forks are possible, so a single block number can map to many hashes.
    number_to_hashes: HashMap<NumberOf<Header>, Vec<HashOf<Header>>>,
    // (number, hash) of the current best header.
    best_header: (NumberOf<Header>, HashOf<Header>),
    // (number, hash) of the last finalized header, if any.
    finalized_head: Option<(NumberOf<Header>, HashOf<Header>)>,
    segment_commitments: BTreeMap<SegmentIndex, SegmentCommitment>,
}

/// Per-header values that tests may override (see `override_*` helpers below).
#[derive(Default, Debug, Encode, Decode, Clone, Eq, PartialEq, TypeInfo)]
pub(crate) struct TestOverrides {
    pub(crate) solution_range: Option<SolutionRange>,
    pub(crate) next_solution_range: Option<SolutionRange>,
}

/// In-memory implementation of the `Storage` trait for tests.
#[derive(Debug)]
pub(crate) struct MockStorage(StorageData);

impl Storage<Header> for MockStorage {
    fn chain_constants(&self) -> ChainConstants<Header> {
        self.0.constants.clone()
    }

    fn header(&self, query: HashOf<Header>) -> Option<HeaderExt<Header>> {
        self.0.headers.get(&query).cloned()
    }

    /// Insert a header; on first insert for this hash, also record its hash
    /// under its block number (supporting multiple hashes per number).
    fn store_header(&mut self, header_ext: HeaderExt<Header>, as_best_header: bool) {
        let (number, hash) = (*header_ext.header.number(), header_ext.header.hash());
        if self.0.headers.insert(hash, header_ext).is_none() {
            let mut set = self
                .0
                .number_to_hashes
                .get(&number)
                .cloned()
                .unwrap_or_default();
            set.push(hash);
            self.0.number_to_hashes.insert(number, set);
        }

        if as_best_header {
            self.0.best_header = (number, hash)
        }
    }

    // Panics if the recorded best hash is missing from `headers` — in this
    // mock the best header is always stored first.
    fn best_header(&self) -> HeaderExt<Header> {
        let (_, hash) = self.0.best_header;
        self.0.headers.get(&hash).cloned().unwrap()
    }

    /// All headers stored at `number` (possibly from different forks).
    fn headers_at_number(&self, number: NumberOf<Header>) -> Vec<HeaderExt<Header>> {
        self.0
            .number_to_hashes
            .get(&number)
            .unwrap_or(&vec![])
            .iter()
            .map(|hash| self.0.headers.get(hash).cloned().unwrap())
            .collect()
    }

    /// Remove a header and drop its hash from the number index, keeping any
    /// sibling fork hashes at the same number.
    fn prune_header(&mut self, pruned_hash: HashOf<Header>) {
        if let Some(pruned_header) = self.0.headers.remove(&pruned_hash) {
            let number_to_hashes = self
                .0
                .number_to_hashes
                .remove(pruned_header.header.number())
                .unwrap_or_default()
                .into_iter()
                .filter(|hash| *hash != pruned_hash)
                .collect();

            self.0
                .number_to_hashes
                .insert(*pruned_header.header.number(), number_to_hashes);
        }
    }

    // Panics if `hash` is unknown; callers are expected to finalize only
    // stored headers.
    fn finalize_header(&mut self, hash: HashOf<Header>) {
        let header = self.0.headers.get(&hash).unwrap();
        self.0.finalized_head = Some((*header.header.number(), header.header.hash()))
    }

    /// The finalized header, falling back to the header at number zero
    /// (genesis) when nothing has been finalized yet.
    fn finalized_header(&self) -> HeaderExt<Header> {
        self.0
            .finalized_head
            .and_then(|(_, hash)| self.0.headers.get(&hash).cloned())
            .unwrap_or_else(|| {
                self.0
                    .headers
                    .get(
                        self.0
                            .number_to_hashes
                            .get(&Zero::zero())
                            .cloned()
                            .unwrap()
                            .get(0)
                            .unwrap(),
                    )
                    .cloned()
                    .unwrap()
            })
    }

    /// Merge the given commitments into the stored map (drains the argument).
    fn store_segment_commitments(
        &mut self,
        mut segment_commitments: BTreeMap<SegmentIndex, SegmentCommitment>,
    ) {
        self.0.segment_commitments.append(&mut segment_commitments)
    }

    fn segment_commitment(&self, segment_index: SegmentIndex) -> Option<SegmentCommitment> {
        self.0.segment_commitments.get(&segment_index).cloned()
    }

    fn number_of_segments(&self) -> u64 {
        self.0.segment_commitments.len() as u64
    }

    fn max_pieces_in_sector(&self) -> u16 {
        MAX_PIECES_IN_SECTOR
    }
}

impl MockStorage {
    /// Fresh empty storage with the given chain constants.
    pub(crate) fn new(constants: ChainConstants<Header>) -> Self {
        MockStorage(StorageData {
            constants,
            headers: Default::default(),
            number_to_hashes: Default::default(),
            best_header: (Default::default(), Default::default()),
            finalized_head: None,
            segment_commitments: Default::default(),
        })
    }

    // hack to adjust the solution range
    pub(crate) fn override_solution_range(
        &mut self,
        hash: HashOf<Header>,
        solution_range: SolutionRange,
    ) {
        // Panics if `hash` was never stored — intentional in a test helper.
        let mut header = self.0.headers.remove(&hash).unwrap();
        header.test_overrides.solution_range = Some(solution_range);
        self.0.headers.insert(hash, header);
    }

    // hack to adjust the next solution range
    pub(crate) fn override_next_solution_range(
        &mut self,
        hash: HashOf<Header>,
        next_solution_range: SolutionRange,
    ) {
        let mut header = self.0.headers.remove(&hash).unwrap();
        header.test_overrides.next_solution_range = Some(next_solution_range);
        self.0.headers.insert(hash, header);
    }

    // hack to adjust constants when importing Block #1
    pub(crate) fn override_constants(&mut self, constants: ChainConstants<Header>) {
        self.0.constants = constants;
    }

    // hack to adjust the cumulative weight
    pub(crate) fn override_cumulative_weight(&mut self, hash: HashOf<Header>, weight: BlockWeight) {
        let mut header = self.0.headers.remove(&hash).unwrap();
        header.total_weight = weight;
        self.0.headers.insert(hash, header);
    }

    // hack to store segment commitments
    pub(crate) fn store_segment_commitment(
        &mut self,
        segment_index: SegmentIndex,
        segment_commitment: SegmentCommitment,
    ) {
        self.0
            .segment_commitments
            .insert(segment_index, segment_commitment);
    }
}

/// Test externalities with the KZG and proof-of-space host extensions
/// registered, as the verification code expects them at runtime.
pub fn new_test_ext() -> TestExternalities {
    let mut ext = TestExternalities::new_empty();
    ext.register_extension(KzgExtension::new(Kzg::new(embedded_kzg_settings())));
    ext.register_extension(PosExtension::new::<PosTable>());
    ext
}
//! High performance FID (Fully Indexable Dictionary) library.
//!
//! [Master API Docs](https://laysakura.github.io/fid-rs/fid_rs/)
//! |
//! [Released API Docs](https://docs.rs/crate/fid-rs)
//! |
//! [Benchmark Results](https://laysakura.github.io/fid-rs/criterion/report/)
//! |
//! [Changelog](https://github.com/laysakura/fid-rs/blob/master/CHANGELOG.md)
//!
//! [![Build Status](https://travis-ci.com/laysakura/fid-rs.svg?branch=master)](https://travis-ci.com/laysakura/fid-rs)
//! [![Crates.io Version](https://img.shields.io/crates/v/fid-rs.svg)](https://crates.io/crates/fid-rs)
//! [![Crates.io Downloads](https://img.shields.io/crates/d/fid-rs.svg)](https://crates.io/crates/fid-rs)
//! [![Minimum rustc version](https://img.shields.io/badge/rustc-1.33+-lightgray.svg)](https://github.com/laysakura/fid-rs#rust-version-supports)
//! [![License: MIT](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/laysakura/fid-rs/blob/master/LICENSE-MIT)
//! [![License: Apache 2.0](https://img.shields.io/badge/license-Apache_2.0-blue.svg)](https://github.com/laysakura/fid-rs/blob/master/LICENSE-APACHE)
//!
//! # Quickstart
//!
//! To use fid-rs, add the following to your `Cargo.toml` file:
//!
//! ```toml
//! [dependencies]
//! fid-rs = "0.1"  # NOTE: Replace to latest minor version.
//! ```
//!
//! ## Usage Overview
//!
//! ```rust
//! use fid_rs::Fid;
//!
//! let fid = Fid::from("0100_1");  // Tips: Fid::from::<&str>() ignores '_'.
//!
//! // Basic operations ---------------------
//! assert_eq!(fid[0], false);  // [0]1001; 0th bit is '0' (false)
//! assert_eq!(fid[1], true);   // 0[1]001; 1st bit is '1' (true)
//! assert_eq!(fid[4], true);   // 0100[1]; 4th bit is '1' (true)
//!
//! assert_eq!(fid.rank(0), 0);  // [0]1001; Range [0, 0] has no '1'
//! assert_eq!(fid.rank(3), 1);  // [0100]1; Range [0, 3] has 1 '1'
//! assert_eq!(fid.rank(4), 2);  // [01001]; Range [0, 4] has 2 '1's
//!
//! assert_eq!(fid.select(0), Some(0));  // []01001; Minimum i where range [0, i] has 0 '1's is i=0
//! assert_eq!(fid.select(1), Some(1));  // 0[1]001; Minimum i where range [0, i] has 1 '1's is i=1
//! assert_eq!(fid.select(2), Some(4));  // 0100[1]; Minimum i where range [0, i] has 2 '1's is i=4
//! assert_eq!(fid.select(3), None);     // There is no i where range [0, i] has 3 '1's
//!
//! // rank0, select0 -----------------------
//! assert_eq!(fid.rank0(0), 1);  // [0]1001; Range [0, 0] has no '0'
//! assert_eq!(fid.rank0(3), 3);  // [0100]1; Range [0, 3] has 3 '0's
//! assert_eq!(fid.rank0(4), 3);  // [01001]; Range [0, 4] has 3 '0's
//!
//! assert_eq!(fid.select0(0), Some(0));  // []01001; Minimum i where range [0, i] has 0 '0's is i=0
//! assert_eq!(fid.select0(1), Some(0));  // [0]1001; Minimum i where range [0, i] has 1 '0's is i=0
//! assert_eq!(fid.select0(2), Some(2));  // 01[0]01; Minimum i where range [0, i] has 2 '0's is i=2
//! assert_eq!(fid.select0(4), None);     // There is no i where range [0, i] has 4 '0's
//! ```
//!
//! ## Constructors
//!
//! ```rust
//! use fid_rs::Fid;
//!
//! // Most human-friendly way: Fid::from::<&str>()
//! let fid = Fid::from("0100_1");
//!
//! // Complex construction in simple way: Fid::from::<&[bool]>()
//! let mut arr = [false; 5];
//! arr[1] = true;
//! arr[4] = true;
//! let fid = Fid::from(&arr[..]);
//! ```
//!
//! ## Iterator
//!
//! ```rust
//! use fid_rs::Fid;
//!
//! let fid = Fid::from("0100_1");
//!
//! for bit in fid.iter() {
//!     println!("{}", bit);
//! }
//! // =>
//! // false
//! // true
//! // false
//! // false
//! // true
//! ```
//!
//! ## Utility Methods
//!
//! ```rust
//! use fid_rs::Fid;
//!
//! let fid = Fid::from("0100_1");
//!
//! assert_eq!(fid.len(), 5);
//! ```
//!
//! # Features
//!
//! - **Arbitrary length support with minimum working memory**: fid-rs provides virtually _arbitrary size_ of FID. It is carefully designed to use as small memory space as possible.
//! - **Parallel build of FID**: Build operations (`Fid::from()`) takes _O(N)_ time. It is parallelized and achieves nearly optimal scale-out.
//! - **No memory copy while/after build operations**: After internally creating bit vector representation, any operation does not do memory copy.
//! - **Latest benchmark results are always accessible**: fid-rs is continuously benchmarked in Travis CI using [Criterion.rs](https://crates.io/crates/criterion). Graphical benchmark results are published [here](https://laysakura.github.io/fid-rs/criterion/report/).
//!
//! ## Complexity
//!
//! When the length of a `Fid` is _N_:
//!
//! | Operation | Time-complexity | Space-complexity |
//! |-----------|-----------------|------------------|
//! | [Fid::from::<&str>()](https://laysakura.github.io/fid-rs/fid_rs/fid/struct.Fid.html#implementations) | _O(N)_ | _N + o(N)_ |
//! | [Fid::from::<&[bool]>()](https://laysakura.github.io/fid-rs/fid_rs/fid/struct.Fid.html#implementations) | _O(N)_ | _N + o(N)_ |
//! | [Index&lt;u64&gt;](https://laysakura.github.io/fid-rs/fid_rs/fid/struct.Fid.html#impl-Index<u64>) | _O(1)_ | _0_ |
//! | [Fid::rank()](https://laysakura.github.io/fid-rs/fid_rs/fid/struct.Fid.html#method.rank) | _O(1)_ | _O(1)_ |
//! | [Fid::rank0()](https://laysakura.github.io/fid-rs/fid_rs/fid/struct.Fid.html#method.rank0) | _O(1)_ | _O(1)_ |
//! | [Fid::select()](https://laysakura.github.io/fid-rs/fid_rs/fid/struct.Fid.html#method.select) | _O(log N)_ | _O(1)_ |
//! | [Fid::select0()](https://laysakura.github.io/fid-rs/fid_rs/fid/struct.Fid.html#method.select0) | _O(log N)_ | _O(1)_ |
//!
//! (Actually, `select()`'s time-complexity can be _O(1)_ with complex implementation but fid-rs, like many other libraries, uses binary search of `rank()`'s result).

// Re-export the main type so users can write `fid_rs::Fid`.
pub use fid::Fid;

// Public FID implementation module.
pub mod fid;
// Internal succinct data structures; not part of the public API.
mod internal_data_structure;
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
// NOTE(review): this file is generated by AutoRust — regenerate rather than
// hand-editing individual definitions.
use serde::{Deserialize, Serialize};

/// A REST API operation exposed by the Dedicated HSM resource provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHsmOperation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // NOTE(review): modeled as a String even though the name suggests a
    // boolean — this mirrors the service's wire schema; confirm before changing.
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<dedicated_hsm_operation::Display>,
}
pub mod dedicated_hsm_operation {
    use super::*;
    /// Human-readable metadata shown for an operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}
/// Page of `DedicatedHsmOperation` values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHsmOperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<DedicatedHsmOperation>,
}
/// Reference to another ARM resource by id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ApiEntityReference {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// A network interface attached to the HSM.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NetworkInterface {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "privateIpAddress", default, skip_serializing_if = "Option::is_none")]
    pub private_ip_address: Option<String>,
}
/// Subnet plus network interfaces making up the HSM's network profile.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct NetworkProfile {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub subnet: Option<ApiEntityReference>,
    #[serde(rename = "networkInterfaces", default, skip_serializing_if = "Vec::is_empty")]
    pub network_interfaces: Vec<NetworkInterface>,
}
/// SKU of the dedicated HSM.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<sku::Name>,
}
pub mod sku {
    use super::*;
    /// Known SKU names (closed enum — unknown values fail deserialization).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        #[serde(rename = "SafeNet Luna Network HSM A790")]
        SafeNetLunaNetworkHsmA790,
    }
}
/// Properties of a dedicated HSM resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHsmProperties {
    #[serde(rename = "networkProfile", default, skip_serializing_if = "Option::is_none")]
    pub network_profile: Option<NetworkProfile>,
    #[serde(rename = "stampId", default, skip_serializing_if = "Option::is_none")]
    pub stamp_id: Option<String>,
    #[serde(rename = "statusMessage", default, skip_serializing_if = "Option::is_none")]
    pub status_message: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<dedicated_hsm_properties::ProvisioningState>,
}
pub mod dedicated_hsm_properties {
    use super::*;
    /// Lifecycle states reported by the service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Succeeded,
        Provisioning,
        Allocating,
        Connecting,
        Failed,
        CheckingQuota,
        Deleting,
    }
}
/// A dedicated HSM: common ARM resource fields plus HSM-specific properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHsm {
    #[serde(flatten)]
    pub resource: Resource,
    pub properties: DedicatedHsmProperties,
}
/// PATCH body: only tags may be updated.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHsmPatchParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Page of `DedicatedHsm` values with a continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHsmListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<DedicatedHsm>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Page of generic `Resource` values with a continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ResourceListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Resource>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Error envelope returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DedicatedHsmError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<Error>,
}
/// Service error; recursive via `innererror`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    // NOTE(review): `Box<Option<Error>>` (rather than the usual
    // `Option<Box<Error>>`) is what the generator emitted; verify the
    // `skip_serializing_if` predicate resolves against this type before
    // touching it.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub innererror: Box<Option<Error>>,
}
/// Common ARM resource fields shared by all tracked resources here.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    // `location` is required by ARM, hence not optional.
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub zones: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// One allergen per power-of-two score bit, so a score is a bitmask of
/// allergens.
#[allow(dead_code)]
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Allergen {
    Eggs = 1,
    Peanuts = 2,
    Shellfish = 4,
    Strawberries = 8,
    Tomatoes = 16,
    Chocolate = 32,
    Pollen = 64,
    Cats = 128,
}

// NOTE: `allergy` / `Allergies` violate Rust naming conventions
// (UpperCamelCase types, snake_case functions), but callers depend on these
// exact names, so they are kept and the compiler warnings are silenced
// explicitly instead.
/// An allergy record: a raw score bitmask over [`Allergen`] bits.
#[allow(non_camel_case_types)]
pub struct allergy {
    score: usize,
}

/// Build an [`allergy`] from a raw score bitmask.
#[allow(non_snake_case)]
pub fn Allergies(s: usize) -> allergy {
    allergy { score: s }
}

/// All allergens in ascending score order; hoisted to a `const` so
/// `allergies()` no longer allocates this table on every call.
const ALL_ALLERGENS: [Allergen; 8] = [
    Allergen::Eggs,
    Allergen::Peanuts,
    Allergen::Shellfish,
    Allergen::Strawberries,
    Allergen::Tomatoes,
    Allergen::Chocolate,
    Allergen::Pollen,
    Allergen::Cats,
];

impl allergy {
    /// True when `allergen`'s bit is set in the score.
    pub fn is_allergic_to(&self, allergen: &Allergen) -> bool {
        (self.score & (*allergen as usize)) != 0
    }

    /// Every allergen present in the score, in ascending score order.
    /// Bits above `Cats` (e.g. 256 in a score of 257) are simply ignored.
    pub fn allergies(&self) -> Vec<Allergen> {
        ALL_ALLERGENS
            .iter()
            .filter(|a| (self.score & (**a as usize)) != 0)
            .cloned()
            .collect()
    }
}
use anyhow::{anyhow, bail, Result};
use csv::Reader;
use csv::StringRecord;
use log::{debug, info, warn};
use serde::Deserialize;
use std::collections::{HashMap, HashSet};
use std::env;

use futures::{stream, StreamExt};
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::sync::{Arc, Mutex, RwLock};
use std::thread;
use std::time::Duration;

// Client id -> account balance.
type Bal = HashMap<usize, Balance>;
// 256 RwLocks (keyed by u8), each guarding a map of per-client Mutexes.
type ReadWriteLockMap = Arc<HashMap<u8, RwLock<HashMap<usize, Mutex<()>>>>>;

/// One input transaction row.
// NOTE(review): money is held as f64 here and throughout — floating-point
// rounding can accumulate; a fixed-point type would be safer. Confirm the
// precision requirements before changing.
#[derive(Debug, Deserialize, PartialEq)]
struct Tx {
    tx_type: String,
    client: usize,
    tx_id: usize,
    amount: f64,
    // amount: Option<f64>,
}

/// Per-client account state.
#[derive(Debug, PartialEq)]
struct Balance {
    client: usize,
    // Funds free to withdraw.
    available: f64,
    // Funds frozen by open disputes.
    held: f64,
    // Set permanently by a chargeback.
    locked: bool,
}

impl Balance {
    /// Fresh, unlocked, zero-balance account for `client`.
    fn new(client: usize) -> Self {
        Balance {
            client,
            available: 0.0,
            held: 0.0,
            locked: false,
        }
    }
}

/// Streaming entry point: read the CSV named on the command line, apply
/// every transaction, then print the final balances to stdout.
pub async fn run_stream() -> Result<()> {
    info!("starting up");
    let csv_file = input_filename()?;
    debug!("csv_file {csv_file}");
    let mut bal: Bal = HashMap::new();
    // tx_id -> signed amount (deposits positive, withdrawals negative).
    let mut tx_amt: HashMap<usize, f64> = HashMap::new();
    // tx_ids with an open dispute.
    let mut dispute_txs: HashSet<usize> = HashSet::new();
    process_file(csv_file, &mut bal, &mut tx_amt, &mut dispute_txs).await?;
    print_result(&bal)?;
    Ok(())
}

/// Read `csv_file` line by line as a buffered stream, parse each line into a
/// `Tx`, and apply it to the shared state via `process_tx_async`.
async fn process_file(
    csv_file: String,
    bal: &mut Bal,
    tx_amt: &mut HashMap<usize, f64>,
    dispute_txs: &mut HashSet<usize>,
) -> Result<()> {
    let file = File::open(csv_file)?;
    let reader = BufReader::new(file);
    let mut read_iter = reader.lines();
    // skip header
    read_iter.next();
    // convert iterator to stream.
    let stream = stream::iter(read_iter);
    let buf_factor = 5;
    // create N RW_locks to distribute and reduce the write_lock wait time, here N = 256, u8 type.
    let mut inner = HashMap::new();
    for i in 0..=u8::max_value() {
        inner.insert(i, RwLock::new(HashMap::new()));
    }
    let rw_lock_map = Arc::new(inner);
    stream
        .map(|line| tx_from_line(line.unwrap()))
        .buffered(buf_factor)
        .map(|x| {
            let rw_lock_map = Arc::clone(&rw_lock_map);
            // Apply the transaction; a bad row is logged and skipped rather
            // than aborting the whole file.
            match process_tx_async(x, bal, tx_amt, dispute_txs, rw_lock_map) {
                Ok(()) => (),
                Err(e) => {
                    warn!("Error tx: {}", e);
                    // log error into a database or file...etc.
                }
            }
            // return a future for buffered() input.
            async move { () }
        })
        .buffered(buf_factor)
        .collect::<Vec<_>>()
        .await;
    Ok(())
}

/// Parse one CSV line into a `Tx`, trimming whitespace around fields.
/// A missing amount (dispute/resolve/chargeback rows) defaults to 0.0.
async fn tx_from_line(line: String) -> Result<Tx> {
    // data format: tx_type,client,"tx_id","amount"
    type Record = (String, usize, usize, Option<f64>);
    let items = line.split(',').collect::<Vec<&str>>();
    let mut record = StringRecord::from(items);
    record.trim();
    let rec: Record = record.deserialize(None)?;
    // debug!("rec {rec:?}");
    let tx = Tx {
        tx_type: rec.0,
        client: rec.1,
        tx_id: rec.2,
        amount: rec.3.unwrap_or(0.0),
    };
    // debug!("tx {tx:?}");
    Ok(tx)
}

/// Apply one transaction while holding the per-client Mutex, which lives in
/// one of 256 RwLock-guarded shard maps (client_id truncated to u8 picks the
/// shard).
///
/// The loop first tries under a read lock; when the client's Mutex is not
/// present yet, it drops the read lock, inserts the Mutex under the write
/// lock (via `entry`, so a racing inserter is harmless) and retries.
fn process_tx_async(
    tx: Result<Tx>,
    bal: &mut Bal,
    tx_amt: &mut HashMap<usize, f64>,
    dispute_txs: &mut HashSet<usize>,
    rw_lock_map: ReadWriteLockMap,
) -> Result<()> {
    let tx = &tx?;
    let client_id = tx.client;
    loop {
        let rw_lock_id = client_id as u8; // id % 256, map client_id to a rw_lock,
        let rw_lock = &rw_lock_map[&rw_lock_id]; // prefilled, must exists.
        let client_read_lock = rw_lock.read().expect("RwLock poisoned");
        // Assume that the element <client_id here> already exists
        if let Some(data_lock) = client_read_lock.get(&client_id) {
            let mut _lock = data_lock.lock().expect("Mutex poisoned");
            match &tx.tx_type[..] {
                "deposit" => deposit(tx, bal, tx_amt)?,
                "withdrawal" => withdraw(tx, bal, tx_amt)?,
                "dispute" => dispute(tx, bal, tx_amt, dispute_txs)?,
                "resolve" => resolve(tx, bal, tx_amt, dispute_txs)?,
                "chargeback" => chargeback(tx, bal, tx_amt, dispute_txs)?,
                // Unknown transaction types are silently ignored.
                _ => (),
            }
            break;
        }
        // Not found: release the read lock before taking the write lock,
        // otherwise we would deadlock against ourselves.
        drop(client_read_lock);
        let mut client_write_lock = rw_lock.write().expect("RwLock poisoned");
        // We use HashMap::entry to handle the case when another thread
        // inserted the same key, while it is unlocked.
        // NOTE(review): this sleep runs while the shard's write lock is held,
        // stalling every client in the shard — looks like a test artifact;
        // confirm before keeping it.
        thread::sleep(Duration::from_millis(5));
        client_write_lock
            .entry(client_id)
            .or_insert_with(|| Mutex::new(()));
    }
    Ok(())
}

/// Synchronous entry point: load the whole CSV into memory and process it.
pub fn run() -> Result<()> {
    let csv_file = input_filename()?;
    debug!("csv_file {csv_file}");
    let txs = data_from_csv_trim(&csv_file)?;
    process_tx(&txs)?;
    Ok(())
}

/// Apply every parsed transaction in order, then print the balances.
fn process_tx(txs: &[Tx]) -> Result<()> {
    let mut bal: Bal = HashMap::new();
    let mut tx_amt: HashMap<usize, f64> = HashMap::new();
    let mut dispute_txs: HashSet<usize> = HashSet::new();
    for tx in txs {
        match &tx.tx_type[..] {
            "deposit" => deposit(tx, &mut bal, &mut tx_amt)?,
            "withdrawal" => withdraw(tx, &mut bal, &mut tx_amt)?,
            "dispute" => dispute(tx, &mut bal, &mut tx_amt, &mut dispute_txs)?,
            "resolve" => resolve(tx, &mut bal, &mut tx_amt, &mut dispute_txs)?,
            "chargeback" => chargeback(tx, &mut bal, &mut tx_amt, &mut dispute_txs)?,
            _ => (),
        }
    }
    println!("tx_amt {tx_amt:?}");
    print_result(&bal)?;
    Ok(())
}

/// Credit `tx.amount` to the client and remember the tx amount (positive)
/// for possible later disputes. Invalid/duplicate txs are skipped silently.
fn deposit(tx: &Tx, bal: &mut Bal, tx_amt: &mut HashMap<usize, f64>) -> Result<()> {
    debug!("deposit {tx:?}");
    if !is_tx_valid(tx, bal, tx_amt) {
        return Ok(());
    }
    let client = tx.client;
    let client_data = bal.get_mut(&client).ok_or(anyhow!("bad client"))?;
    client_data.available += tx.amount;
    tx_amt.insert(tx.tx_id, tx.amount);
    Ok(())
}

/// Debit `tx.amount` if the client has sufficient available funds; records
/// the amount as negative so a dispute re-credits it.
fn withdraw(tx: &Tx, bal: &mut Bal, tx_amt: &mut HashMap<usize, f64>) -> Result<()> {
    debug!("withdraw {tx:?}");
    if !is_tx_valid(tx, bal, tx_amt) {
        return Ok(());
    }
    let client = tx.client;
    let client_data = bal.get_mut(&client).ok_or(anyhow!("bad client"))?;
    if client_data.available < tx.amount {
        warn!("Warning! Cleint {} not have enough balance", client);
        return Ok(());
    }
    client_data.available -= tx.amount;
    tx_amt.insert(tx.tx_id, -tx.amount);
    Ok(())
}

/// Open a dispute on a previously seen tx: move its amount from available
/// into held. No-op for unknown clients/txs, locked accounts, or a tx that
/// is already under dispute.
fn dispute(
    tx: &Tx,
    bal: &mut Bal,
    tx_amt: &mut HashMap<usize, f64>,
    dispute_txs: &mut HashSet<usize>,
) -> Result<()> {
    debug!("dispute {tx:?}");
    let client = tx.client;
    if !bal.contains_key(&client) {
        debug!("Warning! Cleint {} not exists", client);
        return Ok(());
    }
    if !tx_amt.contains_key(&tx.tx_id) {
        debug!("Warning! tx {} not exists", tx.tx_id);
        return Ok(());
    }
    let client_data = bal.entry(client).or_insert_with(|| Balance::new(client));
    if client_data.locked {
        debug!("Warning! Cleint {} is locked, return", client);
        return Ok(());
    }
    if dispute_txs.contains(&tx.tx_id) {
        debug!("Warning! dispute tx {} already applied", tx.tx_id);
        return Ok(());
    }
    dispute_txs.insert(tx.tx_id);
    client_data.available -= tx_amt[&tx.tx_id];
    client_data.held += tx_amt[&tx.tx_id];
    Ok(())
}

/// Close a dispute in the client's favour: move the amount from held back
/// to available and forget the dispute.
fn resolve(
    tx: &Tx,
    bal: &mut Bal,
    tx_amt: &mut HashMap<usize, f64>,
    dispute_txs: &mut HashSet<usize>,
) -> Result<()> {
    debug!("resolve {tx:?}");
    if !dispute_txs.contains(&tx.tx_id) {
        debug!("Warning! dispute tx {} not exists", tx.tx_id);
        return Ok(());
    }
    let client = tx.client;
    if !bal.contains_key(&client) {
        debug!("Warning! Cleint {} not exists", client);
        return Ok(());
    }
    let client_data = bal.entry(client).or_insert_with(|| Balance::new(client));
    if !tx_amt.contains_key(&tx.tx_id) {
        debug!("Warning! tx {} not exists", tx.tx_id);
        return Ok(());
    }
    client_data.available += tx_amt[&tx.tx_id];
    client_data.held -= tx_amt[&tx.tx_id];
    dispute_txs.remove(&tx.tx_id);
    Ok(())
}

/// Close a dispute against the client: remove the held amount and lock the
/// account permanently.
// NOTE(review): unlike `resolve`, this does NOT remove the tx from
// `dispute_txs`, so a later `resolve` on the same tx would still pass the
// first guard — verify whether that is intended.
fn chargeback(
    tx: &Tx,
    bal: &mut Bal,
    tx_amt: &mut HashMap<usize, f64>,
    dispute_txs: &mut HashSet<usize>,
) -> Result<()> {
    debug!("chargeback {tx:?}");
    if !dispute_txs.contains(&tx.tx_id) {
        debug!("Warning! dispute tx {} not exists", tx.tx_id);
        return Ok(());
    }
    let client = tx.client;
    if !bal.contains_key(&client) {
        debug!("Warning! Cleint {} not exists", client);
        return Ok(());
    }
    let client_data = bal.entry(client).or_insert_with(|| Balance::new(client));
    if !tx_amt.contains_key(&tx.tx_id) {
        debug!("Warning! tx {} not exists", tx.tx_id);
        return Ok(());
    }
    debug!("client_data {client_data:?}");
    debug!("tx_amt {tx_amt:?}");
    client_data.held -= tx_amt[&tx.tx_id];
    client_data.locked = true;
    Ok(())
}

/// Shared guard for deposit/withdrawal: rejects duplicate tx ids, unknown
/// clients (auto-creating the account for deposits only) and locked
/// accounts.
fn is_tx_valid(tx: &Tx, bal: &mut Bal, tx_amt: &mut HashMap<usize, f64>) -> bool {
    if tx_amt.contains_key(&tx.tx_id) {
        debug!("TX {} already applied", tx.tx_id);
        return false;
    }
    let client = tx.client;
    if &tx.tx_type[..] == "deposit" {
        // A first-ever deposit creates the account on the fly.
        bal.entry(client).or_insert_with(|| {
            warn!("Cleint {} not exists", client);
            Balance::new(client)
        });
    }
    if !bal.contains_key(&client) {
        debug!("Warning! Cleint {} not exists", client);
        return false;
    }
    let client_data = &bal[&client];
    if client_data.locked {
        warn!("Cleint {} is locked, return", client);
        return false;
    }
    true
}

/// Strict CSV reader: deserializes rows directly into `Tx` values.
fn _data_from_csv_no_space(csv_file: &str) -> Result<Vec<Tx>> {
    // csv file must be cleaned up, without spaces between fields.
    let mut rdr = Reader::from_path(csv_file)?;
    let res = rdr
        .deserialize()
        .map(|r| r.map_err(|e| anyhow!("{}", e)))
        .collect::<Result<Vec<Tx>>>();
    res
}

/// Lenient CSV reader: trims whitespace from every field before
/// deserializing the record into a `Tx`.
fn data_from_csv_trim(csv_file: &str) -> Result<Vec<Tx>> {
    // trim extra spacess before deserialize to struct data.
    let mut rdr = Reader::from_path(csv_file)?;
    let res = rdr
        .records() // yield the iterator is a Result<StringRecord, Error>
        .map(|r| {
            let mut record = r?; // r type is Result<StringRecord, Error>
            record.trim();
            record.deserialize(None) // -> Result<D> , D is the struct type Tx,
        })
        .map(|r| r.map_err(|e| anyhow!("csv read error: {e}")))
        .collect::<Result<Vec<Tx>>>();
    res
}

/// The single expected CLI argument: the input CSV path.
fn input_filename() -> Result<String> {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        bail!("not good arguments.\n\nUsage: command csv_file \n");
    }
    let csv_file = args[1].clone();
    Ok(csv_file)
}

/// Print the final per-client balances as CSV to stdout.
/// Iteration order follows the HashMap, so rows are unordered.
fn print_result(bal: &Bal) -> Result<()> {
    println!("client, available, held, total, locked");
    for client_bal in bal.values() {
        let Balance {
            client,
            available,
            held,
            locked,
        } = client_bal;
        let total = available + held;
        println!("{client}, {available}, {held}, {total}, {locked}");
    }
    Ok(())
}

/// Debug helper: dump raw CSV records to the log.
fn _rec_from_csv(csv_file: &str) -> Result<()> {
    let mut rdr = Reader::from_path(csv_file)?;
    for result in rdr.records() {
        let record = result?;
        debug!("{:?}", record);
    }
    Ok(())
}

/// Debug helper: deserialize the whole CSV into a Vec, logging each row.
fn _data_vec_from_csv(csv_file: &str) -> Result<Vec<Tx>> {
    let mut rdr = Reader::from_path(csv_file)?;
    let mut res: Vec<Tx> = vec![];
    for row in rdr.deserialize() {
        let tx: Tx = row?;
        debug!("{tx:?}",);
        res.push(tx);
    }
    Ok(res)
}

// pub fn go() -> Result<(), Box<dyn Error>> {
//     let contents = "abc";
//     Ok(())
// }

#[cfg(test)]
mod tests;
use parser::ParseNode; use parser::GrammarItem; pub trait Visitor<T> { fn visit(&mut self, n: &ParseNode) -> T{ match n.entry { GrammarItem::LiteralInt(_) => self.visit_literal_int(n), GrammarItem::Variable(_) => self.visit_variable(n), GrammarItem::Abstraction(_,_) => self.visit_abstraction(n), GrammarItem::Application(_,_) => self.visit_application(n), GrammarItem::Assignment(_,_) => self.visit_assignment(n), GrammarItem::Program(_) => self.visit_program(n), } } fn visit_program(&mut self, a: &ParseNode) -> T; fn visit_abstraction(&mut self, a: &ParseNode) -> T; fn visit_application(&mut self, a: &ParseNode) -> T; fn visit_assignment(&mut self, a: &ParseNode) -> T; fn visit_literal_int(&mut self, i: &ParseNode) -> T; fn visit_variable(&mut self, v: &ParseNode) -> T; }
use std::env; use std::fs::File; use std::io::Write; use std::path::Path; fn main() { let directory = env::var("OUT_DIR").unwrap(); let path = Path::new(&directory).join("type.rs"); let mut file = File::create(path).unwrap(); file.write(b"type Test = ();\n").unwrap(); }
use std::borrow::Borrow;
use std::collections::HashMap;
use std::fmt::{Display,Debug,Error,Formatter};
use std::iter::Peekable;
use std::ops::Deref;

/// Raw byte string used for bencode strings and dictionary keys
/// (bencode strings are arbitrary bytes, not guaranteed UTF-8).
#[derive(PartialEq, Eq, Hash)]
pub struct ByteString(pub Vec<u8>);

impl Deref for ByteString {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        let ByteString(ref s) = *self;
        s.as_slice()
    }
}

impl Display for ByteString {
    // Rendered lossily as UTF-8 (invalid sequences become U+FFFD).
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        Display::fmt(&String::from_utf8_lossy(self.deref()).into_owned(), f)
    }
}

impl Debug for ByteString {
    // Debug deliberately delegates to Display so keys print as readable text.
    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
        Display::fmt(&self, f)
    }
}

impl Borrow<[u8]> for ByteString {
    // Enables HashMap<ByteString, _> lookups keyed by a plain &[u8].
    fn borrow(&self) -> &[u8] {
        self.deref()
    }
}

/// A decoded bencode value: integer, byte string, list, or dictionary.
#[derive(PartialEq, Debug)]
pub enum BenObject {
    I(i64),
    S(ByteString),
    L(Vec<BenObject>),
    D(HashMap<ByteString, BenObject>)
}

impl BenObject {
    /// Returns the integer payload, or `None` for other variants.
    pub fn as_int(&self) -> Option<i64> {
        match *self {
            BenObject::I(x) => Some(x),
            _ => None
        }
    }

    /// Returns the byte-string payload, or `None` for other variants.
    pub fn as_str(&self) -> Option<&ByteString> {
        match *self {
            BenObject::S(ref x) => Some(x),
            _ => None
        }
    }

    /// Returns the list payload, or `None` for other variants.
    pub fn as_list(&self) -> Option<&Vec<BenObject>> {
        match *self {
            BenObject::L(ref x) => Some(x),
            _ => None
        }
    }

    /// Returns the dictionary payload, or `None` for other variants.
    pub fn as_dict(&self) -> Option<&HashMap<ByteString, BenObject>> {
        match *self {
            BenObject::D(ref x) => Some(x),
            _ => None
        }
    }

    /// Decodes one bencode value from the front of a byte iterator.
    /// Errors (as `String` messages) on malformed or truncated input.
    pub fn decode<I>(bytes: &mut I) -> Result<Self, String>
        where I: Iterator<Item=u8>
    {
        Self::decode_benobject(&mut bytes.peekable())
    }

    /// Dispatch on the next byte: 'd' = dict, 'i' = integer, 'l' = list;
    /// anything else is assumed to begin a length-prefixed string.
    fn decode_benobject<I>(bytes: &mut Peekable<I>) -> Result<Self, String>
        where I: Iterator<Item=u8>
    {
        match bytes.peek() {
            Some(&c) => match c as char {
                'd' => Self::decode_bendict(bytes),
                'i' => Self::decode_benint(bytes),
                'l' => Self::decode_benlist(bytes),
                _ => Self::decode_benstring(bytes)
            },
            None => Err("BenObject not found".to_string())
        }
    }

    /// Decodes `d<key><value>...e`.  Keys must be bencode strings.
    /// Duplicate keys overwrite earlier entries (HashMap semantics).
    fn decode_bendict<I>(bytes: &mut Peekable<I>) -> Result<Self, String>
        where I: Iterator<Item=u8>
    {
        // Caller guaranteed the leading 'd' via peek.
        assert_eq!(bytes.next().unwrap(), 'd' as u8);
        let mut hash = HashMap::new();
        // Truncated input is still caught: on EOF the inner decoders error.
        while bytes.peek() != Some(&('e' as u8)) {
            let benstr = Self::decode_benstring(bytes);
            if benstr.is_err() {
                return benstr
            }
            let key = match benstr.unwrap() {
                BenObject::S(k) => k,
                // decode_benstring only ever constructs BenObject::S.
                _ => panic!("unexpected type")
            };
            let benobj = Self::decode_benobject(bytes);
            if benobj.is_err() {
                return benobj
            }
            hash.insert(key, benobj.unwrap());
        }
        if Self::skip_if_match(bytes, 'e') {
            Ok(BenObject::D(hash))
        } else {
            Err("parsing dict failed: expected 'e'".to_string())
        }
    }

    /// Decodes `i<digits>e` with an optional leading '-'.
    /// NOTE(review): values beyond u64/i64 range would wrap/truncate via
    /// the `as i64` cast — acceptable for well-formed torrents; confirm.
    fn decode_benint<I>(bytes: &mut Peekable<I>) -> Result<Self, String>
        where I: Iterator<Item=u8>
    {
        assert_eq!(bytes.next().unwrap(), 'i' as u8);
        let sign = if Self::skip_if_match(bytes, '-') { -1 } else { 1 };
        let val = sign * Self::decode_uint(bytes) as i64;
        if Self::skip_if_match(bytes, 'e') {
            Ok(BenObject::I(val))
        } else {
            Err("parsing integer failed: expected 'e'".to_string())
        }
    }

    /// Decodes `l<value>...e`.
    fn decode_benlist<I>(bytes: &mut Peekable<I>) -> Result<Self, String>
        where I: Iterator<Item=u8>
    {
        assert_eq!(bytes.next().unwrap(), 'l' as u8);
        let mut vec = Vec::new();
        while bytes.peek() != Some(&('e' as u8)) {
            let benobj = Self::decode_benobject(bytes);
            if benobj.is_err() {
                return benobj
            }
            vec.push(benobj.unwrap())
        }
        if Self::skip_if_match(bytes, 'e') {
            Ok(BenObject::L(vec))
        } else {
            Err("parsing list failed: expected 'e'".to_string())
        }
    }

    /// Decodes `<len>:<bytes>` — a decimal length, a colon, then exactly
    /// `len` raw bytes.
    fn decode_benstring<I>(bytes: &mut Peekable<I>) -> Result<Self, String>
        where I: Iterator<Item=u8>
    {
        let len = Self::decode_uint(bytes) as usize;
        if !Self::skip_if_match(bytes, ':') {
            return Err("parsing string failed: expected ':'".to_string())
        }
        let buf = bytes.by_ref().take(len).collect::<Vec<_>>();
        // take() silently stops at EOF, so verify we actually got len bytes.
        if buf.len() == len {
            Ok(BenObject::S(ByteString(buf)))
        } else {
            Err("parsing string failed: length mismatches".to_string())
        }
    }

    /// Consumes the next byte iff it equals `ch`; reports whether it did.
    fn skip_if_match<I>(bytes: &mut Peekable<I>, ch: char) -> bool
        where I: Iterator<Item=u8>
    {
        if bytes.peek() == Some(&(ch as u8)) {
            bytes.next();
            true
        } else {
            false
        }
    }

    /// Reads a run of ASCII digits as a base-10 number.  Returns 0 when the
    /// next byte is not a digit (callers then fail on the delimiter check).
    fn decode_uint<I>(bytes: &mut Peekable<I>) -> u64
        where I: Iterator<Item=u8>
    {
        let mut num = 0;
        while bytes.peek().map_or(false, |c| (*c as char).is_digit(10)) {
            num *= 10;
            num += (bytes.next().unwrap() - '0' as u8) as u64
        }
        num
    }
}
use std::ops::{Add, Sub};

/// A 4-component tuple in the ray-tracer sense: `w ≈ 1.0` marks a point,
/// `w ≈ 0.0` marks a vector.
struct Tuple {
    x: f64,
    y: f64,
    z: f64,
    w: f64,
}

/// Approximate float equality with a fixed epsilon of 1e-4.
pub fn almost_equal(a: f64, b: f64) -> bool {
    (a - b).abs() < 0.0001
}

impl Tuple {
    /// Build a tuple from all four components.
    pub fn new_tuple(x: f64, y: f64, z: f64, w: f64) -> Tuple {
        Tuple { x, y, z, w }
    }

    /// Build a point (`w = 1.0`).
    pub fn new_point(x: f64, y: f64, z: f64) -> Tuple {
        Tuple { x, y, z, w: 1.0 }
    }

    /// Build a vector (`w = 0.0`).
    pub fn new_vector(x: f64, y: f64, z: f64) -> Tuple {
        Tuple { x, y, z, w: 0.0 }
    }

    /// A tuple is a point when its `w` component is (approximately) 1.
    pub fn is_point(&self) -> bool {
        almost_equal(self.w, 1.0)
    }

    /// A tuple is a vector when its `w` component is (approximately) 0.
    pub fn is_vector(&self) -> bool {
        almost_equal(self.w, 0.0)
    }
}

impl PartialEq for Tuple {
    /// Component-wise approximate equality (epsilon 1e-4 per component).
    fn eq(&self, other: &Self) -> bool {
        almost_equal(self.x, other.x)
            && almost_equal(self.y, other.y)
            && almost_equal(self.z, other.z)
            && almost_equal(self.w, other.w)
    }
}

impl Add for Tuple {
    type Output = Tuple;

    /// Component-wise addition (point + vector = point, etc.).
    fn add(self, rhs: Tuple) -> Tuple {
        Tuple {
            x: self.x + rhs.x,
            y: self.y + rhs.y,
            z: self.z + rhs.z,
            w: self.w + rhs.w,
        }
    }
}

impl Sub for Tuple {
    type Output = Tuple;

    /// Component-wise subtraction (point - point = vector, etc.).
    fn sub(self, rhs: Tuple) -> Tuple {
        Tuple {
            x: self.x - rhs.x,
            y: self.y - rhs.y,
            z: self.z - rhs.z,
            w: self.w - rhs.w,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tuple_point_creation() {
        let t = Tuple::new_tuple(4.3, -4.2, 3.1, 1.0);
        assert!(almost_equal(t.x, 4.3));
        assert!(almost_equal(t.y, -4.2));
        assert!(almost_equal(t.z, 3.1));
        assert!(almost_equal(t.w, 1.0));
        assert!(t.is_point());
        assert!(!t.is_vector());
    }

    #[test]
    fn tuple_vector_creation() {
        let t = Tuple::new_tuple(4.3, -4.2, 3.1, 0.0);
        assert!(almost_equal(t.x, 4.3));
        assert!(almost_equal(t.y, -4.2));
        assert!(almost_equal(t.z, 3.1));
        assert!(almost_equal(t.w, 0.0));
        assert!(!t.is_point());
        assert!(t.is_vector());
    }

    #[test]
    fn point_creation() {
        let t = Tuple::new_point(4.3, -4.0, 3.0);
        assert!(t == Tuple::new_tuple(4.3, -4.0, 3.0, 1.0));
    }

    #[test]
    fn vector_creation() {
        let t = Tuple::new_vector(4.3, -4.0, 3.0);
        assert!(t == Tuple::new_tuple(4.3, -4.0, 3.0, 0.0));
    }

    #[test]
    fn tuple_add() {
        let a = Tuple::new_tuple(3.0, -2.0, 5.0, 1.0);
        let b = Tuple::new_tuple(-2.0, 3.0, 1.0, 0.0);
        let c: Tuple = a + b;
        assert!(c == Tuple::new_tuple(1.0, 1.0, 6.0, 1.0));
    }

    #[test]
    fn tuple_sub() {
        let a = Tuple::new_point(3.0, 2.0, 1.0);
        let b = Tuple::new_point(5.0, 6.0, 7.0);
        let c: Tuple = a - b;
        assert!(c == Tuple::new_vector(-2.0, -4.0, -6.0));
    }

    #[test]
    fn test_sub_point_and_vector() {
        let a = Tuple::new_point(3.0, 2.0, 1.0);
        let b = Tuple::new_vector(5.0, 6.0, 7.0);
        let c: Tuple = a - b;
        assert!(c == Tuple::new_point(-2.0, -4.0, -6.0));
    }

    #[test]
    fn test_vector_and_vector() {
        let a = Tuple::new_vector(3.0, 2.0, 1.0);
        let b = Tuple::new_vector(5.0, 6.0, 7.0);
        let c: Tuple = a - b;
        assert!(c == Tuple::new_vector(-2.0, -4.0, -6.0));
    }
}
extern crate serde_json; #[test] fn serialize_test() { let x = O(linked_hash_map![ "foo".to_string() => Some(Box::new(A( vec![ Some(Box::new(I(42))), Some(Box::new(F(42.0))), Some(Box::new(S("Hello".to_string()))) ]))), "bar".to_string() => Some(Box::new(S("bar".to_string()))), "car".to_string() => None ]); assert_eq!( &format!("{}", serde_json::to_string(&x).unwrap()), r#"{"foo":[42,42.0,"Hello"],"bar":"bar","car":null}"# ) }
//! Wrapper around route url string, and associated history state. use crate::route_service::RouteService; use serde::Deserialize; use serde::Serialize; use stdweb::unstable::TryFrom; use stdweb::JsSerialize; use stdweb::Value; use std::ops::Deref; use yew::agent::Transferable; /// Any state that can be stored by the History API must meet the criteria of this trait. pub trait RouteState: Clone + Default + JsSerialize + TryFrom<Value> + 'static {} impl<T> RouteState for T where T: Clone + Default + JsSerialize + TryFrom<Value> + 'static {} /// The representation of a route, segmented into different sections for easy access. #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] pub struct RouteInfo<T> { /// The route string pub route: String, /// The state pub state: Option<T>, } impl<T> RouteInfo<T> { /// Gets the current route from the route service. /// /// # Note /// It does not get the current state. /// That is only provided via events. /// See [RouteService.register_callback](struct.RouteService.html#method.register_callback) to acquire state. pub fn current_route(route_service: &RouteService<T>) -> Self { RouteInfo { route: route_service.get_route(), state: None, } } } impl<T> From<String> for RouteInfo<T> { fn from(string: String) -> RouteInfo<T> { RouteInfo { route: string, state: None, } } } impl<T> From<&str> for RouteInfo<T> { fn from(string: &str) -> RouteInfo<T> { RouteInfo { route: string.to_string(), state: None, } } } impl<T> Deref for RouteInfo<T> { type Target = str; fn deref(&self) -> &Self::Target { &self.route } } impl<T> Transferable for RouteInfo<T> where for<'de> T: Serialize + Deserialize<'de> {}
use qas::prelude::*; qas!("tests/c/arithmetic.c"); #[cfg(test)] #[test] fn main() { assert_eq!(add(29, 41), 70); assert_eq!(mulf(2.5, 4.0), 10.0); assert_eq!(divd(6.9, 3.0), 2.3000000000000003); }
use std::env;
use std::io;
use std::io::Read;
use std::fs::File;

/// Size of the interpreter's tape, in cells.
const MEMORY_SIZE: usize = 32768;

/// Increment a cell, wrapping 255 -> 0.
fn increment_with_overflow(value: &mut u8) {
    // Idiomatic replacement for the manual max-value check: u8 wrapping add.
    *value = value.wrapping_add(1);
}

/// Decrement a cell, wrapping 0 -> 255.
fn decrement_with_overflow(value: &mut u8) {
    *value = value.wrapping_sub(1);
}

/// Read the entire program text from `filename`; panics on I/O failure
/// (acceptable for a CLI tool entry path).
fn read_code_from_file(filename: &str) -> String {
    let mut file = File::open(filename)
        .expect("Could not open file");
    let mut content = String::new();
    file.read_to_string(&mut content)
        .expect("Could not read file");
    content
}

/// One of the eight Brainfuck instructions.
enum Instruction {
    Increment,
    Decrement,
    MoveRight,
    MoveLeft,
    Putchar,
    Getchar,
    LoopStart,
    LoopEnd
}

impl Instruction {
    /// Map a source character to an instruction.  Any other character is a
    /// comment and yields `None`.
    fn parse(character: char) -> Option<Instruction> {
        match character {
            '+' => Some(Instruction::Increment),
            '-' => Some(Instruction::Decrement),
            '>' => Some(Instruction::MoveRight),
            '<' => Some(Instruction::MoveLeft),
            '.' => Some(Instruction::Putchar),
            ',' => Some(Instruction::Getchar),
            '[' => Some(Instruction::LoopStart),
            ']' => Some(Instruction::LoopEnd),
            _ => None
        }
    }
}

/// Machine state: the tape, the data pointer, and a stack holding the
/// instruction indices of currently-open `[` loops.
struct Interpreter {
    memory: [u8; MEMORY_SIZE],
    pointer: usize,
    stack: Vec<usize>
}

impl Interpreter {
    /// Fresh machine: zeroed tape, pointer at cell 0, empty loop stack.
    fn new() -> Interpreter {
        Interpreter {
            memory: [0; MEMORY_SIZE],
            pointer: 0,
            stack: vec![]
        }
    }

    /// Translate source text into instructions, skipping comment characters.
    fn parse(code: &str) -> Vec<Instruction> {
        let mut instructions = vec![];
        for character in code.chars() {
            if let Some(instruction) = Instruction::parse(character) {
                instructions.push(instruction);
            }
        }
        instructions
    }

    /// Run the program to completion.
    ///
    /// Loop handling: `[` with a non-zero cell pushes its index; with a zero
    /// cell it scans forward (tracking nesting) to the matching `]`.  `]`
    /// jumps back to the instruction before its `[` so the condition is
    /// re-evaluated.  NOTE(review): unbalanced `]` panics with
    /// "Invalid code!", an unbalanced `[` scan or a `<` at cell 0 panics via
    /// index underflow/overflow — inputs are assumed well-formed.
    fn execute(&mut self, instructions: &Vec<Instruction>) {
        let mut index = 0;
        while index < instructions.len() {
            match instructions[index] {
                Instruction::Increment => increment_with_overflow(&mut self.memory[self.pointer]),
                Instruction::Decrement => decrement_with_overflow(&mut self.memory[self.pointer]),
                Instruction::MoveLeft => self.pointer -= 1,
                Instruction::MoveRight => self.pointer += 1,
                Instruction::Putchar => print!("{}", self.memory[self.pointer] as char),
                // Reads one byte from stdin; panics at EOF (marked NIY
                // upstream).
                Instruction::Getchar => self.memory[self.pointer] =
                    io::stdin().bytes().next().unwrap().unwrap(),
                Instruction::LoopStart => {
                    if self.memory[self.pointer] != 0 {
                        self.stack.push(index);
                    } else {
                        // Skip to the matching ']', counting nested loops.
                        let mut loops_counter = 0;
                        index += 1;
                        loop {
                            match instructions[index] {
                                Instruction::LoopStart => {
                                    loops_counter += 1;
                                },
                                Instruction::LoopEnd => {
                                    if loops_counter == 0 {
                                        break;
                                    }
                                    loops_counter -= 1;
                                },
                                _ => ()
                            }
                            index += 1;
                        }
                    }
                },
                Instruction::LoopEnd => {
                    index = match self.stack.pop() {
                        Some(idx) => idx - 1, // Index will be incremented
                        None => panic!("Invalid code!")
                    }
                }
            }
            index += 1;
        }
    }
}

/// CLI entry point: `program file.bf` reads and runs one Brainfuck file.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        println!("Incorrect usage!");
        println!("Use: {} file.bf", args[0]);
    } else {
        let code = read_code_from_file(&args[1]);
        let mut interpreter = Interpreter::new();
        interpreter.execute(&Interpreter::parse(&code));
    }
}
pub mod thread_simple_demo; pub mod channel; pub mod mutex_demo; /* 将程序中的计算拆分进多个线程可以改善性能,因为程序可以同时进行多个任务,不过这也会增加复杂性。 因为线程是同时运行的,所以无法预先保证不同线程中的代码的执行顺序。这会导致诸如此类的问题: 竞争状态(Race conditions),多个线程以不一致的顺序访问数据或资源 死锁(Deadlocks),两个线程相互等待对方停止使用其所拥有的资源,这会阻止它们继续运行 只会发生在特定情况且难以稳定重现和修复的 bug. 起初,Rust 团队认为确保内存安全和防止并发问题是两个分别需要不同方法应对的挑战。 随着时间的推移,团队发现所有权和类型系统是一系列解决内存安全 和 并发问题的强有力的工具! 通过改进所有权和类型检查,Rust 很多并发错误都是 编译时 错误,而非运行时错误。 因此,相比花费大量时间尝试重现运行时并发 bug 出现的特定情况, Rust 会拒绝编译不正确的代码并提供解释问题的错误信息。 因此,你可以在开发时而不是不慎部署到生产环境后修复代码。 线程安全虽然处处接触到内存,但跟内存安全还不是一回事. 内存安全可以被定义为:不访问任何未定义的内存。如:避免缓冲区溢出,避免引用未初始化等。 线程安全被定义为:多个线程访问类时,无论采取何种调度方式,主调代码中也不需要额外的同步和协同,都能表现出正确的行为。 同样是对内存进行操作,线程安全考虑的是其中包含数据的安全性,而非内存地址本身的问题。 memory safety: https://en.wikipedia.org/wiki/Memory_safety */
//! This crate provides three derive macros for [`eosio_core`] traits.
//!
//! # Examples
//!
//! ```
//! use eosio_core::{Read, Write, NumBytes};
//!
//! #[derive(Read, Write, NumBytes, PartialEq, Debug)]
//! #[eosio_core_root_path = "::eosio_core"]
//! struct Thing(u8);
//!
//! let thing = Thing(30);
//!
//! // Number of bytes
//! assert_eq!(thing.num_bytes(), 1);
//!
//! // Read bytes
//! assert_eq!(thing, Thing::read(&mut [30_u8], &mut 0).unwrap());
//!
//! // Write bytes
//! let mut bytes = vec![0_u8; 1];
//! thing.write(&mut bytes, &mut 0).unwrap();
//! assert_eq!(vec![30], bytes);
//! ```
//!
//! [`eosio_core`]: https://crates.io/crates/eosio_core
#![allow(clippy::unimplemented)]
extern crate proc_macro;

mod derive_num_bytes;
mod derive_read;
mod derive_table;
mod derive_write;
mod derive_digest;
mod derive_serialize_data;

use crate::proc_macro::TokenStream;
use proc_macro2::Span;
use syn::{DeriveInput, Lit, LitStr, Meta, Path};

/// Derive the `Digest` trait
#[inline]
#[proc_macro_derive(Digest, attributes(eosio_core_root_path))]
pub fn derive_digest(input: TokenStream) -> TokenStream {
    crate::derive_digest::expand(input)
}

/// Derive the `SerializeData` trait
#[inline]
#[proc_macro_derive(SerializeData, attributes(eosio_core_root_path))]
pub fn derive_serialize_data(input: TokenStream) -> TokenStream {
    crate::derive_serialize_data::expand(input)
}

/// Derive the `Write` trait
#[inline]
#[proc_macro_derive(Write, attributes(eosio_core_root_path))]
pub fn derive_write(input: TokenStream) -> TokenStream {
    crate::derive_write::expand(input)
}

/// Derive the `Read` trait
#[inline]
#[proc_macro_derive(Read, attributes(eosio_core_root_path))]
pub fn derive_read(input: TokenStream) -> TokenStream {
    crate::derive_read::expand(input)
}

/// Derive the `NumBytes` trait
#[inline]
#[proc_macro_derive(NumBytes, attributes(eosio_core_root_path))]
pub fn derive_num_bytes(input: TokenStream) -> TokenStream {
    crate::derive_num_bytes::expand(input)
}

/// Derive the `Table` trait, configured through the `table_name`,
/// `primary`, `secondary` and `singleton` helper attributes.
#[inline]
#[proc_macro_derive(
    Table,
    attributes(table_name, primary, secondary, singleton)
)]
pub fn derive_table(input: TokenStream) -> TokenStream {
    crate::derive_table::expand(input)
}

/// The default root path using the `eosio` crate.
#[cfg(feature = "internal-use-only-root-path-is-eosio")]
const DEFAULT_ROOT_PATH: &str = "::eosio";

/// The default root path using the `eosio_core` crate.
#[cfg(not(feature = "internal-use-only-root-path-is-eosio"))]
const DEFAULT_ROOT_PATH: &str = "::eosio_core";

/// Get the root path for types/traits.
///
/// Scans the derive input's attributes for
/// `#[eosio_core_root_path = "..."]`; the LAST such attribute wins
/// (fold-as-find).  Falls back to `DEFAULT_ROOT_PATH` when absent.
pub(crate) fn root_path(input: &DeriveInput) -> Path {
    let litstr = input
        .attrs
        .iter()
        // Attributes that fail to parse as meta items are skipped (Err arm).
        .fold(None, |acc, attr| match attr.parse_meta() {
            Ok(meta) => {
                let name = meta.path().get_ident();
                // NOTE(review): get_ident() is None for multi-segment
                // attribute paths, which would hit this expect — presumably
                // derive inputs only carry single-segment attrs; confirm.
                if name.as_ref().expect("please add trait root path").to_string()
                    == "eosio_core_root_path"
                {
                    match meta {
                        // Expected shape: #[eosio_core_root_path = "::path"]
                        Meta::NameValue(meta) => match meta.lit {
                            Lit::Str(s) => Some(s),
                            _ => panic!("eosio_core_path must be a lit str"),
                        },
                        _ => acc,
                    }
                } else {
                    acc
                }
            }
            Err(_) => acc,
        })
        .unwrap_or_else(|| LitStr::new(DEFAULT_ROOT_PATH, Span::call_site()));
    // Re-parse the literal as a mod-style path (e.g. `::eosio_core`).
    litstr
        .parse_with(Path::parse_mod_style)
        .expect("bad path for eosio_core_root_path")
}
use std::fs::File; use std::io::Read; use ascii::{AsciiChar, AsciiString}; fn expand(line: &str) -> String { let mut res = String::new(); // Add Dots around the input from left and from right to deal with // the corner cases let mut input = AsciiString::new(); input.push(AsciiChar::Dot); input.push_str(&AsciiString::from_ascii(line).unwrap()); input.push(AsciiChar::Dot); let slice = input.as_slice(); for index in 1 .. slice.len()-1 { let left_trap = slice[index - 1] == AsciiChar::Caret; //let center_trap = slice[index] == AsciiChar::Caret; let right_trap = slice[index + 1] == AsciiChar::Caret; // Rules: // left and center are traps, right is not -> trap // center and right are traps, left is not -> trap // only left is trap -> trap // only right is trap -> trap // else clear if (left_trap && !right_trap) || (!left_trap && right_trap) { res.push('^'); } else { res.push('.'); } } res } fn count_safe(first: &str, lines: u32) -> u32 { let mut total = 0; let mut current = first.to_string(); for _i in 0 .. lines { total += current.chars().filter(|c| *c == '.').count(); current = expand(&current); } total as u32 } fn main() { let mut file = File::open("input18.txt").unwrap(); let mut text = String::new(); file.read_to_string(&mut text).unwrap(); let line = text.lines().nth(0).unwrap(); println!("Part 1: {}", count_safe(&line, 40)); // There is probably some memoization potential // in 400000 repetitions. Not implemented yet though. println!("Part 2: {}", count_safe(&line, 400000)); } #[cfg(test)] mod tests { use super::*; #[test] fn test_expand() { assert_eq!(expand("..^^."), ".^^^^"); assert_eq!(expand(".^^^^"), "^^..^"); assert_eq!(expand(".^^.^.^^^^"), "^^^...^..^"); } #[test] fn test_example() { assert_eq!(count_safe(".^^.^.^^^^", 10), 38); } }
extern crate lbasi; use lbasi::*; #[test] fn test_operator_identification() { assert_eq!(interpreter::Token { kind: interpreter::TokenType::Add, value: "+".to_string(), }, interpreter::Token::op('+')); assert_eq!(interpreter::Token { kind: interpreter::TokenType::Subtract, value: "-".to_string(), }, interpreter::Token::op('-')); assert_eq!(interpreter::Token { kind: interpreter::TokenType::Multiply, value: "*".to_string(), }, interpreter::Token::op('*')); assert_eq!(interpreter::Token { kind: interpreter::TokenType::Divide, value: "/".to_string(), }, interpreter::Token::op('/')); } #[test] fn test_interpreter() { interpreter_test("5 - 3", 2); interpreter_test("4 - 2 - 4", -2); interpreter_test("10 - 2 - 4", 4); interpreter_test("5 + 3", 8); interpreter_test("3 + 5 + 6 + 9", 23); interpreter_test("33 + 25 + 10", 68); interpreter_test("22 - 11 + 10 - 21", 0); interpreter_test("3 + 4 - 2", 5); interpreter_test("2 * 3", 6); interpreter_test("1 * 3", 3); interpreter_test("4 / 2", 2); interpreter_test("20 / 2", 10); } fn interpreter_test(expr: &str, expected: i32) { let answer = interpreter::run(expr.to_string().clone()) .expect("Got an error running interpreter"); assert!(answer == expected, "expr = {} answer = {} expected = {}", expr, answer, expected) }
use std::collections::BTreeMap;

use fopply::{binding::*, parsing::*, read_fpl, utils::char_index::*};

// Forward application: bind the left side of the `part` swap formula
// against a concrete expression, then apply the bindings to the right side.
#[test]
fn test() {
    // Parser smoke checks for a few expression shapes.
    assert!(parser::expr("(a+b)+c").is_ok());
    assert!(parser::expr("a+b+c").is_ok());
    assert!(parser::expr("a+sin(b)").is_ok());
    assert!(parser::expr("a+$f(b)").is_ok());
    assert!(parser::expr("$true+$f(1)-an").is_ok());

    let mut any_function_bindings = ManualAnyFunctionBinding::new(BTreeMap::default());

    let expression = parser::expr("part(b = 0, a, a*part($true, 1, $undefined))").unwrap();
    let expression = clear_parsing_info(expression);

    let formula = parser::formula("part(cond, then, else) <-> part(not(cond), else, then)").unwrap();

    let mut bindings = BindingStorage::default();

    find_bindings(expression, &clear_parsing_info(formula.left), &mut bindings, &mut any_function_bindings).unwrap();

    let result = apply_bindings(clear_parsing_info(formula.right), &bindings, &any_function_bindings);

    // Branches swapped and the condition negated.
    let should_be = parser::expr("part(not(b = 0), a*part($true, 1, $undefined), a)").unwrap();
    let should_be = clear_parsing_info(should_be);

    assert_eq!(result, should_be);
}

// Reverse application with a manually pre-seeded binding (`x := b = 0`):
// matching against the RIGHT side and rebuilding the LEFT side.
#[test]
fn test2() {
    let expression = parser::expr("a").unwrap();
    let expression = clear_parsing_info(expression);

    let formula = parser::formula("part(x, a, a) <-> a").unwrap();

    let mut any_function_bindings = ManualAnyFunctionBinding::new(BTreeMap::default());

    let mut bindings = BindingStorage::default();
    bindings.add(parser::binding("x := b = 0").unwrap());

    find_bindings(expression, &clear_parsing_info(formula.right), &mut bindings, &mut any_function_bindings).unwrap();

    let result = apply_bindings(clear_parsing_info(formula.left), &bindings, &any_function_bindings);

    let should_be = parser::expr("part(b = 0, a, a)").unwrap();
    let should_be = clear_parsing_info(should_be);

    assert_eq!(result, should_be);
}

// Higher-order matching: `$f` is a function meta-variable, resolved here by
// an explicit function binding `$f(x) := a*x`.
#[test]
fn test3() {
    let expression = parser::expr("part(not(b = 0), a*part($true, 1, $undefined), a)").unwrap();
    let expression = clear_parsing_info(expression);

    let formula = parser::formula(
        "part(cond, $f(part(cond2, then2, else2)), else) <-> part(cond, $f(part(cond2 & cond, then2, else2)), else)",
    )
    .unwrap();

    let mut binding_map = BTreeMap::new();
    let (key, value) = parser::function_binding("$f(x) := a*x").unwrap();
    binding_map.insert(key, value);
    let mut any_function_bindings = ManualAnyFunctionBinding::new(binding_map);

    let mut bindings = BindingStorage::default();

    find_bindings(expression, &clear_parsing_info(formula.left), &mut bindings, &mut any_function_bindings).unwrap();

    let result = apply_bindings(clear_parsing_info(formula.right), &bindings, &any_function_bindings);

    // The inner condition is strengthened with the outer one (cond2 & cond).
    let should_be = parser::expr("part(not(b = 0), a*part($true & not(b = 0), 1, $undefined), a)").unwrap();
    let should_be = clear_parsing_info(should_be);

    assert_eq!(result, should_be);
}

// Asserts two source strings parse to the same tree (ignoring positions).
macro_rules! same {
    ($a:expr, $b:expr) => {
        assert_eq!(clear_parsing_info(parser::expr($a).unwrap()), clear_parsing_info(parser::expr($b).unwrap()));
    };
}

// Pins the grammar's associativity: +,* are right-associative here, ^ is
// left-associative (as written in these expectations).
#[test]
fn associativity() {
    same!("a+b+c", "a+(b+c)");
    same!("a+b*c", "a+(b*c)");
    same!("a*b+c", "(a*b)+c");
    same!("a*b*c", "a*(b*c)");
    same!("a^b^c", "(a^b)^c");
}

// Pins operator precedence: ^ binds tighter than *, which binds tighter
// than +.
#[test]
fn priority() {
    same!("a*b+c*d", "(a*b)+(c*d)");
    same!("a^b*c^d", "(a^b)*(c^d)");
}

// Manual diagnostic (deliberately not a test — see the commented-out
// attribute): prints each parsed sub-expression's source span with a
// caret underline.
// #[test]
fn parsing_info() {
    // Calls `$name(args...)` and, if it returns None, panics with a
    // human-readable description of the call including evaluated arguments.
    macro_rules! debug_unwrap {
        ($name:ident( $($arg:expr),* )) => {{
            let mut debug_string = String::from(stringify!($name));
            debug_string.push_str("(");
            $(
                debug_string.push_str(concat!(stringify!($arg), " = "));
                debug_string.push_str(format!("{:?}", $arg).as_ref());
                debug_string.push_str(", ");
            )*
            if debug_string.ends_with(", ") {
                debug_string.pop();
                debug_string.pop();
            }
            debug_string.push_str(")");
            $name($($arg),*).unwrap_or_else(|| panic!(debug_string))
        }};
    }

    let string = "part(b = 0, a, a *part($true, 1, $undefined))";
    // let string = "a+b+c+d^f*e";
    let parsed = parser::expr(string).unwrap();
    let (_, positions) = process_expression_parsing(parsed);
    // Convert byte ranges to char ranges so the underline aligns.
    let positions: Vec<_> = positions
        .into_iter()
        .map(|(pos, range)| {
            let new_range = debug_unwrap!(get_char_range(string, range));
            (pos, new_range)
        })
        .collect();
    for (pos, range) in positions {
        use std::iter;
        println!(
            "{eq}\n{spaces_before}{arrows}{spaces_after} - {position:?}",
            eq = string,
            spaces_before = iter::repeat(' ').take(range.start.0).collect::<String>(),
            arrows = iter::repeat('^').take(range.end.0 - range.start.0).collect::<String>(),
            spaces_after = iter::repeat(' ').take(string.len() - range.end.0).collect::<String>(),
            position = pos,
        )
    }
}

// Integration check: the bundled formula library must parse end-to-end.
#[test]
fn read_math_fpl() {
    read_fpl(&std::fs::read_to_string("fpl/math.fpl").unwrap()).unwrap();
}
use std::str::FromStr; use juice_sdk_rs::{client, transports, types::H256, Result}; #[tokio::main] async fn main() -> Result<()> { let transport = transports::http::Http::new("http://10.1.1.40:7009")?; let client = client::Client::new(transport, true); let receipt = client .transaction_receipt( String::from("sys"), H256::from_str("0x0166fcae120e3d7e34324dcbb93335677dcf39076c6de9c03ead1d7f72ddde31") .unwrap(), ) .await?; println!("{}", serde_json::to_string_pretty(&receipt).unwrap()); Ok(()) }
extern crate regex;
use self::regex::Regex;
use std::collections::BinaryHeap;
use token_analysis::token;

/// Splits a whole source string on ';' and tokenizes each piece
/// independently; each piece yields its own Ok/Err result.
fn tokenize_source_code(
    source_code: String,
) -> Vec<Result<Vec<token::Token<'static>>, token::TokenError>> {
    source_code
        .split(";")
        .map(|line_of_code| tokenize_line(String::from(line_of_code), &token::RULES))
        .collect::<Vec<_>>()
}

/// Runs every rule's regex over the line and pushes all matches onto the
/// heap.  NOTE(review): rules with invalid regexes are silently skipped
/// (`map_err(|_| ())` discards the error), and `map` is used purely for
/// side effects — a `for` loop would be clearer.  The camelCase name is
/// non-idiomatic Rust but is kept as-is (callers exist in this file).
fn addTokensToBinaryHeap<'a>(
    line_of_code: &String,
    match_heap: &mut BinaryHeap<token::TokenMatch<'a>>,
    token_rules: &'a Vec<token::TokenRules>,
) {
    token_rules
        .iter()
        .map(|token_match_rule| {
            Regex::new(&token_match_rule.regex_rule)
                .map(|regex| {
                    let matched_rules = regex.find_iter(&line_of_code);
                    matched_rules
                        .map(|t| {
                            match_heap.push(token::TokenMatch {
                                literal: String::from(t.as_str()),
                                // NOTE(review): "begin_segmet" typo is in the
                                // external TokenMatch struct definition.
                                begin_segmet: t.start().clone(),
                                end_segment: t.end().clone(),
                                rule: token_match_rule,
                            });
                        })
                        .collect::<Vec<_>>();
                })
                .map_err(|_| ());
        })
        .collect::<Vec<_>>();
}

/// Returns `line_of_code` with the matched [begin, end) byte range removed
/// (prefix + suffix concatenated).
fn slice_line_of_code<'a>(first_match: token::TokenMatch, line_of_code: &String) -> String {
    let mut mutable_line_of_code = line_of_code.clone();
    // Keep everything before the match...
    mutable_line_of_code.split_off(first_match.begin_segmet);
    // ...and everything after it.
    let end_range = line_of_code.clone().split_off(first_match.end_segment);
    mutable_line_of_code.push_str(&end_range);
    mutable_line_of_code
}

/// Tokenizes one line recursively: take the heap's top match (ordering is
/// defined by TokenMatch's Ord impl, declared elsewhere — presumably
/// best-match-first; confirm), emit its token, cut the matched span out of
/// the line, and recurse on the remainder.
///
/// NOTE(review): errors from the recursive call are swallowed by the final
/// `unwrap_or(Ok(tokens.clone()))` — once at least one token matched, the
/// result is always Ok.  The tests below rely on the resulting
/// last-match-first token order.
fn tokenize_line<'a>(
    line_of_code: String,
    token_rules: &'a Vec<token::TokenRules>,
) -> Result<Vec<token::Token<'a>>, token::TokenError> {
    if line_of_code.len() <= 0 {
        Err(token::TokenError::EmptyLineOfCode)
    } else {
        let mut tokens = vec![];
        let mut match_heap = BinaryHeap::new();
        addTokensToBinaryHeap(&line_of_code, &mut match_heap, token_rules);
        match_heap
            .pop()
            .ok_or_else(|| token::TokenError::NoMatch)
            .and_then(|first_match| {
                tokens.push(token::Token {
                    content: first_match.literal.clone(),
                    token_type: &first_match.rule.token_type,
                });
                tokenize_line(slice_line_of_code(first_match, &line_of_code), token_rules)
                    .map(|tokenized| {
                        tokens.extend(tokenized);
                        Ok(tokens.clone())
                    })
                    .unwrap_or(Ok(tokens.clone()))
            })
    }
}

#[test]
fn test_ending_terminator() {
    let tokens = tokenize_line(String::from(";;"), &token::RULES).expect("expect tokens");
    assert_eq!(tokens.len(), 2);
    let first_token = tokens.first().expect("expected token after first");
    assert_eq!(first_token.content, ";");
    assert_eq!(first_token.token_type, &token::TokenType::EndingTerminator);
}

// Tokens come out in reverse source order (see tokenize_line): index 0 is
// the LAST word, index 2 the first.
#[test]
fn test_alphabetic() {
    let tokens =
        tokenize_line(String::from("thisforme;again"), &token::RULES).expect("expect tokens");
    assert_eq!(tokens.len(), 3);
    let first_token = tokens.first().expect("expected token after first");
    assert_eq!(first_token.content, "again");
    assert_eq!(first_token.token_type, &token::TokenType::Alphabetic);
    let last_token = tokens.get(2).expect("expected token after get");
    assert_eq!(last_token.content, "thisforme");
    assert_eq!(last_token.token_type, &token::TokenType::Alphabetic);
}

#[test]
fn test_assignment_symbol() {
    let tokens =
        tokenize_line(String::from("thisforme;="), &token::RULES).expect("expect tokens");
    assert_eq!(tokens.len(), 3);
    let last_token = tokens.get(0).expect("expected token after get");
    assert_eq!(last_token.content, "=");
    assert_eq!(last_token.token_type, &token::TokenType::AssignmentSymbol);
}

#[test]
fn test_numeric_symbol() {
    let tokens =
        tokenize_line(String::from("thisforme;=1234"), &token::RULES).expect("expect tokens");
    assert_eq!(tokens.len(), 4);
    let last_token = tokens.get(0).expect("expected token after get");
    assert_eq!(last_token.content, "1234");
    assert_eq!(last_token.token_type, &token::TokenType::Numeric);
}

#[test]
fn test_newline_symbol() {
    let tokens =
        tokenize_line(String::from("thisforme;=1234\n"), &token::RULES).expect("expect tokens");
    assert_eq!(tokens.len(), 5);
    let last_token = tokens.get(0).expect("expected token after get");
    assert_eq!(last_token.content, "\n");
    assert_eq!(last_token.token_type, &token::TokenType::Newline);
}

#[test]
fn test_space() {
    let tokens = tokenize_line(String::from("  try"), &token::RULES).expect("expect tokens");
    assert_eq!(tokens.len(), 3);
    let last_token = tokens.get(2).expect("expected token after get");
    assert_eq!(last_token.content, " ");
    assert_eq!(last_token.token_type, &token::TokenType::Space);
    let middle_token = tokens.get(1).expect("expected token after get");
    assert_eq!(middle_token.content, " ");
    assert_eq!(middle_token.token_type, &token::TokenType::Space);
    let first_token = tokens.get(0).expect("expected token after get");
    assert_eq!(first_token.content, "try");
    assert_eq!(first_token.token_type, &token::TokenType::Alphabetic);
}

#[test]
fn test_single_quote() {
    let tokens = tokenize_line(String::from("' tryme'"), &token::RULES).expect("expect tokens");
    assert_eq!(tokens.len(), 4);
    let last_token = tokens.get(3).expect("expected token after get");
    assert_eq!(last_token.content, "'");
    // NOTE(review): "SinlgeQuote" typo is in the external TokenType enum.
    assert_eq!(last_token.token_type, &token::TokenType::SinlgeQuote);
    let middle_token = tokens.get(2).expect("expected token after get");
    assert_eq!(middle_token.content, " ");
    assert_eq!(middle_token.token_type, &token::TokenType::Space);
    let first_token = tokens.get(0).expect("expected token after get");
    assert_eq!(first_token.content, "'");
    assert_eq!(first_token.token_type, &token::TokenType::SinlgeQuote);
}

#[test]
fn test_artihmetic_symbol() {
    let tokens = tokenize_line(String::from("stuff<try>*okay-;+this/j:"), &token::RULES)
        .expect("expect tokens");
    assert_eq!(tokens.len(), 11);
    // Expected operators in reverse source order (tokens come out reversed).
    let mut test = vec!["/", "+", "-", "*", ">", "<", ":"];
    test.reverse();
    let filtered_tokens = tokens
        .iter()
        .filter(|filt| test.contains(&&*filt.content))
        .collect::<Vec<_>>();
    assert_eq!(filtered_tokens.len(), test.len());
    filtered_tokens
        .iter()
        .map(|t| {
            assert_eq!(t.content, test.pop().expect("error unwrapping test data"));
            // NOTE(review): "Arithmentic" typo is in the external enum.
            assert_eq!(t.token_type, &token::TokenType::Arithmentic);
        })
        .collect::<Vec<_>>();
}
use proconio::input; fn main() { input! { n:u32, } let mut ans = 1; for i in 1..10 { ans = 111 * i; if ans >= n { break; } } println!("{}", ans); }
use crate::hitable::Hitable; use rand::random; use crate::vec3::Vec3; use crate::material::Material; pub fn final_scene() -> Vec<Hitable> { // Randomly generate a number of small spheres. let mut small_spheres: Vec<Hitable> = vec![]; let radius = 0.2; for a in -11..11 { for b in -11..11 { let choose_material = random::<f64>(); let centre = Vec3::new( a as f64 + 0.9 * random::<f64>(), radius, b as f64 + 0.9 * random::<f64>() ); if (centre - Vec3::new(4.0, 0.2, 0.0 )).length() > 0.9 { if choose_material < 0.8 { // Create a diffuse sphere small_spheres.push(Hitable::sphere( centre, radius, Material::lambertian( random::<f64>() * random::<f64>(), random::<f64>() * random::<f64>(), random::<f64>() * random::<f64>(), ) )) } else if choose_material < 0.95 { // Create a metal sphere small_spheres.push(Hitable::sphere( centre, radius, Material::metal( 0.5 * (1.0 + random::<f64>()), 0.5 * (1.0 + random::<f64>()), 0.5 * (1.0 + random::<f64>()), 0.5, ) )); } else { // Create a glass sphere small_spheres.push(Hitable::sphere( centre, radius, Material::dielectric(1.5) )) } } else { } } }; let ground = Hitable::sphere(Vec3::new(0.0, -1000.0, 0.0), 1000.0, Material::lambertian(0.5, 0.5, 0.5)); // Three more spheres that sit in the centre of the image. let glass_sphere = Hitable::sphere(Vec3::new(0.0, 1.0, 0.0), 1.0, Material::dielectric(1.5)); let matte_sphere = Hitable::sphere(Vec3::new(-4.0, 1.0, 0.0), 1.0, Material::lambertian(0.4, 0.2, 0.1)); let metal_sphere = Hitable::sphere(Vec3::new(4.0, 1.0, 0.0), 1.0, Material::metal(0.7, 0.6, 0.5, 0.0)); let all_spheres: Vec<Hitable> = vec![ small_spheres, vec![ground, glass_sphere, matte_sphere, metal_sphere] ].into_iter().flatten().collect(); return all_spheres; }
//! Basic library that has code that is shared between userspace and kernel #![no_std] #![feature(asm)] #![feature(allocator_api)] #![feature(alloc_prelude)] #![feature(alloc_error_handler)] #![feature(const_fn_trait_bound)] extern crate alloc; pub mod atomic; pub mod cell; pub mod collections; pub mod futex; pub mod mem; pub mod misc; pub mod ptr; mod uses; use mem::Allocation; static mut UTIL_CALLS: Option<&'static dyn UtilCalls> = None; pub trait UtilCalls { fn futex_new(&self) -> usize; fn futex_destroy(&self, id: usize); fn block(&self, id: usize); fn unblock(&self, id: usize); fn alloc(&self, size: usize) -> Option<Allocation>; fn dealloc(&self, mem: Allocation); } fn futex_new() -> usize { unsafe { UTIL_CALLS.as_ref().unwrap().futex_new() } } fn futex_destroy(id: usize) { unsafe { UTIL_CALLS.as_ref().unwrap().futex_destroy(id) } } fn block(id: usize) { unsafe { UTIL_CALLS.as_ref().unwrap().block(id); } } fn unblock(id: usize) { unsafe { UTIL_CALLS.as_ref().unwrap().unblock(id); } } fn alloc(size: usize) -> Option<Allocation> { unsafe { UTIL_CALLS.as_ref().unwrap().alloc(size) } } fn dealloc(mem: Allocation) { unsafe { UTIL_CALLS.as_ref().unwrap().dealloc(mem); } } /// safety: can only be called singel threaded, and cannot call any other library functions untill after this returns pub unsafe fn init(calls: &'static dyn UtilCalls) { UTIL_CALLS = Some(calls); #[cfg(not(feature = "kernel"))] mem::init(); }
use std::io::{self, Write}; fn main() -> io::Result<()> { io::stdout().write(b"hello world\n")?; Ok(()) }
use crate::{ config::Config, db_conn::DbConn, with_config, with_db_conn, with_reqwest_client, ConfirmQueryParams, InstallQueryParams, }; use reqwest::Client; use std::sync::Arc; use warp::{filters::BoxedFilter, Filter}; fn path_prefix_install() -> BoxedFilter<()> { warp::path("shopify_install").boxed() } fn path_prefix_confirm() -> BoxedFilter<()> { warp::path("shopify_confirm").boxed() } pub fn shopify_install( config: Arc<Config>, db_conn: Arc<DbConn>, ) -> BoxedFilter<(InstallQueryParams, Arc<Config>, Arc<DbConn>)> { warp::get() .and(path_prefix_install()) .and(warp::query::query::<InstallQueryParams>()) .and(with_config(config)) .and(with_db_conn(db_conn)) .boxed() } pub fn shopify_confirm( config: Arc<Config>, db_conn: Arc<DbConn>, client: Arc<Client>, ) -> BoxedFilter<(ConfirmQueryParams, Arc<Config>, Arc<DbConn>, Arc<Client>)> { warp::get() .and(path_prefix_confirm()) .and(warp::query::query::<ConfirmQueryParams>()) .and(with_config(config)) .and(with_db_conn(db_conn)) .and(with_reqwest_client(client)) .boxed() }
#[doc = "Reader of register PWR_BUCK_CTL"] pub type R = crate::R<u32, super::PWR_BUCK_CTL>; #[doc = "Writer for register PWR_BUCK_CTL"] pub type W = crate::W<u32, super::PWR_BUCK_CTL>; #[doc = "Register PWR_BUCK_CTL `reset()`'s with value 0x05"] impl crate::ResetValue for super::PWR_BUCK_CTL { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x05 } } #[doc = "Reader of field `BUCK_OUT1_SEL`"] pub type BUCK_OUT1_SEL_R = crate::R<u8, u8>; #[doc = "Write proxy for field `BUCK_OUT1_SEL`"] pub struct BUCK_OUT1_SEL_W<'a> { w: &'a mut W, } impl<'a> BUCK_OUT1_SEL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07); self.w } } #[doc = "Reader of field `BUCK_EN`"] pub type BUCK_EN_R = crate::R<bool, bool>; #[doc = "Write proxy for field `BUCK_EN`"] pub struct BUCK_EN_W<'a> { w: &'a mut W, } impl<'a> BUCK_EN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "Reader of field `BUCK_OUT1_EN`"] pub type BUCK_OUT1_EN_R = crate::R<bool, bool>; #[doc = "Write proxy for field `BUCK_OUT1_EN`"] pub struct BUCK_OUT1_EN_W<'a> { w: &'a mut W, } impl<'a> BUCK_OUT1_EN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { 
#[doc = "Bits 0:2 - Voltage output selection for vccbuck1 output. This register is only reset by XRES/POR/BOD/OVP/HIBERNATE. When increasing the voltage, it can take up to 200us for the output voltage to settle. When decreasing the voltage, the settling time depends on the load current. 0: 0.85V 1: 0.875V 2: 0.90V 3: 0.95V 4: 1.05V 5: 1.10V 6: 1.15V 7: 1.20V"] #[inline(always)] pub fn buck_out1_sel(&self) -> BUCK_OUT1_SEL_R { BUCK_OUT1_SEL_R::new((self.bits & 0x07) as u8) } #[doc = "Bit 30 - Master enable for buck converter. This register is only reset by XRES/POR/BOD/OVP/HIBERNATE."] #[inline(always)] pub fn buck_en(&self) -> BUCK_EN_R { BUCK_EN_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - Enable for vccbuck1 output. The value in this register is ignored unless PWR_BUCK_CTL.BUCK_EN==1. This register is only reset by XRES/POR/BOD/OVP/HIBERNATE. The regulator takes up to 600us to charge the external capacitor. If there is additional load current while charging, this will increase the startup time. The SAS specifies the required sequence when transitioning vccd from the LDO to SIMO Buck output #1."] #[inline(always)] pub fn buck_out1_en(&self) -> BUCK_OUT1_EN_R { BUCK_OUT1_EN_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:2 - Voltage output selection for vccbuck1 output. This register is only reset by XRES/POR/BOD/OVP/HIBERNATE. When increasing the voltage, it can take up to 200us for the output voltage to settle. When decreasing the voltage, the settling time depends on the load current. 0: 0.85V 1: 0.875V 2: 0.90V 3: 0.95V 4: 1.05V 5: 1.10V 6: 1.15V 7: 1.20V"] #[inline(always)] pub fn buck_out1_sel(&mut self) -> BUCK_OUT1_SEL_W { BUCK_OUT1_SEL_W { w: self } } #[doc = "Bit 30 - Master enable for buck converter. This register is only reset by XRES/POR/BOD/OVP/HIBERNATE."] #[inline(always)] pub fn buck_en(&mut self) -> BUCK_EN_W { BUCK_EN_W { w: self } } #[doc = "Bit 31 - Enable for vccbuck1 output. 
The value in this register is ignored unless PWR_BUCK_CTL.BUCK_EN==1. This register is only reset by XRES/POR/BOD/OVP/HIBERNATE. The regulator takes up to 600us to charge the external capacitor. If there is additional load current while charging, this will increase the startup time. The SAS specifies the required sequence when transitioning vccd from the LDO to SIMO Buck output #1."] #[inline(always)] pub fn buck_out1_en(&mut self) -> BUCK_OUT1_EN_W { BUCK_OUT1_EN_W { w: self } } }
#[doc = "Reader of register ATT0"] pub type R = crate::R<u32, super::ATT0>; #[doc = "Writer for register ATT0"] pub type W = crate::W<u32, super::ATT0>; #[doc = "Register ATT0 `reset()`'s with value 0x0124"] impl crate::ResetValue for super::ATT0 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0x0124 } } #[doc = "Reader of field `UR`"] pub type UR_R = crate::R<bool, bool>; #[doc = "Write proxy for field `UR`"] pub struct UR_W<'a> { w: &'a mut W, } impl<'a> UR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `UW`"] pub type UW_R = crate::R<bool, bool>; #[doc = "Write proxy for field `UW`"] pub struct UW_W<'a> { w: &'a mut W, } impl<'a> UW_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `UX`"] pub type UX_R = crate::R<bool, bool>; #[doc = "Reader of field `PR`"] pub type PR_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PR`"] pub struct PR_W<'a> { w: &'a mut W, } impl<'a> PR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { 
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Reader of field `PW`"] pub type PW_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PW`"] pub struct PW_W<'a> { w: &'a mut W, } impl<'a> PW_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Reader of field `PX`"] pub type PX_R = crate::R<bool, bool>; #[doc = "Reader of field `NS`"] pub type NS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `NS`"] pub struct NS_W<'a> { w: &'a mut W, } impl<'a> NS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "Reader of field `PC_MASK_0`"] pub type PC_MASK_0_R = crate::R<bool, bool>; #[doc = "Reader of field `PC_MASK_15_TO_1`"] pub type PC_MASK_15_TO_1_R = crate::R<u16, u16>; #[doc = "Write proxy for field `PC_MASK_15_TO_1`"] pub struct PC_MASK_15_TO_1_W<'a> { w: &'a mut W, } impl<'a> PC_MASK_15_TO_1_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !(0x7fff << 9)) | (((value as u32) & 0x7fff) << 9); self.w } } #[doc = "Reader of field `REGION_SIZE`"] pub type REGION_SIZE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `REGION_SIZE`"] pub struct REGION_SIZE_W<'a> { w: &'a mut W, } impl<'a> 
REGION_SIZE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x1f << 24)) | (((value as u32) & 0x1f) << 24); self.w } } #[doc = "Reader of field `PC_MATCH`"] pub type PC_MATCH_R = crate::R<bool, bool>; #[doc = "Write proxy for field `PC_MATCH`"] pub struct PC_MATCH_W<'a> { w: &'a mut W, } impl<'a> PC_MATCH_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "Reader of field `ENABLED`"] pub type ENABLED_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ENABLED`"] pub struct ENABLED_W<'a> { w: &'a mut W, } impl<'a> ENABLED_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bit 0 - User read enable: '0': Disabled (user, read acceses are NOT allowed). '1': Enabled (user, read acceses are allowed)."] #[inline(always)] pub fn ur(&self) -> UR_R { UR_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - User write enable: '0': Disabled (user, write acceses are NOT allowed). '1': Enabled (user, write acceses are allowed)."] #[inline(always)] pub fn uw(&self) -> UW_R { UW_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - User execute enable: '0': Disabled (user, execute acceses are NOT allowed). 
'1': Enabled (user, execute acceses are allowed)."] #[inline(always)] pub fn ux(&self) -> UX_R { UX_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - Privileged read enable: '0': Disabled (privileged, read acceses are NOT allowed). '1': Enabled (privileged, read acceses are allowed)."] #[inline(always)] pub fn pr(&self) -> PR_R { PR_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - Privileged write enable: '0': Disabled (privileged, write acceses are NOT allowed). '1': Enabled (privileged, write acceses are allowed)."] #[inline(always)] pub fn pw(&self) -> PW_R { PW_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 5 - Privileged execute enable: '0': Disabled (privileged, execute acceses are NOT allowed). '1': Enabled (privileged, execute acceses are allowed)."] #[inline(always)] pub fn px(&self) -> PX_R { PX_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 6 - Non-secure: '0': Secure (secure accesses allowed, non-secure access NOT allowed). '1': Non-secure (both secure and non-secure accesses allowed)."] #[inline(always)] pub fn ns(&self) -> NS_R { NS_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 8 - This field specifies protection context identifier based access control for protection context '0'."] #[inline(always)] pub fn pc_mask_0(&self) -> PC_MASK_0_R { PC_MASK_0_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bits 9:23 - This field specifies protection context identifier based access control. Bit i: protection context i+1 enable. If '0', protection context i+1 access is disabled; i.e. not allowed. If '1', protection context i+1 access is enabled; i.e. allowed."] #[inline(always)] pub fn pc_mask_15_to_1(&self) -> PC_MASK_15_TO_1_R { PC_MASK_15_TO_1_R::new(((self.bits >> 9) & 0x7fff) as u16) } #[doc = "Bits 24:28 - This field specifies the region size: '0'-'6': Undefined. 
'7': 256 B region '8': 512 B region '9': 1 KB region '10': 2 KB region '11': 4 KB region '12': 8 KB region '13': 16 KB region '14': 32 KB region '15': 64 KB region '16': 128 KB region '17': 256 KB region '18': 512 KB region '19': 1 MB region '20': 2 MB region '21': 4 MB region '22': 8 MB region '23': 16 MB region '24': 32 MB region '25': 64 MB region '26': 128 MB region '27': 256 MB region '28': 512 MB region '39': 1 GB region '30': 2 GB region '31': 4 GB region"] #[inline(always)] pub fn region_size(&self) -> REGION_SIZE_R { REGION_SIZE_R::new(((self.bits >> 24) & 0x1f) as u8) } #[doc = "Bit 30 - This field specifies if the PC field participates in the 'matching' process or the 'access evaluation' process: '0': PC field participates in 'access evalution'. '1': PC field participates in 'matching'. Note that it is possible to define different access control for multiple protection contexts by using multiple protection structures with the same address region and PC_MATCH set to '1'."] #[inline(always)] pub fn pc_match(&self) -> PC_MATCH_R { PC_MATCH_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - Region enable: '0': Disabled. A disabled region will never result in a match on the bus transfer address. '1': Enabled. Note: a disabled address region performs logic gating to reduce dynamic power consumption."] #[inline(always)] pub fn enabled(&self) -> ENABLED_R { ENABLED_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - User read enable: '0': Disabled (user, read acceses are NOT allowed). '1': Enabled (user, read acceses are allowed)."] #[inline(always)] pub fn ur(&mut self) -> UR_W { UR_W { w: self } } #[doc = "Bit 1 - User write enable: '0': Disabled (user, write acceses are NOT allowed). '1': Enabled (user, write acceses are allowed)."] #[inline(always)] pub fn uw(&mut self) -> UW_W { UW_W { w: self } } #[doc = "Bit 3 - Privileged read enable: '0': Disabled (privileged, read acceses are NOT allowed). 
'1': Enabled (privileged, read acceses are allowed)."] #[inline(always)] pub fn pr(&mut self) -> PR_W { PR_W { w: self } } #[doc = "Bit 4 - Privileged write enable: '0': Disabled (privileged, write acceses are NOT allowed). '1': Enabled (privileged, write acceses are allowed)."] #[inline(always)] pub fn pw(&mut self) -> PW_W { PW_W { w: self } } #[doc = "Bit 6 - Non-secure: '0': Secure (secure accesses allowed, non-secure access NOT allowed). '1': Non-secure (both secure and non-secure accesses allowed)."] #[inline(always)] pub fn ns(&mut self) -> NS_W { NS_W { w: self } } #[doc = "Bits 9:23 - This field specifies protection context identifier based access control. Bit i: protection context i+1 enable. If '0', protection context i+1 access is disabled; i.e. not allowed. If '1', protection context i+1 access is enabled; i.e. allowed."] #[inline(always)] pub fn pc_mask_15_to_1(&mut self) -> PC_MASK_15_TO_1_W { PC_MASK_15_TO_1_W { w: self } } #[doc = "Bits 24:28 - This field specifies the region size: '0'-'6': Undefined. '7': 256 B region '8': 512 B region '9': 1 KB region '10': 2 KB region '11': 4 KB region '12': 8 KB region '13': 16 KB region '14': 32 KB region '15': 64 KB region '16': 128 KB region '17': 256 KB region '18': 512 KB region '19': 1 MB region '20': 2 MB region '21': 4 MB region '22': 8 MB region '23': 16 MB region '24': 32 MB region '25': 64 MB region '26': 128 MB region '27': 256 MB region '28': 512 MB region '39': 1 GB region '30': 2 GB region '31': 4 GB region"] #[inline(always)] pub fn region_size(&mut self) -> REGION_SIZE_W { REGION_SIZE_W { w: self } } #[doc = "Bit 30 - This field specifies if the PC field participates in the 'matching' process or the 'access evaluation' process: '0': PC field participates in 'access evalution'. '1': PC field participates in 'matching'. 
Note that it is possible to define different access control for multiple protection contexts by using multiple protection structures with the same address region and PC_MATCH set to '1'."] #[inline(always)] pub fn pc_match(&mut self) -> PC_MATCH_W { PC_MATCH_W { w: self } } #[doc = "Bit 31 - Region enable: '0': Disabled. A disabled region will never result in a match on the bus transfer address. '1': Enabled. Note: a disabled address region performs logic gating to reduce dynamic power consumption."] #[inline(always)] pub fn enabled(&mut self) -> ENABLED_W { ENABLED_W { w: self } } }
use alloc::boxed::Box; use system::error::{Error, Result, EPERM, ESPIPE}; use system::syscall::Stat; /// Resource seek #[derive(Copy, Clone, Debug)] pub enum ResourceSeek { /// Start point Start(usize), /// Current point Current(isize), /// End point End(isize), } /// A system resource #[allow(unused_variables)] pub trait Resource { /// Duplicate the resource /// Returns `EPERM` if the operation is not supported. fn dup(&self) -> Result<Box<Resource>> { Err(Error::new(EPERM)) } /// Return the path of this resource /// Returns `EPERM` if the operation is not supported. fn path(&self, buf: &mut [u8]) -> Result<usize> { Err(Error::new(EPERM)) } /// Read data to buffer /// Returns `EPERM` if the operation is not supported. fn read(&mut self, buf: &mut [u8]) -> Result<usize> { Err(Error::new(EPERM)) } /// Write to resource /// Returns `EPERM` if the operation is not supported. fn write(&mut self, buf: &[u8]) -> Result<usize> { Err(Error::new(EPERM)) } /// Seek to the given offset /// Returns `ESPIPE` if the operation is not supported. fn seek(&mut self, pos: ResourceSeek) -> Result<usize> { Err(Error::new(ESPIPE)) } /// Get informations about the resource, such as mode and size /// Returns `EPERM` if the operation is not supported. fn stat(&self, stat: &mut Stat) -> Result<()> { Err(Error::new(EPERM)) } /// Sync all buffers /// Returns `EPERM` if the operation is not supported. fn sync(&mut self) -> Result<()> { Err(Error::new(EPERM)) } /// Truncate to the given length /// Returns `EPERM` if the operation is not supported. fn truncate(&mut self, len: usize) -> Result<()> { Err(Error::new(EPERM)) } }
use nu_engine::env_to_string; use nu_protocol::ast::Call; use nu_protocol::engine::{Command, EngineState, Stack}; use nu_protocol::{ Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Value, }; #[derive(Clone)] pub struct Env; impl Command for Env { fn name(&self) -> &str { "env" } fn usage(&self) -> &str { "Display current environment variables" } fn signature(&self) -> nu_protocol::Signature { Signature::build("env").category(Category::Env) } fn run( &self, engine_state: &EngineState, stack: &mut Stack, call: &Call, _input: PipelineData, ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> { let span = call.head; let mut env_vars: Vec<(String, Value)> = stack.get_env_vars(engine_state).into_iter().collect(); env_vars.sort_by(|(name1, _), (name2, _)| name1.cmp(name2)); let mut values = vec![]; for (name, val) in env_vars { let mut cols = vec![]; let mut vals = vec![]; let raw_val = match env_to_string(&name, &val, engine_state, stack) { Ok(raw) => Value::string(raw, span), Err(ShellError::EnvVarNotAString(..)) => Value::nothing(span), Err(e) => return Err(e), }; let val_type = val.get_type(); cols.push("name".into()); vals.push(Value::string(name, span)); cols.push("type".into()); vals.push(Value::string(format!("{}", val_type), span)); cols.push("value".into()); vals.push(val); cols.push("raw".into()); vals.push(raw_val); values.push(Value::Record { cols, vals, span }); } Ok(Value::List { vals: values, span }.into_pipeline_data()) } fn examples(&self) -> Vec<Example> { vec![ Example { description: "Display current path environment variable", example: "env | where name == PATH", result: None, }, Example { description: "Check whether the env variable `MY_ENV_ABC` exists", example: r#"env | any name == MY_ENV_ABC"#, result: Some(Value::test_bool(false)), }, Example { description: "Another way to check whether the env variable `PATH` exists", example: r#"'PATH' in (env).name"#, result: Some(Value::test_bool(true)), }, ] } }
#[doc = "Reader of register RCC_MC_APB4ENSETR"] pub type R = crate::R<u32, super::RCC_MC_APB4ENSETR>; #[doc = "Writer for register RCC_MC_APB4ENSETR"] pub type W = crate::W<u32, super::RCC_MC_APB4ENSETR>; #[doc = "Register RCC_MC_APB4ENSETR `reset()`'s with value 0"] impl crate::ResetValue for super::RCC_MC_APB4ENSETR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "LTDCEN\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LTDCEN_A { #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are\r\n disabled"] B_0X0 = 0, #[doc = "1: Writing enables the peripheral\r\n clocks, reading means that the peripheral clocks\r\n are enabled"] B_0X1 = 1, } impl From<LTDCEN_A> for bool { #[inline(always)] fn from(variant: LTDCEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `LTDCEN`"] pub type LTDCEN_R = crate::R<bool, LTDCEN_A>; impl LTDCEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> LTDCEN_A { match self.bits { false => LTDCEN_A::B_0X0, true => LTDCEN_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == LTDCEN_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == LTDCEN_A::B_0X1 } } #[doc = "Write proxy for field `LTDCEN`"] pub struct LTDCEN_W<'a> { w: &'a mut W, } impl<'a> LTDCEN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: LTDCEN_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(LTDCEN_A::B_0X0) } #[doc = "Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(LTDCEN_A::B_0X1) } #[doc = 
r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "DSIEN\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum DSIEN_A { #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are\r\n disabled"] B_0X0 = 0, #[doc = "1: Writing enables the peripheral\r\n clocks, reading means that the peripheral clocks\r\n are enabled"] B_0X1 = 1, } impl From<DSIEN_A> for bool { #[inline(always)] fn from(variant: DSIEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `DSIEN`"] pub type DSIEN_R = crate::R<bool, DSIEN_A>; impl DSIEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DSIEN_A { match self.bits { false => DSIEN_A::B_0X0, true => DSIEN_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == DSIEN_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == DSIEN_A::B_0X1 } } #[doc = "Write proxy for field `DSIEN`"] pub struct DSIEN_W<'a> { w: &'a mut W, } impl<'a> DSIEN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: DSIEN_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(DSIEN_A::B_0X0) } #[doc = "Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(DSIEN_A::B_0X1) } #[doc = r"Sets the field bit"] 
// NOTE(review): svd2rust-generated peripheral-clock-enable field API. This chunk
// opens inside the write proxy whose `impl` header is above: it finishes that
// proxy's set_bit/clear_bit/bit methods (bit 4), then repeats the same generated
// pattern for DDRPERFMEN (bit 8), USBPHYEN (bit 16) and STGENROEN (bit 20):
// a value enum `*_A`, a `From<*_A> for bool` impl, a reader alias `*_R` with
// variant()/is_b_0x0()/is_b_0x1(), and a write proxy `*_W` that masks and shifts
// its single bit into `W.bits`. The trailing `impl R` / `impl W` expose one
// accessor per field at the documented bit positions (0, 4, 8, 16, 20).
// Generated code — regenerate from the SVD rather than hand-editing.
#[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "DDRPERFMEN\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum DDRPERFMEN_A { #[doc = "0: Writing has no effect, reading means\r\n that the APB clock is disabled"] B_0X0 = 0, #[doc = "1: Writing enables the APB clock,\r\n reading means that the APB clock is\r\n enabled"] B_0X1 = 1, } impl From<DDRPERFMEN_A> for bool { #[inline(always)] fn from(variant: DDRPERFMEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `DDRPERFMEN`"] pub type DDRPERFMEN_R = crate::R<bool, DDRPERFMEN_A>; impl DDRPERFMEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DDRPERFMEN_A { match self.bits { false => DDRPERFMEN_A::B_0X0, true => DDRPERFMEN_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == DDRPERFMEN_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == DDRPERFMEN_A::B_0X1 } } #[doc = "Write proxy for field `DDRPERFMEN`"] pub struct DDRPERFMEN_W<'a> { w: &'a mut W, } impl<'a> DDRPERFMEN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: DDRPERFMEN_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that the APB clock is disabled"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(DDRPERFMEN_A::B_0X0) } #[doc = "Writing enables the APB clock, reading means that the APB clock is enabled"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(DDRPERFMEN_A::B_0X1) } #[doc = r"Sets 
the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } #[doc = "USBPHYEN\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum USBPHYEN_A { #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are\r\n disabled"] B_0X0 = 0, #[doc = "1: Writing enables the peripheral\r\n clocks, reading means that the peripheral clocks\r\n are enabled"] B_0X1 = 1, } impl From<USBPHYEN_A> for bool { #[inline(always)] fn from(variant: USBPHYEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `USBPHYEN`"] pub type USBPHYEN_R = crate::R<bool, USBPHYEN_A>; impl USBPHYEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> USBPHYEN_A { match self.bits { false => USBPHYEN_A::B_0X0, true => USBPHYEN_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == USBPHYEN_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == USBPHYEN_A::B_0X1 } } #[doc = "Write proxy for field `USBPHYEN`"] pub struct USBPHYEN_W<'a> { w: &'a mut W, } impl<'a> USBPHYEN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: USBPHYEN_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(USBPHYEN_A::B_0X0) } #[doc = "Writing enables the peripheral clocks, reading means that the peripheral clocks are enabled"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { 
self.variant(USBPHYEN_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "STGENROEN\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum STGENROEN_A { #[doc = "0: Writing has no effect, reading means\r\n that the peripheral clocks are\r\n disabled"] B_0X0 = 0, #[doc = "1: Writing enables the peripheral\r\n clocks, reading means that the peripheral clocks\r\n are enabled"] B_0X1 = 1, } impl From<STGENROEN_A> for bool { #[inline(always)] fn from(variant: STGENROEN_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `STGENROEN`"] pub type STGENROEN_R = crate::R<bool, STGENROEN_A>; impl STGENROEN_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> STGENROEN_A { match self.bits { false => STGENROEN_A::B_0X0, true => STGENROEN_A::B_0X1, } } #[doc = "Checks if the value of the field is `B_0X0`"] #[inline(always)] pub fn is_b_0x0(&self) -> bool { *self == STGENROEN_A::B_0X0 } #[doc = "Checks if the value of the field is `B_0X1`"] #[inline(always)] pub fn is_b_0x1(&self) -> bool { *self == STGENROEN_A::B_0X1 } } #[doc = "Write proxy for field `STGENROEN`"] pub struct STGENROEN_W<'a> { w: &'a mut W, } impl<'a> STGENROEN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: STGENROEN_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Writing has no effect, reading means that the peripheral clocks are disabled"] #[inline(always)] pub fn b_0x0(self) -> &'a mut W { self.variant(STGENROEN_A::B_0X0) } #[doc = "Writing enables the peripheral clocks, reading means that the peripheral clocks are 
enabled"] #[inline(always)] pub fn b_0x1(self) -> &'a mut W { self.variant(STGENROEN_A::B_0X1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20); self.w } } impl R { #[doc = "Bit 0 - LTDCEN"] #[inline(always)] pub fn ltdcen(&self) -> LTDCEN_R { LTDCEN_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 4 - DSIEN"] #[inline(always)] pub fn dsien(&self) -> DSIEN_R { DSIEN_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 8 - DDRPERFMEN"] #[inline(always)] pub fn ddrperfmen(&self) -> DDRPERFMEN_R { DDRPERFMEN_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 16 - USBPHYEN"] #[inline(always)] pub fn usbphyen(&self) -> USBPHYEN_R { USBPHYEN_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 20 - STGENROEN"] #[inline(always)] pub fn stgenroen(&self) -> STGENROEN_R { STGENROEN_R::new(((self.bits >> 20) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - LTDCEN"] #[inline(always)] pub fn ltdcen(&mut self) -> LTDCEN_W { LTDCEN_W { w: self } } #[doc = "Bit 4 - DSIEN"] #[inline(always)] pub fn dsien(&mut self) -> DSIEN_W { DSIEN_W { w: self } } #[doc = "Bit 8 - DDRPERFMEN"] #[inline(always)] pub fn ddrperfmen(&mut self) -> DDRPERFMEN_W { DDRPERFMEN_W { w: self } } #[doc = "Bit 16 - USBPHYEN"] #[inline(always)] pub fn usbphyen(&mut self) -> USBPHYEN_W { USBPHYEN_W { w: self } } #[doc = "Bit 20 - STGENROEN"] #[inline(always)] pub fn stgenroen(&mut self) -> STGENROEN_W { STGENROEN_W { w: self } } }
use std::sync::Arc;
use std::collections::LinkedList;
use std::mem;

use role::Kind;

mod atomic_cell;
use self::atomic_cell::AtomicCell;

/// A waiter that can be woken up once its rendezvous partner arrives.
pub trait Notify {
    /// Resume execution context.
    ///
    /// It should be no-op to notify currently executing context.
    fn notify(self);
}

/// A rendezvous queue that pairs `Send` waiters with `Receive` waiters.
///
/// `remote` is the cell shared between all clones of this queue; `local`
/// holds this handle's own (single) pending message; `pocket` is a spare
/// boxed list used as the exchange buffer for `AtomicCell::swap` so the
/// hot path never allocates; `inbox` is where paired-off lists are
/// eventually returned for reuse (see `List::notify_all`).
#[derive(Debug)]
pub struct Queue<T: Notify> {
    remote: Arc<AtomicCell<List<T>>>,
    local: List<T>,
    pocket: Box<List<T>>,
    inbox: Arc<AtomicCell<List<T>>>,
}

/// A batch of pending messages, all of the same `kind`
/// (`kind == None` means the batch is uninitialized/empty).
#[derive(Debug)]
struct List<T> {
    kind: Option<Kind>,
    list: LinkedList<Msg<T>>,
}

/// One pending waiter plus the inbox its node is returned to after use.
#[derive(Debug)]
struct Msg<T> {
    value: Option<T>,
    inbox: Arc<AtomicCell<List<T>>>,
}

impl<T: Notify> List<T> {
    // A one-node list with an empty slot; `init` fills it in later.
    fn new(inbox: Arc<AtomicCell<Self>>) -> Self {
        let mut list = LinkedList::new();
        list.push_front(Msg { value: None, inbox, });
        List { kind: None, list, }
    }

    fn empty() -> Self {
        List { kind: None, list: LinkedList::new(), }
    }

    /// Assume this list has single node with `None` value, and init.
    fn init(&mut self, kind: Kind, value: T) {
        debug_assert!(self.kind.is_none());
        debug_assert!(self.list.len() == 1);
        let msg = self.list.front_mut().unwrap();
        debug_assert!(msg.value.is_none());
        self.kind = Some(kind);
        msg.value = Some(value);
    }

    fn len(&self) -> usize {
        self.list.len()
    }

    fn is_empty(&self) -> bool {
        self.list.is_empty()
    }

    fn append(&mut self, other: &mut Self) {
        self.list.append(&mut other.list);
    }

    // Detach the front node as its own single-element list (kind erased),
    // leaving the remainder in `self`. O(1) thanks to `split_off`.
    fn pop_list(&mut self) -> Option<Self> {
        if self.is_empty() {
            return None
        }
        let tail = self.list.split_off(1);
        Some(List { kind: None, list: mem::replace(&mut self.list, tail), })
    }

    /// de-initialize all nodes and gives them back to their inbox
    fn notify_all(&mut self, pocket: &mut Box<Self>) {
        while let Some(mut list) = self.pop_list() {
            let inbox = {
                let msg = &mut list.list.front_mut().unwrap();
                // Wake the waiter, then remember where its node belongs.
                msg.value.take().unwrap().notify();
                Arc::clone(&msg.inbox)
            };
            // Route the emptied node back to its owner's inbox, using
            // `pocket` as the exchange buffer so no allocation happens.
            // NOTE(review): swapping a `List` with a `&mut Box<List>`
            // here leans on the shape of `AtomicCell`'s API — confirm
            // against the `atomic_cell` module.
            mem::swap(&mut list, pocket);
            inbox.swap(pocket);
            debug_assert!(pocket.is_empty());
            mem::swap(&mut list, pocket);
        }
    }
}

impl<T: Notify> Queue<T> {
    pub fn new() -> Self {
        let inbox = Arc::new(AtomicCell::new(List::empty().into()));
        Queue {
            remote: Arc::new(AtomicCell::new(List::empty().into())),
            local: List::new(inbox.clone()),
            pocket: List::empty().into(),
            inbox,
        }
    }

    /// Publish `value` under `kind` and pair off opposite-kind waiters.
    ///
    /// Protocol: the shared `remote` cell always holds the set of pending
    /// waiters, all of one kind. We fetch it (swap into `pocket`), merge our
    /// `local` message in — pairing equal-length prefixes of opposite kinds
    /// into `to_notify` — push the remainder back, then wake the paired
    /// waiters. If the cell was non-empty when we pushed back, another
    /// thread raced us, so loop and re-merge. (`AtomicCell::swap` is
    /// presumably an atomic exchange of the boxed list — see `atomic_cell`.)
    pub fn wait_or_notify(&mut self, kind: Kind, value: T) {
        debug_assert!(self.pocket.is_empty());
        self.local.init(kind, value);
        // fetch remote to pocket
        self.remote.swap(&mut self.pocket);
        loop {
            let mut to_notify = List::empty();
            // merge `self.local` to `self.pocket`
            match (self.local.kind, self.pocket.kind) {
                // contains different kind
                (Some(Kind::Send), Some(Kind::Receive)) | (Some(Kind::Receive), Some(Kind::Send)) => {
                    // now self.local cannot be longer than self.pocket
                    if self.local.len() > self.pocket.len() {
                        mem::swap(&mut self.local, &mut *self.pocket);
                    }
                    // Everything past the paired prefix stays pending.
                    let remain = List {
                        kind: self.pocket.kind,
                        list: self.pocket.list.split_off(self.local.len()),
                    };
                    let mut remote = mem::replace(&mut *self.pocket, remain);
                    to_notify.append(&mut self.local);
                    to_notify.append(&mut remote);
                }
                // both contain same kind or either one is empty
                _ => {
                    if self.pocket.kind.is_none() {
                        self.pocket.kind = self.local.kind;
                    }
                    self.pocket.append(&mut self.local);
                }
            }
            // push to remote
            self.remote.swap(&mut self.pocket);
            to_notify.notify_all(&mut self.pocket);
            // is remote changed since last fetch?
            if self.pocket.is_empty() {
                return
            }
        }
    }
}

impl<T: Notify> Default for Queue<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T: Notify> Clone for Queue<T> {
    // Clones share `remote` (the rendezvous point) but get a fresh
    // `local`/`pocket`/`inbox`, since those are per-handle scratch state.
    fn clone(&self) -> Self {
        let inbox = Arc::new(AtomicCell::new(List::empty().into()));
        Queue {
            remote: self.remote.clone(),
            local: List::new(inbox.clone()),
            pocket: List::empty().into(),
            inbox,
        }
    }
}
use std::num::ParseIntError;

/// Parse a decimal string into an `i32`.
///
/// # Errors
/// Returns the underlying [`ParseIntError`] when `string` is not a valid
/// base-10 `i32` (empty, non-numeric, or out of range).
pub fn int(string: &str) -> Result<i32, ParseIntError> {
    // Target type is inferred from the return type; no turbofish needed.
    string.parse()
}
#[doc = "Power Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [power_ctl](power_ctl) module"] pub type POWER_CTL = crate::Reg<u32, _POWER_CTL>; #[allow(missing_docs)] #[doc(hidden)] pub struct _POWER_CTL; #[doc = "`read()` method returns [power_ctl::R](power_ctl::R) reader structure"] impl crate::Readable for POWER_CTL {} #[doc = "`write(|w| ..)` method takes [power_ctl::W](power_ctl::W) writer structure"] impl crate::Writable for POWER_CTL {} #[doc = "Power Control Register"] pub mod power_ctl; #[doc = "USB IO Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [usbio_ctl](usbio_ctl) module"] pub type USBIO_CTL = crate::Reg<u32, _USBIO_CTL>; #[allow(missing_docs)] #[doc(hidden)] pub struct _USBIO_CTL; #[doc = "`read()` method returns [usbio_ctl::R](usbio_ctl::R) reader structure"] impl crate::Readable for USBIO_CTL {} #[doc = "`write(|w| ..)` method takes [usbio_ctl::W](usbio_ctl::W) writer structure"] impl crate::Writable for USBIO_CTL {} #[doc = "USB IO Control Register"] pub mod usbio_ctl; #[doc = "Flow Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [flow_ctl](flow_ctl) module"] pub type FLOW_CTL = crate::Reg<u32, _FLOW_CTL>; #[allow(missing_docs)] #[doc(hidden)] pub struct _FLOW_CTL; #[doc = "`read()` method returns [flow_ctl::R](flow_ctl::R) reader structure"] impl crate::Readable for FLOW_CTL {} #[doc = "`write(|w| ..)` method takes [flow_ctl::W](flow_ctl::W) writer structure"] impl crate::Writable for FLOW_CTL {} #[doc = "Flow Control Register"] pub mod flow_ctl; #[doc = "LPM Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [lpm_ctl](lpm_ctl) module"] pub type LPM_CTL = crate::Reg<u32, _LPM_CTL>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LPM_CTL; #[doc = "`read()` method returns [lpm_ctl::R](lpm_ctl::R) reader structure"] impl crate::Readable for LPM_CTL {} #[doc = "`write(|w| ..)` method takes [lpm_ctl::W](lpm_ctl::W) writer structure"] impl crate::Writable for LPM_CTL {} #[doc = "LPM Control Register"] pub mod lpm_ctl; #[doc = "LPM Status register\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [lpm_stat](lpm_stat) module"] pub type LPM_STAT = crate::Reg<u32, _LPM_STAT>; #[allow(missing_docs)] #[doc(hidden)] pub struct _LPM_STAT; #[doc = "`read()` method returns [lpm_stat::R](lpm_stat::R) reader structure"] impl crate::Readable for LPM_STAT {} #[doc = "LPM Status register"] pub mod lpm_stat; #[doc = "USB SOF, BUS RESET and EP0 Interrupt Status\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [intr_sie](intr_sie) module"] pub type INTR_SIE = crate::Reg<u32, _INTR_SIE>; #[allow(missing_docs)] #[doc(hidden)] pub struct _INTR_SIE; #[doc = "`read()` method returns [intr_sie::R](intr_sie::R) reader structure"] impl crate::Readable for INTR_SIE {} #[doc = "`write(|w| ..)` method takes [intr_sie::W](intr_sie::W) writer structure"] impl crate::Writable for INTR_SIE {} #[doc = "USB SOF, BUS RESET and EP0 Interrupt Status"] pub mod intr_sie; #[doc = "USB SOF, BUS RESET and EP0 Interrupt Set\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [intr_sie_set](intr_sie_set) module"] pub type INTR_SIE_SET = crate::Reg<u32, _INTR_SIE_SET>; #[allow(missing_docs)] #[doc(hidden)] pub struct _INTR_SIE_SET; #[doc = "`read()` method returns [intr_sie_set::R](intr_sie_set::R) reader structure"] impl crate::Readable for INTR_SIE_SET {} #[doc = "`write(|w| ..)` method takes [intr_sie_set::W](intr_sie_set::W) writer structure"] impl crate::Writable for INTR_SIE_SET {} #[doc = "USB SOF, BUS RESET and EP0 Interrupt Set"] pub mod intr_sie_set; #[doc = "USB SOF, BUS RESET and EP0 Interrupt Mask\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [intr_sie_mask](intr_sie_mask) module"] pub type INTR_SIE_MASK = crate::Reg<u32, _INTR_SIE_MASK>; #[allow(missing_docs)] #[doc(hidden)] pub struct _INTR_SIE_MASK; #[doc = "`read()` method returns [intr_sie_mask::R](intr_sie_mask::R) reader structure"] impl crate::Readable for INTR_SIE_MASK {} #[doc = "`write(|w| ..)` method takes [intr_sie_mask::W](intr_sie_mask::W) writer structure"] impl crate::Writable for INTR_SIE_MASK {} #[doc = "USB SOF, BUS RESET and EP0 Interrupt Mask"] pub mod intr_sie_mask; #[doc = "USB SOF, BUS RESET and EP0 Interrupt Masked\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [intr_sie_masked](intr_sie_masked) module"] pub type INTR_SIE_MASKED = crate::Reg<u32, _INTR_SIE_MASKED>; #[allow(missing_docs)] #[doc(hidden)] pub struct _INTR_SIE_MASKED; #[doc = "`read()` method returns [intr_sie_masked::R](intr_sie_masked::R) reader structure"] impl crate::Readable for INTR_SIE_MASKED {} #[doc = "USB SOF, BUS RESET and EP0 Interrupt Masked"] pub mod intr_sie_masked; #[doc = "Select interrupt level for each interrupt source\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [intr_lvl_sel](intr_lvl_sel) module"] pub type INTR_LVL_SEL = crate::Reg<u32, _INTR_LVL_SEL>; #[allow(missing_docs)] #[doc(hidden)] pub struct _INTR_LVL_SEL; #[doc = "`read()` method returns [intr_lvl_sel::R](intr_lvl_sel::R) reader structure"] impl crate::Readable for INTR_LVL_SEL {} #[doc = "`write(|w| ..)` method takes [intr_lvl_sel::W](intr_lvl_sel::W) writer structure"] impl crate::Writable for INTR_LVL_SEL {} #[doc = "Select interrupt level for each interrupt source"] pub mod intr_lvl_sel; #[doc = "High priority interrupt Cause register\n\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [intr_cause_hi](intr_cause_hi) module"] pub type INTR_CAUSE_HI = crate::Reg<u32, _INTR_CAUSE_HI>; #[allow(missing_docs)] #[doc(hidden)] pub struct _INTR_CAUSE_HI; #[doc = "`read()` method returns [intr_cause_hi::R](intr_cause_hi::R) reader structure"] impl crate::Readable for INTR_CAUSE_HI {} #[doc = "High priority interrupt Cause register"] pub mod intr_cause_hi; #[doc = "Medium priority interrupt Cause register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [intr_cause_med](intr_cause_med) module"] pub type INTR_CAUSE_MED = crate::Reg<u32, _INTR_CAUSE_MED>; #[allow(missing_docs)] #[doc(hidden)] pub struct _INTR_CAUSE_MED; #[doc = "`read()` method returns [intr_cause_med::R](intr_cause_med::R) reader structure"] impl crate::Readable for INTR_CAUSE_MED {} #[doc = "Medium priority interrupt Cause register"] pub mod intr_cause_med; #[doc = "Low priority interrupt Cause register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [intr_cause_lo](intr_cause_lo) module"] pub type INTR_CAUSE_LO = crate::Reg<u32, _INTR_CAUSE_LO>; #[allow(missing_docs)] #[doc(hidden)] pub struct _INTR_CAUSE_LO; #[doc = "`read()` method returns [intr_cause_lo::R](intr_cause_lo::R) reader structure"] impl crate::Readable for INTR_CAUSE_LO {} #[doc = "Low priority interrupt Cause register"] pub mod intr_cause_lo; #[doc = "DFT control\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [dft_ctl](dft_ctl) module"] pub type DFT_CTL = crate::Reg<u32, _DFT_CTL>; #[allow(missing_docs)] #[doc(hidden)] pub struct _DFT_CTL; #[doc = "`read()` method returns [dft_ctl::R](dft_ctl::R) reader structure"] impl crate::Readable for DFT_CTL {} #[doc = "`write(|w| ..)` method takes [dft_ctl::W](dft_ctl::W) writer structure"] impl crate::Writable for DFT_CTL {} #[doc = "DFT control"] pub mod dft_ctl;
use octocrab::models::issues::*;
use reqwest::Url;
use serde::*;

use crate::*;

type DateTime = chrono::DateTime<chrono::Utc>;

/// Flat, serializable projection of an octocrab issue [`Comment`],
/// suitable for CSV output. `user` is flattened to `user_id`, and
/// `sdc_repository` is filled in later via [`RepositryAware`].
#[derive(Serialize, Debug)]
pub struct CommentRec {
    pub id: u64,
    pub node_id: String,
    pub url: Url,
    pub html_url: Url,
    pub body: Option<String>,
    pub body_text: Option<String>,
    pub body_html: Option<String>,
    pub user_id: i64,
    pub created_at: DateTime,
    pub updated_at: Option<DateTime>,
    // "owner/name" of the repository this comment was fetched from;
    // empty until set_repository is called.
    pub sdc_repository: String,
}

// NOTE(review): `RepositryAware` [sic] is a project trait from `crate::*`;
// the misspelling is in the trait's own name and cannot be fixed here.
impl RepositryAware for CommentRec {
    fn set_repository(&mut self, name: String) {
        self.sdc_repository = name;
    }
}

impl From<Comment> for CommentRec {
    // Field-for-field copy; `sdc_repository` starts empty and is assigned
    // separately by the writer loop.
    fn from(from: Comment) -> CommentRec {
        CommentRec {
            id: from.id,
            node_id: from.node_id,
            url: from.url,
            html_url: from.html_url,
            body: from.body,
            body_text: from.body_text,
            body_html: from.body_html,
            user_id: from.user.id,
            created_at: from.created_at,
            updated_at: from.updated_at,
            sdc_repository: String::default(),
        }
    }
}

/// Pages through the issue-comments endpoint of one repository and writes
/// each comment as a [`CommentRec`] CSV row.
pub struct CommentFetcher {
    owner: String,
    name: String,
    // If set, only comments updated at or after this time are requested
    // (passed through as the endpoint's `since` query parameter).
    since: Option<DateTime>,
    octocrab: octocrab::Octocrab,
}

impl CommentFetcher {
    pub fn new(
        owner: String,
        name: String,
        since: Option<DateTime>,
        octocrab: octocrab::Octocrab,
    ) -> Self {
        Self {
            owner,
            name,
            since,
            octocrab,
        }
    }
}

impl UrlConstructor for CommentFetcher {
    fn reponame(&self) -> String {
        format!("{}/{}", self.owner, self.name)
    }

    /// First page URL: `repos/{owner}/{repo}/issues/comments` with the
    /// query string built from `Params` (project type; carries `since`).
    fn entrypoint(&self) -> Option<Url> {
        let param = Params {
            since: self.since,
            ..Default::default()
        };
        let route = format!(
            "repos/{owner}/{repo}/issues/comments?{query}",
            owner = &self.owner,
            repo = &self.name,
            query = param.to_query(),
        );
        self.octocrab.absolute_url(route).ok()
    }
}

impl LoopWriter for CommentFetcher {
    type Model = Comment;
    type Record = CommentRec;
}

impl CommentFetcher {
    /// Fetch every page of comments, writing rows into `wtr` as they
    /// arrive; stops when `write_and_continue` (from `LoopWriter`,
    /// defined elsewhere) yields no next-page URL.
    pub async fn fetch<T: std::io::Write>(&self, mut wtr: csv::Writer<T>) -> octocrab::Result<()> {
        let mut next: Option<Url> = self.entrypoint();
        while let Some(page) = self.octocrab.get_page(&next).await? {
            next = self.write_and_continue(page, &mut wtr);
        }
        Ok(())
    }
}
#[doc = "Register `MPCBB2_VCTR7` reader"] pub type R = crate::R<MPCBB2_VCTR7_SPEC>; #[doc = "Register `MPCBB2_VCTR7` writer"] pub type W = crate::W<MPCBB2_VCTR7_SPEC>; #[doc = "Field `B224` reader - B224"] pub type B224_R = crate::BitReader; #[doc = "Field `B224` writer - B224"] pub type B224_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B225` reader - B225"] pub type B225_R = crate::BitReader; #[doc = "Field `B225` writer - B225"] pub type B225_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B226` reader - B226"] pub type B226_R = crate::BitReader; #[doc = "Field `B226` writer - B226"] pub type B226_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B227` reader - B227"] pub type B227_R = crate::BitReader; #[doc = "Field `B227` writer - B227"] pub type B227_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B228` reader - B228"] pub type B228_R = crate::BitReader; #[doc = "Field `B228` writer - B228"] pub type B228_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B229` reader - B229"] pub type B229_R = crate::BitReader; #[doc = "Field `B229` writer - B229"] pub type B229_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B230` reader - B230"] pub type B230_R = crate::BitReader; #[doc = "Field `B230` writer - B230"] pub type B230_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B231` reader - B231"] pub type B231_R = crate::BitReader; #[doc = "Field `B231` writer - B231"] pub type B231_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B232` reader - B232"] pub type B232_R = crate::BitReader; #[doc = "Field `B232` writer - B232"] pub type B232_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B233` reader - B233"] pub type B233_R = crate::BitReader; #[doc = "Field `B233` writer - B233"] pub type B233_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = 
"Field `B234` reader - B234"] pub type B234_R = crate::BitReader; #[doc = "Field `B234` writer - B234"] pub type B234_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B235` reader - B235"] pub type B235_R = crate::BitReader; #[doc = "Field `B235` writer - B235"] pub type B235_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B236` reader - B236"] pub type B236_R = crate::BitReader; #[doc = "Field `B236` writer - B236"] pub type B236_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B237` reader - B237"] pub type B237_R = crate::BitReader; #[doc = "Field `B237` writer - B237"] pub type B237_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B238` reader - B238"] pub type B238_R = crate::BitReader; #[doc = "Field `B238` writer - B238"] pub type B238_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B239` reader - B239"] pub type B239_R = crate::BitReader; #[doc = "Field `B239` writer - B239"] pub type B239_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B240` reader - B240"] pub type B240_R = crate::BitReader; #[doc = "Field `B240` writer - B240"] pub type B240_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B241` reader - B241"] pub type B241_R = crate::BitReader; #[doc = "Field `B241` writer - B241"] pub type B241_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B242` reader - B242"] pub type B242_R = crate::BitReader; #[doc = "Field `B242` writer - B242"] pub type B242_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B243` reader - B243"] pub type B243_R = crate::BitReader; #[doc = "Field `B243` writer - B243"] pub type B243_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B244` reader - B244"] pub type B244_R = crate::BitReader; #[doc = "Field `B244` writer - B244"] pub type B244_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc 
= "Field `B245` reader - B245"] pub type B245_R = crate::BitReader; #[doc = "Field `B245` writer - B245"] pub type B245_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B246` reader - B246"] pub type B246_R = crate::BitReader; #[doc = "Field `B246` writer - B246"] pub type B246_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B247` reader - B247"] pub type B247_R = crate::BitReader; #[doc = "Field `B247` writer - B247"] pub type B247_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B248` reader - B248"] pub type B248_R = crate::BitReader; #[doc = "Field `B248` writer - B248"] pub type B248_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B249` reader - B249"] pub type B249_R = crate::BitReader; #[doc = "Field `B249` writer - B249"] pub type B249_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B250` reader - B250"] pub type B250_R = crate::BitReader; #[doc = "Field `B250` writer - B250"] pub type B250_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B251` reader - B251"] pub type B251_R = crate::BitReader; #[doc = "Field `B251` writer - B251"] pub type B251_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B252` reader - B252"] pub type B252_R = crate::BitReader; #[doc = "Field `B252` writer - B252"] pub type B252_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B253` reader - B253"] pub type B253_R = crate::BitReader; #[doc = "Field `B253` writer - B253"] pub type B253_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B254` reader - B254"] pub type B254_R = crate::BitReader; #[doc = "Field `B254` writer - B254"] pub type B254_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; #[doc = "Field `B255` reader - B255"] pub type B255_R = crate::BitReader; #[doc = "Field `B255` writer - B255"] pub type B255_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>; 
impl R { #[doc = "Bit 0 - B224"] #[inline(always)] pub fn b224(&self) -> B224_R { B224_R::new((self.bits & 1) != 0) } #[doc = "Bit 1 - B225"] #[inline(always)] pub fn b225(&self) -> B225_R { B225_R::new(((self.bits >> 1) & 1) != 0) } #[doc = "Bit 2 - B226"] #[inline(always)] pub fn b226(&self) -> B226_R { B226_R::new(((self.bits >> 2) & 1) != 0) } #[doc = "Bit 3 - B227"] #[inline(always)] pub fn b227(&self) -> B227_R { B227_R::new(((self.bits >> 3) & 1) != 0) } #[doc = "Bit 4 - B228"] #[inline(always)] pub fn b228(&self) -> B228_R { B228_R::new(((self.bits >> 4) & 1) != 0) } #[doc = "Bit 5 - B229"] #[inline(always)] pub fn b229(&self) -> B229_R { B229_R::new(((self.bits >> 5) & 1) != 0) } #[doc = "Bit 6 - B230"] #[inline(always)] pub fn b230(&self) -> B230_R { B230_R::new(((self.bits >> 6) & 1) != 0) } #[doc = "Bit 7 - B231"] #[inline(always)] pub fn b231(&self) -> B231_R { B231_R::new(((self.bits >> 7) & 1) != 0) } #[doc = "Bit 8 - B232"] #[inline(always)] pub fn b232(&self) -> B232_R { B232_R::new(((self.bits >> 8) & 1) != 0) } #[doc = "Bit 9 - B233"] #[inline(always)] pub fn b233(&self) -> B233_R { B233_R::new(((self.bits >> 9) & 1) != 0) } #[doc = "Bit 10 - B234"] #[inline(always)] pub fn b234(&self) -> B234_R { B234_R::new(((self.bits >> 10) & 1) != 0) } #[doc = "Bit 11 - B235"] #[inline(always)] pub fn b235(&self) -> B235_R { B235_R::new(((self.bits >> 11) & 1) != 0) } #[doc = "Bit 12 - B236"] #[inline(always)] pub fn b236(&self) -> B236_R { B236_R::new(((self.bits >> 12) & 1) != 0) } #[doc = "Bit 13 - B237"] #[inline(always)] pub fn b237(&self) -> B237_R { B237_R::new(((self.bits >> 13) & 1) != 0) } #[doc = "Bit 14 - B238"] #[inline(always)] pub fn b238(&self) -> B238_R { B238_R::new(((self.bits >> 14) & 1) != 0) } #[doc = "Bit 15 - B239"] #[inline(always)] pub fn b239(&self) -> B239_R { B239_R::new(((self.bits >> 15) & 1) != 0) } #[doc = "Bit 16 - B240"] #[inline(always)] pub fn b240(&self) -> B240_R { B240_R::new(((self.bits >> 16) & 1) != 0) } #[doc = 
"Bit 17 - B241"] #[inline(always)] pub fn b241(&self) -> B241_R { B241_R::new(((self.bits >> 17) & 1) != 0) } #[doc = "Bit 18 - B242"] #[inline(always)] pub fn b242(&self) -> B242_R { B242_R::new(((self.bits >> 18) & 1) != 0) } #[doc = "Bit 19 - B243"] #[inline(always)] pub fn b243(&self) -> B243_R { B243_R::new(((self.bits >> 19) & 1) != 0) } #[doc = "Bit 20 - B244"] #[inline(always)] pub fn b244(&self) -> B244_R { B244_R::new(((self.bits >> 20) & 1) != 0) } #[doc = "Bit 21 - B245"] #[inline(always)] pub fn b245(&self) -> B245_R { B245_R::new(((self.bits >> 21) & 1) != 0) } #[doc = "Bit 22 - B246"] #[inline(always)] pub fn b246(&self) -> B246_R { B246_R::new(((self.bits >> 22) & 1) != 0) } #[doc = "Bit 23 - B247"] #[inline(always)] pub fn b247(&self) -> B247_R { B247_R::new(((self.bits >> 23) & 1) != 0) } #[doc = "Bit 24 - B248"] #[inline(always)] pub fn b248(&self) -> B248_R { B248_R::new(((self.bits >> 24) & 1) != 0) } #[doc = "Bit 25 - B249"] #[inline(always)] pub fn b249(&self) -> B249_R { B249_R::new(((self.bits >> 25) & 1) != 0) } #[doc = "Bit 26 - B250"] #[inline(always)] pub fn b250(&self) -> B250_R { B250_R::new(((self.bits >> 26) & 1) != 0) } #[doc = "Bit 27 - B251"] #[inline(always)] pub fn b251(&self) -> B251_R { B251_R::new(((self.bits >> 27) & 1) != 0) } #[doc = "Bit 28 - B252"] #[inline(always)] pub fn b252(&self) -> B252_R { B252_R::new(((self.bits >> 28) & 1) != 0) } #[doc = "Bit 29 - B253"] #[inline(always)] pub fn b253(&self) -> B253_R { B253_R::new(((self.bits >> 29) & 1) != 0) } #[doc = "Bit 30 - B254"] #[inline(always)] pub fn b254(&self) -> B254_R { B254_R::new(((self.bits >> 30) & 1) != 0) } #[doc = "Bit 31 - B255"] #[inline(always)] pub fn b255(&self) -> B255_R { B255_R::new(((self.bits >> 31) & 1) != 0) } } impl W { #[doc = "Bit 0 - B224"] #[inline(always)] #[must_use] pub fn b224(&mut self) -> B224_W<MPCBB2_VCTR7_SPEC, 0> { B224_W::new(self) } #[doc = "Bit 1 - B225"] #[inline(always)] #[must_use] pub fn b225(&mut self) -> 
B225_W<MPCBB2_VCTR7_SPEC, 1> { B225_W::new(self) } #[doc = "Bit 2 - B226"] #[inline(always)] #[must_use] pub fn b226(&mut self) -> B226_W<MPCBB2_VCTR7_SPEC, 2> { B226_W::new(self) } #[doc = "Bit 3 - B227"] #[inline(always)] #[must_use] pub fn b227(&mut self) -> B227_W<MPCBB2_VCTR7_SPEC, 3> { B227_W::new(self) } #[doc = "Bit 4 - B228"] #[inline(always)] #[must_use] pub fn b228(&mut self) -> B228_W<MPCBB2_VCTR7_SPEC, 4> { B228_W::new(self) } #[doc = "Bit 5 - B229"] #[inline(always)] #[must_use] pub fn b229(&mut self) -> B229_W<MPCBB2_VCTR7_SPEC, 5> { B229_W::new(self) } #[doc = "Bit 6 - B230"] #[inline(always)] #[must_use] pub fn b230(&mut self) -> B230_W<MPCBB2_VCTR7_SPEC, 6> { B230_W::new(self) } #[doc = "Bit 7 - B231"] #[inline(always)] #[must_use] pub fn b231(&mut self) -> B231_W<MPCBB2_VCTR7_SPEC, 7> { B231_W::new(self) } #[doc = "Bit 8 - B232"] #[inline(always)] #[must_use] pub fn b232(&mut self) -> B232_W<MPCBB2_VCTR7_SPEC, 8> { B232_W::new(self) } #[doc = "Bit 9 - B233"] #[inline(always)] #[must_use] pub fn b233(&mut self) -> B233_W<MPCBB2_VCTR7_SPEC, 9> { B233_W::new(self) } #[doc = "Bit 10 - B234"] #[inline(always)] #[must_use] pub fn b234(&mut self) -> B234_W<MPCBB2_VCTR7_SPEC, 10> { B234_W::new(self) } #[doc = "Bit 11 - B235"] #[inline(always)] #[must_use] pub fn b235(&mut self) -> B235_W<MPCBB2_VCTR7_SPEC, 11> { B235_W::new(self) } #[doc = "Bit 12 - B236"] #[inline(always)] #[must_use] pub fn b236(&mut self) -> B236_W<MPCBB2_VCTR7_SPEC, 12> { B236_W::new(self) } #[doc = "Bit 13 - B237"] #[inline(always)] #[must_use] pub fn b237(&mut self) -> B237_W<MPCBB2_VCTR7_SPEC, 13> { B237_W::new(self) } #[doc = "Bit 14 - B238"] #[inline(always)] #[must_use] pub fn b238(&mut self) -> B238_W<MPCBB2_VCTR7_SPEC, 14> { B238_W::new(self) } #[doc = "Bit 15 - B239"] #[inline(always)] #[must_use] pub fn b239(&mut self) -> B239_W<MPCBB2_VCTR7_SPEC, 15> { B239_W::new(self) } #[doc = "Bit 16 - B240"] #[inline(always)] #[must_use] pub fn b240(&mut self) -> 
B240_W<MPCBB2_VCTR7_SPEC, 16> { B240_W::new(self) } #[doc = "Bit 17 - B241"] #[inline(always)] #[must_use] pub fn b241(&mut self) -> B241_W<MPCBB2_VCTR7_SPEC, 17> { B241_W::new(self) } #[doc = "Bit 18 - B242"] #[inline(always)] #[must_use] pub fn b242(&mut self) -> B242_W<MPCBB2_VCTR7_SPEC, 18> { B242_W::new(self) } #[doc = "Bit 19 - B243"] #[inline(always)] #[must_use] pub fn b243(&mut self) -> B243_W<MPCBB2_VCTR7_SPEC, 19> { B243_W::new(self) } #[doc = "Bit 20 - B244"] #[inline(always)] #[must_use] pub fn b244(&mut self) -> B244_W<MPCBB2_VCTR7_SPEC, 20> { B244_W::new(self) } #[doc = "Bit 21 - B245"] #[inline(always)] #[must_use] pub fn b245(&mut self) -> B245_W<MPCBB2_VCTR7_SPEC, 21> { B245_W::new(self) } #[doc = "Bit 22 - B246"] #[inline(always)] #[must_use] pub fn b246(&mut self) -> B246_W<MPCBB2_VCTR7_SPEC, 22> { B246_W::new(self) } #[doc = "Bit 23 - B247"] #[inline(always)] #[must_use] pub fn b247(&mut self) -> B247_W<MPCBB2_VCTR7_SPEC, 23> { B247_W::new(self) } #[doc = "Bit 24 - B248"] #[inline(always)] #[must_use] pub fn b248(&mut self) -> B248_W<MPCBB2_VCTR7_SPEC, 24> { B248_W::new(self) } #[doc = "Bit 25 - B249"] #[inline(always)] #[must_use] pub fn b249(&mut self) -> B249_W<MPCBB2_VCTR7_SPEC, 25> { B249_W::new(self) } #[doc = "Bit 26 - B250"] #[inline(always)] #[must_use] pub fn b250(&mut self) -> B250_W<MPCBB2_VCTR7_SPEC, 26> { B250_W::new(self) } #[doc = "Bit 27 - B251"] #[inline(always)] #[must_use] pub fn b251(&mut self) -> B251_W<MPCBB2_VCTR7_SPEC, 27> { B251_W::new(self) } #[doc = "Bit 28 - B252"] #[inline(always)] #[must_use] pub fn b252(&mut self) -> B252_W<MPCBB2_VCTR7_SPEC, 28> { B252_W::new(self) } #[doc = "Bit 29 - B253"] #[inline(always)] #[must_use] pub fn b253(&mut self) -> B253_W<MPCBB2_VCTR7_SPEC, 29> { B253_W::new(self) } #[doc = "Bit 30 - B254"] #[inline(always)] #[must_use] pub fn b254(&mut self) -> B254_W<MPCBB2_VCTR7_SPEC, 30> { B254_W::new(self) } #[doc = "Bit 31 - B255"] #[inline(always)] #[must_use] pub fn b255(&mut self) -> 
B255_W<MPCBB2_VCTR7_SPEC, 31> { B255_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "MPCBBx vector register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mpcbb2_vctr7::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mpcbb2_vctr7::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct MPCBB2_VCTR7_SPEC; impl crate::RegisterSpec for MPCBB2_VCTR7_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`mpcbb2_vctr7::R`](R) reader structure"] impl crate::Readable for MPCBB2_VCTR7_SPEC {} #[doc = "`write(|w| ..)` method takes [`mpcbb2_vctr7::W`](W) writer structure"] impl crate::Writable for MPCBB2_VCTR7_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets MPCBB2_VCTR7 to value 0"] impl crate::Resettable for MPCBB2_VCTR7_SPEC { const RESET_VALUE: Self::Ux = 0; }
//! Example on spawning tasks with different priority. #![no_main] #![no_std] use rtic::app; use stm32_rtic_defmt as _; // global logger + panicking-behavior + memory layout #[app(device = stm32f1xx_hal::pac)] const APP: () = { struct Resources { // Resources go here! } #[init(spawn = [low_prio_task, high_prio_task])] fn init(cx: init::Context) { // Spawn the low priority task first and the the high priority task. cx.spawn.low_prio_task().ok(); cx.spawn.high_prio_task().ok(); // Even though it is spawned later it will run first! defmt::info!("Hello from init!"); } #[idle] fn idle(_cx: idle::Context) -> ! { defmt::info!("Hello from idle!"); loop { continue; } } #[task] fn low_prio_task(_cx: low_prio_task::Context) { defmt::info!("Low prio task!"); } #[task(priority = 2)] fn high_prio_task(_cx: high_prio_task::Context) { defmt::info!("High prio task!"); } // Here we list unused interrupt vectors that can be used to dispatch software tasks // // One needs one free interrupt per priority level used in software tasks. extern "C" { fn TIM2(); fn TIM3(); } };
use maplit::hashmap;

use crate::dom::{element::Element, listener::ListenerRef};

// Tests for `Element::diff`. Most cases pin the generated changeset via an
// insta debug snapshot; the attribute test asserts on individual ops instead.

/// Renaming an element (div -> p) should produce a replace-style changeset.
#[test]
fn test_name_change() {
    let old = Element {
        id: vec![0],
        name: "div".into(),
        ..Default::default()
    };
    let new = Element {
        id: vec![0],
        name: "p".into(),
        ..Default::default()
    };
    let cs = old.diff(Some(&new));
    insta::assert_debug_snapshot!("test_name_change", cs);
}

/// Changing one attribute, removing one, and adding one should yield exactly
/// three ops (remove + two SetAttribute), checked here without a snapshot.
#[test]
fn test_single_element_attribute_change() {
    let old = Element {
        id: vec![0],
        name: "div".into(),
        attributes: hashmap! {
            "class".into() => "one".into(),
            "attribute-which-doesn-t-exist-after-diffing".into() => "1".into()
        },
        ..Default::default()
    };
    let new = Element {
        id: vec![0],
        name: "div".into(),
        attributes: hashmap! {
            "class".into() => "two".into(),
            "new-attribute-added-after-diffing".into() => "value".into()
        },
        ..Default::default()
    };
    let cs = old.diff(Some(&new));
    // One removal + two SetAttribute ops expected.
    assert_eq!(cs.ops.len(), 3);
    // The newly-added attribute must be set...
    assert!(cs.ops.iter().any(|op| {
        match &op.instruction {
            crate::dom::element::diff::changeset::Instruction::SetAttribute { key, value } => {
                key.to_string() == "new-attribute-added-after-diffing".to_string()
                    && value.to_string() == "value".to_string()
            }
            _ => false,
        }
    }));
    // ...and the changed attribute must carry its new value.
    assert!(cs.ops.iter().any(|op| {
        match &op.instruction {
            crate::dom::element::diff::changeset::Instruction::SetAttribute { key, value } => {
                key.to_string() == "class".to_string() && value.to_string() == "two".to_string()
            }
            _ => false,
        }
    }));
}

/// Only the text content differs; snapshot the resulting changeset.
#[test]
fn test_single_element_text_change() {
    let old = Element {
        id: vec![0],
        name: "p".into(),
        text: Some("the cat sat on the mat".into()),
        ..Default::default()
    };
    let new = Element {
        id: vec![0],
        name: "p".into(),
        text: Some("the mat sat on the cat".into()),
        ..Default::default()
    };
    let c = old.diff(Some(&new));
    insta::assert_debug_snapshot!("test_single_element_text_change", c);
}

/// A new keyed child ("b") is appended after the existing child ("a").
#[test]
fn test_add_child_change() {
    let old = Element {
        id: vec![0],
        name: "div".into(),
        children: vec![Element {
            id: vec![0, 0],
            key: Some("a".to_string()),
            name: "p".into(),
            text: Some("the cat sat on the mat".into()),
            ..Default::default()
        }],
        ..Default::default()
    };
    let new = Element {
        id: vec![0],
        name: "div".into(),
        children: vec![
            Element {
                id: vec![0, 0],
                name: "p".into(),
                key: Some("a".to_string()),
                text: Some("the cat sat on the mat".into()),
                ..Default::default()
            },
            Element {
                id: vec![0, 1],
                name: "p".into(),
                key: Some("b".to_string()),
                text: Some("the mat sat on the cat".into()),
                ..Default::default()
            },
        ],
        ..Default::default()
    };
    let c = old.diff(Some(&new));
    insta::assert_debug_snapshot!("test_add_child_change", c);
}

/// A new keyed child ("b") is inserted *before* the existing child ("a").
#[test]
fn test_add_child_before_change() {
    let old = Element {
        id: vec![0],
        name: "div".into(),
        children: vec![Element {
            id: vec![0, 0],
            key: Some("a".to_string()),
            name: "p".into(),
            text: Some("the cat sat on the mat".into()),
            ..Default::default()
        }],
        ..Default::default()
    };
    let new = Element {
        id: vec![0],
        name: "div".into(),
        children: vec![
            Element {
                id: vec![0, 0],
                name: "p".into(),
                key: Some("b".to_string()),
                text: Some("the mat sat on the cat".into()),
                ..Default::default()
            },
            Element {
                id: vec![0, 1],
                name: "p".into(),
                key: Some("a".to_string()),
                text: Some("the cat sat on the mat".into()),
                ..Default::default()
            },
        ],
        ..Default::default()
    };
    let c = old.diff(Some(&new));
    insta::assert_debug_snapshot!("test_add_child_before_change", c);
}

/// Realistic message-list tree: the `new` side adds a whole message-container
/// subtree and reconstructs listeners with owned strings.
/// NOTE(review): `old` builds `ListenerRef::new` from `&str` and `new` from
/// `String` — presumably the constructor takes `impl Into<String>`; confirm.
#[test]
fn test_more_complex_diff() {
    let old = Element {
        id: vec![0],
        name: std::borrow::Cow::Borrowed("div"),
        attributes: hashmap! {
            "class".into() => "message-list".into(),
        },
        listeners: vec![],
        children: vec![
            Element {
                id: vec![0, 0],
                name: std::borrow::Cow::Borrowed("div"),
                attributes: hashmap! {},
                listeners: vec![],
                children: vec![Element {
                    id: vec![0, 0, 0],
                    name: std::borrow::Cow::Borrowed("input"),
                    attributes: hashmap! {},
                    listeners: vec![ListenerRef::new("msg-input", "input")],
                    children: vec![],
                    text: None,
                    key: None,
                }],
                text: None,
                key: None,
            },
            Element {
                id: vec![0, 1],
                name: std::borrow::Cow::Borrowed("div"),
                attributes: hashmap! {},
                listeners: vec![],
                children: vec![Element {
                    id: vec![0, 1, 0],
                    name: std::borrow::Cow::Borrowed("button"),
                    attributes: hashmap! {},
                    listeners: vec![ListenerRef::new("msg-submit", "click")],
                    children: vec![],
                    text: Some(std::borrow::Cow::Borrowed("Send message")),
                    key: None,
                }],
                text: None,
                key: None,
            },
        ],
        text: None,
        key: None,
    };
    let new = Element {
        id: vec![0],
        name: std::borrow::Cow::Borrowed("div"),
        attributes: hashmap! {
            "class".into() => "message-list".into(),
        },
        listeners: vec![],
        children: vec![
            Element {
                id: vec![0, 0],
                name: std::borrow::Cow::Borrowed("div"),
                attributes: hashmap! {},
                listeners: vec![],
                children: vec![Element {
                    id: vec![0, 0, 0],
                    name: std::borrow::Cow::Borrowed("input"),
                    attributes: hashmap! {},
                    listeners: vec![ListenerRef::new(
                        "msg-input".to_string(),
                        "input".to_string(),
                    )],
                    children: vec![],
                    text: None,
                    key: None,
                }],
                text: None,
                key: None,
            },
            Element {
                id: vec![0, 1],
                name: std::borrow::Cow::Borrowed("div"),
                attributes: hashmap! {},
                listeners: vec![],
                children: vec![Element {
                    id: vec![0, 1, 0],
                    name: std::borrow::Cow::Borrowed("button"),
                    attributes: hashmap! {},
                    listeners: vec![ListenerRef::new(
                        "msg-submit".to_string(),
                        "click".to_string(),
                    )],
                    children: vec![],
                    text: Some(std::borrow::Cow::Borrowed("Send message")),
                    key: None,
                }],
                text: None,
                key: None,
            },
            Element {
                id: vec![0, 2],
                name: std::borrow::Cow::Borrowed("div"),
                attributes: hashmap! {
                    "class".into() => "message-container".into(),
                },
                listeners: vec![],
                children: vec![
                    Element {
                        id: vec![0, 2, 0],
                        name: std::borrow::Cow::Borrowed("p"),
                        attributes: hashmap! {
                            "class".into() => "message-sent-at".into(),
                        },
                        listeners: vec![],
                        children: vec![],
                        text: Some(std::borrow::Cow::Borrowed("1970-01-25 06:34:13")),
                        key: None,
                    },
                    Element {
                        id: vec![0, 2, 1],
                        name: std::borrow::Cow::Borrowed("p"),
                        attributes: hashmap! {
                            "class".into() => "message-author".into(),
                        },
                        listeners: vec![],
                        children: vec![],
                        text: Some(std::borrow::Cow::Borrowed("[username not set]")),
                        key: None,
                    },
                    Element {
                        id: vec![0, 2, 2],
                        name: std::borrow::Cow::Borrowed("p"),
                        attributes: hashmap! {
                            "class".into() => "message-contents".into(),
                        },
                        listeners: vec![],
                        children: vec![],
                        text: Some(std::borrow::Cow::Borrowed("sending message")),
                        key: None,
                    },
                ],
                text: None,
                key: None,
            },
        ],
        text: None,
        key: None,
    };
    let c = old.diff(Some(&new));
    insta::assert_debug_snapshot!(c);
}

// Placeholder — child-deletion diffing is not covered yet.
#[test]
fn test_delete_child_change() {}
use crate::component::{CompositeSprite, CompositeSpriteAnimation, CompositeSurfaceCache};
use core::{
    app::AppLifeCycle,
    ecs::{Comp, Universe, WorldRef},
    Scalar,
};

pub type CompositeSpriteAnimationSystemResources<'a> = (
    WorldRef,
    &'a AppLifeCycle,
    Comp<&'a mut CompositeSprite>,
    Comp<&'a mut CompositeSpriteAnimation>,
    Comp<&'a mut CompositeSurfaceCache>,
);

/// Advances sprite animations for all matching entities.
///
/// When an animation is flagged dirty, the current (name, phase) pair is
/// resolved to a concrete sheet/frame and applied to the sprite (rebuilding
/// the surface cache if one is attached); the dirty flag is cleared either
/// way. Every animation is then ticked by the frame's delta time.
pub fn composite_sprite_animation_system(universe: &mut Universe) {
    let (world, lifecycle, ..) =
        universe.query_resources::<CompositeSpriteAnimationSystemResources>();
    let delta = lifecycle.delta_time_seconds() as Scalar;

    for (_, (sprite, anim_state, surface_cache)) in world
        .query::<(
            &mut CompositeSprite,
            &mut CompositeSpriteAnimation,
            Option<&mut CompositeSurfaceCache>,
        )>()
        .iter()
    {
        if anim_state.dirty {
            anim_state.dirty = false;
            // Resolve the active (animation, phase) into an owned (sheet, frame)
            // pair; any lookup miss leaves the sprite untouched.
            let sheet_frame = match &anim_state.current {
                Some((name, phase, _, _)) => anim_state.animations.get(name).and_then(|anim| {
                    anim.frames
                        .get(*phase as usize)
                        .map(|frame| (anim.sheet.clone(), frame.clone()))
                }),
                None => None,
            };
            if let Some(pair) = sheet_frame {
                sprite.set_sheet_frame(Some(pair));
                if let Some(cache) = surface_cache {
                    cache.rebuild();
                }
            }
        }
        anim_state.process(delta);
    }
}
use amethyst::{
    prelude::*,
    assets::{Handle, Prefab, PrefabData, PrefabLoader, RonFormat, ProgressCounter},
    controls::ControlTagPrefab,
    derive::PrefabData,
    core::transform::Transform,
    ecs::{Entity, Write},
    renderer::{camera::CameraPrefab, light::LightPrefab},
    utils::auto_fov::AutoFov,
    Error,
};
use serde::{Deserialize, Serialize};

/// World resource holding the handle of the loaded scene prefab.
#[derive(Default)]
struct Scene {
    handle: Option<Handle<Prefab<ScenePrefabData>>>,
}

/// Per-entity data deserialised from `prefabs/scene.ron`; every component is
/// optional so a RON entry only lists what it needs.
#[derive(Default, Deserialize, Serialize, PrefabData)]
#[serde(default)]
pub struct ScenePrefabData {
    transform: Option<Transform>,
    camera: Option<CameraPrefab>,
    auto_fov: Option<AutoFov>,
    light: Option<LightPrefab>,
    fly_tag: Option<ControlTagPrefab>,
}

/// Kicks off the asynchronous load of the scene prefab and stashes its handle
/// in the `Scene` resource; `progress` tracks completion.
pub fn init(world: &mut World, progress: &mut ProgressCounter) {
    world.exec(
        |(loader, mut scene): (PrefabLoader<'_, ScenePrefabData>, Write<'_, Scene>)| {
            let handle = loader.load("prefabs/scene.ron", RonFormat, progress);
            scene.handle = Some(handle);
        },
    );
}

/// Instantiates the previously loaded scene prefab as a new entity.
///
/// Panics if `init` has not populated the `Scene` resource yet.
pub fn create(world: &mut World) {
    let handle = {
        let scene = world.read_resource::<Scene>();
        scene.handle.clone().unwrap()
    };
    world.create_entity().with(handle).build();
}
//! The server's API
//!
//! ## Overview
//!
//! Spidey exposes its API as a RESTful HTTP service, running over a local
//! network socket and returning results as JSON objects. This type of API was
//! picked for two reasons:
//!
//! 1. I already knew how to do it (which aids coding);
//! 2. There already exist tools to send requests and view responses (which
//!    aids debugging).
//!
//! Neither of these might be particularly *good* reasons, but the alternative
//! would be some brittle ad-hoc text format that would be even harder to write
//! and debug.
//!
//! Anyway, the responses are formatted (mostly) to the [JSON
//! API](http://jsonapi.org) conventions. In practice, this means the
//! following:
//!
//! - Successful responses are contained in a top-level object with one `data`
//!   key;
//! - Failure responses are contained in a top-level object with one `errors`
//!   key, with each error as its own object containing information about the
//!   error.
//!
//! This module contains traits to automatically serialise results into one of
//! these two formats.
//!
//!
//! ## Successes
//!
//! Any command that gets run needs to implement the `api::Endpoint` trait,
//! producing an `api::Result` of a data type that's serialisable to JSON. This
//! will get automatically wrapped up into a `data` top-level object if it's
//! successful. But aside from these restrictions, an endpoint is free to do
//! anything it chooses.
//!
//! For example, here's an implementation of a sample endpoint that returns an
//! array of the program's command-line arguments:
//!
//! ```rust
//! use rustful::Context;
//! use std::env;
//! use serve::api;
//!
//! pub struct Command;
//!
//! impl api::Endpoint for Command {
//!     type Data = Vec<String>;
//!
//!     fn get_results(&self, context: Context) -> api::Result<Self::Data> {
//!         let args = env::args().collect();
//!         api::Result::Data(args)
//!     }
//! }
//! ```
//!
//! The `Command` struct has no options or environment, so here it's an empty
//! struct. The `Data` associated type on the `Endpoint` implementation is
//! `Vec<String>`, which will be serialised into a JSON array of strings.
//! Finally, although there's no way for getting the command-line arguments to
//! *fail*, it has to be wrapped in an `api::Result` type, here using the
//! `api::Result::Data` variant to mean a successful outcome.
//!
//! Executing the command for this endpoint will produce a response similar to
//! the following:
//!
//! ```json
//! {
//!     "data": [
//!         "target/debug/app",
//!         "serve",
//!         "styles.css"
//!     ]
//! }
//! ```
//!
//! ## Errors
//!
//! Any error that gets returned needs to be of the `api::Error` type. Keeping
//! with [JSON API's opinion on errors](http://jsonapi.org/format/#errors), our
//! error objects can have the following fields defined for them:
//!
//! - `status`, the HTTP status as a string;
//! - `detail`, a human-readable explanation of the error;
//! - `source/parameter`, the name of the parameter that was erroneous
//!   (if applicable).
//!
//! While the latter two fields are optional, the first field, the HTTP status,
//! is automatically generated from the status code produced by the error
//! object. This is possible because each variant of error says which HTTP
//! status code it should respond with: a wrong ID being asked for could return
//! **404 Not Found**, or a number failing to be parsed could return **422
//! Unprocessable Entity**.
//!
//! To save each and every endpoint from having to do its own error-checking
//! logic, there exists the `ToAPIResult` trait to convert an `Option` or
//! `Result` value into its equivalent `api::Result` value. For example, a
//! value of type `Result<T, ParseIntError>` can assume that it came about
//! because a string failed to be parsed into an integer, and can be turned
//! into the `api::Result` variant for a failed parse, automatically filling in
//! the name of the parameter in the error message.
//!
//! Similar to the `Result` type in libstd and libcore, there is an `api_try!`
//! macro that will automatically return early when encountering an error. This
//! allows errors to be propagated, while keeping the code nice and simple.
//!
//! Here's a more complete, but still pointless, example of an endpoint that
//! adds two numbers together:
//!
//! ```rust
//! use rustful::Context;
//! use std::env;
//! use serve::api;
//! use serve::api::ToAPIResult;
//!
//! pub struct Command;
//!
//! impl api::Endpoint for Command {
//!     type Data = u32;
//!
//!     fn get_results(&self, context: Context) -> api::Result<u32> {
//!         let a_str = context.variables.get("a").unwrap();
//!         let a = api_try!(a_str.parse().api_result("a"));
//!
//!         let b_str = context.variables.get("b").unwrap();
//!         let b = api_try!(b_str.parse().api_result("b"));
//!
//!         api::Result::Data(a + b)
//!     }
//! }
//! ```
//!
//! The argument to `api_result` is the name of the parameter that should be
//! specified in the error object. Note that there is **no** error case for
//! failing to get a variable from the Rustful context - if either of those
//! calls fail, it's a bug in the router, and should be fixed!

use std::borrow::Cow;
use std::num;
use std::result;
use std::string::ToString;

use rustful::{Context, StatusCode};
use serde;

use files::LoadedFile;

/// An **Endpoint** gets run when the user sends an HTTP request to the server
/// and the router accepts it as a valid request.
pub trait Endpoint: Send + Sync {
    /// The value that gets written back as an HTTP response. This needs to be
    /// `Serialize` so it can be turned into JSON.
    type Data: serde::Serialize;

    /// Run this endpoint-specific code, returning a `Result` object
    /// containing data to be used in the response.
    fn get_results(&self, context: Context) -> Result<Self::Data>;
}

/// The **Result** type provides API-specific error handling.
///
/// This is very similar to the `Result` type in libcore and libstd: there's
/// one variant for success, and another for failure. Unlike the standard
/// type, the failure case is locked to the `Error` type.
pub enum Result<D> {
    /// A successful result; serialised under a top-level `data` key.
    Data(D),
    /// A failed result; serialised under a top-level `errors` key.
    Errors(Error),
}

/// Helper macro for unwrapping `Result` values, when the variant of the
/// result is of the `Result::Errors` type.
///
/// This is basically the API-module equivalent of the `try!` macro in libcore
/// and libstd.
macro_rules! api_try {
    ($e:expr) => ({
        use $crate::serve::api::Result::{Data, Errors};
        match $e {
            Data(e) => e,
            Errors(e) => return Errors(e),
        }
    })
}

// NOTE(review): this impl is written against a pre-1.0 serde API (`&mut S`
// serializers, `MapVisitor`, `visit_map_elt`, `try!`); it will not compile
// against serde 1.x without a rewrite — confirm the pinned serde version.
impl<D> serde::Serialize for Result<D> where D: serde::Serialize {
    fn serialize<S>(&self, serializer: &mut S) -> result::Result<(), S::Error>
    where S: serde::Serializer {
        // Both the `data` and `errors` variants need to be contained in an
        // object in the JSON with that as its name. The `SingleMapVisitor`
        // is just a Serde way of doing this: it'll produce one map
        // element, which *this* serialiser can just `visit_map` into.
        struct SingleMapVisitor<'a, D: 'a> {
            value: &'a D,
            name: &'static str,
            // Whether the single key/value pair has been emitted yet.
            sent: bool,
        }

        impl<'a, D> serde::ser::MapVisitor for SingleMapVisitor<'a, D> where D: serde::Serialize {
            fn visit<S>(&mut self, serializer: &mut S) -> result::Result<Option<()>, S::Error>
            where S: serde::Serializer {
                if self.sent == false {
                    self.sent = true;
                    Ok(Some(try!(serializer.visit_map_elt(self.name, &self.value))))
                } else {
                    // `None` tells serde the map has no more entries.
                    Ok(None)
                }
            }
        }

        match *self {
            Result::Data(ref data) => serializer.visit_map(SingleMapVisitor { value: data, name: "data", sent: false }),
            Result::Errors(ref error) => serializer.visit_map(SingleMapVisitor { value: error, name: "errors", sent: false }),
        }
    }
}

/// Converts a general 'error' object, such as `Option` or `Result`, into one
/// suitable for use in an `api::Result`.
pub trait ToAPIResult<D> {
    /// Wrap `self` into an `api::Result`, attributing any failure to the
    /// request parameter named `parameter_name`.
    fn api_result(self, parameter_name: &'static str) -> Result<D>;
}

// Optional `LoadedFile`s return 'File Not Found' errors.
impl<'a> ToAPIResult<&'a LoadedFile> for Option<&'a LoadedFile> {
    fn api_result(self, parameter_name: &'static str) -> Result<&'a LoadedFile> {
        match self {
            Some(file) => Result::Data(file),
            None => Result::Errors(Error::FileNotFound { parameter: parameter_name }),
        }
    }
}

// Same conversion for mutable file references.
impl<'a> ToAPIResult<&'a mut LoadedFile> for Option<&'a mut LoadedFile> {
    fn api_result(self, parameter_name: &'static str) -> Result<&'a mut LoadedFile> {
        match self {
            Some(file) => Result::Data(file),
            None => Result::Errors(Error::FileNotFound { parameter: parameter_name }),
        }
    }
}

// Failed string parses return 'Unprocessable Entity' errors.
impl<NUM> ToAPIResult<NUM> for result::Result<NUM, num::ParseIntError> where NUM: Copy {
    fn api_result(self, parameter_name: &'static str) -> Result<NUM> {
        match self {
            Ok(num) => Result::Data(num),
            Err(e) => Result::Errors(Error::ParseIntError { detail: e, parameter: parameter_name }),
        }
    }
}

// Missing parameters also return 'Unprocessable Entity' errors.
impl<'a> ToAPIResult<Cow<'a, str>> for Option<Cow<'a, str>> {
    fn api_result(self, parameter_name: &'static str) -> Result<Cow<'a, str>> {
        match self {
            Some(s) => Result::Data(s),
            None => Result::Errors(Error::MissingQuery { parameter: parameter_name }),
        }
    }
}

/// An error that occurred while processing in an `Endpoint` that should be
/// returned to the client program and displayed.
#[derive(PartialEq, Debug)]
pub enum Error {
    /// The file named by a request parameter is not loaded (serialised as 404).
    FileNotFound {
        parameter: &'static str,
    },
    /// A required query parameter was absent (serialised as 422).
    MissingQuery {
        parameter: &'static str,
    },
    /// A parameter failed to parse as an integer (serialised as 422).
    ParseIntError {
        detail: num::ParseIntError,
        parameter: &'static str,
    },
}

impl Error {
    /// Gets the HTTP status code to return for this error. This is only
    /// defined on `Error` objects, as successful results will always return
    /// **200 OK**.
    pub fn status_code(&self) -> StatusCode {
        match *self {
            Error::FileNotFound { .. } => StatusCode::NotFound,
            Error::MissingQuery { .. } => StatusCode::UnprocessableEntity,
            Error::ParseIntError { .. } => StatusCode::UnprocessableEntity,
        }
    }

    /// This error's HTTP status code, as a human-readable string.
    fn status_string(&self) -> String {
        self.status_code().to_string()
    }

    /// Gets the details of this error message, in human-readable format, if
    /// available. What this is depends on the error: for example, a failed
    /// integer parse will have the description of the `ParseIntError` here.
    fn detail(&self) -> Option<String> {
        match *self {
            Error::ParseIntError { ref detail, .. } => Some(detail.to_string()),
            _ => None,
        }
    }

    /// If the error originated from the request URI, returns the parameter
    /// that caused it.
    ///
    /// NOTE(review): `MissingQuery` also carries a `parameter` but is not
    /// reported as a source here — presumably deliberate (the parameter never
    /// appeared in the URI); confirm this is the intended JSON output.
    fn source_parameter<'a>(&'a self) -> Option<&'a &'static str> {
        match *self {
            Error::FileNotFound { ref parameter, .. } => Some(parameter),
            Error::ParseIntError { ref parameter, .. } => Some(parameter),
            Error::MissingQuery { .. } => None,
        }
    }
}

// The JSON API spec requires errors to be objects, which is fine, but also
// requires the `source` field to be an object, which is also fine, but a
// little trickier.
//
// The custom serialiser for `Error` puts the parameter in a `source` object
// for you, but also doesn't add `null` fields for any values that aren't
// present for this error (the default Serde serialiser will do this).
// NOTE(review): like the `Result` serialiser above, this targets a pre-1.0
// serde API (`visit_struct`, `MapVisitor`, `visit_map_elt`).
impl serde::Serialize for Error {
    fn serialize<S>(&self, serializer: &mut S) -> result::Result<(), S::Error>
    where S: serde::Serializer {
        serializer.visit_struct("error", ErrorMapVisitor {
            value: self,
            state: 0,
        })
    }
}

/// Stateful visitor that emits an `Error`'s fields one `visit` call at a time,
/// skipping optional fields that are absent instead of writing `null`s.
struct ErrorMapVisitor<'a> {
    value: &'a Error,
    // Next field to try emitting: 0 = status, 1 = detail, 2 = source.
    state: u8,
}

impl<'a> serde::ser::MapVisitor for ErrorMapVisitor<'a> {
    fn visit<S>(&mut self, serializer: &mut S) -> result::Result<Option<()>, S::Error>
    where S: serde::Serializer {
        // `status` is always present.
        if self.state == 0 {
            self.state += 1;
            let status = self.value.status_string();
            try!(serializer.visit_map_elt("status", &status));
            return Ok(Some(()));
        }

        // `detail` is emitted only when this error variant provides one.
        if self.state == 1 {
            self.state += 1;
            if let Some(detail) = self.value.detail() {
                try!(serializer.visit_map_elt("detail", &detail));
                return Ok(Some(()));
            }
            // otherwise, fall through
        }

        // `source` is wrapped in a `SourceInfo` so it serialises as an object
        // (`{"parameter": …}`), as the JSON API spec requires.
        if self.state == 2 {
            self.state += 1;
            if let Some(parameter) = self.value.source_parameter() {
                try!(serializer.visit_map_elt("source", &SourceInfo { parameter: parameter }));
                return Ok(Some(()));
            }
            // otherwise, fall through again
        }

        // All fields handled: signal end of the map.
        Ok(None)
    }
}

/// Little helper struct to work with the `Error` serialiser above.
#[derive(Serialize)]
struct SourceInfo<'a> {
    parameter: &'a &'static str,
}
use priv_prelude::*;
use super::*;

/// A TCP packet
#[derive(Clone, PartialEq)]
pub struct TcpPacket {
    // Raw packet bytes: 20-byte header (no options supported) + payload.
    buffer: Bytes,
}

impl fmt::Debug for TcpPacket {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Pretty-printer for the flags byte (byte 13 of the header).
        struct Kind(pub u8);

        impl fmt::Debug for Kind {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                let Kind(b) = *self;
                // Mask 0x17 keeps ACK (0x10), RST (0x04), SYN (0x02), FIN (0x01);
                // all 16 combinations under the mask are enumerated below.
                let s = match b & 0x17 {
                    0x00 => "-",
                    0x01 => "FIN",
                    0x02 => "SYN",
                    0x03 => "SYN | FIN",
                    0x04 => "RST",
                    0x05 => "RST | FIN",
                    0x06 => "RST | SYN",
                    0x07 => "RST | SYN | FIN",
                    0x10 => "ACK",
                    0x11 => "ACK | FIN",
                    0x12 => "ACK | SYN",
                    0x13 => "ACK | SYN | FIN",
                    0x14 => "ACK | RST",
                    0x15 => "ACK | RST | FIN",
                    0x16 => "ACK | RST | SYN",
                    0x17 => "ACK | RST | SYN | FIN",
                    _ => unreachable!(),
                };
                write!(f, "{}", s)
            }
        }

        let payload = self.payload();
        f
        .debug_struct("TcpPacket")
        .field("source_port", &self.source_port())
        .field("dest_port", &self.dest_port())
        .field("seq_num", &self.seq_num())
        .field("ack_num", &self.ack_num())
        .field("window_size", &self.window_size())
        .field("kind", &Kind(self.buffer[13]))
        .field("payload", &payload)
        .finish()
    }
}

/// The fields of a TCP header
#[derive(Debug, Clone, Copy)]
pub struct TcpFields {
    /// The source port
    pub source_port: u16,
    /// The destination port
    pub dest_port: u16,
    /// The sequence number
    pub seq_num: u32,
    /// The ACK number
    pub ack_num: u32,
    /// The window size
    pub window_size: u16,
    /// Is this a SYN packet?
    pub syn: bool,
    /// Is this an ACK packet?
    pub ack: bool,
    /// Is this a FIN packet?
    pub fin: bool,
    /// Is this an RST packet?
    pub rst: bool,
}

impl TcpFields {
    /// Get the length of the header described by this `TcpFields`
    // Always 20: this module does not support TCP options.
    pub fn header_len(&self) -> usize {
        20
    }
}

/// Writes the 20-byte TCP header into `buffer` and then fills in the checksum
/// computed over the whole buffer (header + payload) plus the IPv4
/// pseudo-header. `buffer` must already contain the payload at offset 20.
fn set_fields_v4(buffer: &mut [u8], fields: TcpFields, source_ip: Ipv4Addr, dest_ip: Ipv4Addr) {
    NetworkEndian::write_u16(&mut buffer[0..2], fields.source_port);
    NetworkEndian::write_u16(&mut buffer[2..4], fields.dest_port);
    NetworkEndian::write_u32(&mut buffer[4..8], fields.seq_num);
    NetworkEndian::write_u32(&mut buffer[8..12], fields.ack_num);
    // Data offset = 5 * 32-bit words (20 bytes, no options); reserved bits zero.
    buffer[12] = 0x50;
    buffer[13] = {
        (if fields.syn { 0x02 } else { 0 }) |
        (if fields.ack { 0x10 } else { 0 }) |
        (if fields.fin { 0x01 } else { 0 }) |
        (if fields.rst { 0x04 } else { 0 })
    };
    NetworkEndian::write_u16(&mut buffer[14..16], fields.window_size);
    // Checksum (bytes 16..18) must be zero while the checksum is computed;
    // the urgent pointer (bytes 18..20) stays zero.
    NetworkEndian::write_u16(&mut buffer[16..18], 0);
    NetworkEndian::write_u16(&mut buffer[18..20], 0);
    // Pseudo-header uses IP protocol number 6 (TCP).
    let checksum = !checksum::combine(&[
        checksum::pseudo_header_ipv4(
            source_ip,
            dest_ip,
            6,
            buffer.len() as u32,
        ),
        checksum::data(&buffer[..]),
    ]);
    NetworkEndian::write_u16(&mut buffer[16..18], checksum);
}

impl TcpPacket {
    /// Allocate a new TCP packet from the given fields and payload
    pub fn new_from_fields_v4(
        fields: TcpFields,
        source_ip: Ipv4Addr,
        dest_ip: Ipv4Addr,
        payload: Bytes,
    ) -> TcpPacket {
        // NOTE: this will break when TCP options are added
        let len = 20 + payload.len();
        // Safe to leave uninitialised: write_to_buffer_v4 fills every byte.
        let mut buffer = unsafe { BytesMut::uninit(len) };
        TcpPacket::write_to_buffer_v4(&mut buffer, fields, source_ip, dest_ip, payload);
        TcpPacket {
            buffer: buffer.freeze(),
        }
    }

    /// Write a TCP packet to the given empty buffer. The buffer must have the exact correct size.
    pub fn write_to_buffer_v4(
        buffer: &mut [u8],
        fields: TcpFields,
        source_ip: Ipv4Addr,
        dest_ip: Ipv4Addr,
        payload: Bytes,
    ) {
        // NOTE: this will break when TCP options are added
        // Payload first, so the checksum in set_fields_v4 covers it.
        buffer[20..].clone_from_slice(&payload);
        set_fields_v4(buffer, fields, source_ip, dest_ip);
    }

    /// Get the fields of this packet
    pub fn fields(&self) -> TcpFields {
        TcpFields {
            source_port: self.source_port(),
            dest_port: self.dest_port(),
            seq_num: self.seq_num(),
            ack_num: self.ack_num(),
            window_size: self.window_size(),
            syn: self.is_syn(),
            ack: self.is_ack(),
            fin: self.is_fin(),
            rst: self.is_rst(),
        }
    }

    /// Parse a TCP packet from a byte buffer
    pub fn from_bytes(buffer: Bytes) -> TcpPacket {
        TcpPacket {
            buffer,
        }
    }

    /// Set the header fields of a TCP packet
    pub fn set_fields_v4(&mut self, fields: TcpFields, source_ip: Ipv4Addr, dest_ip: Ipv4Addr) {
        // Take the buffer out, mutate it, and put it back frozen.
        let buffer = mem::replace(&mut self.buffer, Bytes::new());
        let mut buffer = BytesMut::from(buffer);
        set_fields_v4(&mut buffer, fields, source_ip, dest_ip);
        self.buffer = buffer.freeze();
    }

    /// Get the source port of the packet.
    pub fn source_port(&self) -> u16 {
        NetworkEndian::read_u16(&self.buffer[0..2])
    }

    /// Get the destination port of the packet.
pub fn dest_port(&self) -> u16 { NetworkEndian::read_u16(&self.buffer[2..4]) } /// Get the sequence number of the packet pub fn seq_num(&self) -> u32 { NetworkEndian::read_u32(&self.buffer[4..8]) } /// Get the ack number of the packet pub fn ack_num(&self) -> u32 { NetworkEndian::read_u32(&self.buffer[8..12]) } /// Get the ack number of the packet pub fn window_size(&self) -> u16 { NetworkEndian::read_u16(&self.buffer[14..16]) } /// Check whether this is a SYN packet pub fn is_syn(&self) -> bool { self.buffer[13] & 0x02 != 0 } /// Check whether this is an ACK packet pub fn is_ack(&self) -> bool { self.buffer[13] & 0x10 != 0 } /// Check whether this is a FIN packet pub fn is_fin(&self) -> bool { self.buffer[13] & 0x01 != 0 } /// Check whether this is an RST packet pub fn is_rst(&self) -> bool { self.buffer[13] & 0x04 != 0 } /// Get the packet's payload data pub fn payload(&self) -> Bytes { let data_offset = 4 * (self.buffer[12] >> 4) as usize; self.buffer.slice_from(data_offset) } /// Get the entire packet as a raw byte buffer. pub fn as_bytes(&self) -> &Bytes { &self.buffer } /// Consume the packet and return the underlying buffer pub fn into_bytes(self) -> Bytes { self.buffer } /// Verify the checksum of the packet. The source/destination IP addresses of the packet are /// needed to calculate the checksum. pub fn verify_checksum_v4( &self, source_ip: Ipv4Addr, dest_ip: Ipv4Addr, ) -> bool { let len = self.buffer.len(); !0 == checksum::combine(&[ checksum::pseudo_header_ipv4(source_ip, dest_ip, 17, len as u32), checksum::data(&self.buffer[..]), ]) } }
// Auto-generated (svd2rust-style) accessor API for the MEMPOWERDOWN register.
// Each memory block gets a reader type (`*_R`), a single-bit write proxy
// (`*_W`), and field accessors on the register-wide `R`/`W` types below.
// Bit layout: 7=ROM, 6=USB, 5..=0 = SRAM5..SRAM0.
#[doc = "Reader of register MEMPOWERDOWN"]
pub type R = crate::R<u32, super::MEMPOWERDOWN>;
#[doc = "Writer for register MEMPOWERDOWN"]
pub type W = crate::W<u32, super::MEMPOWERDOWN>;
#[doc = "Register MEMPOWERDOWN `reset()`'s with value 0"]
impl crate::ResetValue for super::MEMPOWERDOWN {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `ROM`"]
pub type ROM_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ROM`"]
pub struct ROM_W<'a> {
    w: &'a mut W,
}
impl<'a> ROM_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 7 only.
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
#[doc = "Reader of field `USB`"]
pub type USB_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `USB`"]
pub struct USB_W<'a> {
    w: &'a mut W,
}
impl<'a> USB_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `SRAM5`"]
pub type SRAM5_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SRAM5`"]
pub struct SRAM5_W<'a> {
    w: &'a mut W,
}
impl<'a> SRAM5_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `SRAM4`"]
pub type SRAM4_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SRAM4`"]
pub struct SRAM4_W<'a> {
    w: &'a mut W,
}
impl<'a> SRAM4_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `SRAM3`"]
pub type SRAM3_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SRAM3`"]
pub struct SRAM3_W<'a> {
    w: &'a mut W,
}
impl<'a> SRAM3_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `SRAM2`"]
pub type SRAM2_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SRAM2`"]
pub struct SRAM2_W<'a> {
    w: &'a mut W,
}
impl<'a> SRAM2_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `SRAM1`"]
pub type SRAM1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SRAM1`"]
pub struct SRAM1_W<'a> {
    w: &'a mut W,
}
impl<'a> SRAM1_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `SRAM0`"]
pub type SRAM0_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SRAM0`"]
pub struct SRAM0_W<'a> {
    w: &'a mut W,
}
impl<'a> SRAM0_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: no shift needed.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// Read-side field accessors.
impl R {
    #[doc = "Bit 7"]
    #[inline(always)]
    pub fn rom(&self) -> ROM_R {
        ROM_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 6"]
    #[inline(always)]
    pub fn usb(&self) -> USB_R {
        USB_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 5"]
    #[inline(always)]
    pub fn sram5(&self) -> SRAM5_R {
        SRAM5_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 4"]
    #[inline(always)]
    pub fn sram4(&self) -> SRAM4_R {
        SRAM4_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 3"]
    #[inline(always)]
    pub fn sram3(&self) -> SRAM3_R {
        SRAM3_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 2"]
    #[inline(always)]
    pub fn sram2(&self) -> SRAM2_R {
        SRAM2_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 1"]
    #[inline(always)]
    pub fn sram1(&self) -> SRAM1_R {
        SRAM1_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 0"]
    #[inline(always)]
    pub fn sram0(&self) -> SRAM0_R {
        SRAM0_R::new((self.bits & 0x01) != 0)
    }
}
// Write-side field accessors: each returns a proxy borrowing the writer.
impl W {
    #[doc = "Bit 7"]
    #[inline(always)]
    pub fn rom(&mut self) -> ROM_W {
        ROM_W { w: self }
    }
    #[doc = "Bit 6"]
    #[inline(always)]
    pub fn usb(&mut self) -> USB_W {
        USB_W { w: self }
    }
    #[doc = "Bit 5"]
    #[inline(always)]
    pub fn sram5(&mut self) -> SRAM5_W {
        SRAM5_W { w: self }
    }
    #[doc = "Bit 4"]
    #[inline(always)]
    pub fn sram4(&mut self) -> SRAM4_W {
        SRAM4_W { w: self }
    }
    #[doc = "Bit 3"]
    #[inline(always)]
    pub fn sram3(&mut self) -> SRAM3_W {
        SRAM3_W { w: self }
    }
    #[doc = "Bit 2"]
    #[inline(always)]
    pub fn sram2(&mut self) -> SRAM2_W {
        SRAM2_W { w: self }
    }
    #[doc = "Bit 1"]
    #[inline(always)]
    pub fn sram1(&mut self) -> SRAM1_W {
        SRAM1_W { w: self }
    }
    #[doc = "Bit 0"]
    #[inline(always)]
    pub fn sram0(&mut self) -> SRAM0_W {
        SRAM0_W { w: self }
    }
}
use server::protocol::PlaneType;
use server::*;

use fnv::FnvHashMap;
use std::time::Duration;

// Tuning constants for the CTF (capture-the-flag) game mode.
lazy_static! {
    // Per-plane radius within which a flag can be picked up.
    // NOTE(review): the map is declared with key type `Plane` but populated
    // with `PlaneType` values — this only compiles if `server::*` exports
    // `Plane` as (an alias of) `protocol::PlaneType`; confirm.
    pub static ref FLAG_RADIUS: FnvHashMap<Plane, Distance> = {
        let mut map = FnvHashMap::default();

        // These are just random guesses
        // TODO: rev-eng these from official server
        map.insert(
            PlaneType::Predator,
            Distance::new(100.0)
        );
        map.insert(
            PlaneType::Goliath,
            Distance::new(100.0)
        );
        map.insert(
            PlaneType::Tornado,
            Distance::new(100.0)
        );
        map.insert(
            PlaneType::Prowler,
            Distance::new(100.0)
        );
        map.insert(
            PlaneType::Mohawk,
            Distance::new(100.0)
        );

        map
    };

    // TODO: Actually determine this
    /// Distance that the player must be within to cap
    pub static ref CAP_RADIUS: Distance = Distance::new(50.0);

    // Fixed flag spawn positions, keyed by team id (1 = blue, 2 = red).
    pub static ref FLAG_POS: FnvHashMap<Team, Position> = {
        let mut map = FnvHashMap::default();

        // Blue team
        map.insert(Team(1), Position::new(
            Distance::new(-9670.0),
            Distance::new(-1470.0))
        );
        // Red team
        map.insert(Team(2), Position::new(
            Distance::new(8600.0),
            Distance::new(-940.0))
        );

        map
    };

    pub static ref FLAG_RETURN_POS: FnvHashMap<Team, Position> = {
        let mut map = FnvHashMap::default();

        // Flags get returned at the opposite base
        map.insert(Team(2), FLAG_POS[&Team(1)]);
        map.insert(Team(1), FLAG_POS[&Team(2)]);

        map
    };

    // Cooldown before a just-dropped flag may be grabbed again.
    pub static ref FLAG_NO_REGRAB_TIME: Duration = Duration::from_secs(5);

    // Per-team message keys ("blueflag"/"redflag") — presumably protocol
    // message identifiers; verify against the message sender.
    pub static ref FLAG_MESSAGE_TEAM: FnvHashMap<Team, &'static str> = {
        let mut map = FnvHashMap::default();

        map.insert(Team(1), "blueflag");
        map.insert(Team(2), "redflag");

        map
    };
}
//! Vertical math operations crate mod float;
// src/lexer.rs use crate::token::*; pub struct Lexer { input: String, position: usize, // 当前字符位置 read_position: usize, // 当前读取位置(在当前字符位置之后) ch: u8, // 当前字符 } impl Lexer { pub fn new(input: &str) -> Lexer { let mut l = Lexer { input: String::from(input), position: 0, read_position: 0, ch: 0, }; l.read_char(); l } pub fn read_char(&mut self) { if self.read_position >= self.input.len() { self.ch = 0; } else { self.ch = self.input.bytes().nth(self.read_position).unwrap(); } self.position = self.read_position; self.read_position += 1; } pub fn next_token(&mut self) -> Token { let tok: Token; self.skip_whitespace(); match self.ch { b':' => tok = new_token(TokenType::COLON, self.ch), b'"' => { tok = Token { tk_type: TokenType::STRING, literal: String::from(self.read_string()), } } b'=' => { if self.peek_char() == b'=' { self.read_char(); tok = Token { tk_type: TokenType::EQ, literal: String::from("=="), } } else { tok = new_token(TokenType::ASSIGN, self.ch) } } b';' => tok = new_token(TokenType::SEMICOLON, self.ch), b'(' => tok = new_token(TokenType::LPAREN, self.ch), b')' => tok = new_token(TokenType::RPAREN, self.ch), b',' => tok = new_token(TokenType::COMMA, self.ch), b'+' => tok = new_token(TokenType::PLUS, self.ch), b'{' => tok = new_token(TokenType::LBRACE, self.ch), b'}' => tok = new_token(TokenType::RBRACE, self.ch), b'-' => tok = new_token(TokenType::MINUS, self.ch), b'!' 
=> { if self.peek_char() == b'=' { self.read_char(); tok = Token { tk_type: TokenType::NOTEQ, literal: String::from("!="), } } else { tok = new_token(TokenType::BANG, self.ch) } } b'/' => tok = new_token(TokenType::SLASH, self.ch), b'*' => tok = new_token(TokenType::ASTERISK, self.ch), b'<' => tok = new_token(TokenType::LT, self.ch), b'>' => tok = new_token(TokenType::GT, self.ch), b'[' => tok = new_token(TokenType::LBRACKET, self.ch), b']' => tok = new_token(TokenType::RBRACKET, self.ch), 0 => { tok = Token { tk_type: TokenType::EOF, literal: String::new(), } } _ => { if is_letter(self.ch) { let literal = self.read_identifier(); tok = Token { tk_type: lookup_ident(&literal), literal: String::from(literal), }; return tok; } else if self.ch.is_ascii_digit() { tok = Token { tk_type: TokenType::INT, literal: String::from(self.read_number()), }; return tok; } tok = new_token(TokenType::ILLEGAL, self.ch); } } self.read_char(); tok } fn read_identifier(&mut self) -> &str { let position = self.position; while is_letter(self.ch) { self.read_char(); } &self.input[position..self.position] } fn skip_whitespace(&mut self) { loop { match self.ch { b' ' | b'\t' | b'\n' | b'\r' => self.read_char(), _ => return, } } } fn read_number(&mut self) -> &str { let position = self.position; while self.ch.is_ascii_digit() { self.read_char(); } &self.input[position..self.position] } fn peek_char(&mut self) -> u8 { if self.read_position >= self.input.len() { return 0; } else { return self.input.bytes().nth(self.read_position).unwrap(); } } fn read_string(&mut self) -> &str { let position = self.position + 1; loop { self.read_char(); if self.ch == b'"' { break; } } &self.input[position..self.position] } } pub fn new_token(token_type: TokenType, ch: u8) -> Token { let mut literal = String::new(); literal.push(ch as char); Token { tk_type: token_type, literal: literal, } } fn is_letter(ch: u8) -> bool { ch.is_ascii_alphabetic() || ch == b'_' }
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0

// End-to-end tests for payment-channel locking: a sender locks funds with a
// hashed preimage, goes "solo" on-chain, and the channel is then resolved,
// challenged, or timed out.

mod common;
mod mock_chain_test_helper;
mod rpc_chain_test_helper;

#[macro_use]
extern crate rusty_fork;

use anyhow::Result;
use coerce_rt::actor::context::{ActorContext, ActorStatus};
use libra_crypto::HashValue;
use libra_logger::prelude::*;
use libra_types::{
    access_path::DataPath,
    account_address::AccountAddress,
    channel::ChannelResource,
    libra_resource::{make_resource, LibraResource},
    transaction::{Transaction, TransactionPayload},
};
use rpc_chain_test_helper::run_with_rpc_client;
use sgwallet::{
    chain_watcher::{ChainWatcher, Interest, TransactionWithInfo},
    wallet::WalletHandle,
};
use std::{sync::Arc, time::Duration};

// rusty_fork runs each test in a forked child process, isolating the global
// runtime/actor state the wallets set up.
rusty_fork_test! {
    #[test]
    fn run_test_channel_lock_and_then_resolve() {
        if let Err(e) = run_with_rpc_client(|chain_client| {
            common::with_wallet(chain_client.clone(), |rt, sender, receiver| {
                rt.block_on(test_channel_lock_and_resolve(sender, receiver))
            })
        }) {
            panic!("error, {}", e);
        }
    }

    // NOTE(review): "chanllenge" is a typo for "challenge" in the test name;
    // left as-is since renaming may break CI test filters.
    #[test]
    fn run_test_channel_lock_and_then_chanllenge() {
        if let Err(e) = run_with_rpc_client(|chain_client| {
            common::with_wallet(chain_client, |rt, sender, receiver| {
                rt.block_on(test_channel_lock_and_challenge(sender, receiver))
            })
        }) {
            panic!("error, {}", e);
        }
    }

    #[test]
    fn run_test_channel_lock_and_then_timeout() {
        if let Err(e) = run_with_rpc_client(|chain_client| {
            common::with_wallet(chain_client, |rt, sender, receiver| {
                rt.block_on(test_channel_lock_and_timeout(sender, receiver))
            })
        }) {
            panic!("error, {}", e);
        }
    }
}

/// Lock flow where the channel ends up resolved: the sender travels solo,
/// a chain watcher waits for the channel txn at sequence number 3, and both
/// sides must report the channel as opened again afterwards.
async fn test_channel_lock_and_resolve(
    sender: Arc<WalletHandle>,
    receiver: Arc<WalletHandle>,
) -> Result<()> {
    let _sender_init_balance = sender.balance()?;
    let _receiver_init_balance = receiver.balance()?;
    let _gas = common::open_channel(sender.clone(), receiver.clone(), 10000, 10000).await?;
    // Opening the channel bumps both channel sequence numbers to 1.
    assert_eq!(1, sender.channel_sequence_number(receiver.account()).await?);
    assert_eq!(1, receiver.channel_sequence_number(sender.account()).await?);

    let _ = common::transfer(sender.clone(), receiver.clone(), 300).await?;
    assert_eq!(2, sender.channel_sequence_number(receiver.account()).await?);
    assert_eq!(2, receiver.channel_sequence_number(sender.account()).await?);

    // Hash-locked payment: the receiver needs the preimage to claim it.
    let preimage = HashValue::random();
    let lock = preimage.to_vec();
    let _request = sender
        .send_payment(receiver.account(), 500, lock, 10)
        .await?;
    let _gas = sender.force_travel_txn(receiver.account()).await?;
    assert_eq!(3, sender.channel_sequence_number(receiver.account()).await?);

    let sender_channel_handle = sender.channel_handle(receiver.account()).await?;
    let receiver_channel_handle = receiver.channel_handle(sender.account()).await?;
    let sender_channel_handle_clone = sender_channel_handle.clone();
    let receiver_channel_handle_clone = receiver_channel_handle.clone();
    // Background logger: periodically dumps both sides' on-chain channel
    // resource for debugging; runs until the test (process) ends.
    tokio::task::spawn(async move {
        loop {
            tokio::time::delay_for(Duration::from_secs(2)).await;
            let sender_channel_resource = sender_channel_handle
                .get_channel_resource::<ChannelResource>(DataPath::onchain_resource_path(
                    ChannelResource::struct_tag(),
                ))
                .await
                .unwrap();
            let receiver_channel_resource = receiver_channel_handle
                .get_channel_resource::<ChannelResource>(DataPath::onchain_resource_path(
                    ChannelResource::struct_tag(),
                ))
                .await
                .unwrap();
            info!("sender channel_resource: {:?}", sender_channel_resource);
            info!("receiver channel_resource: {:?}", receiver_channel_resource);
        }
    });

    let chain_watcher = ChainWatcher::new(sender.get_chain_client(), 0, 10);
    let actor_context = ActorContext::new();
    let chain_watcher_handle = chain_watcher.start(actor_context.clone()).await?;

    // Block until the channel transaction with sequence number 3 lands.
    let channel_address = sender_channel_handle_clone.channel_address().clone();
    let channel_txn_receiver = chain_watcher_handle
        .add_interest_oneshot(channel_txn_interest_oneshot(channel_address, 3))
        .await?;
    let txn_with_info: TransactionWithInfo = channel_txn_receiver.await?;
    let _resolve_txn_version = txn_with_info.version;

    // delay 1s to let channel handle events
    tokio::time::delay_for(Duration::from_secs(1)).await;

    let sender_channel_resource = sender_channel_handle_clone
        .get_channel_resource::<ChannelResource>(DataPath::onchain_resource_path(
            ChannelResource::struct_tag(),
        ))
        .await?
        .unwrap();
    let receiver_channel_resource = receiver_channel_handle_clone
        .get_channel_resource::<ChannelResource>(DataPath::onchain_resource_path(
            ChannelResource::struct_tag(),
        ))
        .await?
        .unwrap();
    // After resolution, the channel must be open again on both sides.
    assert!(sender_channel_resource.opened());
    assert!(receiver_channel_resource.opened());
    Ok(())
}

/// Lock flow where the receiver already applied a newer state, so the
/// sender's stale solo travel is challenged and the channel ends up closed.
async fn test_channel_lock_and_challenge(
    sender: Arc<WalletHandle>,
    receiver: Arc<WalletHandle>,
) -> Result<()> {
    let _sender_init_balance = sender.balance()?;
    let _receiver_init_balance = receiver.balance()?;
    let _gas = common::open_channel(sender.clone(), receiver.clone(), 10000, 10000).await?;
    assert_eq!(1, sender.channel_sequence_number(receiver.account()).await?);
    assert_eq!(1, receiver.channel_sequence_number(sender.account()).await?);
    let _ = common::transfer(sender.clone(), receiver.clone(), 300).await?;
    assert_eq!(2, sender.channel_sequence_number(receiver.account()).await?);
    assert_eq!(2, receiver.channel_sequence_number(sender.account()).await?);

    let preimage = HashValue::random();
    let lock = preimage.to_vec();
    let request = sender
        .send_payment(receiver.account(), 500, lock, 10)
        .await?;
    // make receiver apply
    let resp = receiver.verify_txn(sender.account(), &request).await?;
    assert!(resp.is_some());
    let resp = resp.unwrap();
    receiver.apply_txn(sender.account(), &resp).await?;
    assert_eq!(3, receiver.channel_sequence_number(sender.account()).await?);

    // but sender didn't receive the signature, so he solo
    sender.force_travel_txn(receiver.account()).await?;
    assert_eq!(3, sender.channel_sequence_number(receiver.account()).await?);

    let sender_channel_handle = sender.channel_handle(receiver.account()).await?;
    let receiver_channel_handle = receiver.channel_handle(sender.account()).await?;

    let chain_watcher = ChainWatcher::new(sender.get_chain_client(), 0, 10);
    let actor_context = ActorContext::new();
    let chain_watcher_handle = chain_watcher.start(actor_context.clone()).await?;
    let channel_address = sender_channel_handle.channel_address().clone();
    let channel_txn_receiver = chain_watcher_handle
        .add_interest_oneshot(channel_txn_interest_oneshot(channel_address, 3))
        .await?;
    let txn_with_info: TransactionWithInfo = channel_txn_receiver.await?;
    // Read the channel resource directly from the account state at the
    // version of the observed channel transaction.
    let channel_state = sender
        .get_chain_client()
        .get_account_state(channel_address, Some(txn_with_info.version))?;
    let channel_resource = channel_state
        .get(&DataPath::onchain_resource_path(ChannelResource::struct_tag()).to_vec())
        .map(|b| make_resource::<ChannelResource>(&b))
        .transpose()?
        .expect("channel resource should exists");
    assert!(
        channel_resource.closed(),
        "channel should be closed, locked: {}",
        channel_resource.locked()
    );
    // delay 1s to let channel handle events
    tokio::time::delay_for(Duration::from_secs(1)).await;
    // A closed channel should stop both channel actors.
    if let Ok(s) = sender_channel_handle.channel_ref().status().await {
        assert!(s == ActorStatus::Stopping || s == ActorStatus::Stopped)
    }
    if let Ok(s) = receiver_channel_handle.channel_ref().status().await {
        assert!(s == ActorStatus::Stopping || s == ActorStatus::Stopped)
    }
    Ok(())
}

/// Lock flow where the receiver is stopped before it can respond: the
/// sender's solo travel should eventually time out and close the channel.
async fn test_channel_lock_and_timeout(
    sender: Arc<WalletHandle>,
    receiver: Arc<WalletHandle>,
) -> Result<()> {
    let _sender_init_balance = sender.balance()?;
    let _receiver_init_balance = receiver.balance()?;
    let _gas = common::open_channel(sender.clone(), receiver.clone(), 10000, 10000).await?;
    assert_eq!(1, sender.channel_sequence_number(receiver.account()).await?);
    assert_eq!(1, receiver.channel_sequence_number(sender.account()).await?);
    let _ = common::transfer(sender.clone(), receiver.clone(), 300).await?;
    assert_eq!(2, sender.channel_sequence_number(receiver.account()).await?);
    assert_eq!(2, receiver.channel_sequence_number(sender.account()).await?);

    let preimage = HashValue::random();
    let lock = preimage.to_vec();
    let _request = sender
        .send_payment(receiver.account(), 500, lock, 10)
        .await?;
    // Take the receiver offline so it can never co-sign or challenge.
    receiver.stop().await?;
    // but sender didn't receive the signature, so he solo
    sender.force_travel_txn(receiver.account()).await?;
    assert_eq!(3, sender.channel_sequence_number(receiver.account()).await?);
    let sender_channel_handle = sender.channel_handle(receiver.account()).await?;

    let chain_watcher = ChainWatcher::new(sender.get_chain_client(), 0, 10);
    let actor_context = ActorContext::new();
    let chain_watcher_handle = chain_watcher.start(actor_context.clone()).await?;

    // wait timeout and close channel
    let channel_address = sender_channel_handle.channel_address().clone();
    let channel_txn_receiver = chain_watcher_handle
        .add_interest_oneshot(channel_txn_interest_oneshot(channel_address, 3))
        .await?;
    let txn_with_info: TransactionWithInfo = channel_txn_receiver.await?;
    let channel_state = sender
        .get_chain_client()
        .get_account_state(channel_address, Some(txn_with_info.version))?;
    let channel_resource = channel_state
        .get(&DataPath::onchain_resource_path(ChannelResource::struct_tag()).to_vec())
        .map(|b| make_resource::<ChannelResource>(&b))
        .transpose()?
        .expect("channel resource should exists");
    assert!(
        channel_resource.closed(),
        "channel should be closed, locked: {}",
        channel_resource.locked()
    );
    // delay 1s to let channel handle events
    tokio::time::delay_for(Duration::from_secs(1)).await;
    if let Ok(s) = sender_channel_handle.channel_ref().status().await {
        assert!(s == ActorStatus::Stopping || s == ActorStatus::Stopped)
    }
    // FIXME: check receiver restart ok
    Ok(())
}

/// Builds a one-shot chain-watcher predicate that matches the channel
/// transaction for `channel_address` at `channel_sequence_number`.
fn channel_txn_interest_oneshot(
    channel_address: AccountAddress,
    channel_sequence_number: u64,
) -> Interest {
    Box::new(move |txn| match &txn.txn {
        Transaction::UserTransaction(s) => {
            if let TransactionPayload::Channel(cp) = s.payload() {
                cp.channel_address() == channel_address
                    && cp.channel_sequence_number() == channel_sequence_number
            } else {
                false
            }
        }
        _ => false,
    })
}
//! Insertion Sort (Chapter 2: Getting Started)
//!
//! ```text
//! for j = 2 to A.length
//!     key = A[j]
//!     // insert A[j] into the sorted sequence A[1 .. j - 1]
//!     i = j - 1
//!     while i > 0 and A[i] > key
//!         A[i + 1] = A[i]
//!         i = i - 1
//!     A[i + 1] = key
//! ```

/// Plain port of the pseudocode: in-place ascending insertion sort.
///
/// Takes any mutable slice; existing callers passing `&mut Vec<i32>` keep
/// working through deref coercion.
pub fn sort(data: &mut [i32]) {
    for j in 1..data.len() {
        let key = data[j];
        // `i` is signed so it can step below 0, mirroring the 1-based
        // pseudocode's `while i > 0` guard.
        let mut i: i32 = j as i32 - 1;
        while i >= 0 && data[i as usize] > key {
            data[(i + 1) as usize] = data[i as usize];
            i -= 1;
        }
        // Write the key exactly once, after all shifts. The previous version
        // had this assignment inside the while loop: the end result was still
        // sorted, but it performed a redundant write per shifted element and
        // diverged from the CLRS pseudocode (whose indentation the module doc
        // also misrendered).
        data[(i + 1) as usize] = key;
    }
}

/// Slightly more rusty version: bubble the new element left with swaps.
pub fn sort2(data: &mut [i32]) {
    for j in 1..data.len() {
        let key = data[j];
        for i in (0..j).rev() {
            if data[i] <= key {
                break;
            }
            data.swap(i + 1, i);
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_sort_output_is_ordered() {
        let mut data = vec![5, 4, 3, 3, 1, 2, 3];
        sort(&mut data);
        assert_eq!(vec![1, 2, 3, 3, 3, 4, 5], data);
    }

    #[test]
    fn test_sort_handles_empty_and_single() {
        let mut empty: Vec<i32> = vec![];
        sort(&mut empty);
        assert!(empty.is_empty());

        let mut one = vec![7];
        sort(&mut one);
        assert_eq!(vec![7], one);
    }

    #[test]
    fn test_sort2_output_is_ordered() {
        let mut data = vec![5, 4, 3, 3, 1, 2, 3];
        sort2(&mut data);
        assert_eq!(vec![1, 2, 3, 3, 3, 4, 5], data);
    }
}
#![cfg(feature = "std")]

// Snapshot tests for `Format` cell-content modifiers applied to various
// target selectors (whole table, rows, columns, cells, combinations).
//
// NOTE(review): extraction appears to have collapsed runs of spaces inside
// the expected-table string literals below, so the cell padding here may not
// match what the renderer actually emits — restore these literals from
// version control if the snapshots fail.

use tabled::settings::{
    object::{Cell, Columns, Object, Rows, Segment},
    Alignment, Format, Modify, Padding, Style,
};

use crate::matrix::Matrix;
use testing_table::test_table;

#[cfg(feature = "color")]
use owo_colors::OwoColorize;

// Wrap every cell (including headers) in brackets.
test_table!(
    formatting_full_test,
    Matrix::new(3, 3).with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))),
    "+-----+------------+------------+------------+"
    "| [N] | [column 0] | [column 1] | [column 2] |"
    "+-----+------------+------------+------------+"
    "| [0] | [0-0] | [0-1] | [0-2] |"
    "+-----+------------+------------+------------+"
    "| [1] | [1-0] | [1-1] | [1-2] |"
    "+-----+------------+------------+------------+"
    "| [2] | [2-0] | [2-1] | [2-2] |"
    "+-----+------------+------------+------------+"
);

// Only the header row is formatted.
test_table!(
    formatting_head_test,
    Matrix::new(3, 3)
        .with(Style::markdown())
        .with(Modify::new(Rows::first()).with(Format::content(|s| format!(":{s}")))),
    "| :N | :column 0 | :column 1 | :column 2 |"
    "|----|-----------|-----------|-----------|"
    "| 0 | 0-0 | 0-1 | 0-2 |"
    "| 1 | 1-0 | 1-1 | 1-2 |"
    "| 2 | 2-0 | 2-1 | 2-2 |"
);

// All data rows (everything but the header) are formatted.
test_table!(
    formatting_row_test,
    Matrix::new(3, 3)
        .with(Style::psql())
        .with(Modify::new(Rows::new(1..)).with(Format::content(|s| format!("<{s}>")))),
    " N | column 0 | column 1 | column 2 "
    "-----+----------+----------+----------"
    " <0> | <0-0> | <0-1> | <0-2> "
    " <1> | <1-0> | <1-1> | <1-2> "
    " <2> | <2-0> | <2-1> | <2-2> "
);

// Only the first column is formatted.
test_table!(
    formatting_column_test,
    Matrix::new(3, 3)
        .with(Style::psql())
        .with(Modify::new(Columns::single(0)).with(Format::content(|s| format!("(x) {s}")))),
    " (x) N | column 0 | column 1 | column 2 "
    "-------+----------+----------+----------"
    " (x) 0 | 0-0 | 0-1 | 0-2 "
    " (x) 1 | 1-0 | 1-1 | 1-2 "
    " (x) 2 | 2-0 | 2-1 | 2-2 "
);

// `.multiline()` applies the formatter to every line of multi-line cells.
test_table!(
    formatting_multiline_test,
    Matrix::new(3, 3)
        .insert((2, 2), "E\nnde\navou\nros")
        .insert((3, 2), "Red\nHat")
        .insert((3, 3), "https://\nwww\n.\nredhat\n.com\n/en")
        .with(Style::psql())
        .with(Modify::new(Segment::all()).with(Format::content(|s| format!("(x) {s}")).multiline())),
    " (x) N | (x) column 0 | (x) column 1 | (x) column 2 "
    "-------+--------------+--------------+--------------"
    " (x) 0 | (x) 0-0 | (x) 0-1 | (x) 0-2 "
    " (x) 1 | (x) 1-0 | (x) E | (x) 1-2 "
    " | | (x) nde | "
    " | | (x) avou | "
    " | | (x) ros | "
    " (x) 2 | (x) 2-0 | (x) Red | (x) https:// "
    " | | (x) Hat | (x) www "
    " | | | (x) . "
    " | | | (x) redhat "
    " | | | (x) .com "
    " | | | (x) /en "
);

// Individual cells addressed by (row, col).
test_table!(
    formatting_cell_test,
    Matrix::new(3, 3)
        .with(Style::psql())
        .with(Modify::new(Cell::new(0, 0)).with(Format::content(|s| format!("(x) {s}"))))
        .with(Modify::new(Cell::new(0, 1)).with(Format::content(|s| format!("(x) {s}"))))
        .with(Modify::new(Cell::new(0, 2)).with(Format::content(|s| format!("(x) {s}")))),
    " (x) N | (x) column 0 | (x) column 1 | column 2 "
    "-------+--------------+--------------+----------"
    " 0 | 0-0 | 0-1 | 0-2 "
    " 1 | 1-0 | 1-1 | 1-2 "
    " 2 | 2-0 | 2-1 | 2-2 "
);

// Union of selectors: first column OR header row.
test_table!(
    formatting_combination_and_test,
    Matrix::new(3, 3)
        .with(Style::psql())
        .with(
            Modify::new(Columns::single(0).and(Rows::single(0)))
                .with(Format::content(|s| format!("(x) {s}"))),
        ),
    " (x) N | (x) column 0 | (x) column 1 | (x) column 2 "
    "-------+--------------+--------------+--------------"
    " (x) 0 | 0-0 | 0-1 | 0-2 "
    " (x) 1 | 1-0 | 1-1 | 1-2 "
    " (x) 2 | 2-0 | 2-1 | 2-2 "
);

// Same union minus the top-left cell via `.not(...)`.
test_table!(
    formatting_combination_not_test,
    Matrix::new(3, 3)
        .with(Style::psql())
        .with(
            Modify::new(Columns::single(0).and(Rows::single(0)).not(Cell::new(0, 0)))
                .with(Format::content(|s| format!("(x) {s}"))),
        ),
    " N | (x) column 0 | (x) column 1 | (x) column 2 "
    "-------+--------------+--------------+--------------"
    " (x) 0 | 0-0 | 0-1 | 0-2 "
    " (x) 1 | 1-0 | 1-1 | 1-2 "
    " (x) 2 | 2-0 | 2-1 | 2-2 "
);

// `.inverse()` selects everything outside the first column.
test_table!(
    formatting_combination_inverse_test,
    Matrix::new(3, 3)
        .with(Style::psql())
        .with(Modify::new(Columns::single(0).inverse()).with(Format::content(|s| format!("(x) {s}")))),
    " N | (x) column 0 | (x) column 1 | (x) column 2 "
    "---+--------------+--------------+--------------"
    " 0 | (x) 0-0 | (x) 0-1 | (x) 0-2 "
    " 1 | (x) 1-0 | (x) 1-1 | (x) 1-2 "
    " 2 | (x) 2-0 | (x) 2-1 | (x) 2-2 "
);

// Intersection: only cells in both the column range and the row range.
test_table!(
    formatting_combination_intersect_test,
    Matrix::new(3, 3)
        .with(Style::psql())
        .with(
            Modify::new(Columns::new(1..3).intersect(Rows::new(1..3)))
                .with(Format::content(|s| format!("(x) {s}"))),
        ),
    " N | column 0 | column 1 | column 2 "
    "---+----------+----------+----------"
    " 0 | (x) 0-0 | (x) 0-1 | 0-2 "
    " 1 | (x) 1-0 | (x) 1-1 | 1-2 "
    " 2 | 2-0 | 2-1 | 2-2 "
);

// Formatter given as a closure (same as formatting_head_test).
test_table!(
    formatting_using_lambda_test,
    Matrix::new(3, 3)
        .with(Style::markdown())
        .with(Modify::new(Rows::first()).with(Format::content(|s| format!(":{s}")))),
    "| :N | :column 0 | :column 1 | :column 2 |"
    "|----|-----------|-----------|-----------|"
    "| 0 | 0-0 | 0-1 | 0-2 |"
    "| 1 | 1-0 | 1-1 | 1-2 |"
    "| 2 | 2-0 | 2-1 | 2-2 |"
);

// Formatter given as a plain function pointer.
test_table!(
    formatting_using_function_test,
    Matrix::new(3, 3)
        .with(Style::markdown())
        .with(Modify::new(Rows::first()).with(Format::content(str::to_uppercase))),
    "| N | COLUMN 0 | COLUMN 1 | COLUMN 2 |"
    "|---|----------|----------|----------|"
    "| 0 | 0-0 | 0-1 | 0-2 |"
    "| 1 | 1-0 | 1-1 | 1-2 |"
    "| 2 | 2-0 | 2-1 | 2-2 |"
);

// `Format::positioned` also receives the cell's (row, col) coordinates.
test_table!(
    format_with_index,
    Matrix::new(3, 3)
        .with(Style::markdown())
        .with(Modify::new(Rows::first()).with(Format::positioned(|a, (b, c)| match (b, c) {
            (0, 0) => "(0, 0)".to_string(),
            (0, 1) => "(0, 1)".to_string(),
            (0, 2) => "(0, 2)".to_string(),
            _ => a.to_string(),
        }))),
    "| (0, 0) | (0, 1) | (0, 2) | column 2 |"
    "|--------|--------|--------|----------|"
    "| 0 | 0-0 | 0-1 | 0-2 |"
    "| 1 | 1-0 | 1-1 | 1-2 |"
    "| 2 | 2-0 | 2-1 | 2-2 |"
);

// Formatting must not disturb previously configured padding/alignment.
test_table!(
    format_doesnt_change_padding,
    Matrix::new(3, 3)
        .with(Modify::new(Segment::all()).with(Alignment::left()))
        .with(Modify::new(Segment::all()).with(Padding::new(3, 1, 0, 0)))
        .with(Modify::new(Segment::all()).with(Format::content(|s| format!("[{s}]")))),
    "+-------+--------------+--------------+--------------+"
    "| [N] | [column 0] | [column 1] | [column 2] |"
    "+-------+--------------+--------------+--------------+"
    "| [0] | [0-0] | [0-1] | [0-2] |"
    "+-------+--------------+--------------+--------------+"
    "| [1] | [1-0] | [1-1] | [1-2] |"
    "+-------+--------------+--------------+--------------+"
    "| [2] | [2-0] | [2-1] | [2-2] |"
    "+-------+--------------+--------------+--------------+"
);

// The formatter may ignore the original content entirely.
test_table!(
    formatting_content_str_test,
    Matrix::new(3, 3).with(Modify::new(Segment::all()).with(Format::content(|_| String::from("Hello World")))),
    "+-------------+-------------+-------------+-------------+"
    "| Hello World | Hello World | Hello World | Hello World |"
    "+-------------+-------------+-------------+-------------+"
    "| Hello World | Hello World | Hello World | Hello World |"
    "+-------------+-------------+-------------+-------------+"
    "| Hello World | Hello World | Hello World | Hello World |"
    "+-------------+-------------+-------------+-------------+"
    "| Hello World | Hello World | Hello World | Hello World |"
    "+-------------+-------------+-------------+-------------+"
);

// ANSI colour codes injected by the formatter must survive rendering.
#[cfg(feature = "color")]
test_table!(
    color_test,
    Matrix::new(3, 3)
        .with(Style::psql())
        .with(
            Modify::new(Columns::new(..1).and(Columns::new(2..)))
                .with(Format::content(|s| s.red().to_string())),
        )
        .with(Modify::new(Columns::new(1..2)).with(Format::content(|s| s.blue().to_string()))),
    " \u{1b}[31mN\u{1b}[39m | \u{1b}[34mcolumn 0\u{1b}[39m | \u{1b}[31mcolumn 1\u{1b}[39m | \u{1b}[31mcolumn 2\u{1b}[39m "
    "---+----------+----------+----------"
    " \u{1b}[31m0\u{1b}[39m | \u{1b}[34m0-0\u{1b}[39m | \u{1b}[31m0-1\u{1b}[39m | \u{1b}[31m0-2\u{1b}[39m "
    " \u{1b}[31m1\u{1b}[39m | \u{1b}[34m1-0\u{1b}[39m | \u{1b}[31m1-1\u{1b}[39m | \u{1b}[31m1-2\u{1b}[39m "
    " \u{1b}[31m2\u{1b}[39m | \u{1b}[34m2-0\u{1b}[39m | \u{1b}[31m2-1\u{1b}[39m | \u{1b}[31m2-2\u{1b}[39m "
);

// Colour + `.multiline()`: each visual line of a multi-line cell is coloured.
#[cfg(feature = "color")]
test_table!(
    color_multiline_test,
    Matrix::new(3, 3)
        .insert((2, 2), "E\nnde\navou\nros")
        .insert((3, 2), "Red\nHat")
        .insert((3, 3), "https://\nwww\n.\nredhat\n.com\n/en")
        .with(Style::psql())
        .with(Modify::new(Columns::new(..1)).with(Format::content(|s| s.red().to_string()).multiline()))
        .with(Modify::new(Columns::new(1..2)).with(Format::content(|s| s.blue().to_string()).multiline()))
        .with(Modify::new(Columns::new(2..)).with(Format::content(|s| s.green().to_string()).multiline())),
    " \u{1b}[31mN\u{1b}[39m | \u{1b}[34mcolumn 0\u{1b}[39m | \u{1b}[32mcolumn 1\u{1b}[39m | \u{1b}[32mcolumn 2\u{1b}[39m "
    "---+----------+----------+----------\n \u{1b}[31m0\u{1b}[39m | \u{1b}[34m0-0\u{1b}[39m | \u{1b}[32m0-1\u{1b}[39m | \u{1b}[32m0-2\u{1b}[39m "
    " \u{1b}[31m1\u{1b}[39m | \u{1b}[34m1-0\u{1b}[39m | \u{1b}[32mE\u{1b}[39m | \u{1b}[32m1-2\u{1b}[39m "
    " | | \u{1b}[32mnde\u{1b}[39m | "
    " | | \u{1b}[32mavou\u{1b}[39m | "
    " | | \u{1b}[32mros\u{1b}[39m | "
    " \u{1b}[31m2\u{1b}[39m | \u{1b}[34m2-0\u{1b}[39m | \u{1b}[32mRed\u{1b}[39m | \u{1b}[32mhttps://\u{1b}[39m "
    " | | \u{1b}[32mHat\u{1b}[39m | \u{1b}[32mwww\u{1b}[39m \n | | | \u{1b}[32m.\u{1b}[39m "
    " | | | \u{1b}[32mredhat\u{1b}[39m "
    " | | | \u{1b}[32m.com\u{1b}[39m "
    " | | | \u{1b}[32m/en\u{1b}[39m "
);
use std::env;

use tokio::net::TcpListener;

mod relay;
mod tests;

/// Entry point: `relay [address] [port] [host]`.
///
/// Defaults: address `0.0.0.0`, port `0` (OS-assigned), empty host string.
/// Binds a TCP listener and spawns one relay task per accepted connection.
#[tokio::main]
async fn main() {
    // `unwrap_or_else`/`unwrap_or_default` defer building the default String
    // until it is actually needed; the previous `unwrap_or("…".to_string())`
    // allocated the default unconditionally (clippy::or_fun_call).
    let address = env::args().nth(1).unwrap_or_else(|| "0.0.0.0".to_string());
    let port = env::args().nth(2).unwrap_or_else(|| "0".to_string());
    let host = env::args().nth(3).unwrap_or_default();

    let server = relay::Server::new();

    match TcpListener::bind(&format!("{}:{}", address, port)).await {
        Ok(listener) => {
            // Report the actual port, which matters when port 0 was requested.
            println!("Listening on: {}", listener.local_addr().unwrap());
            while let Ok((tcp_stream, _)) = listener.accept().await {
                // Relays forward many small packets; disable Nagle so writes
                // go out immediately.
                tcp_stream.set_nodelay(true).unwrap();
                tokio::spawn(relay::Server::handle_connection(
                    tcp_stream,
                    server.clone(),
                    host.clone(),
                ));
            }
        }
        Err(_) => println!("Failed to listen on: {}:{}", address, port),
    }
}
extern crate peroxide;
extern crate fitsio;

use peroxide::fuga::*;
use fitsio::*;

// Quick inspection utility: opens a FITS catalogue and reports the shape of
// the primary HDU if it is an image.
// NOTE(review): the path suggests the NASA-Sloan Atlas (nsa_v1_0_1) catalogue
// — confirm; the file must exist relative to the working directory.
fn main() -> Result<(), Box<dyn Error>> {
    let mut fptr = FitsFile::open("data/nsa_v1_0_1.fits")?;
    // Dump a human-readable summary of all HDUs to stdout.
    fptr.pretty_print()?;

    // HDU 0 is the primary HDU; only image HDUs report a shape.
    let hdu = fptr.hdu(0)?;
    if let hdu::HduInfo::ImageInfo { shape, .. } = hdu.info {
        println!("Image is {}-dimensional", shape.len());
        println!("Found image with shape {:?}", shape);
    }
    Ok(())
}
use menu::anilist::{AniListPagination, AniListUserView};
use serenity::{
    model::application::interaction::application_command::ApplicationCommandInteraction,
    model::prelude::{GuildId, Interaction},
    prelude::{Context, SerenityError},
};

use crate::utils::{get_application_command, regitser_command, CommandOption};

/// Slash-command names handled by this module.
pub const NAMES: [&str; 3] = ["anime", "manga", "user"];

/// Extracts the string value of the option `name` from the interaction data.
///
/// Returns `SerenityError::Other` if the option is missing or carries no value.
fn _get_command_option(
    interaction: &ApplicationCommandInteraction,
    name: &str,
) -> Result<String, SerenityError> {
    interaction
        .data
        .options
        .iter()
        .find(|opt| opt.name == name)
        // `and_then` replaces the previous `.map(..).flatten()` — identical
        // behavior, one combinator (clippy::map_flatten).
        .and_then(|opt| opt.value.clone())
        .map(|val| val.to_string())
        .ok_or(SerenityError::Other("Error getting Interaction Data"))
}

/// Registers the `anime`, `manga` and `user` slash commands for `guild_id`.
// NOTE(review): `regitser_command` (sic) is the helper's actual name in
// `crate::utils`; renaming it would touch call sites outside this file.
pub async fn register_interactions(
    context: &Context,
    guild_id: GuildId,
) -> Result<(), SerenityError> {
    let opts = vec![CommandOption::string(
        "title",
        "Anime title to search for in AniList",
    )];
    let description = "Search for an anime in AniList";
    regitser_command(&context, guild_id, "anime", description, opts).await?;

    let opts = vec![CommandOption::string(
        "title",
        "Mange title to search for in AniList",
    )];
    let description = "Search for a manga in AniList";
    regitser_command(&context, guild_id, "manga", description, opts).await?;

    let opts = vec![CommandOption::string("name", "The user's username")];
    let description = "Search for a user in AniList";
    regitser_command(&context, guild_id, "user", description, opts).await?;

    Ok(())
}

/// Dispatches an incoming interaction to the matching handler by command name.
/// Unknown names are silently ignored.
pub async fn handle_interactions(
    context: &Context,
    interaction: &Interaction,
    name: &str,
) -> Result<(), SerenityError> {
    match name {
        name @ ("anime" | "manga") => {
            handle_media_interaction(&context, &interaction, name).await?
        }
        "user" => handle_user_interaction(&context, &interaction).await?,
        _ => {}
    }
    Ok(())
}

/// Handles an `anime`/`manga` command: searches AniList for the given title
/// and opens a paginated result menu in the channel.
async fn handle_media_interaction(
    context: &Context,
    interaction: &Interaction,
    name: &str,
) -> Result<(), SerenityError> {
    let application_command = get_application_command(&interaction)?;
    let author_id = application_command.user.id;
    let channel_id = application_command.channel_id;
    let title = _get_command_option(&application_command, "title")?;

    let media_type = anilist::models::MediaType::from(name);
    // Propagate AniList client failures instead of panicking; this mirrors
    // the error mapping `handle_user_interaction` already uses.
    let media = anilist::client::search_media_with_adult(&title, media_type.clone(), false)
        .await
        .map_err(|_err| SerenityError::Other("TODO"))?;

    if media.is_empty() {
        let content = format!("No {} was found for `{}`", media_type, title);
        // `?` propagates a failed Discord send rather than unwrapping.
        channel_id.say(&context, content).await?;
        return Err(SerenityError::Other(
            "AniList Error. TODO: user custom error type",
        ));
    }

    menu::anilist::AniListPagination::new_media_pagination(
        context,
        &channel_id,
        &author_id,
        &media,
        menu::anilist::AniListMediaView::Overview,
    )
    .await
    .map_err(|_err| SerenityError::Other("TODO"))?;

    Ok(())
}

/// Handles the `user` command: searches AniList users by name and opens a
/// paginated profile menu.
async fn handle_user_interaction(
    context: &Context,
    interaction: &Interaction,
) -> Result<(), SerenityError> {
    let application_command = get_application_command(&interaction)?;
    let author_id = application_command.user.id;
    let channel_id = application_command.channel_id;
    let username = _get_command_option(&application_command, "name")?;

    let users = anilist::client::search_user(username)
        .await
        .map_err(|_err| SerenityError::Other("TODO"))?;

    if users.is_empty() {
        return Err(SerenityError::Other("TODO"));
    }

    AniListPagination::new_user_pagination(
        &context,
        &channel_id,
        &author_id,
        &users,
        AniListUserView::Overview,
    )
    .await
    .map_err(|_err| SerenityError::Other("TODO"))?;

    Ok(())
}
/* https://projecteuler.net The number, 197, is called a circular prime because all rotations of the digits: 197, 971, and 719, are themselves prime. There are thirteen such primes below 100: 2, 3, 5, 7, 11, 13, 17, 31, 37, 71, 73, 79, and 97. How many circular primes are there below one million? NOTES: */ // return vector of digits least significant digit first fn digits(n : u64) -> Vec::<u64> { let mut rv = Vec::<u64>::new(); let mut temp = n; while temp > 0 { rv.push(temp%10); temp /= 10; } if rv.is_empty() { rv.push(0); } rv } fn vec_to_number(v : &Vec::<u64>) -> u64 { let mut rv = 0; for n in v.iter().rev() { rv *= 10; rv += *n; } rv } fn solve() -> u64 { let mut rv = 0; let primes = sb::math::prime_to(1_000_000); for n in &primes { let mut good = true; let mut v = digits(*n); for _ in 1..v.len() { v.rotate_right(1); let temp = vec_to_number(&v); if let Err(_) = primes.binary_search(&temp) { good = false; break; } } rv += good as u64; } rv } fn main() { let start_time = std::time::Instant::now(); let sol = solve(); let elapsed = start_time.elapsed().as_micros(); println!("\nSolution: {}", sol); //println!("Elasped time: {} us", elapsed); let mut remain = elapsed; let mut s = String::new(); if remain == 0 { s.insert(0,'0'); } while remain > 0 { let temp = remain%1000; remain /= 1000; if remain > 0 { s = format!(",{:03}",temp) + &s; } else { s = format!("{}",temp) + &s; } } println!("Elasped time: {} us", s); }
pub mod ebo; pub mod vao; pub mod vao_builder; pub mod vbo;
pub mod map;
pub mod mrr;
pub mod pak;
pub mod pr;

use map::Map;
use mrr::Mrr;
use pak::Pak;
use pr::PrecisionAndRecall;

use std::collections::{HashSet, HashMap};
use std::io::BufReader;
use std::io::prelude::*;
use std::fs::File;
use std::env;
use std::process;

/// Evaluates a retrieval run file (given as the sole CLI argument) against
/// the CACM relevance judgments, printing MAP, MRR, P@5/P@20 and
/// precision/recall tables in a quoted-CSV format.
fn main() {
    let relevant_docs = get_relevance_info();

    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        eprintln!("Must specify results file to load from");
        process::exit(1);
    }
    let result_name = args[1].as_str();

    println!("\"Retrieval Model\", \"Evaluation Metric\", \"Value\"");
    // `to_string` replaces the old `String::from(result_name.clone())`:
    // cloning a `&str` only copies the reference, so that clone was a no-op.
    let results = get_results(result_name.to_string());
    println!("\"{}\", \"MAP\", \"{}\"", result_name, Map::calc(&results, &relevant_docs));
    println!("\"{}\", \"MRR\", \"{}\"", result_name, Mrr::calc(&results, &relevant_docs));
    print!("\n");

    // Per-query precision-at-k tables.
    println!("\"Retrieval Model\", \"Evaluation Metric\", \"Query\", \"Value\"");
    for (query_id, pak) in Pak::calc(5, &results, &relevant_docs) {
        println!("\"{}\", \"P@5\", \"{}\", \"{}\"", result_name, query_id, pak);
    }
    for (query_id, pak) in Pak::calc(20, &results, &relevant_docs) {
        println!("\"{}\", \"P@20\", \"{}\", \"{}\"", result_name, query_id, pak);
    }
    print!("\n");

    // Full precision/recall curve points, one row per retrieved rank.
    println!("\"Retrieval Model\", \"Query\", \"Rank\", \"Precision\", \"Recall\"");
    for (query_id, rank, precision, recall) in PrecisionAndRecall::calc(&results, &relevant_docs) {
        println!("\"{}\", \"{}\", \"{}\", \"{}\", \"{}\"", result_name, query_id, rank, precision, recall);
    }
    print!("\n\n");
}

/// Gets a mapping from query to list of retrieved
/// documents in order of relevance.
/// Parses a whitespace-separated results file (first token: query id, third
/// token: doc id) into a map from query id to retrieved doc ids in file
/// order. The path is resolved relative to the current working directory.
/// Panics on a missing file or malformed line (original fail-fast behaviour).
fn get_results(file_name: String) -> HashMap<usize, Vec<usize>> {
    let path = format!("{}/{}", env::current_dir().unwrap().display(), file_name);
    let results_f = File::open(path).expect("Results file not found");
    let results_reader = BufReader::new(results_f);

    let mut results: HashMap<usize, Vec<usize>> = HashMap::new();
    for m_line in results_reader.lines() {
        if let Ok(line) = m_line {
            // Borrow tokens directly instead of allocating a String per token.
            let parts: Vec<&str> = line.split_whitespace().collect();
            let query_id = parts
                .first()
                // Fixed message: this is the results file, not the relevance file.
                .expect("Results line is not formatted correctly")
                .parse::<usize>()
                .unwrap();
            let doc_id = parts
                .get(2)
                .expect("Results line is not formatted correctly")
                .parse::<usize>()
                .unwrap();
            // `entry().or_default()` avoids the double lookup and the
            // unconditionally constructed `Vec::new()` argument.
            results.entry(query_id).or_default().push(doc_id);
        }
    }
    results
}

/// Gets a mapping from query to set of relevant documents.
///
/// Reads `../cacm.rel.txt`; the third token of each line is expected to look
/// like `CACM-<id>` — the first five characters are stripped before parsing.
fn get_relevance_info() -> HashMap<usize, HashSet<usize>> {
    let rel_file = File::open("../cacm.rel.txt").expect("../cacm.rel.txt not found");
    let rel_reader = BufReader::new(rel_file);

    let mut relevant_docs: HashMap<usize, HashSet<usize>> = HashMap::new();
    for m_line in rel_reader.lines() {
        if let Ok(line) = m_line {
            let parts: Vec<&str> = line.split_whitespace().collect();
            let query_id = parts
                .first()
                .expect("Relevance line is not formatted correctly")
                .parse::<usize>()
                .unwrap();
            // Skip the 5-character "CACM-" style prefix before the numeric id.
            let doc_id = parts
                .get(2)
                .expect("Relevance line is not formatted correctly")[5..]
                .parse::<usize>()
                .unwrap();
            relevant_docs.entry(query_id).or_default().insert(doc_id);
        }
    }
    relevant_docs
}
// // Copyright 2021 The Project Oak Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // use crate::logger::Logger; use anyhow::{anyhow, Context}; use hyper::{body::Bytes, client::connect::Connect, Body, Client, Request}; use hyper_rustls::HttpsConnector; use log::Level; use prost::Message; use serde_derive::Deserialize; use std::{collections::HashMap, sync::RwLock, time::Instant}; #[derive(Copy, Clone, Deserialize, Debug)] pub enum LookupDataAuth { None, GcpMetadataToken, } impl LookupDataAuth { pub fn default() -> Self { LookupDataAuth::None } } #[derive(Clone, Debug)] pub enum LookupDataSource { Http { url: String, auth: LookupDataAuth }, File(std::path::PathBuf), } /// An in-memory lookup store instance that can refresh its internal entries from the provided data /// file URL. /// /// Entries in the data file path must be consecutive binary encoded and length delimited /// protobuf messages according to the definition in `/oak_functions/proto/lookup_data.proto`. pub struct LookupData { lookup_data_source: Option<LookupDataSource>, entries: RwLock<HashMap<Vec<u8>, Vec<u8>>>, logger: Logger, } impl LookupData { /// Creates a new empty [`LookupData`] instance that can refresh its internal entries from the /// provided data file URL. /// /// The returned instance is empty, and must be populated by calling the [`LookupData::refresh`] /// method at least once. 
pub fn new_empty(lookup_data_source: Option<LookupDataSource>, logger: Logger) -> LookupData {
    LookupData {
        lookup_data_source,
        entries: RwLock::new(HashMap::new()),
        logger,
    }
}

/// Refreshes the internal entries of this struct from the data file URL provided at
/// construction time.
///
/// If the `lookup_data_auth` config setting is set to `GcpMetadataToken` a service account
/// access token will be downloaded from the GCP metadata service first and then used to
/// authenticate the lookup data download request.
///
/// If successful, entries are completely replaced (i.e. not merged).
///
/// If there is any error while reading or parsing the data, an error is returned by this
/// method, and existing entries are left untouched. The caller may retry the refresh operation
/// at a future time.
pub async fn refresh(&self) -> anyhow::Result<()> {
    match &self.lookup_data_source {
        Some(lookup_data_source) => {
            // Phase 1: fetch the raw bytes (timed separately for logging).
            let start = Instant::now();
            let lookup_data_buf = fetch_lookup_data(&self.logger, lookup_data_source).await?;
            self.logger.log_public(
                Level::Info,
                &format!(
                    "fetched {} bytes of lookup data in {:.0?}",
                    lookup_data_buf.len(),
                    start.elapsed()
                ),
            );

            // Phase 2: parse into a fresh map BEFORE taking the write lock,
            // so parsing cost never blocks concurrent readers.
            let start = Instant::now();
            let entries = parse_lookup_entries(&mut lookup_data_buf.as_ref())
                .context("could not parse lookup data")?;
            self.logger.log_public(
                Level::Info,
                &format!(
                    "parsed {} entries of lookup data in {:.0?}",
                    entries.len(),
                    start.elapsed()
                ),
            );

            // This block is here to emphasize and ensure that the write lock is only held for a
            // very short time.
            let start = Instant::now();
            {
                *self
                    .entries
                    .write()
                    .expect("could not lock entries for write") = entries;
            }
            self.logger.log_public(
                Level::Debug,
                &format!(
                    "lookup data write lock acquisition time: {:.0?}",
                    start.elapsed()
                ),
            );
            Ok(())
        }
        // No source configured: nothing to refresh.
        None => Ok(()),
    }
}

/// Creates an instance of LookupData populated with the given entries.
#[allow(dead_code)]
pub fn for_test(entries: HashMap<Vec<u8>, Vec<u8>>) -> Self {
    // No data source: `refresh` is a no-op and the entries stay fixed.
    LookupData {
        lookup_data_source: None,
        entries: RwLock::new(entries),
        logger: Logger::for_test(),
    }
}

/// Convenience getter for an individual entry that reduces lock contention by cloning the
/// resulting value as quickly as possible and returning it instead of a reference.
pub fn get(&self, key: &[u8]) -> Option<Vec<u8>> {
    self.entries
        .read()
        .expect("could not lock entries for read")
        .get(key)
        .cloned()
}

/// Returns the number of entries currently stored.
#[allow(dead_code)]
pub fn len(&self) -> usize {
    self.entries
        .read()
        .expect("could not lock entries for read")
        .len()
}

/// Returns whether the store currently holds no entries.
#[allow(dead_code)]
pub fn is_empty(&self) -> bool {
    self.entries
        .read()
        // Message lower-cased for consistency with the other lock-poisoning
        // messages in this impl ("could not lock entries for …").
        .expect("could not lock entries for read")
        .is_empty()
}
}

/// Fetches the raw lookup-data bytes from the configured source: an HTTP(S)
/// download or a local file read.
async fn fetch_lookup_data(
    logger: &Logger,
    lookup_data_source: &LookupDataSource,
) -> anyhow::Result<Bytes> {
    match lookup_data_source {
        LookupDataSource::Http { url, auth } => {
            logger.log_public(
                Level::Info,
                &format!(
                    "refreshing lookup data from HTTP: {} with auth {:?}",
                    url, auth
                ),
            );
            // TODO(#1930): Avoid loading the entire file in memory for parsing.
            let https = HttpsConnector::with_native_roots();
            let client = Client::builder().build::<_, Body>(https);
            send_request(&client, build_download_request(url, auth).await?).await
        }
        LookupDataSource::File(file_path) => {
            logger.log_public(
                Level::Info,
                &format!("refreshing lookup data from file path: {:?}", file_path),
            );
            Ok(tokio::fs::read(&file_path).await?.into())
        }
    }
}

/// Parses consecutive length-delimited `Entry` protobuf messages from the
/// buffer into a key/value map. A later duplicate of a key overwrites the
/// earlier one.
pub fn parse_lookup_entries<B: prost::bytes::Buf>(
    lookup_data_buffer: B,
) -> anyhow::Result<HashMap<Vec<u8>, Vec<u8>>> {
    let mut lookup_data_buffer = lookup_data_buffer;
    let mut entries = HashMap::new();
    while lookup_data_buffer.has_remaining() {
        let entry =
            oak_functions_abi::proto::Entry::decode_length_delimited(&mut lookup_data_buffer)
                .context("could not decode entry")?;
        entries.insert(entry.key, entry.value);
    }
    Ok(entries)
}

/// Builds the GET request for the lookup-data URL, attaching a `Bearer` token
/// obtained from the GCP metadata service when `auth` requests it.
async fn build_download_request(url: &str, auth: &LookupDataAuth) -> anyhow::Result<Request<Body>> {
    let builder = match auth {
        LookupDataAuth::None => Request::builder().method(http::Method::GET).uri(url),
        LookupDataAuth::GcpMetadataToken => {
            let access_token = get_access_token()
                .await
                .context("could not get access token")?;
            Request::builder()
                .method(http::Method::GET)
                .uri(url)
                .header("Authorization", format!("Bearer {}", access_token))
        }
    };
    builder
        .body(Body::empty())
        .context("could not create lookup data request")
}

/// Executes `request` and reads the entire response body into memory.
async fn send_request<C>(client: &Client<C, Body>, request: Request<Body>) -> anyhow::Result<Bytes>
where
    C: Connect + Clone + Send + Sync + 'static,
{
    let response = client
        .request(request)
        .await
        .context("could not execute request")?;
    hyper::body::to_bytes(response.into_body())
        .await
        .context("could not read response body")
}

/// Gets a service account access token from the GCP metadata service.
async fn get_access_token() -> anyhow::Result<String> {
    let client = Client::new();
    let request = Request::builder()
        .method(http::Method::GET)
        // See https://cloud.google.com/run/docs/securing/service-identity#access_tokens for details.
        .uri("http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/token")
        .header("Metadata-Flavor", "Google")
        .body(Body::empty())
        .context("could not create auth token request")?;
    let result = send_request(&client, request).await?;
    let token_json =
        std::str::from_utf8(result.as_ref()).context("could not decode response as a string")?;
    // The metadata service replies with JSON containing an `access_token` field.
    let token: serde_json::Value =
        serde_json::from_str(token_json).context("could not decode response as JSON")?;
    token["access_token"]
        .as_str()
        .ok_or_else(|| anyhow!("access token not found"))
        .map(String::from)
}
use std::borrow::Cow;
use std::collections::HashSet;
use std::sync::Arc;

use itertools::Itertools;
use strum::EnumCount;

use command_data_derive::CommandData;
use discorsd::{async_trait, BotState};
use discorsd::commands::*;
use discorsd::errors::BotError;

use crate::avalon::characters::Character;
use crate::avalon::characters::Character::{LoyalServant, MinionOfMordred};
use crate::Bot;

/// Slash command that edits the set of roles available in the next Avalon
/// game. The wrapped `Vec<Character>` mirrors the currently selected roles
/// and drives which choices the command offers (see the helper fns below).
#[derive(Clone, Debug)]
pub struct RolesCommand(pub Vec<Character>);

#[async_trait]
impl SlashCommand for RolesCommand {
    type Bot = Bot;
    type Data = RoleData;
    type Use = Deferred;
    const NAME: &'static str = "roles";

    fn description(&self) -> Cow<'static, str> {
        "Pick which roles will be available in the next game of Avalon.".into()
    }

    async fn run(&self,
                 state: Arc<BotState<Bot>>,
                 interaction: InteractionUse<AppCommandData, Unused>,
                 data: RoleData,
    ) -> Result<InteractionUse<AppCommandData, Self::Use>, BotError> {
        // Defer immediately: the bookkeeping below may outlast the
        // interaction-response deadline.
        let interaction = interaction.defer(&state).await?;
        let guild = interaction.guild().unwrap();
        let mut guard = state.bot.avalon_games.write().await;
        let game = guard.get_mut(&guild).unwrap();
        let config = game.config_mut();
        let roles = &mut config.roles;
        // `changed` tracks whether the selection actually differs, so the
        // command definition and embed are only re-edited when necessary.
        let changed = match data {
            RoleData::Add(add) => {
                // Only extend with roles not already selected.
                let new = add.into_iter()
                    .filter(|c| !roles.contains(c))
                    .collect_vec();
                let added = !new.is_empty();
                roles.extend(&new);
                added
            }
            RoleData::Remove(rem) => {
                // `retain` drops the requested roles in place, recording
                // whether anything was actually removed.
                let mut removed = false;
                roles.retain(|char| {
                    let retain = !rem.contains(char);
                    if !retain { removed = true }
                    retain
                });
                removed
            }
            RoleData::Clear => {
                roles.clear();
                true
            }
        };
        if changed {
            // Re-sync this command's stored role list and its registered
            // definition so the Add/Remove choices reflect the new state.
            let guard = state.slash_commands.read().await;
            let mut commands = guard.get(&guild).unwrap().write().await;
            let roles_cmd = commands.get_mut(&interaction.data.command)
                .unwrap()
                .downcast_mut::<Self>()
                .unwrap();
            roles_cmd.0 = roles.clone();
            roles_cmd.edit_command(&state, guild, interaction.data.command).await?;
            config.start_command(&state, commands, config.startable(), guild).await?;
        }
        config.update_embed(&state, &interaction).await?;
        Ok(interaction)
    }
}

/// Options for the `/roles` command: add roles, remove roles, or clear all.
#[derive(CommandData)]
#[command(command = "RolesCommand")]
pub enum RoleData {
    #[command(desc = "Choose roles to add", enable_if = "add_roles")]
    Add(
        #[command(va_ordinals, va_count = "add_count", va_req = 1, retain = "add_choice")]
        HashSet<Character>
    ),
    #[command(desc = "Choose roles to remove", enable_if = "remove_roles")]
    Remove(
        #[command(va_ordinals, va_count = "remove_count", va_req = 1, retain = "remove_choice")]
        HashSet<Character>
    ),
    #[command(desc = "Clear all roles", enable_if = "remove_roles")]
    Clear,
}

// Number of roles that can still be added: every `Character` except
// `LoyalServant`/`MinionOfMordred` (excluded by `add_choice`), minus those
// already chosen.
fn add_count(command: &RolesCommand) -> usize {
    Character::COUNT - 2 - command.0.len()
}

// A role is addable if it is neither of the excluded characters and is not
// already selected.
fn add_choice(command: &RolesCommand, choice: Character) -> bool {
    choice != LoyalServant && choice != MinionOfMordred && !command.0.iter().any(|&c| choice == c)
}

// Number of roles that can be removed (all currently selected).
fn remove_count(command: &RolesCommand) -> usize {
    command.0.len()
}

// A role is removable only if it is currently selected.
fn remove_choice(command: &RolesCommand, choice: Character) -> bool {
    command.0.iter().any(|&c| choice == c)
}

// `Add` is enabled while at least one addable role remains.
fn add_roles(command: &RolesCommand) -> bool {
    command.0.len() < Character::COUNT - 2
}

// `Remove`/`Clear` are enabled while any role is selected.
fn remove_roles(command: &RolesCommand) -> bool {
    !command.0.is_empty()
}
use crate::{borrow_streamer::BorrowStreamer, Advice, Array, Metadata, ReadAt, WriteAt}; #[cfg(feature = "io-streams")] use io_streams::StreamReader; use std::{ cmp::min, convert::TryInto, io::{self, IoSlice, IoSliceMut, Read}, }; impl Array for [u8] { #[inline] fn metadata(&self) -> io::Result<Metadata> { Ok(Metadata { len: self.len().try_into().unwrap(), #[cfg(not(target_os = "wasi"))] blksize: page_size::get().try_into().unwrap(), // Hard-code the size here pending // <https://github.com/Elzair/page_size_rs/pull/3> #[cfg(target_os = "wasi")] blksize: 65536, }) } #[inline] fn advise(&self, _offset: u64, _len: u64, _advice: Advice) -> io::Result<()> { Ok(()) } } impl ReadAt for [u8] { #[inline] fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result<usize> { self.read_exact_at(buf, offset)?; Ok(buf.len()) } fn read_exact_at(&self, buf: &mut [u8], offset: u64) -> io::Result<()> { let offset = offset .try_into() .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?; let at: &[u8] = self.get(offset..).unwrap_or(&[]); let len = min(at.len(), buf.len()); buf[..len].copy_from_slice(&at[..len]); Ok(()) } fn read_vectored_at(&self, bufs: &mut [IoSliceMut], offset: u64) -> io::Result<usize> { let initial_offset = offset .try_into() .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?; let mut running_offset = initial_offset; for buf in bufs { let at = self.get(running_offset..).unwrap_or(&[]); let len = min(at.len(), buf.len()); buf.copy_from_slice(&at[..len]); running_offset += len; } Ok(running_offset - initial_offset) } fn read_exact_vectored_at(&self, bufs: &mut [IoSliceMut], offset: u64) -> io::Result<()> { let mut running_offset = offset .try_into() .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?; for buf in bufs { let at = self.get(running_offset..).unwrap_or(&[]); if at.len() < buf.len() { return Err(io::Error::new( io::ErrorKind::UnexpectedEof, "failed to fill whole buffer", )); } let len = buf.len(); buf.copy_from_slice(&at[..len]); 
running_offset += len; } Ok(()) } #[inline] fn is_read_vectored_at(&self) -> bool { true } #[cfg(feature = "io-streams")] fn read_via_stream_at(&self, _offset: u64) -> io::Result<StreamReader> { todo!("slice::read_via_stream_at") } } impl WriteAt for [u8] { #[inline] fn write_at(&mut self, buf: &[u8], offset: u64) -> io::Result<usize> { self.write_all_at(buf, offset)?; Ok(buf.len()) } fn write_all_at(&mut self, buf: &[u8], offset: u64) -> io::Result<()> { let offset = offset .try_into() .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?; let at = self.get_mut(offset..).unwrap_or(&mut []); let len = min(at.len(), buf.len()); at[..len].copy_from_slice(&buf[..len]); Ok(()) } fn write_vectored_at(&mut self, bufs: &[IoSlice], offset: u64) -> io::Result<usize> { let initial_offset = offset .try_into() .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?; let mut running_offset = initial_offset; for buf in bufs { let at = self.get_mut(running_offset..).unwrap_or(&mut []); let len = min(at.len(), buf.len()); at[..len].copy_from_slice(buf); running_offset += len; } Ok(running_offset - initial_offset) } fn write_all_vectored_at(&mut self, bufs: &mut [IoSlice], offset: u64) -> io::Result<()> { let mut running_offset = offset .try_into() .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?; for buf in bufs { let at = self.get_mut(running_offset..).unwrap_or(&mut []); if at.len() < buf.len() { return Err(io::Error::new( io::ErrorKind::UnexpectedEof, "failed to fill whole buffer", )); } let len = buf.len(); at[..len].copy_from_slice(buf); running_offset += len; } Ok(()) } #[inline] fn is_write_vectored_at(&self) -> bool { true } #[inline] fn copy_from<R: ReadAt>( &mut self, _offset: u64, input: &R, input_offset: u64, len: u64, ) -> io::Result<u64> { let _todo = BorrowStreamer::new(input, input_offset).take(len); todo!("slice::copy_from") } #[inline] fn set_len(&mut self, _len: u64) -> io::Result<()> { Err(io::Error::new( io::ErrorKind::PermissionDenied, 
"cannot set_len on a slice", )) } } impl Array for Vec<u8> { #[inline] fn metadata(&self) -> io::Result<Metadata> { self.as_slice().metadata() } #[inline] fn advise(&self, offset: u64, len: u64, advice: Advice) -> io::Result<()> { self.as_slice().advise(offset, len, advice) } } impl ReadAt for Vec<u8> { #[inline] fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result<usize> { self.as_slice().read_at(buf, offset) } #[inline] fn read_exact_at(&self, buf: &mut [u8], offset: u64) -> io::Result<()> { self.as_slice().read_exact_at(buf, offset) } #[inline] fn read_vectored_at(&self, bufs: &mut [IoSliceMut], offset: u64) -> io::Result<usize> { self.as_slice().read_vectored_at(bufs, offset) } #[inline] fn read_exact_vectored_at(&self, bufs: &mut [IoSliceMut], offset: u64) -> io::Result<()> { self.as_slice().read_exact_vectored_at(bufs, offset) } #[inline] fn is_read_vectored_at(&self) -> bool { self.as_slice().is_read_vectored_at() } #[cfg(feature = "io-streams")] fn read_via_stream_at(&self, _offset: u64) -> io::Result<StreamReader> { todo!("slice::read_via_stream_at") } } impl WriteAt for Vec<u8> { #[inline] fn write_at(&mut self, buf: &[u8], offset: u64) -> io::Result<usize> { self.as_mut_slice().write_at(buf, offset) } #[inline] fn write_all_at(&mut self, buf: &[u8], offset: u64) -> io::Result<()> { self.as_mut_slice().write_all_at(buf, offset) } #[inline] fn write_vectored_at(&mut self, bufs: &[IoSlice], offset: u64) -> io::Result<usize> { self.as_mut_slice().write_vectored_at(bufs, offset) } #[inline] fn write_all_vectored_at(&mut self, bufs: &mut [IoSlice], offset: u64) -> io::Result<()> { self.as_mut_slice().write_all_vectored_at(bufs, offset) } #[inline] fn is_write_vectored_at(&self) -> bool { self.as_slice().is_write_vectored_at() } #[inline] fn copy_from<R: ReadAt>( &mut self, offset: u64, input: &R, input_offset: u64, len: u64, ) -> io::Result<u64> { self.as_mut_slice() .copy_from(offset, input, input_offset, len) } #[inline] fn set_len(&mut self, len: 
u64) -> io::Result<()> { self.resize( len.try_into() .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?, 0, ); Ok(()) } }
//! Provides structures used to load audio files. use std::sync::Arc; use super::AudioContext; use assets::*; /// A loaded audio file #[derive(Clone)] pub struct Source { pub(crate) pointer: AssetPtr<Arc<Vec<u8>>, Source>, } impl AsRef<Arc<Vec<u8>>> for Source { fn as_ref(&self) -> &Arc<Vec<u8>> { self.pointer.inner_ref() } } impl AsRef<[u8]> for Source { fn as_ref(&self) -> &[u8] { &*self.pointer.inner_ref() } } impl Asset for Source { type Context = AudioContext; }
#![warn(clippy::pedantic)]

use libcnb_test::{assert_contains, assert_not_contains};
use test_support::Builder::{Heroku20, Heroku22};
use test_support::{
    assert_health_check_responds, get_function_invoker_build_config, test_node_function,
};

// Integration tests for the Node.js function buildpack. Each test builds a
// fixture function against a builder image (Heroku20 / Heroku22), then
// asserts on the `pack` build output and on a post-build health check.
// All tests are `#[ignore]`d — presumably because they need a container
// build environment; confirm before wiring into CI.

#[test]
#[ignore]
fn simple_javascript_function_heroku_20() {
    test_node_function("simple-function", Heroku20, |ctx| {
        assert_health_check_responds(&ctx);
    });
}

#[test]
#[ignore]
fn simple_javascript_function_heroku_22() {
    test_node_function("simple-function", Heroku22, |ctx| {
        assert_health_check_responds(&ctx);
    });
}

#[test]
#[ignore]
fn simple_typescript_function_heroku_20() {
    test_node_function("simple-typescript-function", Heroku20, |ctx| {
        assert_health_check_responds(&ctx);
    });
}

#[test]
#[ignore]
fn simple_typescript_function_heroku_22() {
    test_node_function("simple-typescript-function", Heroku22, |ctx| {
        assert_health_check_responds(&ctx);
    });
}

// Rebuilding the same app on a newer builder must reinstall the runtime and
// still pass the health check.
#[test]
#[ignore]
fn upgrade_simple_nodejs_function_from_heroku20_to_heroku22() {
    test_node_function("simple-function", Heroku20, |ctx| {
        assert_contains!(
            ctx.pack_stdout,
            "Installing Node.js Function Invoker Runtime"
        );
        assert_health_check_responds(&ctx);

        ctx.rebuild(
            get_function_invoker_build_config("simple-function", Heroku22),
            |new_ctx| {
                assert_contains!(
                    new_ctx.pack_stdout,
                    "Installing Node.js Function Invoker Runtime"
                );
                assert_health_check_responds(&new_ctx);
            },
        );
    });
}

// Fixtures that declare the function runtime explicitly in package.json must
// NOT emit the auto-detection deprecation warning.

#[test]
#[ignore]
fn test_function_with_explicit_runtime_dependency_js_heroku_20() {
    test_node_function(
        "functions/with-explicit-runtime-dependency-js",
        Heroku20,
        |ctx| {
            assert_contains!(
                ctx.pack_stdout,
                "Node.js function runtime declared in package.json"
            );
            assert_not_contains!(ctx.pack_stderr, "Future versions of the Functions Runtime for Node.js (@heroku/sf-fx-runtime-nodejs) will not be auto-detected and must be added as a dependency in package.json");
            assert_health_check_responds(&ctx);
        },
    );
}

#[test]
#[ignore]
fn test_function_with_explicit_runtime_dependency_js_heroku_22() {
    test_node_function(
        "functions/with-explicit-runtime-dependency-js",
        Heroku22,
        |ctx| {
            assert_contains!(
                ctx.pack_stdout,
                "Node.js function runtime declared in package.json"
            );
            assert_not_contains!(ctx.pack_stderr, "Future versions of the Functions Runtime for Node.js (@heroku/sf-fx-runtime-nodejs) will not be auto-detected and must be added as a dependency in package.json");
            assert_health_check_responds(&ctx);
        },
    );
}

#[test]
#[ignore]
fn test_function_with_explicit_runtime_dependency_ts_heroku_20() {
    test_node_function(
        "functions/with-explicit-runtime-dependency-ts",
        Heroku20,
        |ctx| {
            assert_contains!(
                ctx.pack_stdout,
                "Node.js function runtime declared in package.json"
            );
            assert_not_contains!(ctx.pack_stderr, "Future versions of the Functions Runtime for Node.js (@heroku/sf-fx-runtime-nodejs) will not be auto-detected and must be added as a dependency in package.json");
            assert_health_check_responds(&ctx);
        },
    );
}

#[test]
#[ignore]
fn test_function_with_explicit_runtime_dependency_ts_heroku_22() {
    test_node_function(
        "functions/with-explicit-runtime-dependency-ts",
        Heroku22,
        |ctx| {
            assert_contains!(
                ctx.pack_stdout,
                "Node.js function runtime declared in package.json"
            );
            assert_not_contains!(ctx.pack_stderr, "Future versions of the Functions Runtime for Node.js (@heroku/sf-fx-runtime-nodejs) will not be auto-detected and must be added as a dependency in package.json");
            assert_health_check_responds(&ctx);
        },
    );
}

// Fixtures relying on runtime auto-detection must emit the deprecation
// warning on stderr and must NOT claim the runtime was declared.

#[test]
#[ignore]
fn test_function_with_implicit_runtime_dependency_js_heroku_20() {
    test_node_function(
        "functions/with-implicit-runtime-dependency-js",
        Heroku20,
        |ctx| {
            assert_contains!(ctx.pack_stderr, "Future versions of the Functions Runtime for Node.js (@heroku/sf-fx-runtime-nodejs) will not be auto-detected and must be added as a dependency in package.json");
            assert_not_contains!(
                ctx.pack_stdout,
                "Node.js function runtime declared in package.json"
            );
            assert_health_check_responds(&ctx);
        },
    );
}

#[test]
#[ignore]
fn test_function_with_implicit_runtime_dependency_js_heroku_22() {
    test_node_function(
        "functions/with-implicit-runtime-dependency-js",
        Heroku22,
        |ctx| {
            assert_contains!(ctx.pack_stderr, "Future versions of the Functions Runtime for Node.js (@heroku/sf-fx-runtime-nodejs) will not be auto-detected and must be added as a dependency in package.json");
            assert_not_contains!(
                ctx.pack_stdout,
                "Node.js function runtime declared in package.json"
            );
            assert_health_check_responds(&ctx);
        },
    );
}

#[test]
#[ignore]
fn test_function_with_implicit_runtime_dependency_ts_heroku_20() {
    test_node_function(
        "functions/with-implicit-runtime-dependency-ts",
        Heroku20,
        |ctx| {
            assert_contains!(ctx.pack_stderr, "Future versions of the Functions Runtime for Node.js (@heroku/sf-fx-runtime-nodejs) will not be auto-detected and must be added as a dependency in package.json");
            assert_not_contains!(
                ctx.pack_stdout,
                "Node.js function runtime declared in package.json"
            );
            assert_health_check_responds(&ctx);
        },
    );
}

#[test]
#[ignore]
fn test_function_with_implicit_runtime_dependency_ts_heroku_22() {
    test_node_function(
        "functions/with-implicit-runtime-dependency-ts",
        Heroku22,
        |ctx| {
            assert_contains!(ctx.pack_stderr, "Future versions of the Functions Runtime for Node.js (@heroku/sf-fx-runtime-nodejs) will not be auto-detected and must be added as a dependency in package.json");
            assert_not_contains!(
                ctx.pack_stdout,
                "Node.js function runtime declared in package.json"
            );
            assert_health_check_responds(&ctx);
        },
    );
}
use std::net::{SocketAddr, Ipv4Addr, IpAddr};

/// Starts the proxy: listens on loopback port 25566 and forwards to the
/// server on loopback port 25565.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let loopback = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
    let proxy_address = SocketAddr::new(loopback, 25566);
    let server_address = SocketAddr::new(loopback, 25565);
    proxy::start(proxy_address, server_address)
}
// Submodules backed by `talk/cat.rs` and `talk/dog.rs`.
pub mod talk {
    pub mod cat;
    pub mod dog;
}

/// Expected (ignored) to panic: the greeting deliberately does not match.
#[test]
#[should_panic]
#[ignore]
fn test_hello() {
    let expected = "cat hello--";
    assert_eq!(expected, talk::cat::hello());
}
extern crate mynumber;

use mynumber::corporate;

// Integration tests for corporate-number validation: `verify` must reject
// inputs of the wrong length, non-numeric input, a wrong check digit, and
// the empty string, while accepting a well-formed 13-digit number.

#[test]
fn verify_with_shorter_corporate_number_returns_error() {
    let number = "12345";
    assert!(corporate::verify(number).is_err());
}

#[test]
fn verify_with_longer_corporate_number_returns_error() {
    let number = "12345678901234567890";
    assert!(corporate::verify(number).is_err());
}

#[test]
fn verify_with_invalid_corporate_number_type_returns_error() {
    let number = "ABCDEFGHIJKLM";
    assert!(corporate::verify(number).is_err());
}

#[test]
fn verify_with_valid_corporate_number_returns_ok() {
    let number = "9234567890123";
    assert!(corporate::verify(number).is_ok());
}

// Same digits as the valid case but with a leading digit that breaks the
// check-digit calculation.
#[test]
fn verify_with_wrong_check_digit_returns_err() {
    let number = "1234567890123";
    assert!(corporate::verify(number).is_err());
}

#[test]
fn verify_with_empty_number_returns_error() {
    let number = "";
    assert!(corporate::verify(number).is_err());
}
use crate::responses::*;
use crate::QueueServiceProperties;
use azure_core::headers::add_optional_header;
use azure_core::prelude::*;
use azure_storage::core::clients::StorageClient;
use std::convert::TryInto;

/// Builder for the "Set Queue Service Properties" storage operation.
///
/// Optional parameters (`timeout`, `client_request_id`) are populated via the
/// `setters!`-generated methods and applied when `execute` is called.
#[derive(Debug, Clone)]
pub struct SetQueueServicePropertiesBuilder<'a> {
    storage_client: &'a StorageClient,
    timeout: Option<Timeout>,
    client_request_id: Option<ClientRequestId<'a>>,
}

impl<'a> SetQueueServicePropertiesBuilder<'a> {
    pub(crate) fn new(storage_client: &'a StorageClient) -> Self {
        SetQueueServicePropertiesBuilder {
            storage_client,
            timeout: None,
            client_request_id: None,
        }
    }

    setters! {
        timeout: Timeout => Some(timeout),
        client_request_id: ClientRequestId<'a> => Some(client_request_id),
    }

    /// Pass the properties here.
    /// More info here
    /// [https://docs.microsoft.com/rest/api/storageservices/set-queue-service-properties](https://docs.microsoft.com/rest/api/storageservices/set-queue-service-properties).
    pub async fn execute(
        &self,
        queue_service_properties: &QueueServiceProperties,
    ) -> Result<SetQueueServicePropertiesResponse, Box<dyn std::error::Error + Sync + Send>> {
        let mut url = self
            .storage_client
            .storage_account_client()
            .queue_storage_url()
            .to_owned();

        // These two query parameters route the PUT to the service-properties
        // resource of the queue endpoint.
        url.query_pairs_mut().append_pair("restype", "service");
        url.query_pairs_mut().append_pair("comp", "properties");

        self.timeout.append_to_url_query(&mut url);

        // The properties are sent as an XML document in the request body.
        let xml_body = serde_xml_rs::to_string(&queue_service_properties)?;
        debug!("xml about to be sent == {}", xml_body);

        let request = self.storage_client.prepare_request(
            url.as_str(),
            &http::method::Method::PUT,
            &|mut request| {
                // Only attach the client-request-id header when one was set.
                request = add_optional_header(&self.client_request_id, request);
                request
            },
            Some(xml_body.into()),
        )?;

        // The service acknowledges a successful update with 202 Accepted.
        let response = self
            .storage_client
            .storage_account_client()
            .http_client()
            .execute_request_check_status(request.0, http::status::StatusCode::ACCEPTED)
            .await?;

        Ok((&response).try_into()?)
    }
}
use maud::Markup; fn layout(content: Markup) -> String { let template = html! { head { link rel="stylesheet" type="text/css" href="/static/main.css" / meta name="viewport" content="width=device-width, initial-scale=1" / } body { (content) } }; template.into_string() } pub fn index(presses: i32, user_presses: i32) -> String { layout(html! { p { "The button was pressed " (presses) " times" } p { "You pressed " (user_presses) " times" } form method="post" action="/press" { button type="submit" "Press" } }) }
use super::*;

use jsonwebtoken::DecodingKey;

/// Conversion of an RSA public key into a `jsonwebtoken` [`DecodingKey`] for
/// JWT signature verification.
pub trait ToDecodingKey {
    /// Borrows `self` as a `DecodingKey`; the returned key's lifetime is tied
    /// to the source key material.
    fn to_decoding_key(&'_ self) -> DecodingKey<'_>;
}

impl ToDecodingKey for RSAPublicKey {
    fn to_decoding_key(&'_ self) -> DecodingKey<'_> {
        // `n` (modulus) and `e` (exponent) carry `.base64` string fields —
        // presumably base64url-encoded as `from_rsa_components` expects;
        // confirm against the `RSAPublicKey` definition.
        DecodingKey::from_rsa_components(&self.n.base64, &self.e.base64)
    }
}
//! JSONRPC types

use failure::{format_err, Error};
use serde::{
    de::{DeserializeOwned, Error as DeError},
    Deserialize, Deserializer, Serialize, Serializer,
};
use std::fmt::{self, Display};
use tendermint::Address;

/// JSONRPC requests
pub trait Request {
    /// Response type for this command
    type Response: Response;

    /// Perform this request against the given RPC endpoint
    fn perform(&self, rpc_addr: &Address) -> Result<Self::Response, Error> {
        // Only TCP endpoints are supported; a UNIX-socket address aborts.
        let (host, port) = match rpc_addr {
            Address::Tcp { host, port, .. } => (host, *port),
            Address::Unix { .. } => panic!("UNIX sockets presently unsupported"),
        };

        // TODO(tarcieri): persistent clients
        // A fresh connection is opened per request for now.
        let http = gaunt::Connection::new(host, port, &Default::default())
            .map_err(|e| format_err!("error connecting to RPC service: {}", e))?;

        let response = http
            .get(self.path(), &self.body())
            .map_err(|e| format_err!("RPC HTTP error: {}", e))?
            .into_vec();

        Self::Response::from_json(&String::from_utf8(response)?)
    }

    /// Path for this request
    fn path(&self) -> gaunt::Path;

    /// HTTP request body for this request
    fn body(&self) -> gaunt::request::Body {
        // Default: empty body; requests carrying parameters override this.
        gaunt::request::Body::from(b"".as_ref())
    }
}

/// JSONRPC responses
pub trait Response: Serialize + DeserializeOwned + Sized {
    /// Parse a JSONRPC response from a JSON string
    fn from_json(response: &str) -> Result<Self, Error> {
        // The payload arrives wrapped in the standard JSONRPC envelope;
        // only the `result` field is surfaced to callers.
        let wrapper: ResponseWrapper<Self> =
            serde_json::from_str(response).map_err(|e| format_err!("error parsing JSON: {}", e))?;

        // TODO(tarcieri): check JSONRPC version/ID?
        Ok(wrapper.result)
    }
}

/// Wrapper for all JSONRPC responses
#[derive(Debug, Deserialize, Serialize)]
pub struct ResponseWrapper<R> {
    /// JSONRPC version
    pub jsonrpc: Version,

    /// ID
    pub id: Id,

    /// Result
    pub result: R,
}

/// JSONRPC version
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct Version(String);

impl Display for Version {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

/// JSONRPC ID
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct Id(String);

impl AsRef<str> for Id {
    fn as_ref(&self) -> &str {
        self.0.as_ref()
    }
}

/// Serialize a u64 value as a JSON string
#[allow(clippy::trivially_copy_pass_by_ref)]
pub(crate) fn serialize_u64_string<S>(value: &u64, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    format!("{}", value).serialize(serializer)
}

/// Deserialize a `u64` from a string containing a nanosecond count
pub(crate) fn deserialize_u64_string<'de, D>(deserializer: D) -> Result<u64, D::Error>
where
    D: Deserializer<'de>,
{
    String::deserialize(deserializer)?
        .parse::<u64>()
        .map_err(|e| D::Error::custom(format!("{}", e)))
}
// Reference: https://leetcode.com/problems/single-number-iii/discuss/750622/Python-4-Lines-O(n)-time-O(1)-space-explained
/// Given a list in which exactly two values appear once and every other value
/// appears exactly twice, returns those two unique values in O(n) time and
/// O(1) extra space.
///
/// Output order matches the original implementation: the first element is the
/// unique value whose bit at the lowest differing position is set.
pub fn single_number(nums: Vec<i32>) -> Vec<i32> {
    // XOR of everything cancels the pairs, leaving a ^ b.
    let xor_all = nums.iter().fold(0, |acc, x| acc ^ *x);
    // Isolate the lowest set bit of a ^ b — a bit where the two unique values
    // differ. `wrapping_neg` replaces the original `(sum & (sum - 1)) ^ sum`,
    // whose `sum - 1` overflows in debug builds when `xor_all == i32::MIN`.
    let low_bit = xor_all & xor_all.wrapping_neg();
    // XOR only the elements with that bit set: pairs cancel again, leaving
    // exactly one of the two unique values.
    let first = nums
        .iter()
        .filter(|x| *x & low_bit != 0)
        .fold(0, |acc, x| acc ^ *x);
    vec![first, xor_all ^ first]
}
use rand::Rng;

/// Generates random strings matching a regex.
///
/// Usage: `prog <pattern> [count]` — compiles `pattern` with `rand_regex`
/// and prints `count` (default 1) random matches, one per line.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let pattern = std::env::args().nth(1).expect("give me a regex pattern");
    // Optional second arg: number of samples; silently defaults to 1 when
    // absent or unparseable.
    let n = std::env::args()
        .nth(2)
        .and_then(|arg| arg.parse().ok())
        .unwrap_or(1);
    // The `1` is `rand_regex`'s repeat bound for unbounded quantifiers —
    // see the `Regex::compile` docs to confirm the exact semantics.
    let pattern = rand_regex::Regex::compile(&pattern, 1)?;
    for result in rand::thread_rng().sample_iter::<String, _>(pattern).take(n) {
        println!("{}", result);
    }
    Ok(())
}
use super::super::atom::{
    btn::{self, Btn},
    common::Common,
    dropdown::{self, Dropdown},
    fa,
    slider::{self, Slider},
    text::Text,
};
use super::super::organism::{
    popup_color_pallet::{self, PopupColorPallet},
    room_modeless::RoomModeless,
};
use super::ShowingModal;
use crate::arena::{block, BlockMut};
use isaribi::{
    style,
    styled::{Style, Styled},
};
use kagura::prelude::*;
use nusa::prelude::*;

// Editor tab for a "boxblock" (3D box object): name/display-name inputs,
// shape selector, per-axis size sliders, color picker, and texture chooser.
// All edits are reported to the parent via the `On` event enum; this
// component holds no authoritative state of its own.

pub struct Props {
    // Handle to the boxblock being edited. NOTE(review): rendered via
    // `.map(...)` below, so this appears to be a fallible/lockable handle
    // (e.g. `BlockMut`-like) rather than plain data — confirm.
    pub boxblock: block::boxblock::Block,
}

pub enum Msg {
    // No-op used to swallow slider events we don't care about.
    NoOp,
    // Forward an `On` event to the parent component.
    Sub(On),
}

// Events emitted to the parent; the parent performs the actual mutation.
pub enum On {
    OpenModal(ShowingModal),
    SetName(String),
    SetDisplayName0(String),
    SetDisplayName1(String),
    SetColor(crate::libs::color::Pallet),
    SetShape(block::boxblock::Shape),
    // Full [x, y, z] size triple; sliders send the whole array with one
    // component replaced.
    SetSize([f64; 3]),
}

pub struct Tab0 {
    boxblock: block::boxblock::Block,
    // Unique DOM ids for the labeled inputs (generated by `ElementId!`).
    element_id: ElementId,
}

ElementId! {
    input_boxblock_name,
    input_boxblock_display_name
}

impl Component for Tab0 {
    type Props = Props;
    type Msg = Msg;
    type Event = On;
}

impl HtmlComponent for Tab0 {}

impl Constructor for Tab0 {
    fn constructor(props: Props) -> Self {
        Self {
            boxblock: props.boxblock,
            element_id: ElementId::new(),
        }
    }
}

impl Update for Tab0 {
    // Refresh the handle whenever new props arrive.
    fn on_load(mut self: Pin<&mut Self>, props: Self::Props) -> Cmd<Self> {
        self.boxblock = props.boxblock;
        Cmd::none()
    }

    // This component only relays events upward; no local state transitions.
    fn update(self: Pin<&mut Self>, msg: Self::Msg) -> Cmd<Self> {
        match msg {
            Msg::NoOp => Cmd::none(),
            Msg::Sub(event) => Cmd::submit(event),
        }
    }
}

impl Render<Html> for Tab0 {
    type Children = ();
    fn render(&self, _: Self::Children) -> Html {
        Self::styled(Html::div(
            Attributes::new()
                .class(RoomModeless::class("common-base"))
                .class("pure-form"),
            Events::new(),
            vec![
                // If the block handle cannot be read, render nothing instead
                // of panicking.
                self.boxblock
                    .map(|data| self.render_header(data))
                    .unwrap_or(Common::none()),
                self.boxblock
                    .map(|data| self.render_main(data))
                    .unwrap_or(Common::none()),
            ],
        ))
    }
}

impl Tab0 {
    // Header row: icon + name input, then the two display-name inputs.
    fn render_header(&self, boxblock: &block::Boxblock) -> Html {
        Html::div(
            Attributes::new().class(RoomModeless::class("common-header")),
            Events::new(),
            vec![
                Html::label(
                    Attributes::new()
                        .class(RoomModeless::class("common-label"))
                        .string("for", &self.element_id.input_boxblock_name),
                    Events::new(),
                    vec![fa::fas_i("fa-cube")],
                ),
                Html::input(
                    Attributes::new()
                        .id(&self.element_id.input_boxblock_name)
                        .value(boxblock.name()),
                    Events::new().on_input(self, |name| Msg::Sub(On::SetName(name))),
                    vec![],
                ),
                // "表示名" = "display name"
                Html::label(
                    Attributes::new()
                        .class(RoomModeless::class("common-label"))
                        .string("for", &self.element_id.input_boxblock_display_name),
                    Events::new(),
                    vec![Html::text("表示名")],
                ),
                // Secondary display name (`display_name().1`) comes first in
                // the layout; primary (`.0`) follows below.
                Html::input(
                    Attributes::new().value(&boxblock.display_name().1),
                    Events::new().on_input(self, |dn1| Msg::Sub(On::SetDisplayName1(dn1))),
                    vec![],
                ),
                // Empty span — presumably a grid-layout spacer; confirm
                // against the shared RoomModeless styles.
                Text::span(""),
                Html::input(
                    Attributes::new()
                        .id(&self.element_id.input_boxblock_display_name)
                        .value(&boxblock.display_name().0),
                    Events::new().on_input(self, |dn0| Msg::Sub(On::SetDisplayName0(dn0))),
                    vec![],
                ),
            ],
        )
    }

    // Main body: shape dropdown + XYZ size sliders on the left, color picker
    // and texture selector on the right.
    fn render_main(&self, boxblock: &block::Boxblock) -> Html {
        Html::div(
            Attributes::new().class(Self::class("main")),
            Events::new(),
            vec![
                Html::div(
                    Attributes::new().class(Common::keyvalue()),
                    Events::new(),
                    vec![
                        Html::div(
                            Attributes::new()
                                .class(Common::banner())
                                .class(Self::class("dropdown")),
                            Events::new(),
                            vec![self.render_main_shape(boxblock)],
                        ),
                        self.render_main_x(boxblock),
                        self.render_main_y(boxblock),
                        self.render_main_z(boxblock),
                    ],
                ),
                Html::div(
                    Attributes::new().class(Common::keyvalue()),
                    Events::new(),
                    vec![
                        // "色" = "color"
                        Text::span("色"),
                        PopupColorPallet::empty(
                            self,
                            None,
                            popup_color_pallet::Props {
                                direction: popup_color_pallet::Direction::Bottom,
                                default_selected: boxblock.color().clone(),
                            },
                            Sub::map(|sub| match sub {
                                popup_color_pallet::On::SelectColor(color) => {
                                    Msg::Sub(On::SetColor(color))
                                }
                            }),
                        ),
                        // "テクスチャ" = "texture"
                        Text::span("テクスチャ"),
                        self.render_main_textures(boxblock),
                    ],
                ),
            ],
        )
    }

    // Shape dropdown: toggle shows the current shape, menu lists all four.
    // Labels: 立方体 = cube, 斜面 = slope, 球体 = sphere, 円柱 = cylinder.
    fn render_main_shape(&self, boxblock: &block::Boxblock) -> Html {
        Dropdown::new(
            self,
            None,
            dropdown::Props {
                direction: dropdown::Direction::Bottom,
                toggle_type: dropdown::ToggleType::Click,
                variant: btn::Variant::DarkLikeMenu,
            },
            Sub::none(),
            (
                // Toggle label: the currently selected shape.
                vec![match boxblock.shape() {
                    block::boxblock::Shape::Cube => Html::text("立方体"),
                    block::boxblock::Shape::Slope => Html::text("斜面"),
                    block::boxblock::Shape::Sphere => Html::text("球体"),
                    block::boxblock::Shape::Cylinder => Html::text("円柱"),
                }],
                // Menu entries, one per shape variant.
                vec![
                    Btn::menu(
                        Attributes::new(),
                        Events::new().on_click(self, |_| {
                            Msg::Sub(On::SetShape(block::boxblock::Shape::Cube))
                        }),
                        vec![Html::text("立方体")],
                    ),
                    Btn::menu(
                        Attributes::new(),
                        Events::new().on_click(self, |_| {
                            Msg::Sub(On::SetShape(block::boxblock::Shape::Slope))
                        }),
                        vec![Html::text("斜面")],
                    ),
                    Btn::menu(
                        Attributes::new(),
                        Events::new().on_click(self, |_| {
                            Msg::Sub(On::SetShape(block::boxblock::Shape::Sphere))
                        }),
                        vec![Html::text("球体")],
                    ),
                    Btn::menu(
                        Attributes::new(),
                        Events::new().on_click(self, |_| {
                            Msg::Sub(On::SetShape(block::boxblock::Shape::Cylinder))
                        }),
                        vec![Html::text("円柱")],
                    ),
                ],
            ),
        )
    }

    // Size slider for the X axis (0.1..=10.0, step 0.1). "X幅" = "X width".
    // The closure captures a snapshot of the full size triple so it can emit
    // a complete [x, y, z] with only this component changed.
    fn render_main_x(&self, boxblock: &block::Boxblock) -> Html {
        Html::fragment(vec![
            Text::span("X幅"),
            Slider::new(
                self,
                None,
                slider::Position::Linear {
                    min: 0.1,
                    max: 10.0,
                    val: boxblock.size()[0],
                    step: 0.1,
                },
                Sub::map({
                    let size = boxblock.size().clone();
                    move |sub| match sub {
                        slider::On::Input(x) => {
                            let mut size = size.clone();
                            size[0] = x;
                            Msg::Sub(On::SetSize(size))
                        }
                        _ => Msg::NoOp,
                    }
                }),
                slider::Props {
                    range_is_editable: false,
                    theme: slider::Theme::Light,
                },
            ),
        ])
    }

    // Size slider for the Y axis; mirrors `render_main_x`. "Y幅" = "Y width".
    fn render_main_y(&self, boxblock: &block::Boxblock) -> Html {
        Html::fragment(vec![
            Text::span("Y幅"),
            Slider::new(
                self,
                None,
                slider::Position::Linear {
                    min: 0.1,
                    max: 10.0,
                    val: boxblock.size()[1],
                    step: 0.1,
                },
                Sub::map({
                    let size = boxblock.size().clone();
                    move |sub| match sub {
                        slider::On::Input(y) => {
                            let mut size = size.clone();
                            size[1] = y;
                            Msg::Sub(On::SetSize(size))
                        }
                        _ => Msg::NoOp,
                    }
                }),
                slider::Props {
                    range_is_editable: false,
                    theme: slider::Theme::Light,
                },
            ),
        ])
    }

    // Size slider for the Z axis; mirrors `render_main_x`. "Z幅" = "Z width".
    fn render_main_z(&self, boxblock: &block::Boxblock) -> Html {
        Html::fragment(vec![
            Text::span("Z幅"),
            Slider::new(
                self,
                None,
                slider::Position::Linear {
                    min: 0.1,
                    max: 10.0,
                    val: boxblock.size()[2],
                    step: 0.1,
                },
                Sub::map({
                    let size = boxblock.size().clone();
                    move |sub| match sub {
                        slider::On::Input(z) => {
                            let mut size = size.clone();
                            size[2] = z;
                            Msg::Sub(On::SetSize(size))
                        }
                        _ => Msg::NoOp,
                    }
                }),
                slider::Props {
                    range_is_editable: false,
                    theme: slider::Theme::Light,
                },
            ),
        ])
    }

    // Texture cell: if a texture is set (and readable), show it as a
    // clickable image; otherwise show a "select texture" button. Both open
    // the texture-selection modal. "テクスチャを選択" = "select a texture".
    fn render_main_textures(&self, boxblock: &block::Boxblock) -> Html {
        boxblock
            .texture()
            .as_ref()
            .map(|texture| {
                texture.map(|texture| {
                    Html::img(
                        Attributes::new()
                            .draggable("false")
                            .src(texture.data().url().to_string())
                            .class(Common::bg_transparent()),
                        Events::new().on_click(self, |_| {
                            Msg::Sub(On::OpenModal(ShowingModal::SelectBlockTexture))
                        }),
                        vec![],
                    )
                })
            })
            // Collapses both "no texture" and "texture unreadable" to the
            // fallback button.
            .unwrap_or(None)
            .unwrap_or_else(|| {
                Btn::secondary(
                    Attributes::new(),
                    Events::new().on_click(self, |_| {
                        Msg::Sub(On::OpenModal(ShowingModal::SelectBlockTexture))
                    }),
                    vec![Html::text("テクスチャを選択")],
                )
            })
    }
}

impl Styled for Tab0 {
    fn style() -> Style {
        style! {
            // Let the dropdown menu escape the banner's clipping.
            ".dropdown" {
                "overflow": "visible !important";
            }
            // Responsive two-column grid for the main editor body.
            ".main" {
                "display": "grid";
                "grid-template-columns": "repeat(auto-fit, minmax(20rem, 1fr))";
                "align-items": "start";
                "padding-left": ".65rem";
                "padding-right": ".65rem";
                "column-gap": ".65rem";
                "overflow-y": "scroll";
            }
        }
    }
}
extern crate rand; use std::collections::HashMap; use rand::prelude::*; #[derive(Debug)] enum Tree { Terminal(String), // atomic word NonTerminal(String, Vec<Tree>) // expandable term } // expand a term `k` according to the rules in `g` into a `Tree` fn expand_tree(g: &HashMap<&str,Vec<&str>>, k: &str) -> Tree { let name = k.to_string(); match g.get(k) { Some(rhs) => Tree::NonTerminal(name, thread_rng().choose(&rhs) .unwrap() .split_whitespace() .map(|word| expand_tree(g, word)) .collect()), None => Tree::Terminal(name) } } // expand a term `k` according to the rules in `g`, pushing atomic words // into `out` fn expand(g: &HashMap<&str,Vec<&str>>, k: &str, out: &mut Vec<String>) { match g.get(k) { Some(rhs) => thread_rng().choose(&rhs) .unwrap() .split_whitespace() .for_each(|word| expand(g, word, out)), None => out.push(k.to_string()) } } fn main() { let mut grammar = HashMap::new(); grammar.insert("S" , vec!["NP VP","S and S"]); grammar.insert("NP" , vec!["Art N","Name"]); grammar.insert("VP" , vec!["V NP"]); grammar.insert("Art" , vec!["the","a","every","some"]); grammar.insert("N" , vec!["man","ball","woman","table","dog","cat","wombat"]); grammar.insert("V" , vec!["hit","took","saw","liked","worshiped","remembered"]); grammar.insert("Name", vec!["Alice","Bob","Carlos","Dan","Eve"]); let mut output = Vec::new(); expand(&grammar, "S", &mut output); output.iter() .for_each(|word| print!("{} ", word)); println!(); println!(); println!("{:?}",expand_tree(&grammar, "S")); }