text
stringlengths
8
4.13M
// ===============================================================================
// Authors: AFRL/RQQA
// Organization: Air Force Research Laboratory, Aerospace Systems Directorate, Power and Control Division
//
// Copyright (c) 2017 Government of the United State of America, as represented by
// the Secretary of the Air Force. No copyright is claimed in the United States under
// Title 17, U.S. Code. All Other Rights Reserved.
// ===============================================================================

// This file was auto-created by LmcpGen. Modifications will be overwritten.

// NOTE: `SrcLoc` is presumably consumed by the `get!`/`error!` macros — verify
// against the avtas::lmcp macro definitions before removing it.
use avtas::lmcp::{Error, ErrorType, Lmcp, LmcpSubscription, SrcLoc, Struct, StructInfo};

use std::fmt::Debug;

/// LMCP message `afrl.impact.TaskTimingPair`.
///
/// Plain-old-data record pairing a vehicle with timing estimates between an
/// initial task and a destination task. `#[repr(C)]` keeps a fixed field
/// layout; serialization order below matches field declaration order.
#[derive(Clone, Copy, Debug, Default)]
#[repr(C)]
pub struct TaskTimingPair {
    pub vehicle_id: i64,
    pub initial_task_id: i64,
    // Fraction of the initial task completed when timing was computed.
    // NOTE(review): assumed to be a percentage in [0, 100] from the name — confirm
    // against the LMCP message definition.
    pub initial_task_percentage: f32,
    pub destination_task_id: i64,
    pub time_to_go: i64,
}

impl PartialEq for TaskTimingPair {
    // Field-wise equality; `&a == &b` compares the pointed-to values, not the
    // references. The leading `true &&` is a generator artifact that lets each
    // field comparison be emitted uniformly.
    fn eq(&self, _other: &TaskTimingPair) -> bool {
        true
            && &self.vehicle_id == &_other.vehicle_id
            && &self.initial_task_id == &_other.initial_task_id
            && &self.initial_task_percentage == &_other.initial_task_percentage
            && &self.destination_task_id == &_other.destination_task_id
            && &self.time_to_go == &_other.time_to_go
    }
}

impl LmcpSubscription for TaskTimingPair {
    /// Fully-qualified LMCP subscription channel name for this message type.
    fn subscription() -> &'static str {
        "afrl.impact.TaskTimingPair"
    }
}

impl Struct for TaskTimingPair {
    /// LMCP type identity for this struct (series/version/type discriminator),
    /// written as a header before the payload and checked on deserialization.
    fn struct_info() -> StructInfo {
        StructInfo {
            exist: 1,
            series: 5281966179208134656u64,
            version: 14,
            struct_ty: 11,
        }
    }
}

impl Lmcp for TaskTimingPair {
    /// Serializes the StructInfo header followed by every field, in
    /// declaration order, returning the total number of bytes written.
    fn ser(&self, buf: &mut [u8]) -> Result<usize, Error> {
        let mut pos = 0;
        {
            let x = Self::struct_info().ser(buf)?;
            pos += x;
        }
        {
            let r = get!(buf.get_mut(pos..));
            let writeb: usize = self.vehicle_id.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos..));
            let writeb: usize = self.initial_task_id.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos..));
            let writeb: usize = self.initial_task_percentage.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos..));
            let writeb: usize = self.destination_task_id.ser(r)?;
            pos += writeb;
        }
        {
            let r = get!(buf.get_mut(pos..));
            let writeb: usize = self.time_to_go.ser(r)?;
            pos += writeb;
        }
        Ok(pos)
    }

    /// Deserializes a message previously written by `ser`; validates the
    /// StructInfo header before reading the fields in the same order.
    fn deser(buf: &[u8]) -> Result<(TaskTimingPair, usize), Error> {
        let mut pos = 0;
        let (si, u) = StructInfo::deser(buf)?;
        pos += u;
        if si == TaskTimingPair::struct_info() {
            let mut out: TaskTimingPair = Default::default();
            {
                let r = get!(buf.get(pos..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.vehicle_id = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.initial_task_id = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos..));
                let (x, readb): (f32, usize) = Lmcp::deser(r)?;
                out.initial_task_percentage = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.destination_task_id = x;
                pos += readb;
            }
            {
                let r = get!(buf.get(pos..));
                let (x, readb): (i64, usize) = Lmcp::deser(r)?;
                out.time_to_go = x;
                pos += readb;
            }
            Ok((out, pos))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }

    /// Serialized size in bytes: a fixed 15-byte overhead (presumably the
    /// StructInfo header — confirm against StructInfo::size) plus the size of
    /// every field.
    fn size(&self) -> usize {
        let mut size = 15;
        size += self.vehicle_id.size();
        size += self.initial_task_id.size();
        size += self.initial_task_percentage.size();
        size += self.destination_task_id.size();
        size += self.time_to_go.size();
        size
    }
}

/// Accessor trait allowing `TaskTimingPair` (and potential LMCP subtypes) to be
/// used as a trait object. The `as_*` downcasts default to `None` so only the
/// concrete type opts in.
pub trait TaskTimingPairT: Debug + Send {
    fn as_afrl_impact_task_timing_pair(&self) -> Option<&TaskTimingPair> {
        None
    }
    fn as_mut_afrl_impact_task_timing_pair(&mut self) -> Option<&mut TaskTimingPair> {
        None
    }
    fn vehicle_id(&self) -> i64;
    fn vehicle_id_mut(&mut self) -> &mut i64;
    fn initial_task_id(&self) -> i64;
    fn initial_task_id_mut(&mut self) -> &mut i64;
    fn initial_task_percentage(&self) -> f32;
    fn initial_task_percentage_mut(&mut self) -> &mut f32;
    fn destination_task_id(&self) -> i64;
    fn destination_task_id_mut(&mut self) -> &mut i64;
    fn time_to_go(&self) -> i64;
    fn time_to_go_mut(&mut self) -> &mut i64;
}

// Clone a boxed trait object by downcasting to the one known concrete type.
// `unreachable!()` is sound only while `TaskTimingPair` is the sole implementor.
impl Clone for Box<TaskTimingPairT> {
    fn clone(&self) -> Box<TaskTimingPairT> {
        if let Some(x) = TaskTimingPairT::as_afrl_impact_task_timing_pair(self.as_ref()) {
            Box::new(x.clone())
        } else {
            unreachable!()
        }
    }
}

impl Default for Box<TaskTimingPairT> {
    fn default() -> Box<TaskTimingPairT> {
        Box::new(TaskTimingPair::default())
    }
}

// Boxed trait objects are equal only when both downcast to the concrete type
// and those values compare equal.
impl PartialEq for Box<TaskTimingPairT> {
    fn eq(&self, other: &Box<TaskTimingPairT>) -> bool {
        if let (Some(x), Some(y)) = (
            TaskTimingPairT::as_afrl_impact_task_timing_pair(self.as_ref()),
            TaskTimingPairT::as_afrl_impact_task_timing_pair(other.as_ref()),
        ) {
            x == y
        } else {
            false
        }
    }
}

impl Lmcp for Box<TaskTimingPairT> {
    fn ser(&self, buf: &mut [u8]) -> Result<usize, Error> {
        if let Some(x) = TaskTimingPairT::as_afrl_impact_task_timing_pair(self.as_ref()) {
            x.ser(buf)
        } else {
            unreachable!()
        }
    }

    // Peeks at the StructInfo header to select the concrete type, then
    // delegates; `TaskTimingPair::deser` re-reads the header itself.
    fn deser(buf: &[u8]) -> Result<(Box<TaskTimingPairT>, usize), Error> {
        let (si, _) = StructInfo::deser(buf)?;
        if si == TaskTimingPair::struct_info() {
            let (x, readb) = TaskTimingPair::deser(buf)?;
            Ok((Box::new(x), readb))
        } else {
            Err(error!(ErrorType::InvalidStructInfo))
        }
    }

    fn size(&self) -> usize {
        if let Some(x) = TaskTimingPairT::as_afrl_impact_task_timing_pair(self.as_ref()) {
            x.size()
        } else {
            unreachable!()
        }
    }
}

impl TaskTimingPairT for TaskTimingPair {
    fn as_afrl_impact_task_timing_pair(&self) -> Option<&TaskTimingPair> {
        Some(self)
    }
    fn as_mut_afrl_impact_task_timing_pair(&mut self) -> Option<&mut TaskTimingPair> {
        Some(self)
    }
    fn vehicle_id(&self) -> i64 {
        self.vehicle_id
    }
    fn vehicle_id_mut(&mut self) -> &mut i64 {
        &mut self.vehicle_id
    }
    fn initial_task_id(&self) -> i64 {
        self.initial_task_id
    }
    fn initial_task_id_mut(&mut self) -> &mut i64 {
        &mut self.initial_task_id
    }
    fn initial_task_percentage(&self) -> f32 {
        self.initial_task_percentage
    }
    fn initial_task_percentage_mut(&mut self) -> &mut f32 {
        &mut self.initial_task_percentage
    }
    fn destination_task_id(&self) -> i64 {
        self.destination_task_id
    }
    fn destination_task_id_mut(&mut self) -> &mut i64 {
        &mut self.destination_task_id
    }
    fn time_to_go(&self) -> i64 {
        self.time_to_go
    }
    fn time_to_go_mut(&mut self) -> &mut i64 {
        &mut self.time_to_go
    }
}

#[cfg(test)]
pub mod tests {
    use super::*;
    use quickcheck::*;

    // Random instances for property testing: every field drawn independently.
    impl Arbitrary for TaskTimingPair {
        fn arbitrary<G: Gen>(_g: &mut G) -> TaskTimingPair {
            TaskTimingPair {
                vehicle_id: Arbitrary::arbitrary(_g),
                initial_task_id: Arbitrary::arbitrary(_g),
                initial_task_percentage: Arbitrary::arbitrary(_g),
                destination_task_id: Arbitrary::arbitrary(_g),
                time_to_go: Arbitrary::arbitrary(_g),
            }
        }
    }

    quickcheck! {
        // `ser` must write exactly `size()` bytes into an exactly-sized buffer.
        fn serializes(x: TaskTimingPair) -> Result<TestResult, Error> {
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            Ok(TestResult::from_bool(sx == x.size()))
        }

        // deser(ser(x)) must reproduce x and consume the same byte count.
        fn roundtrips(x: TaskTimingPair) -> Result<TestResult, Error> {
            let mut buf: Vec<u8> = vec![0; x.size()];
            let sx = x.ser(&mut buf)?;
            let (y, sy) = TaskTimingPair::deser(&buf)?;
            Ok(TestResult::from_bool(sx == sy && x == y))
        }
    }
}
#![allow(missing_docs)] //! Factory methods for testing use crate::{ BooleanExpression, ComparisonFunction, DatabaseName, Expression, LogicalFunction, NnSqlValue, SqlValue, UnaryOperator, }; use rand::Rng; impl DatabaseName { /// randomly generate a database name pub fn random() -> Self { Self::new(random_id()).unwrap() } } impl Expression { pub fn factory_null() -> Self { Self::ConstantVariant(SqlValue::Null) } pub fn factory_integer(integer: i32) -> Self { Self::ConstantVariant(SqlValue::factory_integer(integer)) } pub fn factory_uni_op(unary_operator: UnaryOperator, expression: Expression) -> Self { Self::UnaryOperatorVariant(unary_operator, Box::new(expression)) } pub fn factory_eq(left: Expression, right: Expression) -> Self { Self::BooleanExpressionVariant(BooleanExpression::factory_eq(left, right)) } pub fn factory_and(left: BooleanExpression, right: BooleanExpression) -> Self { Self::BooleanExpressionVariant(BooleanExpression::LogicalFunctionVariant( LogicalFunction::AndVariant { left: Box::new(left), right: Box::new(right), }, )) } } impl BooleanExpression { pub fn factory_eq(left: Expression, right: Expression) -> Self { BooleanExpression::ComparisonFunctionVariant(ComparisonFunction::EqualVariant { left: Box::new(left), right: Box::new(right), }) } } impl SqlValue { pub fn factory_integer(integer: i32) -> Self { Self::NotNull(NnSqlValue::factory_integer(integer)) } pub fn factory_bool(bool_: bool) -> Self { Self::NotNull(NnSqlValue::factory_bool(bool_)) } } impl NnSqlValue { pub fn factory_integer(integer: i32) -> Self { Self::Integer(integer) } pub fn factory_bool(bool_: bool) -> Self { Self::Boolean(bool_) } } pub fn random_id() -> String { rand::thread_rng() .sample_iter(&rand::distributions::Alphanumeric) .map(char::from) .filter(|c| ('a'..='z').contains(c)) .take(10) .collect::<String>() }
/// Backing storage word for the bit vector.
type Block = u32;

/// Number of bits stored in each `Block`.
const BITS_PER_BLOCK: usize = 32;

/// A fixed-size vector of bits, packed into `u32` blocks.
///
/// All bits start as `false`. Accessing an index `>= size` panics.
pub struct BitVec {
    blocks: Vec<Block>,
    size: usize,
}

/// Position of a single bit: the block it lives in and the offset within it.
struct Index {
    block: usize,
    bit: usize,
}

impl BitVec {
    /// Creates a bit vector holding `n` bits, all initially `false`.
    pub fn new(n: usize) -> BitVec {
        let nblocks = divceil(n, BITS_PER_BLOCK);
        BitVec {
            blocks: vec![0; nblocks],
            size: n,
        }
    }

    /// Sets bit `n` to `b`.
    ///
    /// # Panics
    ///
    /// Panics if `n >= size` (the capacity given to [`BitVec::new`]).
    pub fn set(&mut self, n: usize, b: bool) {
        let i = self.index(n);
        let br = &mut self.blocks[i.block];
        *br = setbit(*br, b, i.bit);
    }

    /// Returns the value of bit `n`.
    ///
    /// # Panics
    ///
    /// Panics if `n >= size` (the capacity given to [`BitVec::new`]).
    pub fn get(&self, n: usize) -> bool {
        let i = self.index(n);
        getbit(self.blocks[i.block], i.bit)
    }

    /// Maps a bit position to its (block, bit-offset) pair, validating range.
    fn index(&self, n: usize) -> Index {
        if n >= self.size {
            // Include the offending index and the size in the message so
            // failures are diagnosable without a debugger.
            panic!(
                "BitVec index {} out of range for size {}",
                n, self.size
            );
        }
        Index {
            block: n / BITS_PER_BLOCK,
            bit: n % BITS_PER_BLOCK,
        }
    }
}

/// Integer division rounding up. Written as quotient-plus-remainder-check
/// (rather than `(a + b - 1) / b`) so it cannot overflow for large `a`.
fn divceil(a: usize, b: usize) -> usize {
    if a % b == 0 {
        a / b
    } else {
        a / b + 1
    }
}

/// Returns `op` with the bit at `pos` forced to `bit`.
fn setbit(op: Block, bit: bool, pos: usize) -> Block {
    if bit {
        op | (1 << pos)
    } else {
        op & !(1 << pos)
    }
}

/// Returns whether the bit at `pos` in `op` is set.
fn getbit(op: Block, pos: usize) -> bool {
    op & (1 << pos) != 0
}
use crate::*;

use rodio::{decoder::Decoder, source::Source, Sink};

use std::{io::Cursor, sync::Arc, time::Duration};

// Length of the short anti-pop fade used by set_volume/pause/resume/stop.
const QUICK_FADE_DURATION_SECONDS: f32 = 0.2;

/// Handles playback of a [`Clip`] with support for pausing, resuming, volume adjustment.
///
/// Instances can be built using a [`ClipPlayerBuilder`].
///
/// # Example
///
/// ```no_run
/// # use riddle_audio::*; doctest::simple(|audio_system| {
/// let bytes = include_bytes!("../../example_assets/boop.wav");
/// let clip = Clip::load(&bytes[..], ClipFormat::Wav)?;
///
/// // Play the clip
/// let mut player = ClipPlayerBuilder::new(&audio_system).play(&clip)?;
/// player.set_volume(0.5);
/// # Ok(player) });
/// ```
pub struct ClipPlayer {
    audio: AudioSystem,
    clip: Clip,
    // Created lazily by `play`; `None` until playback has been started.
    // Shared via Arc so in-flight fades keep the sink alive.
    sink: Option<Arc<Sink>>,
    // The nominal volume — the target the player returns to after pause/resume.
    volume: f32,
}

impl ClipPlayer {
    pub(crate) fn new(audio: &AudioSystem, clip: &Clip, volume: f32) -> Self {
        Self {
            audio: audio.clone(),
            clip: clip.clone(),
            sink: None,
            volume,
        }
    }

    /// Creates the rodio sink and starts decoding/queueing the clip.
    ///
    /// When `paused` is true the sink is paused with volume 0 so a later
    /// `resume` can fade it back up to the nominal volume.
    fn play(&mut self, mode: PlayMode, paused: bool) -> Result<()> {
        let sink: Arc<Sink> = Sink::try_new(&self.audio.internal.stream_handle)
            .map_err(|_| AudioError::Playback("Error making rodio Sink"))?
            .into();

        if paused {
            sink.pause();
            sink.set_volume(0.0);
        } else {
            sink.set_volume(self.volume);
        }

        let source = Decoder::new(Cursor::new(self.clip.data.clone()))
            .map_err(|_| AudioError::ClipDecode)?;

        match mode {
            PlayMode::OneShot => sink.append(source),
            PlayMode::Loop => sink.append(source.repeat_infinite()),
        }

        self.sink = Some(sink);

        Ok(())
    }

    /// Fade the volume from the current volume to the target volume over time.
    ///
    /// Once the volume has been changed the nominal volume will be immediately set to the new
    /// goal volume, as that is the volume that the player will be set to if it gets paused
    /// and resumed.
    ///
    /// The observed volume will change over time as the `AudioSubsystem` progresses the fade.
    ///
    /// If another volume fade is started while one is in progress the existing one is replaced
    /// by the new one, starting from whatever the current volume is.
    ///
    /// Since [`ClipPlayer::set_volume`], [`ClipPlayer::stop`], [`ClipPlayer::pause`]
    /// and [`ClipPlayer::resume`] calls also trigger a fade to avoid popping,
    /// calling any of those methods will also replace any current fade.
    ///
    /// # Example
    ///
    /// ```no_run
    /// # use riddle_audio::*; doctest::simple(|audio_system| {
    /// # let bytes = include_bytes!("../../example_assets/boop.wav");
    /// # let clip = Clip::load(&bytes[..], ClipFormat::Wav)?;
    /// // The player starts with all volumes at 1.0
    /// let mut player = ClipPlayerBuilder::new(&audio_system).play(&clip)?;
    /// assert_eq!(1.0, player.get_nominal_volume());
    /// assert_eq!(1.0, player.get_observed_volume());
    ///
    /// // The nominal volume changes immediately, the observed volume hasn't changed
    /// player.fade_volume(0.0, std::time::Duration::from_secs(1));
    /// assert_eq!(0.0, player.get_nominal_volume());
    /// assert_eq!(1.0, player.get_observed_volume());
    ///
    /// // A few seconds later
    /// # doctest::pump_for_secs(audio_system, 2);
    /// // The fade has completed and the nominal and observed volumes agree again
    /// assert_eq!(0.0, player.get_nominal_volume());
    /// assert_eq!(0.0, player.get_observed_volume());
    /// # Ok(player) });
    /// ```
    pub fn fade_volume(&mut self, volume: f32, duration: Duration) {
        // Nominal volume updates immediately; the fade only affects the
        // observed (sink) volume over time.
        self.volume = volume;
        self.fade_volume_with_type(self.volume, duration, FadeType::AlterVolume);
    }

    /// Set the volume of playback immediately.
    ///
    /// This performs a very quick fade to the destination volume, to avoid popping
    /// audio artefacts.
    ///
    /// See the example in [`ClipPlayer::fade_volume`] for more details of how volume
    /// changes over time.
    pub fn set_volume(&mut self, volume: f32) {
        self.fade_volume(volume, Duration::from_secs_f32(QUICK_FADE_DURATION_SECONDS))
    }

    /// Get the nominal volume of the player, which may not match the volume the player is currently
    /// playing at this second.
    ///
    /// This is the volume last set via [`ClipPlayer::set_volume`] or [`ClipPlayer::fade_volume`].
    /// This is the volume the player will be at if it is paused and resumed.
    ///
    /// See the example in [`ClipPlayer::fade_volume`] for more details of how volume
    /// changes over time.
    pub fn get_nominal_volume(&self) -> f32 {
        self.volume
    }

    /// Get the observed volume of the player, which is always equal to exactly the volume of playback.
    ///
    /// This is the volume of playback at this moment in time, which could be either equal to the
    /// nominal volume, or another volume if there is a fade running or if the player has been
    /// paused (which causes an observed fade to 0 volume).
    ///
    /// See the example in [`ClipPlayer::fade_volume`] for more details of how volume
    /// changes over time.
    pub fn get_observed_volume(&self) -> f32 {
        // Before `play` has run there is no sink; report the nominal volume.
        self.sink
            .as_ref()
            .map(|sink| sink.volume())
            .unwrap_or(self.volume)
    }

    /// Pauses playback of the clip.
    ///
    /// This performs a very quick fade to zero volume, to avoid popping
    /// audio artefacts, and then pauses playback.
    ///
    /// The nominal volume of the player won't change, but the observed volume will drop to zero.
    ///
    /// # Example
    ///
    /// ```no_run
    /// # use riddle_audio::*; doctest::simple(|audio_system| {
    /// # let bytes = include_bytes!("../../example_assets/boop.wav");
    /// # let clip = Clip::load(&bytes[..], ClipFormat::Wav)?;
    /// // The player starts with all volumes at 1.0
    /// let mut player = ClipPlayerBuilder::new(&audio_system).play(&clip)?;
    /// assert_eq!(1.0, player.get_nominal_volume());
    /// assert_eq!(1.0, player.get_observed_volume());
    ///
    /// // Pausing doesn't change the nominal volume
    /// player.pause();
    /// assert_eq!(1.0, player.get_nominal_volume());
    /// assert_eq!(1.0, player.get_observed_volume());
    ///
    /// // A short moment later
    /// # doctest::pump_for_secs(audio_system, 1);
    /// // The pause has completed and the observed volume is now 0.0
    /// assert_eq!(1.0, player.get_nominal_volume());
    /// assert_eq!(0.0, player.get_observed_volume());
    /// # Ok(player) });
    /// ```
    pub fn pause(&mut self) {
        // FadeType::Pause presumably pauses the sink once the fade to 0.0
        // completes — confirm against Fade's implementation.
        self.fade_volume_with_type(
            0.0,
            Duration::from_secs_f32(QUICK_FADE_DURATION_SECONDS),
            FadeType::Pause,
        );
    }

    /// Resumes playback if paused.
    ///
    /// Immediately starts playback and performs a quick fade back up to the player's
    /// nominal volume.
    ///
    /// The nominal volume of the player won't change, but the observed volume will fade
    /// from 0 to the nominal value over time.
    ///
    /// # Example
    ///
    /// ```no_run
    /// # use riddle_audio::*; doctest::simple(|audio_system| {
    /// # let bytes = include_bytes!("../../example_assets/boop.wav");
    /// # let clip = Clip::load(&bytes[..], ClipFormat::Wav)?;
    /// // The paused player starts with an observed volume of 0.0
    /// let mut player = ClipPlayerBuilder::new(&audio_system).paused(&clip)?;
    /// assert_eq!(1.0, player.get_nominal_volume());
    /// assert_eq!(0.0, player.get_observed_volume());
    ///
    /// // Resuming doesn't change the nominal volume
    /// player.resume();
    /// assert_eq!(1.0, player.get_nominal_volume());
    /// assert_eq!(0.0, player.get_observed_volume());
    ///
    /// // A short moment later
    /// # doctest::pump_for_secs(audio_system, 1);
    /// // The resume has completed and the observed volume is now 1.0
    /// assert_eq!(1.0, player.get_nominal_volume());
    /// assert_eq!(1.0, player.get_observed_volume());
    /// # Ok(player) });
    /// ```
    pub fn resume(&mut self) {
        if let Some(sink) = &self.sink {
            // Only act if actually paused: avoids restarting an active fade.
            if sink.is_paused() {
                sink.play();
                self.fade_volume_with_type(
                    self.volume,
                    Duration::from_secs_f32(QUICK_FADE_DURATION_SECONDS),
                    FadeType::Resume,
                );
            }
        }
    }

    /// Returns whether the player is still consuming from its clip
    pub fn is_finished(&self) -> bool {
        // A player that never started is treated as finished.
        if let Some(sink) = &self.sink {
            sink.empty()
        } else {
            true
        }
    }

    /// Stops playback.
    ///
    /// This is equivalent to calling [`ClipPlayer::pause`] and then dropping the player
    /// after the fade is complete
    pub fn stop(mut self) {
        // The player is consumed here, but the registered fade holds its own
        // Arc clone of the sink (see fade_volume_with_type), so audio keeps
        // flowing until the fade-out finishes.
        self.pause();
    }

    /// Registers a fade on the audio system toward `volume`; no-op until
    /// `play` has created a sink.
    fn fade_volume_with_type(&mut self, volume: f32, duration: Duration, fade_type: FadeType) {
        if let Some(sink) = &self.sink {
            let fade = Fade::new(sink.clone(), volume, duration, fade_type);
            self.audio.register_fade(fade);
        }
    }
}

/// Enum describing what the player should do at the end of the clip
#[derive(Copy, Clone)]
pub enum PlayMode {
    /// Stop playing at the end of the clip
    OneShot,
    /// Return to the beginning of the clip and play it again
    Loop,
}

/// Builder for [`ClipPlayer`]
///
/// A builder instance may be used to construct multiple players.
///
/// # Example
///
/// ```no_run
/// # use riddle_audio::*; doctest::simple(|audio_system| {
/// let bytes = include_bytes!("../../example_assets/boop.wav");
/// let clip = Clip::load(&bytes[..], ClipFormat::Wav)?;
///
/// // Play the clip
/// let player = ClipPlayerBuilder::new(&audio_system)
///     .with_volume(0.5)
///     .with_mode(PlayMode::Loop)
///     .play(&clip)?;
/// # Ok(player) });
/// ```
pub struct ClipPlayerBuilder {
    mode: PlayMode,
    audio: AudioSystem,
    volume: f32,
}

impl ClipPlayerBuilder {
    /// Make a new builder.
    ///
    /// Defaults:
    ///
    /// * mode: [`PlayMode::OneShot`]
    /// * volume: 1.0.
    pub fn new(audio: &AudioSystem) -> Self {
        Self {
            mode: PlayMode::OneShot,
            audio: audio.clone(),
            volume: 1.0,
        }
    }

    /// Set the playback mode of the player. Defaults to [`PlayMode::OneShot`].
    pub fn with_mode(&mut self, mode: PlayMode) -> &mut Self {
        self.mode = mode;
        self
    }

    /// Set the playback volume of the player. Defaults to 1.0.
    pub fn with_volume(&mut self, volume: f32) -> &mut Self {
        self.volume = volume;
        self
    }

    /// Build the ClipPlayer, and start playing the clip immediately.
    pub fn play(&self, clip: &Clip) -> Result<ClipPlayer> {
        let mut player = ClipPlayer::new(&self.audio, clip, self.volume);
        player.play(self.mode, false)?;
        Ok(player)
    }

    /// Build the ClipPlayer in the paused state. [`ClipPlayer::resume`] will need
    /// to be called on the player to start playback.
    pub fn paused(&self, clip: &Clip) -> Result<ClipPlayer> {
        let mut player = ClipPlayer::new(&self.audio, clip, self.volume);
        player.play(self.mode, true)?;
        Ok(player)
    }
}
fn main() {
    // Cryptopals Set 1, Challenge 2: XOR two equal-length hex-decoded buffers.
    println!(
        "{:?}",
        fixed_xor(
            &hex::decode("1c0111001f010100061a024b53535009181c").unwrap(),
            &hex::decode("686974207468652062756c6c277320657965").unwrap()
        )
    );
}

/// XORs two byte slices element-wise, returning the combined bytes.
///
/// Because the slices are combined with `zip`, the output is as long as the
/// *shorter* input; a length mismatch would previously be truncated silently.
/// A debug assertion now flags that misuse during development while keeping
/// the release-mode behavior unchanged.
fn fixed_xor(one: &[u8], two: &[u8]) -> Vec<u8> {
    debug_assert_eq!(
        one.len(),
        two.len(),
        "fixed_xor expects equal-length inputs"
    );
    one.iter().zip(two).map(|(a, b)| a ^ b).collect()
}

#[test]
fn test_fixed_xor() {
    assert_eq!(
        fixed_xor(
            &hex::decode("1c0111001f010100061a024b53535009181c").unwrap(),
            &hex::decode("686974207468652062756c6c277320657965").unwrap()
        ),
        hex::decode("746865206b696420646f6e277420706c6179").unwrap()
    );
}
//! Route Component.
use super::YewRouterState;
use crate::matcher::Matcher;
use crate::router_component::render::Render;
use crate::router_component::router::Router;
use std::fmt::{Debug, Error as FmtError, Formatter};
use yew::{Children, Component, ComponentLink, Properties, ShouldRender};

/// A nested component used inside of [Router](../router/struct.Router.html) that can determine if a
/// sub-component can be rendered.
#[derive(Debug)]
pub struct Route<T: for<'de> YewRouterState<'de>> {
    // The component's only state is the props passed in by the parent Router.
    props: RouteProps<T>,
}

/// Properties for Route.
///
/// The path matcher must be specified.
///
/// If only a `render` is specified, it will display its contents if it returns `Some` after the
/// path matcher succeeds in matching the URL.
/// If only the `children` are specified, they will be rendered if the path matcher matches the URL.
/// If both the `render` and `children` are specified, they will only both render
/// (`render` elements above the `children` elements in the DOM)
/// if the `render` returns `Some`.
#[derive(Properties)]
pub struct RouteProps<T: for<'de> YewRouterState<'de>> {
    /// Matches the url and can extract sections as matches to be used by the `Render`.
    #[props(required)]
    pub matcher: Matcher,
    /// Given matches matched from the URL, conditionally render the elements specified within.
    pub render: Render<T>,
    /// Will be rendered if it contains anything provided the `PathMatcher` matches the URL.
    pub children: Children<Router<T>>,
}

// Manual Debug impl: `Children` does not expose its contents for formatting,
// so only its length is reported.
impl<T: for<'de> YewRouterState<'de>> Debug for RouteProps<T> {
    fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
        f.debug_struct("RouteProps")
            .field("matcher", &self.matcher)
            .field("render", &self.render)
            .field("children (length)", &self.children.len())
            .finish()
    }
}

impl<T: for<'de> YewRouterState<'de>> Component for Route<T> {
    // Route handles no messages of its own.
    type Message = ();
    type Properties = RouteProps<T>;

    fn create(props: Self::Properties, _link: ComponentLink<Self>) -> Self {
        Route { props }
    }

    // No messages are ever dispatched, so updates never trigger a re-render.
    fn update(&mut self, _msg: Self::Message) -> bool {
        false
    }

    // Always re-render when the parent passes new props (e.g. after a route
    // change); no prop-diffing is attempted.
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        self.props = props;
        true
    }
}
// Procedural macro exercising a grab-bag of syn 0.x `Synom` parser combinators.
// Requires a nightly toolchain (`proc_macro` feature gate).
#![feature(proc_macro)]
#![crate_type = "proc-macro"]

extern crate proc_macro;
#[macro_use]
extern crate syn;
#[macro_use]
extern crate quote;

use proc_macro::TokenStream;
use syn::{Ident, Type, Expr, WhereClause, TypeSlice, Path};
use syn::synom::Synom;

// Parsed form of the macro input. Only `id`, `ty` and `expr` feed the output;
// the remaining fields exist to exercise their parsers.
struct MiscSyntax {
    id: Ident,
    ty: Type,
    expr: Expr,
    where_clause: WhereClause,
    type_slice: TypeSlice,
    path: Path
}

impl Synom for MiscSyntax {
    // Grammar:
    //   where while <ident> : <type> >> <expr> ; <where-clause> ; <slice-type> ; <path>
    // The leading `where while` keywords are arbitrary fixed tokens the input
    // must begin with.
    named!(parse -> Self, do_parse!(
        keyword!(where) >>
        keyword!(while) >>
        id: syn!(Ident) >>
        punct!(:) >>
        ty: syn!(Type) >>
        punct!(>>) >>
        expr: syn!(Expr) >>
        punct!(;) >>
        where_clause: syn!(WhereClause) >>
        punct!(;) >>
        type_slice: syn!(TypeSlice) >>
        punct!(;) >>
        path: syn!(Path) >>
        (MiscSyntax { id, ty, expr, where_clause, type_slice, path })
    ));
}

/// Expands to a `let` binding of the parsed identifier/type/expression plus a
/// `println!` of the bound value.
///
/// NOTE(review): `where_clause`, `type_slice` and `path` are destructured but
/// unused (compiler warnings expected) — they are parsed purely to exercise
/// their `Synom` implementations.
#[proc_macro]
pub fn misc_syntax(input: TokenStream) -> TokenStream {
    let m: MiscSyntax = syn::parse(input).expect("expected Miscellaneous Syntax");
    let MiscSyntax { id, ty, expr, where_clause, type_slice, path } = m;
    (quote! {
        let #id: #ty = #expr;
        println!("variable = {}", #id);
    }).into()
}
// Copyright (c) 2016, <daggerbot@gmail.com> // This software is available under the terms of the zlib license. // See COPYING.md for more information. use winapi; use error::Result; use pixel_format::PixelFormat; use util::GetProvider; use window::Window; /// Windows-specific extensions for `PixelFormat`. pub trait PixelFormatExt { fn pfd (&self) -> Option<winapi::PIXELFORMATDESCRIPTOR>; } impl PixelFormatExt for PixelFormat { fn pfd (&self) -> Option<winapi::PIXELFORMATDESCRIPTOR> { self.provider().pfd() } } /// Windows-specific extensions for `Window`. pub trait WindowExt { fn hwnd (&self) -> Result<winapi::HWND>; } impl WindowExt for Window { fn hwnd (&self) -> Result<winapi::HWND> { self.provider().try_hwnd() } }
use core::{convert::TryFrom, num::NonZeroU32};

use hashbrown::HashMap;

use necsim_core::cogs::Habitat;

use crate::{
    cogs::habitat::{non_spatial::NonSpatialHabitat, spatially_implicit::SpatiallyImplicitHabitat},
    decomposition::Decomposition,
};

use super::EqualDecomposition;

// Exhaustively checks EqualDecomposition::area over all habitat sizes 1x1..8x8
// and all partition counts up to (area + 1): every rank in [0, num_indices)
// must be used, and location counts per rank must differ by at most one.
#[test]
fn test_equal_area_decomposition() {
    // Reused across iterations: rank -> number of locations mapped to it.
    let mut indices: HashMap<u32, usize> = HashMap::with_capacity(64);

    for width in 1..=8 {
        for height in 1..=8 {
            let habitat = NonSpatialHabitat::new((width, height), 1);

            // `width * height + 1` deliberately includes one partition count
            // larger than the number of locations, which must fail.
            for partition in 1..=(width * height + 1) {
                // Err still carries a (degraded) decomposition — both arms are
                // validated below.
                let (successful, decomposition) = match EqualDecomposition::area(
                    &habitat,
                    0,
                    NonZeroU32::new(partition).unwrap(),
                ) {
                    Ok(decomposition) => (true, decomposition),
                    Err(decomposition) => (false, decomposition),
                };

                indices.clear();

                // Tally how many locations each subdomain rank receives.
                for location in habitat.get_extent().iter() {
                    let index = decomposition.map_location_to_subdomain_rank(&location, &habitat);
                    *indices.entry(index).or_insert(0) += 1;
                }

                let assert_message = alloc::format!(
                    "{}x{} / {} => {:?} => {}@{:?}",
                    width,
                    height,
                    partition,
                    decomposition,
                    indices.len(),
                    indices,
                );

                let num_indices = u32::try_from(indices.len()).expect(&assert_message);

                // Check that the number of generated indices is less than
                // (unsuccessful) or equal (successful) to the partition
                if successful {
                    assert_eq!(num_indices, partition, "{}", &assert_message);
                } else {
                    assert!(num_indices > 0, "{}", assert_message);
                    assert!(num_indices < partition, "{}", assert_message);
                    // NOTE(review): in the failure case every location appears
                    // to get its own rank (ranks == area) — confirm against
                    // EqualDecomposition::area's fallback behavior.
                    assert!(
                        u64::from(num_indices) == (u64::from(width) * u64::from(height)),
                        "{}",
                        assert_message
                    );
                }

                // Check that all indices in [0, num_indices) have been assigned
                for i in 0..num_indices {
                    assert!(indices.contains_key(&i), "{}", assert_message);
                }

                let min_index_frequency = indices.iter().map(|(_, freq)| freq).min().unwrap();
                let max_index_frequency = indices.iter().map(|(_, freq)| freq).max().unwrap();

                // Check that the indices are distributed equally
                assert!(
                    (max_index_frequency - min_index_frequency) <= 1,
                    "{}",
                    assert_message
                );
            }
        }
    }
}
// Checks EqualDecomposition::weight on spatially-implicit habitats (8x1 local
// deme x 8x1 meta deme): ranks are weighted by per-location habitat capacity,
// all ranks in range must be used, and weights per rank must stay within a
// tolerance of 2 * max(local, meta).
#[test]
fn test_equal_weight_decomposition() {
    // Reused across iterations: rank -> total habitat weight mapped to it.
    let mut indices: HashMap<u32, usize> = HashMap::with_capacity(64);

    for local in 1..=8 {
        for meta in 1..=8 {
            let habitat = SpatiallyImplicitHabitat::new((8, 1), local, (8, 1), meta);

            // Includes one partition count above the total weight, which must fail.
            for partition in 1..=(local * 8 + meta * 8 + 1) {
                // Err still carries a (degraded) decomposition — both arms are
                // validated below.
                let (successful, decomposition) = match EqualDecomposition::weight(
                    &habitat,
                    0,
                    NonZeroU32::new(partition).unwrap(),
                ) {
                    Ok(decomposition) => (true, decomposition),
                    Err(decomposition) => (false, decomposition),
                };

                indices.clear();

                // Tally habitat weight per rank, skipping uninhabitable locations.
                for location in habitat.get_extent().iter() {
                    let h = habitat.get_habitat_at_location(&location);

                    if h > 0 {
                        let index =
                            decomposition.map_location_to_subdomain_rank(&location, &habitat);
                        *indices.entry(index).or_insert(0) += h as usize;
                    }
                }

                let assert_message = alloc::format!(
                    "{}x{}->{}x{} / {} => {:?} => {}@{:?}",
                    8,
                    local,
                    8,
                    meta,
                    partition,
                    decomposition,
                    indices.len(),
                    indices,
                );

                let num_indices = u32::try_from(indices.len()).expect(&assert_message);

                // Check that the number of generated indices is less than
                // (unsuccessful) or equal (successful) to the partition
                if successful {
                    assert_eq!(num_indices, partition, "{}", &assert_message);
                } else {
                    assert!(num_indices > 0, "{}", assert_message);
                    assert!(num_indices < partition, "{}", assert_message);
                }

                // Check that all indices in [0, num_indices) have been assigned
                for i in 0..num_indices {
                    assert!(indices.contains_key(&i), "{}", assert_message);
                }

                let min_index_frequency = indices.iter().map(|(_, freq)| freq).min().unwrap();
                let max_index_frequency = indices.iter().map(|(_, freq)| freq).max().unwrap();

                // Check that the indices are distributed equally
                assert!(
                    (max_index_frequency - min_index_frequency) <= (local.max(meta) * 2) as usize,
                    "{}",
                    assert_message
                );
            }
        }
    }
}
use metrics::config::*; use chrono::{Date, DateTime, Datelike, TimeZone, Utc, Weekday}; use clap::{App, Arg}; use env_logger; use lazy_static::lazy_static; use metrics::asana::*; use regex::Regex; use std::collections::{HashMap, HashSet}; use std::fmt::Write as FmtWrite; use std::fs; use std::fs::File; use std::io::Write as IoWrite; use std::path::{Path, PathBuf}; fn main() { /* Logging */ env_logger::init(); /* Command Line */ let (config_file_str, input_file_str, output_dir_str) = process_command_line(); /* * Config data */ let config_file_path = Path::new(&config_file_str) .canonicalize() .expect(&format!("Bad config file path: {}", &config_file_str)); let config_str = fs::read_to_string(config_file_path) .expect(&format!("Bad config file: {}", &config_file_str)); let config: MyConfig = parse_config(&config_str); /* * Input file -- output of `fetch` program */ let input_file_path = Path::new(&input_file_str) .canonicalize() .expect(&format!("Bad input file path: {}", &input_file_str)); let input_str = fs::read_to_string(input_file_path).expect(&format!("Bad token file: {}", &input_file_str)); let data: AsanaData = serde_json::from_str(&input_str).expect("Invalid output."); /* * Output */ let mut output_dir_path = PathBuf::from("."); output_dir_path.push(output_dir_str); match fs::metadata(&output_dir_path) { Ok(dir_metadata) => { if !dir_metadata.is_dir() { panic!( "Output dir path {} is not a dir", &output_dir_path.to_str().unwrap() ); } } Err(_) => { fs::create_dir_all(&output_dir_path).expect("Could not create output directory"); } } let output_dir_path = output_dir_path.canonicalize().expect( format!( "Directoruy {} should exist", output_dir_path.to_str().unwrap() ) .as_str(), ); /* * Process */ let report = proc_data(&config, &data); for report_project in report.projects { output_gnuplot_data(&report_project, &output_dir_path); } } fn process_command_line() -> (String, String, String) { let matches = App::new("proc") .version("0.1.0") .author("Parijat 
Mishra <parijat.mishra@gmail.com>")
        .about("Process Output of `fetch`")
        .arg(
            Arg::with_name("config-file")
                .short("c")
                .long("config-file")
                .takes_value(true)
                .help("path to config file"),
        )
        .arg(
            Arg::with_name("input-file")
                .short("i")
                .long("input-file")
                .takes_value(true)
                .help("path of file containing the output of the `fetch` program."),
        )
        .arg(
            Arg::with_name("output-dir")
                .short("o")
                .long("output-directory")
                .takes_value(true)
                .help("path to directory where output files will be stored"),
        )
        .get_matches();
    // All three flags are mandatory; a missing one aborts with the `expect` message.
    let config_file_str = matches
        .value_of("config-file")
        .expect("Flag --config-file=PATH must be specified");
    let input_file_str = matches
        .value_of("input-file")
        .expect("Flag --input-file=PATH must be specified");
    let output_dir_str = matches
        .value_of("output-dir")
        .expect("Flag --output-dir=DIRPATH must be specified");
    return (
        config_file_str.to_owned(),
        input_file_str.to_owned(),
        output_dir_str.to_owned(),
    );
}

/// Processed output for all configured projects; one `Project` per entry
/// in the config's `projects` map.
#[derive(Debug)]
struct Report<'a> {
    projects: Vec<Project<'a>>,
}

/// Per-project results: the config label, the Asana project name, and the
/// accumulated cumulative-flow-diagram data.
#[derive(Debug)]
struct Project<'a> {
    label: &'a str,
    name: &'a str,
    cfd: Cfd<'a>,
}

/// Cumulative flow diagram data: which states are charted, which count as
/// "done", and the per-week count/duration series.
#[derive(Debug)]
struct Cfd<'a> {
    cfd_states: Vec<&'a str>,
    done_states: Vec<&'a str>,
    period_counts: Vec<PeriodCounts>,
    period_durations: Vec<PeriodDurations>,
}

/// One week's snapshot: task count per CFD state (parallel to
/// `Cfd::cfd_states`) plus the count of tasks that entered a done state.
#[derive(Debug)]
struct PeriodCounts {
    date: Date<Utc>,
    cfd_state_counts: Vec<u32>,
    done_count: u32,
}

/// One week's P90 dwell time (seconds) per CFD state (parallel to
/// `Cfd::cfd_states`).
#[derive(Debug)]
struct PeriodDurations {
    date: Date<Utc>,
    p90_duration_seconds: Vec<u64>,
}

/// Builds a `Report` from raw Asana data: for each project in
/// `config.projects`, replays that project's task section-change events
/// week by week (starting at the project's `horizon` ISO week) and
/// accumulates per-week state counts, dwell-time P90s, and done counts.
fn proc_data<'a>(config: &'a MyConfig, asana_data: &'a AsanaData) -> Report<'a> {
    // Lookup tables: p = project, s = section ("state"), t = task; gid = Asana id.
    let pnames: HashSet<&str> = get_data_pnames(asana_data);
    let pgid2pname: HashMap<&str, &str> = get_pgid2pname(asana_data);
    let sgid2sname: HashMap<&str, &str> = get_sgid2sname(asana_data);
    let tgid2asana_task: HashMap<&str, &AsanaTask> = get_tgid2asana_task(asana_data);
    let sgid2pgid: HashMap<&str, &str> = get_sgid2pgid(asana_data);
    let tgid2pname2sname: HashMap<&str, HashMap<&str, &str>> =
        get_tgid2pname2sname(&sgid2pgid, &sgid2sname, &pgid2pname, asana_data);
    // capture the times when a task entered a state ("section")
    // project_name => Vec<(event_time, task gid, state)>
    let mut pname2t_events: HashMap<&str, Vec<(&DateTime<Utc>, &str, &str)>> = get_task_events(
        &pnames,
        &tgid2asana_task,
        &tgid2pname2sname,
        &asana_data.task_stories,
    );
    let mut projects: Vec<Project> = Vec::new();
    for (label, project_config) in &config.projects {
        println!("Processing: {}", label);
        let pgid = project_config.gid.as_str();
        let pname: &str = pgid2pname[pgid];
        let cfd_states: Vec<&str> = project_config
            .cfd_states
            .iter()
            .map(|s| s.as_str())
            .collect();
        let done_states: Vec<&str> = project_config
            .done_states
            .iter()
            .map(|s| s.as_str())
            .collect();
        let horizon = &project_config.horizon.iso_week();
        // NOTE(review): panics if this project produced no events — confirm
        // that every configured project always has at least one task story.
        let events: Vec<(&DateTime<Utc>, &str, &str)> = pname2t_events.remove(pname).unwrap();
        let mut cfd_period_counts: Vec<PeriodCounts> = Vec::new();
        let mut cfd_period_durations: Vec<PeriodDurations> = Vec::new();
        // ----
        // last known state of each task, and the timestamp when task entered that state
        let mut task_latest_state: HashMap<&str, (&str, &DateTime<Utc>)> = HashMap::new();
        // how many tasks are in each state at the moment
        // NOTE(review): this map is never cleared between period rollovers, so
        // each week's counts include all prior weeks' tallies — presumably
        // intentional for a *cumulative* flow diagram; confirm.
        let mut state_taskcounts: HashMap<&str, u32> = HashMap::new();
        // *in this period* how much time did tasks spend in this state
        let mut state_period_dwelltimes: HashMap<&str, Vec<u64>> = HashMap::new();
        // *in this period* how many tasks are in states considered to be "Done"
        // note - there can be multiple states that are considered to conceptually
        // be Done
        let mut done_count: u32 = 0;
        // ----
        // Periods are ISO weeks, Monday 00:00:00 to the next Monday.
        let mut start_of_period = Utc
            .isoywd(horizon.year(), horizon.week(), Weekday::Mon)
            .and_hms(0, 0, 0);
        let mut start_of_next_period = start_of_period
            .checked_add_signed(chrono::Duration::weeks(1))
            .unwrap();
        // ----
        // Events are sorted by time (see get_task_events), so a single forward
        // scan with period rollover is sufficient.
        for (at, task_gid, sname) in events.into_iter() {
            while at >= &start_of_next_period {
                // event in next period -- finalize this period stats and rollover to next period
                // task -> state ==> count how many times each state appeared
                for (sname, &timestamp) in task_latest_state.values() {
                    let count = state_taskcounts.entry(sname).or_insert_with(|| 0);
                    *count += 1;
                    // Time spent in the current state up to the period boundary.
                    let dwelltime = (start_of_next_period - timestamp).num_seconds() as u64;
                    state_period_dwelltimes
                        .entry(sname)
                        .or_insert_with(|| Vec::new())
                        .push(dwelltime);
                }
                // extract the counts of the subset of states in `p_counted_states`
                let state_count_vec: Vec<u32> = cfd_states
                    .iter()
                    .map(|&k| *state_taskcounts.get(k).unwrap_or(&0))
                    .collect();
                let period_counts = PeriodCounts {
                    date: start_of_period.date(),
                    cfd_state_counts: state_count_vec,
                    done_count: done_count,
                };
                cfd_period_counts.push(period_counts);
                // extract the P90 duration of the subsets of states in `p_counted_states`
                let p90_duration_seconds: Vec<u64> = cfd_states
                    .iter()
                    .map(|&k| {
                        state_period_dwelltimes
                            .get_mut(k)
                            .map(|vec| {
                                // p90 expects its input sorted ascending.
                                vec.sort_unstable();
                                p90(vec)
                            })
                            .unwrap_or(0)
                    })
                    .collect();
                let period_durations = PeriodDurations {
                    date: start_of_period.date(),
                    p90_duration_seconds: p90_duration_seconds,
                };
                cfd_period_durations.push(period_durations);
                // clear the state_durations because we only count the time
                // tasks spend in a state within a period
                state_period_dwelltimes.clear();
                // reset done_count because we only count tasks done
                // within this period
                done_count = 0;
                // update loop variables for next period
                start_of_period = start_of_period
                    .checked_add_signed(chrono::Duration::weeks(1))
                    .unwrap();
                start_of_next_period = start_of_next_period
                    .checked_add_signed(chrono::Duration::weeks(1))
                    .unwrap();
            }
            // event in current period
            if let Some((old_state, old_at)) = task_latest_state.insert(task_gid, (sname, at)) {
                // The task left `old_state`; record how long it sat there.
                let old_state_duration_seconds = (*at - *old_at).num_seconds() as u64;
                state_period_dwelltimes
                    .entry(old_state)
                    .or_insert_with(|| Vec::new())
                    .push(old_state_duration_seconds);
            }
            if done_states.contains(&sname) {
                done_count += 1;
            }
        }
        let project = Project {
            label: label,
            name: pname,
            cfd: Cfd {
                cfd_states: cfd_states,
                done_states: done_states,
                period_counts: cfd_period_counts,
                period_durations: cfd_period_durations,
            },
        };
        projects.push(project);
    }
    let report = Report { projects };
    return report;
}

/// Returns the value at the 90th-percentile index of `vec`.
/// Assumes `vec` is already sorted ascending (callers sort before calling).
/// NOTE(review): `vec.len() - 1` underflows (panics) on an empty vec —
/// callers appear to pass only non-empty vecs; confirm.
fn p90(vec: &Vec<u64>) -> u64 {
    let idx = ((vec.len() - 1) as f64 * 0.9) as usize;
    return *vec.iter().nth(idx).unwrap();
}

/// All project names present in the fetched Asana data.
fn get_data_pnames(asana_data: &AsanaData) -> HashSet<&str> {
    return asana_data
        .projects
        .iter()
        .map(|AsanaProject { name, .. }| name.as_str())
        .collect();
}

/// Map: project gid -> project name.
fn get_pgid2pname(asana_data: &AsanaData) -> HashMap<&str, &str> {
    return asana_data
        .projects
        .iter()
        .map(|AsanaProject { gid, name, .. }| (gid.as_str(), name.as_str()))
        .collect();
}

/// Map: section gid -> section name, across all projects' sections.
fn get_sgid2sname(asana_data: &AsanaData) -> HashMap<&str, &str> {
    return asana_data
        .project_sections
        .iter()
        .flat_map(|aps| {
            aps.sections
                .iter()
                .map(|a_s| (a_s.gid.as_str(), a_s.name.as_str()))
        })
        .collect();
}

/// Map: task gid -> the task record itself.
fn get_tgid2asana_task(asana_data: &AsanaData) -> HashMap<&str, &AsanaTask> {
    return asana_data
        .tasks
        .iter()
        .map(|t| (t.gid.as_str(), t))
        .collect();
}

/// Map: section gid -> owning project gid.
fn get_sgid2pgid(asana_data: &AsanaData) -> HashMap<&str, &str> {
    return asana_data
        .project_sections
        .iter()
        .flat_map(|aps| {
            aps.sections
                .iter()
                .map(move |a_s| (a_s.gid.as_str(), aps.project_gid.as_str()))
        })
        .collect();
}

/// Map: task gid -> (project name -> section name the task currently sits in,
/// within that project). Only sections known to `sgid2pgid` are considered.
fn get_tgid2pname2sname<'a>(
    sgid2pgid: &HashMap<&'a str, &'a str>,
    sgid2sname: &HashMap<&'a str, &'a str>,
    pgid2pname: &HashMap<&'a str, &'a str>,
    asana_data: &'a AsanaData,
) -> HashMap<&'a str, HashMap<&'a str, &'a str>> {
    let tgid2sgids: HashMap<&str, Vec<&str>> = asana_data
        .tasks
        .iter()
        .map(|a_t| {
            (
                a_t.gid.as_str(),
                a_t.memberships
                    .iter()
                    .map(|hm| hm["section"].gid.as_str())
                    // AsanaTask.membership lists sections from *all* projects a task is in
                    // not just the ones we are interested in, so filter out the sections
                    // that don't exist in our `project_sections`
                    .filter(|sgid| sgid2pgid.contains_key(*sgid))
                    .collect(),
            )
        })
        .collect();
    let tgid2pname2sname = tgid2sgids
        .iter()
        .map(|(tgid, vec_sgid)| {
            (
                *tgid,
                vec_sgid
                    .iter()
                    .map(|sgid| (pgid2pname[sgid2pgid[sgid]], sgid2sname[sgid]))
                    .collect(),
            )
        })
        .collect();
    return tgid2pname2sname;
}

/// Builds, per project name, a time-sorted list of (time, task gid, state)
/// events from "section_changed" task stories, synthesizing a creation event
/// for tasks that never changed sections.
fn get_task_events<'a>(
    pnames: &'a HashSet<&str>,
    tgid2asana_task: &'a HashMap<&str, &AsanaTask>,
    tgid2pname2sname: &'a HashMap<&str, HashMap<&str, &str>>,
    task_stories: &'a Vec<AsanaTaskStories>,
) -> HashMap<&'a str, Vec<(&'a DateTime<Utc>, &'a str, &'a str)>> {
    let mut pname2t_events: HashMap<&str, Vec<(&DateTime<Utc>, &str, &str)>> = HashMap::new();
    // read all the stories and convert them into a timeline of events per project
    for asana_task_story in task_stories {
        let task_gid: &str = asana_task_story.task_gid.as_str();
        let task_created_at = &tgid2asana_task[task_gid].created_at;
        for asana_story in &asana_task_story.stories {
            if asana_story.resource_subtype.eq("section_changed") {
                // parse the text of the story
                let (sname_from, sname_to, pname) = parse_section_changed(&asana_story.text);
                // event may be for a project we are not interested in
                if pnames.contains(pname) {
                    let section_changed_at: &DateTime<Utc> = &asana_story.created_at;
                    let events = pname2t_events.entry(pname).or_insert_with(|| Vec::new());
                    // if a previous event for this task does not exist, it means we are
                    // looking at the first section change event -- in that case
                    // we assume that the task existed in the `sname_from` section at creation.
                    // NOTE(review): `events` holds the whole *project's* events, not just
                    // this task's, so this only fires while the project vec is empty —
                    // looks like it was meant to be a per-task check; confirm.
                    if events.is_empty() {
                        events.push((&task_created_at, task_gid, sname_from));
                    }
                    // insert the event for section the task moved to
                    events.push((section_changed_at, task_gid, sname_to));
                }
            }
        }
        // if a task never changed sections after creation, there is no "section changed" story
        // so we look for such tasks and synthesize the "create" story
        for pname in tgid2pname2sname[task_gid].keys() {
            let events = pname2t_events.entry(pname).or_insert_with(|| Vec::new());
            if events.is_empty() {
                let task_curr_sname = tgid2pname2sname[task_gid][pname];
                events.push((task_created_at, task_gid, task_curr_sname));
            }
            // keep each project's timeline sorted by event time
            events.sort_by_cached_key(|entry| entry.0);
        }
    }
    return pname2t_events;
}

/// Parses a "section_changed" story text of the form
/// `moved this Task from "<from>" to "<to>" in <project>`
/// into (from_section, to_section, project_name). Panics on non-matching text.
fn parse_section_changed(text: &str) -> (&str, &str, &str) {
    lazy_static! {
        static ref RE: Regex =
            Regex::new(r#"^moved this Task from "([^"]+?)" to "([^"]+?)" in (.+)$"#).unwrap();
    }
    let caps = RE.captures(text).unwrap();
    return (
        caps.get(1).unwrap().as_str(),
        caps.get(2).unwrap().as_str(),
        caps.get(3).unwrap().as_str(),
    );
}

/// Writes one project's gnuplot artifacts into `output_dir_path`:
/// `<label>_cfd.dat`, `<label>_p90_durations.dat`, `<label>_done.dat`,
/// and the driving `<label>.gnuplot` script. Panics on any I/O failure.
fn output_gnuplot_data(report_project: &Project, output_dir_path: &Path) {
    let name = report_project.name;
    let label = report_project.label;
    println!("Output for {}: {}", label, name);
    let cfd_states = &report_project.cfd.cfd_states;
    let done_states = &report_project.cfd.done_states;
    // ---------
    // CFD Data File
    // ---------
    let mut buffer = String::new();
    // header
    write!(&mut buffer, "# date").unwrap();
    for state in cfd_states {
        write!(&mut buffer, " \"{}\"", state).unwrap();
    }
    write!(&mut buffer, "\n").unwrap();
    // record
    for period_count in report_project.cfd.period_counts.iter() {
        let date = period_count.date;
        write!(
            &mut buffer,
            "{:04}-{:02}-{:02}",
            date.year(),
            date.month(),
            date.day()
        )
        .unwrap();
        for count in period_count.cfd_state_counts.iter() {
            write!(&mut buffer, " {}", count).unwrap();
        }
        write!(&mut buffer, "\n").unwrap();
    }
    // data file
    let cfd_data_file_name = format!("{}_cfd.dat", label);
    let mut cfd_data_file_path = PathBuf::from(output_dir_path);
    cfd_data_file_path.push(&cfd_data_file_name);
    File::create(&cfd_data_file_path)
        .unwrap()
        .write_all(buffer.as_bytes())
        .unwrap();
    println!("Wrote {}", cfd_data_file_path.to_str().unwrap());
    // ---------
    // P90 Durations Data File
    // ---------
    let mut buffer = String::new();
    // header
    write!(&mut buffer, "# date").unwrap();
    for state in cfd_states {
        write!(&mut buffer, " \"{}\"", state).unwrap();
    }
    write!(&mut buffer, "\n").unwrap();
    // record
    for period_durations in report_project.cfd.period_durations.iter() {
        let date = period_durations.date;
        write!(
            &mut buffer,
            "{:04}-{:02}-{:02}",
            date.year(),
            date.month(),
            date.day()
        )
        .unwrap();
        for duration in period_durations.p90_duration_seconds.iter() {
            // stored as seconds; emitted as (fractional) days
            write!(
                &mut buffer,
                " {}",
                (*duration as f32) / (24.0 * 60.0 * 60.0)
            )
            .unwrap();
        }
        write!(&mut buffer, "\n").unwrap();
    }
    // data file
    let duration_data_file_name = format!("{}_p90_durations.dat", label);
    let mut duration_data_file_path = PathBuf::from(output_dir_path);
    duration_data_file_path.push(&duration_data_file_name);
    File::create(&duration_data_file_path)
        .unwrap()
        .write_all(buffer.as_bytes())
        .unwrap();
    println!("Wrote {}", duration_data_file_path.to_str().unwrap());
    // ---------
    // Done Count Data File
    // ---------
    let mut buffer = String::new();
    // header
    writeln!(&mut buffer, "# date done_count").unwrap();
    // record
    for period_counts in report_project.cfd.period_counts.iter() {
        let date = period_counts.date;
        let done_count = period_counts.done_count;
        writeln!(
            &mut buffer,
            "{:04}-{:02}-{:02} {}",
            date.year(),
            date.month(),
            date.day(),
            done_count
        )
        .unwrap();
    }
    // data file
    let done_count_data_file_name = format!("{}_done.dat", label);
    let mut done_count_data_file_path = PathBuf::from(output_dir_path);
    done_count_data_file_path.push(&done_count_data_file_name);
    File::create(&done_count_data_file_path)
        .unwrap()
        .write_all(buffer.as_bytes())
        .unwrap();
    println!("Wrote {}", done_count_data_file_path.to_str().unwrap());
    // ---------
    // Gnuplot
    // ---------
    let mut buffer = String::new();
    writeln!(
        &mut buffer,
        r#"
set terminal png enhanced font "Arial,10" fontscale 1.0 size 1024,768
set output "{label}.png"
set multiplot layout 3,1 title "{name}""#,
        label = label,
        name = name
    )
    .unwrap();
    // CFD - Counts
    writeln!(
        &mut buffer,
        r#"# CFD
set title "Cumulative Tasks in State - Count"
set key left top outside
set xdata time
set timefmt "%Y-%m-%d"
{plotline}"#,
        plotline = make_gnuplot_cfdline(&cfd_data_file_name, &cfd_states)
    )
    .unwrap();
    // P90 Durations (Hours)
    writeln!(
        &mut buffer,
        r#"# P90 Duration (Days)
set title "P90 Age Tasks in State - Days"
set key left top outside
set xdata time
set timefmt "%Y-%m-%d"
{plotline}"#,
        plotline = make_gnuplot_cfdline(&duration_data_file_name, &cfd_states)
    )
    .unwrap();
    // Task "Done" per period
    writeln!(
        &mut buffer,
        r#"# Tasks "Done" per period
set title "Throughput - Tasks Transitioning Into {done_state_names} - Count"
unset key
set xdata time
set timefmt "%Y-%m-%d"
plot "{data_file_name}" using 1:2 with filledcurve x1"#,
        done_state_names = done_states.join(", "),
        data_file_name = done_count_data_file_name
    )
    .unwrap();
    // gnuplot file
    let gnuplot_file_name = format!("{}.gnuplot", label);
    let mut gnuplot_file_path = PathBuf::from(output_dir_path);
    gnuplot_file_path.push(&gnuplot_file_name);
    let mut gf = File::create(&gnuplot_file_path).unwrap();
    gf.write_all(buffer.as_bytes()).unwrap();
    println!("Wrote {}", gnuplot_file_path.to_str().unwrap());
}

/// Builds one gnuplot `plot` line with a stacked filledcurve per state in
/// `states`, reading from `file_name`. Each curve plots the running sum of
/// its own column through the last state column (see make_col_expression).
fn make_gnuplot_cfdline(file_name: &str, states: &Vec<&str>) -> String {
    let mut buffer = String::from("plot");
    // gnuplot: columns in data files start from 1
    // col 1 is the date col; state cols are 2, 3, ... states.len() + 1
    let max_gnuplot_col = states.len() + 1;
    for (idx, state) in states.iter().enumerate() {
        // idx starts from 0
        if idx > 0 {
            write!(&mut buffer, ",").unwrap()
        };
        let gnuplot_column = idx + 2;
        write!(
            &mut buffer,
            r#" "{file_name}" using 1:({col}) with filledcurve x1 title "{state}""#,
            file_name = file_name,
            col = make_col_expression(gnuplot_column as u32, max_gnuplot_col as u32),
            state = state
        )
        .unwrap();
    }
    write!(&mut buffer, "\n").unwrap();
    return buffer;
}

/// Builds the gnuplot column-sum expression "$<cur_col>+$<cur_col+1>+...$<max_col>".
fn make_col_expression(cur_col: u32, max_col: u32) -> String {
    // return "$<cur_col>+$<cur_col+1>+...$max_col"
    let mut buffer = String::new();
    for i in cur_col..=max_col {
        if i > cur_col {
            write!(&mut buffer, "+").unwrap();
        };
        write!(&mut buffer, "${}", i).unwrap();
    }
    return buffer;
}
use num::traits::{Num, NumCast}; use super::cm::{CM, ToCM}; use super::mm::{MM, ToMM}; use super::m::{M, ToM}; /// ToKM is the canonical trait to use for taking input in kilometers. /// /// For example the millimeters type (MM) implements the ToKM trait and thus /// millimeters can be given as a parameter to any input that seeks kilometers. pub trait ToKM{ type Output; /// to_km returns these units in kilometers, performing conversion if needed. /// /// # Examples /// /// ``` /// use fiz_math::unit::{M, ToKM}; /// use fiz_math::Num; /// use std::fmt::Debug; /// /// fn walk<T: ToKM<Output=U>, U: Num+Debug>(dist: T) { /// println!("{:?}", dist.to_km().0) /// } /// walk(M(2.0)); /// walk(M::<i32>(2)); /// ``` fn to_km(self) -> KM<Self::Output>; } /// KM represents kilometers (1000 meters). /// /// # Examples /// /// ``` /// use fiz_math::unit::KM; /// /// let x = KM(1.0); /// println!("{:?}", x); /// ``` unit!(KM); impl<T: Num + NumCast> ToMM for KM<T> { type Output = T; /// to_mm returns these kilometers converted to millimeters. /// /// # Examples /// /// ``` /// use fiz_math::unit::{KM, MM, ToMM}; /// /// assert_eq!(KM(1.0).to_mm(), MM(1000000.0)); /// ``` fn to_mm(self) -> MM<T> { MM(self.0 * T::from(1000000).unwrap()) } } impl<T: Num + NumCast> ToCM for KM<T> { type Output = T; /// to_cm returns these kilometers converted to centimeters. /// /// # Examples /// /// ``` /// use fiz_math::unit::{KM, CM, ToCM}; /// /// assert_eq!(KM(1.0).to_cm(), CM(100000.0)); /// ``` fn to_cm(self) -> CM<T> { CM(self.0 * T::from(100000).unwrap()) } } impl<T: Num + NumCast> ToM for KM<T> { type Output = T; /// to_m returns these kilometers converted to meters. /// /// # Examples /// /// ``` /// use fiz_math::unit::{KM, M, ToM}; /// /// assert_eq!(KM(1.0).to_m(), M(1000.0)); /// ``` fn to_m(self) -> M<T> { M(self.0 * T::from(1000).unwrap()) } } impl<T: Num + NumCast> ToKM for KM<T> { type Output = T; /// to_km simply returns self. 
/// /// # Examples /// /// ``` /// use fiz_math::unit::{KM, ToKM}; /// /// assert_eq!(KM(1.0).to_km(), KM(1.0)); /// ``` fn to_km(self) -> KM<T> { self } }
use crate::egml::{AnyModel, Real, RealValue, Converter, Fill, Stroke, Transform}; #[derive(Default, Clone)] pub struct Text { pub id: Option<String>, pub x: RealValue, pub y: RealValue, pub font_name: String, pub font_size: RealValue, pub align: (AlignHor, AlignVer), pub stroke: Option<Stroke>, pub fill: Option<Fill>, pub transform: Option<Transform>, pub modifier: Option<fn(&mut Text, &dyn AnyModel)>, } impl Text { pub fn id(&self) -> Option<&str> { self.id.as_ref().map(|s| s.as_str()) } #[inline] pub fn intersect(&self, _x: Real, _y: Real) -> bool { // TODO: calvulate intersect // let (x, y) = self.transform.as_ref() // .map(|t| (x - t.matrix[4], y - t.matrix[5])) // .unwrap_or((x, y)); false } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum AlignHor { Left, Right, Center, } impl Default for AlignHor { fn default() -> Self { AlignHor::Left } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum AlignVer { Bottom, Middle, Baseline, Top, } impl Default for AlignVer { fn default() -> Self { AlignVer::Top } } impl<'a> Converter<(AlignHor, AlignVer)> for AlignHor { fn convert(self) -> (AlignHor, AlignVer) { (self, AlignVer::default()) } } impl<'a> Converter<(AlignHor, AlignVer)> for AlignVer { fn convert(self) -> (AlignHor, AlignVer) { (AlignHor::default(), self) } }
// svd2rust-generated accessor types for the MPCBB1_VCTR24 register:
// `R`/`W` are the generic register reader/writer proxies, and each of the
// 32 block bits B768..=B799 gets a one-bit reader/writer alias pair.
#[doc = "Register `MPCBB1_VCTR24` reader"]
pub type R = crate::R<MPCBB1_VCTR24_SPEC>;
#[doc = "Register `MPCBB1_VCTR24` writer"]
pub type W = crate::W<MPCBB1_VCTR24_SPEC>;
#[doc = "Field `B768` reader - B768"]
pub type B768_R = crate::BitReader;
#[doc = "Field `B768` writer - B768"]
pub type B768_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B769` reader - B769"]
pub type B769_R = crate::BitReader;
#[doc = "Field `B769` writer - B769"]
pub type B769_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B770` reader - B770"]
pub type B770_R = crate::BitReader;
#[doc = "Field `B770` writer - B770"]
pub type B770_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B771` reader - B771"]
pub type B771_R = crate::BitReader;
#[doc = "Field `B771` writer - B771"]
pub type B771_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B772` reader - B772"]
pub type B772_R = crate::BitReader;
#[doc = "Field `B772` writer - B772"]
pub type B772_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B773` reader - B773"]
pub type B773_R = crate::BitReader;
#[doc = "Field `B773` writer - B773"]
pub type B773_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B774` reader - B774"]
pub type B774_R = crate::BitReader;
#[doc = "Field `B774` writer - B774"]
pub type B774_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B775` reader - B775"]
pub type B775_R = crate::BitReader;
#[doc = "Field `B775` writer - B775"]
pub type B775_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B776` reader - B776"]
pub type B776_R = crate::BitReader;
#[doc = "Field `B776` writer - B776"]
pub type B776_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B777` reader - B777"]
pub type B777_R = crate::BitReader;
#[doc = "Field `B777` writer - B777"]
pub type B777_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B778` reader - B778"]
pub type B778_R = crate::BitReader;
#[doc = "Field `B778` writer - B778"]
pub type B778_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B779` reader - B779"]
pub type B779_R = crate::BitReader;
#[doc = "Field `B779` writer - B779"]
pub type B779_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B780` reader - B780"]
pub type B780_R = crate::BitReader;
#[doc = "Field `B780` writer - B780"]
pub type B780_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B781` reader - B781"]
pub type B781_R = crate::BitReader;
#[doc = "Field `B781` writer - B781"]
pub type B781_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B782` reader - B782"]
pub type B782_R = crate::BitReader;
#[doc = "Field `B782` writer - B782"]
pub type B782_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B783` reader - B783"]
pub type B783_R = crate::BitReader;
#[doc = "Field `B783` writer - B783"]
pub type B783_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B784` reader - B784"]
pub type B784_R = crate::BitReader;
#[doc = "Field `B784` writer - B784"]
pub type B784_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B785` reader - B785"]
pub type B785_R = crate::BitReader;
#[doc = "Field `B785` writer - B785"]
pub type B785_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B786` reader - B786"]
pub type B786_R = crate::BitReader;
#[doc = "Field `B786` writer - B786"]
pub type B786_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B787` reader - B787"]
pub type B787_R = crate::BitReader;
#[doc = "Field `B787` writer - B787"]
pub type B787_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B788` reader - B788"]
pub type B788_R = crate::BitReader;
#[doc = "Field `B788` writer - B788"]
pub type B788_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B789` reader - B789"]
pub type B789_R = crate::BitReader;
#[doc = "Field `B789` writer - B789"]
pub type B789_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B790` reader - B790"]
pub type B790_R = crate::BitReader;
#[doc = "Field `B790` writer - B790"]
pub type B790_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B791` reader - B791"]
pub type B791_R = crate::BitReader;
#[doc = "Field `B791` writer - B791"]
pub type B791_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B792` reader - B792"]
pub type B792_R = crate::BitReader;
#[doc = "Field `B792` writer - B792"]
pub type B792_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B793` reader - B793"]
pub type B793_R = crate::BitReader;
#[doc = "Field `B793` writer - B793"]
pub type B793_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B794` reader - B794"]
pub type B794_R = crate::BitReader;
#[doc = "Field `B794` writer - B794"]
pub type B794_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B795` reader - B795"]
pub type B795_R = crate::BitReader;
#[doc = "Field `B795` writer - B795"]
pub type B795_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B796` reader - B796"]
pub type B796_R = crate::BitReader;
#[doc = "Field `B796` writer - B796"]
pub type B796_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B797` reader - B797"]
pub type B797_R = crate::BitReader;
#[doc = "Field `B797` writer - B797"]
pub type B797_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B798` reader - B798"]
pub type B798_R = crate::BitReader;
#[doc = "Field `B798` writer - B798"]
pub type B798_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B799` reader - B799"]
pub type B799_R = crate::BitReader;
#[doc = "Field `B799` writer - B799"]
pub type B799_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// svd2rust-generated read proxy: one accessor per bit; register bit n
// corresponds to field B(768 + n).
impl R {
    #[doc = "Bit 0 - B768"] #[inline(always)] pub fn b768(&self) -> B768_R { B768_R::new((self.bits & 1) != 0) }
    #[doc = "Bit 1 - B769"] #[inline(always)] pub fn b769(&self) -> B769_R { B769_R::new(((self.bits >> 1) & 1) != 0) }
    #[doc = "Bit 2 - B770"] #[inline(always)] pub fn b770(&self) -> B770_R { B770_R::new(((self.bits >> 2) & 1) != 0) }
    #[doc = "Bit 3 - B771"] #[inline(always)] pub fn b771(&self) -> B771_R { B771_R::new(((self.bits >> 3) & 1) != 0) }
    #[doc = "Bit 4 - B772"] #[inline(always)] pub fn b772(&self) -> B772_R { B772_R::new(((self.bits >> 4) & 1) != 0) }
    #[doc = "Bit 5 - B773"] #[inline(always)] pub fn b773(&self) -> B773_R { B773_R::new(((self.bits >> 5) & 1) != 0) }
    #[doc = "Bit 6 - B774"] #[inline(always)] pub fn b774(&self) -> B774_R { B774_R::new(((self.bits >> 6) & 1) != 0) }
    #[doc = "Bit 7 - B775"] #[inline(always)] pub fn b775(&self) -> B775_R { B775_R::new(((self.bits >> 7) & 1) != 0) }
    #[doc = "Bit 8 - B776"] #[inline(always)] pub fn b776(&self) -> B776_R { B776_R::new(((self.bits >> 8) & 1) != 0) }
    #[doc = "Bit 9 - B777"] #[inline(always)] pub fn b777(&self) -> B777_R { B777_R::new(((self.bits >> 9) & 1) != 0) }
    #[doc = "Bit 10 - B778"] #[inline(always)] pub fn b778(&self) -> B778_R { B778_R::new(((self.bits >> 10) & 1) != 0) }
    #[doc = "Bit 11 - B779"] #[inline(always)] pub fn b779(&self) -> B779_R { B779_R::new(((self.bits >> 11) & 1) != 0) }
    #[doc = "Bit 12 - B780"] #[inline(always)] pub fn b780(&self) -> B780_R { B780_R::new(((self.bits >> 12) & 1) != 0) }
    #[doc = "Bit 13 - B781"] #[inline(always)] pub fn b781(&self) -> B781_R { B781_R::new(((self.bits >> 13) & 1) != 0) }
    #[doc = "Bit 14 - B782"] #[inline(always)] pub fn b782(&self) -> B782_R { B782_R::new(((self.bits >> 14) & 1) != 0) }
    #[doc = "Bit 15 - B783"] #[inline(always)] pub fn b783(&self) -> B783_R { B783_R::new(((self.bits >> 15) & 1) != 0) }
    #[doc = "Bit 16 - B784"] #[inline(always)] pub fn b784(&self) -> B784_R { B784_R::new(((self.bits >> 16) & 1) != 0) }
    #[doc = "Bit 17 - B785"] #[inline(always)] pub fn b785(&self) -> B785_R { B785_R::new(((self.bits >> 17) & 1) != 0) }
    #[doc = "Bit 18 - B786"] #[inline(always)] pub fn b786(&self) -> B786_R { B786_R::new(((self.bits >> 18) & 1) != 0) }
    #[doc = "Bit 19 - B787"] #[inline(always)] pub fn b787(&self) -> B787_R { B787_R::new(((self.bits >> 19) & 1) != 0) }
    #[doc = "Bit 20 - B788"] #[inline(always)] pub fn b788(&self) -> B788_R { B788_R::new(((self.bits >> 20) & 1) != 0) }
    #[doc = "Bit 21 - B789"] #[inline(always)] pub fn b789(&self) -> B789_R { B789_R::new(((self.bits >> 21) & 1) != 0) }
    #[doc = "Bit 22 - B790"] #[inline(always)] pub fn b790(&self) -> B790_R { B790_R::new(((self.bits >> 22) & 1) != 0) }
    #[doc = "Bit 23 - B791"] #[inline(always)] pub fn b791(&self) -> B791_R { B791_R::new(((self.bits >> 23) & 1) != 0) }
    #[doc = "Bit 24 - B792"] #[inline(always)] pub fn b792(&self) -> B792_R { B792_R::new(((self.bits >> 24) & 1) != 0) }
    #[doc = "Bit 25 - B793"] #[inline(always)] pub fn b793(&self) -> B793_R { B793_R::new(((self.bits >> 25) & 1) != 0) }
    #[doc = "Bit 26 - B794"] #[inline(always)] pub fn b794(&self) -> B794_R { B794_R::new(((self.bits >> 26) & 1) != 0) }
    #[doc = "Bit 27 - B795"] #[inline(always)] pub fn b795(&self) -> B795_R { B795_R::new(((self.bits >> 27) & 1) != 0) }
    #[doc = "Bit 28 - B796"] #[inline(always)] pub fn b796(&self) -> B796_R { B796_R::new(((self.bits >> 28) & 1) != 0) }
    #[doc = "Bit 29 - B797"] #[inline(always)] pub fn b797(&self) -> B797_R { B797_R::new(((self.bits >> 29) & 1) != 0) }
    #[doc = "Bit 30 - B798"] #[inline(always)] pub fn b798(&self) -> B798_R { B798_R::new(((self.bits >> 30) & 1) != 0) }
    #[doc = "Bit 31 - B799"] #[inline(always)] pub fn b799(&self) -> B799_R { B799_R::new(((self.bits >> 31) & 1) != 0) }
}
// svd2rust-generated write proxy: each method returns the bit writer for
// field B(768 + n), with the bit offset carried as the const parameter.
impl W {
    #[doc = "Bit 0 - B768"] #[inline(always)] #[must_use] pub fn b768(&mut self) -> B768_W<MPCBB1_VCTR24_SPEC, 0> { B768_W::new(self) }
    #[doc = "Bit 1 - B769"] #[inline(always)] #[must_use] pub fn b769(&mut self) -> B769_W<MPCBB1_VCTR24_SPEC, 1> { B769_W::new(self) }
    #[doc = "Bit 2 - B770"] #[inline(always)] #[must_use] pub fn b770(&mut self) -> B770_W<MPCBB1_VCTR24_SPEC, 2> { B770_W::new(self) }
    #[doc = "Bit 3 - B771"] #[inline(always)] #[must_use] pub fn b771(&mut self) -> B771_W<MPCBB1_VCTR24_SPEC, 3> { B771_W::new(self) }
    #[doc = "Bit 4 - B772"] #[inline(always)] #[must_use] pub fn b772(&mut self) -> B772_W<MPCBB1_VCTR24_SPEC, 4> { B772_W::new(self) }
    #[doc = "Bit 5 - B773"] #[inline(always)] #[must_use] pub fn b773(&mut self) -> B773_W<MPCBB1_VCTR24_SPEC, 5> { B773_W::new(self) }
    #[doc = "Bit 6 - B774"] #[inline(always)] #[must_use] pub fn b774(&mut self) -> B774_W<MPCBB1_VCTR24_SPEC, 6> { B774_W::new(self) }
    #[doc = "Bit 7 - B775"] #[inline(always)] #[must_use] pub fn b775(&mut self) -> B775_W<MPCBB1_VCTR24_SPEC, 7> { B775_W::new(self) }
    #[doc = "Bit 8 - B776"] #[inline(always)] #[must_use] pub fn b776(&mut self) -> B776_W<MPCBB1_VCTR24_SPEC, 8> { B776_W::new(self) }
    #[doc = "Bit 9 - B777"] #[inline(always)] #[must_use] pub fn b777(&mut self) -> B777_W<MPCBB1_VCTR24_SPEC, 9> { B777_W::new(self) }
    #[doc = "Bit 10 - B778"] #[inline(always)] #[must_use] pub fn b778(&mut self) -> B778_W<MPCBB1_VCTR24_SPEC, 10> { B778_W::new(self) }
    #[doc = "Bit 11 - B779"] #[inline(always)] #[must_use] pub fn b779(&mut self) -> B779_W<MPCBB1_VCTR24_SPEC, 11> { B779_W::new(self) }
    #[doc = "Bit 12 - B780"] #[inline(always)] #[must_use] pub fn b780(&mut self) -> B780_W<MPCBB1_VCTR24_SPEC, 12> { B780_W::new(self) }
    #[doc = "Bit 13 - B781"] #[inline(always)] #[must_use] pub fn b781(&mut self) -> B781_W<MPCBB1_VCTR24_SPEC, 13> { B781_W::new(self) }
    #[doc = "Bit 14 - B782"] #[inline(always)] #[must_use] pub fn b782(&mut self) -> B782_W<MPCBB1_VCTR24_SPEC, 14> { B782_W::new(self) }
    #[doc = "Bit 15 - B783"] #[inline(always)] #[must_use] pub fn b783(&mut self) -> B783_W<MPCBB1_VCTR24_SPEC, 15> { B783_W::new(self) }
    #[doc = "Bit 16 - B784"] #[inline(always)] #[must_use] pub fn b784(&mut self) -> B784_W<MPCBB1_VCTR24_SPEC, 16> { B784_W::new(self) }
    #[doc = "Bit 17 - B785"] #[inline(always)] #[must_use] pub fn b785(&mut self) -> B785_W<MPCBB1_VCTR24_SPEC, 17> { B785_W::new(self) }
    #[doc = "Bit 18 - B786"] #[inline(always)] #[must_use] pub fn b786(&mut self) -> B786_W<MPCBB1_VCTR24_SPEC, 18> { B786_W::new(self) }
    #[doc = "Bit 19 - B787"] #[inline(always)] #[must_use] pub fn b787(&mut self) -> B787_W<MPCBB1_VCTR24_SPEC, 19> { B787_W::new(self) }
    #[doc = "Bit 20 - B788"] #[inline(always)] #[must_use] pub fn b788(&mut self) -> B788_W<MPCBB1_VCTR24_SPEC, 20> { B788_W::new(self) }
    #[doc = "Bit 21 - B789"] #[inline(always)] #[must_use] pub fn b789(&mut self) -> B789_W<MPCBB1_VCTR24_SPEC, 21> { B789_W::new(self) }
    #[doc = "Bit 22 - B790"] #[inline(always)] #[must_use] pub fn b790(&mut self) -> B790_W<MPCBB1_VCTR24_SPEC, 22> { B790_W::new(self) }
    #[doc = "Bit 23 - B791"] #[inline(always)] #[must_use] pub fn b791(&mut self) -> B791_W<MPCBB1_VCTR24_SPEC, 23> { B791_W::new(self) }
    #[doc = "Bit 24 - B792"] #[inline(always)] #[must_use] pub fn b792(&mut self) -> B792_W<MPCBB1_VCTR24_SPEC, 24> { B792_W::new(self) }
    #[doc = "Bit 25 - B793"] #[inline(always)] #[must_use] pub fn b793(&mut self) -> B793_W<MPCBB1_VCTR24_SPEC, 25> { B793_W::new(self) }
    #[doc = "Bit 26 - B794"] #[inline(always)] #[must_use] pub fn b794(&mut self) -> B794_W<MPCBB1_VCTR24_SPEC, 26> { B794_W::new(self) }
    #[doc = "Bit 27 - B795"] #[inline(always)] #[must_use] pub fn b795(&mut self) -> B795_W<MPCBB1_VCTR24_SPEC, 27> { B795_W::new(self) }
    #[doc = "Bit 28 - B796"] #[inline(always)] #[must_use] pub fn b796(&mut self) -> B796_W<MPCBB1_VCTR24_SPEC, 28> { B796_W::new(self) }
    #[doc = "Bit 29 - B797"] #[inline(always)] #[must_use] pub fn b797(&mut self) -> B797_W<MPCBB1_VCTR24_SPEC, 29> { B797_W::new(self) }
    #[doc = "Bit 30 - B798"] #[inline(always)] #[must_use] pub fn b798(&mut self) -> B798_W<MPCBB1_VCTR24_SPEC, 30> { B798_W::new(self) }
    #[doc = "Bit 31 - B799"] #[inline(always)] #[must_use] pub fn b799(&mut self) -> B799_W<MPCBB1_VCTR24_SPEC, 31> { B799_W::new(self) }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "MPCBBx vector register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mpcbb1_vctr24::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mpcbb1_vctr24::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MPCBB1_VCTR24_SPEC;
// The register is 32 bits wide.
impl crate::RegisterSpec for MPCBB1_VCTR24_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`mpcbb1_vctr24::R`](R) reader structure"]
impl crate::Readable for MPCBB1_VCTR24_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mpcbb1_vctr24::W`](W) writer structure"]
impl crate::Writable for MPCBB1_VCTR24_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MPCBB1_VCTR24 to value 0"]
impl crate::Resettable for MPCBB1_VCTR24_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
// NOTE(review): lookup table for the Unicode "NKo" block. One variant per
// assigned code point, from U+07C0 (NkoDigitZero) through U+07FE
// (NkoDoromeSign); unassigned code points in the range (U+07FB..U+07FC)
// are skipped. `Into<char>` maps each variant back to its character via
// an exhaustive match — keep the variant list and both match tables
// (here and in `TryFrom<char>` below) in sync if the block is extended.
// TODO(review): `impl From<NKo> for char` would be the idiomatic form
// (it provides `Into<char>` for free via the blanket impl); left as-is
// here to keep this a documentation-only change.
/// An enum to represent all characters in the NKo block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum NKo { /// \u{7c0}: '߀' NkoDigitZero, /// \u{7c1}: '߁' NkoDigitOne, /// \u{7c2}: '߂' NkoDigitTwo, /// \u{7c3}: '߃' NkoDigitThree, /// \u{7c4}: '߄' NkoDigitFour, /// \u{7c5}: '߅' NkoDigitFive, /// \u{7c6}: '߆' NkoDigitSix, /// \u{7c7}: '߇' NkoDigitSeven, /// \u{7c8}: '߈' NkoDigitEight, /// \u{7c9}: '߉' NkoDigitNine, /// \u{7ca}: 'ߊ' NkoLetterA, /// \u{7cb}: 'ߋ' NkoLetterEe, /// \u{7cc}: 'ߌ' NkoLetterI, /// \u{7cd}: 'ߍ' NkoLetterE, /// \u{7ce}: 'ߎ' NkoLetterU, /// \u{7cf}: 'ߏ' NkoLetterOo, /// \u{7d0}: 'ߐ' NkoLetterO, /// \u{7d1}: 'ߑ' NkoLetterDagbasinna, /// \u{7d2}: 'ߒ' NkoLetterN, /// \u{7d3}: 'ߓ' NkoLetterBa, /// \u{7d4}: 'ߔ' NkoLetterPa, /// \u{7d5}: 'ߕ' NkoLetterTa, /// \u{7d6}: 'ߖ' NkoLetterJa, /// \u{7d7}: 'ߗ' NkoLetterCha, /// \u{7d8}: 'ߘ' NkoLetterDa, /// \u{7d9}: 'ߙ' NkoLetterRa, /// \u{7da}: 'ߚ' NkoLetterRra, /// \u{7db}: 'ߛ' NkoLetterSa, /// \u{7dc}: 'ߜ' NkoLetterGba, /// \u{7dd}: 'ߝ' NkoLetterFa, /// \u{7de}: 'ߞ' NkoLetterKa, /// \u{7df}: 'ߟ' NkoLetterLa, /// \u{7e0}: 'ߠ' NkoLetterNaWoloso, /// \u{7e1}: 'ߡ' NkoLetterMa, /// \u{7e2}: 'ߢ' NkoLetterNya, /// \u{7e3}: 'ߣ' NkoLetterNa, /// \u{7e4}: 'ߤ' NkoLetterHa, /// \u{7e5}: 'ߥ' NkoLetterWa, /// \u{7e6}: 'ߦ' NkoLetterYa, /// \u{7e7}: 'ߧ' NkoLetterNyaWoloso, /// \u{7e8}: 'ߨ' NkoLetterJonaJa, /// \u{7e9}: 'ߩ' NkoLetterJonaCha, /// \u{7ea}: 'ߪ' NkoLetterJonaRa, /// \u{7eb}: '߫' NkoCombiningShortHighTone, /// \u{7ec}: '߬' NkoCombiningShortLowTone, /// \u{7ed}: '߭' NkoCombiningShortRisingTone, /// \u{7ee}: '߮' NkoCombiningLongDescendingTone, /// \u{7ef}: '߯' NkoCombiningLongHighTone, /// \u{7f0}: '߰' NkoCombiningLongLowTone, /// \u{7f1}: '߱' NkoCombiningLongRisingTone, /// \u{7f2}: '߲' NkoCombiningNasalizationMark, /// \u{7f3}: '߳' NkoCombiningDoubleDotAbove, /// \u{7f4}: 'ߴ' NkoHighToneApostrophe, /// \u{7f5}: 'ߵ' NkoLowToneApostrophe, /// \u{7f6}: '߶' NkoSymbolOoDennen, /// \u{7f7}: '߷' 
NkoSymbolGbakurunen, /// \u{7f8}: '߸' NkoComma, /// \u{7f9}: '߹' NkoExclamationMark, /// \u{7fa}: 'ߺ' NkoLajanyalan, /// \u{7fd}: '߽' NkoDantayalan, /// \u{7fe}: '߾' NkoDoromeSign, } impl Into<char> for NKo { fn into(self) -> char { match self { NKo::NkoDigitZero => '߀', NKo::NkoDigitOne => '߁', NKo::NkoDigitTwo => '߂', NKo::NkoDigitThree => '߃', NKo::NkoDigitFour => '߄', NKo::NkoDigitFive => '߅', NKo::NkoDigitSix => '߆', NKo::NkoDigitSeven => '߇', NKo::NkoDigitEight => '߈', NKo::NkoDigitNine => '߉', NKo::NkoLetterA => 'ߊ', NKo::NkoLetterEe => 'ߋ', NKo::NkoLetterI => 'ߌ', NKo::NkoLetterE => 'ߍ', NKo::NkoLetterU => 'ߎ', NKo::NkoLetterOo => 'ߏ', NKo::NkoLetterO => 'ߐ', NKo::NkoLetterDagbasinna => 'ߑ', NKo::NkoLetterN => 'ߒ', NKo::NkoLetterBa => 'ߓ', NKo::NkoLetterPa => 'ߔ', NKo::NkoLetterTa => 'ߕ', NKo::NkoLetterJa => 'ߖ', NKo::NkoLetterCha => 'ߗ', NKo::NkoLetterDa => 'ߘ', NKo::NkoLetterRa => 'ߙ', NKo::NkoLetterRra => 'ߚ', NKo::NkoLetterSa => 'ߛ', NKo::NkoLetterGba => 'ߜ', NKo::NkoLetterFa => 'ߝ', NKo::NkoLetterKa => 'ߞ', NKo::NkoLetterLa => 'ߟ', NKo::NkoLetterNaWoloso => 'ߠ', NKo::NkoLetterMa => 'ߡ', NKo::NkoLetterNya => 'ߢ', NKo::NkoLetterNa => 'ߣ', NKo::NkoLetterHa => 'ߤ', NKo::NkoLetterWa => 'ߥ', NKo::NkoLetterYa => 'ߦ', NKo::NkoLetterNyaWoloso => 'ߧ', NKo::NkoLetterJonaJa => 'ߨ', NKo::NkoLetterJonaCha => 'ߩ', NKo::NkoLetterJonaRa => 'ߪ', NKo::NkoCombiningShortHighTone => '߫', NKo::NkoCombiningShortLowTone => '߬', NKo::NkoCombiningShortRisingTone => '߭', NKo::NkoCombiningLongDescendingTone => '߮', NKo::NkoCombiningLongHighTone => '߯', NKo::NkoCombiningLongLowTone => '߰', NKo::NkoCombiningLongRisingTone => '߱', NKo::NkoCombiningNasalizationMark => '߲', NKo::NkoCombiningDoubleDotAbove => '߳', NKo::NkoHighToneApostrophe => 'ߴ', NKo::NkoLowToneApostrophe => 'ߵ', NKo::NkoSymbolOoDennen => '߶', NKo::NkoSymbolGbakurunen => '߷', NKo::NkoComma => '߸', NKo::NkoExclamationMark => '߹', NKo::NkoLajanyalan => 'ߺ', NKo::NkoDantayalan => '߽', NKo::NkoDoromeSign => '߾', } } } 
// NOTE(review): reverse mapping `char -> NKo`; mirrors the `Into<char>`
// table above and must stay in sync with it. Characters outside the NKo
// block fall through to `Err(())`.
impl std::convert::TryFrom<char> for NKo { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { '߀' => Ok(NKo::NkoDigitZero), '߁' => Ok(NKo::NkoDigitOne), '߂' => Ok(NKo::NkoDigitTwo), '߃' => Ok(NKo::NkoDigitThree), '߄' => Ok(NKo::NkoDigitFour), '߅' => Ok(NKo::NkoDigitFive), '߆' => Ok(NKo::NkoDigitSix), '߇' => Ok(NKo::NkoDigitSeven), '߈' => Ok(NKo::NkoDigitEight), '߉' => Ok(NKo::NkoDigitNine), 'ߊ' => Ok(NKo::NkoLetterA), 'ߋ' => Ok(NKo::NkoLetterEe), 'ߌ' => Ok(NKo::NkoLetterI), 'ߍ' => Ok(NKo::NkoLetterE), 'ߎ' => Ok(NKo::NkoLetterU), 'ߏ' => Ok(NKo::NkoLetterOo), 'ߐ' => Ok(NKo::NkoLetterO), 'ߑ' => Ok(NKo::NkoLetterDagbasinna), 'ߒ' => Ok(NKo::NkoLetterN), 'ߓ' => Ok(NKo::NkoLetterBa), 'ߔ' => Ok(NKo::NkoLetterPa), 'ߕ' => Ok(NKo::NkoLetterTa), 'ߖ' => Ok(NKo::NkoLetterJa), 'ߗ' => Ok(NKo::NkoLetterCha), 'ߘ' => Ok(NKo::NkoLetterDa), 'ߙ' => Ok(NKo::NkoLetterRa), 'ߚ' => Ok(NKo::NkoLetterRra), 'ߛ' => Ok(NKo::NkoLetterSa), 'ߜ' => Ok(NKo::NkoLetterGba), 'ߝ' => Ok(NKo::NkoLetterFa), 'ߞ' => Ok(NKo::NkoLetterKa), 'ߟ' => Ok(NKo::NkoLetterLa), 'ߠ' => Ok(NKo::NkoLetterNaWoloso), 'ߡ' => Ok(NKo::NkoLetterMa), 'ߢ' => Ok(NKo::NkoLetterNya), 'ߣ' => Ok(NKo::NkoLetterNa), 'ߤ' => Ok(NKo::NkoLetterHa), 'ߥ' => Ok(NKo::NkoLetterWa), 'ߦ' => Ok(NKo::NkoLetterYa), 'ߧ' => Ok(NKo::NkoLetterNyaWoloso), 'ߨ' => Ok(NKo::NkoLetterJonaJa), 'ߩ' => Ok(NKo::NkoLetterJonaCha), 'ߪ' => Ok(NKo::NkoLetterJonaRa), '߫' => Ok(NKo::NkoCombiningShortHighTone), '߬' => Ok(NKo::NkoCombiningShortLowTone), '߭' => Ok(NKo::NkoCombiningShortRisingTone), '߮' => Ok(NKo::NkoCombiningLongDescendingTone), '߯' => Ok(NKo::NkoCombiningLongHighTone), '߰' => Ok(NKo::NkoCombiningLongLowTone), '߱' => Ok(NKo::NkoCombiningLongRisingTone), '߲' => Ok(NKo::NkoCombiningNasalizationMark), '߳' => Ok(NKo::NkoCombiningDoubleDotAbove), 'ߴ' => Ok(NKo::NkoHighToneApostrophe), 'ߵ' => Ok(NKo::NkoLowToneApostrophe), '߶' => Ok(NKo::NkoSymbolOoDennen), '߷' => Ok(NKo::NkoSymbolGbakurunen), '߸' => Ok(NKo::NkoComma), '߹' => 
Ok(NKo::NkoExclamationMark), 'ߺ' => Ok(NKo::NkoLajanyalan), '߽' => Ok(NKo::NkoDantayalan), '߾' => Ok(NKo::NkoDoromeSign), _ => Err(()), } } }

// Converts the variant to its Unicode scalar value (code point).
impl Into<u32> for NKo {
    fn into(self) -> u32 {
        // A Rust `char` *is* a Unicode scalar value, so a plain cast is
        // exact. The previous implementation round-tripped through
        // `escape_unicode()` ("\u{7c0}"), stripped the "\u{"/"}" markup
        // and re-parsed the hex with an `unwrap`, allocating several
        // strings just to recover the same number.
        let c: char = self.into();
        c as u32
    }
}

// Fallible conversion from an arbitrary code point: only scalar values
// inside the NKo block map to a variant, everything else is `Err(())`.
impl std::convert::TryFrom<u32> for NKo {
    type Error = ();
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        if let Ok(c) = char::try_from(u) {
            Self::try_from(c)
        } else {
            Err(())
        }
    }
}

// Iterates through the block in code-point order; `next` yields the
// variant at the successor code point and returns `None` once that code
// point is no longer part of the block (or is unassigned).
impl Iterator for NKo {
    type Item = Self;
    fn next(&mut self) -> Option<Self> {
        let index: u32 = (*self).into();
        use std::convert::TryFrom;
        Self::try_from(index + 1).ok()
    }
}

impl NKo {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        NKo::NkoDigitZero
    }

    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        // Relies on the `{:#?}` Debug form being exactly the variant name.
        let s = std::format!("NKo{:#?}", self);
        string_morph::to_sentence_case(&s)
    }
}
// NOTE(review): central error type for this actix-web service. Each variant
// documents the HTTP status it is mapped to; `ResponseError::error_response`
// below performs that mapping and defaults every unlisted variant to
// 500 Internal Server Error (logged via `error!`). The `From` impls funnel
// MySQL, integer-parsing and JSON-payload failures into this type so
// handlers can use `?`.
// NOTE(review): MySQL error code 1452 is special-cased into
// `ConstraintError(None)` — presumably a foreign-key violation that should
// read as a client error; confirm the intended status with the handlers.
use actix_web::client::SendRequestError; use actix_web::{error, HttpResponse, ResponseError}; use awc; use core::num::ParseIntError; use mysql::Error as MySqlError; use std::fmt; //use std::option::NoneError; type Field = String; #[derive(Debug)] /// An custom error type, that handles convertion to HTTP error codes pub enum Error { /// Internal Database Errors -> 500 DatabaseError(MySqlError), /// Database Constraints, usually from invalid User input -> 400 or 500 ConstraintError(Option<Field>), /// User input is too long -> 400 DataTooLong(Field), /// User input has wrong type -> 400 IllegalValueForType(Field), /// Database is inconsistent -> 500 IllegalState, /// Invalid Json from user -> 400 JsonPayloadError(actix_web::error::JsonPayloadError), /// Backend can not authenticate with the Keycloak server-> 500 //KeycloakAuthenticationError(Box<RequestTokenError<dyn Fail, BasicErrorResponseType>>), /// No connection to Keycloak server -> 500 KeycloakConnectionError(SendRequestError), /// Keycload answer wrong -> 500 KeycloakJsonError(awc::error::JsonPayloadError), /// Authentication Token is invalid -> 401 InvalidAuthenticationError, /// Missing a required claim -> 403 YouShallNotPassError, /// Not even logged in -> 401 SpeakFriendAndEnterError, /// Missing parameter in URL -> 400 BadRequestFormat, /// ActixError(error::Error), /// No item with given id found -> 404 ItemNotFound, } impl From<MySqlError> for Error { fn from(error: MySqlError) -> Self { match error { MySqlError::MySqlError(ref e) if e.code == 1452 => Error::ConstraintError(None), /*MySqlError::MySqlError(e) => match e.code { 1452 => DatabaseError::FieldError(FieldError::ConstraintError(None)), _ => DatabaseError::GenericError(MySqlError::MySqlError(e)), },*/ _ => Error::DatabaseError(error), } } } //impl From<NoneError> for Error { // fn from(error: NoneError) -> Self { // Error::BadRequestFormat // } //} impl From<ParseIntError> for Error { fn from(_error: ParseIntError) -> Self { 
// (continuation) ParseIntError -> BadRequestFormat: a URL path/query
// segment that fails to parse as an integer is treated as a malformed
// request (400).
Error::BadRequestFormat } } impl From<actix_web::error::JsonPayloadError> for Error { fn from(error: actix_web::error::JsonPayloadError) -> Self { Error::JsonPayloadError(error) } } /* impl From<RequestTokenError<Fail, BasicErrorResponseType>> for Error { fn from(error: RequestTokenError<dyn Fail, BasicErrorResponseType>) -> Self { Error::KeycloakAuthenticationError(Box::new(error)) } } */ // impl From<error::Error> for Error { // fn from(error: error::Error) -> Self { // Error::ActixError(error) // } // } /* impl From<error::InternalError<ParseIntError>> for Error { fn from(error: error::InternalError<ParseIntError>) -> Self { Error::ActixInternalError(error) } } */ impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { //TODO: Use Field when available Error::ConstraintError(_) => write!(f, "ERROR: unknown constaint error"), Error::DataTooLong(ref field) => write!(f, "ERROR: data too long for field: {}", field), Error::IllegalValueForType(ref field) => { write!(f, "ERROR: illegal value in field: {}", field) } Error::DatabaseError(ref err) => write!(f, "{{ {} }}", err), Error::JsonPayloadError(ref err) => write!(f, "{{ {} }}", err), //Error::KeycloakAuthenticationError(ref err) => write!(f, "{{ {} }}", err), // Error::ActixError(ref err) => write!(f, "{{ {} }}", err), _ => write!(f, "ERROR: unknown error"), } } } //impl Fail for Error {} impl ResponseError for Error { fn error_response(&self) -> HttpResponse { match *self { Error::DataTooLong(ref e) => HttpResponse::BadRequest() .header("x-field", e.clone()) .body(format!("{}", self)), Error::InvalidAuthenticationError => HttpResponse::Unauthorized() .header( "WWW-Authenticate", format!("Bearer realm=\"{}\"", "liberation"), //TODO: Use config for realm name ) .finish(), Error::YouShallNotPassError => HttpResponse::Forbidden().finish(), Error::SpeakFriendAndEnterError => HttpResponse::Unauthorized().finish(), //_ => HttpResponse::InternalServerError().finish(), TODO: 
Debugging option // Error::ActixError(err) => err.as_response_error().error_response(), // Error::ActixInternalError(err) => err.error_response(), _ => { error!("Internal Server Error: {:?}", self); HttpResponse::InternalServerError().body(format!("{}", self)) }, } } }
use grammers_client::types::Message; use grammers_client::{ClientHandle, InputMessage}; use grammers_mtproto::mtp::RpcError; use grammers_mtsender::InvocationError; use grammers_tl_types as tl; pub async fn resend_message( old_message_id: i32, message: InputMessage, client_handler: &mut ClientHandle, peer: &tl::enums::InputPeer, ) -> i32 { client_handler .delete_messages(None, &[old_message_id]) .await .unwrap(); // TODO this method should return message instance client_handler.send_message(peer, message).await.unwrap(); last_message(client_handler, &peer).await } pub async fn last_message(client_handler: &mut ClientHandle, peer: &tl::enums::InputPeer) -> i32 { let mut messages = client_handler.search_messages(&peer); messages.next().await.unwrap().unwrap().id() } pub async fn edit_or_recreate( id: i32, message: InputMessage, message_again: InputMessage, client_handler: &mut ClientHandle, peer: &tl::enums::InputPeer, ) -> Option<i32> { let result = client_handler.edit_message(&peer, id, message).await; match result { Ok(_) => None, Err(InvocationError::Rpc(RpcError { name, .. })) => { if name == "MESSAGE_EDIT_TIME_EXPIRED" { let res = resend_message(id, message_again, client_handler, peer).await; Some(res) } else { None } } Err(e) => panic!(e), } } pub async fn get_message(client_handle: &mut ClientHandle, file_id: i32) -> Message { client_handle .get_messages_by_id(None, &[file_id]) .await .unwrap() .remove(0) .unwrap() }
// NOTE(review): Piranha rewrite-test suite for Swift. `create_rewrite_tests!`
// expands to one #[test] per scenario, each running Piranha over
// test-resources/swift/<scenario> with the given substitutions and flags;
// the integer after the scenario name is presumably the expected number of
// rewritten files/matches — confirm against the macro definition in the
// parent test module. The `#[ignore]`d tests below are long-running and use
// the shared `execute_piranha_with_default_swift_args` driver instead.
/* Copyright (c) 2023 Uber Technologies, Inc. <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at <p>http://www.apache.org/licenses/LICENSE-2.0 <p>Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use std::path::PathBuf; use super::{create_rewrite_tests, execute_piranha_and_check_result, substitutions}; use crate::models::{ default_configs::SWIFT, language::PiranhaLanguage, piranha_arguments::PiranhaArgumentsBuilder, }; create_rewrite_tests! { SWIFT, // This scenario is "derived" from plugin cleanup. // Tests cascading file delete based on enum and type alias. // This cleanup requires the concept of global tags test_cascading_delete_file: "cascade_file_delete", 3, substitutions = substitutions! { "stale_flag_name" => "Premium-Icon" }, cleanup_comments = true, delete_file_if_empty= false; test_cascading_delete_file_custom_global_tag: "cascade_file_delete_custom_global_tag", 3, substitutions = substitutions! { "stale_flag_name" => "Premium-Icon" }, cleanup_comments = true, global_tag_prefix ="universal_tag.".to_string(), cleanup_comments_buffer = 3, delete_file_if_empty= false; test_leading_comma: "leading_comma", 1, substitutions = substitutions! 
{ "stale_flag" => "one" }, cleanup_comments = true, delete_file_if_empty= false; } fn execute_piranha_with_default_swift_args(scenario: &str, substitutions: Vec<(String, String)>) { let _path = PathBuf::from("test-resources").join(SWIFT).join(scenario); let temp_dir = super::copy_folder_to_temp_dir(&_path.join("input")); let piranha_arguments = PiranhaArgumentsBuilder::default() .paths_to_codebase(vec![temp_dir.path().to_str().unwrap().to_string()]) .path_to_configurations(_path.join("configurations").to_str().unwrap().to_string()) .language(PiranhaLanguage::from(SWIFT)) .cleanup_comments(true) .substitutions(substitutions) .delete_file_if_empty(false) .build(); execute_piranha_and_check_result(&piranha_arguments, &_path.join("expected"), 1, true); temp_dir.close().unwrap(); } #[test] #[ignore] // Long running test fn test_cleanup_rules_file() { super::initialize(); execute_piranha_with_default_swift_args( "cleanup_rules", substitutions! { "stale_flag" => "stale_flag_one", "treated" => "true", "treated_complement" => "false" }, ); } #[test] #[ignore] // Long running test fn test_cleanup_with_derivatives() { super::initialize(); execute_piranha_with_default_swift_args("cleanup_with_derivatives", vec![]); } #[test] #[ignore] // Long running test fn test_local_variable_inline_file() { super::initialize(); execute_piranha_with_default_swift_args("variable_inline/local_variable_inline", vec![]); } #[test] #[ignore] // Long running test fn test_field_variable_inline_file() { super::initialize(); execute_piranha_with_default_swift_args("variable_inline/field_variable_inline", vec![]); } #[test] #[ignore] // Long running test fn test_adhoc_variable_inline_file() { super::initialize(); execute_piranha_with_default_swift_args("variable_inline/adhoc_variable_inline", vec![]); } #[test] #[ignore] // Long running test fn test_delete_everything_after_return() { super::initialize(); execute_piranha_with_default_swift_args("delete_statements_after_return", vec![]); }
/// Flattens a `NestedInteger` structure (LeetCode 341) and yields its
/// integers in order via `next`/`has_next`.
///
/// A `NestedIterator` is either a single integer (`int_mode == true`) or
/// an ordered queue of child iterators. `reduce` maintains the invariant
/// that the front child, if any, has a value available, which makes
/// `has_next` a cheap emptiness check.
#[derive(Debug)]
struct NestedIterator {
    // True when this node wraps a single integer rather than a list.
    int_mode: bool,
    // The wrapped integer; `take`n by `next` so it is yielded only once.
    int: Option<i32>,
    // Child iterators. A VecDeque so exhausted children can be dropped
    // from the front in O(1); the previous `Vec::remove(0)` shifted the
    // whole vector (O(n)) on every pop.
    its: std::collections::VecDeque<NestedIterator>,
}

impl NestedIterator {
    /// Wraps a single integer.
    fn new_from_int(i: i32) -> Self {
        Self {
            int_mode: true,
            int: Some(i),
            its: std::collections::VecDeque::new(),
        }
    }

    /// Builds an iterator over a nested list. (Parameter renamed to
    /// snake_case — Rust parameter names are not part of the call
    /// interface, so callers are unaffected.)
    fn new(nested_list: Vec<NestedInteger>) -> Self {
        let mut its = std::collections::VecDeque::new();
        for ni in nested_list {
            match ni {
                NestedInteger::Int(i) => its.push_back(Self::new_from_int(i)),
                NestedInteger::List(v) => its.push_back(Self::new(v)),
            }
        }
        let mut s = Self {
            int_mode: false,
            int: None,
            its,
        };
        s.reduce();
        s
    }

    /// Drops exhausted children from the front so that, afterwards,
    /// either `its` is empty or its front has a next value.
    fn reduce(&mut self) {
        if self.int_mode {
            return;
        }
        while let Some(front) = self.its.front() {
            if front.has_next() {
                return;
            }
            self.its.pop_front();
        }
    }

    /// Returns the next integer.
    ///
    /// Panics if called when `has_next()` is false (same contract as the
    /// original implementation).
    fn next(&mut self) -> i32 {
        if self.int_mode {
            self.int.take().unwrap()
        } else {
            let result = self.its[0].next();
            self.reduce();
            result
        }
    }

    /// True while there are integers left to yield.
    fn has_next(&self) -> bool {
        if self.int_mode {
            self.int.is_some()
        } else {
            // `reduce` guarantees a non-empty queue has a ready front.
            !self.its.is_empty()
        }
    }
}
// NOTE(review): ClickHouse `Nullable(T)` column wrapper. `nulls` is a byte
// mask parallel to the inner column: 1 = NULL at that row, 0 = value
// present (see `push`/`at`). On the wire the full null mask is read/written
// first, followed by the inner column's data (`load`/`save`). For a NULL
// row, `push` still appends a type-default value to the inner column so
// the two stay index-aligned.
// NOTE(review): `push` unwraps `Arc::get_mut`, so pushing into a shared
// (already cloned) column panics — presumably columns are uniquely owned
// while being built; confirm at call sites.
use std::sync::Arc; use crate::{ binary::{Encoder, ReadEx}, errors::Result, types::{ column::{ column_data::{ArcColumnData, BoxColumnData}, ArcColumnWrapper, ColumnData, }, SqlType, Value, ValueRef, }, }; use chrono_tz::Tz; use either::Either; pub(crate) struct NullableColumnData { pub(crate) inner: ArcColumnData, pub(crate) nulls: Vec<u8>, } impl NullableColumnData { pub(crate) fn load<R: ReadEx>( reader: &mut R, type_name: &str, size: usize, tz: Tz, ) -> Result<Self> { let mut nulls = vec![0; size]; reader.read_bytes(nulls.as_mut())?; let inner = <dyn ColumnData>::load_data::<ArcColumnWrapper, _>(reader, type_name, size, tz)?; Ok(NullableColumnData { inner, nulls }) } } impl ColumnData for NullableColumnData { fn sql_type(&self) -> SqlType { let inner_type = self.inner.sql_type(); SqlType::Nullable(inner_type.into()) } fn save(&self, encoder: &mut Encoder, start: usize, end: usize) { let nulls: &[u8] = self.nulls.as_ref(); encoder.write_bytes(&nulls[start..end]); self.inner.save(encoder, start, end); } fn len(&self) -> usize { assert_eq!(self.nulls.len(), self.inner.len()); self.inner.len() } fn push(&mut self, value: Value) { let inner_column: &mut dyn ColumnData = Arc::get_mut(&mut self.inner).unwrap(); if let Value::Nullable(e) = value { match e { Either::Left(sql_type) => { let default_value = Value::default(sql_type.clone()); inner_column.push(default_value); self.nulls.push(true as u8); } Either::Right(inner) => { inner_column.push(*inner); self.nulls.push(false as u8); } } } else { inner_column.push(value); self.nulls.push(false as u8); } } fn at(&self, index: usize) -> ValueRef { if self.nulls[index] == 1 { let sql_type = self.inner.sql_type(); ValueRef::Nullable(Either::Left(sql_type.into())) } else { let inner_value = self.inner.at(index); ValueRef::Nullable(Either::Right(Box::new(inner_value))) } } fn clone_instance(&self) -> BoxColumnData { Box::new(Self { inner: self.inner.clone(), nulls: self.nulls.clone(), }) } unsafe fn get_internal(&self, 
pointers: &[*mut *const u8], level: u8, props: u32) -> Result<()> { if level == self.sql_type().level() { *pointers[0] = self.nulls.as_ptr(); *(pointers[1] as *mut usize) = self.len(); Ok(()) } else { self.inner.get_internal(pointers, level, props) } } fn cast_to(&self, _this: &ArcColumnData, target: &SqlType) -> Option<ArcColumnData> { if let SqlType::Nullable(inner_target) = target { if let Some(inner) = self.inner.cast_to(&self.inner, inner_target) { return Some(Arc::new(NullableColumnData { inner, nulls: self.nulls.clone(), })); } } None } }
// 通知rust我们要使用外部依赖,这也会调用相应的use rand,所以现在可以使用rand::前缀来调用rand crate中的任何内容 extern crate rand; // 输入输出库,获取用户输入 // io库来自于标准库(也被称为std) use std::io; use std::cmp::Ordering; use rand::Rng; fn main() { println!("Guess the number!"); // 获取1-101之间的随机数 let secret_number = rand::thread_rng().gen_range(1,101); println!("The secret number is:{}",secret_number); loop { println!("Please input your guess."); // 在rust中,变量默认是不可变的,在变量名前使用mut来使一个变量可变 // new是string类型的一个关联函数,new函数在这里创建了一个空的string,这是创建类型实例的惯用函数名 let mut guess = String::new(); // 同理,stdin就是io库的关联函数,调用stdin,调用read_line方法从标准输入句柄获取用户输入 // 我们还向read_line()传递了一个参数:&mut guess // read_line的工作是无论用户输入什么内容,都将其存入一个字符串中,因此它需要字符串作为参数,这个参数应该是 // 可变的,以便将用户的输入附加上去。 // &表示这个参数是一个引用,允许多处代码访问同一处数据,而无需在内存中多次拷贝 // 下半部分是另一个方法,最好拆成单行来写。 // 使用Result类型来处理潜在的错误,Result类型是枚举,通常也写作enums,如果不写下面那行,程序可以正常执行, // 但是会抛出一个警告,说明可能有一个潜在的错误没有解决,所以应该编写错误处理代码,如果我们就是希望出现错误时 // 程序立即奔溃,所以直接使用expect io::stdin().read_line(&mut guess) .expect("Failed to read line"); // trim消除前后的空格,消除用户按下回车之后的\n let guess: u32 = guess.trim().parse() .expect("Please type a number!"); // 使用println!占位符打印值,{}是预留的占位符,第一个{}对应第一个值,第二个对应第二个值,以此类推 println!("You guessed:{}",guess); match guess.cmp(&secret_number){ Ordering::Less => println!("Too small!"), Ordering::Greater => println!("Too big!"), Ordering::Equal => { println!("You win!"); break; }, } } }
use crate::rtb_type;

// Start-delay mode for an ad placement (OpenRTB-style `startdelay`
// semantics): pre-roll is 0, generic mid-/post-roll use negative markers.
// NOTE(review): the OpenRTB spec lists generic post-roll as -2; confirm
// that `GenericPostRoll = -3` is intentional. The second macro argument
// (`0`) is presumably the default value — check the `rtb_type!` definition.
rtb_type! { StartDelayMode, 0, PreRoll=0; GenericMidRoll=-1; GenericPostRoll=-3 }
// NOTE(review): X25519-style key material for mixnet encryption. A private
// key is a random scalar; the matching public key is the scalar multiple
// of the curve generator (`public = CURVE_GENERATOR * private`), as also
// encoded by the `From<&PrivateKey> for PublicKey` impl below.
// NOTE(review): both `from_bytes` impls `copy_from_slice` into a
// `[u8; 32]`, which panics if the slice is not exactly 32 bytes, and
// `Scalar::from_canonical_bytes(...).unwrap()` panics on non-canonical
// scalar encodings — acceptable for trusted local key files, but a
// fallible API would be safer if these bytes can arrive off the network.
use crate::encryption::{ MixnetEncryptionKeyPair, MixnetEncryptionPrivateKey, MixnetEncryptionPublicKey, }; use crate::PemStorable; use curve25519_dalek::montgomery::MontgomeryPoint; use curve25519_dalek::scalar::Scalar; // TODO: ensure this is a proper name for this considering we are not implementing entire DH here const CURVE_GENERATOR: MontgomeryPoint = curve25519_dalek::constants::X25519_BASEPOINT; pub struct KeyPair { pub(crate) private_key: PrivateKey, pub(crate) public_key: PublicKey, } impl MixnetEncryptionKeyPair<PrivateKey, PublicKey> for KeyPair { fn new() -> Self { let mut rng = rand_os::OsRng::new().unwrap(); let private_key_value = Scalar::random(&mut rng); let public_key_value = CURVE_GENERATOR * private_key_value; KeyPair { private_key: PrivateKey(private_key_value), public_key: PublicKey(public_key_value), } } fn private_key(&self) -> &PrivateKey { &self.private_key } fn public_key(&self) -> &PublicKey { &self.public_key } fn from_bytes(priv_bytes: &[u8], pub_bytes: &[u8]) -> Self { KeyPair { private_key: PrivateKey::from_bytes(priv_bytes), public_key: PublicKey::from_bytes(pub_bytes), } } } // COPY IS DERIVED ONLY TEMPORARILY UNTIL https://github.com/nymtech/nym/issues/47 is fixed #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub struct PrivateKey(pub Scalar); impl MixnetEncryptionPrivateKey for PrivateKey { type PublicKeyMaterial = PublicKey; fn to_bytes(&self) -> Vec<u8> { self.0.to_bytes().to_vec() } fn from_bytes(b: &[u8]) -> Self { let mut bytes = [0; 32]; bytes.copy_from_slice(&b[..]); let key = Scalar::from_canonical_bytes(bytes).unwrap(); Self(key) } } impl PemStorable for PrivateKey { fn pem_type(&self) -> String { String::from("X25519 PRIVATE KEY") } } #[derive(Debug, Clone, Eq, PartialEq)] pub struct PublicKey(pub MontgomeryPoint); impl<'a> From<&'a PrivateKey> for PublicKey { fn from(pk: &'a PrivateKey) -> Self { PublicKey(CURVE_GENERATOR * pk.0) } } impl MixnetEncryptionPublicKey for PublicKey { type PrivateKeyMaterial = PrivateKey; 
fn to_bytes(&self) -> Vec<u8> { self.0.to_bytes().to_vec() } fn from_bytes(b: &[u8]) -> Self { let mut bytes = [0; 32]; bytes.copy_from_slice(&b[..]); let key = MontgomeryPoint(bytes); Self(key) } } impl PemStorable for PublicKey { fn pem_type(&self) -> String { String::from("X25519 PUBLIC KEY") } }
use beanstalkd::Beanstalkd;

/// Reserves the next job from the queue, prints its payload to stdout and
/// then deletes it.
///
/// Panics (with context) if no job can be reserved, e.g. when the
/// connection is lost. A failed delete is deliberately ignored: the
/// reservation will simply time out and the job will be handed to
/// another consumer.
pub fn pop(beanstalkd: &mut Beanstalkd) {
    let (id, message) = beanstalkd
        .reserve()
        .expect("failed to reserve a job from beanstalkd");
    println!("{}", message);
    // Best effort — see the note above about the reservation timeout.
    let _ = beanstalkd.delete(id);
}
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

mod ast;
mod evaluator;
mod lexer;
mod object;
mod parser;
mod repl;
mod token;

use crate::lexer::*;
use crate::object::*;
use crate::parser::*;
use std::cell::*;
use std::rc::*;

/// Lexes, parses and evaluates a small Monkey benchmark program
/// (naive doubly-recursive fibonacci) and prints the result.
fn main() {
    // Monkey source under test.
    let input = " let fibonacci = fn(x) { if (x == 0) { 0 } else { if (x == 1) { return 1; } else { fibonacci(x - 1) + fibonacci(x - 2); } } }; fibonacci(30); ";

    // Build the front-end pipeline: lexer feeds the parser.
    let lexer = Lexer::new(String::from(input));
    let mut parser = Parser::new(lexer);

    // A parse failure is fatal for this driver program.
    let program = match parser.parse_program() {
        Ok(program) => program,
        Err(err) => panic!("{:?}", err),
    };

    // Evaluate in a fresh shared environment and report the result.
    let env = Rc::new(RefCell::new(new_environment()));
    let result = evaluator::evaluate(program, Rc::clone(&env));
    println!("result={}", result.inspect());
}
//! QuotientFilter implementation. use std::collections::hash_map::DefaultHasher; use std::collections::VecDeque; use std::hash::{BuildHasher, BuildHasherDefault, Hash, Hasher}; use std::marker::PhantomData; use fixedbitset::FixedBitSet; use succinct::{IntVec, IntVecMut, IntVector}; use crate::filters::Filter; use crate::helpers::all_zero_intvector; /// Error that signals that the QuotientFilter is full. #[derive(Debug, Clone, Copy)] pub struct QuotientFilterFull; /// Internal results for scanning the quotientfilter. struct ScanResult { /// Indicates if the requested element is already present in the filter. present: bool, /// Position where the search ended. position: usize, /// Start position of run where the search ended. start_of_run: Option<usize>, } impl ScanResult { fn has_run(&self) -> bool { self.start_of_run.is_some() } fn at_start_of_run(&self) -> bool { match self.start_of_run { Some(start) => start == self.position, None => false, } } } /// A QuotientFilter is a set-like data structure, that keeps track of elements it has seen without /// the need to store them. Looking up values has a certain false positive rate, but a false /// negative rate of 0%. 
/// /// # Examples /// ``` /// use pdatastructs::filters::Filter; /// use pdatastructs::filters::quotientfilter::QuotientFilter; /// /// // set up filter /// let bits_quotient = 16; /// let bits_remainder = 5; /// let mut filter = QuotientFilter::with_params(bits_quotient, bits_remainder); /// /// // add some data /// filter.insert(&"my super long string").unwrap(); /// /// // later /// assert!(filter.query(&"my super long string")); /// assert!(!filter.query(&"another super long string")); /// ``` /// /// Note that the filter is specific to `T`, so the following will not compile: /// /// ```compile_fail /// use pdatastructs::filters::Filter; /// use pdatastructs::filters::quotientfilter::QuotientFilter; /// /// // set up filter /// let bits_quotient = 16; /// let bits_remainder = 5; /// let mut filter1 = QuotientFilter::<u8>::with_params(bits_quotient, bits_remainder); /// let filter2 = QuotientFilter::<i8>::with_params(bits_quotient, bits_remainder); /// /// filter1.union(&filter2); /// ``` /// /// # Applications /// - when a lot of data should be added to the set and a moderate false positive rate is /// acceptable, was used for spell checking /// - as a pre-filter for more expensive lookups, e.g. in combination with a real set, map or /// database, so the final false positive rate is 0% /// /// # How It Works /// /// ## Setup /// There are `2^bits_quotient` slots, initial empty. For every slot, we store `bits_remainder` as /// fingerprint information, a `is_continuation` bit, a `is_occupied` bit and a `is_shifted` bit. /// All bits are initially set to false. 
/// /// ```text /// bits_quotient = 3 /// bits_remainder = 4 /// /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | position || 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | is_occupied || | | | | | | | | /// | is_continuation || | | | | | | | | /// | is_shifted || | | | | | | | | /// | remainder || 0x0 | 0x0 | 0x0 | 0x0 | 0x0 | 0x0 | 0x0 | 0x0 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// ``` /// /// ## Insertion /// On insertion, elements are hashed to 64 bits. From these, `bits_quotient` are used as a /// quotient and `bits_remainder` are used as remainder, the remaining bits are dropped. /// /// The quotient represents the canonical position in which the remainder should be inserted. If is /// is free, we use that position, set the `is_occupied` bit and are done. /// /// ```text /// x = "foo" /// h(x) = 0x0123456789abcda5 /// h(x) & 0x7f = 0x25 /// remainder = 0x5 /// quotient = 0x2 /// /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | position || 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | is_occupied || | | X | | | | | | /// | is_continuation || | | | | | | | | /// | is_shifted || | | | | | | | | /// | remainder || 0x0 | 0x0 | 0x2 | 0x0 | 0x0 | 0x0 | 0x0 | 0x0 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// ``` /// /// If not, linear probing is applied. If an element with the same quotient is already in the /// filter, the so called "run" of it will be extended. 
For extensions, the `is_continuation` bit /// is set as well as the `is_shifted` bit because the stored remainder is not in its canonical /// position: /// /// ```text /// x = "bar" /// h(x) = 0xad8caa00248af32e /// h(x) & 0x7f = 0x2e /// remainder = 0xe /// quotient = 0x2 /// /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | position || 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | is_occupied || | | X | | | | | | /// | is_continuation || | | | X | | | | | /// | is_shifted || | | | X | | | | | /// | remainder || 0x0 | 0x0 | 0x2 | 0xe | 0x0 | 0x0 | 0x0 | 0x0 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | run || [=========] | /// +-----------------++-----------------------------------------------| /// ``` /// /// While doing so, the order of remainders within the run is preserved: /// /// ```text /// x = "elephant" /// h(x) = 0x34235511eeadbc26 /// h(x) & 0x7f = 0x26 /// remainder = 0x6 /// quotient = 0x2 /// /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | position || 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | is_occupied || | | X | | | | | | /// | is_continuation || | | | X | X | | | | /// | is_shifted || | | | X | X | | | | /// | remainder || 0x0 | 0x0 | 0x2 | 0x6 | 0xe | 0x0 | 0x0 | 0x0 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | run || [===============] | /// +-----------------++-----------------------------------------------| /// ``` /// /// If a new quotient is inserted but the corresponding run cannot start at the canonical position, /// the entire run will be shifted. A sequence of runs is also called "cluster". 
Even though the /// run is shifted, the original position will still be marked as occupied: /// /// ```text /// x = "banana" /// h(x) = 0xdfdfdfdfdfdfdf31 /// h(x) & 0x7f = 0x31 /// remainder = 0x1 /// quotient = 0x3 /// /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | position || 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | is_occupied || | | X | X | | | | | /// | is_continuation || | | | X | X | | | | /// | is_shifted || | | | X | X | X | | | /// | remainder || 0x0 | 0x0 | 0x2 | 0x6 | 0xe | 0x1 | 0x0 | 0x0 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | run || [===============] [===] | /// | cluster || [=====================] | /// +-----------------++-----------------------------------------------| /// ``` /// /// Remainders may duplicate over multiple runs: /// /// ```text /// x = "apple" /// h(x) = 0x0000000000000072 /// h(x) & 0x7f = 0x72 /// remainder = 0x2 /// quotient = 0x7 /// /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | position || 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | is_occupied || | | X | X | | | | X | /// | is_continuation || | | | X | X | | | | /// | is_shifted || | | | X | X | X | | | /// | remainder || 0x0 | 0x0 | 0x2 | 0x6 | 0xe | 0x1 | 0x0 | 0x2 | /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | run || [===============] [===] [===]| /// | cluster || [=====================] [===]| /// +-----------------++-----------------------------------------------| /// ``` /// /// The entire array works like a ring-buffer and operations can over- and underflow: /// /// ```text /// x = "last" /// h(x) = 0x11355343431323f3 /// h(x) & 0x7f = 0x73 /// remainder = 0x3 /// quotient = 0x7 /// /// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+ /// | position || 0 | 1 | 2 | 3 | 4 | 5 | 
6 | 7 |
/// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+
/// | is_occupied     ||     |     |  X  |  X  |     |     |     |  X  |
/// | is_continuation ||  X  |     |     |  X  |  X  |     |     |     |
/// | is_shifted      ||  X  |     |     |  X  |  X  |  X  |     |     |
/// | remainder       || 0x3 | 0x0 | 0x2 | 0x6 | 0xe | 0x1 | 0x0 | 0x2 |
/// +-----------------++-----+-----+-----+-----+-----+-----+-----+-----+
/// | run             ||====]       [===============] [===]       [====|
/// | cluster         ||====]       [=====================]       [====|
/// +-----------------++-----------------------------------------------|
/// ```
///
/// ## Lookup
/// The lookup basically follows the insertion procedure.
///
///
/// # See Also
/// - `std::collections::HashSet`: has a false positive rate of 0%, but also needs to store all
///   elements
///
/// # References
/// - ["Don’t Thrash: How to Cache your Hash on Flash" (short version), Michael A. Bender and others, 2012](http://static.usenix.org/events/hotstorage11/tech/final_files/Bender.pdf)
/// - ["Don’t Thrash: How to Cache your Hash on Flash" (long version), Michael A. Bender and others, 2012](https://www.vldb.org/pvldb/vol5/p1627_michaelabender_vldb2012.pdf)
/// - [Wikipedia: Quotient Filter](https://en.wikipedia.org/wiki/Quotient_filter)
#[derive(Clone, Debug)]
pub struct QuotientFilter<T, B = BuildHasherDefault<DefaultHasher>>
where
    T: Hash + ?Sized,
    B: BuildHasher + Clone + Eq,
{
    /// `is_occupied[q]` is set iff some stored element has canonical slot `q`.
    is_occupied: FixedBitSet,
    /// `is_continuation[s]` is set iff slot `s` continues a run started in an earlier slot.
    is_continuation: FixedBitSet,
    /// `is_shifted[s]` is set iff the remainder in slot `s` is not in its canonical slot.
    is_shifted: FixedBitSet,
    /// Fingerprint storage, one `bits_remainder`-wide entry per slot.
    remainders: IntVector,
    /// Number of bits used to address slots; the filter has `2^bits_quotient` slots.
    bits_quotient: usize,
    /// Hasher factory; `union` asserts equality so both filters hash identically.
    buildhasher: B,
    /// Number of elements currently stored.
    n_elements: usize,
    /// Ties the filter to `T` without owning a `T`; `fn() -> T` keeps `Send`/`Sync`
    /// independent of `T` (see the `send` test below).
    phantom: PhantomData<fn() -> T>,
}

impl<T> QuotientFilter<T>
where
    T: Hash + ?Sized,
{
    /// Create new quotient filter with:
    ///
    /// - `bits_quotient`: number of bits used for a quotient, aka `2^bits_quotient` slots will be
    ///   allocated
    /// - `bits_remainder`: number of bits used for the remainder, so every slot will require
    ///   `bits_remainder + 3` bits of storage
    ///
    /// and a default hasher.
pub fn with_params(bits_quotient: usize, bits_remainder: usize) -> Self { let buildhasher = BuildHasherDefault::<DefaultHasher>::default(); Self::with_params_and_hash(bits_quotient, bits_remainder, buildhasher) } } impl<T, B> QuotientFilter<T, B> where T: Hash + ?Sized, B: BuildHasher + Clone + Eq, { /// Create new quotient filter with: /// /// - `bits_quotient`: number of bits used for a quotient, aka `2^bits_quotient` slots will be /// allocated /// - `bits_remainder`: number of bits used for the remainder, so every slot will require /// `bits_remainder + 3` bits of storage /// - `buildhasher`: hash implementation pub fn with_params_and_hash( bits_quotient: usize, bits_remainder: usize, buildhasher: B, ) -> Self { assert!( (bits_remainder > 0) && (bits_remainder <= (usize::BITS as usize)), "bits_remainder ({}) must be greater than 0 and smaller or equal than {}", bits_remainder, usize::BITS, ); assert!( bits_quotient > 0, "bits_quotient ({}) must be greater than 0", bits_quotient, ); assert!( bits_remainder + bits_quotient <= 64, "bits_remainder ({}) + bits_quotient ({}) must be smaller or equal than 64", bits_remainder, bits_quotient, ); let len = 1 << bits_quotient; Self { is_occupied: FixedBitSet::with_capacity(len), is_continuation: FixedBitSet::with_capacity(len), is_shifted: FixedBitSet::with_capacity(len), remainders: all_zero_intvector(bits_remainder, len), bits_quotient, buildhasher, n_elements: 0, phantom: PhantomData, } } /// Number of bits used for addressing slots. pub fn bits_quotient(&self) -> usize { self.bits_quotient } /// Number of bits stored as fingeprint information. 
    pub fn bits_remainder(&self) -> usize {
        self.remainders.element_bits()
    }

    /// Split the 64-bit hash of `obj` into `(quotient, remainder)`.
    ///
    /// The top `64 - bits_quotient - bits_remainder` bits of the hash are discarded, the next
    /// `bits_quotient` bits form the quotient (canonical slot index) and the lowest
    /// `bits_remainder` bits form the stored fingerprint.
    fn calc_quotient_remainder(&self, obj: &T) -> (usize, usize) {
        let bits_remainder = self.bits_remainder();
        let mut hasher = self.buildhasher.build_hasher();
        obj.hash(&mut hasher);
        let fingerprint = hasher.finish();
        let bits_trash = 64 - bits_remainder - self.bits_quotient;
        // clear the unused top bits; guarded because a shift by 64 would overflow when
        // bits_trash == 0 (i.e. quotient + remainder use all 64 bits)
        let trash = if bits_trash > 0 {
            (fingerprint >> (64 - bits_trash)) << (64 - bits_trash)
        } else {
            0
        };
        let fingerprint_clean = fingerprint - trash;
        let quotient = fingerprint_clean >> bits_remainder;
        let remainder = fingerprint_clean - (quotient << bits_remainder);
        (quotient as usize, remainder as usize)
    }

    /// Decrement `pos` by one slot, wrapping around; the slot array acts as a ring buffer.
    fn decr(&self, pos: &mut usize) {
        *pos = if *pos == 0 {
            self.is_occupied.len() - 1
        } else {
            *pos - 1
        };
    }

    /// Increment `pos` by one slot, wrapping around; the slot array acts as a ring buffer.
    fn incr(&self, pos: &mut usize) {
        *pos = if *pos == self.is_occupied.len() - 1 {
            0
        } else {
            *pos + 1
        }
    }

    /// Scan for `(quotient, remainder)` and report where the search ended.
    ///
    /// With `on_insert == false` this only answers membership; with `on_insert == true` the scan
    /// additionally locates the exact slot where the remainder would have to be placed, so the
    /// query fast path must be skipped.
    fn scan(&self, quotient: usize, remainder: usize, on_insert: bool) -> ScanResult {
        let run_exists = self.is_occupied[quotient];
        if (!run_exists) && (!on_insert) {
            // fast-path for query, since we don't need to find the correct position for the
            // insertion process
            return ScanResult {
                present: false,
                position: quotient,
                start_of_run: None,
            };
        }

        // walk back to find the beginning of the cluster
        let mut b = quotient;
        while self.is_shifted[b] {
            self.decr(&mut b);
        }

        // walk forward to find the actual start of the run
        let mut s = b;
        while b != quotient {
            // invariant: `s` points to first slot of bucket `b`

            // skip all elements in the current run
            loop {
                self.incr(&mut s);
                if !self.is_continuation[s] {
                    break;
                }
            }

            // find the next occupied bucket
            loop {
                self.incr(&mut b);
                if self.is_occupied[b] || ((b == quotient) && on_insert) {
                    break;
                }
            }
        }

        // `s` now points to the first remainder in bucket at `quotient`

        // search for remainder within the run
        if run_exists {
            let start_of_run = s;
            loop {
                let r = self.remainders.get(s as u64);
                if r == remainder {
                    return ScanResult {
                        present: true,
                        position: s,
                        start_of_run: Some(start_of_run),
                    };
                }
                if r > remainder {
                    // remainders are sorted within run
                    break;
                }

                self.incr(&mut s);
                if !self.is_continuation[s] {
                    break;
                }
            }

            ScanResult {
                present: false,
                position: s,
                start_of_run: Some(start_of_run),
            }
        } else {
            ScanResult {
                present: false,
                position: s,
                start_of_run: None,
            }
        }
    }

    /// Insert an already-hashed `(quotient, remainder)` pair.
    ///
    /// Returns `Ok(true)` if the pair was inserted, `Ok(false)` if it was already present and
    /// `Err(QuotientFilterFull)` if every slot is in use.
    fn insert_internal(
        &mut self,
        quotient: usize,
        remainder: usize,
    ) -> Result<bool, QuotientFilterFull> {
        let scan_result = self.scan(quotient, remainder, true);

        // early exit if the element is already present
        if scan_result.present {
            return Ok(false);
        }

        // we need to insert the element into the filter

        // error out if there is no space left
        if self.n_elements == self.is_occupied.len() {
            return Err(QuotientFilterFull);
        }

        // set up swap chain: remember the displaced slot content before overwriting it
        let mut current_is_continuation =
            self.is_continuation[scan_result.position] || scan_result.at_start_of_run();
        let mut current_remainder = self.remainders.get(scan_result.position as u64);
        let mut current_used =
            self.is_occupied[scan_result.position] || self.is_shifted[scan_result.position];

        // set current state
        self.remainders.set(scan_result.position as u64, remainder);
        // if scan_result.position != scan_result.start_of_run.unwrap_or(scan_result.position) {
        if scan_result.has_run() && (!scan_result.at_start_of_run()) {
            // might be an append operation, ensure is_continuation and is_shifted are set
            self.is_continuation.set(scan_result.position, true);
        }
        if scan_result.position != quotient {
            // not at canonical slot
            self.is_shifted.set(scan_result.position, true);
        }

        // run swap chain until nothing to do: each used slot gets shifted one position forward
        let start = scan_result.position;
        let mut position = scan_result.position;
        while current_used {
            self.incr(&mut position);

            let next_is_continuation = self.is_continuation[position];
            let next_remainder = self.remainders.get(position as u64);
            let next_used = self.is_occupied[position] || self.is_shifted[position];

            self.is_shifted.set(position, true);
            self.is_continuation.set(position, current_is_continuation);
            self.remainders.set(position as u64, current_remainder);

            current_is_continuation = next_is_continuation;
            current_remainder = next_remainder;
            current_used = next_used;

            // we checked for a free slot above, so wrapping all the way around is a bug
            if position == start {
                panic!("infinite loop detected");
            }
        }

        // mark canonical slot as occupied
        self.is_occupied.set(quotient, true);

        // done
        self.n_elements += 1;
        Ok(true)
    }
}

impl<T, B> Filter<T> for QuotientFilter<T, B>
where
    T: Hash + ?Sized,
    B: BuildHasher + Clone + Eq,
{
    type InsertErr = QuotientFilterFull;

    fn clear(&mut self) {
        self.is_occupied.clear();
        self.is_continuation.clear();
        self.is_shifted.clear();
        // IntVector has no clear; rebuild an all-zero vector of the same geometry
        self.remainders =
            IntVector::with_fill(self.remainders.element_bits(), self.remainders.len(), 0);
        self.n_elements = 0;
    }

    fn insert(&mut self, obj: &T) -> Result<bool, Self::InsertErr> {
        let (quotient, remainder) = self.calc_quotient_remainder(obj);
        self.insert_internal(quotient, remainder)
    }

    fn union(&mut self, other: &Self) -> Result<(), Self::InsertErr> {
        assert_eq!(
            self.bits_quotient, other.bits_quotient,
            "bits_quotient must be equal (left={}, right={})",
            self.bits_quotient, other.bits_quotient
        );
        assert_eq!(
            self.bits_remainder(),
            other.bits_remainder(),
            "bits_remainder must be equal (left={}, right={})",
            self.bits_remainder(),
            other.bits_remainder()
        );
        assert!(
            self.buildhasher == other.buildhasher,
            "buildhasher must be equal",
        );

        // create backup of the entire state, so a mid-union failure can be rolled back
        let is_occupied_backup = self.is_occupied.clone();
        let is_continuation_backup = self.is_continuation.clone();
        let is_shifted_backup = self.is_shifted.clone();
        let remainders_backup = self.remainders.clone();
        let n_elements_backup = self.n_elements;

        // walk `other` cluster by cluster, reconstructing each slot's quotient so the pair can
        // be re-inserted into `self`
        for i in 0..other.is_occupied.len() {
            if other.is_occupied[i] && !other.is_shifted[i] {
                // found cluster start
                let mut quotient = i;
                if let Err(err) = self.insert_internal(quotient, other.remainders.get(i as u64)) {
                    self.is_occupied = is_occupied_backup;
                    self.is_continuation = is_continuation_backup;
                    self.is_shifted = is_shifted_backup;
                    self.remainders = remainders_backup;
                    self.n_elements = n_elements_backup;
                    return Err(err);
                }

                let mut next_quotients = VecDeque::new();
                let mut j = i;
                self.incr(&mut j);
                while (j != i) && other.is_shifted[j] {
                    if other.is_occupied[j] {
                        // this cluster contains another run, so remember the quotient
                        next_quotients.push_back(j);
                    }
                    if !other.is_continuation[j] {
                        // this is the start of another run, get the quotient
                        quotient = next_quotients.pop_front().unwrap();
                    }

                    if let Err(err) =
                        self.insert_internal(quotient, other.remainders.get(j as u64))
                    {
                        self.is_occupied = is_occupied_backup;
                        self.is_continuation = is_continuation_backup;
                        self.is_shifted = is_shifted_backup;
                        self.remainders = remainders_backup;
                        self.n_elements = n_elements_backup;
                        return Err(err);
                    }

                    self.incr(&mut j)
                }
            }
        }

        Ok(())
    }

    fn is_empty(&self) -> bool {
        self.n_elements == 0
    }

    fn len(&self) -> usize {
        self.n_elements
    }

    fn query(&self, obj: &T) -> bool {
        let (quotient, remainder) = self.calc_quotient_remainder(obj);
        self.scan(quotient, remainder, false).present
    }
}

#[cfg(test)]
mod tests {
    use super::QuotientFilter;
    use crate::filters::Filter;
    use crate::hash_utils::BuildHasherSeeded;
    use crate::test_util::{assert_send, NotSend};

    #[test]
    #[should_panic(expected = "bits_quotient (0) must be greater than 0")]
    fn new_bits_quotient_0() {
        QuotientFilter::<u64>::with_params(0, 16);
    }

    // the bits_remainder bound depends on the platform's usize width, hence two variants
    #[cfg(target_pointer_width = "32")]
    #[test]
    #[should_panic(
        expected = "bits_remainder (0) must be greater than 0 and smaller or equal than 32"
    )]
    fn new_bits_remainder_0() {
        QuotientFilter::<u64>::with_params(3, 0);
    }

    #[cfg(target_pointer_width = "64")]
    #[test]
    #[should_panic(
        expected = "bits_remainder (0) must be greater than 0 and smaller or equal than 64"
    )]
    fn new_bits_remainder_0() {
        QuotientFilter::<u64>::with_params(3, 0);
    }

    #[cfg(target_pointer_width = "32")]
    #[test]
    #[should_panic(
        expected = "bits_remainder (33) must be greater than 0 and smaller or equal than 32"
    )]
    fn new_bits_remainder_too_large() {
        QuotientFilter::<u64>::with_params(3, 33);
    }

    #[cfg(target_pointer_width = "64")]
    #[test]
    #[should_panic(
        expected = "bits_remainder (65) must be greater than 0 and smaller or equal than 64"
    )]
    fn new_bits_remainder_too_large() {
        QuotientFilter::<u64>::with_params(3, 65);
    }

    #[test]
    #[should_panic(
        expected = "bits_remainder (5) + bits_quotient (60) must be smaller or equal than 64"
    )]
    fn new_too_many_bits() {
        QuotientFilter::<u64>::with_params(60, 5);
    }

    #[test]
    fn new() {
        let qf = QuotientFilter::with_params(3, 16);
        assert!(qf.is_empty());
        assert_eq!(qf.len(), 0);
        assert!(!qf.query(&13));
        assert_eq!(qf.bits_quotient(), 3);
        assert_eq!(qf.bits_remainder(), 16);
    }

    #[test]
    fn insert() {
        let mut qf = QuotientFilter::with_params(3, 16);

        assert!(qf.insert(&13).unwrap());
        assert!(!qf.is_empty());
        assert_eq!(qf.len(), 1);
        assert!(qf.query(&13));
        assert!(!qf.query(&42));
    }

    // a second insert of the same element must be a no-op and report `false`
    #[test]
    fn double_insert() {
        let mut qf = QuotientFilter::with_params(3, 16);

        assert!(qf.insert(&13).unwrap());
        assert!(!qf.insert(&13).unwrap());
        assert!(!qf.is_empty());
        assert_eq!(qf.len(), 1);
        assert!(qf.query(&13));
        assert!(!qf.query(&42));
    }

    // 2^3 slots: the 9th distinct element must fail with QuotientFilterFull
    #[test]
    fn full() {
        let mut qf = QuotientFilter::with_params(3, 16);
        for i in 0..8 {
            qf.insert(&i).unwrap();
            for j in 0..i {
                assert!(qf.query(&j), "Cannot find {} after inserting {}", j, i);
            }
        }
        assert!(qf.insert(&1000).is_err());
    }

    #[test]
    fn clear() {
        let mut qf = QuotientFilter::with_params(3, 16);

        qf.insert(&13).unwrap();
        qf.clear();
        assert!(qf.is_empty());
        assert_eq!(qf.len(), 0);
        assert!(!qf.query(&13));
        assert_eq!(qf.bits_quotient(), 3);
        assert_eq!(qf.bits_remainder(), 16);
    }

    // clones must be independent copies
    #[test]
    fn clone() {
        let mut qf1 = QuotientFilter::with_params(3, 16);

        qf1.insert(&13).unwrap();
        let mut qf2 = qf1.clone();
        qf2.insert(&42).unwrap();

        assert_eq!(qf1.len(), 1);
        assert!(qf1.query(&13));
        assert!(!qf1.query(&42));

        assert_eq!(qf2.len(), 2);
        assert!(qf2.query(&13));
        assert!(qf2.query(&42));
    }

    #[test]
    fn union() {
        let mut qf1 = QuotientFilter::with_params(3, 16);
        let mut qf2 = QuotientFilter::with_params(3, 16);

        qf1.insert(&13).unwrap();
        qf1.insert(&42).unwrap();
        qf2.insert(&130).unwrap();
        qf2.insert(&420).unwrap();

        qf1.union(&qf2).unwrap();
        assert!(qf1.query(&13));
        assert!(qf1.query(&42));
        assert!(qf1.query(&130));
        assert!(qf1.query(&420));

        // `other` must be left untouched
        assert!(!qf2.query(&13));
        assert!(!qf2.query(&42));
        assert!(qf2.query(&130));
        assert!(qf2.query(&420));
    }

    #[test]
    #[should_panic(expected = "bits_quotient must be equal (left=3, right=4)")]
    fn union_panics_bits_quotient() {
        let mut qf1 = QuotientFilter::<i32>::with_params(3, 16);
        let qf2 = QuotientFilter::with_params(4, 16);
        qf1.union(&qf2).unwrap();
    }

    #[test]
    #[should_panic(expected = "bits_remainder must be equal (left=16, right=32)")]
    fn union_panics_bits_remainder() {
        let mut qf1 = QuotientFilter::<i32>::with_params(3, 16);
        let qf2 = QuotientFilter::with_params(3, 32);
        qf1.union(&qf2).unwrap();
    }

    #[test]
    #[should_panic(expected = "buildhasher must be equal")]
    fn union_panics_buildhasher() {
        let mut qf1 = QuotientFilter::<i32, BuildHasherSeeded>::with_params_and_hash(
            3,
            16,
            BuildHasherSeeded::new(0),
        );
        let qf2 = QuotientFilter::with_params_and_hash(3, 16, BuildHasherSeeded::new(1));
        qf1.union(&qf2).unwrap();
    }

    // a union that overflows the filter must roll back to the pre-union state
    #[test]
    fn union_full() {
        let mut qf1 = QuotientFilter::with_params(3, 16);
        let mut qf2 = QuotientFilter::with_params(3, 16);

        // fill up cf1
        let mut obj = 0;
        loop {
            if qf1.insert(&obj).is_err() {
                break;
            }
            obj += 1;
        }
        assert!(qf1.query(&0));

        // add some payload to cf2
        let n_qf2 = 3;
        for i in 0..n_qf2 {
            qf2.insert(&-i).unwrap();
        }
        assert_eq!(qf2.len(), n_qf2 as usize);
        assert!(!qf2.query(&1));

        // union with failure, state must not be altered
        assert!(qf2.union(&qf1).is_err());
        assert_eq!(qf2.len(), n_qf2 as usize);
        assert!(!qf2.query(&1));
    }

    // `T: ?Sized` allows inserting unsized types such as `str`
    #[test]
    fn insert_unsized() {
        let mut qf = QuotientFilter::with_params(3, 16);

        assert!(qf.insert("test1").unwrap());
        assert!(!qf.is_empty());
        assert_eq!(qf.len(), 1);
        assert!(qf.query("test1"));
        assert!(!qf.query("test2"));
    }

    // the filter must be Send even when T is not (PhantomData<fn() -> T>)
    #[test]
    fn send() {
        let qf = QuotientFilter::<NotSend>::with_params(3, 16);
        assert_send(&qf);
    }
}
use hacspec_hkdf::*;
use hacspec_lib::prelude::*;

/// One known-answer test vector: all byte strings are hex-encoded.
struct HKDFTestVectors<'a> {
    ikm: &'a str,  // input keying material
    salt: &'a str, // optional salt (may be empty)
    info: &'a str, // optional context/application info (may be empty)
    l: usize,      // requested output length in bytes
    prk: &'a str,  // expected pseudorandom key from extract()
    okm: &'a str,  // expected output keying material from expand()
}

// Official SHA-256 test vectors from RFC 5869, Appendix A (cases 1-3).
// https://tools.ietf.org/html/rfc5869
const HKDF_KAT: [HKDFTestVectors; 3] = [
    HKDFTestVectors {
        ikm: "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b",
        salt: "000102030405060708090a0b0c",
        info: "f0f1f2f3f4f5f6f7f8f9",
        l: 42,
        prk: "077709362c2e32df0ddc3f0dc47bba6390b6c73bb50f9c3122ec844ad7c2b3e5",
        okm: "3cb25f25faacd57a90434f64d0362f2a2d2d0a90cf1a5a4c5db02d56ecc4c5bf34007208d5b887185865"
    },
    HKDFTestVectors {
        ikm: "000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f",
        salt: "606162636465666768696a6b6c6d6e6f707172737475767778797a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeaf",
        info: "b0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff",
        l: 82,
        prk: "06a6b88c5853361a06104c9ceb35b45cef760014904671014a193f40c15fc244",
        okm: "b11e398dc80327a1c8e7f78c596a49344f012eda2d4efad8a050cc4c19afa97c59045a99cac7827271cb41c65e590e09da3275600c2f09b8367793a9aca3db71cc30c58179ec3e87c14c01d5c1f3434f1d87"
    },
    HKDFTestVectors {
        ikm: "0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b",
        salt: "",
        info: "",
        l: 42,
        prk: "19ef24a32c717b167f33a91d6f648bdf96596776afdb6377ac434c1c293ccb04",
        okm: "8da4e775a563c18f715f802a063c5a31b8a11f5c5ee1879ec3454e5f3c738d2d9d201395faa4b61a96c8"
    }
];

/// Run every KAT vector through extract() and expand() and compare hex output.
#[test]
fn test_kat() {
    for kat in HKDF_KAT.iter() {
        // extract: (salt, IKM) -> PRK
        let prk = extract(&ByteSeq::from_hex(kat.salt), &ByteSeq::from_hex(kat.ikm));
        assert_eq!(kat.prk, prk.to_hex());
        // expand: (PRK, info, L) -> OKM
        let okm = match expand(
            &ByteSeq::from_seq(&prk),
            &ByteSeq::from_hex(kat.info),
            kat.l,
        ) {
            Ok(okm) => okm,
            Err(_) => panic!("Invalid HKDF output length"),
        };
        assert_eq!(kat.okm, okm.to_hex());
    }
}

/// expand() must reject output lengths beyond the RFC 5869 limit (255 * hash length).
#[test]
fn test_invalid_output_len() {
    expand(
        &ByteSeq::from_hex("deadbeef"),
        &ByteSeq::from_hex("deadbeef"),
        10000,
    )
    .expect_err("The output should have been too long");
}
use pyo3::prelude::*; use rand::distributions::Distribution; use std::convert::TryInto; use ocl::ProQue; use failure::Fallible; /// Simulation parameters /// /// The idea is these are things that would stay the same across invocations /// /// Simulation has two natures. It lives in the Python world and has an impl accessible there. /// It also lives in the Rust world. Different methods are used here too. We need that so that /// it is easier to test it. #[pyclass(module = "rustsim")] struct Simulation { safety_stock: usize, lead_time: usize, order_quantity: usize, job_lot_zipf_precomp: Vec<u32>, itemwise_traffic_zipf_precomp: Vec<u32>, } /// Simulation implementation /// /// The following methods are all available from Python #[pymethods] impl Simulation { /// Implementation of python Simulation.__init__() (just wraps rust Simulation::new()) #[new] fn init( obj: &PyRawObject, safety_stock: usize, lead_time: usize, order_quantity: usize, job_lot_zipf: Option<f64>, itemwise_traffic_zipf: Option<f64>, ) { obj.init(Simulation::new( safety_stock, lead_time, order_quantity, job_lot_zipf, itemwise_traffic_zipf )); } /// Calls the appropriate OpenCL function fn repeat_simulate_demand(&self, starting_quantity: usize, count: usize) -> (usize, usize, usize, usize, f64, f64) { self.ocl_repeat_simulate_demand(starting_quantity, count).unwrap() } } /// Simulation Implementation, continued /// /// This group doesn't mention pymethods, and isn't visible from Python impl Simulation { fn new( safety_stock: usize, lead_time: usize, order_quantity: usize, job_lot_zipf: Option<f64>, itemwise_traffic_zipf: Option<f64>, ) -> Simulation { let job_lot_zipf = job_lot_zipf.unwrap_or(2.75); let itemwise_traffic_zipf = itemwise_traffic_zipf.unwrap_or(4.0); Simulation { safety_stock, lead_time, order_quantity, job_lot_zipf_precomp: precompute_zipf_buffer(1000, job_lot_zipf), itemwise_traffic_zipf_precomp: precompute_zipf_buffer(1000, itemwise_traffic_zipf) } } /// OpenCL implementation of 
repeat_simulate_demand /// There are several differences: /// /// 1. I don't want the bulk of the computation to be generating a perfect zipf distribution /// when I know we got that by eyeballing the curve anyway. So instead I generate a /// pretty large sample and put up with a small period (of like 16M elements) /// /// 2. The source code for the inner simulation in OpenCL is in simulation.cl. We read it /// into this program at compile time. using include_str!(filename) /// fn ocl_repeat_simulate_demand(&self, starting_quantity: usize, simulation_samples: usize) -> Fallible<(usize, usize, usize, usize, f64, f64)> { let chunk_size = simulation_samples / 1000; let chunk_count = 1000; // Think of this program queue as your connection to the device let pro_que = ProQue::builder() .src(include_str!("simulation.cl")) .dims(chunk_count) .build()?; // These two are precomputed zipf distributions, to make sampling from these distributions // faster and simpler to implement. A lot of the latency comes from precomputing these // so in an ideal world you may do this in opencl too. 
let job_lot_zipf_precomp = pro_que.buffer_builder() .len(self.job_lot_zipf_precomp.len()) .copy_host_slice(&self.job_lot_zipf_precomp[..]) .build()?; let itemwise_traffic_zipf_precomp = pro_que.buffer_builder() .len(self.itemwise_traffic_zipf_precomp.len()) .copy_host_slice(&self.itemwise_traffic_zipf_precomp[..]) .build()?; // We also need to seed the simple uniform random number generator on ocl because it has no randomness of its own // So first we compute it on the CPU (the Host) let seed : Vec<u32> = (0..chunk_count).into_iter().map(|_| rand::random()).collect(); // Then send it to the device let seed = pro_que.buffer_builder::<u32>() .len(chunk_count) .copy_host_slice(&seed[..]) .build()?; // These four are the resulting statistics, to be filled in by the device let successful_transactions = pro_que.create_buffer::<u64>()?; let successful_sales = pro_que.create_buffer::<u64>()?; let failed_transactions = pro_que.create_buffer::<u64>()?; let failed_sales = pro_que.create_buffer::<u64>()?; let kernel = pro_que.kernel_builder("ocl_simulate_demand") .arg(&seed) .arg(&job_lot_zipf_precomp) .arg(&itemwise_traffic_zipf_precomp) .arg(&successful_transactions) .arg(&successful_sales) .arg(&failed_transactions) .arg(&failed_sales) .arg(starting_quantity) .arg(self.lead_time.min(10)) .arg(self.safety_stock as i32) .arg(self.order_quantity as i32) .arg(self.itemwise_traffic_zipf_precomp.len()) .arg(chunk_size) .build()?; unsafe { kernel.enq()?; } // Copy the statistics back. It doesn't have to be this hard. // But I want to explain it all in detail because I figure you'll spend a lot of your time // doing exactly this. // I did it by making a single vector, which the closure will take control of (hence "move") let mut vec = vec![0u64; chunk_count]; let mut get_sum = move |buffer: &ocl::Buffer<u64>| -> ocl::Result<usize> { // This copies the device buffer into our host vector. buffer.read(&mut vec).enq()?; // This iterates over it and sums it into a u64. 
// It would be a good idea to keep it as u64 because - who knows - maybe we want to // sell more than 4 billion widgets. But they are purposely inconvenient to work with // because they are also inconvenient for some computers to work with and they will // slow you down on the GPU. Usize, however, is whichever size numbers your computer // naturally uses. So we convert it to that and ignore the possible tragedy. We'll // just show the max we can if we are limited. Good? No. But easy and maybe good enough Ok(vec.iter().copied().sum::<u64>().try_into().unwrap_or(::std::usize::MAX)) }; let st = get_sum(&successful_transactions)?; let ss = get_sum(&successful_sales)?; let ft = get_sum(&failed_transactions)?; let fs = get_sum(&failed_sales)?; Ok((st, ss, ft, fs, st as f64 / (st as f64 + ft as f64), ss as f64 / (ss as f64 + fs as f64))) } } /// Precompute some values for a zipf distribution /// Used by Simulation but not intended to be visible to Python. fn precompute_zipf_buffer(num_elements: usize, exponent: f64) -> Vec<u32> { let z = zipf::ZipfDistribution::new(num_elements, exponent).unwrap(); let mut rng = rand::thread_rng(); (0..(16 << 20)).into_iter().map(|_| z.sample(&mut rng) as u32).collect() } /// This module is a python module implemented in Rust. #[pymodule] fn rustoclsim(_py: Python, m: &PyModule) -> PyResult<()> { m.add_class::<Simulation>()?; Ok(()) } #[test] fn test_ocl() { let sim = Simulation { safety_stock: 10, lead_time: 10, order_quantity: 7, job_lot_zipf: 2.75, itemwise_traffic_zipf: 4.0, }; sim.ocl_repeat_simulate_demand(10, 10000).expect("OCL Failed"); }
// Submodules of this crate/module, declared in alphabetical order (rustfmt convention).
mod escaping;
mod gc_count;
mod to_string;
use std::collections::HashMap;
use std::io::{BufRead, BufReader};
#[cfg(unix)]
use std::os::unix::process::CommandExt;
use std::process::{Child, ChildStdout, Command};
use std::sync::{Arc, RwLock};

use anyhow::{bail, Context, Result};
use async_process::Stdio;
use serde::{Deserialize, Serialize};
use tempfile::TempDir;

use super::progress::ProgressTracker;

// Characters allowed in generated Terraform identifiers/suffixes.
pub static TERRAFORM_ALPHABET: [char; 16] = [
    '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', 'a', 'b', 'c', 'd', 'e', 'f',
];

/// Keeps track of resources which may need to be cleaned up.
#[derive(Default)]
pub struct TerraformPool {
    // Monotonically increasing id handed out by `create_apply`.
    counter: u32,
    // Applies currently in flight, keyed by the id returned from `create_apply`.
    active_applies: HashMap<u32, Arc<tokio::sync::RwLock<TerraformApply>>>,
}

impl TerraformPool {
    /// Spawns `terraform apply` inside `deployment_folder` and registers the
    /// running apply in this pool.
    ///
    /// Returns the pool-local id for the apply (pass it to `drop_apply` when
    /// done) together with a shared handle to the `TerraformApply` state.
    /// Errors if the `terraform` binary cannot be spawned.
    fn create_apply(
        &mut self,
        deployment_folder: TempDir,
    ) -> Result<(u32, Arc<tokio::sync::RwLock<TerraformApply>>)> {
        let next_counter = self.counter;
        self.counter += 1;

        let mut apply_command = Command::new("terraform");

        apply_command
            .current_dir(deployment_folder.path())
            .arg("apply")
            .arg("-auto-approve")
            .arg("-no-color");

        // Put terraform in its own process group so signals aimed at us
        // (e.g. Ctrl-C) do not hit it directly; we deliver SIGINT ourselves
        // in `TerraformApply::drop`.
        #[cfg(unix)]
        {
            apply_command.process_group(0);
        }

        let spawned_child = apply_command
            .stdout(Stdio::piped())
            .spawn()
            .context("Failed to spawn `terraform`. Is it installed?")?;

        let spawned_id = spawned_child.id();

        // The child is double-wrapped: the outer tokio lock guards the apply
        // state, the inner std lock lets a blocking thread wait on the child.
        let deployment = Arc::new(tokio::sync::RwLock::new(TerraformApply {
            child: Some((spawned_id, Arc::new(RwLock::new(spawned_child)))),
            deployment_folder: Some(deployment_folder),
        }));

        self.active_applies.insert(next_counter, deployment.clone());

        Ok((next_counter, deployment))
    }

    /// Forgets the apply registered under `counter` (dropping the pool's
    /// reference to it).
    fn drop_apply(&mut self, counter: u32) {
        self.active_applies.remove(&counter);
    }
}

impl Drop for TerraformPool {
    fn drop(&mut self) {
        // Sanity check: by the time the pool dies, nobody else should still
        // hold a reference to any registered apply.
        for (_, apply) in self.active_applies.drain() {
            debug_assert_eq!(Arc::strong_count(&apply), 1);
        }
    }
}

/// A Terraform configuration to be written out as `main.tf.json`
/// (fields mirror the top-level keys of Terraform's JSON syntax).
#[derive(Serialize, Deserialize)]
pub struct TerraformBatch {
    pub terraform: TerraformConfig,
    #[serde(skip_serializing_if = "HashMap::is_empty")]
    pub data: HashMap<String, HashMap<String, serde_json::Value>>,
    pub resource: HashMap<String, HashMap<String, serde_json::Value>>,
    pub output: HashMap<String, TerraformOutput>,
}

impl Default for TerraformBatch {
    fn default() -> TerraformBatch {
        TerraformBatch {
            terraform: TerraformConfig {
                required_providers: HashMap::new(),
            },
            data: HashMap::new(),
            resource: HashMap::new(),
            output: HashMap::new(),
        }
    }
}

impl TerraformBatch {
    /// Provisions this batch: writes `main.tf.json` into a fresh temp dir
    /// under `./.hydro`, runs `terraform init`, then drives an apply through
    /// `pool` and returns the parsed outputs.
    ///
    /// An entirely empty batch short-circuits to an empty `TerraformResult`
    /// without touching Terraform at all.
    pub async fn provision(self, pool: &mut TerraformPool) -> Result<TerraformResult> {
        if self.terraform.required_providers.is_empty()
            && self.resource.is_empty()
            && self.data.is_empty()
            && self.output.is_empty()
        {
            return Ok(TerraformResult {
                outputs: HashMap::new(),
                deployment_folder: None,
            });
        }

        ProgressTracker::with_group("terraform", || async {
            // Deployments live in `.hydro` under the current directory; each
            // one gets its own temp subfolder.
            let dothydro_folder = std::env::current_dir().unwrap().join(".hydro");
            std::fs::create_dir_all(&dothydro_folder).unwrap();
            let deployment_folder = tempfile::tempdir_in(dothydro_folder).unwrap();

            std::fs::write(
                deployment_folder.path().join("main.tf.json"),
                serde_json::to_string(&self).unwrap(),
            )
            .unwrap();

            if !Command::new("terraform")
                .current_dir(deployment_folder.path())
                .arg("init")
                .stdout(Stdio::null())
                .spawn()
                .context("Failed to spawn `terraform`. Is it installed?")?
                .wait()
                .context("Failed to launch terraform init command")?
                .success()
            {
                bail!("Failed to initialize terraform");
            }

            let (apply_id, apply) = pool.create_apply(deployment_folder)?;

            let output = ProgressTracker::with_group("apply", || async {
                apply.write().await.output().await
            })
            .await;

            // Deregister regardless of success so the pool does not leak
            // finished applies.
            pool.drop_apply(apply_id);
            output
        })
        .await
    }
}

/// State of one running (or finished) `terraform apply`.
struct TerraformApply {
    // (OS pid, shared child handle); `None` once the apply has completed.
    child: Option<(u32, Arc<RwLock<Child>>)>,
    // Folder holding the deployment; taken by `output` on success so the
    // result owns (and later destroys) it.
    deployment_folder: Option<TempDir>,
}

/// Reads `terraform apply` stdout line by line, turning per-resource
/// "Creating..." / "Creation complete" pairs into ProgressTracker leaves.
///
/// Panics on any resource-status line it does not recognize, so new
/// Terraform output formats surface loudly instead of being dropped.
async fn display_apply_outputs(stdout: &mut ChildStdout) {
    let lines = BufReader::new(stdout).lines();
    // resource id -> (completion sender, progress task to join)
    let mut waiting_for_result = HashMap::new();
    for line in lines {
        if let Ok(line) = line {
            let mut split = line.split(':');
            if let Some(first) = split.next() {
                // Only consider lines of the exact shape "<token>: <rest>"
                // where <token> contains no spaces (i.e. a resource address).
                if first.chars().all(|c| c != ' ')
                    && split.next().is_some()
                    && split.next().is_none()
                {
                    if line.starts_with("Plan:")
                        || line.starts_with("Outputs:")
                        || line.contains(": Still creating...")
                        || line.contains(": Reading...")
                        || line.contains(": Still reading...")
                        || line.contains(": Read complete after")
                    {
                        // Progress noise — intentionally ignored.
                    } else if line.ends_with(": Creating...") {
                        let id = line.split(':').next().unwrap().trim().to_string();
                        let (channel_send, channel_recv) = tokio::sync::oneshot::channel();
                        waiting_for_result.insert(
                            id.to_string(),
                            (
                                channel_send,
                                tokio::task::spawn(ProgressTracker::leaf(id, async move {
                                    channel_recv.await.unwrap();
                                })),
                            ),
                        );
                    } else if line.contains(": Creation complete after") {
                        let id = line.split(':').next().unwrap().trim();
                        let (sender, to_await) = waiting_for_result.remove(id).unwrap();
                        let _ = sender.send(());
                        to_await.await.unwrap();
                    } else {
                        panic!("Unexpected from Terraform: {}", line);
                    }
                }
            }
        } else {
            break;
        }
    }
}

/// Forwards `terraform destroy` output to stdout, keeping only the
/// "<resource>: <status>" shaped lines and prefixing them with
/// "[terraform]".
fn filter_terraform_logs(child: &mut Child) {
    let lines = BufReader::new(child.stdout.take().unwrap()).lines();
    for line in lines {
        if let Ok(line) = line {
            let mut split = line.split(':');
            if let Some(first) = split.next() {
                if first.chars().all(|c| c != ' ')
                    && split.next().is_some()
                    && split.next().is_none()
                {
                    println!("[terraform] {}", line);
                }
            }
        } else {
            break;
        }
    }
}

impl TerraformApply {
    /// Waits for the spawned `terraform apply` to finish (streaming its
    /// progress), then runs `terraform output -json` and returns the parsed
    /// outputs plus ownership of the deployment folder.
    ///
    /// Errors if the apply exits unsuccessfully or the output command cannot
    /// be run.
    async fn output(&mut self) -> Result<TerraformResult> {
        let (_, child) = self.child.as_ref().unwrap().clone();
        let mut stdout = child.write().unwrap().stdout.take().unwrap();

        let status = tokio::task::spawn_blocking(move || {
            // it is okay for this thread to keep running even if the future is cancelled
            child.write().unwrap().wait().unwrap()
        });

        display_apply_outputs(&mut stdout).await;

        let status = status.await;

        // Apply has finished; drop the child so `Drop` won't try to signal it.
        self.child = None;

        if !status.unwrap().success() {
            bail!("Terraform deployment failed");
        }

        let mut output_command = Command::new("terraform");
        output_command
            .current_dir(self.deployment_folder.as_ref().unwrap().path())
            .arg("output")
            .arg("-json");

        #[cfg(unix)]
        {
            output_command.process_group(0);
        }

        let output = output_command
            .output()
            .context("Failed to read Terraform outputs")?;

        Ok(TerraformResult {
            outputs: serde_json::from_slice(&output.stdout).unwrap(),
            // Hand the folder to the result, whose Drop will destroy the
            // deployment.
            deployment_folder: self.deployment_folder.take(),
        })
    }
}

/// Best-effort `terraform destroy` for the given deployment folder.
/// Failure is reported on stderr but never panics the caller's Drop.
fn destroy_deployment(deployment_folder: &TempDir) {
    println!(
        "Destroying terraform deployment at {}",
        deployment_folder.path().display()
    );

    let mut destroy_command = Command::new("terraform");
    destroy_command
        .current_dir(deployment_folder.path())
        .arg("destroy")
        .arg("-auto-approve")
        .arg("-no-color")
        .stdout(Stdio::piped());

    #[cfg(unix)]
    {
        destroy_command.process_group(0);
    }

    let mut destroy_child = destroy_command
        .spawn()
        .expect("Failed to spawn terraform destroy command");

    filter_terraform_logs(&mut destroy_child);

    if !destroy_child
        .wait()
        .expect("Failed to destroy terraform deployment")
        .success()
    {
        eprintln!("WARNING: failed to destroy terraform deployment");
    }
}

impl Drop for TerraformApply {
    fn drop(&mut self) {
        // If the apply is still running, ask it to stop (SIGINT lets
        // terraform clean up) and wait for it to exit before destroying.
        if let Some((pid, child)) = self.child.take() {
            #[cfg(unix)]
            nix::sys::signal::kill(
                nix::unistd::Pid::from_raw(pid as i32),
                nix::sys::signal::Signal::SIGINT,
            )
            .unwrap();

            #[cfg(not(unix))]
            let _ = pid;

            let mut child_write = child.write().unwrap();
            if child_write.try_wait().unwrap().is_none() {
                println!("Waiting for Terraform apply to finish...");
                child_write.wait().unwrap();
            }
        }
        // Only applies that never handed their folder to a TerraformResult
        // still own it here; tear the deployment down.
        if let Some(deployment_folder) = &self.deployment_folder {
            destroy_deployment(deployment_folder);
        }
    }
}

/// The `terraform { ... }` block of the configuration.
#[derive(Serialize, Deserialize)]
pub struct TerraformConfig {
    pub required_providers: HashMap<String, TerraformProvider>,
}

/// One entry of `required_providers`.
#[derive(Serialize, Deserialize)]
pub struct TerraformProvider {
    pub source: String,
    pub version: String,
}

/// A single named Terraform output value.
#[derive(Serialize, Deserialize, Debug)]
pub struct TerraformOutput {
    pub value: String,
}

/// Result of a provision: parsed outputs plus, when something was actually
/// deployed, ownership of the deployment folder (destroyed on drop).
#[derive(Debug)]
pub struct TerraformResult {
    pub outputs: HashMap<String, TerraformOutput>,
    /// `None` if no deployment was performed
    pub deployment_folder: Option<TempDir>,
}

impl Drop for TerraformResult {
    fn drop(&mut self) {
        if let Some(deployment_folder) = &self.deployment_folder {
            destroy_deployment(deployment_folder);
        }
    }
}

#[derive(Serialize, Deserialize)]
pub struct TerraformResultOutput {
    value: String,
}
use const_format::{concatcp, formatcp};
use serde::{
    de::{self, Unexpected},
    Deserialize, Deserializer,
};
use serenity::builder::CreateEmbed;

use crate::smmo::world_boss::WorldBoss;

pub(crate) mod smmo_player;
pub(crate) mod world_boss;

/// Serde helper: deserializes an integer field into a `bool`.
/// Accepts exactly `0` (`false`) and `1` (`true`); any other value is
/// rejected with an "invalid value" error mentioning "zero or one".
fn bool_from_int<'de, D>(deserializer: D) -> Result<bool, D::Error>
where
    D: Deserializer<'de>,
{
    match u8::deserialize(deserializer)? {
        0 => Ok(false),
        1 => Ok(true),
        other => Err(de::Error::invalid_value(
            Unexpected::Unsigned(other as u64),
            &"zero or one",
        )),
    }
}

/// Common interface for SMMO API models that can be rendered into a
/// Discord embed.
pub trait SmmoModel {
    /// Name of the model type (a display label for the implementor).
    const TYPE_NAME: &'static str;

    /// Renders this model into `embed`, returning the builder for chaining.
    fn to_embed<'a, 'b>(&'a self, embed: &'b mut CreateEmbed) -> &'b mut CreateEmbed;

    /// Renders this model as a single embed field `(name, value, inline)`.
    fn to_field(&self) -> (String, String, bool);
}

// impl<T: SmmoModel> SmmoModel for Vec<T> {
//     const TYPE_NAME: &'static str = "Vec";

//     fn to_embed<'a, 'b>(&'a self, embed: &'b mut CreateEmbed) -> &'b mut CreateEmbed {
//         embed.fields(self.into_iter().map(|t| t.to_field()))
//     }

//     fn to_field(&self) -> (String, String, bool) {
//         ("test".into(), "test".into(), true)
//     }
// }

impl SmmoModel for Vec<WorldBoss> {
    const TYPE_NAME: &'static str = "Vec";

    /// Renders each world boss as one field of the embed.
    fn to_embed<'a, 'b>(&'a self, embed: &'b mut CreateEmbed) -> &'b mut CreateEmbed {
        embed.fields(self.iter().map(|t| t.to_field()))
    }

    // NOTE(review): placeholder values — presumably this impl is only used
    // via `to_embed`; confirm before relying on `to_field` here.
    fn to_field(&self) -> (String, String, bool) {
        ("test".into(), "test".into(), true)
    }
}

/// (De)serialization helpers for the SMMO API's date-time string format.
mod date_time {
    use chrono::{DateTime, NaiveDateTime, TimeZone, Utc};
    use serde::{de, Deserialize, Deserializer, Serialize, Serializer};

    /// Serializes a `DateTime<Utc>` as an RFC 3339 string.
    /// NOTE(review): despite "option" in the name, this takes a non-optional
    /// `DateTime<Utc>` — verify against the fields that use it as a
    /// `serialize_with` attribute.
    pub fn serialize_option_datefmt<S: Serializer>(
        time: &DateTime<Utc>,
        serializer: S,
    ) -> Result<S::Ok, S::Error> {
        time.to_rfc3339().serialize(serializer)
    }

    // Timestamp layout used by `datefmt`, e.g. "2021-01-31 23:59:59".
    const FORMAT: &str = "%Y-%m-%d %H:%M:%S";

    /// Deserializes a string matching `FORMAT` into a UTC date-time.
    fn datefmt<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        Utc.datetime_from_str(&s, FORMAT)
            .map_err(serde::de::Error::custom)
    }

    /// Deserializes an optional date-time: missing/null becomes `None`,
    /// otherwise the value must parse per `FORMAT` (via the `Wrapper`
    /// newtype, which reuses `datefmt` through serde's `deserialize_with`).
    pub fn deserialize_option_datefmt<'de, D>(
        deserializer: D,
    ) -> Result<Option<DateTime<Utc>>, D::Error>
    where
        D: Deserializer<'de>,
    {
        #[derive(Deserialize)]
        struct Wrapper(#[serde(deserialize_with = "datefmt")] DateTime<Utc>);

        let v = Option::deserialize(deserializer)?;
        Ok(v.map(|Wrapper(a)| a))
    }
}
//! Contains the implementation of the Mac OS X tray icon in the top bar.

use std;

use cocoa::appkit::{NSApp, NSApplication, NSButton, NSImage, NSStatusBar, NSStatusItem,
                    NSSquareStatusItemLength};
use cocoa::base::{id, nil};
use cocoa::foundation::{NSData, NSSize, NSAutoreleasePool};

use SystrayError;

/// The generation representation of the Mac OS X application.
pub struct Window {
    /// A mutable reference to the `NSApplication` instance of the currently running application.
    application: id,
    /// It seems that we have to use `NSAutoreleasePool` to prevent memory leaks.
    autorelease_pool: id,
}

impl Window {
    /// Creates a new instance of the `Window`.
    ///
    /// Grabs the shared `NSApplication` via `NSApp()` and allocates an
    /// autorelease pool for the Objective-C objects created later.
    pub fn new() -> Result<Window, SystrayError> {
        Ok(Window {
            application: unsafe { NSApp() },
            autorelease_pool: unsafe { NSAutoreleasePool::new(nil) },
        })
    }

    /// Closes the current application.
    // Sends `terminate` to the NSApplication instance.
    pub fn quit(&self) {
        let _: () = unsafe { msg_send![self.application, terminate] };
    }

    /// Sets the tooltip (not available for this platfor).
    ///
    /// Always returns an `OsError` — tray tooltips are unsupported here.
    pub fn set_tooltip(&self, _: &String) -> Result<(), SystrayError> {
        Err(SystrayError::OsError("This operating system does not support tooltips for the tray \
                                   items".to_owned()))
    }

    /// Adds an additional item to the tray icon menu.
    ///
    /// Not implemented yet: panics via `unimplemented!` when called.
    pub fn add_menu_item<F>(&self, _: &String, _: F) -> Result<u32, SystrayError>
        where F: std::ops::Fn(&Window) -> () + 'static
    {
        unimplemented!()
    }

    /// Sets the application icon displayed in the tray bar. Accepts a `buffer` to the underlying
    /// image, you can pass even encoded PNG images here. Supports the same list of formats as
    /// `NSImage`.
    ///
    /// The two trailing `u32` parameters are accepted but ignored; the icon
    /// is always scaled to 18x18 points. Returns an `OsError` if either the
    /// `NSData` or the `NSImage` cannot be created from `buffer`.
    pub fn set_icon_from_buffer(&mut self, buffer: &'static [u8], _: u32, _: u32)
                                -> Result<(), SystrayError> {
        // Fixed tray icon size in points.
        const ICON_WIDTH: f64 = 18.0;
        const ICON_HEIGHT: f64 = 18.0;

        // Reserve a square slot in the system status bar for our icon.
        let tray_entry = unsafe {
            NSStatusBar::systemStatusBar(nil).statusItemWithLength_(NSSquareStatusItemLength)
                                             .autorelease()
        };

        // Wrap the raw bytes in an NSData object (autoreleased into our pool).
        let nsdata = unsafe {
            NSData::dataWithBytes_length_(nil,
                                          buffer.as_ptr() as *const std::os::raw::c_void,
                                          buffer.len() as u64)
                .autorelease()
        };
        if nsdata == nil {
            return Err(SystrayError::OsError("Could not create `NSData` out of the passed buffer"
                                                 .to_owned()));
        }

        // Let NSImage decode the buffer (any format NSImage understands).
        let nsimage = unsafe { NSImage::initWithData_(NSImage::alloc(nil), nsdata).autorelease() };
        if nsimage == nil {
            return Err(SystrayError::OsError("Could not create `NSImage` out of the created \
                                              `NSData` buffer".to_owned()));
        }

        unsafe {
            // Resize to the fixed tray dimensions and attach to the status item.
            let new_size = NSSize::new(ICON_WIDTH, ICON_HEIGHT);
            let _: () = msg_send![nsimage, setSize:new_size];
            tray_entry.button().setImage_(nsimage);
        }

        Ok(())
    }

    /// Starts the application event loop. Calling this function will block the current thread.
    pub fn wait_for_message(&mut self) {
        unsafe { self.application.run() };
    }
}
mod auth;

use tonic::{Request, Response, Status, transport::{Server, server::{RouterService, Unimplemented}}};

use hello_world::greeter_server::{Greeter, GreeterServer};
use hello_world::{HelloReply, HelloRequest};

/// Generated protobuf/gRPC bindings for the `helloworld` package.
mod hello_world {
    tonic::include_proto!("helloworld");
}

/// Concrete router type returned by `create_grpc_server`: a single
/// `Greeter` service, with all other routes answered by `Unimplemented`.
type Service = RouterService<GreeterServer<MyGreeter>, Unimplemented>;

/// Builds the gRPC router hosting the `Greeter` service backed by
/// `MyGreeter`. The returned service can then be driven by a transport.
pub fn create_grpc_server() -> Service {
    let greeter_service = GreeterServer::new(MyGreeter::default());

    Server::builder()
        .add_service(greeter_service)
        .into_service()
}

/// Stateless `Greeter` implementation.
#[derive(Default, Debug)]
pub struct MyGreeter {}

#[tonic::async_trait]
impl Greeter for MyGreeter {
    /// Handles `SayHello`: first validates the request's auth token via
    /// `auth::validate_token` (its error propagates through `?`), then
    /// echoes the caller's name back in the reply message.
    async fn say_hello(
        &self,
        request: Request<HelloRequest>,
    ) -> Result<Response<HelloReply>, Status> {
        auth::validate_token(&request).await?;

        let reply = hello_world::HelloReply {
            message: format!("New server, who dis? Name: {}", request.into_inner().name),
        };
        Ok(Response::new(reply))
    }
}
/* chapter 4 syntax and semantics */
use std::cell::RefCell;

/// Demonstrates RefCell's runtime borrow checking: one mutable borrow is
/// fine; a second simultaneous one (commented out) would panic at runtime.
fn main() {
    let cell = RefCell::new(42);
    println!("{:?}", cell);

    let guard = cell.borrow_mut();
    // A second overlapping mutable borrow will panic:
    /* let second = cell.borrow_mut(); */
    println!("{:?}", guard);
}

// output should be:
/* */
/// An enum to represent all characters in the Runic block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum Runic { /// \u{16a0}: 'ᚠ' LetterFehuFeohFeF, /// \u{16a1}: 'ᚡ' LetterV, /// \u{16a2}: 'ᚢ' LetterUruzUrU, /// \u{16a3}: 'ᚣ' LetterYr, /// \u{16a4}: 'ᚤ' LetterY, /// \u{16a5}: 'ᚥ' LetterW, /// \u{16a6}: 'ᚦ' LetterThurisazThursThorn, /// \u{16a7}: 'ᚧ' LetterEth, /// \u{16a8}: 'ᚨ' LetterAnsuzA, /// \u{16a9}: 'ᚩ' LetterOsO, /// \u{16aa}: 'ᚪ' LetterAcA, /// \u{16ab}: 'ᚫ' LetterAesc, /// \u{16ac}: 'ᚬ' LetterLongDashBranchDashOssO, /// \u{16ad}: 'ᚭ' LetterShortDashTwigDashOssO, /// \u{16ae}: 'ᚮ' LetterO, /// \u{16af}: 'ᚯ' LetterOe, /// \u{16b0}: 'ᚰ' LetterOn, /// \u{16b1}: 'ᚱ' LetterRaidoRadReidR, /// \u{16b2}: 'ᚲ' LetterKauna, /// \u{16b3}: 'ᚳ' LetterCen, /// \u{16b4}: 'ᚴ' LetterKaunK, /// \u{16b5}: 'ᚵ' LetterG, /// \u{16b6}: 'ᚶ' LetterEng, /// \u{16b7}: 'ᚷ' LetterGeboGyfuG, /// \u{16b8}: 'ᚸ' LetterGar, /// \u{16b9}: 'ᚹ' LetterWunjoWynnW, /// \u{16ba}: 'ᚺ' LetterHaglazH, /// \u{16bb}: 'ᚻ' LetterHaeglH, /// \u{16bc}: 'ᚼ' LetterLongDashBranchDashHagallH, /// \u{16bd}: 'ᚽ' LetterShortDashTwigDashHagallH, /// \u{16be}: 'ᚾ' LetterNaudizNydNaudN, /// \u{16bf}: 'ᚿ' LetterShortDashTwigDashNaudN, /// \u{16c0}: 'ᛀ' LetterDottedDashN, /// \u{16c1}: 'ᛁ' LetterIsazIsIssI, /// \u{16c2}: 'ᛂ' LetterE, /// \u{16c3}: 'ᛃ' LetterJeranJ, /// \u{16c4}: 'ᛄ' LetterGer, /// \u{16c5}: 'ᛅ' LetterLongDashBranchDashArAe, /// \u{16c6}: 'ᛆ' LetterShortDashTwigDashArA, /// \u{16c7}: 'ᛇ' LetterIwazEoh, /// \u{16c8}: 'ᛈ' LetterPerthoPeorthP, /// \u{16c9}: 'ᛉ' LetterAlgizEolhx, /// \u{16ca}: 'ᛊ' LetterSowiloS, /// \u{16cb}: 'ᛋ' LetterSigelLongDashBranchDashSolS, /// \u{16cc}: 'ᛌ' LetterShortDashTwigDashSolS, /// \u{16cd}: 'ᛍ' LetterC, /// \u{16ce}: 'ᛎ' LetterZ, /// \u{16cf}: 'ᛏ' LetterTiwazTirTyrT, /// \u{16d0}: 'ᛐ' LetterShortDashTwigDashTyrT, /// \u{16d1}: 'ᛑ' LetterD, /// \u{16d2}: 'ᛒ' LetterBerkananBeorcBjarkanB, /// \u{16d3}: 'ᛓ' LetterShortDashTwigDashBjarkanB, /// \u{16d4}: 'ᛔ' 
LetterDottedDashP, /// \u{16d5}: 'ᛕ' LetterOpenDashP, /// \u{16d6}: 'ᛖ' LetterEhwazEhE, /// \u{16d7}: 'ᛗ' LetterMannazManM, /// \u{16d8}: 'ᛘ' LetterLongDashBranchDashMadrM, /// \u{16d9}: 'ᛙ' LetterShortDashTwigDashMadrM, /// \u{16da}: 'ᛚ' LetterLaukazLaguLogrL, /// \u{16db}: 'ᛛ' LetterDottedDashL, /// \u{16dc}: 'ᛜ' LetterIngwaz, /// \u{16dd}: 'ᛝ' LetterIng, /// \u{16de}: 'ᛞ' LetterDagazDaegD, /// \u{16df}: 'ᛟ' LetterOthalanEthelO, /// \u{16e0}: 'ᛠ' LetterEar, /// \u{16e1}: 'ᛡ' LetterIor, /// \u{16e2}: 'ᛢ' LetterCweorth, /// \u{16e3}: 'ᛣ' LetterCalc, /// \u{16e4}: 'ᛤ' LetterCealc, /// \u{16e5}: 'ᛥ' LetterStan, /// \u{16e6}: 'ᛦ' LetterLongDashBranchDashYr, /// \u{16e7}: 'ᛧ' LetterShortDashTwigDashYr, /// \u{16e8}: 'ᛨ' LetterIcelandicDashYr, /// \u{16e9}: 'ᛩ' LetterQ, /// \u{16ea}: 'ᛪ' LetterX, /// \u{16eb}: '᛫' SinglePunctuation, /// \u{16ec}: '᛬' MultiplePunctuation, /// \u{16ed}: '᛭' CrossPunctuation, /// \u{16ee}: 'ᛮ' ArlaugSymbol, /// \u{16ef}: 'ᛯ' TvimadurSymbol, /// \u{16f0}: 'ᛰ' BelgthorSymbol, /// \u{16f1}: 'ᛱ' LetterK, /// \u{16f2}: 'ᛲ' LetterSh, /// \u{16f3}: 'ᛳ' LetterOo, /// \u{16f4}: 'ᛴ' LetterFranksCasketOs, /// \u{16f5}: 'ᛵ' LetterFranksCasketIs, /// \u{16f6}: 'ᛶ' LetterFranksCasketEh, /// \u{16f7}: 'ᛷ' LetterFranksCasketAc, /// \u{16f8}: 'ᛸ' LetterFranksCasketAesc, } impl Into<char> for Runic { fn into(self) -> char { match self { Runic::LetterFehuFeohFeF => 'ᚠ', Runic::LetterV => 'ᚡ', Runic::LetterUruzUrU => 'ᚢ', Runic::LetterYr => 'ᚣ', Runic::LetterY => 'ᚤ', Runic::LetterW => 'ᚥ', Runic::LetterThurisazThursThorn => 'ᚦ', Runic::LetterEth => 'ᚧ', Runic::LetterAnsuzA => 'ᚨ', Runic::LetterOsO => 'ᚩ', Runic::LetterAcA => 'ᚪ', Runic::LetterAesc => 'ᚫ', Runic::LetterLongDashBranchDashOssO => 'ᚬ', Runic::LetterShortDashTwigDashOssO => 'ᚭ', Runic::LetterO => 'ᚮ', Runic::LetterOe => 'ᚯ', Runic::LetterOn => 'ᚰ', Runic::LetterRaidoRadReidR => 'ᚱ', Runic::LetterKauna => 'ᚲ', Runic::LetterCen => 'ᚳ', Runic::LetterKaunK => 'ᚴ', Runic::LetterG => 'ᚵ', 
Runic::LetterEng => 'ᚶ', Runic::LetterGeboGyfuG => 'ᚷ', Runic::LetterGar => 'ᚸ', Runic::LetterWunjoWynnW => 'ᚹ', Runic::LetterHaglazH => 'ᚺ', Runic::LetterHaeglH => 'ᚻ', Runic::LetterLongDashBranchDashHagallH => 'ᚼ', Runic::LetterShortDashTwigDashHagallH => 'ᚽ', Runic::LetterNaudizNydNaudN => 'ᚾ', Runic::LetterShortDashTwigDashNaudN => 'ᚿ', Runic::LetterDottedDashN => 'ᛀ', Runic::LetterIsazIsIssI => 'ᛁ', Runic::LetterE => 'ᛂ', Runic::LetterJeranJ => 'ᛃ', Runic::LetterGer => 'ᛄ', Runic::LetterLongDashBranchDashArAe => 'ᛅ', Runic::LetterShortDashTwigDashArA => 'ᛆ', Runic::LetterIwazEoh => 'ᛇ', Runic::LetterPerthoPeorthP => 'ᛈ', Runic::LetterAlgizEolhx => 'ᛉ', Runic::LetterSowiloS => 'ᛊ', Runic::LetterSigelLongDashBranchDashSolS => 'ᛋ', Runic::LetterShortDashTwigDashSolS => 'ᛌ', Runic::LetterC => 'ᛍ', Runic::LetterZ => 'ᛎ', Runic::LetterTiwazTirTyrT => 'ᛏ', Runic::LetterShortDashTwigDashTyrT => 'ᛐ', Runic::LetterD => 'ᛑ', Runic::LetterBerkananBeorcBjarkanB => 'ᛒ', Runic::LetterShortDashTwigDashBjarkanB => 'ᛓ', Runic::LetterDottedDashP => 'ᛔ', Runic::LetterOpenDashP => 'ᛕ', Runic::LetterEhwazEhE => 'ᛖ', Runic::LetterMannazManM => 'ᛗ', Runic::LetterLongDashBranchDashMadrM => 'ᛘ', Runic::LetterShortDashTwigDashMadrM => 'ᛙ', Runic::LetterLaukazLaguLogrL => 'ᛚ', Runic::LetterDottedDashL => 'ᛛ', Runic::LetterIngwaz => 'ᛜ', Runic::LetterIng => 'ᛝ', Runic::LetterDagazDaegD => 'ᛞ', Runic::LetterOthalanEthelO => 'ᛟ', Runic::LetterEar => 'ᛠ', Runic::LetterIor => 'ᛡ', Runic::LetterCweorth => 'ᛢ', Runic::LetterCalc => 'ᛣ', Runic::LetterCealc => 'ᛤ', Runic::LetterStan => 'ᛥ', Runic::LetterLongDashBranchDashYr => 'ᛦ', Runic::LetterShortDashTwigDashYr => 'ᛧ', Runic::LetterIcelandicDashYr => 'ᛨ', Runic::LetterQ => 'ᛩ', Runic::LetterX => 'ᛪ', Runic::SinglePunctuation => '᛫', Runic::MultiplePunctuation => '᛬', Runic::CrossPunctuation => '᛭', Runic::ArlaugSymbol => 'ᛮ', Runic::TvimadurSymbol => 'ᛯ', Runic::BelgthorSymbol => 'ᛰ', Runic::LetterK => 'ᛱ', Runic::LetterSh => 'ᛲ', 
Runic::LetterOo => 'ᛳ', Runic::LetterFranksCasketOs => 'ᛴ', Runic::LetterFranksCasketIs => 'ᛵ', Runic::LetterFranksCasketEh => 'ᛶ', Runic::LetterFranksCasketAc => 'ᛷ', Runic::LetterFranksCasketAesc => 'ᛸ', } } } impl std::convert::TryFrom<char> for Runic { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { 'ᚠ' => Ok(Runic::LetterFehuFeohFeF), 'ᚡ' => Ok(Runic::LetterV), 'ᚢ' => Ok(Runic::LetterUruzUrU), 'ᚣ' => Ok(Runic::LetterYr), 'ᚤ' => Ok(Runic::LetterY), 'ᚥ' => Ok(Runic::LetterW), 'ᚦ' => Ok(Runic::LetterThurisazThursThorn), 'ᚧ' => Ok(Runic::LetterEth), 'ᚨ' => Ok(Runic::LetterAnsuzA), 'ᚩ' => Ok(Runic::LetterOsO), 'ᚪ' => Ok(Runic::LetterAcA), 'ᚫ' => Ok(Runic::LetterAesc), 'ᚬ' => Ok(Runic::LetterLongDashBranchDashOssO), 'ᚭ' => Ok(Runic::LetterShortDashTwigDashOssO), 'ᚮ' => Ok(Runic::LetterO), 'ᚯ' => Ok(Runic::LetterOe), 'ᚰ' => Ok(Runic::LetterOn), 'ᚱ' => Ok(Runic::LetterRaidoRadReidR), 'ᚲ' => Ok(Runic::LetterKauna), 'ᚳ' => Ok(Runic::LetterCen), 'ᚴ' => Ok(Runic::LetterKaunK), 'ᚵ' => Ok(Runic::LetterG), 'ᚶ' => Ok(Runic::LetterEng), 'ᚷ' => Ok(Runic::LetterGeboGyfuG), 'ᚸ' => Ok(Runic::LetterGar), 'ᚹ' => Ok(Runic::LetterWunjoWynnW), 'ᚺ' => Ok(Runic::LetterHaglazH), 'ᚻ' => Ok(Runic::LetterHaeglH), 'ᚼ' => Ok(Runic::LetterLongDashBranchDashHagallH), 'ᚽ' => Ok(Runic::LetterShortDashTwigDashHagallH), 'ᚾ' => Ok(Runic::LetterNaudizNydNaudN), 'ᚿ' => Ok(Runic::LetterShortDashTwigDashNaudN), 'ᛀ' => Ok(Runic::LetterDottedDashN), 'ᛁ' => Ok(Runic::LetterIsazIsIssI), 'ᛂ' => Ok(Runic::LetterE), 'ᛃ' => Ok(Runic::LetterJeranJ), 'ᛄ' => Ok(Runic::LetterGer), 'ᛅ' => Ok(Runic::LetterLongDashBranchDashArAe), 'ᛆ' => Ok(Runic::LetterShortDashTwigDashArA), 'ᛇ' => Ok(Runic::LetterIwazEoh), 'ᛈ' => Ok(Runic::LetterPerthoPeorthP), 'ᛉ' => Ok(Runic::LetterAlgizEolhx), 'ᛊ' => Ok(Runic::LetterSowiloS), 'ᛋ' => Ok(Runic::LetterSigelLongDashBranchDashSolS), 'ᛌ' => Ok(Runic::LetterShortDashTwigDashSolS), 'ᛍ' => Ok(Runic::LetterC), 'ᛎ' => Ok(Runic::LetterZ), 'ᛏ' => 
Ok(Runic::LetterTiwazTirTyrT), 'ᛐ' => Ok(Runic::LetterShortDashTwigDashTyrT), 'ᛑ' => Ok(Runic::LetterD), 'ᛒ' => Ok(Runic::LetterBerkananBeorcBjarkanB), 'ᛓ' => Ok(Runic::LetterShortDashTwigDashBjarkanB), 'ᛔ' => Ok(Runic::LetterDottedDashP), 'ᛕ' => Ok(Runic::LetterOpenDashP), 'ᛖ' => Ok(Runic::LetterEhwazEhE), 'ᛗ' => Ok(Runic::LetterMannazManM), 'ᛘ' => Ok(Runic::LetterLongDashBranchDashMadrM), 'ᛙ' => Ok(Runic::LetterShortDashTwigDashMadrM), 'ᛚ' => Ok(Runic::LetterLaukazLaguLogrL), 'ᛛ' => Ok(Runic::LetterDottedDashL), 'ᛜ' => Ok(Runic::LetterIngwaz), 'ᛝ' => Ok(Runic::LetterIng), 'ᛞ' => Ok(Runic::LetterDagazDaegD), 'ᛟ' => Ok(Runic::LetterOthalanEthelO), 'ᛠ' => Ok(Runic::LetterEar), 'ᛡ' => Ok(Runic::LetterIor), 'ᛢ' => Ok(Runic::LetterCweorth), 'ᛣ' => Ok(Runic::LetterCalc), 'ᛤ' => Ok(Runic::LetterCealc), 'ᛥ' => Ok(Runic::LetterStan), 'ᛦ' => Ok(Runic::LetterLongDashBranchDashYr), 'ᛧ' => Ok(Runic::LetterShortDashTwigDashYr), 'ᛨ' => Ok(Runic::LetterIcelandicDashYr), 'ᛩ' => Ok(Runic::LetterQ), 'ᛪ' => Ok(Runic::LetterX), '᛫' => Ok(Runic::SinglePunctuation), '᛬' => Ok(Runic::MultiplePunctuation), '᛭' => Ok(Runic::CrossPunctuation), 'ᛮ' => Ok(Runic::ArlaugSymbol), 'ᛯ' => Ok(Runic::TvimadurSymbol), 'ᛰ' => Ok(Runic::BelgthorSymbol), 'ᛱ' => Ok(Runic::LetterK), 'ᛲ' => Ok(Runic::LetterSh), 'ᛳ' => Ok(Runic::LetterOo), 'ᛴ' => Ok(Runic::LetterFranksCasketOs), 'ᛵ' => Ok(Runic::LetterFranksCasketIs), 'ᛶ' => Ok(Runic::LetterFranksCasketEh), 'ᛷ' => Ok(Runic::LetterFranksCasketAc), 'ᛸ' => Ok(Runic::LetterFranksCasketAesc), _ => Err(()), } } } impl Into<u32> for Runic { fn into(self) -> u32 { let c: char = self.into(); let hex = c .escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for Runic { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for Runic { type Item = Self; fn next(&mut self) -> 
Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl Runic { /// The character with the lowest index in this unicode block pub fn new() -> Self { Runic::LetterFehuFeohFeF } /// The character's name, in sentence case pub fn name(&self) -> String { let s = std::format!("Runic{:#?}", self); string_morph::to_sentence_case(&s) } }
/// An enum to represent all characters in the OldTurkic block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum OldTurkic { /// \u{10c00}: '𐰀' LetterOrkhonA, /// \u{10c01}: '𐰁' LetterYeniseiA, /// \u{10c02}: '𐰂' LetterYeniseiAe, /// \u{10c03}: '𐰃' LetterOrkhonI, /// \u{10c04}: '𐰄' LetterYeniseiI, /// \u{10c05}: '𐰅' LetterYeniseiE, /// \u{10c06}: '𐰆' LetterOrkhonO, /// \u{10c07}: '𐰇' LetterOrkhonOe, /// \u{10c08}: '𐰈' LetterYeniseiOe, /// \u{10c09}: '𐰉' LetterOrkhonAb, /// \u{10c0a}: '𐰊' LetterYeniseiAb, /// \u{10c0b}: '𐰋' LetterOrkhonAeb, /// \u{10c0c}: '𐰌' LetterYeniseiAeb, /// \u{10c0d}: '𐰍' LetterOrkhonAg, /// \u{10c0e}: '𐰎' LetterYeniseiAg, /// \u{10c0f}: '𐰏' LetterOrkhonAeg, /// \u{10c10}: '𐰐' LetterYeniseiAeg, /// \u{10c11}: '𐰑' LetterOrkhonAd, /// \u{10c12}: '𐰒' LetterYeniseiAd, /// \u{10c13}: '𐰓' LetterOrkhonAed, /// \u{10c14}: '𐰔' LetterOrkhonEz, /// \u{10c15}: '𐰕' LetterYeniseiEz, /// \u{10c16}: '𐰖' LetterOrkhonAy, /// \u{10c17}: '𐰗' LetterYeniseiAy, /// \u{10c18}: '𐰘' LetterOrkhonAey, /// \u{10c19}: '𐰙' LetterYeniseiAey, /// \u{10c1a}: '𐰚' LetterOrkhonAek, /// \u{10c1b}: '𐰛' LetterYeniseiAek, /// \u{10c1c}: '𐰜' LetterOrkhonOek, /// \u{10c1d}: '𐰝' LetterYeniseiOek, /// \u{10c1e}: '𐰞' LetterOrkhonAl, /// \u{10c1f}: '𐰟' LetterYeniseiAl, /// \u{10c20}: '𐰠' LetterOrkhonAel, /// \u{10c21}: '𐰡' LetterOrkhonElt, /// \u{10c22}: '𐰢' LetterOrkhonEm, /// \u{10c23}: '𐰣' LetterOrkhonAn, /// \u{10c24}: '𐰤' LetterOrkhonAen, /// \u{10c25}: '𐰥' LetterYeniseiAen, /// \u{10c26}: '𐰦' LetterOrkhonEnt, /// \u{10c27}: '𐰧' LetterYeniseiEnt, /// \u{10c28}: '𐰨' LetterOrkhonEnc, /// \u{10c29}: '𐰩' LetterYeniseiEnc, /// \u{10c2a}: '𐰪' LetterOrkhonEny, /// \u{10c2b}: '𐰫' LetterYeniseiEny, /// \u{10c2c}: '𐰬' LetterYeniseiAng, /// \u{10c2d}: '𐰭' LetterOrkhonEng, /// \u{10c2e}: '𐰮' LetterYeniseiAeng, /// \u{10c2f}: '𐰯' LetterOrkhonEp, /// \u{10c30}: '𐰰' LetterOrkhonOp, /// \u{10c31}: '𐰱' LetterOrkhonIc, /// \u{10c32}: '𐰲' LetterOrkhonEc, /// \u{10c33}: '𐰳' LetterYeniseiEc, 
/// \u{10c34}: '𐰴' LetterOrkhonAq, /// \u{10c35}: '𐰵' LetterYeniseiAq, /// \u{10c36}: '𐰶' LetterOrkhonIq, /// \u{10c37}: '𐰷' LetterYeniseiIq, /// \u{10c38}: '𐰸' LetterOrkhonOq, /// \u{10c39}: '𐰹' LetterYeniseiOq, /// \u{10c3a}: '𐰺' LetterOrkhonAr, /// \u{10c3b}: '𐰻' LetterYeniseiAr, /// \u{10c3c}: '𐰼' LetterOrkhonAer, /// \u{10c3d}: '𐰽' LetterOrkhonAs, /// \u{10c3e}: '𐰾' LetterOrkhonAes, /// \u{10c3f}: '𐰿' LetterOrkhonAsh, /// \u{10c40}: '𐱀' LetterYeniseiAsh, /// \u{10c41}: '𐱁' LetterOrkhonEsh, /// \u{10c42}: '𐱂' LetterYeniseiEsh, /// \u{10c43}: '𐱃' LetterOrkhonAt, /// \u{10c44}: '𐱄' LetterYeniseiAt, /// \u{10c45}: '𐱅' LetterOrkhonAet, /// \u{10c46}: '𐱆' LetterYeniseiAet, /// \u{10c47}: '𐱇' LetterOrkhonOt, /// \u{10c48}: '𐱈' LetterOrkhonBash, } impl Into<char> for OldTurkic { fn into(self) -> char { match self { OldTurkic::LetterOrkhonA => '𐰀', OldTurkic::LetterYeniseiA => '𐰁', OldTurkic::LetterYeniseiAe => '𐰂', OldTurkic::LetterOrkhonI => '𐰃', OldTurkic::LetterYeniseiI => '𐰄', OldTurkic::LetterYeniseiE => '𐰅', OldTurkic::LetterOrkhonO => '𐰆', OldTurkic::LetterOrkhonOe => '𐰇', OldTurkic::LetterYeniseiOe => '𐰈', OldTurkic::LetterOrkhonAb => '𐰉', OldTurkic::LetterYeniseiAb => '𐰊', OldTurkic::LetterOrkhonAeb => '𐰋', OldTurkic::LetterYeniseiAeb => '𐰌', OldTurkic::LetterOrkhonAg => '𐰍', OldTurkic::LetterYeniseiAg => '𐰎', OldTurkic::LetterOrkhonAeg => '𐰏', OldTurkic::LetterYeniseiAeg => '𐰐', OldTurkic::LetterOrkhonAd => '𐰑', OldTurkic::LetterYeniseiAd => '𐰒', OldTurkic::LetterOrkhonAed => '𐰓', OldTurkic::LetterOrkhonEz => '𐰔', OldTurkic::LetterYeniseiEz => '𐰕', OldTurkic::LetterOrkhonAy => '𐰖', OldTurkic::LetterYeniseiAy => '𐰗', OldTurkic::LetterOrkhonAey => '𐰘', OldTurkic::LetterYeniseiAey => '𐰙', OldTurkic::LetterOrkhonAek => '𐰚', OldTurkic::LetterYeniseiAek => '𐰛', OldTurkic::LetterOrkhonOek => '𐰜', OldTurkic::LetterYeniseiOek => '𐰝', OldTurkic::LetterOrkhonAl => '𐰞', OldTurkic::LetterYeniseiAl => '𐰟', OldTurkic::LetterOrkhonAel => '𐰠', OldTurkic::LetterOrkhonElt => 
'𐰡', OldTurkic::LetterOrkhonEm => '𐰢', OldTurkic::LetterOrkhonAn => '𐰣', OldTurkic::LetterOrkhonAen => '𐰤', OldTurkic::LetterYeniseiAen => '𐰥', OldTurkic::LetterOrkhonEnt => '𐰦', OldTurkic::LetterYeniseiEnt => '𐰧', OldTurkic::LetterOrkhonEnc => '𐰨', OldTurkic::LetterYeniseiEnc => '𐰩', OldTurkic::LetterOrkhonEny => '𐰪', OldTurkic::LetterYeniseiEny => '𐰫', OldTurkic::LetterYeniseiAng => '𐰬', OldTurkic::LetterOrkhonEng => '𐰭', OldTurkic::LetterYeniseiAeng => '𐰮', OldTurkic::LetterOrkhonEp => '𐰯', OldTurkic::LetterOrkhonOp => '𐰰', OldTurkic::LetterOrkhonIc => '𐰱', OldTurkic::LetterOrkhonEc => '𐰲', OldTurkic::LetterYeniseiEc => '𐰳', OldTurkic::LetterOrkhonAq => '𐰴', OldTurkic::LetterYeniseiAq => '𐰵', OldTurkic::LetterOrkhonIq => '𐰶', OldTurkic::LetterYeniseiIq => '𐰷', OldTurkic::LetterOrkhonOq => '𐰸', OldTurkic::LetterYeniseiOq => '𐰹', OldTurkic::LetterOrkhonAr => '𐰺', OldTurkic::LetterYeniseiAr => '𐰻', OldTurkic::LetterOrkhonAer => '𐰼', OldTurkic::LetterOrkhonAs => '𐰽', OldTurkic::LetterOrkhonAes => '𐰾', OldTurkic::LetterOrkhonAsh => '𐰿', OldTurkic::LetterYeniseiAsh => '𐱀', OldTurkic::LetterOrkhonEsh => '𐱁', OldTurkic::LetterYeniseiEsh => '𐱂', OldTurkic::LetterOrkhonAt => '𐱃', OldTurkic::LetterYeniseiAt => '𐱄', OldTurkic::LetterOrkhonAet => '𐱅', OldTurkic::LetterYeniseiAet => '𐱆', OldTurkic::LetterOrkhonOt => '𐱇', OldTurkic::LetterOrkhonBash => '𐱈', } } } impl std::convert::TryFrom<char> for OldTurkic { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { '𐰀' => Ok(OldTurkic::LetterOrkhonA), '𐰁' => Ok(OldTurkic::LetterYeniseiA), '𐰂' => Ok(OldTurkic::LetterYeniseiAe), '𐰃' => Ok(OldTurkic::LetterOrkhonI), '𐰄' => Ok(OldTurkic::LetterYeniseiI), '𐰅' => Ok(OldTurkic::LetterYeniseiE), '𐰆' => Ok(OldTurkic::LetterOrkhonO), '𐰇' => Ok(OldTurkic::LetterOrkhonOe), '𐰈' => Ok(OldTurkic::LetterYeniseiOe), '𐰉' => Ok(OldTurkic::LetterOrkhonAb), '𐰊' => Ok(OldTurkic::LetterYeniseiAb), '𐰋' => Ok(OldTurkic::LetterOrkhonAeb), '𐰌' => Ok(OldTurkic::LetterYeniseiAeb), 
'𐰍' => Ok(OldTurkic::LetterOrkhonAg), '𐰎' => Ok(OldTurkic::LetterYeniseiAg), '𐰏' => Ok(OldTurkic::LetterOrkhonAeg), '𐰐' => Ok(OldTurkic::LetterYeniseiAeg), '𐰑' => Ok(OldTurkic::LetterOrkhonAd), '𐰒' => Ok(OldTurkic::LetterYeniseiAd), '𐰓' => Ok(OldTurkic::LetterOrkhonAed), '𐰔' => Ok(OldTurkic::LetterOrkhonEz), '𐰕' => Ok(OldTurkic::LetterYeniseiEz), '𐰖' => Ok(OldTurkic::LetterOrkhonAy), '𐰗' => Ok(OldTurkic::LetterYeniseiAy), '𐰘' => Ok(OldTurkic::LetterOrkhonAey), '𐰙' => Ok(OldTurkic::LetterYeniseiAey), '𐰚' => Ok(OldTurkic::LetterOrkhonAek), '𐰛' => Ok(OldTurkic::LetterYeniseiAek), '𐰜' => Ok(OldTurkic::LetterOrkhonOek), '𐰝' => Ok(OldTurkic::LetterYeniseiOek), '𐰞' => Ok(OldTurkic::LetterOrkhonAl), '𐰟' => Ok(OldTurkic::LetterYeniseiAl), '𐰠' => Ok(OldTurkic::LetterOrkhonAel), '𐰡' => Ok(OldTurkic::LetterOrkhonElt), '𐰢' => Ok(OldTurkic::LetterOrkhonEm), '𐰣' => Ok(OldTurkic::LetterOrkhonAn), '𐰤' => Ok(OldTurkic::LetterOrkhonAen), '𐰥' => Ok(OldTurkic::LetterYeniseiAen), '𐰦' => Ok(OldTurkic::LetterOrkhonEnt), '𐰧' => Ok(OldTurkic::LetterYeniseiEnt), '𐰨' => Ok(OldTurkic::LetterOrkhonEnc), '𐰩' => Ok(OldTurkic::LetterYeniseiEnc), '𐰪' => Ok(OldTurkic::LetterOrkhonEny), '𐰫' => Ok(OldTurkic::LetterYeniseiEny), '𐰬' => Ok(OldTurkic::LetterYeniseiAng), '𐰭' => Ok(OldTurkic::LetterOrkhonEng), '𐰮' => Ok(OldTurkic::LetterYeniseiAeng), '𐰯' => Ok(OldTurkic::LetterOrkhonEp), '𐰰' => Ok(OldTurkic::LetterOrkhonOp), '𐰱' => Ok(OldTurkic::LetterOrkhonIc), '𐰲' => Ok(OldTurkic::LetterOrkhonEc), '𐰳' => Ok(OldTurkic::LetterYeniseiEc), '𐰴' => Ok(OldTurkic::LetterOrkhonAq), '𐰵' => Ok(OldTurkic::LetterYeniseiAq), '𐰶' => Ok(OldTurkic::LetterOrkhonIq), '𐰷' => Ok(OldTurkic::LetterYeniseiIq), '𐰸' => Ok(OldTurkic::LetterOrkhonOq), '𐰹' => Ok(OldTurkic::LetterYeniseiOq), '𐰺' => Ok(OldTurkic::LetterOrkhonAr), '𐰻' => Ok(OldTurkic::LetterYeniseiAr), '𐰼' => Ok(OldTurkic::LetterOrkhonAer), '𐰽' => Ok(OldTurkic::LetterOrkhonAs), '𐰾' => Ok(OldTurkic::LetterOrkhonAes), '𐰿' => Ok(OldTurkic::LetterOrkhonAsh), '𐱀' => 
Ok(OldTurkic::LetterYeniseiAsh), '𐱁' => Ok(OldTurkic::LetterOrkhonEsh), '𐱂' => Ok(OldTurkic::LetterYeniseiEsh), '𐱃' => Ok(OldTurkic::LetterOrkhonAt), '𐱄' => Ok(OldTurkic::LetterYeniseiAt), '𐱅' => Ok(OldTurkic::LetterOrkhonAet), '𐱆' => Ok(OldTurkic::LetterYeniseiAet), '𐱇' => Ok(OldTurkic::LetterOrkhonOt), '𐱈' => Ok(OldTurkic::LetterOrkhonBash), _ => Err(()), } } } impl Into<u32> for OldTurkic { fn into(self) -> u32 { let c: char = self.into(); let hex = c .escape_unicode() .to_string() .replace("\\u{", "") .replace("}", ""); u32::from_str_radix(&hex, 16).unwrap() } } impl std::convert::TryFrom<u32> for OldTurkic { type Error = (); fn try_from(u: u32) -> Result<Self, Self::Error> { if let Ok(c) = char::try_from(u) { Self::try_from(c) } else { Err(()) } } } impl Iterator for OldTurkic { type Item = Self; fn next(&mut self) -> Option<Self> { let index: u32 = (*self).into(); use std::convert::TryFrom; Self::try_from(index + 1).ok() } } impl OldTurkic { /// The character with the lowest index in this unicode block pub fn new() -> Self { OldTurkic::LetterOrkhonA } /// The character's name, in sentence case pub fn name(&self) -> String { let s = std::format!("OldTurkic{:#?}", self); string_morph::to_sentence_case(&s) } }
/// Splits `n` into six decimal digits, most significant first.
/// The leading entry is `n / 100_000` without a modulo, exactly as the
/// puzzle's six-digit inputs require.
fn get_digits(n: i32) -> Vec<u8> {
    let mut divisor = 100_000;
    let mut digits = Vec::with_capacity(6);
    digits.push((n / divisor) as u8);
    for _ in 0..5 {
        divisor /= 10;
        digits.push(((n / divisor) % 10) as u8);
    }
    digits
}

/// A password is valid when its digits never decrease left-to-right AND
/// it contains a run of *exactly* two equal adjacent digits (a pair that
/// is not part of a larger group).
fn valid_password(n: i32) -> bool {
    let digits = get_digits(n);

    // Reject any decrease anywhere in the number.
    if digits.windows(2).any(|pair| pair[0] > pair[1]) {
        return false;
    }

    // Scan runs of equal digits; accept iff some run has length exactly 2.
    let mut start = 0;
    while start < digits.len() {
        let mut end = start;
        while end < digits.len() && digits[end] == digits[start] {
            end += 1;
        }
        if end - start == 2 {
            return true;
        }
        start = end;
    }
    false
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn valid_password_works() {
        assert!(valid_password(122345));
        assert!(!valid_password(111123));
        assert!(!valid_password(135679));
        assert!(!valid_password(111111));
        assert!(!valid_password(223450));
        assert!(!valid_password(123789));
        assert!(valid_password(112233));
        assert!(!valid_password(123444));
        assert!(valid_password(111122));
    }
}

fn main() {
    let lower_bound = 284639;
    let higher_bound = 748759;
    // Same half-open scan as before: neither bound itself is tested.
    let counter = ((lower_bound + 1)..higher_bound)
        .filter(|&candidate| valid_password(candidate))
        .count();
    println!("Solution: {}", counter);
}
use core::ops::Deref;
use diesel;
use diesel::prelude::*;
use rand;
use ring::{digest, pbkdf2};

use db::ConnectionSource;
use db::users;
use errors::*;

/// A user account row. Passwords are never stored: only a PBKDF2 hash and
/// the random salt it was derived with. `admin` mirrors the DB column as
/// an i32 (0 = regular user, non-zero = admin).
#[derive(Debug, Insertable, Queryable)]
#[table_name="users"]
pub struct User {
    pub name: String,
    pub password_salt: Vec<u8>,
    pub password_hash: Vec<u8>,
    pub admin: i32,
}

static DIGEST_ALG: &'static digest::Algorithm = &digest::SHA256;
const CREDENTIAL_LEN: usize = digest::SHA256_OUTPUT_LEN;
const HASH_ITERATIONS: u32 = 10000;
type PasswordHash = [u8; CREDENTIAL_LEN];

impl User {
    /// Builds a new user, generating a fresh 16-byte random salt and
    /// deriving the password hash from it.
    pub fn new(name: &str, password: &str, admin: bool) -> User {
        let salt = rand::random::<[u8; 16]>().to_vec();
        let hash = User::hash_password(&salt, password);
        User {
            name: name.to_owned(),
            password_salt: salt,
            password_hash: hash,
            admin: admin as i32,
        }
    }

    /// Returns true iff `attempted_password`, derived with this user's
    /// stored salt, matches the stored hash.
    pub fn verify_password(&self, attempted_password: &str) -> bool {
        pbkdf2::verify(DIGEST_ALG,
                       HASH_ITERATIONS,
                       &self.password_salt,
                       attempted_password.as_bytes(),
                       &self.password_hash)
            .is_ok()
    }

    // Takes `&[u8]` rather than `&Vec<u8>`: any byte slice works and the
    // existing `&Vec<u8>` call sites coerce automatically.
    fn hash_password(salt: &[u8], password: &str) -> Vec<u8> {
        let mut hash: PasswordHash = [0; CREDENTIAL_LEN];
        pbkdf2::derive(DIGEST_ALG,
                       HASH_ITERATIONS,
                       salt,
                       password.as_bytes(),
                       &mut hash);
        hash.to_vec()
    }
}

/// Checks `password` for `username`.
/// Returns Ok(false) when the user does not exist (or the password is
/// wrong); any other database error is propagated.
pub fn auth<T>(db: &T, username: &str, password: &str) -> Result<bool>
    where T: ConnectionSource
{
    use db::users::dsl::*;
    let connection = db.get_connection();
    let user: QueryResult<User> = users
        .select((name, password_salt, password_hash, admin))
        .filter(name.eq(username))
        .get_result(connection.deref());
    match user {
        // An unknown user is an authentication failure, not an error.
        Err(diesel::result::Error::NotFound) => Ok(false),
        Ok(u) => Ok(u.verify_password(password)),
        Err(e) => Err(e.into()),
    }
}

/// Total number of user rows.
pub fn count<T>(db: &T) -> Result<i64>
    where T: ConnectionSource
{
    use db::users::dsl::*;
    let connection = db.get_connection();
    let count = users.count().get_result(connection.deref())?;
    Ok(count)
}

/// Whether `username` has the admin flag set.
/// Errors (including a missing user, surfaced as `NotFound`) propagate.
pub fn is_admin<T>(db: &T, username: &str) -> Result<bool>
    where T: ConnectionSource
{
    use db::users::dsl::*;
    let connection = db.get_connection();
    let is_admin: i32 = users
        .filter(name.eq(username))
        .select(admin)
        .get_result(connection.deref())?;
    Ok(is_admin != 0)
}
use sdl2::keyboard::Keycode;

/// Pressed/released state of the 16 CHIP-8 keys, indexed 0x0..=0xF.
pub struct Keys {
    key: [bool; 16],
}

impl Keys {
    /// Every key starts released.
    pub fn new() -> Keys {
        Keys { key: [false; 16] }
    }

    /// True while the key with hex index `id` is held down.
    pub fn is_down(&self, id: usize) -> bool {
        self.key[id]
    }

    /// Records `state` for the CHIP-8 key mapped to the given SDL keycode,
    /// using the conventional 1234/QWER/ASDF/ZXCV layout. `None` and
    /// unmapped keycodes are ignored.
    pub fn set_keys(&mut self, option_code: Option<Keycode>, state: bool) {
        let code = match option_code {
            Some(code) => code,
            None => return,
        };
        let slot = match code {
            Keycode::Num1 => 0x1,
            Keycode::Num2 => 0x2,
            Keycode::Num3 => 0x3,
            Keycode::Num4 => 0xC,
            Keycode::Q => 0x4,
            Keycode::W => 0x5,
            Keycode::E => 0x6,
            Keycode::R => 0xD,
            Keycode::A => 0x7,
            Keycode::S => 0x8,
            Keycode::D => 0x9,
            Keycode::F => 0xE,
            Keycode::Z => 0xA,
            Keycode::X => 0x0,
            Keycode::C => 0xB,
            Keycode::V => 0xF,
            _ => return,
        };
        self.key[slot] = state;
    }
}
use wallet::Wallet; pub fn run(wallet_path: Path, args: &[String]) { assert!(args.is_empty()); let wallet = Wallet::new(&wallet_path); match wallet.save() { Ok(_) => println!("New wallet saved to {}.", wallet_path.display()), Err(e) => println!("Error saving wallet: {}", e) }; }
use winapi::shared::winerror::ERROR_SUCCESS; use winapi::um::accctrl::*; use winapi::um::aclapi::*; use winapi::um::minwinbase::{LPTR, PSECURITY_ATTRIBUTES, SECURITY_ATTRIBUTES}; use winapi::um::securitybaseapi::*; use winapi::um::winbase::{LocalAlloc, LocalFree}; use winapi::um::winnt::*; use std::io; use std::marker; use std::mem; use std::ptr; /// Security attributes. pub struct SecurityAttributes { attributes: Option<InnerAttributes>, } impl SecurityAttributes { /// New default security attributes. pub fn empty() -> SecurityAttributes { SecurityAttributes { attributes: None } } /// New default security attributes that allow everyone to connect. pub fn allow_everyone_connect(&self) -> io::Result<SecurityAttributes> { let attributes = Some(InnerAttributes::allow_everyone( GENERIC_READ | FILE_WRITE_DATA, )?); Ok(SecurityAttributes { attributes }) } /// Set a custom permission on the socket pub fn set_mode(mut self, _mode: u32) -> io::Result<Self> { // for now, does nothing. Ok(self) } /// New default security attributes that allow everyone to create. pub fn allow_everyone_create() -> io::Result<SecurityAttributes> { let attributes = Some(InnerAttributes::allow_everyone( GENERIC_READ | GENERIC_WRITE, )?); Ok(SecurityAttributes { attributes }) } /// Return raw handle of security attributes. pub(crate) unsafe fn as_ptr(&mut self) -> PSECURITY_ATTRIBUTES { match self.attributes.as_mut() { Some(attributes) => attributes.as_ptr(), None => ptr::null_mut(), } } } unsafe impl Send for SecurityAttributes {} struct Sid { sid_ptr: PSID, } impl Sid { fn everyone_sid() -> io::Result<Sid> { let mut sid_ptr = ptr::null_mut(); let result = unsafe { AllocateAndInitializeSid( SECURITY_WORLD_SID_AUTHORITY.as_mut_ptr() as *mut _, 1, SECURITY_WORLD_RID, 0, 0, 0, 0, 0, 0, 0, &mut sid_ptr, ) }; if result == 0 { Err(io::Error::last_os_error()) } else { Ok(Sid { sid_ptr }) } } // Unsafe - the returned pointer is only valid for the lifetime of self. 
unsafe fn as_ptr(&self) -> PSID { self.sid_ptr } } impl Drop for Sid { fn drop(&mut self) { if !self.sid_ptr.is_null() { unsafe { FreeSid(self.sid_ptr); } } } } struct AceWithSid<'a> { explicit_access: EXPLICIT_ACCESS_W, _marker: marker::PhantomData<&'a Sid>, } impl<'a> AceWithSid<'a> { fn new(sid: &'a Sid, trustee_type: u32) -> AceWithSid<'a> { let mut explicit_access = unsafe { mem::zeroed::<EXPLICIT_ACCESS_W>() }; explicit_access.Trustee.TrusteeForm = TRUSTEE_IS_SID; explicit_access.Trustee.TrusteeType = trustee_type; explicit_access.Trustee.ptstrName = unsafe { sid.as_ptr() as *mut _ }; AceWithSid { explicit_access, _marker: marker::PhantomData, } } fn set_access_mode(&mut self, access_mode: u32) -> &mut Self { self.explicit_access.grfAccessMode = access_mode; self } fn set_access_permissions(&mut self, access_permissions: u32) -> &mut Self { self.explicit_access.grfAccessPermissions = access_permissions; self } fn allow_inheritance(&mut self, inheritance_flags: u32) -> &mut Self { self.explicit_access.grfInheritance = inheritance_flags; self } } struct Acl { acl_ptr: PACL, } impl Acl { fn empty() -> io::Result<Acl> { Self::new(&mut []) } fn new(entries: &mut [AceWithSid<'_>]) -> io::Result<Acl> { let mut acl_ptr = ptr::null_mut(); let result = unsafe { SetEntriesInAclW( entries.len() as u32, entries.as_mut_ptr() as *mut _, ptr::null_mut(), &mut acl_ptr, ) }; if result != ERROR_SUCCESS { return Err(io::Error::from_raw_os_error(result as i32)); } Ok(Acl { acl_ptr }) } unsafe fn as_ptr(&self) -> PACL { self.acl_ptr } } impl Drop for Acl { fn drop(&mut self) { if !self.acl_ptr.is_null() { unsafe { LocalFree(self.acl_ptr as *mut _) }; } } } struct SecurityDescriptor { descriptor_ptr: PSECURITY_DESCRIPTOR, } impl SecurityDescriptor { fn new() -> io::Result<Self> { let descriptor_ptr = unsafe { LocalAlloc(LPTR, SECURITY_DESCRIPTOR_MIN_LENGTH) }; if descriptor_ptr.is_null() { return Err(io::Error::new( io::ErrorKind::Other, "Failed to allocate security descriptor", 
)); } if unsafe { InitializeSecurityDescriptor(descriptor_ptr, SECURITY_DESCRIPTOR_REVISION) == 0 } { return Err(io::Error::last_os_error()); }; Ok(SecurityDescriptor { descriptor_ptr }) } fn set_dacl(&mut self, acl: &Acl) -> io::Result<()> { if unsafe { SetSecurityDescriptorDacl(self.descriptor_ptr, true as i32, acl.as_ptr(), false as i32) == 0 } { return Err(io::Error::last_os_error()); } Ok(()) } unsafe fn as_ptr(&self) -> PSECURITY_DESCRIPTOR { self.descriptor_ptr } } impl Drop for SecurityDescriptor { fn drop(&mut self) { if !self.descriptor_ptr.is_null() { unsafe { LocalFree(self.descriptor_ptr) }; self.descriptor_ptr = ptr::null_mut(); } } } struct InnerAttributes { descriptor: SecurityDescriptor, acl: Acl, attrs: SECURITY_ATTRIBUTES, } impl InnerAttributes { fn empty() -> io::Result<InnerAttributes> { let descriptor = SecurityDescriptor::new()?; let mut attrs = unsafe { mem::zeroed::<SECURITY_ATTRIBUTES>() }; attrs.nLength = mem::size_of::<SECURITY_ATTRIBUTES>() as u32; attrs.lpSecurityDescriptor = unsafe { descriptor.as_ptr() }; attrs.bInheritHandle = false as i32; let acl = Acl::empty().expect("this should never fail"); Ok(InnerAttributes { acl, descriptor, attrs, }) } fn allow_everyone(permissions: u32) -> io::Result<InnerAttributes> { let mut attributes = Self::empty()?; let sid = Sid::everyone_sid()?; println!("pisec"); let mut everyone_ace = AceWithSid::new(&sid, TRUSTEE_IS_WELL_KNOWN_GROUP); everyone_ace .set_access_mode(SET_ACCESS) .set_access_permissions(permissions) .allow_inheritance(false as u32); let mut entries = vec![everyone_ace]; attributes.acl = Acl::new(&mut entries)?; attributes.descriptor.set_dacl(&attributes.acl)?; Ok(attributes) } unsafe fn as_ptr(&mut self) -> PSECURITY_ATTRIBUTES { &mut self.attrs as *mut _ } } #[cfg(test)] mod test { use super::SecurityAttributes; #[test] fn test_allow_everyone_everything() { SecurityAttributes::allow_everyone_create() .expect("failed to create security attributes that allow everyone to create a 
pipe"); } #[test] fn test_allow_eveyone_read_write() { SecurityAttributes::empty() .allow_everyone_connect() .expect("failed to create security attributes that allow everyone to read and write to/from a pipe"); } }
use super::*;
use std::io;
use std::sync::mpsc::channel;
use std::sync::mpsc::Receiver;
use std::sync::mpsc::Sender;

// In-memory stand-in for the server's packet source: whatever the test
// client pushes into the channel comes out of `read_packet`.
struct FakeReader {
    receiver: Receiver<String>,
}

impl FakeReader {
    // Returns the sending half (for the test client) together with the reader.
    fn new() -> (Sender<String>, FakeReader) {
        let (sender, receiver) = channel();
        return (sender, FakeReader { receiver: receiver });
    }
}

impl Read for FakeReader {
    // Blocks until a packet arrives; a closed channel is reported as EOF so
    // the server terminates the way it would on a dropped connection.
    fn read_packet(&mut self) -> Result<String, io::Error> {
        self.receiver
            .recv()
            .map_err(|_| io::Error::new(io::ErrorKind::UnexpectedEof, "EOF encountered"))
    }
}

// In-memory stand-in for the server's packet sink: everything the server
// writes becomes receivable by the test client.
struct FakeWriter {
    sender: Sender<String>,
}

impl FakeWriter {
    // Returns the receiving half (for the test client) together with the writer.
    fn new() -> (Receiver<String>, FakeWriter) {
        let (sender, receiver) = channel();
        return (receiver, FakeWriter { sender: sender });
    }
}

impl Write for FakeWriter {
    fn write_packet(&self, packet: String) -> Result<(), io::Error> {
        self.sender.send(packet).unwrap();
        Ok(())
    }
}

// Test-side handle: sends JSON requests into the server's reader and
// receives the server's replies from its writer. Failures panic rather
// than returning Err, which is fine inside tests.
struct Client {
    sender: Sender<String>,
    receiver: Receiver<String>,
}

impl Client {
    // Receives one message from the server, parsed as JSON.
    fn recv(&self) -> Result<serde_json::Value, ()> {
        Ok(self.receiver.recv().unwrap().parse().unwrap())
    }

    // Serialises `req` and hands it to the server's reader.
    fn send(&self, req: serde_json::Value) -> Result<(), ()> {
        self.sender.send(req.to_string()).unwrap();
        Ok(())
    }
}

// Wires a Client to a Server through the two fake endpoints above: the
// client's sender feeds the server's reader, and the server's writer
// feeds the client's receiver.
fn create_client_and_server() -> (Client, Server<FakeReader, FakeWriter>) {
    let (writer_ch, writer) = FakeWriter::new();
    let (reader_ch, reader) = FakeReader::new();
    let client = Client {
        sender: reader_ch,
        receiver: writer_ch,
    };
    let server = Server::new(reader, writer);
    return (client, server);
}

#[test]
fn responds_to_initialize() {
    let (client, server) = create_client_and_server();
    // The server runs on its own thread; the test drives it over the channels.
    let t = std::thread::spawn(move || {
        run(server);
    });
    client
        .send(json!({
            "jsonrpc": "2.0",
            "id": 1,
            "method": "initialize",
            "params": {
                "processId": serde_json::Value::Null,
                "rootUri": serde_json::Value::Null,
                "capabilities": { },
            },
        }))
        .unwrap();
    client.recv().unwrap();
    // `exit` makes `run` return so the thread can be joined.
    client
        .send(json!({
            "jsonrpc": "2.0",
            "method": "exit",
        }))
        .unwrap();
    t.join().unwrap();
}

#[test]
fn document_hightlight_highlights_the_same_variable() {
    // TODO: This has to be refactored to make writing tests easy.
    let (client, server) = create_client_and_server();
    let t = std::thread::spawn(move || {
        run(server);
    });
    // Initialize
    client
        .send(json!({
            "jsonrpc": "2.0",
            "id": 1,
            "method": "initialize",
            "params": {
                "processId": serde_json::Value::Null,
                "rootUri": serde_json::Value::Null,
                "capabilities": { },
            },
        }))
        .unwrap();
    // Receive initialized
    // TODO: verify this.
    client.recv().unwrap();
    // Open document (notification)
    client
        .send(json!({
            "jsonrpc": "2.0",
            "method": "textDocument/didOpen",
            "params": {
                "textDocument": {
                    "uri": "file:///home/user/test.vim",
                    "languageId": "vim",
                    "version": 1,
                    "text": "let myvar = 1\nlet myvar = 2\n",
                },
            },
        }))
        .unwrap();
    // Diagnostic notification
    client.recv().unwrap();
    // Request hightlights for the `myvar` occurrence on line 0
    client
        .send(json!({
            "jsonrpc": "2.0",
            "id": 1,
            "method": "textDocument/documentHighlight",
            "params": {
                "textDocument": {
                    "uri": "file:///home/user/test.vim",
                },
                "position": {
                    "line": 0,
                    "character": 5,
                },
            },
        }))
        .unwrap();
    let response = client.recv().unwrap();
    let result = response.get("result").unwrap().clone();
    let x: Vec<DocumentHighlight> = serde_json::from_value(result).unwrap();
    // TODO: This is invalid response, we should actually report both variables not just the
    // first one.
    assert_eq!(
        x,
        vec![DocumentHighlight {
            kind: None,
            range: Range {
                start: Position {
                    line: 0,
                    character: 4,
                },
                end: Position {
                    line: 0,
                    character: 9,
                },
            },
        }]
    );
    // Exit
    client
        .send(json!({
            "jsonrpc": "2.0",
            "method": "exit",
        }))
        .unwrap();
    t.join().unwrap();
}
use std::{
    convert::TryFrom,
    fs::File,
    io::{BufReader, Seek},
    pin::Pin,
    sync::Arc,
    task::{Context, Poll},
    time::SystemTime,
};

use rustls::{
    client::{ClientConfig, ServerCertVerified, ServerCertVerifier, ServerName},
    Certificate,
    Error as TlsError,
    OwnedTrustAnchor,
    RootCertStore,
};
use rustls_pemfile::{certs, read_one, Item};
use tokio::io::{AsyncRead, AsyncWrite};
use tokio_rustls::TlsConnector;
use webpki_roots::TLS_SERVER_ROOTS;

use crate::{
    client::options::TlsOptions,
    error::{ErrorKind, Result},
};

use super::stream::AsyncTcpStream;

/// A TCP stream wrapped in a client-side rustls TLS session.
#[derive(Debug)]
pub(crate) struct AsyncTlsStream {
    inner: tokio_rustls::client::TlsStream<AsyncTcpStream>,
}

/// Configuration required to use TLS. Creating this is expensive, so its best to cache this value
/// and reuse it for multiple connections.
#[derive(Clone)]
pub(crate) struct TlsConfig {
    connector: TlsConnector,
}

impl TlsConfig {
    /// Create a new `TlsConfig` from the provided options from the user.
    /// This operation is expensive, so the resultant `TlsConfig` should be cached.
    pub(crate) fn new(options: TlsOptions) -> Result<TlsConfig> {
        let mut tls_config = make_rustls_config(options)?;
        // SNI lets the server choose the right certificate for the host.
        // NOTE(review): rustls appears to enable SNI by default, so this
        // looks belt-and-braces — confirm before removing.
        tls_config.enable_sni = true;

        let connector: TlsConnector = Arc::new(tls_config).into();
        Ok(TlsConfig { connector })
    }
}

impl AsyncTlsStream {
    /// Performs the TLS handshake with `host` over the already-connected
    /// `tcp_stream`, returning the encrypted stream.
    ///
    /// A `host` string that is neither a DNS name nor an IP address fails
    /// `ServerName` parsing and is surfaced as a DNS-resolution error.
    pub(crate) async fn connect(
        host: &str,
        tcp_stream: AsyncTcpStream,
        cfg: &TlsConfig,
    ) -> Result<Self> {
        let name = ServerName::try_from(host).map_err(|e| ErrorKind::DnsResolve {
            message: format!("could not resolve {:?}: {}", host, e),
        })?;
        let conn = cfg
            .connector
            .connect_with(name, tcp_stream, |c| {
                // Lift rustls's internal buffered-data cap for this session.
                c.set_buffer_limit(None);
            })
            .await?;
        Ok(Self { inner: conn })
    }
}

// The AsyncRead/AsyncWrite impls below are pure delegation to the wrapped
// tokio-rustls stream.
impl AsyncRead for AsyncTlsStream {
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut tokio::io::ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        Pin::new(&mut self.inner).poll_read(cx, buf)
    }
}

impl AsyncWrite for AsyncTlsStream {
    fn poll_write(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<std::io::Result<usize>> {
        Pin::new(&mut self.inner).poll_write(cx, buf)
    }

    fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<std::io::Result<()>> {
        Pin::new(&mut self.inner).poll_flush(cx)
    }

    fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<std::io::Result<()>> {
        Pin::new(&mut self.inner).poll_shutdown(cx)
    }
}

/// Converts `TlsOptions` into a rustls::ClientConfig.
fn make_rustls_config(cfg: TlsOptions) -> Result<rustls::ClientConfig> { let mut store = RootCertStore::empty(); if let Some(path) = cfg.ca_file_path { let ders = certs(&mut BufReader::new(File::open(&path)?)).map_err(|_| { ErrorKind::InvalidTlsConfig { message: format!( "Unable to parse PEM-encoded root certificate from {}", path.display() ), } })?; store.add_parsable_certificates(&ders); } else { let trust_anchors = TLS_SERVER_ROOTS.iter().map(|ta| { OwnedTrustAnchor::from_subject_spki_name_constraints( ta.subject, ta.spki, ta.name_constraints, ) }); store.add_trust_anchors(trust_anchors); } let mut config = if let Some(path) = cfg.cert_key_file_path { let mut file = BufReader::new(File::open(&path)?); let certs = match certs(&mut file) { Ok(certs) => certs.into_iter().map(Certificate).collect(), Err(error) => { return Err(ErrorKind::InvalidTlsConfig { message: format!( "Unable to parse PEM-encoded client certificate from {}: {}", path.display(), error, ), } .into()) } }; file.rewind()?; let key = loop { match read_one(&mut file) { Ok(Some(Item::PKCS8Key(bytes))) | Ok(Some(Item::RSAKey(bytes))) => { break rustls::PrivateKey(bytes) } Ok(Some(_)) => continue, Ok(None) => { return Err(ErrorKind::InvalidTlsConfig { message: format!("No PEM-encoded keys in {}", path.display()), } .into()) } Err(_) => { return Err(ErrorKind::InvalidTlsConfig { message: format!( "Unable to parse PEM-encoded item from {}", path.display() ), } .into()) } } }; ClientConfig::builder() .with_safe_defaults() .with_root_certificates(store) .with_client_auth_cert(certs, key) .map_err(|error| ErrorKind::InvalidTlsConfig { message: error.to_string(), })? 
} else { ClientConfig::builder() .with_safe_defaults() .with_root_certificates(store) .with_no_client_auth() }; if let Some(true) = cfg.allow_invalid_certificates { config .dangerous() .set_certificate_verifier(Arc::new(NoCertVerifier {})); } Ok(config) } struct NoCertVerifier {} impl ServerCertVerifier for NoCertVerifier { fn verify_server_cert( &self, _: &Certificate, _: &[Certificate], _: &ServerName, _: &mut dyn Iterator<Item = &[u8]>, _: &[u8], _: SystemTime, ) -> std::result::Result<ServerCertVerified, TlsError> { Ok(ServerCertVerified::assertion()) } }
/// Parses `input.txt` (one integer per line, read at compile time via
/// `include_str!`) into a collection of numbers.
///
/// Expands to an expression; the element and collection types are
/// inferred at the use site, e.g. `let xs: Vec<i64> = nums!();`.
macro_rules! _nums {
    () => {
        include_str!("input.txt")
            .lines()
            .map(|line| {
                line.trim()
                    .parse()
                    // `unwrap_or_else` builds the panic message lazily;
                    // the previous `.expect(&format!(...))` formatted and
                    // allocated the message on every line, even on success
                    // (clippy::expect_fun_call). The message is unchanged.
                    .unwrap_or_else(|_| panic!("unparsable number {}!", line.trim()))
            })
            .collect()
    };
}

// This hack appears to be the only way to properly export our macro at the proper scope.
pub(crate) use _nums as nums;
use rhombus_core::hex::coordinates::{
    axial::AxialVector,
    cubic::CubicVector,
    direction::{HexagonalDirection, NUM_DIRECTIONS},
};

/// An inclusive `[start, end]` interval along one cubic axis.
#[derive(Clone, Copy, Debug)]
pub struct Range {
    start: isize,
    end: isize,
}

impl Range {
    pub fn start(&self) -> isize {
        self.start
    }

    pub fn end(&self) -> isize {
        self.end
    }

    pub fn start_mut(&mut self) -> &mut isize {
        &mut self.start
    }

    pub fn end_mut(&mut self) -> &mut isize {
        &mut self.end
    }

    /// Inclusive on both bounds.
    pub fn contains(&self, value: isize) -> bool {
        self.start <= value && value <= self.end
    }
}

impl From<(isize, isize)> for Range {
    fn from(tuple: (isize, isize)) -> Self {
        Range {
            start: tuple.0,
            end: tuple.1,
        }
    }
}

/// A hexagon-shaped region of a hex grid, described by one coordinate
/// range per cubic axis (x, y, z).
#[derive(Clone, Debug)]
pub struct CubicRangeShape {
    range_x: Range,
    range_y: Range,
    range_z: Range,
}

impl CubicRangeShape {
    /// Builds a shape from the three axis ranges.
    ///
    /// Panics when the ranges are inconsistent, i.e. when any of the six
    /// signed edge lengths would be negative (see `are_ranges_valid`).
    pub fn new<R: Into<Range>>(range_x: R, range_y: R, range_z: R) -> Self {
        let range_x = range_x.into();
        let range_y = range_y.into();
        let range_z = range_z.into();
        if !Self::are_ranges_valid(&range_x, &range_y, &range_z) {
            panic!(
                "Invalid CubicRangeShape [{}, {}], [{}, {}], [{}, {}]",
                range_x.start(),
                range_x.end(),
                range_y.start(),
                range_y.end(),
                range_z.start(),
                range_z.end()
            );
        }
        Self {
            range_x,
            range_y,
            range_z,
        }
    }

    pub fn range_x(&self) -> &Range {
        &self.range_x
    }

    pub fn range_y(&self) -> &Range {
        &self.range_y
    }

    pub fn range_z(&self) -> &Range {
        &self.range_z
    }

    // Lengths of the six perimeter edges. Safe to cast to usize because a
    // constructed shape's signed edge lengths are all non-negative.
    #[allow(dead_code)]
    fn edges_length(&self) -> [usize; 6] {
        let signed = Self::signed_edges_lengths(&self.range_x, &self.range_y, &self.range_z);
        [
            signed[0] as usize,
            signed[1] as usize,
            signed[2] as usize,
            signed[3] as usize,
            signed[4] as usize,
            signed[5] as usize,
        ]
    }

    /// True iff all six signed edge lengths are non-negative, i.e. the
    /// three ranges describe a real (possibly degenerate) hexagon.
    pub fn are_ranges_valid(range_x: &Range, range_y: &Range, range_z: &Range) -> bool {
        let edges_lengths = Self::signed_edges_lengths(range_x, range_y, range_z);
        for edge_length in &edges_lengths {
            if *edge_length < 0 {
                return false;
            }
        }
        return true;
    }

    // Edge lengths of the hexagon's six sides; negative entries mean the
    // ranges are inconsistent.
    fn signed_edges_lengths(range_x: &Range, range_y: &Range, range_z: &Range) -> [isize; 6] {
        [
            -range_x.start() - range_y.start() - range_z.end(),
            range_x.end() + range_y.start() + range_z.end(),
            -range_x.end() - range_y.start() - range_z.start(),
            range_x.end() + range_y.end() + range_z.start(),
            -range_x.start() - range_y.end() - range_z.start(),
            range_x.start() + range_y.end() + range_z.end(),
        ]
    }

    /// The six corner hexes. The first entry is the same hex that
    /// `perimeter()` starts its traversal from.
    pub fn vertices(&self) -> [AxialVector; 6] {
        [
            CubicVector::new(
                self.range_x.start(),
                -self.range_x.start() - self.range_z.end(),
                self.range_z.end(),
            )
            .into(),
            CubicVector::new(
                -self.range_y.start() - self.range_z.end(),
                self.range_y.start(),
                self.range_z.end(),
            )
            .into(),
            CubicVector::new(
                self.range_x.end(),
                self.range_y.start(),
                -self.range_x.end() - self.range_y.start(),
            )
            .into(),
            CubicVector::new(
                self.range_x.end(),
                -self.range_x.end() - self.range_z.start(),
                self.range_z.start(),
            )
            .into(),
            CubicVector::new(
                -self.range_y.end() - self.range_z.start(),
                self.range_y.end(),
                self.range_z.start(),
            )
            .into(),
            CubicVector::new(
                self.range_x.start(),
                self.range_y.end(),
                -self.range_x.start() - self.range_y.end(),
            )
            .into(),
        ]
    }

    /// Iterates over every hex on the shape's boundary, starting at the
    /// first vertex.
    pub fn perimeter(&self) -> PerimeterIter {
        PerimeterIter::new(
            self.edges_length(),
            CubicVector::new(
                self.range_x.start(),
                -self.range_x.start() - self.range_z.end(),
                self.range_z.end(),
            )
            .into(),
        )
    }

    /// True iff `position` lies inside the shape (all three cubic
    /// coordinates fall within their ranges).
    pub fn contains_position(&self, position: AxialVector) -> bool {
        let cubic = CubicVector::from(position);
        self.range_x.contains(cubic.x())
            && self.range_y.contains(cubic.y())
            && self.range_z.contains(cubic.z())
    }

    /// True iff the two shapes share at least one hex. Checks full
    /// containment via one of `other`'s vertices, then walks `self`'s
    /// perimeter for a boundary crossing.
    pub fn intersects(&self, other: &Self) -> bool {
        if self.contains_position(other.vertices()[0]) {
            return true;
        }
        for pos in self.perimeter() {
            if other.contains_position(pos) {
                return true;
            }
        }
        return false;
    }

    /// The hex at (approximately, given integer division) the middle of
    /// the shape, computed from the range midpoints.
    pub fn center(&self) -> AxialVector {
        AxialVector::new(
            (self.range_x.start() + self.range_x.end()
                - (self.range_y.start()
                    + self.range_y.end()
                    + self.range_z.start()
                    + self.range_z.end())
                    / 2)
                / 3,
            (self.range_z.start() + self.range_z.end()
                - (self.range_x.start()
                    + self.range_x.end()
                    + self.range_y.start()
                    + self.range_y.end())
                    / 2)
                / 3,
        )
    }

    // The six stretch_*/shrink_* methods below all delegate to the two
    // generic helpers, cycling the axes so that each axis plays the role
    // of `a` in turn.

    pub fn stretch_x_start(&mut self, amount: usize) -> bool {
        Self::stretch_axis_start(
            &mut self.range_x,
            &mut self.range_y,
            &mut self.range_z,
            amount,
        )
    }

    pub fn stretch_y_start(&mut self, amount: usize) -> bool {
        Self::stretch_axis_start(
            &mut self.range_y,
            &mut self.range_z,
            &mut self.range_x,
            amount,
        )
    }

    pub fn stretch_z_start(&mut self, amount: usize) -> bool {
        Self::stretch_axis_start(
            &mut self.range_z,
            &mut self.range_x,
            &mut self.range_y,
            amount,
        )
    }

    // Moves a.start down by `amount`; if the affected signed edge length
    // went negative, widens the other two ranges to restore validity.
    // Always succeeds (returns true).
    fn stretch_axis_start(a: &mut Range, b: &mut Range, c: &mut Range, amount: usize) -> bool {
        *a.start_mut() -= amount as isize;
        if a.start() + b.end() + c.end() < 0 {
            *b.end_mut() += amount as isize;
            *c.end_mut() += amount as isize;
        }
        true
    }

    pub fn stretch_x_end(&mut self, amount: usize) -> bool {
        Self::stretch_axis_end(
            &mut self.range_x,
            &mut self.range_y,
            &mut self.range_z,
            amount,
        )
    }

    pub fn stretch_y_end(&mut self, amount: usize) -> bool {
        Self::stretch_axis_end(
            &mut self.range_y,
            &mut self.range_z,
            &mut self.range_x,
            amount,
        )
    }

    pub fn stretch_z_end(&mut self, amount: usize) -> bool {
        Self::stretch_axis_end(
            &mut self.range_z,
            &mut self.range_x,
            &mut self.range_y,
            amount,
        )
    }

    // Mirror of stretch_axis_start for the end bound.
    fn stretch_axis_end(a: &mut Range, b: &mut Range, c: &mut Range, amount: usize) -> bool {
        *a.end_mut() += amount as isize;
        if -a.end() - b.start() - c.start() < 0 {
            *b.start_mut() -= amount as isize;
            *c.start_mut() -= amount as isize;
        }
        true
    }

    pub fn shrink_x_start(&mut self, amount: usize) -> bool {
        Self::shrink_axis_start(
            &mut self.range_x,
            &mut self.range_y,
            &mut self.range_z,
            amount,
        )
    }

    pub fn shrink_y_start(&mut self, amount: usize) -> bool {
        Self::shrink_axis_start(
            &mut self.range_y,
            &mut self.range_z,
            &mut self.range_x,
            amount,
        )
    }

    pub fn shrink_z_start(&mut self, amount: usize) -> bool {
        Self::shrink_axis_start(
            &mut self.range_z,
            &mut self.range_x,
            &mut self.range_y,
            amount,
        )
    }

    // Moves a.start up by `amount`, tightening the other two ranges as
    // needed. Returns false (and does nothing) when the shrink would
    // invert the range.
    fn shrink_axis_start(a: &mut Range, b: &mut Range, c: &mut Range, amount: usize) -> bool {
        if a.start() + amount as isize <= a.end() {
            *a.start_mut() += amount as isize;
            if -a.start() - b.end() - c.start() < 0 {
                *b.end_mut() -= amount as isize;
            }
            if -a.start() - b.start() - c.end() < 0 {
                *c.end_mut() -= amount as isize;
            }
            true
        } else {
            false
        }
    }

    pub fn shrink_x_end(&mut self, amount: usize) -> bool {
        Self::shrink_axis_end(
            &mut self.range_x,
            &mut self.range_y,
            &mut self.range_z,
            amount,
        )
    }

    pub fn shrink_y_end(&mut self, amount: usize) -> bool {
        Self::shrink_axis_end(
            &mut self.range_y,
            &mut self.range_z,
            &mut self.range_x,
            amount,
        )
    }

    pub fn shrink_z_end(&mut self, amount: usize) -> bool {
        Self::shrink_axis_end(
            &mut self.range_z,
            &mut self.range_x,
            &mut self.range_y,
            amount,
        )
    }

    // Mirror of shrink_axis_start for the end bound.
    fn shrink_axis_end(a: &mut Range, b: &mut Range, c: &mut Range, amount: usize) -> bool {
        if a.start() + amount as isize <= a.end() {
            *a.end_mut() -= amount as isize;
            if a.end() + b.start() + c.end() < 0 {
                *b.start_mut() += amount as isize;
            }
            if a.end() + b.end() + c.start() < 0 {
                *c.start_mut() += amount as isize;
            }
            true
        } else {
            false
        }
    }
}

impl Default for CubicRangeShape {
    /// The unit hexagon: every axis range is [-1, 1].
    fn default() -> Self {
        CubicRangeShape::new((-1, 1), (-1, 1), (-1, 1))
    }
}

/// Iterator over the hexes of a shape's boundary, walking each of the six
/// edges in direction order.
pub struct PerimeterIter {
    edges_lengths: [usize; 6],
    direction: usize,
    next: AxialVector,
    edge_index: usize,
}

impl PerimeterIter {
    pub fn new(edges_lengths: [usize; 6], initial: AxialVector) -> Self {
        let mut direction = 0;
        // Drain all but last edge so that:
        // - the state is ready for next iteration
        // - the ring of size 0 case is handled correctly (it returns
        //   the first value, and no more then)
        while direction < 5 && edges_lengths[direction] == 0 {
            direction += 1;
        }
        Self {
            edges_lengths,
            direction,
            next: initial,
            edge_index: 1,
        }
    }

    /// The value the next `next()` call would yield, without advancing.
    pub fn peek(&mut self) -> Option<&AxialVector> {
        if self.direction < NUM_DIRECTIONS {
            Some(&self.next)
        } else {
            None
        }
    }
}

impl Iterator for PerimeterIter {
    type Item = AxialVector;

    fn next(&mut self) -> Option<Self::Item> {
        let edges_lengths = self.edges_lengths;
        let direction = self.direction;
        if direction < NUM_DIRECTIONS {
            let next = self.next;
            // Pre-compute the following hex along the current direction.
            self.next = next.neighbor(direction);
            let ei = self.edge_index;
            if ei < edges_lengths[direction] {
                self.edge_index = ei + 1;
            } else {
                // Current edge exhausted: move to the next non-empty edge.
                self.edge_index = 1;
                self.direction = direction + 1;
                while self.direction < NUM_DIRECTIONS && edges_lengths[self.direction] == 0 {
                    self.direction += 1;
                }
            }
            Some(next)
        } else {
            None
        }
    }

    // Exact: the perimeter length is the sum of the edge lengths.
    fn size_hint(&self) -> (usize, Option<usize>) {
        let length = self.edges_lengths.iter().sum();
        (length, Some(length))
    }
}
#![no_std]
#![no_main]
#![feature(abi_x86_interrupt)]
#![feature(custom_test_frameworks)]
#![test_runner(xagima::testing::runner)]
#![reexport_test_harness_main = "test_main"]
#![feature(default_alloc_error_handler)]

extern crate alloc;

use alloc::boxed::Box;
use bootloader::BootInfo;
use core::panic::PanicInfo;

// Delegate panics to the kernel's default test panic handler.
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
    xagima::testing::default_panic_handler(info)
}

// Entry point for this test binary: initialise the kernel with the
// bootloader-provided info, run the custom-framework test harness
// (re-exported above as `test_main`), then halt.
#[no_mangle]
pub extern "C" fn _start(boot_info: &'static BootInfo) -> ! {
    xagima::init(boot_info);
    test_main();
    xagima::halt();
}

// Sanity check that the harness itself runs.
#[test_case]
fn trivial_test() {
    assert_eq!(1, 1);
}

// Exercises heap allocation via `Box` after kernel init.
#[test_case]
fn can_create_boxes() {
    let answer = Box::new(42);
    assert_eq!(*answer, 42);
}
use Score; use DocId; use docset::{DocSet, SkipResult}; use postings::SegmentPostings; use query::Scorer; use postings::Postings; use fastfield::FastFieldReader; pub struct TermScorer { pub idf: Score, pub fieldnorm_reader_opt: Option<FastFieldReader<u64>>, pub postings: SegmentPostings, } impl TermScorer { pub fn postings(&self) -> &SegmentPostings { &self.postings } } impl DocSet for TermScorer { fn advance(&mut self) -> bool { self.postings.advance() } fn doc(&self) -> DocId { self.postings.doc() } fn size_hint(&self) -> u32 { self.postings.size_hint() } fn skip_next(&mut self, target: DocId) -> SkipResult { self.postings.skip_next(target) } } impl Scorer for TermScorer { fn score(&mut self) -> Score { let doc = self.postings.doc(); let tf = match self.fieldnorm_reader_opt { Some(ref fieldnorm_reader) => { let field_norm = fieldnorm_reader.get(doc); (self.postings.term_freq() as f32 / field_norm as f32) } None => self.postings.term_freq() as f32, }; self.idf * tf.sqrt() } }
use crate::directories::*;
use crate::EditorConfig;
use crate::ScrollConfig;
use rider_lexers::Language;
use rider_themes::Theme;
use std::collections::HashMap;
use std::fs;

// Maps a file extension (or "." for extensionless files) to the lexer
// language used to highlight it.
pub type LanguageMapping = HashMap<String, Language>;

/// Top-level editor configuration: window geometry, theme, per-extension
/// language mapping, scroll behaviour and filesystem locations.
#[derive(Debug, Clone)]
pub struct Config {
    width: u32,
    height: u32,
    menu_height: u16,
    editor_config: EditorConfig,
    theme: Theme,
    extensions_mapping: LanguageMapping,
    scroll: ScrollConfig,
    directories: Directories,
}

impl Config {
    /// Builds the default configuration: 1024x860 window, 40px menu bar,
    /// default theme, and mappings for plain text, Rust and TOML files.
    pub fn new() -> Self {
        let directories = Directories::new(None, None);
        let editor_config = EditorConfig::new(&directories);
        let mut extensions_mapping = HashMap::new();
        extensions_mapping.insert(".".to_string(), Language::PlainText);
        extensions_mapping.insert("txt".to_string(), Language::PlainText);
        extensions_mapping.insert("rs".to_string(), Language::Rust);
        extensions_mapping.insert("toml".to_string(), Language::Toml);
        Self {
            width: 1024,
            height: 860,
            menu_height: 40,
            theme: Theme::default(),
            editor_config,
            extensions_mapping,
            scroll: ScrollConfig::new(),
            directories,
        }
    }

    pub fn width(&self) -> u32 {
        self.width
    }

    pub fn set_width(&mut self, w: u32) {
        self.width = w;
    }

    pub fn height(&self) -> u32 {
        self.height
    }

    pub fn set_height(&mut self, h: u32) {
        self.height = h;
    }

    pub fn editor_config(&self) -> &EditorConfig {
        &self.editor_config
    }

    pub fn theme(&self) -> &Theme {
        &self.theme
    }

    pub fn menu_height(&self) -> u16 {
        self.menu_height
    }

    // Editor content starts below the menu bar plus the configured top margin.
    pub fn editor_top_margin(&self) -> i32 {
        i32::from(self.menu_height()) + i32::from(self.editor_config().margin_top())
    }

    pub fn editor_left_margin(&self) -> i32 {
        i32::from(self.editor_config().margin_left())
    }

    pub fn extensions_mapping(&self) -> &LanguageMapping {
        &self.extensions_mapping
    }

    pub fn scroll(&self) -> &ScrollConfig {
        &self.scroll
    }

    pub fn scroll_mut(&mut self) -> &mut ScrollConfig {
        &mut self.scroll
    }

    pub fn directories(&self) -> &Directories {
        &self.directories
    }

    /// Loads `theme` by name and makes it the active theme.
    pub fn set_theme(&mut self, theme: String) {
        self.theme = self.load_theme(theme);
    }
}

impl Config {
    /// Loads the theme named `theme_name` from `<themes_dir>/<name>.json`,
    /// falling back to the default theme on any read/parse failure.
    pub fn load_theme(&self, theme_name: String) -> Theme {
        // Ensure the user's config directory exists before reading.
        // NOTE(review): this creates `<config_dir>/rider` but the theme is
        // read from `self.directories.themes_dir` below — confirm these
        // are meant to be the same location.
        let home_dir = dirs::config_dir().unwrap();
        #[cfg_attr(tarpaulin, skip)]
        fs::create_dir_all(&home_dir.join("rider"))
            .unwrap_or_else(|_| panic!("Cannot create config directory"));
        self.load_theme_content(format!("{}.json", theme_name).as_str())
    }

    // Reads and deserialises a theme file; missing or invalid files yield
    // the default theme rather than an error.
    fn load_theme_content(&self, file_name: &str) -> Theme {
        let config_file = self.directories.themes_dir.clone();
        let contents = fs::read_to_string(&config_file.join(file_name)).unwrap_or_default();
        serde_json::from_str(&contents).unwrap_or_default()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn must_return_language_mapping() {
        let config = Config::new();
        let mapping = config.extensions_mapping();
        // Keys: the four default extensions.
        {
            let mut keys: Vec<String> = mapping.keys().map(|s| s.to_string()).collect();
            let mut expected: Vec<String> = vec![
                ".".to_string(),
                "txt".to_string(),
                "rs".to_string(),
                "toml".to_string(),
            ];
            keys.sort();
            expected.sort();
            assert_eq!(keys, expected);
        }
        // Values: the languages they map to (PlainText appears twice).
        {
            let mut keys: Vec<Language> = mapping.values().map(|s| s.clone()).collect();
            let mut expected: Vec<Language> = vec![
                Language::PlainText,
                Language::PlainText,
                Language::Rust,
                Language::Toml,
            ];
            keys.sort();
            expected.sort();
            assert_eq!(keys, expected);
        }
    }

    #[test]
    fn assert_scroll() {
        let config = Config::new();
        let result = config.scroll();
        let expected = ScrollConfig::new();
        assert_eq!(result.clone(), expected);
    }

    #[test]
    fn assert_scroll_mut() {
        let mut config = Config::new();
        let result = config.scroll_mut();
        result.set_margin_right(1236);
        let mut expected = ScrollConfig::new();
        expected.set_margin_right(1236);
        assert_eq!(result.clone(), expected);
    }
}

#[cfg(test)]
mod test_getters {
    use super::*;

    #[test]
    fn assert_width() {
        let config = Config::new();
        let result = config.width();
        let expected = 1024;
        assert_eq!(result, expected);
    }

    #[test]
    fn assert_height() {
        let config = Config::new();
        let result = config.height();
        let expected = 860;
        assert_eq!(result, expected);
    }

    #[test]
    fn assert_editor_config() {
        let config = Config::new();
        let result = config.editor_config();
        let expected = EditorConfig::new(&Directories::new(None, None));
        assert_eq!(result, &expected);
    }

    #[test]
    fn assert_theme() {
        let config = Config::new();
        let result = config.theme();
        let expected = Theme::default();
        assert_eq!(result, &expected);
    }

    #[test]
    fn assert_menu_height() {
        let config = Config::new();
        let result = config.menu_height();
        let expected = 40;
        assert_eq!(result, expected);
    }

    #[test]
    fn assert_editor_top_margin() {
        let config = Config::new();
        let result = config.editor_top_margin();
        let expected = config.menu_height() as i32 + config.editor_config().margin_top() as i32;
        assert_eq!(result, expected);
    }

    #[test]
    fn assert_editor_left_margin() {
        let config = Config::new();
        let result = config.editor_left_margin();
        let expected = 10;
        assert_eq!(result, expected);
    }

    #[test]
    fn assert_extensions_mapping() {
        let config = Config::new();
        let mut result: Vec<String> = config
            .extensions_mapping()
            .keys()
            .map(|s| s.to_owned())
            .collect();
        result.sort();
        let mut expected: Vec<String> = vec![
            "rs".to_string(),
            "txt".to_string(),
            ".".to_string(),
            "toml".to_string(),
        ];
        expected.sort();
        assert_eq!(result, expected);
    }
}
/// An enum to represent all characters in the Nushu block. #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum Nushu { /// \u{1b170}: '𛅰' CharacterDash1b170, /// \u{1b171}: '𛅱' CharacterDash1b171, /// \u{1b172}: '𛅲' CharacterDash1b172, /// \u{1b173}: '𛅳' CharacterDash1b173, /// \u{1b174}: '𛅴' CharacterDash1b174, /// \u{1b175}: '𛅵' CharacterDash1b175, /// \u{1b176}: '𛅶' CharacterDash1b176, /// \u{1b177}: '𛅷' CharacterDash1b177, /// \u{1b178}: '𛅸' CharacterDash1b178, /// \u{1b179}: '𛅹' CharacterDash1b179, /// \u{1b17a}: '𛅺' CharacterDash1b17a, /// \u{1b17b}: '𛅻' CharacterDash1b17b, /// \u{1b17c}: '𛅼' CharacterDash1b17c, /// \u{1b17d}: '𛅽' CharacterDash1b17d, /// \u{1b17e}: '𛅾' CharacterDash1b17e, /// \u{1b17f}: '𛅿' CharacterDash1b17f, /// \u{1b180}: '𛆀' CharacterDash1b180, /// \u{1b181}: '𛆁' CharacterDash1b181, /// \u{1b182}: '𛆂' CharacterDash1b182, /// \u{1b183}: '𛆃' CharacterDash1b183, /// \u{1b184}: '𛆄' CharacterDash1b184, /// \u{1b185}: '𛆅' CharacterDash1b185, /// \u{1b186}: '𛆆' CharacterDash1b186, /// \u{1b187}: '𛆇' CharacterDash1b187, /// \u{1b188}: '𛆈' CharacterDash1b188, /// \u{1b189}: '𛆉' CharacterDash1b189, /// \u{1b18a}: '𛆊' CharacterDash1b18a, /// \u{1b18b}: '𛆋' CharacterDash1b18b, /// \u{1b18c}: '𛆌' CharacterDash1b18c, /// \u{1b18d}: '𛆍' CharacterDash1b18d, /// \u{1b18e}: '𛆎' CharacterDash1b18e, /// \u{1b18f}: '𛆏' CharacterDash1b18f, /// \u{1b190}: '𛆐' CharacterDash1b190, /// \u{1b191}: '𛆑' CharacterDash1b191, /// \u{1b192}: '𛆒' CharacterDash1b192, /// \u{1b193}: '𛆓' CharacterDash1b193, /// \u{1b194}: '𛆔' CharacterDash1b194, /// \u{1b195}: '𛆕' CharacterDash1b195, /// \u{1b196}: '𛆖' CharacterDash1b196, /// \u{1b197}: '𛆗' CharacterDash1b197, /// \u{1b198}: '𛆘' CharacterDash1b198, /// \u{1b199}: '𛆙' CharacterDash1b199, /// \u{1b19a}: '𛆚' CharacterDash1b19a, /// \u{1b19b}: '𛆛' CharacterDash1b19b, /// \u{1b19c}: '𛆜' CharacterDash1b19c, /// \u{1b19d}: '𛆝' CharacterDash1b19d, /// \u{1b19e}: '𛆞' CharacterDash1b19e, /// \u{1b19f}: '𛆟' CharacterDash1b19f, 
/// \u{1b1a0}: '𛆠' CharacterDash1b1a0, /// \u{1b1a1}: '𛆡' CharacterDash1b1a1, /// \u{1b1a2}: '𛆢' CharacterDash1b1a2, /// \u{1b1a3}: '𛆣' CharacterDash1b1a3, /// \u{1b1a4}: '𛆤' CharacterDash1b1a4, /// \u{1b1a5}: '𛆥' CharacterDash1b1a5, /// \u{1b1a6}: '𛆦' CharacterDash1b1a6, /// \u{1b1a7}: '𛆧' CharacterDash1b1a7, /// \u{1b1a8}: '𛆨' CharacterDash1b1a8, /// \u{1b1a9}: '𛆩' CharacterDash1b1a9, /// \u{1b1aa}: '𛆪' CharacterDash1b1aa, /// \u{1b1ab}: '𛆫' CharacterDash1b1ab, /// \u{1b1ac}: '𛆬' CharacterDash1b1ac, /// \u{1b1ad}: '𛆭' CharacterDash1b1ad, /// \u{1b1ae}: '𛆮' CharacterDash1b1ae, /// \u{1b1af}: '𛆯' CharacterDash1b1af, /// \u{1b1b0}: '𛆰' CharacterDash1b1b0, /// \u{1b1b1}: '𛆱' CharacterDash1b1b1, /// \u{1b1b2}: '𛆲' CharacterDash1b1b2, /// \u{1b1b3}: '𛆳' CharacterDash1b1b3, /// \u{1b1b4}: '𛆴' CharacterDash1b1b4, /// \u{1b1b5}: '𛆵' CharacterDash1b1b5, /// \u{1b1b6}: '𛆶' CharacterDash1b1b6, /// \u{1b1b7}: '𛆷' CharacterDash1b1b7, /// \u{1b1b8}: '𛆸' CharacterDash1b1b8, /// \u{1b1b9}: '𛆹' CharacterDash1b1b9, /// \u{1b1ba}: '𛆺' CharacterDash1b1ba, /// \u{1b1bb}: '𛆻' CharacterDash1b1bb, /// \u{1b1bc}: '𛆼' CharacterDash1b1bc, /// \u{1b1bd}: '𛆽' CharacterDash1b1bd, /// \u{1b1be}: '𛆾' CharacterDash1b1be, /// \u{1b1bf}: '𛆿' CharacterDash1b1bf, /// \u{1b1c0}: '𛇀' CharacterDash1b1c0, /// \u{1b1c1}: '𛇁' CharacterDash1b1c1, /// \u{1b1c2}: '𛇂' CharacterDash1b1c2, /// \u{1b1c3}: '𛇃' CharacterDash1b1c3, /// \u{1b1c4}: '𛇄' CharacterDash1b1c4, /// \u{1b1c5}: '𛇅' CharacterDash1b1c5, /// \u{1b1c6}: '𛇆' CharacterDash1b1c6, /// \u{1b1c7}: '𛇇' CharacterDash1b1c7, /// \u{1b1c8}: '𛇈' CharacterDash1b1c8, /// \u{1b1c9}: '𛇉' CharacterDash1b1c9, /// \u{1b1ca}: '𛇊' CharacterDash1b1ca, /// \u{1b1cb}: '𛇋' CharacterDash1b1cb, /// \u{1b1cc}: '𛇌' CharacterDash1b1cc, /// \u{1b1cd}: '𛇍' CharacterDash1b1cd, /// \u{1b1ce}: '𛇎' CharacterDash1b1ce, /// \u{1b1cf}: '𛇏' CharacterDash1b1cf, /// \u{1b1d0}: '𛇐' CharacterDash1b1d0, /// \u{1b1d1}: '𛇑' CharacterDash1b1d1, /// \u{1b1d2}: '𛇒' CharacterDash1b1d2, /// 
\u{1b1d3}: '𛇓' CharacterDash1b1d3, /// \u{1b1d4}: '𛇔' CharacterDash1b1d4, /// \u{1b1d5}: '𛇕' CharacterDash1b1d5, /// \u{1b1d6}: '𛇖' CharacterDash1b1d6, /// \u{1b1d7}: '𛇗' CharacterDash1b1d7, /// \u{1b1d8}: '𛇘' CharacterDash1b1d8, /// \u{1b1d9}: '𛇙' CharacterDash1b1d9, /// \u{1b1da}: '𛇚' CharacterDash1b1da, /// \u{1b1db}: '𛇛' CharacterDash1b1db, /// \u{1b1dc}: '𛇜' CharacterDash1b1dc, /// \u{1b1dd}: '𛇝' CharacterDash1b1dd, /// \u{1b1de}: '𛇞' CharacterDash1b1de, /// \u{1b1df}: '𛇟' CharacterDash1b1df, /// \u{1b1e0}: '𛇠' CharacterDash1b1e0, /// \u{1b1e1}: '𛇡' CharacterDash1b1e1, /// \u{1b1e2}: '𛇢' CharacterDash1b1e2, /// \u{1b1e3}: '𛇣' CharacterDash1b1e3, /// \u{1b1e4}: '𛇤' CharacterDash1b1e4, /// \u{1b1e5}: '𛇥' CharacterDash1b1e5, /// \u{1b1e6}: '𛇦' CharacterDash1b1e6, /// \u{1b1e7}: '𛇧' CharacterDash1b1e7, /// \u{1b1e8}: '𛇨' CharacterDash1b1e8, /// \u{1b1e9}: '𛇩' CharacterDash1b1e9, /// \u{1b1ea}: '𛇪' CharacterDash1b1ea, /// \u{1b1eb}: '𛇫' CharacterDash1b1eb, /// \u{1b1ec}: '𛇬' CharacterDash1b1ec, /// \u{1b1ed}: '𛇭' CharacterDash1b1ed, /// \u{1b1ee}: '𛇮' CharacterDash1b1ee, /// \u{1b1ef}: '𛇯' CharacterDash1b1ef, /// \u{1b1f0}: '𛇰' CharacterDash1b1f0, /// \u{1b1f1}: '𛇱' CharacterDash1b1f1, /// \u{1b1f2}: '𛇲' CharacterDash1b1f2, /// \u{1b1f3}: '𛇳' CharacterDash1b1f3, /// \u{1b1f4}: '𛇴' CharacterDash1b1f4, /// \u{1b1f5}: '𛇵' CharacterDash1b1f5, /// \u{1b1f6}: '𛇶' CharacterDash1b1f6, /// \u{1b1f7}: '𛇷' CharacterDash1b1f7, /// \u{1b1f8}: '𛇸' CharacterDash1b1f8, /// \u{1b1f9}: '𛇹' CharacterDash1b1f9, /// \u{1b1fa}: '𛇺' CharacterDash1b1fa, /// \u{1b1fb}: '𛇻' CharacterDash1b1fb, /// \u{1b1fc}: '𛇼' CharacterDash1b1fc, /// \u{1b1fd}: '𛇽' CharacterDash1b1fd, /// \u{1b1fe}: '𛇾' CharacterDash1b1fe, /// \u{1b1ff}: '𛇿' CharacterDash1b1ff, /// \u{1b200}: '𛈀' CharacterDash1b200, /// \u{1b201}: '𛈁' CharacterDash1b201, /// \u{1b202}: '𛈂' CharacterDash1b202, /// \u{1b203}: '𛈃' CharacterDash1b203, /// \u{1b204}: '𛈄' CharacterDash1b204, /// \u{1b205}: '𛈅' CharacterDash1b205, /// \u{1b206}: 
'𛈆' CharacterDash1b206, /// \u{1b207}: '𛈇' CharacterDash1b207, /// \u{1b208}: '𛈈' CharacterDash1b208, /// \u{1b209}: '𛈉' CharacterDash1b209, /// \u{1b20a}: '𛈊' CharacterDash1b20a, /// \u{1b20b}: '𛈋' CharacterDash1b20b, /// \u{1b20c}: '𛈌' CharacterDash1b20c, /// \u{1b20d}: '𛈍' CharacterDash1b20d, /// \u{1b20e}: '𛈎' CharacterDash1b20e, /// \u{1b20f}: '𛈏' CharacterDash1b20f, /// \u{1b210}: '𛈐' CharacterDash1b210, /// \u{1b211}: '𛈑' CharacterDash1b211, /// \u{1b212}: '𛈒' CharacterDash1b212, /// \u{1b213}: '𛈓' CharacterDash1b213, /// \u{1b214}: '𛈔' CharacterDash1b214, /// \u{1b215}: '𛈕' CharacterDash1b215, /// \u{1b216}: '𛈖' CharacterDash1b216, /// \u{1b217}: '𛈗' CharacterDash1b217, /// \u{1b218}: '𛈘' CharacterDash1b218, /// \u{1b219}: '𛈙' CharacterDash1b219, /// \u{1b21a}: '𛈚' CharacterDash1b21a, /// \u{1b21b}: '𛈛' CharacterDash1b21b, /// \u{1b21c}: '𛈜' CharacterDash1b21c, /// \u{1b21d}: '𛈝' CharacterDash1b21d, /// \u{1b21e}: '𛈞' CharacterDash1b21e, /// \u{1b21f}: '𛈟' CharacterDash1b21f, /// \u{1b220}: '𛈠' CharacterDash1b220, /// \u{1b221}: '𛈡' CharacterDash1b221, /// \u{1b222}: '𛈢' CharacterDash1b222, /// \u{1b223}: '𛈣' CharacterDash1b223, /// \u{1b224}: '𛈤' CharacterDash1b224, /// \u{1b225}: '𛈥' CharacterDash1b225, /// \u{1b226}: '𛈦' CharacterDash1b226, /// \u{1b227}: '𛈧' CharacterDash1b227, /// \u{1b228}: '𛈨' CharacterDash1b228, /// \u{1b229}: '𛈩' CharacterDash1b229, /// \u{1b22a}: '𛈪' CharacterDash1b22a, /// \u{1b22b}: '𛈫' CharacterDash1b22b, /// \u{1b22c}: '𛈬' CharacterDash1b22c, /// \u{1b22d}: '𛈭' CharacterDash1b22d, /// \u{1b22e}: '𛈮' CharacterDash1b22e, /// \u{1b22f}: '𛈯' CharacterDash1b22f, /// \u{1b230}: '𛈰' CharacterDash1b230, /// \u{1b231}: '𛈱' CharacterDash1b231, /// \u{1b232}: '𛈲' CharacterDash1b232, /// \u{1b233}: '𛈳' CharacterDash1b233, /// \u{1b234}: '𛈴' CharacterDash1b234, /// \u{1b235}: '𛈵' CharacterDash1b235, /// \u{1b236}: '𛈶' CharacterDash1b236, /// \u{1b237}: '𛈷' CharacterDash1b237, /// \u{1b238}: '𛈸' CharacterDash1b238, /// \u{1b239}: '𛈹' 
CharacterDash1b239, /// \u{1b23a}: '𛈺' CharacterDash1b23a, /// \u{1b23b}: '𛈻' CharacterDash1b23b, /// \u{1b23c}: '𛈼' CharacterDash1b23c, /// \u{1b23d}: '𛈽' CharacterDash1b23d, /// \u{1b23e}: '𛈾' CharacterDash1b23e, /// \u{1b23f}: '𛈿' CharacterDash1b23f, /// \u{1b240}: '𛉀' CharacterDash1b240, /// \u{1b241}: '𛉁' CharacterDash1b241, /// \u{1b242}: '𛉂' CharacterDash1b242, /// \u{1b243}: '𛉃' CharacterDash1b243, /// \u{1b244}: '𛉄' CharacterDash1b244, /// \u{1b245}: '𛉅' CharacterDash1b245, /// \u{1b246}: '𛉆' CharacterDash1b246, /// \u{1b247}: '𛉇' CharacterDash1b247, /// \u{1b248}: '𛉈' CharacterDash1b248, /// \u{1b249}: '𛉉' CharacterDash1b249, /// \u{1b24a}: '𛉊' CharacterDash1b24a, /// \u{1b24b}: '𛉋' CharacterDash1b24b, /// \u{1b24c}: '𛉌' CharacterDash1b24c, /// \u{1b24d}: '𛉍' CharacterDash1b24d, /// \u{1b24e}: '𛉎' CharacterDash1b24e, /// \u{1b24f}: '𛉏' CharacterDash1b24f, /// \u{1b250}: '𛉐' CharacterDash1b250, /// \u{1b251}: '𛉑' CharacterDash1b251, /// \u{1b252}: '𛉒' CharacterDash1b252, /// \u{1b253}: '𛉓' CharacterDash1b253, /// \u{1b254}: '𛉔' CharacterDash1b254, /// \u{1b255}: '𛉕' CharacterDash1b255, /// \u{1b256}: '𛉖' CharacterDash1b256, /// \u{1b257}: '𛉗' CharacterDash1b257, /// \u{1b258}: '𛉘' CharacterDash1b258, /// \u{1b259}: '𛉙' CharacterDash1b259, /// \u{1b25a}: '𛉚' CharacterDash1b25a, /// \u{1b25b}: '𛉛' CharacterDash1b25b, /// \u{1b25c}: '𛉜' CharacterDash1b25c, /// \u{1b25d}: '𛉝' CharacterDash1b25d, /// \u{1b25e}: '𛉞' CharacterDash1b25e, /// \u{1b25f}: '𛉟' CharacterDash1b25f, /// \u{1b260}: '𛉠' CharacterDash1b260, /// \u{1b261}: '𛉡' CharacterDash1b261, /// \u{1b262}: '𛉢' CharacterDash1b262, /// \u{1b263}: '𛉣' CharacterDash1b263, /// \u{1b264}: '𛉤' CharacterDash1b264, /// \u{1b265}: '𛉥' CharacterDash1b265, /// \u{1b266}: '𛉦' CharacterDash1b266, /// \u{1b267}: '𛉧' CharacterDash1b267, /// \u{1b268}: '𛉨' CharacterDash1b268, /// \u{1b269}: '𛉩' CharacterDash1b269, /// \u{1b26a}: '𛉪' CharacterDash1b26a, /// \u{1b26b}: '𛉫' CharacterDash1b26b, /// \u{1b26c}: '𛉬' 
CharacterDash1b26c, /// \u{1b26d}: '𛉭' CharacterDash1b26d, /// \u{1b26e}: '𛉮' CharacterDash1b26e, /// \u{1b26f}: '𛉯' CharacterDash1b26f, /// \u{1b270}: '𛉰' CharacterDash1b270, /// \u{1b271}: '𛉱' CharacterDash1b271, /// \u{1b272}: '𛉲' CharacterDash1b272, /// \u{1b273}: '𛉳' CharacterDash1b273, /// \u{1b274}: '𛉴' CharacterDash1b274, /// \u{1b275}: '𛉵' CharacterDash1b275, /// \u{1b276}: '𛉶' CharacterDash1b276, /// \u{1b277}: '𛉷' CharacterDash1b277, /// \u{1b278}: '𛉸' CharacterDash1b278, /// \u{1b279}: '𛉹' CharacterDash1b279, /// \u{1b27a}: '𛉺' CharacterDash1b27a, /// \u{1b27b}: '𛉻' CharacterDash1b27b, /// \u{1b27c}: '𛉼' CharacterDash1b27c, /// \u{1b27d}: '𛉽' CharacterDash1b27d, /// \u{1b27e}: '𛉾' CharacterDash1b27e, /// \u{1b27f}: '𛉿' CharacterDash1b27f, /// \u{1b280}: '𛊀' CharacterDash1b280, /// \u{1b281}: '𛊁' CharacterDash1b281, /// \u{1b282}: '𛊂' CharacterDash1b282, /// \u{1b283}: '𛊃' CharacterDash1b283, /// \u{1b284}: '𛊄' CharacterDash1b284, /// \u{1b285}: '𛊅' CharacterDash1b285, /// \u{1b286}: '𛊆' CharacterDash1b286, /// \u{1b287}: '𛊇' CharacterDash1b287, /// \u{1b288}: '𛊈' CharacterDash1b288, /// \u{1b289}: '𛊉' CharacterDash1b289, /// \u{1b28a}: '𛊊' CharacterDash1b28a, /// \u{1b28b}: '𛊋' CharacterDash1b28b, /// \u{1b28c}: '𛊌' CharacterDash1b28c, /// \u{1b28d}: '𛊍' CharacterDash1b28d, /// \u{1b28e}: '𛊎' CharacterDash1b28e, /// \u{1b28f}: '𛊏' CharacterDash1b28f, /// \u{1b290}: '𛊐' CharacterDash1b290, /// \u{1b291}: '𛊑' CharacterDash1b291, /// \u{1b292}: '𛊒' CharacterDash1b292, /// \u{1b293}: '𛊓' CharacterDash1b293, /// \u{1b294}: '𛊔' CharacterDash1b294, /// \u{1b295}: '𛊕' CharacterDash1b295, /// \u{1b296}: '𛊖' CharacterDash1b296, /// \u{1b297}: '𛊗' CharacterDash1b297, /// \u{1b298}: '𛊘' CharacterDash1b298, /// \u{1b299}: '𛊙' CharacterDash1b299, /// \u{1b29a}: '𛊚' CharacterDash1b29a, /// \u{1b29b}: '𛊛' CharacterDash1b29b, /// \u{1b29c}: '𛊜' CharacterDash1b29c, /// \u{1b29d}: '𛊝' CharacterDash1b29d, /// \u{1b29e}: '𛊞' CharacterDash1b29e, /// \u{1b29f}: '𛊟' 
CharacterDash1b29f, /// \u{1b2a0}: '𛊠' CharacterDash1b2a0, /// \u{1b2a1}: '𛊡' CharacterDash1b2a1, /// \u{1b2a2}: '𛊢' CharacterDash1b2a2, /// \u{1b2a3}: '𛊣' CharacterDash1b2a3, /// \u{1b2a4}: '𛊤' CharacterDash1b2a4, /// \u{1b2a5}: '𛊥' CharacterDash1b2a5, /// \u{1b2a6}: '𛊦' CharacterDash1b2a6, /// \u{1b2a7}: '𛊧' CharacterDash1b2a7, /// \u{1b2a8}: '𛊨' CharacterDash1b2a8, /// \u{1b2a9}: '𛊩' CharacterDash1b2a9, /// \u{1b2aa}: '𛊪' CharacterDash1b2aa, /// \u{1b2ab}: '𛊫' CharacterDash1b2ab, /// \u{1b2ac}: '𛊬' CharacterDash1b2ac, /// \u{1b2ad}: '𛊭' CharacterDash1b2ad, /// \u{1b2ae}: '𛊮' CharacterDash1b2ae, /// \u{1b2af}: '𛊯' CharacterDash1b2af, /// \u{1b2b0}: '𛊰' CharacterDash1b2b0, /// \u{1b2b1}: '𛊱' CharacterDash1b2b1, /// \u{1b2b2}: '𛊲' CharacterDash1b2b2, /// \u{1b2b3}: '𛊳' CharacterDash1b2b3, /// \u{1b2b4}: '𛊴' CharacterDash1b2b4, /// \u{1b2b5}: '𛊵' CharacterDash1b2b5, /// \u{1b2b6}: '𛊶' CharacterDash1b2b6, /// \u{1b2b7}: '𛊷' CharacterDash1b2b7, /// \u{1b2b8}: '𛊸' CharacterDash1b2b8, /// \u{1b2b9}: '𛊹' CharacterDash1b2b9, /// \u{1b2ba}: '𛊺' CharacterDash1b2ba, /// \u{1b2bb}: '𛊻' CharacterDash1b2bb, /// \u{1b2bc}: '𛊼' CharacterDash1b2bc, /// \u{1b2bd}: '𛊽' CharacterDash1b2bd, /// \u{1b2be}: '𛊾' CharacterDash1b2be, /// \u{1b2bf}: '𛊿' CharacterDash1b2bf, /// \u{1b2c0}: '𛋀' CharacterDash1b2c0, /// \u{1b2c1}: '𛋁' CharacterDash1b2c1, /// \u{1b2c2}: '𛋂' CharacterDash1b2c2, /// \u{1b2c3}: '𛋃' CharacterDash1b2c3, /// \u{1b2c4}: '𛋄' CharacterDash1b2c4, /// \u{1b2c5}: '𛋅' CharacterDash1b2c5, /// \u{1b2c6}: '𛋆' CharacterDash1b2c6, /// \u{1b2c7}: '𛋇' CharacterDash1b2c7, /// \u{1b2c8}: '𛋈' CharacterDash1b2c8, /// \u{1b2c9}: '𛋉' CharacterDash1b2c9, /// \u{1b2ca}: '𛋊' CharacterDash1b2ca, /// \u{1b2cb}: '𛋋' CharacterDash1b2cb, /// \u{1b2cc}: '𛋌' CharacterDash1b2cc, /// \u{1b2cd}: '𛋍' CharacterDash1b2cd, /// \u{1b2ce}: '𛋎' CharacterDash1b2ce, /// \u{1b2cf}: '𛋏' CharacterDash1b2cf, /// \u{1b2d0}: '𛋐' CharacterDash1b2d0, /// \u{1b2d1}: '𛋑' CharacterDash1b2d1, /// \u{1b2d2}: '𛋒' 
CharacterDash1b2d2, /// \u{1b2d3}: '𛋓' CharacterDash1b2d3, /// \u{1b2d4}: '𛋔' CharacterDash1b2d4, /// \u{1b2d5}: '𛋕' CharacterDash1b2d5, /// \u{1b2d6}: '𛋖' CharacterDash1b2d6, /// \u{1b2d7}: '𛋗' CharacterDash1b2d7, /// \u{1b2d8}: '𛋘' CharacterDash1b2d8, /// \u{1b2d9}: '𛋙' CharacterDash1b2d9, /// \u{1b2da}: '𛋚' CharacterDash1b2da, /// \u{1b2db}: '𛋛' CharacterDash1b2db, /// \u{1b2dc}: '𛋜' CharacterDash1b2dc, /// \u{1b2dd}: '𛋝' CharacterDash1b2dd, /// \u{1b2de}: '𛋞' CharacterDash1b2de, /// \u{1b2df}: '𛋟' CharacterDash1b2df, /// \u{1b2e0}: '𛋠' CharacterDash1b2e0, /// \u{1b2e1}: '𛋡' CharacterDash1b2e1, /// \u{1b2e2}: '𛋢' CharacterDash1b2e2, /// \u{1b2e3}: '𛋣' CharacterDash1b2e3, /// \u{1b2e4}: '𛋤' CharacterDash1b2e4, /// \u{1b2e5}: '𛋥' CharacterDash1b2e5, /// \u{1b2e6}: '𛋦' CharacterDash1b2e6, /// \u{1b2e7}: '𛋧' CharacterDash1b2e7, /// \u{1b2e8}: '𛋨' CharacterDash1b2e8, /// \u{1b2e9}: '𛋩' CharacterDash1b2e9, /// \u{1b2ea}: '𛋪' CharacterDash1b2ea, /// \u{1b2eb}: '𛋫' CharacterDash1b2eb, /// \u{1b2ec}: '𛋬' CharacterDash1b2ec, /// \u{1b2ed}: '𛋭' CharacterDash1b2ed, /// \u{1b2ee}: '𛋮' CharacterDash1b2ee, /// \u{1b2ef}: '𛋯' CharacterDash1b2ef, /// \u{1b2f0}: '𛋰' CharacterDash1b2f0, /// \u{1b2f1}: '𛋱' CharacterDash1b2f1, /// \u{1b2f2}: '𛋲' CharacterDash1b2f2, /// \u{1b2f3}: '𛋳' CharacterDash1b2f3, /// \u{1b2f4}: '𛋴' CharacterDash1b2f4, /// \u{1b2f5}: '𛋵' CharacterDash1b2f5, /// \u{1b2f6}: '𛋶' CharacterDash1b2f6, /// \u{1b2f7}: '𛋷' CharacterDash1b2f7, /// \u{1b2f8}: '𛋸' CharacterDash1b2f8, /// \u{1b2f9}: '𛋹' CharacterDash1b2f9, /// \u{1b2fa}: '𛋺' CharacterDash1b2fa, /// \u{1b2fb}: '𛋻' CharacterDash1b2fb, } impl Into<char> for Nushu { fn into(self) -> char { match self { Nushu::CharacterDash1b170 => '𛅰', Nushu::CharacterDash1b171 => '𛅱', Nushu::CharacterDash1b172 => '𛅲', Nushu::CharacterDash1b173 => '𛅳', Nushu::CharacterDash1b174 => '𛅴', Nushu::CharacterDash1b175 => '𛅵', Nushu::CharacterDash1b176 => '𛅶', Nushu::CharacterDash1b177 => '𛅷', Nushu::CharacterDash1b178 => '𛅸', 
Nushu::CharacterDash1b179 => '𛅹', Nushu::CharacterDash1b17a => '𛅺', Nushu::CharacterDash1b17b => '𛅻', Nushu::CharacterDash1b17c => '𛅼', Nushu::CharacterDash1b17d => '𛅽', Nushu::CharacterDash1b17e => '𛅾', Nushu::CharacterDash1b17f => '𛅿', Nushu::CharacterDash1b180 => '𛆀', Nushu::CharacterDash1b181 => '𛆁', Nushu::CharacterDash1b182 => '𛆂', Nushu::CharacterDash1b183 => '𛆃', Nushu::CharacterDash1b184 => '𛆄', Nushu::CharacterDash1b185 => '𛆅', Nushu::CharacterDash1b186 => '𛆆', Nushu::CharacterDash1b187 => '𛆇', Nushu::CharacterDash1b188 => '𛆈', Nushu::CharacterDash1b189 => '𛆉', Nushu::CharacterDash1b18a => '𛆊', Nushu::CharacterDash1b18b => '𛆋', Nushu::CharacterDash1b18c => '𛆌', Nushu::CharacterDash1b18d => '𛆍', Nushu::CharacterDash1b18e => '𛆎', Nushu::CharacterDash1b18f => '𛆏', Nushu::CharacterDash1b190 => '𛆐', Nushu::CharacterDash1b191 => '𛆑', Nushu::CharacterDash1b192 => '𛆒', Nushu::CharacterDash1b193 => '𛆓', Nushu::CharacterDash1b194 => '𛆔', Nushu::CharacterDash1b195 => '𛆕', Nushu::CharacterDash1b196 => '𛆖', Nushu::CharacterDash1b197 => '𛆗', Nushu::CharacterDash1b198 => '𛆘', Nushu::CharacterDash1b199 => '𛆙', Nushu::CharacterDash1b19a => '𛆚', Nushu::CharacterDash1b19b => '𛆛', Nushu::CharacterDash1b19c => '𛆜', Nushu::CharacterDash1b19d => '𛆝', Nushu::CharacterDash1b19e => '𛆞', Nushu::CharacterDash1b19f => '𛆟', Nushu::CharacterDash1b1a0 => '𛆠', Nushu::CharacterDash1b1a1 => '𛆡', Nushu::CharacterDash1b1a2 => '𛆢', Nushu::CharacterDash1b1a3 => '𛆣', Nushu::CharacterDash1b1a4 => '𛆤', Nushu::CharacterDash1b1a5 => '𛆥', Nushu::CharacterDash1b1a6 => '𛆦', Nushu::CharacterDash1b1a7 => '𛆧', Nushu::CharacterDash1b1a8 => '𛆨', Nushu::CharacterDash1b1a9 => '𛆩', Nushu::CharacterDash1b1aa => '𛆪', Nushu::CharacterDash1b1ab => '𛆫', Nushu::CharacterDash1b1ac => '𛆬', Nushu::CharacterDash1b1ad => '𛆭', Nushu::CharacterDash1b1ae => '𛆮', Nushu::CharacterDash1b1af => '𛆯', Nushu::CharacterDash1b1b0 => '𛆰', Nushu::CharacterDash1b1b1 => '𛆱', Nushu::CharacterDash1b1b2 => '𛆲', Nushu::CharacterDash1b1b3 
=> '𛆳', Nushu::CharacterDash1b1b4 => '𛆴', Nushu::CharacterDash1b1b5 => '𛆵', Nushu::CharacterDash1b1b6 => '𛆶', Nushu::CharacterDash1b1b7 => '𛆷', Nushu::CharacterDash1b1b8 => '𛆸', Nushu::CharacterDash1b1b9 => '𛆹', Nushu::CharacterDash1b1ba => '𛆺', Nushu::CharacterDash1b1bb => '𛆻', Nushu::CharacterDash1b1bc => '𛆼', Nushu::CharacterDash1b1bd => '𛆽', Nushu::CharacterDash1b1be => '𛆾', Nushu::CharacterDash1b1bf => '𛆿', Nushu::CharacterDash1b1c0 => '𛇀', Nushu::CharacterDash1b1c1 => '𛇁', Nushu::CharacterDash1b1c2 => '𛇂', Nushu::CharacterDash1b1c3 => '𛇃', Nushu::CharacterDash1b1c4 => '𛇄', Nushu::CharacterDash1b1c5 => '𛇅', Nushu::CharacterDash1b1c6 => '𛇆', Nushu::CharacterDash1b1c7 => '𛇇', Nushu::CharacterDash1b1c8 => '𛇈', Nushu::CharacterDash1b1c9 => '𛇉', Nushu::CharacterDash1b1ca => '𛇊', Nushu::CharacterDash1b1cb => '𛇋', Nushu::CharacterDash1b1cc => '𛇌', Nushu::CharacterDash1b1cd => '𛇍', Nushu::CharacterDash1b1ce => '𛇎', Nushu::CharacterDash1b1cf => '𛇏', Nushu::CharacterDash1b1d0 => '𛇐', Nushu::CharacterDash1b1d1 => '𛇑', Nushu::CharacterDash1b1d2 => '𛇒', Nushu::CharacterDash1b1d3 => '𛇓', Nushu::CharacterDash1b1d4 => '𛇔', Nushu::CharacterDash1b1d5 => '𛇕', Nushu::CharacterDash1b1d6 => '𛇖', Nushu::CharacterDash1b1d7 => '𛇗', Nushu::CharacterDash1b1d8 => '𛇘', Nushu::CharacterDash1b1d9 => '𛇙', Nushu::CharacterDash1b1da => '𛇚', Nushu::CharacterDash1b1db => '𛇛', Nushu::CharacterDash1b1dc => '𛇜', Nushu::CharacterDash1b1dd => '𛇝', Nushu::CharacterDash1b1de => '𛇞', Nushu::CharacterDash1b1df => '𛇟', Nushu::CharacterDash1b1e0 => '𛇠', Nushu::CharacterDash1b1e1 => '𛇡', Nushu::CharacterDash1b1e2 => '𛇢', Nushu::CharacterDash1b1e3 => '𛇣', Nushu::CharacterDash1b1e4 => '𛇤', Nushu::CharacterDash1b1e5 => '𛇥', Nushu::CharacterDash1b1e6 => '𛇦', Nushu::CharacterDash1b1e7 => '𛇧', Nushu::CharacterDash1b1e8 => '𛇨', Nushu::CharacterDash1b1e9 => '𛇩', Nushu::CharacterDash1b1ea => '𛇪', Nushu::CharacterDash1b1eb => '𛇫', Nushu::CharacterDash1b1ec => '𛇬', Nushu::CharacterDash1b1ed => '𛇭', 
Nushu::CharacterDash1b1ee => '𛇮', Nushu::CharacterDash1b1ef => '𛇯', Nushu::CharacterDash1b1f0 => '𛇰', Nushu::CharacterDash1b1f1 => '𛇱', Nushu::CharacterDash1b1f2 => '𛇲', Nushu::CharacterDash1b1f3 => '𛇳', Nushu::CharacterDash1b1f4 => '𛇴', Nushu::CharacterDash1b1f5 => '𛇵', Nushu::CharacterDash1b1f6 => '𛇶', Nushu::CharacterDash1b1f7 => '𛇷', Nushu::CharacterDash1b1f8 => '𛇸', Nushu::CharacterDash1b1f9 => '𛇹', Nushu::CharacterDash1b1fa => '𛇺', Nushu::CharacterDash1b1fb => '𛇻', Nushu::CharacterDash1b1fc => '𛇼', Nushu::CharacterDash1b1fd => '𛇽', Nushu::CharacterDash1b1fe => '𛇾', Nushu::CharacterDash1b1ff => '𛇿', Nushu::CharacterDash1b200 => '𛈀', Nushu::CharacterDash1b201 => '𛈁', Nushu::CharacterDash1b202 => '𛈂', Nushu::CharacterDash1b203 => '𛈃', Nushu::CharacterDash1b204 => '𛈄', Nushu::CharacterDash1b205 => '𛈅', Nushu::CharacterDash1b206 => '𛈆', Nushu::CharacterDash1b207 => '𛈇', Nushu::CharacterDash1b208 => '𛈈', Nushu::CharacterDash1b209 => '𛈉', Nushu::CharacterDash1b20a => '𛈊', Nushu::CharacterDash1b20b => '𛈋', Nushu::CharacterDash1b20c => '𛈌', Nushu::CharacterDash1b20d => '𛈍', Nushu::CharacterDash1b20e => '𛈎', Nushu::CharacterDash1b20f => '𛈏', Nushu::CharacterDash1b210 => '𛈐', Nushu::CharacterDash1b211 => '𛈑', Nushu::CharacterDash1b212 => '𛈒', Nushu::CharacterDash1b213 => '𛈓', Nushu::CharacterDash1b214 => '𛈔', Nushu::CharacterDash1b215 => '𛈕', Nushu::CharacterDash1b216 => '𛈖', Nushu::CharacterDash1b217 => '𛈗', Nushu::CharacterDash1b218 => '𛈘', Nushu::CharacterDash1b219 => '𛈙', Nushu::CharacterDash1b21a => '𛈚', Nushu::CharacterDash1b21b => '𛈛', Nushu::CharacterDash1b21c => '𛈜', Nushu::CharacterDash1b21d => '𛈝', Nushu::CharacterDash1b21e => '𛈞', Nushu::CharacterDash1b21f => '𛈟', Nushu::CharacterDash1b220 => '𛈠', Nushu::CharacterDash1b221 => '𛈡', Nushu::CharacterDash1b222 => '𛈢', Nushu::CharacterDash1b223 => '𛈣', Nushu::CharacterDash1b224 => '𛈤', Nushu::CharacterDash1b225 => '𛈥', Nushu::CharacterDash1b226 => '𛈦', Nushu::CharacterDash1b227 => '𛈧', Nushu::CharacterDash1b228 
=> '𛈨', Nushu::CharacterDash1b229 => '𛈩', Nushu::CharacterDash1b22a => '𛈪', Nushu::CharacterDash1b22b => '𛈫', Nushu::CharacterDash1b22c => '𛈬', Nushu::CharacterDash1b22d => '𛈭', Nushu::CharacterDash1b22e => '𛈮', Nushu::CharacterDash1b22f => '𛈯', Nushu::CharacterDash1b230 => '𛈰', Nushu::CharacterDash1b231 => '𛈱', Nushu::CharacterDash1b232 => '𛈲', Nushu::CharacterDash1b233 => '𛈳', Nushu::CharacterDash1b234 => '𛈴', Nushu::CharacterDash1b235 => '𛈵', Nushu::CharacterDash1b236 => '𛈶', Nushu::CharacterDash1b237 => '𛈷', Nushu::CharacterDash1b238 => '𛈸', Nushu::CharacterDash1b239 => '𛈹', Nushu::CharacterDash1b23a => '𛈺', Nushu::CharacterDash1b23b => '𛈻', Nushu::CharacterDash1b23c => '𛈼', Nushu::CharacterDash1b23d => '𛈽', Nushu::CharacterDash1b23e => '𛈾', Nushu::CharacterDash1b23f => '𛈿', Nushu::CharacterDash1b240 => '𛉀', Nushu::CharacterDash1b241 => '𛉁', Nushu::CharacterDash1b242 => '𛉂', Nushu::CharacterDash1b243 => '𛉃', Nushu::CharacterDash1b244 => '𛉄', Nushu::CharacterDash1b245 => '𛉅', Nushu::CharacterDash1b246 => '𛉆', Nushu::CharacterDash1b247 => '𛉇', Nushu::CharacterDash1b248 => '𛉈', Nushu::CharacterDash1b249 => '𛉉', Nushu::CharacterDash1b24a => '𛉊', Nushu::CharacterDash1b24b => '𛉋', Nushu::CharacterDash1b24c => '𛉌', Nushu::CharacterDash1b24d => '𛉍', Nushu::CharacterDash1b24e => '𛉎', Nushu::CharacterDash1b24f => '𛉏', Nushu::CharacterDash1b250 => '𛉐', Nushu::CharacterDash1b251 => '𛉑', Nushu::CharacterDash1b252 => '𛉒', Nushu::CharacterDash1b253 => '𛉓', Nushu::CharacterDash1b254 => '𛉔', Nushu::CharacterDash1b255 => '𛉕', Nushu::CharacterDash1b256 => '𛉖', Nushu::CharacterDash1b257 => '𛉗', Nushu::CharacterDash1b258 => '𛉘', Nushu::CharacterDash1b259 => '𛉙', Nushu::CharacterDash1b25a => '𛉚', Nushu::CharacterDash1b25b => '𛉛', Nushu::CharacterDash1b25c => '𛉜', Nushu::CharacterDash1b25d => '𛉝', Nushu::CharacterDash1b25e => '𛉞', Nushu::CharacterDash1b25f => '𛉟', Nushu::CharacterDash1b260 => '𛉠', Nushu::CharacterDash1b261 => '𛉡', Nushu::CharacterDash1b262 => '𛉢', 
Nushu::CharacterDash1b263 => '𛉣', Nushu::CharacterDash1b264 => '𛉤', Nushu::CharacterDash1b265 => '𛉥', Nushu::CharacterDash1b266 => '𛉦', Nushu::CharacterDash1b267 => '𛉧', Nushu::CharacterDash1b268 => '𛉨', Nushu::CharacterDash1b269 => '𛉩', Nushu::CharacterDash1b26a => '𛉪', Nushu::CharacterDash1b26b => '𛉫', Nushu::CharacterDash1b26c => '𛉬', Nushu::CharacterDash1b26d => '𛉭', Nushu::CharacterDash1b26e => '𛉮', Nushu::CharacterDash1b26f => '𛉯', Nushu::CharacterDash1b270 => '𛉰', Nushu::CharacterDash1b271 => '𛉱', Nushu::CharacterDash1b272 => '𛉲', Nushu::CharacterDash1b273 => '𛉳', Nushu::CharacterDash1b274 => '𛉴', Nushu::CharacterDash1b275 => '𛉵', Nushu::CharacterDash1b276 => '𛉶', Nushu::CharacterDash1b277 => '𛉷', Nushu::CharacterDash1b278 => '𛉸', Nushu::CharacterDash1b279 => '𛉹', Nushu::CharacterDash1b27a => '𛉺', Nushu::CharacterDash1b27b => '𛉻', Nushu::CharacterDash1b27c => '𛉼', Nushu::CharacterDash1b27d => '𛉽', Nushu::CharacterDash1b27e => '𛉾', Nushu::CharacterDash1b27f => '𛉿', Nushu::CharacterDash1b280 => '𛊀', Nushu::CharacterDash1b281 => '𛊁', Nushu::CharacterDash1b282 => '𛊂', Nushu::CharacterDash1b283 => '𛊃', Nushu::CharacterDash1b284 => '𛊄', Nushu::CharacterDash1b285 => '𛊅', Nushu::CharacterDash1b286 => '𛊆', Nushu::CharacterDash1b287 => '𛊇', Nushu::CharacterDash1b288 => '𛊈', Nushu::CharacterDash1b289 => '𛊉', Nushu::CharacterDash1b28a => '𛊊', Nushu::CharacterDash1b28b => '𛊋', Nushu::CharacterDash1b28c => '𛊌', Nushu::CharacterDash1b28d => '𛊍', Nushu::CharacterDash1b28e => '𛊎', Nushu::CharacterDash1b28f => '𛊏', Nushu::CharacterDash1b290 => '𛊐', Nushu::CharacterDash1b291 => '𛊑', Nushu::CharacterDash1b292 => '𛊒', Nushu::CharacterDash1b293 => '𛊓', Nushu::CharacterDash1b294 => '𛊔', Nushu::CharacterDash1b295 => '𛊕', Nushu::CharacterDash1b296 => '𛊖', Nushu::CharacterDash1b297 => '𛊗', Nushu::CharacterDash1b298 => '𛊘', Nushu::CharacterDash1b299 => '𛊙', Nushu::CharacterDash1b29a => '𛊚', Nushu::CharacterDash1b29b => '𛊛', Nushu::CharacterDash1b29c => '𛊜', Nushu::CharacterDash1b29d 
=> '𛊝', Nushu::CharacterDash1b29e => '𛊞', Nushu::CharacterDash1b29f => '𛊟', Nushu::CharacterDash1b2a0 => '𛊠', Nushu::CharacterDash1b2a1 => '𛊡', Nushu::CharacterDash1b2a2 => '𛊢', Nushu::CharacterDash1b2a3 => '𛊣', Nushu::CharacterDash1b2a4 => '𛊤', Nushu::CharacterDash1b2a5 => '𛊥', Nushu::CharacterDash1b2a6 => '𛊦', Nushu::CharacterDash1b2a7 => '𛊧', Nushu::CharacterDash1b2a8 => '𛊨', Nushu::CharacterDash1b2a9 => '𛊩', Nushu::CharacterDash1b2aa => '𛊪', Nushu::CharacterDash1b2ab => '𛊫', Nushu::CharacterDash1b2ac => '𛊬', Nushu::CharacterDash1b2ad => '𛊭', Nushu::CharacterDash1b2ae => '𛊮', Nushu::CharacterDash1b2af => '𛊯', Nushu::CharacterDash1b2b0 => '𛊰', Nushu::CharacterDash1b2b1 => '𛊱', Nushu::CharacterDash1b2b2 => '𛊲', Nushu::CharacterDash1b2b3 => '𛊳', Nushu::CharacterDash1b2b4 => '𛊴', Nushu::CharacterDash1b2b5 => '𛊵', Nushu::CharacterDash1b2b6 => '𛊶', Nushu::CharacterDash1b2b7 => '𛊷', Nushu::CharacterDash1b2b8 => '𛊸', Nushu::CharacterDash1b2b9 => '𛊹', Nushu::CharacterDash1b2ba => '𛊺', Nushu::CharacterDash1b2bb => '𛊻', Nushu::CharacterDash1b2bc => '𛊼', Nushu::CharacterDash1b2bd => '𛊽', Nushu::CharacterDash1b2be => '𛊾', Nushu::CharacterDash1b2bf => '𛊿', Nushu::CharacterDash1b2c0 => '𛋀', Nushu::CharacterDash1b2c1 => '𛋁', Nushu::CharacterDash1b2c2 => '𛋂', Nushu::CharacterDash1b2c3 => '𛋃', Nushu::CharacterDash1b2c4 => '𛋄', Nushu::CharacterDash1b2c5 => '𛋅', Nushu::CharacterDash1b2c6 => '𛋆', Nushu::CharacterDash1b2c7 => '𛋇', Nushu::CharacterDash1b2c8 => '𛋈', Nushu::CharacterDash1b2c9 => '𛋉', Nushu::CharacterDash1b2ca => '𛋊', Nushu::CharacterDash1b2cb => '𛋋', Nushu::CharacterDash1b2cc => '𛋌', Nushu::CharacterDash1b2cd => '𛋍', Nushu::CharacterDash1b2ce => '𛋎', Nushu::CharacterDash1b2cf => '𛋏', Nushu::CharacterDash1b2d0 => '𛋐', Nushu::CharacterDash1b2d1 => '𛋑', Nushu::CharacterDash1b2d2 => '𛋒', Nushu::CharacterDash1b2d3 => '𛋓', Nushu::CharacterDash1b2d4 => '𛋔', Nushu::CharacterDash1b2d5 => '𛋕', Nushu::CharacterDash1b2d6 => '𛋖', Nushu::CharacterDash1b2d7 => '𛋗', 
Nushu::CharacterDash1b2d8 => '𛋘', Nushu::CharacterDash1b2d9 => '𛋙', Nushu::CharacterDash1b2da => '𛋚', Nushu::CharacterDash1b2db => '𛋛', Nushu::CharacterDash1b2dc => '𛋜', Nushu::CharacterDash1b2dd => '𛋝', Nushu::CharacterDash1b2de => '𛋞', Nushu::CharacterDash1b2df => '𛋟', Nushu::CharacterDash1b2e0 => '𛋠', Nushu::CharacterDash1b2e1 => '𛋡', Nushu::CharacterDash1b2e2 => '𛋢', Nushu::CharacterDash1b2e3 => '𛋣', Nushu::CharacterDash1b2e4 => '𛋤', Nushu::CharacterDash1b2e5 => '𛋥', Nushu::CharacterDash1b2e6 => '𛋦', Nushu::CharacterDash1b2e7 => '𛋧', Nushu::CharacterDash1b2e8 => '𛋨', Nushu::CharacterDash1b2e9 => '𛋩', Nushu::CharacterDash1b2ea => '𛋪', Nushu::CharacterDash1b2eb => '𛋫', Nushu::CharacterDash1b2ec => '𛋬', Nushu::CharacterDash1b2ed => '𛋭', Nushu::CharacterDash1b2ee => '𛋮', Nushu::CharacterDash1b2ef => '𛋯', Nushu::CharacterDash1b2f0 => '𛋰', Nushu::CharacterDash1b2f1 => '𛋱', Nushu::CharacterDash1b2f2 => '𛋲', Nushu::CharacterDash1b2f3 => '𛋳', Nushu::CharacterDash1b2f4 => '𛋴', Nushu::CharacterDash1b2f5 => '𛋵', Nushu::CharacterDash1b2f6 => '𛋶', Nushu::CharacterDash1b2f7 => '𛋷', Nushu::CharacterDash1b2f8 => '𛋸', Nushu::CharacterDash1b2f9 => '𛋹', Nushu::CharacterDash1b2fa => '𛋺', Nushu::CharacterDash1b2fb => '𛋻', } } } impl std::convert::TryFrom<char> for Nushu { type Error = (); fn try_from(c: char) -> Result<Self, Self::Error> { match c { '𛅰' => Ok(Nushu::CharacterDash1b170), '𛅱' => Ok(Nushu::CharacterDash1b171), '𛅲' => Ok(Nushu::CharacterDash1b172), '𛅳' => Ok(Nushu::CharacterDash1b173), '𛅴' => Ok(Nushu::CharacterDash1b174), '𛅵' => Ok(Nushu::CharacterDash1b175), '𛅶' => Ok(Nushu::CharacterDash1b176), '𛅷' => Ok(Nushu::CharacterDash1b177), '𛅸' => Ok(Nushu::CharacterDash1b178), '𛅹' => Ok(Nushu::CharacterDash1b179), '𛅺' => Ok(Nushu::CharacterDash1b17a), '𛅻' => Ok(Nushu::CharacterDash1b17b), '𛅼' => Ok(Nushu::CharacterDash1b17c), '𛅽' => Ok(Nushu::CharacterDash1b17d), '𛅾' => Ok(Nushu::CharacterDash1b17e), '𛅿' => Ok(Nushu::CharacterDash1b17f), '𛆀' => Ok(Nushu::CharacterDash1b180), 
'𛆁' => Ok(Nushu::CharacterDash1b181), '𛆂' => Ok(Nushu::CharacterDash1b182), '𛆃' => Ok(Nushu::CharacterDash1b183), '𛆄' => Ok(Nushu::CharacterDash1b184), '𛆅' => Ok(Nushu::CharacterDash1b185), '𛆆' => Ok(Nushu::CharacterDash1b186), '𛆇' => Ok(Nushu::CharacterDash1b187), '𛆈' => Ok(Nushu::CharacterDash1b188), '𛆉' => Ok(Nushu::CharacterDash1b189), '𛆊' => Ok(Nushu::CharacterDash1b18a), '𛆋' => Ok(Nushu::CharacterDash1b18b), '𛆌' => Ok(Nushu::CharacterDash1b18c), '𛆍' => Ok(Nushu::CharacterDash1b18d), '𛆎' => Ok(Nushu::CharacterDash1b18e), '𛆏' => Ok(Nushu::CharacterDash1b18f), '𛆐' => Ok(Nushu::CharacterDash1b190), '𛆑' => Ok(Nushu::CharacterDash1b191), '𛆒' => Ok(Nushu::CharacterDash1b192), '𛆓' => Ok(Nushu::CharacterDash1b193), '𛆔' => Ok(Nushu::CharacterDash1b194), '𛆕' => Ok(Nushu::CharacterDash1b195), '𛆖' => Ok(Nushu::CharacterDash1b196), '𛆗' => Ok(Nushu::CharacterDash1b197), '𛆘' => Ok(Nushu::CharacterDash1b198), '𛆙' => Ok(Nushu::CharacterDash1b199), '𛆚' => Ok(Nushu::CharacterDash1b19a), '𛆛' => Ok(Nushu::CharacterDash1b19b), '𛆜' => Ok(Nushu::CharacterDash1b19c), '𛆝' => Ok(Nushu::CharacterDash1b19d), '𛆞' => Ok(Nushu::CharacterDash1b19e), '𛆟' => Ok(Nushu::CharacterDash1b19f), '𛆠' => Ok(Nushu::CharacterDash1b1a0), '𛆡' => Ok(Nushu::CharacterDash1b1a1), '𛆢' => Ok(Nushu::CharacterDash1b1a2), '𛆣' => Ok(Nushu::CharacterDash1b1a3), '𛆤' => Ok(Nushu::CharacterDash1b1a4), '𛆥' => Ok(Nushu::CharacterDash1b1a5), '𛆦' => Ok(Nushu::CharacterDash1b1a6), '𛆧' => Ok(Nushu::CharacterDash1b1a7), '𛆨' => Ok(Nushu::CharacterDash1b1a8), '𛆩' => Ok(Nushu::CharacterDash1b1a9), '𛆪' => Ok(Nushu::CharacterDash1b1aa), '𛆫' => Ok(Nushu::CharacterDash1b1ab), '𛆬' => Ok(Nushu::CharacterDash1b1ac), '𛆭' => Ok(Nushu::CharacterDash1b1ad), '𛆮' => Ok(Nushu::CharacterDash1b1ae), '𛆯' => Ok(Nushu::CharacterDash1b1af), '𛆰' => Ok(Nushu::CharacterDash1b1b0), '𛆱' => Ok(Nushu::CharacterDash1b1b1), '𛆲' => Ok(Nushu::CharacterDash1b1b2), '𛆳' => Ok(Nushu::CharacterDash1b1b3), '𛆴' => Ok(Nushu::CharacterDash1b1b4), '𛆵' => 
Ok(Nushu::CharacterDash1b1b5), '𛆶' => Ok(Nushu::CharacterDash1b1b6), '𛆷' => Ok(Nushu::CharacterDash1b1b7), '𛆸' => Ok(Nushu::CharacterDash1b1b8), '𛆹' => Ok(Nushu::CharacterDash1b1b9), '𛆺' => Ok(Nushu::CharacterDash1b1ba), '𛆻' => Ok(Nushu::CharacterDash1b1bb), '𛆼' => Ok(Nushu::CharacterDash1b1bc), '𛆽' => Ok(Nushu::CharacterDash1b1bd), '𛆾' => Ok(Nushu::CharacterDash1b1be), '𛆿' => Ok(Nushu::CharacterDash1b1bf), '𛇀' => Ok(Nushu::CharacterDash1b1c0), '𛇁' => Ok(Nushu::CharacterDash1b1c1), '𛇂' => Ok(Nushu::CharacterDash1b1c2), '𛇃' => Ok(Nushu::CharacterDash1b1c3), '𛇄' => Ok(Nushu::CharacterDash1b1c4), '𛇅' => Ok(Nushu::CharacterDash1b1c5), '𛇆' => Ok(Nushu::CharacterDash1b1c6), '𛇇' => Ok(Nushu::CharacterDash1b1c7), '𛇈' => Ok(Nushu::CharacterDash1b1c8), '𛇉' => Ok(Nushu::CharacterDash1b1c9), '𛇊' => Ok(Nushu::CharacterDash1b1ca), '𛇋' => Ok(Nushu::CharacterDash1b1cb), '𛇌' => Ok(Nushu::CharacterDash1b1cc), '𛇍' => Ok(Nushu::CharacterDash1b1cd), '𛇎' => Ok(Nushu::CharacterDash1b1ce), '𛇏' => Ok(Nushu::CharacterDash1b1cf), '𛇐' => Ok(Nushu::CharacterDash1b1d0), '𛇑' => Ok(Nushu::CharacterDash1b1d1), '𛇒' => Ok(Nushu::CharacterDash1b1d2), '𛇓' => Ok(Nushu::CharacterDash1b1d3), '𛇔' => Ok(Nushu::CharacterDash1b1d4), '𛇕' => Ok(Nushu::CharacterDash1b1d5), '𛇖' => Ok(Nushu::CharacterDash1b1d6), '𛇗' => Ok(Nushu::CharacterDash1b1d7), '𛇘' => Ok(Nushu::CharacterDash1b1d8), '𛇙' => Ok(Nushu::CharacterDash1b1d9), '𛇚' => Ok(Nushu::CharacterDash1b1da), '𛇛' => Ok(Nushu::CharacterDash1b1db), '𛇜' => Ok(Nushu::CharacterDash1b1dc), '𛇝' => Ok(Nushu::CharacterDash1b1dd), '𛇞' => Ok(Nushu::CharacterDash1b1de), '𛇟' => Ok(Nushu::CharacterDash1b1df), '𛇠' => Ok(Nushu::CharacterDash1b1e0), '𛇡' => Ok(Nushu::CharacterDash1b1e1), '𛇢' => Ok(Nushu::CharacterDash1b1e2), '𛇣' => Ok(Nushu::CharacterDash1b1e3), '𛇤' => Ok(Nushu::CharacterDash1b1e4), '𛇥' => Ok(Nushu::CharacterDash1b1e5), '𛇦' => Ok(Nushu::CharacterDash1b1e6), '𛇧' => Ok(Nushu::CharacterDash1b1e7), '𛇨' => Ok(Nushu::CharacterDash1b1e8), '𛇩' => 
Ok(Nushu::CharacterDash1b1e9), '𛇪' => Ok(Nushu::CharacterDash1b1ea), '𛇫' => Ok(Nushu::CharacterDash1b1eb), '𛇬' => Ok(Nushu::CharacterDash1b1ec), '𛇭' => Ok(Nushu::CharacterDash1b1ed), '𛇮' => Ok(Nushu::CharacterDash1b1ee), '𛇯' => Ok(Nushu::CharacterDash1b1ef), '𛇰' => Ok(Nushu::CharacterDash1b1f0), '𛇱' => Ok(Nushu::CharacterDash1b1f1), '𛇲' => Ok(Nushu::CharacterDash1b1f2), '𛇳' => Ok(Nushu::CharacterDash1b1f3), '𛇴' => Ok(Nushu::CharacterDash1b1f4), '𛇵' => Ok(Nushu::CharacterDash1b1f5), '𛇶' => Ok(Nushu::CharacterDash1b1f6), '𛇷' => Ok(Nushu::CharacterDash1b1f7), '𛇸' => Ok(Nushu::CharacterDash1b1f8), '𛇹' => Ok(Nushu::CharacterDash1b1f9), '𛇺' => Ok(Nushu::CharacterDash1b1fa), '𛇻' => Ok(Nushu::CharacterDash1b1fb), '𛇼' => Ok(Nushu::CharacterDash1b1fc), '𛇽' => Ok(Nushu::CharacterDash1b1fd), '𛇾' => Ok(Nushu::CharacterDash1b1fe), '𛇿' => Ok(Nushu::CharacterDash1b1ff), '𛈀' => Ok(Nushu::CharacterDash1b200), '𛈁' => Ok(Nushu::CharacterDash1b201), '𛈂' => Ok(Nushu::CharacterDash1b202), '𛈃' => Ok(Nushu::CharacterDash1b203), '𛈄' => Ok(Nushu::CharacterDash1b204), '𛈅' => Ok(Nushu::CharacterDash1b205), '𛈆' => Ok(Nushu::CharacterDash1b206), '𛈇' => Ok(Nushu::CharacterDash1b207), '𛈈' => Ok(Nushu::CharacterDash1b208), '𛈉' => Ok(Nushu::CharacterDash1b209), '𛈊' => Ok(Nushu::CharacterDash1b20a), '𛈋' => Ok(Nushu::CharacterDash1b20b), '𛈌' => Ok(Nushu::CharacterDash1b20c), '𛈍' => Ok(Nushu::CharacterDash1b20d), '𛈎' => Ok(Nushu::CharacterDash1b20e), '𛈏' => Ok(Nushu::CharacterDash1b20f), '𛈐' => Ok(Nushu::CharacterDash1b210), '𛈑' => Ok(Nushu::CharacterDash1b211), '𛈒' => Ok(Nushu::CharacterDash1b212), '𛈓' => Ok(Nushu::CharacterDash1b213), '𛈔' => Ok(Nushu::CharacterDash1b214), '𛈕' => Ok(Nushu::CharacterDash1b215), '𛈖' => Ok(Nushu::CharacterDash1b216), '𛈗' => Ok(Nushu::CharacterDash1b217), '𛈘' => Ok(Nushu::CharacterDash1b218), '𛈙' => Ok(Nushu::CharacterDash1b219), '𛈚' => Ok(Nushu::CharacterDash1b21a), '𛈛' => Ok(Nushu::CharacterDash1b21b), '𛈜' => Ok(Nushu::CharacterDash1b21c), '𛈝' => 
Ok(Nushu::CharacterDash1b21d), '𛈞' => Ok(Nushu::CharacterDash1b21e), '𛈟' => Ok(Nushu::CharacterDash1b21f), '𛈠' => Ok(Nushu::CharacterDash1b220), '𛈡' => Ok(Nushu::CharacterDash1b221), '𛈢' => Ok(Nushu::CharacterDash1b222), '𛈣' => Ok(Nushu::CharacterDash1b223), '𛈤' => Ok(Nushu::CharacterDash1b224), '𛈥' => Ok(Nushu::CharacterDash1b225), '𛈦' => Ok(Nushu::CharacterDash1b226), '𛈧' => Ok(Nushu::CharacterDash1b227), '𛈨' => Ok(Nushu::CharacterDash1b228), '𛈩' => Ok(Nushu::CharacterDash1b229), '𛈪' => Ok(Nushu::CharacterDash1b22a), '𛈫' => Ok(Nushu::CharacterDash1b22b), '𛈬' => Ok(Nushu::CharacterDash1b22c), '𛈭' => Ok(Nushu::CharacterDash1b22d), '𛈮' => Ok(Nushu::CharacterDash1b22e), '𛈯' => Ok(Nushu::CharacterDash1b22f), '𛈰' => Ok(Nushu::CharacterDash1b230), '𛈱' => Ok(Nushu::CharacterDash1b231), '𛈲' => Ok(Nushu::CharacterDash1b232), '𛈳' => Ok(Nushu::CharacterDash1b233), '𛈴' => Ok(Nushu::CharacterDash1b234), '𛈵' => Ok(Nushu::CharacterDash1b235), '𛈶' => Ok(Nushu::CharacterDash1b236), '𛈷' => Ok(Nushu::CharacterDash1b237), '𛈸' => Ok(Nushu::CharacterDash1b238), '𛈹' => Ok(Nushu::CharacterDash1b239), '𛈺' => Ok(Nushu::CharacterDash1b23a), '𛈻' => Ok(Nushu::CharacterDash1b23b), '𛈼' => Ok(Nushu::CharacterDash1b23c), '𛈽' => Ok(Nushu::CharacterDash1b23d), '𛈾' => Ok(Nushu::CharacterDash1b23e), '𛈿' => Ok(Nushu::CharacterDash1b23f), '𛉀' => Ok(Nushu::CharacterDash1b240), '𛉁' => Ok(Nushu::CharacterDash1b241), '𛉂' => Ok(Nushu::CharacterDash1b242), '𛉃' => Ok(Nushu::CharacterDash1b243), '𛉄' => Ok(Nushu::CharacterDash1b244), '𛉅' => Ok(Nushu::CharacterDash1b245), '𛉆' => Ok(Nushu::CharacterDash1b246), '𛉇' => Ok(Nushu::CharacterDash1b247), '𛉈' => Ok(Nushu::CharacterDash1b248), '𛉉' => Ok(Nushu::CharacterDash1b249), '𛉊' => Ok(Nushu::CharacterDash1b24a), '𛉋' => Ok(Nushu::CharacterDash1b24b), '𛉌' => Ok(Nushu::CharacterDash1b24c), '𛉍' => Ok(Nushu::CharacterDash1b24d), '𛉎' => Ok(Nushu::CharacterDash1b24e), '𛉏' => Ok(Nushu::CharacterDash1b24f), '𛉐' => Ok(Nushu::CharacterDash1b250), '𛉑' => 
Ok(Nushu::CharacterDash1b251), '𛉒' => Ok(Nushu::CharacterDash1b252), '𛉓' => Ok(Nushu::CharacterDash1b253), '𛉔' => Ok(Nushu::CharacterDash1b254), '𛉕' => Ok(Nushu::CharacterDash1b255), '𛉖' => Ok(Nushu::CharacterDash1b256), '𛉗' => Ok(Nushu::CharacterDash1b257), '𛉘' => Ok(Nushu::CharacterDash1b258), '𛉙' => Ok(Nushu::CharacterDash1b259), '𛉚' => Ok(Nushu::CharacterDash1b25a), '𛉛' => Ok(Nushu::CharacterDash1b25b), '𛉜' => Ok(Nushu::CharacterDash1b25c), '𛉝' => Ok(Nushu::CharacterDash1b25d), '𛉞' => Ok(Nushu::CharacterDash1b25e), '𛉟' => Ok(Nushu::CharacterDash1b25f), '𛉠' => Ok(Nushu::CharacterDash1b260), '𛉡' => Ok(Nushu::CharacterDash1b261), '𛉢' => Ok(Nushu::CharacterDash1b262), '𛉣' => Ok(Nushu::CharacterDash1b263), '𛉤' => Ok(Nushu::CharacterDash1b264), '𛉥' => Ok(Nushu::CharacterDash1b265), '𛉦' => Ok(Nushu::CharacterDash1b266), '𛉧' => Ok(Nushu::CharacterDash1b267), '𛉨' => Ok(Nushu::CharacterDash1b268), '𛉩' => Ok(Nushu::CharacterDash1b269), '𛉪' => Ok(Nushu::CharacterDash1b26a), '𛉫' => Ok(Nushu::CharacterDash1b26b), '𛉬' => Ok(Nushu::CharacterDash1b26c), '𛉭' => Ok(Nushu::CharacterDash1b26d), '𛉮' => Ok(Nushu::CharacterDash1b26e), '𛉯' => Ok(Nushu::CharacterDash1b26f), '𛉰' => Ok(Nushu::CharacterDash1b270), '𛉱' => Ok(Nushu::CharacterDash1b271), '𛉲' => Ok(Nushu::CharacterDash1b272), '𛉳' => Ok(Nushu::CharacterDash1b273), '𛉴' => Ok(Nushu::CharacterDash1b274), '𛉵' => Ok(Nushu::CharacterDash1b275), '𛉶' => Ok(Nushu::CharacterDash1b276), '𛉷' => Ok(Nushu::CharacterDash1b277), '𛉸' => Ok(Nushu::CharacterDash1b278), '𛉹' => Ok(Nushu::CharacterDash1b279), '𛉺' => Ok(Nushu::CharacterDash1b27a), '𛉻' => Ok(Nushu::CharacterDash1b27b), '𛉼' => Ok(Nushu::CharacterDash1b27c), '𛉽' => Ok(Nushu::CharacterDash1b27d), '𛉾' => Ok(Nushu::CharacterDash1b27e), '𛉿' => Ok(Nushu::CharacterDash1b27f), '𛊀' => Ok(Nushu::CharacterDash1b280), '𛊁' => Ok(Nushu::CharacterDash1b281), '𛊂' => Ok(Nushu::CharacterDash1b282), '𛊃' => Ok(Nushu::CharacterDash1b283), '𛊄' => Ok(Nushu::CharacterDash1b284), '𛊅' => 
Ok(Nushu::CharacterDash1b285), '𛊆' => Ok(Nushu::CharacterDash1b286), '𛊇' => Ok(Nushu::CharacterDash1b287), '𛊈' => Ok(Nushu::CharacterDash1b288), '𛊉' => Ok(Nushu::CharacterDash1b289), '𛊊' => Ok(Nushu::CharacterDash1b28a), '𛊋' => Ok(Nushu::CharacterDash1b28b), '𛊌' => Ok(Nushu::CharacterDash1b28c), '𛊍' => Ok(Nushu::CharacterDash1b28d), '𛊎' => Ok(Nushu::CharacterDash1b28e), '𛊏' => Ok(Nushu::CharacterDash1b28f), '𛊐' => Ok(Nushu::CharacterDash1b290), '𛊑' => Ok(Nushu::CharacterDash1b291), '𛊒' => Ok(Nushu::CharacterDash1b292), '𛊓' => Ok(Nushu::CharacterDash1b293), '𛊔' => Ok(Nushu::CharacterDash1b294), '𛊕' => Ok(Nushu::CharacterDash1b295), '𛊖' => Ok(Nushu::CharacterDash1b296), '𛊗' => Ok(Nushu::CharacterDash1b297), '𛊘' => Ok(Nushu::CharacterDash1b298), '𛊙' => Ok(Nushu::CharacterDash1b299), '𛊚' => Ok(Nushu::CharacterDash1b29a), '𛊛' => Ok(Nushu::CharacterDash1b29b), '𛊜' => Ok(Nushu::CharacterDash1b29c), '𛊝' => Ok(Nushu::CharacterDash1b29d), '𛊞' => Ok(Nushu::CharacterDash1b29e), '𛊟' => Ok(Nushu::CharacterDash1b29f), '𛊠' => Ok(Nushu::CharacterDash1b2a0), '𛊡' => Ok(Nushu::CharacterDash1b2a1), '𛊢' => Ok(Nushu::CharacterDash1b2a2), '𛊣' => Ok(Nushu::CharacterDash1b2a3), '𛊤' => Ok(Nushu::CharacterDash1b2a4), '𛊥' => Ok(Nushu::CharacterDash1b2a5), '𛊦' => Ok(Nushu::CharacterDash1b2a6), '𛊧' => Ok(Nushu::CharacterDash1b2a7), '𛊨' => Ok(Nushu::CharacterDash1b2a8), '𛊩' => Ok(Nushu::CharacterDash1b2a9), '𛊪' => Ok(Nushu::CharacterDash1b2aa), '𛊫' => Ok(Nushu::CharacterDash1b2ab), '𛊬' => Ok(Nushu::CharacterDash1b2ac), '𛊭' => Ok(Nushu::CharacterDash1b2ad), '𛊮' => Ok(Nushu::CharacterDash1b2ae), '𛊯' => Ok(Nushu::CharacterDash1b2af), '𛊰' => Ok(Nushu::CharacterDash1b2b0), '𛊱' => Ok(Nushu::CharacterDash1b2b1), '𛊲' => Ok(Nushu::CharacterDash1b2b2), '𛊳' => Ok(Nushu::CharacterDash1b2b3), '𛊴' => Ok(Nushu::CharacterDash1b2b4), '𛊵' => Ok(Nushu::CharacterDash1b2b5), '𛊶' => Ok(Nushu::CharacterDash1b2b6), '𛊷' => Ok(Nushu::CharacterDash1b2b7), '𛊸' => Ok(Nushu::CharacterDash1b2b8), '𛊹' => 
Ok(Nushu::CharacterDash1b2b9), '𛊺' => Ok(Nushu::CharacterDash1b2ba), '𛊻' => Ok(Nushu::CharacterDash1b2bb), '𛊼' => Ok(Nushu::CharacterDash1b2bc), '𛊽' => Ok(Nushu::CharacterDash1b2bd), '𛊾' => Ok(Nushu::CharacterDash1b2be), '𛊿' => Ok(Nushu::CharacterDash1b2bf), '𛋀' => Ok(Nushu::CharacterDash1b2c0), '𛋁' => Ok(Nushu::CharacterDash1b2c1), '𛋂' => Ok(Nushu::CharacterDash1b2c2), '𛋃' => Ok(Nushu::CharacterDash1b2c3), '𛋄' => Ok(Nushu::CharacterDash1b2c4), '𛋅' => Ok(Nushu::CharacterDash1b2c5), '𛋆' => Ok(Nushu::CharacterDash1b2c6), '𛋇' => Ok(Nushu::CharacterDash1b2c7), '𛋈' => Ok(Nushu::CharacterDash1b2c8), '𛋉' => Ok(Nushu::CharacterDash1b2c9), '𛋊' => Ok(Nushu::CharacterDash1b2ca), '𛋋' => Ok(Nushu::CharacterDash1b2cb), '𛋌' => Ok(Nushu::CharacterDash1b2cc), '𛋍' => Ok(Nushu::CharacterDash1b2cd), '𛋎' => Ok(Nushu::CharacterDash1b2ce), '𛋏' => Ok(Nushu::CharacterDash1b2cf), '𛋐' => Ok(Nushu::CharacterDash1b2d0), '𛋑' => Ok(Nushu::CharacterDash1b2d1), '𛋒' => Ok(Nushu::CharacterDash1b2d2), '𛋓' => Ok(Nushu::CharacterDash1b2d3), '𛋔' => Ok(Nushu::CharacterDash1b2d4), '𛋕' => Ok(Nushu::CharacterDash1b2d5), '𛋖' => Ok(Nushu::CharacterDash1b2d6), '𛋗' => Ok(Nushu::CharacterDash1b2d7), '𛋘' => Ok(Nushu::CharacterDash1b2d8), '𛋙' => Ok(Nushu::CharacterDash1b2d9), '𛋚' => Ok(Nushu::CharacterDash1b2da), '𛋛' => Ok(Nushu::CharacterDash1b2db), '𛋜' => Ok(Nushu::CharacterDash1b2dc), '𛋝' => Ok(Nushu::CharacterDash1b2dd), '𛋞' => Ok(Nushu::CharacterDash1b2de), '𛋟' => Ok(Nushu::CharacterDash1b2df), '𛋠' => Ok(Nushu::CharacterDash1b2e0), '𛋡' => Ok(Nushu::CharacterDash1b2e1), '𛋢' => Ok(Nushu::CharacterDash1b2e2), '𛋣' => Ok(Nushu::CharacterDash1b2e3), '𛋤' => Ok(Nushu::CharacterDash1b2e4), '𛋥' => Ok(Nushu::CharacterDash1b2e5), '𛋦' => Ok(Nushu::CharacterDash1b2e6), '𛋧' => Ok(Nushu::CharacterDash1b2e7), '𛋨' => Ok(Nushu::CharacterDash1b2e8), '𛋩' => Ok(Nushu::CharacterDash1b2e9), '𛋪' => Ok(Nushu::CharacterDash1b2ea), '𛋫' => Ok(Nushu::CharacterDash1b2eb), '𛋬' => Ok(Nushu::CharacterDash1b2ec), '𛋭' => 
Ok(Nushu::CharacterDash1b2ed),
            '𛋮' => Ok(Nushu::CharacterDash1b2ee),
            '𛋯' => Ok(Nushu::CharacterDash1b2ef),
            '𛋰' => Ok(Nushu::CharacterDash1b2f0),
            '𛋱' => Ok(Nushu::CharacterDash1b2f1),
            '𛋲' => Ok(Nushu::CharacterDash1b2f2),
            '𛋳' => Ok(Nushu::CharacterDash1b2f3),
            '𛋴' => Ok(Nushu::CharacterDash1b2f4),
            '𛋵' => Ok(Nushu::CharacterDash1b2f5),
            '𛋶' => Ok(Nushu::CharacterDash1b2f6),
            '𛋷' => Ok(Nushu::CharacterDash1b2f7),
            '𛋸' => Ok(Nushu::CharacterDash1b2f8),
            '𛋹' => Ok(Nushu::CharacterDash1b2f9),
            '𛋺' => Ok(Nushu::CharacterDash1b2fa),
            '𛋻' => Ok(Nushu::CharacterDash1b2fb),
            _ => Err(()),
        }
    }
}

/// Converts the character back to its Unicode code point.
impl Into<u32> for Nushu {
    fn into(self) -> u32 {
        let c: char = self.into();
        // A `char` IS its code point; `as u32` yields it directly. The
        // previous implementation round-tripped through the textual
        // `escape_unicode()` form ("\u{1b170}"), stripped the wrapper, and
        // re-parsed the hex — same result, but with two string allocations
        // and a fallible parse per call.
        c as u32
    }
}

/// Fallible conversion from a raw code point: valid only when the code
/// point is a valid `char` AND falls inside this block.
impl std::convert::TryFrom<u32> for Nushu {
    type Error = ();

    fn try_from(u: u32) -> Result<Self, Self::Error> {
        if let Ok(c) = char::try_from(u) {
            Self::try_from(c)
        } else {
            Err(())
        }
    }
}

/// Iterating a character yields the following characters of the block, in
/// code-point order, stopping at the end of the block.
impl Iterator for Nushu {
    type Item = Self;

    fn next(&mut self) -> Option<Self> {
        let index: u32 = (*self).into();
        use std::convert::TryFrom;
        Self::try_from(index + 1).ok()
    }
}

impl Nushu {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Nushu::CharacterDash1b170
    }

    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        // The Debug form of the variant (e.g. "CharacterDash1b170") is the
        // source for the human-readable name.
        let s = std::format!("Nushu{:#?}", self);
        string_morph::to_sentence_case(&s)
    }
}
use std::collections::HashMap;
use super::utils::http_get;
use crate::{error::Result, Market, MarketType};
use serde::{Deserialize, Serialize};
use serde_json::Value;

/// Returns the symbol names of all open BitMEX instruments matching
/// `market_type` (see `fetch_instruments` for the filtering rules).
pub(crate) fn fetch_symbols(market_type: MarketType) -> Result<Vec<String>> {
    let instruments = fetch_instruments(market_type)?;
    Ok(instruments
        .into_iter()
        .map(|x| x.symbol)
        .collect::<Vec<String>>())
}

/// Market metadata is not implemented for this exchange; always returns an
/// empty list.
pub(crate) fn fetch_markets(_market_type: MarketType) -> Result<Vec<Market>> {
    Ok(Vec::new())
}

/// One instrument record as returned by BitMEX's
/// `/api/v1/instrument/active` endpoint. Field names mirror the JSON
/// payload verbatim (hence `non_snake_case`); any fields not modeled
/// explicitly are captured in `extra` via `#[serde(flatten)]`.
#[derive(Clone, Serialize, Deserialize)]
#[allow(non_snake_case)]
struct Instrument {
    symbol: String,
    rootSymbol: String,
    state: String,
    typ: String,
    listing: String,
    front: String,
    expiry: Option<String>,
    settle: Option<String>,
    listedSettle: Option<String>,
    relistInterval: Option<String>,
    inverseLeg: String,
    sellLeg: String,
    buyLeg: String,
    optionStrikePcnt: Option<f64>,
    optionStrikeRound: Option<f64>,
    optionStrikePrice: Option<f64>,
    optionMultiplier: Option<f64>,
    positionCurrency: String,
    underlying: String,
    quoteCurrency: String,
    underlyingSymbol: String,
    reference: String,
    referenceSymbol: String,
    calcInterval: Option<String>,
    publishInterval: Option<String>,
    publishTime: Option<String>,
    maxOrderQty: i64,
    maxPrice: f64,
    lotSize: i64,
    tickSize: f64,
    multiplier: i64,
    settlCurrency: String,
    underlyingToPositionMultiplier: Option<i64>,
    underlyingToSettleMultiplier: Option<i64>,
    quoteToSettleMultiplier: Option<i64>,
    isQuanto: bool,
    isInverse: bool,
    initMargin: f64,
    maintMargin: f64,
    riskLimit: i64,
    riskStep: i64,
    limit: Option<i64>,
    capped: bool,
    taxed: bool,
    deleverage: bool,
    makerFee: f64,
    takerFee: f64,
    settlementFee: f64,
    insuranceFee: f64,
    fundingBaseSymbol: String,
    fundingQuoteSymbol: String,
    fundingPremiumSymbol: String,
    fundingTimestamp: Option<String>,
    fundingInterval: Option<String>,
    fundingRate: Option<f64>,
    indicativeFundingRate: Option<f64>,
    rebalanceTimestamp: Option<String>,
    rebalanceInterval: Option<String>,
    openingTimestamp: String,
    closingTimestamp: String,
    sessionInterval: String,
    prevTotalVolume: i64,
    totalVolume: i64,
    volume: i64,
    volume24h: i64,
    prevTotalTurnover: i64,
    totalTurnover: i64,
    turnover: i64,
    turnover24h: i64,
    homeNotional24h: f64,
    foreignNotional24h: f64,
    lastTickDirection: String,
    hasLiquidity: bool,
    openInterest: i64,
    openValue: i64,
    fairMethod: String,
    markMethod: String,
    timestamp: String,
    // Any JSON fields not modeled above are preserved here.
    #[serde(flatten)]
    extra: HashMap<String, Value>,
}

/// Downloads all active instruments, keeps only those with `state == "Open"`,
/// asserts BitMEX's data invariants, and filters the result by `market_type`.
///
/// Classification heuristic: an instrument whose symbol ends in a digit is a
/// future (dated contract), otherwise a swap.
///
/// NOTE(review): `&x.symbol[x.symbol.len() - 1..]` byte-slices the symbol;
/// this panics on an empty symbol or a multi-byte trailing character —
/// presumably symbols are always non-empty ASCII, TODO confirm. The
/// `assert!`s below also panic (rather than return `Err`) if BitMEX's data
/// ever violates the expected invariants.
fn fetch_instruments(market_type: MarketType) -> Result<Vec<Instrument>> {
    let text = http_get("https://www.bitmex.com/api/v1/instrument/active", None)?;
    let instruments: Vec<Instrument> = serde_json::from_str::<Vec<Instrument>>(&text)?
        .into_iter()
        .filter(|x| x.state == "Open")
        .collect();
    // Swaps: last character of the symbol is NOT a digit.
    let swap: Vec<Instrument> = instruments
        .iter()
        .filter(|x| (&x.symbol[x.symbol.len() - 1..]).parse::<i32>().is_err())
        .cloned()
        .collect();
    // Futures: last character of the symbol IS a digit.
    let futures: Vec<Instrument> = instruments
        .iter()
        .filter(|x| (&x.symbol[x.symbol.len() - 1..]).parse::<i32>().is_ok())
        .cloned()
        .collect();
    // Check
    // for x in instruments.iter() {
    //     assert_eq!(x.underlying, x.rootSymbol);
    //     assert_eq!("XBt".to_string(), x.settlCurrency);
    // }
    for x in swap.iter() {
        assert_eq!("FundingRate", x.fairMethod.as_str());
        // assert!(x.expiry.is_none()); // TODO: BitMEX data is not correct, comment it for now
        assert_eq!(x.symbol, format!("{}{}", x.underlying, x.quoteCurrency));
    }
    for x in futures.iter() {
        assert_eq!("ImpactMidPrice", x.fairMethod.as_str());
        assert!(x.expiry.is_some());
    }
    // Inverse
    for x in instruments.iter().filter(|x| x.isInverse) {
        assert_eq!("XBT".to_string(), x.rootSymbol);
        // USD, EUR
        assert_eq!(x.quoteCurrency, x.positionCurrency);
    }
    // Quanto
    for x in instruments.iter().filter(|x| x.isQuanto) {
        assert!(x.positionCurrency.is_empty());
    }
    // Linear
    for x in instruments.iter().filter(|x| !x.isQuanto && !x.isInverse) {
        assert_eq!(x.positionCurrency, x.rootSymbol);
        assert_eq!(x.settlCurrency.to_uppercase(), x.quoteCurrency);
    }
    // Select the requested subset; `Unknown` means "everything".
    let filtered: Vec<Instrument> = match market_type {
        MarketType::Unknown => instruments,
        MarketType::InverseSwap => swap.iter().filter(|x| x.isInverse).cloned().collect(),
        MarketType::QuantoSwap => swap.iter().filter(|x| x.isQuanto).cloned().collect(),
        MarketType::LinearFuture => futures
            .iter()
            .filter(|x| !x.isInverse && !x.isQuanto)
            .cloned()
            .collect(),
        MarketType::InverseFuture => futures.iter().filter(|x| x.isInverse).cloned().collect(),
        MarketType::QuantoFuture => futures.iter().filter(|x| x.isQuanto).cloned().collect(),
        _ => panic!("Unsupported market_type: {}", market_type),
    };
    Ok(filtered)
}
//! This module implements the algorithm for constructing a tree-child sequence for a set of trees. //! The function to construct a tree-child sequence is `tree_child_sequence()`. mod channel; mod master; mod search; mod worker; use self::master::Master; use self::search::Search; use tree::Tree; use std::fmt; /// An entry in a tree-child sequence #[derive(Clone, Debug, Eq, PartialEq)] pub enum Pair<T> { /// A pair (x, y) that represents a trivial cherry Trivial(T, T), /// A pair (x, y) that eliminates a non-trivial cherry NonTrivial(T, T), /// The final leaf left in every tree at the end of the sequence Final(T), } /// A tree-child sequence is just a sequence of `Pair`s pub type TcSeq<T> = Vec<Pair<T>>; /// Compute a tree-child sequence for a given set of trees pub fn tree_child_sequence<T: Clone + Send + 'static>( trees: Vec<Tree<T>>, num_threads: usize, poll_delay: Option<usize>, limit_fanout: bool, use_redundant_branch_opt: bool, binary: bool, ) -> Option<TcSeq<T>> { // Build the search state and eliminate initial trivial cherries let mut search = Search::new(trees, limit_fanout, use_redundant_branch_opt, binary); search.resolve_trivial_cherries(); // Do not start a search if the input is trivial if search.success() { Some(search.tc_seq().unwrap()) } else { Master::run(search, num_threads, poll_delay) } } /// Let's make pairs printable impl<T: fmt::Display> fmt::Display for Pair<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Pair::Trivial(u, v) => write!(f, "({}, {})", u, v), Pair::NonTrivial(u, v) => write!(f, "({}, {})", u, v), Pair::Final(u) => write!(f, "({}, -)", u), } } } #[cfg(test)] mod tests { use newick; use std::fmt::Write; use tree::TreeBuilder; /// Test tree_child_sequence #[test] fn tree_child_sequence() { let trees = { let mut builder = TreeBuilder::<String>::new(); let newick = "(a,((b,(c,d)),e));\n((a,b),((c,d),e));\n((a,b),(c,(d,e)));\n(a,((b,c),(d,e)));\n"; newick::parse_forest(&mut builder, newick).unwrap(); 
builder.trees() }; let seq = super::tree_child_sequence(trees, 32, Some(1), true, true); assert_eq!(seq.len(), 7); let mut string = String::new(); let mut first = true; write!(&mut string, "<").unwrap(); for pair in seq { if first { first = false; } else { write!(&mut string, ", ").unwrap(); } write!(&mut string, "{}", pair).unwrap(); } write!(&mut string, ">").unwrap(); assert!([ "<(d, c), (d, e), (b, c), (b, a), (c, e), (a, e), (e, -)>", "<(d, e), (d, c), (b, c), (b, a), (c, e), (a, e), (e, -)>", "<(b, a), (d, e), (d, c), (b, c), (c, e), (a, e), (e, -)>", ] .contains(&&string[..])); } /// A regression test with an instance where the code resolved trivial cherries even though /// both leaves in the cherry had already been cut that is we are in fact in a dead branch. #[test] fn regression_test_resolve_trivial_cherries() { let trees = { let mut builder = TreeBuilder::<String>::new(); let newick = "((a,b),(e,(d,c)));\n(b,(a,(e,(d,c))));\n(b,((e,d),(c,a)));\n"; newick::parse_forest(&mut builder, newick).unwrap(); builder.trees() }; let seq = super::tree_child_sequence(trees, 1, None, true, false); assert_eq!(seq.len(), 7); let mut string = String::new(); let mut first = true; write!(&mut string, "<").unwrap(); for pair in seq { if first { first = false; } else { write!(&mut string, ", ").unwrap(); } write!(&mut string, "{}", pair).unwrap(); } write!(&mut string, ">").unwrap(); assert_eq!( string, "<(b, a), (c, d), (c, a), (e, d), (a, d), (b, d), (d, -)>" ); } }
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use std::f32;

use crate::{edge::Edge, point::Point};

// Scales the deviation when computing the number of subdivisions used to
// flatten a curve.
const TOLERANCE: f32 = 3.0;
// Curves whose squared deviation is below this are emitted as a single line.
const PIXEL_ACCURACY: f32 = 0.25;

/// A path built incrementally from line segments and flattened quadratic /
/// cubic Bézier curves.
#[derive(Clone, Debug, Default)]
pub struct Path {
    // Flattened line segments accumulated so far.
    edges: Vec<Edge<f32>>,
    // Start point of the current sub-path; `Some` while a sub-path is open.
    closing_point: Option<Point<f32>>,
}

impl Path {
    pub fn new() -> Self {
        Self::default()
    }

    /// Read-only view of the accumulated edges.
    pub(crate) fn edges(&self) -> &[Edge<f32>] {
        &self.edges[..]
    }

    /// Appends a straight segment from `p0` to `p1`. The first point of the
    /// first segment becomes the sub-path's closing point (see `close`).
    pub fn line(&mut self, p0: Point<f32>, p1: Point<f32>) {
        if self.closing_point.is_none() {
            self.closing_point = Some(p0);
        }

        let edge = Edge::new(p0, p1);
        self.edges.push(edge);
    }

    /// Appends a quadratic Bézier (control point `p1`) flattened into line
    /// segments. Deviation here is the second difference `p0 + p2 - 2*p1`,
    /// which is zero exactly when the control point lies on the chord
    /// midpoint (i.e. the curve is a straight line).
    pub fn quad(&mut self, p0: Point<f32>, p1: Point<f32>, p2: Point<f32>) {
        let deviation_x = (p0.x + p2.x - 2.0 * p1.x).abs();
        let deviation_y = (p0.y + p2.y - 2.0 * p1.y).abs();
        let deviation_squared = deviation_x * deviation_x + deviation_y * deviation_y;

        if deviation_squared < PIXEL_ACCURACY {
            // Flat enough already: a single segment suffices.
            self.line(p0, p2);
            return;
        }

        // Subdivision count grows with the fourth root of the squared
        // deviation (i.e. sqrt of the deviation).
        let subdivisions = 1 + (TOLERANCE * deviation_squared).sqrt().sqrt().floor() as usize;
        let increment = (subdivisions as f32).recip();

        let mut p = p0;
        let mut t = 0.0;

        for _ in 0..subdivisions - 1 {
            t += increment;
            // De Casteljau evaluation at parameter t.
            let p_next = Point::lerp(t, Point::lerp(t, p0, p1), Point::lerp(t, p1, p2));

            self.line(p, p_next);
            p = p_next;
        }

        self.line(p, p2);
    }

    /// Appends a cubic Bézier (control points `p1`, `p2`) flattened into
    /// line segments, analogous to `quad`.
    ///
    /// NOTE(review): the deviation terms read `p0 + p2 - 3.0 * (p1 + p2)`;
    /// by analogy with `quad` (endpoints plus controls) this looks like it
    /// was meant to be `p0 + p3 - 3.0 * (p1 + p2)` — the endpoint `p3` is
    /// otherwise unused in the estimate. Confirm against the intended
    /// flattening heuristic before changing.
    pub fn cubic(&mut self, p0: Point<f32>, p1: Point<f32>, p2: Point<f32>, p3: Point<f32>) {
        let deviation_x = (p0.x + p2.x - 3.0 * (p1.x + p2.x)).abs();
        let deviation_y = (p0.y + p2.y - 3.0 * (p1.y + p2.y)).abs();
        let deviation_squared = deviation_x * deviation_x + deviation_y * deviation_y;

        if deviation_squared < PIXEL_ACCURACY {
            self.line(p0, p3);
            return;
        }

        let subdivisions = 1 + (TOLERANCE * deviation_squared).sqrt().sqrt().floor() as usize;
        let increment = (subdivisions as f32).recip();

        let mut p = p0;
        let mut t = 0.0;

        for _ in 0..subdivisions - 1 {
            t += increment;
            // De Casteljau evaluation at parameter t (three nested lerps).
            let p_next = Point::lerp(
                t,
                Point::lerp(t, Point::lerp(t, p0, p1), Point::lerp(t, p1, p2)),
                Point::lerp(t, Point::lerp(t, p1, p2), Point::lerp(t, p2, p3)),
            );

            self.line(p, p_next);
            p = p_next;
        }

        self.line(p, p3);
    }

    /// Closes the current sub-path by connecting the last edge's endpoint
    /// back to the sub-path's starting point (unless they already coincide).
    /// Returns `true` if there was an open sub-path to close.
    pub fn close(&mut self) -> bool {
        let last = self.edges.last().cloned();
        if let (Some(closing_point), Some(last)) = (self.closing_point, last) {
            if !closing_point.approx_eq(last.p1) {
                self.line(last.p1, closing_point);
            }

            self.closing_point = None;
            return true;
        }

        false
    }
}
/// Returns the `nth` word (1-based) of `s`, where words are separated by
/// single ASCII spaces.
///
/// If `s` has fewer than `nth` words, the last word is returned. Runs of
/// consecutive spaces delimit empty "words", matching the original
/// byte-scanning implementation.
///
/// Changes from the original: takes `&str` instead of `&String` (callers
/// passing `&String` still work via deref coercion), and `nth == 0` now
/// falls through to the last word instead of underflowing `nth - 1` and
/// panicking in debug builds.
fn nth_word(s: &str, nth: u32) -> &str {
    let mut last = s;
    for (index, word) in s.split(' ').enumerate() {
        if index as u32 + 1 == nth {
            return word;
        }
        last = word;
    }
    // Fewer than `nth` words: fall back to the final word, exactly like the
    // original's `&s[init..]` tail return.
    last
}

/// Convenience wrapper: the first word of `s`.
fn first_word(s: &str) -> &str {
    nth_word(s, 1)
}

/// Convenience wrapper: the second word of `s`.
fn second_word(s: &str) -> &str {
    nth_word(s, 2)
}

/// Convenience wrapper: the third word of `s`.
fn third_word(s: &str) -> &str {
    nth_word(s, 3)
}

fn main() {
    let s = String::from("hello world");

    let first = first_word(&s);
    println!("{}", first);

    let second = second_word(&s);
    println!("{}", second);

    let t = String::from("get third word");
    let third = third_word(&t);
    println!("{}", third);
}
/// Adds 3 to every element of a small vector, then prints each element on
/// its own line.
fn main() {
    let mut numbers = vec![10, 20, 30];

    // Mutate every element in place.
    numbers.iter_mut().for_each(|n| *n += 3);

    // Print the updated values.
    for n in numbers.iter() {
        println!("{}", n);
    }
}
use serde::{Deserialize, Serialize};

/// Kinds of messages exchanged between the coordinator and its subordinates.
/// NOTE(review): the Prepare/Commit/Abort/AckP2 variants suggest a
/// two-phase-commit style protocol — confirm exact semantics against the
/// coordinator/subordinate implementations.
#[derive(PartialEq, Eq, Clone, Serialize, Deserialize, Debug, Hash, Copy)]
pub enum MsgType {
    Prepare,
    Commit,
    Abort,
    AckP2,
    End,
    Ended,
    Err,
}

/// A message sent by the coordinator.
/// `xid` is presumably the transaction identifier the message refers to —
/// TODO confirm against the sender.
#[derive(PartialEq, Eq, Clone, Serialize, Deserialize, Debug)]
pub struct CoordMsg {
    pub xid: u16,
    pub mtype: MsgType,
}

/// Member Response
/// Mirrors `CoordMsg`: the subordinate echoes the transaction id with its
/// own message type.
#[derive(PartialEq, Eq, Clone, Serialize, Deserialize, Debug)]
pub struct SubordResponse {
    pub xid: u16,
    pub mtype: MsgType,
}
use anyhop::{Atom, Goal, Method, MethodResult, Task}; use anyhop::MethodResult::{TaskLists}; use anyhop::Task::Operator; use MethodResult::*; use Task::*; use log::{debug, error, info, trace, warn}; use SatelliteMethod::*; use crate::methods::SatelliteMethod::{ScheduleAll, ScheduleOne}; use crate::operators::SatelliteOperator::{Calibrate, SwitchOff, SwitchOn, TakeImage, TurnTo}; use super::operators::*; use fixed::types::I40F24; #[derive(Copy, Clone, PartialOrd, PartialEq, Ord, Eq, Debug)] pub enum SatelliteMethod { ScheduleAll, //state, satellite, instrument, mode, new_direction, previous_direction ScheduleOne(SatelliteEnum, SatelliteEnum, SatelliteEnum, SatelliteEnum, SatelliteEnum), //SatelliteState, Satellite, Instrument Switching(SatelliteEnum, SatelliteEnum), } #[derive(Clone, PartialOrd, PartialEq, Ord, Eq, Debug)] pub enum SatelliteStatus { Done, //state, satellite, instrument, mode, new_direction, previous_direction NotDone(u32, SatelliteEnum, SatelliteEnum, SatelliteEnum, SatelliteEnum, SatelliteEnum), } impl SatelliteStatus { pub fn new(identifier: u32, state: SatelliteState, satellite: SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum, previous_direction: SatelliteEnum, goal: SatelliteGoals) -> SatelliteStatus { return if is_satellite_done(state, &goal) { SatelliteStatus::Done } else { SatelliteStatus::NotDone(identifier, satellite, instrument, mode, new_direction, previous_direction) } } } //Checks to see if the planner is done by comparing the state and the goal. pub fn is_satellite_done(state: SatelliteState, goal: &SatelliteGoals) -> bool { for goal_image in goal.have_image.keys() { if !state.have_image.contains_key(goal_image) { return false; } else { if !(state.have_image.get(goal_image) == goal.have_image.get(goal_image)) { return false; } } } return true; } //Turn an instrument on or off. 
fn switching(state: &SatelliteState, satellite: SatelliteEnum, instrument: SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> { TaskLists(vec![if !state.power_on.is_empty() && !state.power_on.contains(&instrument) { let powered_on_instrument = find_powered_on_instruments(state, &satellite); debug!("Our powered_on instrument is: {:?}", powered_on_instrument); match powered_on_instrument{ Some(n) => debug!("Performing switchoff on SwitchOff({:?}, {:?})",n, satellite ), None => debug!("None"), } match powered_on_instrument{ Some(n)=> vec![Operator(SwitchOff(n, satellite)), Operator(SwitchOn(instrument, satellite))], None => { if state.power_on.contains(&instrument){ vec![Operator(SwitchOff(instrument, satellite)), Operator(SwitchOn(instrument, satellite))] }else{ vec![Operator(SwitchOn(instrument, satellite))] } } } } else if state.power_on.is_empty() { vec![Operator(SwitchOn(instrument, satellite))] } else { vec![] }]) } //Schedules one step into the planner. 
/// Decomposes a single imaging step into turn/switch/calibrate/take-image
/// tasks, dispatching on (a) whether the satellite already points at
/// `new_direction` and (b) whether any instrument is powered on.
///
/// NOTE(review): `is_instrument_powered_on` is derived from the satellite's
/// negated `power_avail` flag, not from the instrument itself — the name may
/// be misleading; confirm against `SatelliteState`'s semantics. The
/// `.unwrap()` calls assume `power_avail` and `calibration_target` contain
/// entries for every satellite/instrument — TODO confirm.
fn schedule_one(state: &SatelliteState, satellite: SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum, previous_direction: SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    let is_instrument_powered_on = !state.power_avail.get(&satellite).unwrap();
    if is_satellite_pointing_in_direction(state, &satellite, &new_direction){
        //Prevents short circuiting of the and from earlier
        if is_instrument_powered_on || state.power_on.is_empty(){
            debug!("Scheduling pointing with on instruments");
            return schedule_pointing_with_powered_on_instruments(satellite, instrument, mode, new_direction)
        } else {
            debug!("Scheduling pointing with off instruments");
            return schedule_pointing_with_powered_off_instruments(state, &satellite, instrument, mode, new_direction)
        }
    }else{
        if is_instrument_powered_on || state.power_on.is_empty(){
            debug!("Scheduling no pointing with on instruments");
            let calibration_target_direction = state.calibration_target.get(&instrument).unwrap();
            return schedule_not_pointing_with_powered_on_instruments(satellite, instrument, mode, new_direction, previous_direction, calibration_target_direction)
        }else{
            debug!("Scheduling no pointing with off instruments");
            let calibration_target_direction = state.calibration_target.get(&instrument).unwrap();
            return schedule_not_pointing_with_powered_off_instruments(state, &satellite, instrument, mode, new_direction, previous_direction, calibration_target_direction)
        }
    }
}

// Drops TurnTo operators whose destination equals their starting direction;
// all other tasks pass through unchanged.
fn remove_redundant_turns(tasks: Vec<Task<SatelliteOperator<SatelliteEnum>, SatelliteMethod>>) -> Vec<Task<SatelliteOperator<SatelliteEnum>, SatelliteMethod>> {
    tasks.iter().filter(|t| match t {
        Method(_) => true,
        Operator(op) => match op {
            TurnTo(_,dest,start) => dest != start,
            _ => true
        }
    }).map(|t| *t).collect()
}

/// Case: satellite is NOT pointing at `new_direction` and the needed
/// instrument is powered off. Switches power (turning another instrument
/// off first when necessary), calibrates at the calibration target, turns
/// to the goal direction, and takes the image; the four branches elide the
/// turns that are no-ops for the given direction combination.
fn schedule_not_pointing_with_powered_off_instruments(state: &SatelliteState, satellite: &SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum, previous_direction: SatelliteEnum, calibration_target_direction: &SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    let powered_on_instrument= find_powered_on_instruments(state, &satellite);
    debug!("Our found powered on instrument is is {:?}", instrument);
    debug!("Our satellite is {:?}", satellite);
    match powered_on_instrument {
        Some(instrument_to_power_off) => {
            if calibration_target_direction == &previous_direction && &new_direction == calibration_target_direction{
                // Already at both the calibration target and the goal: no turns needed.
                TaskLists(vec![vec![Operator(SwitchOff(instrument_to_power_off, *satellite)),
                                    Method(Switching(*satellite, instrument)),
                                    Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                                    Operator(TakeImage(*satellite, new_direction, instrument, mode))]])
            } else if calibration_target_direction == &previous_direction {
                // Already at the calibration target: only the final turn to the goal.
                TaskLists(vec![remove_redundant_turns(vec![Operator(SwitchOff(instrument_to_power_off, *satellite)),
                                    Method(Switching(*satellite, instrument)),
                                    Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                                    Operator(TurnTo(*satellite, new_direction, *calibration_target_direction)),
                                    Operator(TakeImage(*satellite, new_direction, instrument, mode))])])
            } else if &new_direction==calibration_target_direction {
                // Goal IS the calibration target: a single turn suffices.
                TaskLists(vec![remove_redundant_turns(vec![Operator(SwitchOff(instrument_to_power_off, *satellite)),
                                    Operator(TurnTo(*satellite, *calibration_target_direction, previous_direction)),
                                    Method(Switching(*satellite, instrument)),
                                    Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                                    Operator(TakeImage(*satellite, new_direction, instrument, mode))])])
            }else{
                // General case: turn to the calibration target, then to the goal.
                TaskLists(vec![remove_redundant_turns(vec![Operator(SwitchOff(instrument_to_power_off, *satellite)),
                                    Operator(TurnTo(*satellite, *calibration_target_direction, previous_direction)),
                                    Method(Switching(*satellite, instrument)),
                                    Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                                    Operator(TurnTo(*satellite, new_direction, *calibration_target_direction)),
                                    Operator(TakeImage(*satellite, new_direction, instrument, mode))])])
            }
        },
        // No instrument on this satellite is powered on: skip the SwitchOff.
        None => if &new_direction==calibration_target_direction || new_direction==previous_direction{
            TaskLists(vec![vec![Method(Switching(*satellite, instrument)),
                                Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                                Operator(TakeImage(*satellite, new_direction, instrument, mode))]])
        }else{
            TaskLists(vec![remove_redundant_turns(vec![Operator(TurnTo(*satellite, *calibration_target_direction, previous_direction)),
                                Method(Switching(*satellite, instrument)),
                                Operator(Calibrate(*satellite, instrument, *calibration_target_direction)),
                                Operator(TurnTo(*satellite, new_direction, *calibration_target_direction)),
                                Operator(TakeImage(*satellite, new_direction, instrument, mode))])])
        },
    }
}

/// Case: satellite is NOT pointing at `new_direction` but the instrument is
/// already powered. Same turn/calibrate/take-image structure as the
/// powered-off variant, without any SwitchOff.
fn schedule_not_pointing_with_powered_on_instruments(satellite: SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum, previous_direction: SatelliteEnum, calibration_target_direction: &SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    if calibration_target_direction == &previous_direction && &new_direction==calibration_target_direction{
        TaskLists(vec![vec![Method(Switching(satellite, instrument)),
                            Operator(Calibrate(satellite, instrument, *calibration_target_direction)),
                            Operator(TakeImage(satellite, new_direction, instrument, mode))]])
    }else if calibration_target_direction == &previous_direction{
        TaskLists(vec![vec![Method(Switching(satellite, instrument)),
                            Operator(Calibrate(satellite, instrument, *calibration_target_direction)),
                            Operator(TurnTo(satellite, new_direction, *calibration_target_direction)),
                            Operator(TakeImage(satellite, new_direction, instrument, mode))]])
    }else if &new_direction==calibration_target_direction{
        TaskLists(vec![vec![Operator(TurnTo(satellite, *calibration_target_direction, previous_direction)),
                            Method(Switching(satellite, instrument)),
                            Operator(Calibrate(satellite, instrument, *calibration_target_direction)),
                            Operator(TakeImage(satellite, new_direction, instrument, mode))]])
    }else{
        debug!("Taking the dangerous way!");
        TaskLists(vec![vec![Operator(TurnTo(satellite, *calibration_target_direction, previous_direction)),
                            Method(Switching(satellite, instrument)),
                            Operator(Calibrate(satellite, instrument, *calibration_target_direction)),
                            Operator(TurnTo(satellite, new_direction, *calibration_target_direction)),
                            Operator(TakeImage(satellite, new_direction, instrument, mode))]])
    }
}

/// Case: satellite already points at `new_direction` and the instrument is
/// powered. Calibrates at the current direction and takes the image.
fn schedule_pointing_with_powered_on_instruments(satellite: SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    TaskLists(vec![vec![Method(Switching(satellite, instrument)),
                        Operator(Calibrate(satellite, instrument, new_direction)),
                        Operator(TakeImage(satellite, new_direction, instrument, mode))]])
}

/// Case: satellite already points at `new_direction` but the instrument is
/// off. Switches off any powered instrument on this satellite first, then
/// powers, calibrates and shoots.
fn schedule_pointing_with_powered_off_instruments(state: &SatelliteState, satellite: &SatelliteEnum, instrument: SatelliteEnum, mode: SatelliteEnum, new_direction: SatelliteEnum) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    match find_powered_on_instruments(state, &satellite) {
        Some(instrument_to_power_off) => TaskLists(vec![vec![Operator(SwitchOff(instrument_to_power_off, *satellite)),
                                                             Method(Switching(*satellite, instrument)),
                                                             Operator(Calibrate(*satellite, instrument, new_direction)),
                                                             Operator(TakeImage(*satellite, new_direction, instrument, mode))]]),
        None => TaskLists(vec![vec![Method(Switching(*satellite, instrument)),
                                    Operator(Calibrate(*satellite, instrument, new_direction)),
                                    Operator(TakeImage(*satellite, new_direction, instrument, mode))]])
    }
}
//Given a state, and a Satellite::SatelliteEnum, return an Instrument::Maybe<SatelliteEnum> containing any powered on instruments owned by the satellite.
/// Given a state and a satellite, return the first instrument listed as
/// onboard that satellite which is currently powered on, or `None` if the
/// satellite has no onboard entry or none of its instruments are powered.
fn find_powered_on_instruments(state: &SatelliteState, satellite: &SatelliteEnum) -> Option<SatelliteEnum>{
    debug!("Attempting to search the following {:?} ", state.onboard);
    // `get` returns an Option, so this outer loop runs at most once — it is a
    // compact "if the satellite has an instrument list" guard.
    for onboard_instrument_array in state.onboard.get(satellite) { //Get the instrument array for the satellite
        for onboard_instrument in onboard_instrument_array.into_iter() { //Loop over the instruments
            debug!("Seeing if contains: {:?}", onboard_instrument);
            if state.power_on.contains(onboard_instrument) { //Check if the instrument is powered on
                return Some(onboard_instrument.clone());
            }
        }
    }
    return None;
}

/// True when `state.pointing` records `satellite` as aimed at `direction`.
/// A satellite with no pointing entry is treated as not pointing there.
fn is_satellite_pointing_in_direction(state: &SatelliteState, satellite: &SatelliteEnum, direction: &SatelliteEnum) -> bool {
    return match state.pointing.get(satellite) {
        Some(x) => x == direction, //If we have the correct instrument selected, we need to make sure that it is selected at the right direction.
        None => false, //If the lookup fails, the if statement should fail.
    };
}

/// Top-level planning method: for every goal image not yet achieved, emit a
/// `ScheduleOne` task followed by a recursive `ScheduleAll`; once every goal
/// image is present in the state, emit the `TurnTo` operators still needed to
/// satisfy `goal.pointing`. Returns `Failure` when no task can be produced
/// and the goals are unmet.
// NOTE(review): the `unwrap`s assume every goal mode has a supporting
// instrument/satellite and every satellite has a pointing entry — TODO confirm
// against how problem instances are constructed.
fn schedule_all(state: &SatelliteState, goal: &SatelliteGoals) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
    let mut tasks: Vec<Vec<Task<SatelliteOperator<SatelliteEnum>, SatelliteMethod>>> = vec![];
    let mut completed_tasks: Vec<SatelliteEnum> = vec![];
    for goal_image in goal.have_image.keys() {
        if !(state.have_image.get(goal_image) == goal.have_image.get(goal_image)) {
            let goal_image_clone = goal_image.clone();
            let mode = goal.have_image.get(goal_image).unwrap();
            let instrument = brute_force_instrument(state, mode).unwrap(); //First look up the goal image to see which mode it should be in, and then look up which mode it should be in.
            let new_direction = goal_image_clone;
            let satellite = brute_force_satellite(state, &instrument, mode).unwrap();
            let previous_direction = state.pointing.get(&satellite.clone()).unwrap();
            // Each alternative is "take this one image, then re-plan the rest".
            tasks.push(vec![Task::Method(ScheduleOne(satellite, instrument, mode.clone(), new_direction, previous_direction.clone())), Task::Method(ScheduleAll)]);
        } else {
            let image_clone = goal_image.clone();
            completed_tasks.push(image_clone);
        }
    }
    return if goal.have_image.keys().eq(&completed_tasks) {
        // All images captured: only the pointing goals remain.
        let pointing_tasks = pointing_needed(state, goal).iter()
            .map(|(sat, dir)| Task::Operator(TurnTo(*sat, *dir, *state.pointing.get(sat).unwrap())))
            .collect();
        TaskLists(vec![pointing_tasks])
    }else if tasks.len()>0{
        TaskLists(tasks)
    }else {
        debug!("ScheduleAll is returning failure");
        debug!("completed_tasks: {:?}", completed_tasks);
        debug!("goals: {:?}", goal.have_image.keys());
        debug!("goal.pointing: {:?}", goal.pointing);
        for satellite in state.pointing.iter() {
            debug!("pointing: {:?}", satellite);
        }
        Failure
    };
}

/// List the (satellite, direction) pairs from `goal.pointing` that the state
/// does not already satisfy.
// NOTE(review): `unwrap` assumes every goal satellite has an entry in
// `state.pointing` — TODO confirm.
fn pointing_needed(state: &SatelliteState, goal: &SatelliteGoals) -> Vec<(SatelliteEnum,SatelliteEnum)> {
    goal.pointing.iter()
        .map(|(sat, dir)| (*sat, *dir))
        .filter(|(sat, dir)| dir != state.pointing.get(&sat).unwrap())
        .collect()
}

//This method returns a Maybe<Instrument> from a state, and a mode.
/// Linear scan of `state.supports` for the first instrument that supports
/// `mode`; `?` bails out with `None` if a key unexpectedly has no entry.
fn brute_force_instrument(state: &SatelliteState, mode: &SatelliteEnum) -> Option<SatelliteEnum> {
    for instrument in state.supports.keys(){
        if state.supports.get(instrument)?.contains(mode){
            return Some(instrument.clone());
        }
    }
    return None;
}

//This method returns a Satellite::Maybe<SatelliteEnum> from a state, an instrument and a mode.
/// Linear scan over `state.onboard` for the first satellite that carries
/// `instrument`, provided the instrument supports `mode`.
// NOTE(review): `does_instrument_support_mode` is loop-invariant, so the scan
// only depends on the satellite via `is_onboard` — presumably intentional.
fn brute_force_satellite(state: &SatelliteState, instrument: &SatelliteEnum, mode: &SatelliteEnum) -> Option<SatelliteEnum> {
    for satellites in state.onboard.keys() {
        if state.does_instrument_support_mode(instrument, mode)&& is_onboard(state, satellites.clone(), instrument.clone()){
            return Some(satellites.clone());
        }
    }
    return None;
}

/// True when `state.onboard` lists `instrument` for `satellite`.
fn is_onboard(state: &SatelliteState, satellite: SatelliteEnum, instrument: SatelliteEnum) -> bool{
    match state.onboard.get(&satellite){
        Some(n) => n.contains(&instrument),
        None => false,
    }
}

impl Method for SatelliteMethod {
    type S = SatelliteState;
    type G = SatelliteGoals;
    type O = SatelliteOperator<SatelliteEnum>;

    /// Dispatch each method variant to its scheduling function.
    fn apply(&self, state: &SatelliteState, goal: &SatelliteGoals) -> MethodResult<SatelliteOperator<SatelliteEnum>, SatelliteMethod> {
        use SatelliteMethod::*;
        match self {
            ScheduleAll => schedule_all(state, goal),
            ScheduleOne(satellite, instrument, mode, new_direction, previous_direction) => schedule_one(state, satellite.clone(), instrument.clone(), mode.clone(), new_direction.clone(), previous_direction.clone()),
            Switching(satellite, instrument) => switching(state, satellite.clone(), instrument.clone()),
        }
    }
}

impl Goal for SatelliteGoals {
    type O = SatelliteOperator<SatelliteEnum>;
    type M = SatelliteMethod;
    type S = SatelliteState;
    type C = I40F24;

    /// Planning always begins with a single `ScheduleAll` method task.
    fn starting_tasks(&self) -> Vec<Task<SatelliteOperator<SatelliteEnum>, SatelliteMethod>> {
        vec![Task::Method(SatelliteMethod::ScheduleAll)]
    }

    /// A state is accepted when every goal is met in it.
    fn accepts(&self, state: &Self::S) -> bool {
        return self.all_met_in(state);
    }

    /// Heuristic distance: the number of goal images not yet taken plus the
    /// number of pointing goals not yet satisfied.
    fn distance_from(&self, state: &Self::S) -> Self::C {
        let mut unvisited = 0;
        let mut num_turns = 0;
        for goal_image in self.have_image.keys(){
            //If we haven't visited the image, add it to the unvisited count.
            if state.have_image.get(goal_image)!=self.have_image.get(goal_image){
                unvisited+=1;
            }
        }
        for goal_pointing in self.pointing.keys(){
            if state.pointing.get(goal_pointing)!=self.pointing.get(goal_pointing){
                num_turns+=1;
            }
        }
        return I40F24::from_num(unvisited+num_turns);
    }
}
mod widgets;

use crossbeam_channel as channel;
use druid::{
    commands::{OPEN_FILE, SHOW_OPEN_PANEL},
    kurbo::Point,
    theme,
    widget::{prelude::*, Flex, Label, Svg},
    AppDelegate, AppLauncher, ArcStr, Command, Data, DelegateCtx, Env, FileDialogOptions,
    FileSpec, Handled, ImageBuf, Lens, Selector, SingleUse, Target, Widget, WidgetExt,
    WidgetPod, WindowDesc,
};
use std::{error::Error, path::PathBuf, sync::Arc, thread};

use crate::widgets::ZoomImage;

/// Command sent from the IO thread back to the UI carrying the result of an
/// image load (the `SingleUse` wrapper lets a non-`Clone` payload cross).
const FILE_LOADED: Selector<SingleUse<Result<ImageBuf, Box<dyn Error + Send + Sync>>>> =
    Selector::new("image-viewer.file-loaded");
const OPEN_IMAGE_SVG: &str = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/image.svg"));
const ALL_IMAGES: FileSpec = FileSpec::new("Image", &["jpg", "jpeg", "gif", "bmp", "png"]);

/// Application state: at most one of `image` / `error` is meaningful at a time
/// (the setters below keep them mutually exclusive).
#[derive(Debug, Clone, Data, Lens)]
struct AppData {
    // currently displayed image, if any
    image: Option<Arc<ImageBuf>>,
    // last load/decode error message; empty string means "no error"
    error: ArcStr,
}

impl AppData {
    /// Show an image and clear any previous error message.
    fn set_image(&mut self, image: Arc<ImageBuf>) {
        self.image = Some(image);
        self.error = "".into()
    }

    /// Show an error and drop any previously displayed image.
    fn set_error(&mut self, error: ArcStr) {
        self.image = None;
        self.error = error;
    }
}

/// Entry point: builds the window, spawns the IO worker thread, runs the UI
/// event loop, then shuts the worker down.
pub fn main() {
    let main_window = WindowDesc::new(ui_builder).title("Image Viewer");
    // Set our initial data
    let data = AppData {
        image: None,
        error: "".into(),
    };
    let launcher = AppLauncher::with_window(main_window).use_simple_logger();

    // worker thread for IO: decodes image files off the UI thread and hands
    // the result back via the FILE_LOADED command.
    let evt_sink = launcher.get_external_handle();
    let (gui_send, io_recv) = channel::unbounded::<UiMsg>();
    let io_thread = thread::spawn(move || loop {
        match io_recv.recv() {
            Ok(UiMsg::LoadImage(path)) => {
                if let Err(_) = evt_sink.submit_command(
                    FILE_LOADED,
                    SingleUse::new(ImageBuf::from_file(path)),
                    Target::Global,
                ) {
                    // submit_command only fails once the event loop is gone,
                    // and we shut this thread down before that happens.
                    log::error!("should be unreachable");
                    break;
                }
            }
            Ok(UiMsg::Shutdown) | Err(_) => break,
        }
    });

    launcher
        .delegate(Delegate {
            gui_send: gui_send.clone(),
        })
        .launch(data)
        .expect("launch failed");

    // shut down gracefully
    gui_send.send(UiMsg::Shutdown).unwrap();
    io_thread.join().unwrap();
}

/// Root widget tree: a ribbon with the open button, the zoomable image, and
/// an error label at the bottom.
fn ui_builder() -> impl Widget<AppData> {
    let ribbon = Flex::row().with_child(open_button()).align_left();
    Flex::column()
        .with_child(ribbon)
        .with_flex_child(ZoomImage::new().lens(AppData::image).center(), 1.0)
        .with_child(Label::raw().lens(AppData::error).align_left())
    //.debug_paint_layout()
}

/// "Open" button: icon + label with a hover background; clicking shows the
/// platform file-open dialog restricted to image types.
fn open_button() -> impl Widget<AppData> {
    BgHover::new(
        Flex::column()
            .with_child(
                Svg::new(OPEN_IMAGE_SVG.parse().unwrap())
                    .fix_height(30.)
                    .fix_width(50.),
            )
            // no need for spacer because of spacing around image
            .with_child(Label::new("Open"))
            .padding(4.)
            .on_click(|ctx, _, _| {
                ctx.submit_command(SHOW_OPEN_PANEL.with(
                    FileDialogOptions::new().allowed_types(vec![
                        ALL_IMAGES,
                        FileSpec::JPG,
                        FileSpec::GIF,
                    ]),
                ));
            }),
    )
}

/// Messages from the UI to the IO worker thread.
enum UiMsg {
    LoadImage(PathBuf),
    Shutdown,
}

/// App delegate that forwards open-file requests to the IO thread and applies
/// load results to the app data.
struct Delegate {
    gui_send: channel::Sender<UiMsg>,
}

impl AppDelegate<AppData> for Delegate {
    fn command(
        &mut self,
        _ctx: &mut DelegateCtx,
        _target: Target,
        cmd: &Command,
        data: &mut AppData,
        _env: &Env,
    ) -> Handled {
        if let Some(file) = cmd.get(OPEN_FILE) {
            // Hand the (potentially slow) decode off to the IO thread.
            if let Err(e) = self.gui_send.send(UiMsg::LoadImage(file.path().to_owned())) {
                data.set_error(format!("error sending message to io thread: {}", e).into());
            }
            Handled::Yes
        } else if let Some(img) = cmd.get(FILE_LOADED) {
            match img.take().unwrap() {
                Ok(img) => data.set_image(Arc::new(img)),
                Err(e) => data.set_error(format!("error decoding/loading image: {}", e).into()),
            }
            Handled::Yes
        } else {
            Handled::No
        }
    }
}

/// Wrapper widget that paints a highlight background behind its child while
/// the pointer hovers over it.
struct BgHover<T, W> {
    // whether the pointer is currently over this widget
    hot: bool,
    inner: WidgetPod<T, W>,
}

impl<T, W: Widget<T>> BgHover<T, W> {
    fn new(inner: W) -> Self {
        BgHover {
            hot: false,
            inner: WidgetPod::new(inner),
        }
    }
}

impl<T: Data, W: Widget<T>> Widget<T> for BgHover<T, W> {
    fn event(&mut self, ctx: &mut EventCtx, event: &Event, data: &mut T, env: &Env) {
        self.inner.event(ctx, event, data, env)
    }

    fn lifecycle(&mut self, ctx: &mut LifeCycleCtx, event: &LifeCycle, data: &T, env: &Env) {
        match event {
            LifeCycle::HotChanged(hot) => {
                // Track hover state and repaint so the background updates.
                self.hot = *hot;
                ctx.request_paint();
            }
            _ => (),
        }
        self.inner.lifecycle(ctx, event, data, env)
    }

    fn update(&mut self, ctx: &mut UpdateCtx, old_data: &T, data: &T, env: &Env) {
        // WidgetPod tracks the previous data itself, so `old_data` is unused.
        self.inner.update(ctx, data, env)
    }

    fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size {
        let size = self.inner.layout(ctx, bc, data, env);
        self.inner.set_origin(ctx, data, env, Point::ZERO);
        size
    }

    fn paint(&mut self, ctx: &mut PaintCtx, data: &T, env: &Env) {
        if self.hot {
            // Fill our whole rect with the themed button color before the child paints.
            let r = ctx.size().to_rect();
            let bg = ctx.solid_brush(env.get(theme::BUTTON_DARK));
            ctx.fill(r, &bg);
        }
        self.inner.paint(ctx, data, env)
    }
}
use core::ptr::NonNull; use fermium::{SDL_Palette, SDL_PixelFormat}; use crate::{sdl_get_error, Palette, PixelFormatEnum, SdlError}; /// Information about a pixel format. /// /// Internally these are ref counted and usually handed out from a pool that SDL /// manages. As a result, they're generally read-only. The only exception is /// that you can change the *content* (but not *size*) of a palette associated /// with a pixel format (each palette pixel format allocates a separate palette /// when created). #[repr(transparent)] pub struct PixelFormat { nn: NonNull<SDL_PixelFormat>, } impl Drop for PixelFormat { fn drop(&mut self) { unsafe { fermium::SDL_FreeFormat(self.nn.as_ptr()) } } } impl PixelFormat { pub fn new(format: PixelFormatEnum) -> Result<Self, SdlError> { NonNull::new(unsafe { fermium::SDL_AllocFormat(format.0) }) .ok_or_else(sdl_get_error) .map(|nn| PixelFormat { nn }) } pub fn pixel_format_enum(&self) -> PixelFormatEnum { PixelFormatEnum(unsafe { (*self.nn.as_ptr()).format }) } pub fn palette(&self) -> &Option<Palette> { unsafe { let p: *const SDL_Palette = (*self.nn.as_ptr()).palette; core::mem::transmute::<&*const SDL_Palette, &Option<Palette>>(&p) } } pub fn bits_per_pixel(&self) -> usize { unsafe { (*self.nn.as_ptr()).BitsPerPixel as usize } } pub fn bytes_per_pixel(&self) -> usize { unsafe { (*self.nn.as_ptr()).BytesPerPixel as usize } } /// 0 for palette formats. pub fn r_mask(&self) -> u32 { unsafe { (*self.nn.as_ptr()).Rmask } } /// 0 for palette formats. pub fn g_mask(&self) -> u32 { unsafe { (*self.nn.as_ptr()).Gmask } } /// 0 for palette formats. pub fn b_mask(&self) -> u32 { unsafe { (*self.nn.as_ptr()).Bmask } } /// 0 for palette formats or for formats without alpha. pub fn a_mask(&self) -> u32 { unsafe { (*self.nn.as_ptr()).Amask } } }
use raycast::Window; use raycast::World; use sdl2::event::Event; use sdl2::keyboard::Keycode; use std::error::Error; use std::time::Duration; const VIEW_FOV_DEGREES: f64 = 60.0; const VIEW_FOV: f64 = VIEW_FOV_DEGREES * std::f64::consts::PI / 180.0; const WORLD_WIDTH: usize = 30; const WORLD_HEIGHT: usize = 30; const WORLD_BLOCK_SIZE: usize = 30; const DELTA_ANGLE: f64 = 0.03; const MOVE_SPEED: f64 = 0.2; fn main() -> Result<(), Box<dyn Error>> { let sdl_context = sdl2::init()?; let mut window = Window::new( &sdl_context, WORLD_WIDTH, WORLD_HEIGHT, WORLD_BLOCK_SIZE, VIEW_FOV, )?; let mut event_pump = sdl_context.event_pump()?; let mut world = World::new(WORLD_WIDTH, WORLD_HEIGHT); for i in 5..15 { world.set_block(i, i + 10, true)?; world.set_block(i + 10, i, true)?; } for y in 0..WORLD_HEIGHT { world.set_block(0, y, true)?; world.set_block(WORLD_WIDTH - 1, y, true)?; } for x in 0..WORLD_WIDTH { world.set_block(x, 0, true)?; world.set_block(x, WORLD_HEIGHT - 1, true)?; } //world.set_block(5, 5, true); 'running: loop { // poll for user exit for event in event_pump.poll_iter() { match event { Event::Quit { .. } | Event::KeyDown { keycode: Some(Keycode::Escape), .. } => break 'running, _ => {} } } for keycode in event_pump .keyboard_state() .pressed_scancodes() .filter_map(Keycode::from_scancode) { match keycode { Keycode::Left => world.rotate_user(-DELTA_ANGLE), Keycode::Right => world.rotate_user(DELTA_ANGLE), Keycode::Up => world.move_user(MOVE_SPEED), Keycode::Down => world.move_user(-MOVE_SPEED), _ => {} }; } window.update(&world)?; std::thread::sleep(Duration::new(0, 1_000_000_000u32 / 60)); } Ok(()) }
pub mod app_state;
pub mod errors;
pub mod handlers;
pub mod extractors;
pub mod fields;
pub mod cursor;

use crate::{
    websocket::client_subscriber::ClientSubscriber,
};
use actix_web::{web, HttpResponse, Error, HttpRequest};
use actix_web_actors::ws;
use crate::api::extractors::config::{default_json_config, default_path_config, default_query_config, default_global_rate_limit};

/// Upgrade an HTTP request to a WebSocket session handled by a
/// `ClientSubscriber` actor wired to the shared database and subscriber state.
async fn websocket(
    req: HttpRequest,
    stream: web::Payload,
    database: web::Data<app_state::AppDatabase>,
    subscriber: web::Data<app_state::AppSubscriber>,
) -> Result<HttpResponse, Error> {
    ws::start(
        ClientSubscriber::new(
            database,
            subscriber,
        ),
        &req,
        stream,
    )
}

/// Build the `/api/v1` service tree as a deferred configuration closure.
///
/// Shared state handles are cloned up front so the returned `FnOnce` owns
/// everything it needs. Registration order matters to actix: the rate-limit
/// middleware wraps the whole scope, `app_data` entries are attached before
/// the sub-configurations, and the `/ws` sub-scope re-attaches state because
/// nested scopes do not inherit `app_data` added after `configure` calls.
pub fn api_v1(
    config: &web::Data<app_state::AppConfig>,
    database: &web::Data<app_state::AppDatabase>,
    subscriber: &web::Data<app_state::AppSubscriber>,
    smtp: &web::Data<app_state::AppSmtp>,
) -> Box<dyn FnOnce(&mut web::ServiceConfig)> {
    let tokens_api = handlers::tokens::tokens_api(
        database,
    );
    let users_api = handlers::users::users_api(
        config,
        database,
        subscriber,
        smtp,
    );
    // Own clones of every handle so the closure is 'static.
    let config = config.clone();
    let database = database.clone();
    let subscriber = subscriber.clone();
    let smtp = smtp.clone();
    Box::new(move |cfg| {
        cfg.service(
            web::scope("/api/v1")
                .wrap(default_global_rate_limit(database.clone()))
                .app_data(config.clone())
                .app_data(database.clone())
                .app_data(subscriber.clone())
                .app_data(smtp.clone())
                .app_data(default_json_config())
                .app_data(default_path_config())
                .app_data(default_query_config())
                .configure(tokens_api)
                .configure(users_api)
                .service(
                    web::scope("/ws")
                        .app_data(config.clone())
                        .app_data(database.clone())
                        .app_data(subscriber.clone())
                        .app_data(smtp)
                        .route("", web::route().to(websocket))
                )
        );
    })
}
// Copyright 2017, 2020 Parity Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use hash_db::{HashDBRef, Prefix, EMPTY_PREFIX};
use crate::nibble::NibbleSlice;
use crate::iterator::TrieDBNodeIterator;
use crate::rstd::boxed::Box;
use super::node::{NodeHandle, Node, OwnedNode, decode_hash};
use super::lookup::Lookup;
use super::{Result, DBValue, Trie, TrieItem, TrieError, TrieIterator, Query,
	TrieLayout, CError, TrieHash};
use super::nibble::NibbleVec;

#[cfg(feature = "std")]
use crate::rstd::{fmt, vec::Vec};

/// A `Trie` implementation using a generic `HashDB` backing database, a `Hasher`
/// implementation to generate keys and a `NodeCodec` implementation to encode/decode
/// the nodes.
///
/// Use it as a `Trie` trait object. You can use `db()` to get the backing database object.
/// Use `get` and `contains` to query values associated with keys in the trie.
///
/// # Example
/// ```ignore
/// use hash_db::Hasher;
/// use reference_trie::{RefTrieDBMut, RefTrieDB, Trie, TrieMut};
/// use trie_db::DBValue;
/// use keccak_hasher::KeccakHasher;
/// use memory_db::*;
///
/// let mut memdb = MemoryDB::<KeccakHasher, HashKey<_>, _>::default();
/// let mut root = Default::default();
/// RefTrieDBMut::new(&mut memdb, &mut root).insert(b"foo", b"bar").unwrap();
/// let t = RefTrieDB::new(&memdb, &root).unwrap();
/// assert!(t.contains(b"foo").unwrap());
/// assert_eq!(t.get(b"foo").unwrap().unwrap(), b"bar".to_vec());
/// ```
pub struct TrieDB<'db, L>
where
	L: TrieLayout,
{
	db: &'db dyn HashDBRef<L::Hash, DBValue>,
	root: &'db TrieHash<L>,
	/// The number of hashes performed so far in operations on this trie.
	hash_count: usize,
}

impl<'db, L> TrieDB<'db, L>
where
	L: TrieLayout,
{
	/// Create a new trie with the backing database `db` and `root`
	/// Returns an error if `root` does not exist
	pub fn new(
		db: &'db dyn HashDBRef<L::Hash, DBValue>,
		root: &'db TrieHash<L>
	) -> Result<Self, TrieHash<L>, CError<L>> {
		if !db.contains(root, EMPTY_PREFIX) {
			Err(Box::new(TrieError::InvalidStateRoot(*root)))
		} else {
			Ok(TrieDB {db, root, hash_count: 0})
		}
	}

	/// Get the backing database.
	pub fn db(&'db self) -> &'db dyn HashDBRef<L::Hash, DBValue> {
		self.db
	}

	/// Given some node-describing data `node`, and node key return the actual node RLP.
	/// This could be a simple identity operation in the case that the node is sufficiently small,
	/// but may require a database lookup.
	///
	/// Return value is the node data and the node hash if the value was looked up in the database
	/// or None if it was returned raw.
	///
	/// `partial_key` is encoded nibble slice that addresses the node.
	pub(crate) fn get_raw_or_lookup(
		&self,
		parent_hash: TrieHash<L>,
		node_handle: NodeHandle,
		partial_key: Prefix,
	) -> Result<(OwnedNode<DBValue>, Option<TrieHash<L>>), TrieHash<L>, CError<L>> {
		let (node_hash, node_data) = match node_handle {
			NodeHandle::Hash(data) => {
				let node_hash = decode_hash::<L::Hash>(data)
					.ok_or_else(|| Box::new(TrieError::InvalidHash(parent_hash, data.to_vec())))?;
				let node_data = self.db
					.get(&node_hash, partial_key)
					.ok_or_else(|| {
						// A missing root is a distinct error from a node
						// missing deeper in the trie.
						if partial_key == EMPTY_PREFIX {
							Box::new(TrieError::InvalidStateRoot(node_hash))
						} else {
							Box::new(TrieError::IncompleteDatabase(node_hash))
						}
					})?;
				(Some(node_hash), node_data)
			}
			// Inline nodes carry their data directly in the parent; no lookup.
			NodeHandle::Inline(data) => (None, data.to_vec()),
		};
		let owned_node = OwnedNode::new::<L::Codec>(node_data)
			.map_err(|e| Box::new(TrieError::DecoderError(node_hash.unwrap_or(parent_hash), e)))?;
		Ok((owned_node, node_hash))
	}
}

impl<'db, L> Trie<L> for TrieDB<'db, L>
where
	L: TrieLayout,
{
	fn root(&self) -> &TrieHash<L> { self.root }

	fn get_with<'a, 'key, Q: Query<L::Hash>>(
		&'a self,
		key: &'key [u8],
		query: Q,
	) -> Result<Option<Q::Item>, TrieHash<L>, CError<L>>
		where 'a: 'key,
	{
		Lookup::<L, Q> {
			db: self.db,
			query,
			hash: *self.root,
		}.look_up(NibbleSlice::new(key))
	}

	fn iter<'a>(&'a self)-> Result<
		Box<dyn TrieIterator<L, Item=TrieItem<TrieHash<L>, CError<L>>> + 'a>,
		TrieHash<L>,
		CError<L>,
	> {
		TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>)
	}
}

#[cfg(feature="std")]
// This is for pretty debug output only
// Wraps a node handle together with the trie so that `Debug` can recursively
// fetch and print child nodes.
struct TrieAwareDebugNode<'db, 'a, L>
where
	L: TrieLayout,
{
	trie: &'db TrieDB<'db, L>,
	node_key: NodeHandle<'a>,
	partial_key: NibbleVec,
	index: Option<u8>,
}

#[cfg(feature="std")]
impl<'db, 'a, L> fmt::Debug for TrieAwareDebugNode<'db, 'a, L>
where
	L: TrieLayout,
{
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		match self.trie.get_raw_or_lookup(
			<TrieHash<L>>::default(),
			self.node_key,
			self.partial_key.as_prefix()
		) {
			Ok((owned_node, _node_hash)) => match owned_node.node() {
				Node::Leaf(slice, value) =>
					// The (d, index) dance prints an "index" field only for
					// nodes that are a child of a branch.
					match (f.debug_struct("Node::Leaf"), self.index) {
						(ref mut d, Some(i)) => d.field("index", &i),
						(ref mut d, _) => d,
					}
					.field("slice", &slice)
					.field("value", &value)
					.finish(),
				Node::Extension(slice, item) => {
					match (f.debug_struct("Node::Extension"), self.index) {
						(ref mut d, Some(i)) => d.field("index", &i),
						(ref mut d, _) => d,
					}
					.field("slice", &slice)
					.field("item", &TrieAwareDebugNode {
						trie: self.trie,
						node_key: item,
						partial_key: self.partial_key
							.clone_append_optional_slice_and_nibble(Some(&slice), None),
						index: None,
					})
					.finish()
				},
				Node::Branch(ref nodes, ref value) => {
					let nodes: Vec<TrieAwareDebugNode<L>> = nodes.into_iter()
						.enumerate()
						.filter_map(|(i, n)| n.map(|n| (i, n)))
						.map(|(i, n)| TrieAwareDebugNode {
							trie: self.trie,
							index: Some(i as u8),
							node_key: n,
							partial_key: self.partial_key
								.clone_append_optional_slice_and_nibble(None, Some(i as u8)),
						})
						.collect();
					match (f.debug_struct("Node::Branch"), self.index) {
						(ref mut d, Some(ref i)) => d.field("index", i),
						(ref mut d, _) => d,
					}
					.field("nodes", &nodes)
					.field("value", &value)
					.finish()
				},
				Node::NibbledBranch(slice, nodes, value) => {
					let nodes: Vec<TrieAwareDebugNode<L>> = nodes.iter()
						.enumerate()
						.filter_map(|(i, n)| n.map(|n| (i, n)))
						.map(|(i, n)| TrieAwareDebugNode {
							trie: self.trie,
							index: Some(i as u8),
							node_key: n,
							partial_key: self.partial_key
								.clone_append_optional_slice_and_nibble(Some(&slice), Some(i as u8)),
						}).collect();
					match (f.debug_struct("Node::NibbledBranch"), self.index) {
						(ref mut d, Some(ref i)) => d.field("index", i),
						(ref mut d, _) => d,
					}
					.field("slice", &slice)
					.field("nodes", &nodes)
					.field("value", &value)
					.finish()
				},
				Node::Empty => f.debug_struct("Node::Empty").finish(),
			},
			// Surface fetch/decode failures in the debug output instead of
			// aborting the whole formatting pass.
			Err(e) => f.debug_struct("BROKEN_NODE")
				.field("index", &self.index)
				.field("key", &self.node_key)
				.field("error", &format!("ERROR fetching node: {}", e))
				.finish(),
		}
	}
}

#[cfg(feature="std")]
impl<'db, L> fmt::Debug for TrieDB<'db, L>
where
	L: TrieLayout,
{
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		f.debug_struct("TrieDB")
			.field("hash_count", &self.hash_count)
			.field("root", &TrieAwareDebugNode {
				trie: self,
				node_key: NodeHandle::Hash(self.root().as_ref()),
				partial_key: NibbleVec::new(),
				index: None,
			})
			.finish()
	}
}

/// Iterator for going through all values in the trie in pre-order traversal order.
pub struct TrieDBIterator<'a, L: TrieLayout> {
	inner: TrieDBNodeIterator<'a, L>,
}

impl<'a, L: TrieLayout> TrieDBIterator<'a, L> {
	/// Create a new iterator.
	pub fn new(db: &'a TrieDB<L>) -> Result<TrieDBIterator<'a, L>, TrieHash<L>, CError<L>> {
		let inner = TrieDBNodeIterator::new(db)?;
		Ok(TrieDBIterator { inner })
	}

	/// Create a new iterator, but limited to a given prefix.
	pub fn new_prefixed(db: &'a TrieDB<L>, prefix: &[u8]) -> Result<TrieDBIterator<'a, L>, TrieHash<L>, CError<L>> {
		let mut inner = TrieDBNodeIterator::new(db)?;
		inner.prefix(prefix)?;

		Ok(TrieDBIterator {
			inner,
		})
	}

	/// Create a new iterator, but limited to a given prefix.
	/// It then does a seek operation from the prefixed context (a plain `seek`
	/// would lose the prefix context).
	pub fn new_prefixed_then_seek(
		db: &'a TrieDB<L>,
		prefix: &[u8],
		start_at: &[u8],
	) -> Result<TrieDBIterator<'a, L>, TrieHash<L>, CError<L>> {
		let mut inner = TrieDBNodeIterator::new(db)?;
		inner.prefix_then_seek(prefix, start_at)?;

		Ok(TrieDBIterator {
			inner,
		})
	}
}

impl<'a, L: TrieLayout> TrieIterator<L> for TrieDBIterator<'a, L> {
	/// Position the iterator on the first element with key >= `key`
	fn seek(&mut self, key: &[u8]) -> Result<(), TrieHash<L>, CError<L>> {
		TrieIterator::seek(&mut self.inner, key)
	}
}

impl<'a, L: TrieLayout> Iterator for TrieDBIterator<'a, L> {
	type Item = TrieItem<'a, TrieHash<L>, CError<L>>;

	fn next(&mut self) -> Option<Self::Item> {
		// Walk nodes in pre-order and yield only those that carry a value.
		while let Some(item) = self.inner.next() {
			match item {
				Ok((mut prefix, _, node)) => {
					let maybe_value = match node.node() {
						Node::Leaf(partial, value) => {
							prefix.append_partial(partial.right());
							Some(value)
						}
						Node::Branch(_, value) => value,
						Node::NibbledBranch(partial, _, value) => {
							prefix.append_partial(partial.right());
							value
						}
						_ => None,
					};
					if let Some(value) = maybe_value {
						let (key_slice, maybe_extra_nibble) = prefix.as_prefix();
						let key = key_slice.to_vec();
						// A value stored at an odd number of nibbles cannot
						// correspond to a byte-aligned key: report it.
						if let Some(extra_nibble) = maybe_extra_nibble {
							return Some(Err(Box::new(
								TrieError::ValueAtIncompleteKey(key, extra_nibble)
							)));
						}
						return Some(Ok((key, value.to_vec())));
					}
				},
				Err(err) => return Some(Err(err)),
			}
		}
		None
	}
}
use crate::datetime::{get_weekday_val, get_year_len, to_ordinal}; use crate::iter::masks::MASKS; use crate::options::*; use crate::utils::pymod; use chrono::prelude::*; #[derive(Debug)] pub struct YearInfo { pub yearlen: usize, pub nextyearlen: usize, pub yearordinal: isize, pub yearweekday: usize, pub mmask: &'static [usize], pub mdaymask: &'static [isize], pub nmdaymask: &'static [isize], pub mrange: &'static [usize], pub wdaymask: &'static [usize], pub wnomask: Option<Vec<usize>>, } pub struct BaseMasks { mmask: &'static [usize], mdaymask: &'static [isize], nmdaymask: &'static [isize], mrange: &'static [usize], wdaymask: &'static [usize], } fn base_year_masks(year: i32) -> BaseMasks { // let masks = MASKS.clone(); let firstyday = Utc.ymd(year, 1, 1).and_hms_milli(0, 0, 0, 0); let yearlen = get_year_len(year); let wday = get_weekday_val(&firstyday.weekday()) as usize; if yearlen == 365 { return BaseMasks { mmask: &MASKS.m365, mdaymask: &MASKS.mday365, nmdaymask: &MASKS.nmday365, mrange: &MASKS.m365range, wdaymask: &MASKS.wday[wday..], }; } BaseMasks { mmask: &MASKS.m366, mdaymask: &MASKS.mday366, nmdaymask: &MASKS.nmday366, mrange: &MASKS.m366range, wdaymask: &MASKS.wday[wday..], } } pub fn rebuild_year(year: i32, options: &ParsedOptions) -> YearInfo { let firstyday = Utc.ymd(year, 1, 1).and_hms_milli(0, 0, 0, 0); let yearlen = get_year_len(year); let nextyearlen = get_year_len(year + 1); let yearordinal = to_ordinal(&firstyday); let yearweekday = get_weekday_val(&firstyday.weekday()); let base_masks = base_year_masks(year); let mut result = YearInfo { yearlen, nextyearlen, yearordinal, yearweekday, wnomask: None, mmask: base_masks.mmask, mdaymask: base_masks.mdaymask, nmdaymask: base_masks.nmdaymask, mrange: base_masks.mrange, wdaymask: base_masks.wdaymask, }; if options.byweekno.is_empty() { return result; } let mut wnomask = vec![0; yearlen + 7]; let wyearlen; let mut no1wkst = pymod((7 - yearweekday + options.wkst) as isize, 7); let firstwkst = no1wkst; if 
no1wkst >= 4 { no1wkst = 0; // Number of days in the year, plus the days we got // from last year. wyearlen = result.yearlen as isize + pymod(yearweekday as isize - options.wkst as isize, 7); } else { // Number of days in the year, minus the days we // left in last year. wyearlen = yearlen as isize - no1wkst; } let div = (wyearlen as f32 / 7.).floor() as isize; let year_mod = pymod(wyearlen, 7); //const numweeks = Math.floor(div + mod / 4) let numweeks = div + (year_mod / 4); for j in 0..options.byweekno.len() { let mut n = options.byweekno[j]; if n < 0 { n += (numweeks + 1) as isize; } if !(n > 0 && n <= numweeks) { continue; } let mut i; if n > 1 { i = no1wkst + (n - 1) * 7; if no1wkst != firstwkst { i -= 7 - firstwkst; } } else { i = no1wkst; } for _ in 0..7 { wnomask[i as usize] = 1; i += 1; if result.wdaymask[i as usize] as usize == options.wkst { break; } } } if options.byweekno.iter().any(|&wkno| wkno == 1) { // Check week number 1 of next year as well // orig-TODO : Check -numweeks for next year. let mut i = no1wkst + numweeks * 7; if no1wkst != firstwkst { i -= 7 - firstwkst; } if i < yearlen as isize { // If week starts in next year, we // don't care about it. for _ in 0..7 { wnomask[i as usize] = 1; i += 1; if result.wdaymask[i as usize] as usize == options.wkst { break; } } } } if no1wkst > 0 { // Check last week number of last year as // well. If no1wkst is 0, either the year // started on week start, or week number 1 // got days from last year, so there are no // days from last year's last week number in // this year. 
let lnumweeks; if !options.byweekno.iter().any(|&weekno| weekno == -1) { let lyearweekday = get_weekday_val(&Utc.ymd(year - 1, 1, 1).weekday()); let lno1wkst = pymod((7 - lyearweekday + options.wkst) as isize, 7); let lyearlen = get_year_len(year - 1); let weekst; if lno1wkst >= 4 { //lno1wkst = 0; weekst = lyearlen as isize + pymod((lyearweekday - options.wkst) as isize, 7); } else { weekst = yearlen as isize - no1wkst; } lnumweeks = 52 + (pymod(weekst, 7) / 4) as isize; } else { lnumweeks = -1 as isize; } if options.byweekno.iter().any(|&weekno| weekno == lnumweeks) { for i in 0..no1wkst { wnomask[i as usize] = 1; } } } result.wnomask = Some(wnomask); result }
// Copyright 2021 Protocol Labs.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.

use crate::structs_proto;
use async_trait::async_trait;
use futures::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
use libp2p_core::{upgrade, Multiaddr, PeerId};
use libp2p_request_response::{self as request_response, ProtocolName};
use prost::Message;
use std::{convert::TryFrom, io};

/// Marker type identifying the AutoNAT protocol to multistream-select.
#[derive(Clone, Debug)]
pub struct AutoNatProtocol;

/// The protocol name used for negotiating with multistream-select.
pub const DEFAULT_PROTOCOL_NAME: &[u8] = b"/libp2p/autonat/1.0.0";

impl ProtocolName for AutoNatProtocol {
    fn protocol_name(&self) -> &[u8] {
        DEFAULT_PROTOCOL_NAME
    }
}

/// Codec that (de)serializes [`DialRequest`]/[`DialResponse`] as
/// length-prefixed protobuf messages on the wire.
#[derive(Clone)]
pub struct AutoNatCodec;

#[async_trait]
impl request_response::Codec for AutoNatCodec {
    type Protocol = AutoNatProtocol;
    type Request = DialRequest;
    type Response = DialResponse;

    /// Reads one length-prefixed request, capped at 1024 bytes.
    async fn read_request<T>(
        &mut self,
        _: &AutoNatProtocol,
        io: &mut T,
    ) -> io::Result<Self::Request>
    where
        T: AsyncRead + Send + Unpin,
    {
        let bytes = upgrade::read_length_prefixed(io, 1024).await?;
        let request = DialRequest::from_bytes(&bytes)?;
        Ok(request)
    }

    /// Reads one length-prefixed response, capped at 1024 bytes.
    async fn read_response<T>(
        &mut self,
        _: &AutoNatProtocol,
        io: &mut T,
    ) -> io::Result<Self::Response>
    where
        T: AsyncRead + Send + Unpin,
    {
        let bytes = upgrade::read_length_prefixed(io, 1024).await?;
        let response = DialResponse::from_bytes(&bytes)?;
        Ok(response)
    }

    /// Writes the request length-prefixed and closes the substream.
    async fn write_request<T>(
        &mut self,
        _: &AutoNatProtocol,
        io: &mut T,
        data: Self::Request,
    ) -> io::Result<()>
    where
        T: AsyncWrite + Send + Unpin,
    {
        upgrade::write_length_prefixed(io, data.into_bytes()).await?;
        io.close().await
    }

    /// Writes the response length-prefixed and closes the substream.
    async fn write_response<T>(
        &mut self,
        _: &AutoNatProtocol,
        io: &mut T,
        data: Self::Response,
    ) -> io::Result<()>
    where
        T: AsyncWrite + Send + Unpin,
    {
        upgrade::write_length_prefixed(io, data.into_bytes()).await?;
        io.close().await
    }
}

/// A request to dial the sender back on the listed addresses.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct DialRequest {
    pub peer_id: PeerId,
    pub addresses: Vec<Multiaddr>,
}

impl DialRequest {
    /// Decodes a protobuf-encoded `DIAL` message.
    ///
    /// Returns `InvalidData` if the message type is wrong, the `dial`
    /// payload is missing/malformed, or the peer id does not parse.
    /// Unparsable multiaddrs are skipped (logged at debug), not fatal.
    pub fn from_bytes(bytes: &[u8]) -> Result<Self, io::Error> {
        let msg = structs_proto::Message::decode(bytes)
            .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
        if msg.r#type != Some(structs_proto::message::MessageType::Dial as _) {
            return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid type"));
        }

        // Require both the peer id and the address list to be present.
        let (peer_id, addrs) = if let Some(structs_proto::message::Dial {
            peer:
                Some(structs_proto::message::PeerInfo {
                    id: Some(peer_id),
                    addrs,
                }),
        }) = msg.dial
        {
            (peer_id, addrs)
        } else {
            log::debug!("Received malformed dial message.");
            return Err(io::Error::new(
                io::ErrorKind::InvalidData,
                "invalid dial message",
            ));
        };

        let peer_id = {
            PeerId::try_from(peer_id)
                .map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "invalid peer id"))?
        };

        // Best-effort address parsing: drop addresses we cannot parse.
        let addrs = addrs
            .into_iter()
            .filter_map(|a| match Multiaddr::try_from(a) {
                Ok(a) => Some(a),
                Err(e) => {
                    log::debug!("Unable to parse multiaddr: {e}");
                    None
                }
            })
            .collect();
        Ok(Self {
            peer_id,
            addresses: addrs,
        })
    }

    /// Encodes this request as a protobuf `DIAL` message.
    pub fn into_bytes(self) -> Vec<u8> {
        let peer_id = self.peer_id.to_bytes();
        let addrs = self
            .addresses
            .into_iter()
            .map(|addr| addr.to_vec())
            .collect();

        let msg = structs_proto::Message {
            r#type: Some(structs_proto::message::MessageType::Dial as _),
            dial: Some(structs_proto::message::Dial {
                peer: Some(structs_proto::message::PeerInfo {
                    id: Some(peer_id),
                    addrs,
                }),
            }),
            dial_response: None,
        };

        let mut bytes = Vec::with_capacity(msg.encoded_len());
        msg.encode(&mut bytes)
            .expect("Vec<u8> provides capacity as needed");
        bytes
    }
}

/// Error category reported in a dial response.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum ResponseError {
    DialError,
    DialRefused,
    BadRequest,
    InternalError,
}

impl From<ResponseError> for i32 {
    // Numeric wire codes for each error variant.
    fn from(t: ResponseError) -> Self {
        match t {
            ResponseError::DialError => 100,
            ResponseError::DialRefused => 101,
            ResponseError::BadRequest => 200,
            ResponseError::InternalError => 300,
        }
    }
}

impl TryFrom<structs_proto::message::ResponseStatus> for ResponseError {
    type Error = io::Error;

    /// Maps a protobuf status to an error variant; `Ok` is rejected
    /// because this conversion is only meaningful for failures.
    fn try_from(value: structs_proto::message::ResponseStatus) -> Result<Self, Self::Error> {
        match value {
            structs_proto::message::ResponseStatus::EDialError => Ok(ResponseError::DialError),
            structs_proto::message::ResponseStatus::EDialRefused => Ok(ResponseError::DialRefused),
            structs_proto::message::ResponseStatus::EBadRequest => Ok(ResponseError::BadRequest),
            structs_proto::message::ResponseStatus::EInternalError => {
                Ok(ResponseError::InternalError)
            }
            structs_proto::message::ResponseStatus::Ok => {
                log::debug!("Received response with status code OK but expected error.");
                Err(io::Error::new(
                    io::ErrorKind::InvalidData,
                    "invalid response error type",
                ))
            }
        }
    }
}

/// Outcome of a dial-back attempt: the address that worked, or why it failed.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct DialResponse {
    pub status_text: Option<String>,
    pub result: Result<Multiaddr, ResponseError>,
}

impl DialResponse {
    /// Decodes a protobuf-encoded `DIAL_RESPONSE` message.
    ///
    /// Status `Ok` must carry an address; any error status must carry
    /// no address. Every other shape is rejected as `InvalidData`.
    pub fn from_bytes(bytes: &[u8]) -> Result<Self, io::Error> {
        let msg = structs_proto::Message::decode(bytes)
            .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
        if msg.r#type != Some(structs_proto::message::MessageType::DialResponse as _) {
            return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid type"));
        }

        Ok(match msg.dial_response {
            // Success: status OK with the externally reachable address.
            Some(structs_proto::message::DialResponse {
                status: Some(status),
                status_text,
                addr: Some(addr),
            }) if structs_proto::message::ResponseStatus::from_i32(status)
                == Some(structs_proto::message::ResponseStatus::Ok) =>
            {
                let addr = Multiaddr::try_from(addr)
                    .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
                Self {
                    status_text,
                    result: Ok(addr),
                }
            }
            // Failure: an error status and no address.
            Some(structs_proto::message::DialResponse {
                status: Some(status),
                status_text,
                addr: None,
            }) => Self {
                status_text,
                result: Err(ResponseError::try_from(
                    structs_proto::message::ResponseStatus::from_i32(status).ok_or_else(|| {
                        io::Error::new(io::ErrorKind::InvalidData, "invalid response status code")
                    })?,
                )?),
            },
            _ => {
                log::debug!("Received malformed response message.");
                return Err(io::Error::new(
                    io::ErrorKind::InvalidData,
                    "invalid dial response message",
                ));
            }
        })
    }

    /// Encodes this response as a protobuf `DIAL_RESPONSE` message.
    pub fn into_bytes(self) -> Vec<u8> {
        let dial_response = match self.result {
            Ok(addr) => structs_proto::message::DialResponse {
                status: Some(0), // 0 == ResponseStatus::Ok on the wire
                status_text: self.status_text,
                addr: Some(addr.to_vec()),
            },
            Err(error) => structs_proto::message::DialResponse {
                status: Some(error.into()),
                status_text: self.status_text,
                addr: None,
            },
        };

        let msg = structs_proto::Message {
            r#type: Some(structs_proto::message::MessageType::DialResponse as _),
            dial: None,
            dial_response: Some(dial_response),
        };

        let mut bytes = Vec::with_capacity(msg.encoded_len());
        msg.encode(&mut bytes)
            .expect("Vec<u8> provides capacity as needed");
        bytes
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip: request -> bytes -> request.
    #[test]
    fn test_request_encode_decode() {
        let request = DialRequest {
            peer_id: PeerId::random(),
            addresses: vec![
                "/ip4/8.8.8.8/tcp/30333".parse().unwrap(),
                "/ip4/192.168.1.42/tcp/30333".parse().unwrap(),
            ],
        };
        let bytes = request.clone().into_bytes();
        let request2 = DialRequest::from_bytes(&bytes).unwrap();
        assert_eq!(request, request2);
    }

    // Round-trip of a successful response.
    #[test]
    fn test_response_ok_encode_decode() {
        let response = DialResponse {
            result: Ok("/ip4/8.8.8.8/tcp/30333".parse().unwrap()),
            status_text: None,
        };
        let bytes = response.clone().into_bytes();
        let response2 = DialResponse::from_bytes(&bytes).unwrap();
        assert_eq!(response, response2);
    }

    // Round-trip of an error response with status text.
    #[test]
    fn test_response_err_encode_decode() {
        let response = DialResponse {
            result: Err(ResponseError::DialError),
            status_text: Some("dial failed".to_string()),
        };
        let bytes = response.clone().into_bytes();
        let response2 = DialResponse::from_bytes(&bytes).unwrap();
        assert_eq!(response, response2);
    }

    // An unparsable multiaddr in a request is dropped, not an error.
    #[test]
    fn test_skip_unparsable_multiaddr() {
        let valid_multiaddr: Multiaddr = "/ip6/2001:db8::/tcp/1234".parse().unwrap();
        let valid_multiaddr_bytes = valid_multiaddr.to_vec();

        let invalid_multiaddr = {
            let a = vec![255; 8];
            assert!(Multiaddr::try_from(a.clone()).is_err());
            a
        };

        let msg = structs_proto::Message {
            r#type: Some(structs_proto::message::MessageType::Dial.into()),
            dial: Some(structs_proto::message::Dial {
                peer: Some(structs_proto::message::PeerInfo {
                    id: Some(PeerId::random().to_bytes()),
                    addrs: vec![valid_multiaddr_bytes, invalid_multiaddr],
                }),
            }),
            dial_response: None,
        };

        let mut bytes = Vec::with_capacity(msg.encoded_len());
        msg.encode(&mut bytes)
            .expect("Vec<u8> provides capacity as needed");

        let request = DialRequest::from_bytes(&bytes).expect("not to fail");

        assert_eq!(request.addresses, vec![valid_multiaddr])
    }
}
use amethyst::{
    core::math,
    assets::{AssetStorage, Handle, Loader},
    core::{Named, Parent, Transform, TransformBundle, math::Vector3},
    derive::SystemDesc,
    ecs::{
        Component, Entity, Join, NullStorage, Read, Write, WriteExpect, ReadStorage, System,
        SystemData, WorldExt, WriteStorage,
    },
    input::{InputHandler, StringBindings},
    core::timing::Time,
};

use amethyst_physics::{
    prelude::*,
};

use std::ops::Deref;

use crate::systems::player::Player;
use crate::systems::zombie_ai::Zombie;

/// Tag component marking an entity as a ball.
#[derive(Default)]
pub struct Ball;

impl Component for Ball {
    type Storage = NullStorage<Self>;
}

/// System that pushes each plain ball away from its nearest neighbouring
/// ball by applying a repulsive physics force every frame.
#[derive(SystemDesc)]
pub struct BallAiSystem;

impl<'s> System<'s> for BallAiSystem {
    type SystemData = (
        Read<'s, Time>,
        WriteExpect<'s, PhysicsWorld<f32>>,
        ReadStorage<'s, PhysicsHandle<PhysicsRigidBodyTag>>,
        ReadStorage<'s, Transform>,
        ReadStorage<'s, Ball>,
        ReadStorage<'s, Zombie>,
        ReadStorage<'s, Player>,
    );

    fn run(&mut self, (time, physics, rigidbodies, transforms, balls, zombies, players): Self::SystemData) {
        let origin: Vector3<f32> = Vector3::new(0.0, 0.0, 0.0);

        // Only balls that are neither zombies nor players are driven here.
        for (body, my_transform, _, _, _) in
            (&rigidbodies, &transforms, &balls, !&zombies, !&players).join()
        {
            // Find the offset to the nearest *other* ball. A zero offset is
            // treated as "same entity" and skipped, matching the original
            // sentinel-based logic.
            let mut nearest: Option<Vector3<f32>> = None;
            for (other_transform, _) in (&transforms, &balls).join() {
                let offset = other_transform.translation() - my_transform.translation();
                if offset == origin {
                    continue;
                }
                let is_closer = match nearest {
                    None => true,
                    Some(best) => offset.magnitude() < best.magnitude(),
                };
                if is_closer {
                    nearest = Some(offset);
                }
            }

            // Push away from the nearest neighbour, scaled by frame time.
            if let Some(offset) = nearest {
                let repulsion = offset.normalize() * -3000.0 * time.delta_seconds();
                physics
                    .rigid_body_server()
                    .apply_force(body.get(), &repulsion);
            }
        }
    }
}
use std::pin::Pin;

use futures::Future;

/// A pinned, heap-allocated future resolving to `S`.
///
/// Used as the common return type of the method modules below so that
/// differently-shaped async bodies can share one signature.
pub type BoxFut<S> = Pin<Box<dyn Future<Output = S> + 'static>>;

// Method sets grouped by their database-connection requirements.
pub(crate) mod with_db_methods;
pub(crate) mod with_tx_methods;
pub(crate) mod without_db_methods;
use proc_macro2::TokenStream;
use quote::quote;

/// Collapses a collection of accumulated error token streams into a
/// single `Result`: `Ok(())` when empty, otherwise one combined stream.
pub(crate) trait Errors {
    fn result(&self) -> Result<(), TokenStream>;
}

impl Errors for Vec<TokenStream> {
    fn result(&self) -> Result<(), TokenStream> {
        // No errors collected: nothing to report.
        if self.is_empty() {
            return Ok(());
        }
        // Splice every accumulated error stream into one token stream.
        let errors = self;
        Err(quote! { #(#errors)* })
    }
}
mod utils; use wasm_bindgen::prelude::*; use yew::prelude::*; // When the `wee_alloc` feature is enabled, use `wee_alloc` as the global // allocator. #[cfg(feature = "wee_alloc")] #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[wasm_bindgen] extern { fn alert(s: &str); } #[wasm_bindgen] pub fn greet() { alert("Hello, myproject!"); } #[wasm_bindgen] pub fn add(a: u32, b: u32) -> u32 { a + b } enum Msg { AddOne, } struct Model { // `ComponentLink` is like a reference to a component. // It can be used to send messages to the component link: ComponentLink<Self>, value: i64, } impl Component for Model { type Message = Msg; type Properties = (); fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self { Self { link, value: 0, } } fn update(&mut self, msg: Self::Message) -> ShouldRender { match msg { Msg::AddOne => { self.value += 1; // the value has changed so we need to // re-render for it to appear on the page true } } } fn change(&mut self, _props: Self::Properties) -> ShouldRender { // Should only return "true" if new properties are different to // previously received properties. // This component has no properties so we will always return "false". false } fn view(&self) -> Html { html! { <div> <h1>{{ "CINS465 Hello World" }}</h1> <button onclick=self.link.callback(|_| Msg::AddOne)>{ "+1" }</button> <p>{ self.value }</p> </div> } } } #[wasm_bindgen] pub fn run_app() { yew::start_app::<Model>(); }
// NOTE(review): this follows the svd2rust generated register-accessor
// pattern — if this file is generated, regenerate rather than hand-edit
// (confirm provenance in the build setup).
#[doc = "Register `MACFCR` reader"]
pub type R = crate::R<MACFCR_SPEC>;
#[doc = "Register `MACFCR` writer"]
pub type W = crate::W<MACFCR_SPEC>;
#[doc = "Field `FCB_BPA` reader - Flow control busy/back pressure activate"]
pub type FCB_BPA_R = crate::BitReader;
#[doc = "Field `FCB_BPA` writer - Flow control busy/back pressure activate"]
pub type FCB_BPA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TFCE` reader - Transmit flow control enable"]
pub type TFCE_R = crate::BitReader;
#[doc = "Field `TFCE` writer - Transmit flow control enable"]
pub type TFCE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `RFCE` reader - Receive flow control enable"]
pub type RFCE_R = crate::BitReader;
#[doc = "Field `RFCE` writer - Receive flow control enable"]
pub type RFCE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `UPFD` reader - Unicast pause frame detect"]
pub type UPFD_R = crate::BitReader;
#[doc = "Field `UPFD` writer - Unicast pause frame detect"]
pub type UPFD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PLT` reader - Pause low threshold"]
pub type PLT_R = crate::FieldReader;
#[doc = "Field `PLT` writer - Pause low threshold"]
pub type PLT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `ZQPD` reader - Zero-quanta pause disable"]
pub type ZQPD_R = crate::BitReader;
#[doc = "Field `ZQPD` writer - Zero-quanta pause disable"]
pub type ZQPD_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PT` reader - Pass control frames"]
pub type PT_R = crate::FieldReader<u16>;
#[doc = "Field `PT` writer - Pass control frames"]
pub type PT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 16, O, u16>;
// Read accessors: each extracts its field from the 32-bit register value.
impl R {
    #[doc = "Bit 0 - Flow control busy/back pressure activate"]
    #[inline(always)]
    pub fn fcb_bpa(&self) -> FCB_BPA_R {
        FCB_BPA_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Transmit flow control enable"]
    #[inline(always)]
    pub fn tfce(&self) -> TFCE_R {
        TFCE_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Receive flow control enable"]
    #[inline(always)]
    pub fn rfce(&self) -> RFCE_R {
        RFCE_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Unicast pause frame detect"]
    #[inline(always)]
    pub fn upfd(&self) -> UPFD_R {
        UPFD_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bits 4:5 - Pause low threshold"]
    #[inline(always)]
    pub fn plt(&self) -> PLT_R {
        PLT_R::new(((self.bits >> 4) & 3) as u8)
    }
    #[doc = "Bit 7 - Zero-quanta pause disable"]
    #[inline(always)]
    pub fn zqpd(&self) -> ZQPD_R {
        ZQPD_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bits 16:31 - Pass control frames"]
    #[inline(always)]
    pub fn pt(&self) -> PT_R {
        PT_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
}
// Write accessors: each returns a proxy positioned at the field's offset.
impl W {
    #[doc = "Bit 0 - Flow control busy/back pressure activate"]
    #[inline(always)]
    #[must_use]
    pub fn fcb_bpa(&mut self) -> FCB_BPA_W<MACFCR_SPEC, 0> {
        FCB_BPA_W::new(self)
    }
    #[doc = "Bit 1 - Transmit flow control enable"]
    #[inline(always)]
    #[must_use]
    pub fn tfce(&mut self) -> TFCE_W<MACFCR_SPEC, 1> {
        TFCE_W::new(self)
    }
    #[doc = "Bit 2 - Receive flow control enable"]
    #[inline(always)]
    #[must_use]
    pub fn rfce(&mut self) -> RFCE_W<MACFCR_SPEC, 2> {
        RFCE_W::new(self)
    }
    #[doc = "Bit 3 - Unicast pause frame detect"]
    #[inline(always)]
    #[must_use]
    pub fn upfd(&mut self) -> UPFD_W<MACFCR_SPEC, 3> {
        UPFD_W::new(self)
    }
    #[doc = "Bits 4:5 - Pause low threshold"]
    #[inline(always)]
    #[must_use]
    pub fn plt(&mut self) -> PLT_W<MACFCR_SPEC, 4> {
        PLT_W::new(self)
    }
    #[doc = "Bit 7 - Zero-quanta pause disable"]
    #[inline(always)]
    #[must_use]
    pub fn zqpd(&mut self) -> ZQPD_W<MACFCR_SPEC, 7> {
        ZQPD_W::new(self)
    }
    #[doc = "Bits 16:31 - Pass control frames"]
    #[inline(always)]
    #[must_use]
    pub fn pt(&mut self) -> PT_W<MACFCR_SPEC, 16> {
        PT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Ethernet MAC flow control register (ETH_MACFCR)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`macfcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`macfcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MACFCR_SPEC;
impl crate::RegisterSpec for MACFCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`macfcr::R`](R) reader structure"]
impl crate::Readable for MACFCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`macfcr::W`](W) writer structure"]
impl crate::Writable for MACFCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MACFCR to value 0"]
impl crate::Resettable for MACFCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
extern crate clap;
extern crate env_logger;
#[macro_use]
extern crate log;
extern crate term_painter;

extern crate dirac;

use clap::{Arg, App};
use term_painter::ToStyle;
use term_painter::Color::*;
use term_painter::Attr::*;
use std::collections::HashMap;

use dirac::checks::CheckSuite;
use dirac::engine::CheckSuiteResult;
use dirac::reports::{Report, Reporter};

static VERSION: &'static str = env!("CARGO_PKG_VERSION");

/// Entry point: parses the CLI, runs every given check suite, prints a
/// per-host summary, and optionally writes a report file.
fn main() {
    if env_logger::init().is_err() {
        // Fixed typo in the panic message ("initiliaze" -> "initialize").
        panic!("Could not initialize logger");
    }

    let cli_args = App::new("Dirac Host Properties Checker")
        .version(VERSION)
        .arg(Arg::with_name("report")
            .takes_value(true)
            .requires("output")
            .possible_values(&["json", "markdown"])
            .short("r")
            .long("report")
            .value_name("REPORT")
            .help("Enables report"))
        .arg(Arg::with_name("output")
            .takes_value(true)
            .requires("report")
            .short("o")
            .long("output")
            .value_name("FILENAME")
            .help("Sets output file for report"))
        .arg(Arg::with_name("check_suite")
            .takes_value(true)
            .value_name("FILENAME")
            .min_values(1)
            .help("Check suites to run"))
        .get_matches();

    // At least one check-suite file is required by clap (`min_values(1)`),
    // so `unwrap` here only fires if the arg definition changes.
    let check_suite_filenames = cli_args.values_of("check_suite").unwrap();
    for filename in check_suite_filenames {
        let check_suite = CheckSuite::read_from_file(&filename).unwrap();
        let results = dirac::engine::run(&check_suite);
        print_summary(&results);

        // `report` and `output` mutually require each other, so both are
        // present or neither is; the double check keeps that explicit.
        if cli_args.is_present("report") && cli_args.is_present("output") {
            let report_type = cli_args.value_of("report").unwrap().to_string();
            let report_filename = cli_args.value_of("output").unwrap().to_string();
            let mut report_builder = Reporter::new(&results, &report_type);
            let report = report_builder.with_filename(&report_filename).create();
            let _ = report.write_to_file();
        }
    }
}

/// Prints a per-host success/failure table for one suite run.
///
/// NOTE: iteration order follows `HashMap`, so host order is unspecified.
fn print_summary(check_suite_result: &CheckSuiteResult) {
    let summary = create_summary(check_suite_result);
    println!("{}", Bold.paint("SUMMARY"));
    for kv in summary {
        println!(" * {:<30} Success {:4}, Failed {:4}",
                 kv.0,
                 Green.paint((kv.1).0),
                 Red.paint((kv.1).1));
    }
}

/// Tallies (successes, failures) per host across all property results.
fn create_summary<'a>(check_suite_result: &'a CheckSuiteResult) -> HashMap<&'a str, (u16, u16)> {
    let mut result = HashMap::new();
    for check in &check_suite_result.results {
        for property in &check.results {
            // `entry` returns `&mut (u16, u16)`; no `mut` binding needed.
            let host_result = result.entry(property.host).or_insert((0, 0));
            if property.result.is_ok() {
                host_result.0 += 1;
            } else {
                host_result.1 += 1;
            }
        }
    }
    result
}
use game_state::*; use enemy::*; use player::*; use parsing::strings::*; use rand::{thread_rng, Rng}; use std::process; use combat_action::*; pub struct BattleCoordinator { pub player: Player, pub enemy: Option<Enemy>, } impl BattleCoordinator { fn get_gameover_state(player: Player) -> State { State { state_description: "Corrosion crawls up your body, reducing you to a brittle \ husk." .to_string(), state_options: vec![OptionPair { option_number: 1, option_description: "End battle".to_string(), option_action: None, }], is_combat_state: false, player: player, enemy: None } } pub fn take_turn(&mut self, input: u32, curr_state: &State) -> State { self.player_turn(input, curr_state); self.enemy_turn(curr_state); if self.player.health == 0 { let return_state = BattleCoordinator::get_gameover_state(curr_state.player.clone()); return return_state; } let fight_action = super::player_actions::get_player_actions().remove(0); let next_turn_option = OptionPair { option_number: 1, option_description: "Fight".to_string(), option_action: Some(fight_action), }; let new_state = State { state_description: format!("You have {} hp. The enemy has {} hp.", &self.player.health, self.enemy.as_ref().unwrap().health), state_options: vec![next_turn_option], is_combat_state: true, player: self.player.clone(), enemy: self.enemy.clone() }; return new_state; } // todo: properly print for different actions fn player_turn(&mut self, input: u32, curr_state: &State) { for pair in &curr_state.state_options { if pair.option_number == input { match pair.option_action.as_ref() { Some(act) => { let action_result = (act.action)(&mut self.player, self.enemy.as_mut().unwrap()); match action_result { Some(result) => { let desc_string = ordered_inject(&act.description, vec![&self.enemy .as_ref() .unwrap() .name, &result.to_string()]); println!("{}", desc_string); } None => { let desc_string = ordered_inject(&"You missed the {0}." 
.to_string(), vec![&self.enemy .as_ref() .unwrap() .name]); println!("{}", desc_string); } } } None => println!("You didn't do anything."), } } } } fn enemy_turn(&mut self, curr_state: &State) { match self.enemy { Some(ref mut curr_enemy) => { let mut rng = thread_rng(); let actions_length = curr_enemy.get_actions().len(); let mut temp_index = 0; if actions_length > 1 as usize { temp_index = rng.gen_range(1, actions_length - 1 as usize); } let chosen_index = temp_index; let chosen_action = &curr_enemy.get_actions()[chosen_index]; let action_result = (chosen_action.action)(&mut self.player, curr_enemy); match action_result { Some(result) => { let desc_string = ordered_inject(&chosen_action.description, vec![&curr_enemy.name, &result.to_string()]); println!("{}", desc_string); } None => { let desc_string = ordered_inject(&"The {0} missed you!".to_string(), vec![&curr_enemy.name]); println!("{}", desc_string); } } } None => {} } } }
//! Structured ROTMG packets. #[macro_use] mod macros; pub mod data; pub mod packets;
use super::PageSize;

use std::fmt::Debug;
use std::cmp::{PartialEq, Eq, Ord, PartialOrd, Ordering};

use alloc::raw_vec::RawVec;
use alloc::boxed::Box;

use std::mem;
use std::fmt;

use constants::*;

/// Uniform linear address transformation
///
/// Maps a contiguous virtual range of `size` bytes starting at
/// `virtual_base` onto physical memory starting at `physical_base`,
/// with the usual page-permission flags.
#[derive(Clone)]
pub struct Segment {
    physical_base: u64,
    virtual_base: u64,
    size: u64,
    // Whether this segment represents real backing memory; false for the
    // lookup-only segments built by `dummy`/`dummy_range`.
    allocate: bool,
    write: bool,
    user: bool,
    execute: bool,
    global: bool
}

// On-disk/wire layout of a segment: three u64 fields followed by one
// flags byte (bit 0 write, bit 1 user, bit 2 execute, bit 3 global).
#[repr(packed)]
#[derive(Debug, Clone, Copy)]
struct RawSegment {
    physical_base: u64,
    virtual_base: u64,
    size: u64,
    flags: u8
}

/// Size in bytes of the serialized segment representation.
pub fn raw_segment_size() -> usize {
    mem::size_of::<RawSegment>()
}

impl Debug for Segment {
    // Manual impl because the derive would try to borrow fields of a
    // non-packed struct; also prints bases/size in hex.
    // NOTE(review): the format string is missing a comma after the
    // virtual_base value — cosmetic only, but worth confirming intent.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "Segment {{ physical_base: 0x{:x}, virtual_base: 0x{:x} size: 0x{:x}, allocate: {:?}, write: {:?}, user: {:?}, execute: {:?}, global: {:?} }}",
               self.physical_base, self.virtual_base, self.size,
               self.allocate, self.write, self.user, self.execute, self.global)
    }
}

// Overlap concerns virtual address only
// Segments can overlap on physical addresses and that's fine
impl Ord for Segment {
    // Non-standard ordering: any two segments whose page-aligned virtual
    // ranges overlap compare as Equal, so collections treat them as
    // duplicates. This deliberately violates Ord's transitivity contract —
    // keep that in mind before using it with sort-based containers.
    fn cmp(&self, other: &Segment) -> Ordering {
        // aligned overlap check, since the page table is page-aligned
        if align(self.virtual_base + self.size, PageSize::Page as u64) <=
           align_back(other.virtual_base, PageSize::Page as u64) ||
           align_back(self.virtual_base, PageSize::Page as u64) >=
           align(other.virtual_base + other.size, PageSize::Page as u64) {
            // NOTE(review): this compares *physical*_base with the other
            // segment's *virtual*_base, while the comment above says
            // ordering concerns virtual addresses only — looks like a
            // typo for `self.virtual_base.cmp(&other.virtual_base)`;
            // confirm against callers before changing.
            self.physical_base.cmp(&other.virtual_base)
        } else {
            Ordering::Equal
        }
    }
}

impl PartialOrd for Segment {
    fn partial_cmp(&self, other: &Segment) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl PartialEq for Segment {
    // Equality is "virtually overlapping", mirroring the Ord impl.
    fn eq(&self, other: &Segment) -> bool {
        self.cmp(other) == Ordering::Equal
    }
}

impl Eq for Segment {}

impl Segment {
    /// Creates an allocated segment. Both bases must be page-aligned;
    /// the virtual base is truncated to the canonical address bits.
    pub fn new(physical_base: u64, virtual_base: u64, size: u64,
               write: bool, user: bool, execute: bool, global: bool) -> Segment {
        debug_assert!(is_aligned(physical_base, 0x1000), "Physical base was not page-aligned");
        debug_assert!(is_aligned(virtual_base, 0x1000), "Virtual base was not aligned");

        Segment {
            physical_base: physical_base,
            virtual_base: virtual_base & ((1 << CANONICAL_BITS) - 1),
            allocate: true,
            size: size,
            write: write,
            user: user,
            execute: execute,
            global: global
        }
    }

    /// Creates a non-allocated placeholder covering a virtual range,
    /// e.g. for overlap/lookup queries against existing segments.
    pub fn dummy_range(virtual_address: u64, size: u64) -> Segment {
        Segment {
            physical_base: 0,
            virtual_base: virtual_address & ((1 << CANONICAL_BITS) - 1),
            allocate: false,
            size: size,
            write: false,
            user: false,
            execute: false,
            global: false
        }
    }

    /// Zero-sized placeholder at a single virtual address.
    pub fn dummy(virtual_address: u64) -> Segment {
        Segment::dummy_range(virtual_address, 0)
    }

    pub fn physical_base(&self) -> u64 {
        self.physical_base
    }

    pub fn virtual_base(&self) -> u64 {
        self.virtual_base
    }

    pub fn size(&self) -> u64 {
        self.size
    }

    pub fn write(&self) -> bool {
        self.write
    }

    pub fn user(&self) -> bool {
        self.user
    }

    pub fn execute(&self) -> bool {
        self.execute
    }

    pub fn global(&self) -> bool {
        self.global
    }

    /// Deserializes a segment from its raw byte representation.
    /// The resulting segment is always marked `allocate: true`.
    ///
    /// Panics (assert) if `raw` is not exactly `raw_segment_size()` bytes.
    pub fn from_raw(raw: &[u8]) -> Segment {
        assert!(raw.len() == mem::size_of::<RawSegment>());

        let data = unsafe {
            // SAFETY(as written): length was asserted above; RawSegment is
            // repr(packed), so reads go through a copy of the struct.
            let ptr = raw.as_ptr() as *const RawSegment;
            ptr.as_ref().unwrap()
        };

        Segment {
            physical_base: data.physical_base,
            virtual_base: data.virtual_base,
            allocate: true,
            size: data.size,
            write: (data.flags & 1 << 0) == 1 << 0,
            user: (data.flags & 1 << 1) == 1 << 1,
            execute: (data.flags & 1 << 2) == 1 << 2,
            global: (data.flags & 1 << 3) == 1 << 3
        }
    }

    /// Serializes this segment into a freshly allocated byte box.
    pub fn get_raw(&self) -> Box<[u8]> {
        let buffer: RawVec<u8> = RawVec::with_capacity(mem::size_of::<RawSegment>());

        // Pack permission flags into the single flags byte.
        let mut flags = 0;

        if self.write {
            flags |= 1 << 0;
        }

        if self.user {
            flags |= 1 << 1;
        }

        if self.execute {
            flags |= 1 << 2;
        }

        if self.global {
            flags |= 1 << 3;
        }

        let data = RawSegment {
            physical_base: self.physical_base,
            virtual_base: self.virtual_base,
            size: self.size,
            flags: flags
        };

        trace!("data: {:?}", data);

        unsafe {
            // SAFETY(as written): buffer was sized with_capacity for one
            // RawSegment; the write initializes it before into_box.
            let ptr = buffer.ptr() as *mut RawSegment;
            *ptr.as_mut().unwrap() = data;
            buffer.into_box()
        }
    }

    /// Translates `subframe_base` into a physical address relative to
    /// this segment.
    ///
    /// NOTE(review): when `virtual_base > subframe_base` this returns
    /// `physical_base + (virtual_base - subframe_base)`, i.e. the offset
    /// is *added* in both branches regardless of direction — possibly an
    /// underflow guard, possibly a bug. Confirm against callers.
    #[inline]
    pub fn get_physical_subframe(&self, subframe_base: u64) -> u64 {
        if self.virtual_base > subframe_base {
            self.physical_base + self.virtual_base - subframe_base
        } else {
            self.physical_base + subframe_base - self.virtual_base
        }
    }

    /// True when both segments carry identical permission flags.
    #[inline]
    pub fn same_settings(&self, other: &Segment) -> bool {
        other.write == self.write &&
        other.user == self.user &&
        other.execute == self.execute &&
        other.global == self.global
    }
}
// NOTE(review): this follows the svd2rust generated register-accessor
// pattern — if this file is generated, regenerate rather than hand-edit
// (confirm provenance in the build setup).
#[doc = "Register `PLLSAI2CFGR` reader"]
pub type R = crate::R<PLLSAI2CFGR_SPEC>;
#[doc = "Register `PLLSAI2CFGR` writer"]
pub type W = crate::W<PLLSAI2CFGR_SPEC>;
#[doc = "Field `PLLSAI2N` reader - SAI2PLL multiplication factor for VCO"]
pub type PLLSAI2N_R = crate::FieldReader;
#[doc = "Field `PLLSAI2N` writer - SAI2PLL multiplication factor for VCO"]
pub type PLLSAI2N_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
#[doc = "Field `PLLSAI2PEN` reader - SAI2PLL PLLSAI2CLK output enable"]
pub type PLLSAI2PEN_R = crate::BitReader;
#[doc = "Field `PLLSAI2PEN` writer - SAI2PLL PLLSAI2CLK output enable"]
pub type PLLSAI2PEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PLLSAI2P` reader - SAI1PLL division factor for PLLSAI2CLK (SAI1 or SAI2 clock)"]
pub type PLLSAI2P_R = crate::BitReader;
#[doc = "Field `PLLSAI2P` writer - SAI1PLL division factor for PLLSAI2CLK (SAI1 or SAI2 clock)"]
pub type PLLSAI2P_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PLLSAI2REN` reader - PLLSAI2 PLLADC2CLK output enable"]
pub type PLLSAI2REN_R = crate::BitReader;
#[doc = "Field `PLLSAI2REN` writer - PLLSAI2 PLLADC2CLK output enable"]
pub type PLLSAI2REN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PLLSAI2R` reader - PLLSAI2 division factor for PLLADC2CLK (ADC clock)"]
pub type PLLSAI2R_R = crate::FieldReader;
#[doc = "Field `PLLSAI2R` writer - PLLSAI2 division factor for PLLADC2CLK (ADC clock)"]
pub type PLLSAI2R_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
// Read accessors: each extracts its field from the 32-bit register value.
impl R {
    #[doc = "Bits 8:14 - SAI2PLL multiplication factor for VCO"]
    #[inline(always)]
    pub fn pllsai2n(&self) -> PLLSAI2N_R {
        PLLSAI2N_R::new(((self.bits >> 8) & 0x7f) as u8)
    }
    #[doc = "Bit 16 - SAI2PLL PLLSAI2CLK output enable"]
    #[inline(always)]
    pub fn pllsai2pen(&self) -> PLLSAI2PEN_R {
        PLLSAI2PEN_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - SAI1PLL division factor for PLLSAI2CLK (SAI1 or SAI2 clock)"]
    #[inline(always)]
    pub fn pllsai2p(&self) -> PLLSAI2P_R {
        PLLSAI2P_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 24 - PLLSAI2 PLLADC2CLK output enable"]
    #[inline(always)]
    pub fn pllsai2ren(&self) -> PLLSAI2REN_R {
        PLLSAI2REN_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bits 25:26 - PLLSAI2 division factor for PLLADC2CLK (ADC clock)"]
    #[inline(always)]
    pub fn pllsai2r(&self) -> PLLSAI2R_R {
        PLLSAI2R_R::new(((self.bits >> 25) & 3) as u8)
    }
}
// Write accessors: each returns a proxy positioned at the field's offset.
impl W {
    #[doc = "Bits 8:14 - SAI2PLL multiplication factor for VCO"]
    #[inline(always)]
    #[must_use]
    pub fn pllsai2n(&mut self) -> PLLSAI2N_W<PLLSAI2CFGR_SPEC, 8> {
        PLLSAI2N_W::new(self)
    }
    #[doc = "Bit 16 - SAI2PLL PLLSAI2CLK output enable"]
    #[inline(always)]
    #[must_use]
    pub fn pllsai2pen(&mut self) -> PLLSAI2PEN_W<PLLSAI2CFGR_SPEC, 16> {
        PLLSAI2PEN_W::new(self)
    }
    #[doc = "Bit 17 - SAI1PLL division factor for PLLSAI2CLK (SAI1 or SAI2 clock)"]
    #[inline(always)]
    #[must_use]
    pub fn pllsai2p(&mut self) -> PLLSAI2P_W<PLLSAI2CFGR_SPEC, 17> {
        PLLSAI2P_W::new(self)
    }
    #[doc = "Bit 24 - PLLSAI2 PLLADC2CLK output enable"]
    #[inline(always)]
    #[must_use]
    pub fn pllsai2ren(&mut self) -> PLLSAI2REN_W<PLLSAI2CFGR_SPEC, 24> {
        PLLSAI2REN_W::new(self)
    }
    #[doc = "Bits 25:26 - PLLSAI2 division factor for PLLADC2CLK (ADC clock)"]
    #[inline(always)]
    #[must_use]
    pub fn pllsai2r(&mut self) -> PLLSAI2R_W<PLLSAI2CFGR_SPEC, 25> {
        PLLSAI2R_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "PLLSAI2 configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pllsai2cfgr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pllsai2cfgr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct PLLSAI2CFGR_SPEC;
impl crate::RegisterSpec for PLLSAI2CFGR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`pllsai2cfgr::R`](R) reader structure"]
impl crate::Readable for PLLSAI2CFGR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`pllsai2cfgr::W`](W) writer structure"]
impl crate::Writable for PLLSAI2CFGR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets PLLSAI2CFGR to value 0x1000"]
impl crate::Resettable for PLLSAI2CFGR_SPEC {
    const RESET_VALUE: Self::Ux = 0x1000;
}
// Code emission for the compiled output.
pub mod code_generator;
// Validation pass run before/alongside code generation.
pub mod verifier;
// One module per individually implemented syscall.
mod read;
mod write;
mod open;
mod close;
mod mmap;
mod mprotect;
mod brk;
mod ioctl;
mod writev;
mod mincore;
mod getcwd;
mod poll;
mod arch_prctl;
mod exit_group;

use super::interrupts::InteruptStack;

// NOTE(review): not referenced in this file — presumably used by a
// sibling module or left over; confirm before removing.
static mut POS: usize = 0;

/// Dispatches a syscall: the number is in `rax`, arguments follow the
/// x86-64 Linux convention (rdi, rsi, rdx, r10, r8, r9). The result is
/// written back into `rax`. Unknown syscalls panic.
pub fn handle(vars: &mut InteruptStack) {
    let syscall_number = vars.rax;
    // http://blog.rchapman.org/posts/Linux_System_Call_Table_for_x86_64/
    let result = match syscall_number {
        0 => read::read(vars.rdi, vars.rsi, vars.rdx),
        1 => write::write(vars.rdi, vars.rsi, vars.rdx),
        2 => open::open(vars.rdi, vars.rsi, vars.rdx),
        3 => close::close(vars.rdi),
        7 => poll::poll(vars.rdi, vars.rsi, vars.rdx),
        9 => mmap::mmap(vars.rdi, vars.rsi, vars.rdx, vars.r10, vars.r8, vars.r9),
        10 => mprotect::mprotect(vars.rdi, vars.rsi, vars.rdx),
        12 => brk::brk(vars.rdi),
        13 => {
            // rt_sigaction: logged and ignored.
            let sig = vars.rdi as usize;
            let act = vars.rsi as usize;
            let oact = vars.rdx as usize;
            let sigsetsize = vars.r10 as usize;
            println!("Syscall: rt_sigaction sig={:x} act={:x} oact={:x} sigsetsize={:x}",
                     sig, act, oact, sigsetsize);
            0 // TODO: implement signals
        }
        14 => {
            // rt_sigprocmask: logs the requested set and writes an empty
            // old-set back through rdi, then pretends success.
            println!("Syscall: sigprocmask how={:x} set={:x} oldset={:x}",
                     vars.rdi, vars.rsi, vars.rdx);
            // SIG_BLOCK: 0
            // SIG_UNBLOCK: 1
            // SIG_SETMASK: 2
            // TODO: handle signals
            unsafe {
                if vars.rsi != 0 {
                    let set = *(vars.rsi as *const u32);
                    println!("Set: {:x}", set);
                }
            }
            unsafe {
                if vars.rdi != 0 {
                    *(vars.rdi as *mut u32) = 0;
                }
            }
            0
        }
        16 => ioctl::ioctl(vars.rdi, vars.rsi, vars.rdx),
        20 => writev::writev(vars.rdi, vars.rsi, vars.rdx),
        27 => mincore::mincore(vars.rdi, vars.rsi, vars.rdx),
        39 => {
            // getpid: single hard-coded pid.
            println!("Syscall: getpid");
            1
        }
        63 => {
            // uname: logged, buffer left untouched.
            println!("Syscall: uname name={:x}", vars.rdi);
            0
        }
        79 => getcwd::getcwd(vars.rdi, vars.rsi),
        102 => {
            // getuid: always root.
            println!("Syscall: getuid");
            0
        }
        104 => {
            // getgid: always root.
            println!("Syscall: getgid");
            0
        }
        105 => {
            // setuid: accepted and ignored.
            println!("Syscall: setuid uid={}", vars.rdi);
            0
        }
        106 => {
            // setgid: accepted and ignored.
            println!("Syscall: setgid gid={}", vars.rdi);
            0
        }
        110 => {
            // getppid: always 0.
            println!("Syscall: getppid");
            0
        }
        158 => arch_prctl::arch_prctl(vars.rdi, vars.rsi),
        202 => {
            // futex: logs the word and claims success.
            let addr = vars.rdi as usize;
            let futex_word = unsafe { *(addr as *const u32) };
            let op = vars.rsi;
            println!("Syscall: futex op={:x} addr={:x} futex_word={:x}",
                     op, addr, futex_word);
            0 // on FUTEX_WAKE
        }
        218 => {
            // set_tid_address: reads the pointed-to value for logging,
            // then returns a hard-coded thread id.
            // println!("Syscall: set_tid_address tidptr={:x}", vars.rdi);
            unsafe {
                let mut asdf = vars.rdi as *mut usize;
                let val = *asdf;
                println!("TID: {:x}", val);
                // *asdf = 1usize;
            }
            20 // thread id
        },
        231 => exit_group::exit_group(vars.rdi),
        _ => handle_unknown_syscall(vars)
    };
    vars.rax = result;
}

/// Logs all argument registers for an unimplemented syscall, then panics.
pub fn handle_unknown_syscall(vars: &mut InteruptStack) -> u64 {
    println!("Syscall rax={} 1={:x} 2={:x} 3={:x} 4={:x} 5={:x} 6={:x}",
             vars.rax, vars.rdi, vars.rsi, vars.rdx, vars.r10, vars.r8, vars.r9);
    panic!("Unhandled syscall {}", vars.rax);
}

/// Programs the MSRs that enable the `syscall` instruction:
/// entry point (IA32_LSTAR), segment selectors (STAR), rflags mask
/// (IA32_FMASK), and the SCE bit in IA32_EFER.
pub fn init() {
    unsafe {
        let addr = handle_fast_syscall as *const u8 as u64;
        asm!("wrmsr" :: "{ecx}"(0xC0000082u32), "{edx}"(addr >> 32), "{eax}"(addr as u32) :: "intel"); // IA32_LSTAR -> syscall address
        asm!("wrmsr" :: "{eax}"(0), "{edx}"(8 | 27 << 16), "{ecx}"(0xC0000081u32) :: "intel"); // STAR -> used segments
        asm!("wrmsr" :: "{eax}"(0), "{edx}"(0), "{ecx}"(0xC0000084u32) :: "intel"); // flags mask
        asm!("
        rdmsr
        or eax, 1
        wrmsr
        " :: "{ecx}"(0xC0000080u32) :: "intel") // IA32_EFER
    }
}

/// `syscall` entry point: swaps to the kernel stack, fakes an interrupt
/// frame (selectors, saved rsp/rflags/rip), and jumps into the common
/// IRQ handler. Never returns through normal control flow.
#[naked]
fn handle_fast_syscall() -> ! {
    unsafe {
        // TODO: use gs to store a pointer to current CPU and use swapgs
        // TODO: use SCRATCH_RSP from the current CPU
        asm!("
        mov SCRATCH_RSP, rsp
        mov rsp, TSS + 4
        push 35
        push SCRATCH_RSP
        push r11
        push 27
        push rcx
        push 1
        push 0x80
        jmp handle_irq
        " :::: "intel", "volatile");
    }
    unreachable!();
}

#[no_mangle]
// TODO: this is not SMP safe, need a CPU specific value
pub static SCRATCH_RSP: u64 = 0;
use super::{schema::schedules, Connection, Postgres}; use artell_domain::{ art::ArtId, scheduler::{Schedule, Scheduler, SchedulerRepository}, }; use chrono::{DateTime, Utc}; use diesel::prelude::*; use uuid::Uuid; pub struct PgSchedulerRepository { pg: Postgres, } impl PgSchedulerRepository { pub fn new(pg: Postgres) -> Self { PgSchedulerRepository { pg } } } #[async_trait] impl SchedulerRepository for PgSchedulerRepository { async fn find(&self) -> anyhow::Result<Option<Scheduler>> { self.pg.try_with_conn(find).await } async fn save(&self, scheduler: Scheduler) -> anyhow::Result<()> { self.pg .try_with_conn(move |conn| save(conn, scheduler)) .await } } /* * ========== * Query * ========== */ #[derive(Queryable)] struct QueriedSchedule { art_id: Uuid, activate_at: DateTime<Utc>, } impl Into<Schedule> for QueriedSchedule { fn into(self) -> Schedule { Schedule { art_id: ArtId(self.art_id), activate_at: self.activate_at, } } } fn find(conn: Connection) -> anyhow::Result<Option<Scheduler>> { let mut schedules = schedules::table .filter(schedules::is_scheduled.eq(true)) .select((schedules::art_id, schedules::activate_at)) .load::<QueriedSchedule>(&conn)? 
.into_iter() .map(QueriedSchedule::into) .collect::<Vec<Schedule>>(); schedules.sort_unstable_by_key(|s| s.activate_at); Ok(Some(Scheduler { schedules })) } /* * ======== * Update * ======== */ #[derive(Clone, Copy, Insertable)] #[table_name = "schedules"] struct NewSchedule<'a> { art_id: &'a Uuid, activate_at: &'a DateTime<Utc>, is_scheduled: bool, } impl<'a> From<&'a Schedule> for NewSchedule<'a> { fn from(schedule: &'a Schedule) -> Self { NewSchedule { art_id: &schedule.art_id.0, activate_at: &schedule.activate_at, is_scheduled: true, } } } fn save(conn: Connection, scheduler: Scheduler) -> anyhow::Result<()> { // 一旦、全てのscheduleを無効にする diesel::update(schedules::table) .set(schedules::is_scheduled.eq(false)) .execute(&conn)?; // 現在有効なscheduleを入力する let new_schedules = scheduler .schedules .iter() .map(NewSchedule::from) .collect::<Vec<_>>(); diesel::insert_into(schedules::table) .values(new_schedules) .on_conflict((schedules::art_id, schedules::activate_at)) .do_update() .set(schedules::is_scheduled.eq(true)) .execute(&conn)?; Ok(()) }
use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
use siro::prelude::*;
use siro::{
    effects::{DomFocus, Effects},
    html::{
        self, attr,
        event::{on_blur, on_click, on_double_click, on_enter, on_input},
    },
    vdom::class,
};
use std::str::FromStr;

// ==== model ====

/// Application state for the TodoMVC example.
/// `input` and `visibility` are transient UI state and are not serialized.
#[derive(Debug, Default, Deserialize, Serialize)]
pub struct Model {
    #[serde(skip)]
    pub input: String,
    // IndexMap preserves insertion order, so todos render in creation order.
    pub entries: IndexMap<TodoId, TodoEntry>,
    #[serde(skip)]
    pub visibility: Option<Visibility>,
}

pub type TodoId = ulid::Ulid;

/// DOM element id of the inline edit box for a given todo.
fn todo_edit_input_id(id: TodoId) -> String {
    format!("todo-{}", id)
}

/// A single todo item. `editing` is transient UI state.
#[derive(Debug, Deserialize, Serialize)]
pub struct TodoEntry {
    pub id: TodoId,
    pub description: String,
    pub completed: bool,
    #[serde(skip)]
    pub editing: bool,
}

/// Which subset of entries is currently shown.
#[derive(Copy, Clone, Debug, PartialEq, Deserialize, Serialize)]
pub enum Visibility {
    All,
    Active,
    Completed,
}

impl FromStr for Visibility {
    type Err = ();

    // Case-insensitive, whitespace-tolerant parse (used for URL fragments).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match &*s.trim().to_lowercase() {
            "all" => Ok(Visibility::All),
            "active" => Ok(Visibility::Active),
            "completed" => Ok(Visibility::Completed),
            _ => Err(()),
        }
    }
}

// ==== update ====

/// Messages produced by the view and handled by `update`.
pub enum Msg {
    UpdateField(String),
    Add,
    Check(TodoId, bool),
    CheckAll(bool),
    Delete(TodoId),
    DeleteCompleted,
    ChangeVisibility(Visibility),
    UpdateEntry(TodoId, String),
    EditingEntry(TodoId, bool),
}

/// Effect capability for persisting the model (e.g. to local storage).
pub trait SaveModel: Effects {
    fn save_model(&mut self, model: &Model) -> Result<(), Self::Error>;
}

/// Apply `msg` to `model`, triggering persistence / focus effects as needed.
/// Mutations that change persisted data call `save_model`; pure UI state
/// changes (visibility, editing flag) do not.
pub fn update<E>(model: &mut Model, msg: Msg, mut effects: E) -> Result<E::Ok, E::Error>
where
    E: DomFocus + SaveModel,
{
    match msg {
        Msg::UpdateField(input) => {
            model.input = input;
        }
        Msg::Add => {
            // Take the input field, leaving it empty for the next entry.
            let description = std::mem::take(&mut model.input).trim().to_owned();
            if !description.is_empty() {
                let id = TodoId::new();
                model.entries.insert(
                    id,
                    TodoEntry {
                        id,
                        description,
                        completed: false,
                        editing: false,
                    },
                );
                effects.save_model(model)?;
            }
        }
        Msg::Check(id, completed) => {
            if let Some(entry) = model.entries.get_mut(&id) {
                entry.completed = completed;
                effects.save_model(model)?;
            }
        }
        Msg::CheckAll(completed) => {
            for entry in model.entries.values_mut() {
                entry.completed = completed;
            }
            effects.save_model(model)?;
        }
        Msg::Delete(id) => {
            // Only persist if something was actually removed.
            if let Some(..) = model.entries.remove(&id) {
                effects.save_model(model)?;
            }
        }
        Msg::DeleteCompleted => {
            model.entries.retain(|_, entry| !entry.completed);
            effects.save_model(model)?;
        }
        Msg::ChangeVisibility(visibility) => {
            model.visibility.replace(visibility);
        }
        Msg::UpdateEntry(id, description) => {
            if let Some(entry) = model.entries.get_mut(&id) {
                entry.description = description;
                effects.save_model(model)?;
            }
        }
        Msg::EditingEntry(id, editing) => {
            if let Some(entry) = model.entries.get_mut(&id) {
                entry.editing = editing;
            }
            // Focus the edit box when entering edit mode.
            if editing {
                effects.focus(&todo_edit_input_id(id))?;
            }
        }
    }

    effects.end()
}

// ==== view ====

/// Root view: header + list + (conditional) controls footer + info footer.
pub fn view(model: &Model) -> impl Nodes<Msg> + '_ {
    html::div(
        class("todomvc-wrapper"),
        (
            html::section(
                class("todoapp"),
                (
                    view_header(model),
                    view_main(model),
                    if_then(!model.entries.is_empty(), || view_controls(model)),
                ),
            ),
            view_info_footer(),
        ),
    )
}

/// The "new todo" input header. Enter submits `Msg::Add`.
fn view_header(model: &Model) -> impl Nodes<Msg> {
    html::header(
        class("header"),
        (
            html::h1((), "todos"),
            html::input(
                (
                    class("new-todo"),
                    attr::placeholder("What needs to be done?"),
                    attr::autofocus(true),
                    attr::name("new_todo"),
                    attr::value(model.input.clone()),
                    on_input(Msg::UpdateField),
                    on_enter(|| Msg::Add),
                ),
                (),
            ),
        ),
    )
}

/// The todo list plus the "toggle all" checkbox, filtered by visibility.
fn view_main(model: &Model) -> impl Nodes<Msg> + '_ {
    let all_completed = model.entries.values().all(|entry| entry.completed);

    html::section(
        class("main"),
        (
            html::input::checkbox((
                class("toggle-all"),
                attr::id("toggle-all"),
                attr::checked(all_completed),
                // Clicking flips everything to the opposite of "all completed".
                on_click(move || Msg::CheckAll(!all_completed)),
            )),
            html::label(attr::label_for("toggle-all"), "Mark all as complete"),
            html::ul(
                class("todo-list"),
                siro::vdom::iter(
                    model
                        .entries
                        .values()
                        .filter(move |entry| match model.visibility {
                            Some(Visibility::Active) => !entry.completed,
                            Some(Visibility::Completed) => entry.completed,
                            // None / All: show everything.
                            _ => true,
                        })
                        .map(|entry| view_entry(entry)),
                ),
            ),
        ),
    )
}

/// A single list item: checkbox + label + delete button, plus an inline
/// edit box while `editing` is set.
fn view_entry(entry: &TodoEntry) -> impl Nodes<Msg> {
    let TodoEntry {
        id,
        completed,
        editing,
        ref description,
        ..
    } = *entry;

    html::li(
        (
            if_then(completed, || class("completed")),
            if_then(editing, || class("editing")),
        ),
        (
            html::div(
                class("view"),
                (
                    html::input::checkbox((
                        class("toggle"),
                        attr::checked(completed),
                        on_click(move || Msg::Check(id, !completed)),
                    )),
                    html::label(
                        // Double-click to start editing (TodoMVC convention).
                        on_double_click(move || Msg::EditingEntry(id, true)),
                        description.clone(),
                    ),
                    html::button((class("destroy"), on_click(move || Msg::Delete(id))), ()),
                ),
            ),
            if_then(editing, || {
                html::input::text((
                    class("edit"),
                    attr::name("title"),
                    attr::id(todo_edit_input_id(id)),
                    attr::value(description.clone()),
                    on_input(move |input| Msg::UpdateEntry(id, input)),
                    // Blur or Enter both commit the edit.
                    on_blur(move || Msg::EditingEntry(id, false)),
                    on_enter(move || Msg::EditingEntry(id, false)),
                ))
            }),
        ),
    )
}

/// Footer with the remaining-items count, visibility filters, and the
/// "clear completed" button (only when something is completed).
fn view_controls(model: &Model) -> impl Nodes<Msg> {
    let has_completed = model.entries.values().any(|entry| entry.completed);
    let entries_left = model.entries.values().filter(|e| !e.completed).count();

    // Pluralize "item" / "items".
    fn plural_prefix(n: usize) -> &'static str {
        if n == 1 {
            ""
        } else {
            "s"
        }
    }

    html::footer(
        class("footer"),
        (
            html::span(
                class("todo-count"),
                (
                    html::strong((), entries_left.to_string()),
                    format!(" item{} left", plural_prefix(entries_left)),
                ),
            ),
            html::ul(
                class("filters"),
                (
                    view_visibility_swap(model, Visibility::All, "All", "#/"),
                    view_visibility_swap(model, Visibility::Active, "Active", "#/active"),
                    view_visibility_swap(model, Visibility::Completed, "Completed", "#/completed"),
                ),
            ),
            if_then(has_completed, || {
                html::button(
                    (class("clear-completed"), on_click(|| Msg::DeleteCompleted)),
                    "Clear completed",
                )
            }),
        ),
    )
}

/// One visibility-filter link; highlighted when it is the active filter.
fn view_visibility_swap(
    model: &Model,
    v: Visibility,
    text: &'static str,
    url: &'static str,
) -> impl Nodes<Msg> {
    let selected = model.visibility.map_or(false, |vis| vis == v);

    html::li(
        html::event::on_click(move || Msg::ChangeVisibility(v)),
        html::a(
            (attr::href(url), if_then(selected, || class("selected"))),
            text,
        ),
    )
}

/// Static info footer with credits.
fn view_info_footer() -> impl Nodes<Msg> {
    html::footer(
        class("info"),
        (
            html::p((), "Double-click to edit a todo"),
            html::p(
                (),
                (
                    "Written by ",
                    html::a(
                        html::attr::href("https://github.com/ubnt-intrepid"),
                        "@ubnt-intrepid",
                    ),
                    // NOTE(review): this <p> is nested inside the "Written by" <p>,
                    // which is invalid HTML (p cannot contain p) — it was likely
                    // intended as a sibling paragraph. Confirm before changing.
                    html::p(
                        (),
                        (
                            "Part of ",
                            html::a(html::attr::href("http://todomvc.com"), "TodoMVC"),
                        ),
                    ),
                ),
            ),
        ),
    )
}

/// Evaluate `f` and wrap it in `Some` only when `pred` holds; used to emit
/// optional vdom attributes/nodes.
fn if_then<T>(pred: bool, f: impl FnOnce() -> T) -> Option<T> {
    if pred {
        Some(f())
    } else {
        None
    }
}
use serde::{Deserialize, Serialize};

use crate::column::{column_definition::ColumnDefinition, column_name::ColumnName};

/// Actions to be done by ALTER TABLE statement.
///
/// Each variant corresponds to one supported ALTER TABLE clause; the table
/// name itself is carried by the surrounding statement, not by this enum.
#[derive(Clone, Eq, PartialEq, Hash, Debug, Serialize, Deserialize)]
pub enum AlterTableAction {
    /// ALTER TABLE {table_name} ADD COLUMN {column_definition}
    AddColumn {
        /// Column to add
        column_definition: ColumnDefinition,
    },
    /// ALTER TABLE {table_name} DROP COLUMN {column_name}
    DropColumn {
        /// Column to drop. Currently PK column cannot be dropped.
        column_name: ColumnName,
    },
}
/// A dense two-dimensional array backed by a single contiguous `Vec`.
#[derive(Debug)]
pub struct Array2<T> {
    // Flat element storage; layout (row- vs column-major) is determined by the
    // accessor methods, which are not visible here.
    inner: Vec<T>,
    // Dimensions of the array — presumably (rows, cols); TODO confirm against
    // the accessor implementations.
    shape: (usize, usize),
}
// Copyright 2017 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! RPC types, corresponding to protocol requests, notifications & responses. //TODO: At the moment (May 08, 2017) this is all very much in flux. // At some point, it will be stabalized and then perhaps will live in another crate, // shared with the plugin lib. /// A simple update, sent to a plugin. #[derive(Serialize, Deserialize, Debug)] pub struct PluginUpdate { start: usize, end: usize, new_len: usize, #[serde(skip_serializing_if = "Option::is_none")] text: Option<String>, rev: usize, edit_type: String, author: String, } impl PluginUpdate { pub fn new(start: usize, end: usize, new_len: usize, rev: usize, text: Option<String>, edit_type: String, author: String) -> Self { PluginUpdate { start: start, end: end, new_len: new_len, text: text, rev: rev, edit_type: edit_type, author: author } } } /// An simple edit, received from a plugin. #[derive(Serialize, Deserialize, Debug)] pub struct PluginEdit { pub start: u64, pub end: u64, pub rev: u64, pub text: String, /// the edit priority determines the resolution strategy when merging /// concurrent edits. The highest priority edit will be applied last. pub priority: u64, /// whether the inserted text prefers to be to the right of the cursor. pub after_cursor: bool, /// the originator of this edit: some identifier (plugin name, 'core', etc) pub author: String, } /// A response to an `update` RPC sent to a plugin. 
#[derive(Serialize, Deserialize, Debug)] #[serde(untagged)] pub enum UpdateResponse { /// An edit to the buffer. Edit(PluginEdit), /// An acknowledgement with no action. A response cannot be Null, so we send a uint. Ack(u64), } #[derive(Serialize, Deserialize, Debug)] pub struct Span { pub start: usize, pub end: usize, pub fg: u32, #[serde(rename = "font")] pub font_style: u8, } #[derive(Serialize, Deserialize, Debug)] #[serde(untagged)] /// RPC commands sent from the plugins. pub enum PluginCommand { SetFgSpans {start: usize, len: usize, spans: Vec<Span>, rev: usize }, GetData { offset: usize, max_size: usize, rev: usize }, Alert { msg: String }, LineCount, }
use crate::query::search::Search; use group_by::GroupBy; use scan::Scan; use segment_metadata::SegmentMetadata; use serde::{Deserialize, Serialize}; use time_boundary::TimeBoundary; use timeseries::Timeseries; use top_n::TopN; pub mod definitions; pub mod group_by; pub mod response; pub mod scan; pub mod search; pub mod segment_metadata; pub mod time_boundary; pub mod timeseries; pub mod top_n; #[derive(Serialize, Deserialize, Debug)] #[serde(untagged)] #[serde(rename_all = "camelCase")] pub enum Query { GroupBy(GroupBy), Scan(Scan), Search(Search), SegmentMetadata(SegmentMetadata), TimeBoundary(TimeBoundary), Timeseries(Timeseries), TopN(TopN), } impl From<TopN> for Query { fn from(query: TopN) -> Self { Query::TopN(query) } } impl From<GroupBy> for Query { fn from(query: GroupBy) -> Self { Query::GroupBy(query) } } impl From<Scan> for Query { fn from(scan: Scan) -> Self { Query::Scan(scan) } } impl From<Search> for Query { fn from(query: Search) -> Self { Query::Search(query) } } impl From<TimeBoundary> for Query { fn from(query: TimeBoundary) -> Self { Query::TimeBoundary(query) } } impl From<SegmentMetadata> for Query { fn from(query: SegmentMetadata) -> Self { Query::SegmentMetadata(query) } } impl From<Timeseries> for Query { fn from(query: Timeseries) -> Self { Query::Timeseries(query) } } #[rustfmt::skip] #[derive(Deserialize, Serialize, Debug)] #[serde(tag = "type")] #[serde(rename_all = "camelCase")] pub enum DataSource { Table { name: String }, Lookup { lookup: String }, #[serde(rename_all = "camelCase")] Union { data_sources: Vec<String> }, #[serde(rename_all = "camelCase")] Inline { column_names: Vec<String>, rows: Vec<Vec<String>>, }, #[serde(rename_all = "camelCase")] Query { query: Box<Query> }, #[serde(rename_all = "camelCase")] // left: table, join, lookup, query, or inline // right: lookup, query, or inline Join {left: Box<DataSource>, right: Box<DataSource>, right_prefix: String, condition: String, join_type: JoinType } } pub struct JoinBuilder 
{ left: Option<DataSource>, right: Option<DataSource>, right_prefix: Option<String>, condition: Option<String>, join_type: JoinType, } impl JoinBuilder { pub fn new(join_type: JoinType) -> Self { JoinBuilder { left: None, right: None, right_prefix: None, condition: None, join_type: join_type, } } pub fn left(mut self, left: DataSource) -> Self { self.left = Some(left); self } pub fn right(mut self, right: DataSource, right_prefix: &str) -> Self { self.right = Some(right); self.right_prefix = Some(right_prefix.to_string()); self } pub fn condition(mut self, condition: &str) -> Self { self.condition = Some(condition.to_string()); self } pub fn build(&mut self) -> Option<DataSource> { if let (Some(left), Some(right), Some(condition), Some(right_prefix)) = ( self.left.take(), self.right.take(), self.condition.take(), self.right_prefix.take(), ) { Some(DataSource::Join { join_type: self.join_type.clone(), left: Box::new(left), right: Box::new(right), condition: condition, right_prefix: right_prefix, }) } else { return None; } } } impl DataSource { pub fn table(name: &str) -> DataSource { DataSource::Table { name: name.into() } } pub fn lookup(name: &str) -> DataSource { DataSource::Lookup { lookup: name.into(), } } pub fn union(sources: Vec<&str>) -> DataSource { DataSource::Union { data_sources: sources.iter().map(|s| s.to_string()).collect(), } } pub fn query(query: Query) -> DataSource { DataSource::Query { query: Box::new(query), } } pub fn join(join_type: JoinType) -> JoinBuilder { JoinBuilder::new(join_type) } } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "UPPERCASE")] pub enum JoinType { Inner, Left, } #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] #[serde(tag = "queryType", rename = "dataSourceMetadata")] pub struct DataSourceMetadata { pub data_source: DataSource, pub context: std::collections::HashMap<String, String>, } #[serde(untagged)] #[derive(Serialize, Deserialize, Debug, Clone)] pub enum JsonNumber { 
Float(f32), Integer(isize), } impl From<f32> for JsonNumber { fn from(float: f32) -> Self { JsonNumber::Float(float) } } impl From<isize> for JsonNumber { fn from(integer: isize) -> Self { JsonNumber::Integer(integer) } } #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(untagged)] pub enum JsonAny { Float(f32), Integer(isize), STRING(String), Boolean(bool), } impl From<f32> for JsonAny { fn from(float: f32) -> Self { JsonAny::Float(float) } } impl From<isize> for JsonAny { fn from(integer: isize) -> Self { JsonAny::Integer(integer) } } impl From<bool> for JsonAny { fn from(boolean: bool) -> Self { JsonAny::Boolean(boolean) } } impl From<String> for JsonAny { fn from(str: String) -> Self { JsonAny::STRING(str) } } impl From<&str> for JsonAny { fn from(str: &str) -> Self { JsonAny::STRING(str.to_string()) } }
/// A cursor over a slice that supports speculative ("try") reads: a failed
/// read leaves the cursor where it was, a successful one advances it.
pub struct TryReader<'l, T> {
    // Borrow a slice rather than `&Vec<T>`; `&Vec` call sites still work via
    // deref coercion and the reader no longer depends on the storage type.
    elements: &'l [T],
    // Index of the next element to yield.
    needle: usize,
}

impl<'l, T> TryReader<'l, T> {
    /// Create a reader positioned at the start of `elements`.
    pub fn new(elements: &'l [T]) -> TryReader<'l, T> {
        TryReader {
            elements,
            needle: 0,
        }
    }

    /// Yield the next element and advance, or `None` at the end.
    pub fn next(&mut self) -> Option<&'l T> {
        if self.has_next() {
            let t = &self.elements[self.needle];
            self.needle += 1;
            Some(t)
        } else {
            None
        }
    }

    /// Whether any elements remain.
    pub fn has_next(&self) -> bool {
        self.elements.len() != self.needle
    }

    /// Run `f` against a speculative copy of this reader.
    ///
    /// On `Ok`, commit the speculative position and return how many elements
    /// were consumed together with `f`'s result. On `Err`, the reader is left
    /// unchanged.
    pub fn try_<R, S, F>(&mut self, f: F) -> Result<(usize, R), S>
    where
        F: FnOnce(&mut TryReader<T>) -> Result<R, S>,
    {
        let mut clone = TryReader {
            elements: self.elements,
            needle: self.needle,
        };
        let result = f(&mut clone)?;
        let consumed = clone.needle - self.needle;
        // Only reached on success: commit the speculative position.
        self.needle = clone.needle;
        Ok((consumed, result))
    }

    /// Apply `f` to the next element without consuming it unless `f` succeeds.
    ///
    /// Returns `Err(None)` when exhausted, `Err(Some(s))` when `f` rejects the
    /// element (cursor unchanged), `Ok(r)` when `f` accepts it (cursor advanced).
    pub fn try_next<R, S, F>(&mut self, f: F) -> Result<R, Option<S>>
    where
        F: FnOnce(&T) -> Result<R, S>,
    {
        if !self.has_next() {
            return Err(None);
        }
        match f(&self.elements[self.needle]) {
            Ok(r) => {
                self.next();
                Ok(r)
            }
            Err(s) => Err(Some(s)),
        }
    }

    /// Advance past every leading element for which `f` holds.
    pub fn drop_while<F>(&mut self, f: F)
    where
        F: Fn(&T) -> bool,
    {
        while let Ok(_) = self.try_next(|elem| if f(&elem) { Ok(()) } else { Err(()) }) {}
    }
}
use super::super::super::super::{awesome, btn, modal};
use super::state::Modal;
use super::Msg;
use crate::{
    block::{self, BlockId},
    resource::Data,
    Resource,
};
use kagura::prelude::*;

mod common {
    pub use super::super::common::*;
}

/// Render the tag-settings modal: one editable row (name input + delete
/// button) per existing tag, followed by an "add tag" button.
pub fn render(block_field: &block::Field, resource: &Resource, world: &block::World) -> Html {
    modal::container(
        Attributes::new(),
        Events::new(),
        vec![modal::frame(
            8,
            Attributes::new(),
            Events::new(),
            vec![
                // Modal title: "Tag settings".
                common::header("タグ設定"),
                modal::body(
                    Attributes::new()
                        .class("linear-v")
                        .class("linear-v-stretch")
                        .class("scroll-v"),
                    Events::new(),
                    vec![
                        // One row per tag currently present in the world.
                        block_field
                            .listed::<block::Tag>(world.tags().collect())
                            .map(|(tag_id, tag)| {
                                Html::div(
                                    Attributes::new()
                                        .class("container-a")
                                        .class("pure-form")
                                        .class("keyvalue")
                                        .class("keyvalue-rev"),
                                    Events::new(),
                                    vec![
                                        // Editable tag name; emits SetTagName on input.
                                        Html::input(
                                            Attributes::new().value(tag.name()),
                                            Events::new().on_input({
                                                // Clone so each closure owns its own id.
                                                let tag_id = tag_id.clone();
                                                move |name| Msg::SetTagName(tag_id, name)
                                            }),
                                            vec![],
                                        ),
                                        // Delete button for this tag.
                                        btn::danger(
                                            Attributes::new(),
                                            Events::new().on_click({
                                                let tag_id = tag_id.clone();
                                                move |_| Msg::RemoveTag(tag_id)
                                            }),
                                            vec![awesome::i("fa-times")],
                                        ),
                                    ],
                                )
                            })
                            .collect(),
                        // Trailing "+" button to create a new tag.
                        vec![btn::secondary(
                            Attributes::new(),
                            Events::new().on_click(move |_| Msg::AddTag),
                            vec![awesome::i("fa-plus")],
                        )],
                    ]
                    // Flatten the per-tag rows and the add-button into one child list.
                    .into_iter()
                    .flatten()
                    .collect(),
                ),
                modal::footer(Attributes::new(), Events::new(), vec![]),
            ],
        )],
    )
}
/* chapter 4 syntax and semantics */

/// Demonstrates implicit (elided) vs explicit lifetime annotations on
/// reference-returning functions.
fn main() {
    let a: i32 = 3;

    // implicit
    fn foo(n: &i32) -> &i32 {
        n
    }

    // explicit
    fn bar<'a>(n: &'a i32) -> &i32 {
        n
    }

    let c = foo(&a);
    println!("{}", c);

    // `b` was referenced below but never declared; the expected-output comment
    // shows 7, so declare it here.
    let b: i32 = 7;
    let d = bar(&b);
    println!("{}", d);
}

// output should be:
/*
3
7
*/
//! Utilities for serializing graphs to render using Sigma.js use num::bigint::BigUint; use num::{One, Zero}; use rand::random; use serde_json::map::Map; use serde_json::Value; use crate::hypergraph::DirectedGraph; pub fn to_sigma_json(graph: DirectedGraph) -> Value { // The top-level JSON object. // Should contain "nodes" and "edges" let mut root: Map<String, Value> = Map::new(); // The nodes list. let mut nodes: Vec<Value> = Vec::new(); // Find highest node number let mut node_top: BigUint = BigUint::new(vec![0]); for (lhs, rhs) in graph.edges().iter() { if node_top < *lhs { node_top = lhs.clone(); } if node_top < *rhs { node_top = rhs.clone(); } } // Add nodes to array let mut i = BigUint::new(vec![0]); while i <= node_top { nodes.push(new_node(&i, None)); i = i + BigUint::new(vec![1]); } root.insert("nodes".to_string(), Value::Array(nodes)); // Add edges to array let mut edges: Vec<Value> = Vec::new(); for (src, dst) in graph.edges().iter() { edges.push(new_edge(src, dst)); } root.insert("edges".to_string(), Value::Array(edges)); Value::Object(root) } /// Make a new node entry given an ID and a label fn new_node(id: &BigUint, label: Option<&str>) -> Value { let mut node: Map<String, Value> = Map::new(); node.insert( "id".to_string(), Value::String(format!("n{}", id.to_str_radix(16))), ); if let Some(label) = label { node.insert("label".to_string(), Value::String(label.to_string())); } node.insert("size".to_string(), Value::Number(1.into())); Value::Object(node) } /// Make a new edge entry given a source and a destination fn new_edge(src: &BigUint, dst: &BigUint) -> Value { let mut edge: Map<String, Value> = Map::new(); edge.insert( "id".to_string(), Value::String(format!( "e{}..{}-{}", src.to_str_radix(16), dst.to_str_radix(16), hex::encode(random::<u64>().to_string()) )), ); edge.insert( "source".to_string(), Value::String(format!("n{}", src.to_str_radix(16))), ); edge.insert( "target".to_string(), Value::String(format!("n{}", dst.to_str_radix(16))), ); 
Value::Object(edge) } #[cfg(test)] mod test { use super::*; use crate::hypergraph::{DirectedGraph, DirectedHyperGraph}; #[test] fn smoke_json() { let hg = DirectedHyperGraph::ternary_self_loop(); let unrolled = hg.unroll_to_graph(); let json = serde_json::to_string_pretty(&to_sigma_json(unrolled)).expect("wtf"); println!("{}", json); } }
#![allow(unused)] mod dna; mod execute; use std::io::prelude::*; use structopt::StructOpt; use std::path::PathBuf; use std::fs::File; use dna::DNA; use crossbeam_channel::unbounded; // Struct for command line parsing #[derive(StructOpt, Debug)] #[structopt()] struct MyOpt { #[structopt(name = "DNA", default_value = "numbers.json", parse(from_os_str))] dna: PathBuf, } fn main() { // Parse command line arguments according to the struct let opt = MyOpt::from_args(); // println!("Hello, world!"); //TODO: Read from the zip-file directly let mut file = File::open(opt.dna).unwrap(); let mut contents = String::new(); file.read_to_string(&mut contents).expect("Unable to read the file"); let mut dna: DNA = DNA::from(contents.as_str()); // Create a channel of unbounded capacity. let (s, r) = unbounded(); let thr = std::thread::spawn(move || { loop { let orna = r.recv().unwrap(); match orna { None => return, Some(dna) => println!("Got some RNA: {:?}", dna), } } }); execute::execute(dna, |chunk| s.send(Some(chunk)).unwrap()); s.send(None).unwrap(); thr.join(); }
pub mod first;
pub mod second;

use first::List;

/// Exercise the first list implementation: push three values, then pop and
/// print them back in LIFO order (Some(2), Some(1), Some(0)).
fn main() {
    let mut list = List::new();
    for value in 0..3 {
        list.push(value);
    }
    for _ in 0..3 {
        println!("{:?}", list.pop());
    }
}
#![feature(int_bits_const)]
#[warn(unused_variables)]
#[warn(non_camel_case_types)]

mod loader;
mod header;
mod programheader;
mod sectionheader;

fn main() {
}

// NOTE(review): every test below reads an absolute path on the author's
// machine ("/home/kai/...") and asserts values specific to that exact `ls`
// binary — these tests are not portable; consider a checked-in fixture.
#[cfg(test)]
mod tests {
    use super::*;

    /// Raw byte/short/int/long reads must advance through the ELF magic.
    #[test]
    fn testParserReadData() {
        let mut parser = loader::Loader::new("/home/kai/Projects/elfLoader/src/binaries/ls");
        assert_eq!(0x7F, parser.readUByte().unwrap());
        assert_eq!(0x454C, parser.readUShort().unwrap());
        assert_eq!(0x46020101, parser.readUInt().unwrap());
        assert_eq!(0x0, parser.readULong().unwrap());
    }

    /// The e_ident block (magic, class, endianness, version, ABI) parses.
    #[test]
    fn testReadE_IDENT() {
        let mut parser = loader::Loader::new("/home/kai/Projects/elfLoader/src/binaries/ls");
        parser.loadHeader();
        assert_eq!(0x7F454C46, parser.header.e_ident.Magic);
        assert_eq!(0x02, parser.header.e_ident.Class);
        assert_eq!(0x01, parser.header.e_ident.Data);
        assert_eq!(0x01, parser.header.e_ident.Version);
        assert_eq!(0x0, parser.header.e_ident.OS_ABI);
    }

    /// Full ELF header fields match the known values for this binary.
    #[test]
    fn testHeaderLoading() {
        let mut parser = loader::Loader::new("/home/kai/Projects/elfLoader/src/binaries/ls");
        parser.load();
        assert_eq!(0x7F454C46, parser.header.e_ident.Magic);
        assert_eq!(0x02, parser.header.e_ident.Class);
        assert_eq!(0x01, parser.header.e_ident.Data);
        assert_eq!(0x01, parser.header.e_ident.Version);
        assert_eq!(0x0, parser.header.e_ident.OS_ABI);
        assert_eq!(0x3,parser.header.e_type);
        assert_eq!(0x3e,parser.header.e_machine);
        assert_eq!(0x1,parser.header.e_version);
        assert_eq!(0x5b20,parser.header.e_entry);
        assert_eq!(0x40,parser.header.e_phoff);
        assert_eq!(140208,parser.header.e_shoff);
        assert_eq!(0,parser.header.e_flags);
        assert_eq!(64,parser.header.e_ehsize);
        assert_eq!(56,parser.header.e_phentsize);
        assert_eq!(11,parser.header.e_phnum);
        assert_eq!(64,parser.header.e_shentsize);
        assert_eq!(27,parser.header.e_shnum);
        assert_eq!(26,parser.header.e_shstrndx);
    }

    /// First program header (PT_PHDR, type 0x6) parses with expected fields.
    #[test]
    fn testProgramHeaderLoading() {
        let mut parser = loader::Loader::new("/home/kai/Projects/elfLoader/src/binaries/ls");
        parser.load();
        // Program header permission flag bits.
        let PF_X = (1<<0);
        let PF_W = (1<<1);
        let PF_R = (1<<2);
        assert_eq!(0x6, parser.programHeaders[0].getTYPE());
        assert_eq!(0x40, parser.programHeaders[0].getOFFSET());
        assert_eq!(0x40, parser.programHeaders[0].getVADDR());
        assert_eq!(0x40, parser.programHeaders[0].getPADDR());
        assert_eq!(0x268, parser.programHeaders[0].getFILESZ());
        assert_eq!(0x268, parser.programHeaders[0].getMEMSZ());
        assert_eq!(PF_R, parser.programHeaders[0].getFLAGS());
        assert_eq!(0x8, parser.programHeaders[0].getALIGN());
    }

    /// Section header 26 (the section-header string table) has the expected offset.
    #[test]
    fn testSectionHeaderLoading() {
        let mut parser = loader::Loader::new("/home/kai/Projects/elfLoader/src/binaries/ls");
        parser.load();
        // NOTE(review): these flag constants are unused in this test.
        let PF_X = (1<<0);
        let PF_W = (1<<1);
        let PF_R = (1<<2);
        println!("{:#x?}", parser.sectionHeaders[26]);
        assert_eq!(0x222b4, parser.sectionHeaders[26].sh_offset);
    }
}
extern crate sdl2;

use super::font_renderer;
use sdl2::render;
use sdl2::ttf;
use sdl2::video;

/// Owns the SDL2 TTF context and texture creator, and hands out
/// `FontRenderer`s that borrow them.
pub struct Renderer {
    ttf_context: ttf::Sdl2TtfContext,
    texture_creator: render::TextureCreator<video::WindowContext>,
}

impl Renderer {
    /// Build a renderer from an already-initialized TTF context and the
    /// window's texture creator.
    pub fn from(
        ttf_context: ttf::Sdl2TtfContext,
        texture_creator: render::TextureCreator<video::WindowContext>,
    ) -> Renderer {
        Renderer {
            ttf_context,
            texture_creator,
        }
    }

    /// Load the font at `font_path` at `font_size` points and wrap it in a
    /// `FontRenderer` that borrows this renderer's texture creator.
    ///
    /// Returns `Err` with SDL's message if the font cannot be loaded.
    pub fn create_font_renderer<'renderer>(
        &'renderer self,
        font_path: &str,
        font_size: u16,
    ) -> Result<font_renderer::FontRenderer<'renderer>, String> {
        let mut font = self.ttf_context.load_font(font_path, font_size)?;
        // Light hinting: preserves glyph shape better than full hinting.
        font.set_hinting(sdl2::ttf::Hinting::Light);
        Ok(font_renderer::FontRenderer::from(
            font,
            &self.texture_creator,
        ))
    }
}
use super::{Open, Sink}; use std::io; use libpulse_sys::*; use std::ptr::{null, null_mut}; use std::mem::{transmute}; use std::ffi::CString; pub struct PulseAudioSink(*mut pa_simple); impl Open for PulseAudioSink { fn open() -> PulseAudioSink { info!("Using PulseAudioSink"); let ss = pa_sample_spec { format: PA_SAMPLE_S16LE, channels: 2, // stereo rate: 44100 }; let name = CString::new("librespot").unwrap(); let description = CString::new("A spoty client library").unwrap(); let s = unsafe { pa_simple_new(null(), // Use the default server. name.as_ptr(), // Our application's name. PA_STREAM_PLAYBACK, null(), // Use the default device. description.as_ptr(), // Description of our stream. &ss, // Our sample format. null(), // Use default channel map null(), // Use default buffering attributes. null_mut(), // Ignore error code. ) }; assert!(s != null_mut()); PulseAudioSink(s) } } impl Sink for PulseAudioSink { fn start(&self) -> io::Result<()> { Ok(()) } fn stop(&self) -> io::Result<()> { Ok(()) } fn write(&self, data: &[i16]) -> io::Result<()> { unsafe { let ptr = transmute(data.as_ptr()); let bytes = data.len() as usize * 2; pa_simple_write(self.0, ptr, bytes, null_mut()); }; Ok(()) } }
use core::position::{Size, HasSize, Pos, HasPosition};
use core::cellbuffer::{Cell, CellAccessor};

use ui::core::{
    Alignable,
    HorizontalAlign,
    VerticalAlign,
    Widget,
    Frame,
    Painter
};

/// Logical clone of [Frame](core/frame/struct.Frame.html) that exposes backend
/// functionality for users without breaking the API rules
///
/// # Examples
///
/// ```ignore
/// use rustty::ui::Canvas;
///
/// let mut canvas = Canvas::new(60, 10);
///
/// let (rows, cols) = canvas.size();
/// // Set the entire canvas to '-' character
/// for i in 0..cols*rows {
///     let y = i / cols;
///     let x = i % cols;
///     let mut cell = canvas.get_mut(x, y).unwrap();
///     cell.set_ch('-');
/// }
/// ```
///
pub struct Canvas {
    // All operations delegate to this wrapped Frame.
    frame: Frame
}

impl Canvas {
    /// Constructs a new `Canvas` object *cols* wide by *rows* high
    ///
    /// # Examples
    ///
    /// ```
    /// use rustty::ui::Canvas;
    ///
    /// let mut canvas = Canvas::new(60, 10);
    /// ```
    ///
    pub fn new(cols: usize, rows: usize) -> Canvas {
        Canvas {
            frame: Frame::new(cols, rows)
        }
    }

    /// Returns the size of the canvas
    ///
    /// # Examples
    ///
    /// ```
    /// use rustty::ui::Canvas;
    ///
    /// let mut canvas = Canvas::new(60, 10);
    ///
    /// assert_eq!(canvas.size(), (60, 10));
    /// ```
    ///
    pub fn size(&self) -> Size {
        self.frame.size()
    }

    /// Returns a reference to the vector of cells
    /// that lie within the canvas
    pub fn cellvec(&self) -> &Vec<Cell> {
        self.frame.cellvec()
    }

    /// Returns a mutable reference to the vector
    /// of cells that lie within the canvas
    pub fn cellvec_mut(&mut self) -> &mut Vec<Cell> {
        self.frame.cellvec_mut()
    }

    /// Clears the canvas with a *blank* [Cell](../struct.Cell.html)
    pub fn clear(&mut self, blank: Cell) {
        self.frame.clear(blank);
    }

    /// Converts a position on the screen to the relative coordinate
    /// within the Canvas cell buffer
    pub fn pos_to_index(&self, x: usize, y: usize) -> Option<usize> {
        self.frame.pos_to_index(x, y)
    }

    /// Returns a reference to the cell at the specified position (*x*,*y*),
    /// in the form of an *Option*. If no cell exists at that position,
    /// then *None* is returned
    pub fn get(&self, x: usize, y: usize) -> Option<&Cell> {
        self.frame.get(x, y)
    }

    /// Returns a mutable reference to the cell at the specified position
    /// (*x*, *y*), in the form of an *Option*. If no cell exists at that
    /// position, then *None* is returned
    pub fn get_mut(&mut self, x: usize, y: usize) -> Option<&mut Cell> {
        self.frame.get_mut(x, y)
    }

    /// The location of the canvas
    pub fn origin(&self) -> Pos {
        self.frame.origin()
    }

    /// Manually sets the location of the canvas
    pub fn set_origin(&mut self, new_origin: Pos) {
        self.frame.set_origin(new_origin);
    }
}

// Widget implementation: all behavior delegates to the inner Frame.
impl Widget for Canvas {
    /// Blit the canvas contents into the parent's cell buffer.
    fn draw(&mut self, parent: &mut CellAccessor) {
        self.frame.draw_into(parent);
    }

    /// Align the canvas within its parent using the given alignment and margin.
    fn pack(&mut self, parent: &HasSize, halign: HorizontalAlign, valign: VerticalAlign,
                margin: (usize, usize)) {
        self.frame.align(parent, halign, valign, margin);
    }

    fn resize(&mut self, new_size: Size) {
        self.frame.resize(new_size);
    }

    fn draw_box(&mut self) {
        self.frame.draw_box();
    }

    fn frame(&self) -> &Frame {
        &self.frame
    }

    fn frame_mut(&mut self) -> &mut Frame {
        &mut self.frame
    }
}
/// CPU registers, including registers specific to the ALU
///
/// The registers on the NES CPU are just like on the 6502. There is the accumulator, 2 indexes, a
/// program counter, the stack pointer, and the status register. Unlike many CPU families, members
/// do not have generic groups of registers like say, R0 through R7.
pub struct Registers {
    /// Accumulator register (A)
    pub acc: u8,
    /// Index register (X)
    ///
    /// It can be set to a value retrieved from memory and can be used to get or set the value of
    /// the stack pointer.
    pub x_idx: u8,
    /// Index register (Y)
    ///
    /// It can be set to a value retrieved from memory but cannot be used to get or set the value
    /// of the stack pointer.
    pub y_idx: u8,
    /// Program counter (PC)
    pub pc: u16,
    /// Stack pointer (SP)
    pub stack: u8,
    /// Status register (P)
    pub status: StatusFlags,
}

impl Default for Registers {
    /// Initial (power-up) register state.
    fn default() -> Self {
        Registers {
            acc: 0,
            x_idx: 0,
            y_idx: 0,
            // NOTE(review): 0xc00 looks like it might be missing a digit — the
            // conventional nestest entry point is 0xC000. Confirm intent.
            pc: 0xc00,
            // NOTE(review): 0x24 is the usual *status* power-up value (SP is
            // typically 0xFD), while `status` below defaults to D|X = 0x28.
            // Verify these two defaults haven't been swapped/mistyped.
            stack: 0x24,
            status: StatusFlags::default(),
        }
    }
}

bitflags! {
    /// Status register
    ///
    /// 7 6 5 4 3 2 1 0
    /// N V _ B D I Z C
    /// | | | | | | | +--- Carry Flag
    /// | | | | | | +----- Zero Flag
    /// | | | | | +------- Interrupt Disable
    /// | | | | +--------- Decimal Mode (unused)
    /// | | | +----------- Break Command
    /// | +--------------- Overflow Flag
    /// +----------------- Negative Flag
    pub struct StatusFlags: u8 {
        const C_FLAG = 0b00000001;
        const Z_FLAG = 0b00000010;
        const I_FLAG = 0b00000100;
        const D_FLAG = 0b00001000; //unused, always on
        const B_FLAG = 0b00010000;
        const X_FLAG = 0b00100000; //unused, always on
        const V_FLAG = 0b01000000;
        const N_FLAG = 0b10000000;
        // Convenience masks for flags commonly updated together by ALU ops.
        const NZ_FLAG = Self::N_FLAG.bits | Self::Z_FLAG.bits;
        const NZC_FLAG = Self::NZ_FLAG.bits | Self::C_FLAG.bits;
        const NVZC_FLAG = Self::NZC_FLAG.bits | Self::V_FLAG.bits;
        const NV_FLAG = Self::N_FLAG.bits | Self::V_FLAG.bits;
        const DX_FLAG = Self::D_FLAG.bits | Self::X_FLAG.bits;
    }
}

impl Default for StatusFlags {
    // Power-up status: the two "always on" bits (D and X) set.
    fn default() -> Self {
        Self::DX_FLAG
    }
}
// Advent of Code 2020, day 16: validate nearby tickets against field rules
// (part 1), then deduce which ticket column corresponds to which field and
// multiply the "departure" fields of our own ticket (part 2).
#[macro_use] extern crate lazy_static;
extern crate regex;
use std::fs;
use std::io::{self, BufRead};
use std::path::Path;
use std::cmp::max;
use std::cmp::Ordering;
use regex::Regex;
use std::collections::{HashMap, HashSet};

/// One field rule: a name plus the inclusive (lo, hi) ranges a value may fall in.
struct Constraint {
    name: String,
    intervals: Vec<(u32, u32)>
}

fn main() {
    let filename = "/home/remy/AOC/2020/16/input";
    let data = fs::read_to_string(filename).unwrap();
    // Input has three blank-line-separated sections: rules, my ticket, nearby tickets.
    let data: Vec<&str> = data.split("\n\n").collect();
    let constraints_str = data[0].to_string();
    let my_ticket_str = data[1].to_string();
    let nearby_tickets_str = data[2].to_string();
    let mut constraints: Vec<Constraint> = Vec::new();
    /* Ugly Constraint Parsing */
    // Each rule line looks like "name: a-b or c-d".
    for constraint in constraints_str.lines() {
        let splitted: Vec<&str> = constraint.split(':').collect();
        let label = splitted[0].to_string();
        let mut intervals: Vec<(u32, u32)> = Vec::new();
        for constraint in splitted[1].split("or") {
            let constraint: Vec<&str> = constraint.split("-").collect();
            intervals.push(
                (constraint[0].trim().parse::<u32>().unwrap(),
                 constraint[1].trim().parse::<u32>().unwrap())
            );
        }
        constraints.push(Constraint { name: label, intervals });
    }
    /* Ugly Ticket Parsing */
    let mut my_ticket: Vec<u32> = Vec::new();
    let mut lines = my_ticket_str.lines();
    lines.next(); // skip the "your ticket:" header line
    let numbers = lines.next().unwrap().to_string();
    for number in numbers.split(",") {
        my_ticket.push(number.trim().parse::<u32>().unwrap());
    }
    /* Ugly Nearby Ticket Parsing */
    let mut nearby_tickets: Vec<Vec<u32>> = Vec::new();
    let mut lines = nearby_tickets_str.lines();
    lines.next(); // skip the "nearby tickets:" header line
    for line in lines {
        let mut nearby_ticket: Vec<u32> = Vec::new();
        let numbers = line.to_string();
        for number in numbers.split(",") {
            nearby_ticket.push(number.trim().parse::<u32>().unwrap());
        }
        nearby_tickets.push(nearby_ticket);
    }
    // Part 1: sum every value that satisfies no rule at all; tickets that
    // contain such a value are discarded for part 2.
    // NOTE(review): despite the name, `valid_count` accumulates the *invalid*
    // values (the part-1 "error rate") — consider renaming.
    let mut valid_count = 0;
    let mut nearby_valid_tickets: Vec<Vec<u32>> = Vec::new();
    for nearby_ticket in &nearby_tickets {
        let mut ticket_is_valid = true;
        for number in nearby_ticket {
            let mut is_valid: bool = false;
            for constraint in &constraints {
                for (min, max) in &constraint.intervals {
                    if min <= number && number <= max {
                        is_valid = true;
                        break;
                    }
                }
                if is_valid {
                    break;
                }
            }
            if !is_valid {
                valid_count += number;
                ticket_is_valid = false;
            }
        }
        if ticket_is_valid {
            nearby_valid_tickets.push(nearby_ticket.clone());
        }
    }
    println!("Result 1: {}", valid_count);
    // Part 2: attribution[pos] = set of field names still possible for column pos.
    let mut attribution: Vec<HashSet<String>> = Vec::new();
    let mut hs: HashSet<String> = HashSet::new();
    for constraint in &constraints {
        hs.insert(constraint.name.clone());
    }
    // Start with every field possible for every column.
    // (`pos`/`number` are unused here — only the column count matters.)
    for (pos, number) in my_ticket.iter().enumerate() {
        attribution.push(hs.clone());
    }
    // Eliminate a field from a column whenever some valid ticket has a value
    // in that column that breaks the field's ranges.
    for nearby_ticket in &nearby_valid_tickets {
        for (pos, number) in nearby_ticket.iter().enumerate() {
            for constraint in &constraints {
                let mut is_valid: bool = false;
                for (min, max) in &constraint.intervals {
                    if (min <= number && number <= max) {
                        is_valid = true;
                        break;
                    }
                }
                if !is_valid {
                    attribution[pos].remove(&constraint.name);
                }
            }
        }
    }
    // Constraint propagation: repeatedly pin columns that have exactly one
    // candidate left, then remove that field from all other columns.
    let mut attributeds: HashSet<String> = HashSet::new();
    let mut attributions: HashMap<String, usize> = HashMap::new();
    loop {
        let mut finished: bool = true;
        for (i, possibility) in &mut attribution.iter().enumerate() {
            match possibility.len() {
                0 => (), // already resolved (its single candidate was removed below)
                1 => {
                    finished = false;
                    let values: Vec<&String> = possibility.iter().collect();
                    attributeds.insert(values[0].to_string());
                    attributions.insert(values[0].to_string(), i);
                },
                _ => {
                    finished = false;
                }
            }
        }
        if finished {
            break;
        }
        for attributed in &attributeds {
            for possibility in &mut attribution {
                possibility.remove(attributed);
            }
        }
    }
    let mut result: i64 = 1;
    for (key, value) in attributions {
        if key.starts_with("departure") {
            result *= my_ticket[value] as i64;
        } else {
            // intentionally nothing: non-"departure" fields don't contribute
        }
    }
    println!("Result 2: {}", result);
}
use crate::openrtb3::bool;
use serde::{Deserialize, Serialize};

use super::{
    data_asset_format::DataAssetFormat, image_asset_format::ImageAssetFormat,
    title_asset_format::TitleAssetFormat, video_placement::VideoPlacement,
};

/// Format requirements for a single native asset.
///
/// NOTE(review): presumably exactly one of `title`/`img`/`video`/`data` is
/// meant to be populated per asset; that is not enforced here — confirm
/// against the spec/callers.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct AssetFormat {
    /// Asset identifier.
    id: i64,
    /// Whether this asset is required; deserializes to false when the field
    /// is absent (via `bool::Bool::default_false`).
    #[serde(default = "bool::Bool::default_false")]
    req: bool::Bool,
    /// Title asset constraints, if this is a title asset.
    title: Option<TitleAssetFormat>,
    /// Image asset constraints, if this is an image asset.
    img: Option<ImageAssetFormat>,
    /// Video constraints, if this is a video asset.
    video: Option<VideoPlacement>,
    /// Data asset constraints, if this is a data asset.
    data: Option<DataAssetFormat>,
    /// Extension object for non-standard fields.
    ext: Option<AssetFormatExt>,
}

/// Placeholder extension object for [`AssetFormat`]; currently empty.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct AssetFormatExt {}
// Line comments are anything after ‘//’ and extend to the end of the line. let x = 5; // This is also a line comment. // If you have a long explanation for something, you can put line comments next // to each other. Put a space between the // and your comment so that it’s // more readable. /// Adds one to the number given. /// /// # Examples /// /// ``` /// let five = 5; /// /// assert_eq!(6, add_one(5)); /// # fn add_one(x: i32) -> i32 { /// # x + 1 /// # } /// ``` fn add_one(x: i32) -> i32 { x + 1 } //! # The Rust Standard Library //! //! The Rust Standard Library provides the essential runtime //! functionality for building portable Rust software.
use chrono::Utc; use taskwarrior_rust::{taskstorage, Operation, Server, DB}; use uuid::Uuid; fn newdb() -> DB { DB::new(Box::new(taskstorage::InMemoryStorage::new())) } #[test] fn test_sync() { let mut server = Server::new(); let mut db1 = newdb(); db1.sync("me", &mut server).unwrap(); let mut db2 = newdb(); db2.sync("me", &mut server).unwrap(); // make some changes in parallel to db1 and db2.. let uuid1 = Uuid::new_v4(); db1.apply(Operation::Create { uuid: uuid1 }).unwrap(); db1.apply(Operation::Update { uuid: uuid1, property: "title".into(), value: Some("my first task".into()), timestamp: Utc::now(), }) .unwrap(); let uuid2 = Uuid::new_v4(); db2.apply(Operation::Create { uuid: uuid2 }).unwrap(); db2.apply(Operation::Update { uuid: uuid2, property: "title".into(), value: Some("my second task".into()), timestamp: Utc::now(), }) .unwrap(); // and synchronize those around db1.sync("me", &mut server).unwrap(); db2.sync("me", &mut server).unwrap(); db1.sync("me", &mut server).unwrap(); assert_eq!(db1.sorted_tasks(), db2.sorted_tasks()); // now make updates to the same task on both sides db1.apply(Operation::Update { uuid: uuid2, property: "priority".into(), value: Some("H".into()), timestamp: Utc::now(), }) .unwrap(); db2.apply(Operation::Update { uuid: uuid2, property: "project".into(), value: Some("personal".into()), timestamp: Utc::now(), }) .unwrap(); // and synchronize those around db1.sync("me", &mut server).unwrap(); db2.sync("me", &mut server).unwrap(); db1.sync("me", &mut server).unwrap(); assert_eq!(db1.sorted_tasks(), db2.sorted_tasks()); } #[test] fn test_sync_create_delete() { let mut server = Server::new(); let mut db1 = newdb(); db1.sync("me", &mut server).unwrap(); let mut db2 = newdb(); db2.sync("me", &mut server).unwrap(); // create and update a task.. 
let uuid = Uuid::new_v4(); db1.apply(Operation::Create { uuid }).unwrap(); db1.apply(Operation::Update { uuid: uuid, property: "title".into(), value: Some("my first task".into()), timestamp: Utc::now(), }) .unwrap(); // and synchronize those around db1.sync("me", &mut server).unwrap(); db2.sync("me", &mut server).unwrap(); db1.sync("me", &mut server).unwrap(); assert_eq!(db1.sorted_tasks(), db2.sorted_tasks()); // delete and re-create the task on db1 db1.apply(Operation::Delete { uuid }).unwrap(); db1.apply(Operation::Create { uuid }).unwrap(); db1.apply(Operation::Update { uuid: uuid, property: "title".into(), value: Some("my second task".into()), timestamp: Utc::now(), }) .unwrap(); // and on db2, update a property of the task db2.apply(Operation::Update { uuid: uuid, property: "project".into(), value: Some("personal".into()), timestamp: Utc::now(), }) .unwrap(); db1.sync("me", &mut server).unwrap(); db2.sync("me", &mut server).unwrap(); db1.sync("me", &mut server).unwrap(); assert_eq!(db1.sorted_tasks(), db2.sorted_tasks()); }
use crate::utility;
use crate::utility::{random_f64, random_f64_range};
use std::fmt;
use std::ops::{Add, AddAssign, Div, DivAssign, Index, IndexMut, Mul, MulAssign, Neg, Sub};

/// Simple vec3 class
/// Laid out as x, y, z
#[derive(Debug, Default, PartialEq, Clone, Copy)]
pub struct Vec3(f64, f64, f64);

#[allow(dead_code)]
impl Vec3 {
    /// Construct a vector from its three components.
    pub fn new(x: f64, y: f64, z: f64) -> Self {
        Vec3(x, y, z)
    }

    pub fn x(&self) -> f64 {
        self.0
    }

    pub fn y(&self) -> f64 {
        self.1
    }

    pub fn z(&self) -> f64 {
        self.2
    }

    /// Euclidean length.
    pub fn length(&self) -> f64 {
        self.length_squared().sqrt()
    }

    /// Squared length — cheaper than `length` (no sqrt).
    pub fn length_squared(&self) -> f64 {
        self.0 * self.0 + self.1 * self.1 + self.2 * self.2
    }

    /// Dot product of two vectors.
    pub fn dot(u: &Vec3, v: &Vec3) -> f64 {
        u.0 * v.0 + u.1 * v.1 + u.2 * v.2
    }

    /// Cross product of two vectors.
    pub fn cross(u: &Vec3, v: &Vec3) -> Self {
        Vec3(
            u.1 * v.2 - u.2 * v.1,
            u.2 * v.0 - u.0 * v.2,
            u.0 * v.1 - u.1 * v.0,
        )
    }

    /// Scale `v` to unit length.
    pub fn unit_vector(v: Vec3) -> Self {
        v / v.length()
    }

    /// Get a random unit vector.
    // NOTE(review): each component is drawn independently by `random_f64`,
    // so the result is generally NOT unit length — the doc comment above
    // looks inaccurate; confirm against callers.
    pub fn random() -> Self {
        Vec3(random_f64(), random_f64(), random_f64())
    }

    /// Get a random vector with a given min/max range.
    pub fn random_range(min: f64, max: f64) -> Self {
        Vec3(
            random_f64_range(min, max),
            random_f64_range(min, max),
            random_f64_range(min, max),
        )
    }

    /// Get a random vector within a unit sphere.
    // Rejection sampling: draw from the enclosing cube until the point
    // falls strictly inside the sphere.
    pub fn random_in_unit_sphere() -> Self {
        loop {
            let p = Vec3::random_range(-1.0, 1.0);
            if p.length_squared() >= 1.0 {
                continue;
            }
            return p;
        }
    }

    /// Get a Lambertian distributed unit vector, see Section 8.5.
    pub fn random_unit_vector() -> Self {
        // Uniform point on the unit sphere via cylindrical coordinates.
        let a = random_f64_range(0.0, 2.0 * utility::PI);
        let z = random_f64_range(-1.0, 1.0);
        let r = f64::sqrt(1.0 - z * z);
        Vec3(r * a.cos(), r * a.sin(), z)
    }

    /// Get an alternative diffuse vector, see Section 8.6.
    pub fn random_in_hemisphere(normal: &Vec3) -> Self {
        let in_unit_sphere = Vec3::random_in_unit_sphere();
        if Vec3::dot(&in_unit_sphere, normal) > 0.0
        // In the same hemisphere as the normal
        {
            in_unit_sphere
        } else {
            -in_unit_sphere
        }
    }

    /// Mirror-reflect `v` about the surface normal `n`.
    pub fn reflect(v: &Vec3, n: &Vec3) -> Self {
        *v - 2.0 * Vec3::dot(v, n) * (*n)
    }

    /// Refract `uv` through a surface with normal `n` and refraction
    /// ratio `etai_over_etat`.
    // NOTE(review): compared to "Ray Tracing in One Weekend", the
    // `parallel`/`perp` names appear swapped, but their sum (the returned
    // value) matches the book's formula.
    pub fn refract(uv: &Vec3, n: &Vec3, etai_over_etat: f64) -> Vec3 {
        let cos_theta = Vec3::dot(&-*uv, n);
        let r_out_parallel = etai_over_etat * (*uv + cos_theta * (*n));
        let r_out_perp = -f64::sqrt(1.0 - r_out_parallel.length_squared()) * (*n);
        r_out_parallel + r_out_perp
    }

    /// Random point inside the unit disk in the z = 0 plane.
    pub fn random_in_unit_disk() -> Self {
        loop {
            let p = Vec3(
                random_f64_range(-1.0, 1.0),
                random_f64_range(-1.0, 1.0),
                0.0,
            );
            if p.length_squared() >= 1.0 {
                continue;
            }
            return p;
        }
    }
}

impl Neg for Vec3 {
    type Output = Vec3;

    fn neg(self) -> Self::Output {
        Vec3(-self.0, -self.1, -self.2)
    }
}

impl Index<usize> for Vec3 {
    type Output = f64;

    // Component access by index 0/1/2; panics otherwise.
    fn index(&self, index: usize) -> &Self::Output {
        match index {
            0 => &self.0,
            1 => &self.1,
            2 => &self.2,
            _ => panic!("Vec3 index out of bounds"),
        }
    }
}

impl IndexMut<usize> for Vec3 {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        match index {
            0 => &mut self.0,
            1 => &mut self.1,
            2 => &mut self.2,
            _ => panic!("Vec3 index out of bounds"),
        }
    }
}

impl Add for Vec3 {
    type Output = Self;

    fn add(self, other: Self) -> Self {
        Vec3(self.0 + other.0, self.1 + other.1, self.2 + other.2)
    }
}

impl AddAssign for Vec3 {
    fn add_assign(&mut self, other: Self) {
        self.0 += other.0;
        self.1 += other.1;
        self.2 += other.2;
    }
}

impl Sub for Vec3 {
    type Output = Self;

    fn sub(self, other: Self) -> Self {
        Vec3(self.0 - other.0, self.1 - other.1, self.2 - other.2)
    }
}

// Component-wise (Hadamard) product.
impl Mul for Vec3 {
    type Output = Self;

    fn mul(self, rhs: Self) -> Self {
        Vec3(self.0 * rhs.0, self.1 * rhs.1, self.2 * rhs.2)
    }
}

// Scalar * vector.
impl Mul<Vec3> for f64 {
    type Output = Vec3;

    fn mul(self, rhs: Vec3) -> Self::Output {
        Vec3(self * rhs.0, self * rhs.1, self * rhs.2)
    }
}

// Vector * scalar, delegating to the scalar * vector impl above.
impl Mul<f64> for Vec3 {
    type Output = Self;

    fn mul(self, rhs: f64) -> Self::Output {
        rhs * self
    }
}

impl MulAssign<f64> for Vec3 {
    fn mul_assign(&mut self, rhs: f64) {
        self.0 *= rhs;
        self.1 *= rhs;
        self.2 *= rhs;
    }
}

impl Div<f64> for Vec3 {
    type Output = Self;

    fn div(self, rhs: f64) -> Self::Output {
        (1.0 / rhs) * self
    }
}

impl DivAssign<f64> for Vec3 {
    fn div_assign(&mut self, rhs: f64) {
        *self *= 1.0 / rhs;
    }
}

impl fmt::Display for Vec3 {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{} {} {}", self.0, self.1, self.2)
    }
}

// Point3 and Vec3 are just aliases for each other
// TODO - Probably should use newtype here? but it results in all annoying
// non-automatic from conversion in operators, even with deriving
pub type Point3 = Vec3;

// Use Newtype pattern for Color
macro_attr! {
    /// RGB Color (r, g, b)
    #[derive(Clone, Copy, Debug, NewtypeAdd!, NewtypeAddAssign!, NewtypeSub!,
             NewtypeMul!, NewtypeMulAssign!(f64), NewtypeDiv!(f64), NewtypeDivAssign!(f64),
             NewtypeDeref!, NewtypeDerefMut!)]
    pub struct Color(Vec3);
}

// TODO - Any way to derive this?
// Scalar * color (the newtype derives above don't cover this direction).
impl Mul<Color> for f64 {
    type Output = Color;

    fn mul(self, rhs: Color) -> Self::Output {
        Color(Vec3(self * rhs.x(), self * rhs.y(), self * rhs.z()))
    }
}

// impl fmt::Display for Color {
//     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
//         // Write the translated [0,255] value of each color component.
//         write!(
//             f,
//             "{} {} {}",
//             (255.999 * self.x()) as i32,
//             (255.999 * self.y()) as i32,
//             (255.999 * self.z()) as i32,
//         )
//     }
// }

#[allow(dead_code)]
impl Color {
    // Create a new color with rgb values.
    pub fn new(r: f64, g: f64, b: f64) -> Self {
        Color(Vec3(r, g, b))
    }

    /// Explicit conversion from Vec3 to color.
    pub fn from_vec(vec: Vec3) -> Self {
        Color(vec)
    }

    /// Random color, each channel drawn independently by `random_f64`.
    pub fn random() -> Self {
        Color(Vec3(random_f64(), random_f64(), random_f64()))
    }

    /// Average the accumulated samples, gamma-correct, and format the pixel
    /// as an "r g b\n" string with components mapped to [0, 255].
    pub fn get_color_string(&self, samples_per_pixel: i32) -> String {
        let mut r: f64 = self.x();
        let mut g: f64 = self.y();
        let mut b: f64 = self.z();

        // Divide the color total by the number of samples and gamma-correct for gamma=2.0.
        let scale = 1.0 / samples_per_pixel as f64;
        r = f64::sqrt(scale * r);
        g = f64::sqrt(scale * g);
        b = f64::sqrt(scale * b);

        // Write the translated [0,255] value of each color component.
        format!(
            "{} {} {}\n",
            (256.0 * utility::clamp(r, 0.0, 0.999)) as i32,
            (256.0 * utility::clamp(g, 0.0, 0.999)) as i32,
            (256.0 * utility::clamp(b, 0.0, 0.999)) as i32,
        )
    }
}

// TODO - more tests
#[cfg(test)]
mod tests {
    use crate::vec3::Vec3;

    #[test]
    fn vec3_binary_ops() {
        assert_eq!(
            Vec3(4.0, 3.0, 2.0) - Vec3(2.0, 1.0, 0.5),
            Vec3(2.0, 2.0, 1.5)
        );
    }
}
//! # Low level reading use super::file::*; use super::parser; use assembly_core::reader::{FileError, FileResult}; use assembly_core::{nom::Finish, reader::ParseAt}; use std::io::prelude::*; use std::{io::SeekFrom, num::NonZeroU32}; /// A low level reader class pub struct LevelReader<T> { inner: T, } impl<T> LevelReader<T> { pub fn new(inner: T) -> Self { Self { inner } } } fn get_offset(header: &FileMetaChunkData, id: u32) -> Option<NonZeroU32> { match id { 2000 => NonZeroU32::new(header.chunk_2000_offset), 2001 => NonZeroU32::new(header.chunk_2001_offset), 2002 => NonZeroU32::new(header.chunk_2002_offset), _ => None, } } impl<T> LevelReader<T> where T: Read + Seek, { /// Seek to the chunk data pub fn seek_to(&mut self, header: &ChunkHeader) -> FileResult<()> { self.inner.seek(SeekFrom::Start(header.offset.into()))?; Ok(()) } pub fn load_buf(&mut self, base: u32, header: &ChunkHeader) -> FileResult<Vec<u8>> { self.seek_to(header)?; let len = header.size - (header.offset - base); let mut buf = vec![0; len as usize]; self.inner.read_exact(&mut buf[..])?; Ok(buf) } /// Seek meta pub fn get_chunk( &mut self, header: &FileMetaChunkData, id: u32, ) -> Option<FileResult<ChunkHeader>> { get_offset(header, id).map(|offset| { self.inner.seek(SeekFrom::Start(u32::from(offset).into()))?; self.get_chunk_header() }) } /// Load a chunk header pub fn get_chunk_header(&mut self) -> FileResult<ChunkHeader> { let mut header_bytes = [0; 20]; self.inner.read_exact(&mut header_bytes)?; let (_rest, header) = parser::parse_chunk_header(&header_bytes) .finish() .at(0xbeef, &header_bytes)?; Ok(header) } /// Get the chunk meta data pub fn get_meta_chunk_data(&mut self) -> FileResult<FileMetaChunkData> { let mut meta_chunk_data_bytes = [0u8; 20]; self.inner.read_exact(&mut meta_chunk_data_bytes)?; let (_rest, meta_chunk_data) = parser::parse_file_meta_chunk_data(&meta_chunk_data_bytes) .finish() .at(0xbeef, &meta_chunk_data_bytes)?; Ok(meta_chunk_data) } /// Get the meta chunk pub fn 
get_meta_chunk(&mut self) -> FileResult<FileMetaChunk> { let header = self.get_chunk_header()?; self.inner.seek(SeekFrom::Start(header.offset.into()))?; let data = self.get_meta_chunk_data()?; Ok(FileMetaChunk { header, data }) } pub fn read_level_file(&mut self) -> FileResult<Level> { let header_1000 = self.get_chunk_header()?; if !header_1000.id == 1000 { return Err(FileError::Custom("Expected first chunk to be of type 1000")); } self.seek_to(&header_1000)?; let meta = self.get_meta_chunk_data()?; let env = self .get_chunk(&meta, 2000) .map(|res| { let header_2000 = res?; if header_2000.id != 2000 { return Err(FileError::Custom("Expected 2000 chunk to be of type 2000")); } let buf = self.load_buf(meta.chunk_2000_offset, &header_2000)?; let env = parser::parse_env_chunk_data(&buf) .finish() .at(meta.chunk_2000_offset.into(), &buf)? .1; // first section let sec1_base = (env.section1_address - header_2000.offset) as usize; let sec1 = parser::parse_section1(meta.version, &buf[sec1_base..]) .finish() .at(env.section1_address.into(), &buf[sec1_base..])? .1; // sky section let sky_base = (env.sky_address - header_2000.offset) as usize; let sky = parser::parse_sky_section(&buf[sky_base..]) .finish() .at(env.sky_address.into(), &buf[sky_base..])? .1; // TODO: third section Ok(Environment { sec1, sky }) }) .transpose()?; let objects = self .get_chunk(&meta, 2001) .map(|res| { let header_2001 = res?; if header_2001.id != 2001 { return Err(FileError::Custom("Expected 2001 chunk to be of type 2001")); } let buf = self.load_buf(meta.chunk_2001_offset, &header_2001)?; let obj = parser::parse_objects_chunk_data(meta.version, &buf) .finish() .at(meta.chunk_2001_offset.into(), &buf)? .1; let obj = obj .parse_settings() .map_err(|_| FileError::Custom("Failed to parse object settings"))?; Ok(obj.objects) }) .transpose()? .unwrap_or_default(); Ok(Level { env, objects }) } }
// Copyright 2020 The MWC Developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::swap::message::Message; use crate::swap::swap::SwapJournalRecord; use crate::swap::types::{Action, SwapTransactionsConfirmations}; use crate::swap::{Context, ErrorKind, Swap}; use std::fmt; /// We need to reprty post transaction we we don't see it on the blockchain pub const POST_MWC_RETRY_PERIOD: i64 = 300; /// For BTC - let's use same period. BTC is visible into the mem pool quickly, so it is expected to be delivered after 5 minutes... pub const POST_SECONDARY_RETRY_PERIOD: i64 = 300; /// Retry period for the messages, including files pub const SEND_MESSAGE_RETRY_PERIOD: i64 = 300; /// Journal messages that are repeatable for State pub const JOURNAL_CANCELLED_BY_USER: &str = "Cancelled by user"; /// Journal messages that are repeatable for State pub const JOURNAL_CANCELLED_BY_TIMEOUT: &str = "Cancelled as expired"; /// Journal messages that are repeatable for State pub const JOURNAL_CANCELLED_BYER_LOCK_TOO_MUCH_FUNDS: &str = "Cancelled because the buyer posted funds greater than the agreed upon amount to the lock account"; /// Journal messages that are repeatable for State pub const JOURNAL_NOT_LOCKED: &str = "Funds are not locking any more, switching back to waiting"; /// Height limit to bump the fees for BTC. 
If BTC Tx still in memory pool for so many blocks, /// we can increase the fees pub const SECONDARY_HEIGHT_TO_INCREASE_FEE: u64 = 5; /// Multiplier to increase the fees. 20% per every 5 blocks if it still stale. pub const SECONDARY_INCREASE_FEE_K: f32 = 1.2; // In case the Tx is not mined during 5 blocks, fee will be increased by 20%. /// StateId of the swap finite state machine. #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum StateId { // ---------------- Seller Happy path ----------------- /// Seller created Offer (Initial state for Seller) SellerOfferCreated, /// Seller want to send the offer message SellerSendingOffer, /// Seller waiting for the message to be accepted SellerWaitingForAcceptanceMessage, /// Seller wait for the Buyer to start locking the funds (optional, depend on swap offer) SellerWaitingForBuyerLock, /// Seller need to post MWC lock slate SellerPostingLockMwcSlate, /// Seller waiting for Locks SellerWaitingForLockConfirmations, /// Seller waiting for InitRedeem message from the Buyer SellerWaitingForInitRedeemMessage, /// Seller responds Back to Buyer with Init redeem message SellerSendingInitRedeemMessage, /// Seller waiting when Buyer will redeem MWC SellerWaitingForBuyerToRedeemMwc, /// Seller knows the secret and now it can redeem BTC SellerRedeemSecondaryCurrency, /// Seller waiting for confirmations on BTC SellerWaitingForRedeemConfirmations, /// Seller complete the swap process SellerSwapComplete, // ------------- Seller calcellation with refund path (secondary happy path, redeem wasn't made yet) ----------------- /// Seller waiting when Refunds can be issued. SellerWaitingForRefundHeight, /// Seller posting refund Slate SellerPostingRefundSlate, /// Seller waiting for refund confirmations SellerWaitingForRefundConfirmations, /// Seller cancelled and get a refund. 
SellerCancelledRefunded, /// Simple cancelled State for the seller (never was locked, refunded) SellerCancelled, // -------------- Buyer happy path ---------------- /// Buyer offer is created (initial state for the Buyer) BuyerOfferCreated, /// Buyer sending accept offer message BuyerSendingAcceptOfferMessage, /// If Seller lock first, let's wait for that BuyerWaitingForSellerToLock, /// Buyer waiting until enough BTC will be posted to the account. BuyerPostingSecondaryToMultisigAccount, /// Wating to needed number of cinfirmations for both locks BuyerWaitingForLockConfirmations, /// Buyer sending InitRedeem message to Seller BuyerSendingInitRedeemMessage, /// Buyer waiting for a seller to respond with data to finalize Redeem slate BuyerWaitingForRespondRedeemMessage, /// Buyer post MWC redeem slate BuyerRedeemMwc, /// Buyer waiting for confirmation of the redeem. BuyerWaitForRedeemMwcConfirmations, /// Buyer is done with a swap sucessfully BuyerSwapComplete, // ------------- Buyer calcellation with refund path ----------------- /// Waiting until BTC lock time will be expired BuyerWaitingForRefundTime, /// Posting refund BTC transaction BuyerPostingRefundForSecondary, /// Buyer waiting until BTC transaction will be confirmed BuyerWaitingForRefundConfirmations, /// Trade is cancelled and already refunded BuyerCancelledRefunded, /// SImple cencelled stated for the Buyer, nothing was locked, no refunds needed BuyerCancelled, } impl fmt::Display for StateId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let disp = match &self { StateId::SellerOfferCreated => "Offer is created", StateId::SellerSendingOffer => "Send offer message to Buyer", StateId::SellerWaitingForAcceptanceMessage => "Waiting for Buyer to accept an offer", StateId::SellerWaitingForBuyerLock => "Waiting for Buyer to start locking", StateId::SellerPostingLockMwcSlate => "Posting Lock MWC slate", StateId::SellerWaitingForLockConfirmations => "Waiting for funds to be locked", 
StateId::SellerWaitingForInitRedeemMessage => "Waiting for Buyer to init redeem", StateId::SellerSendingInitRedeemMessage => "Send init redeem response to Buyer", StateId::SellerWaitingForBuyerToRedeemMwc => "Waiting for Buyer to redeem MWC", StateId::SellerRedeemSecondaryCurrency => "Redeem Secondary Currency", StateId::SellerWaitingForRedeemConfirmations => { "Waiting for confirmations of Redeem transaction" } StateId::SellerSwapComplete => "Complete", StateId::SellerWaitingForRefundHeight => "Waiting when refund Slate can be posted", StateId::SellerPostingRefundSlate => "Post MWC refund slate", StateId::SellerWaitingForRefundConfirmations => { "Waiting for Refund transaction confirmations" } StateId::SellerCancelledRefunded => "Seller swap was cancelled, refund was processed", StateId::SellerCancelled => { "Seller swap was cancelled, nothing was locked, no need to refund" } StateId::BuyerOfferCreated => "Offer is ready to Accept", StateId::BuyerSendingAcceptOfferMessage => "Send offer message to Seller", StateId::BuyerWaitingForSellerToLock => "Waiting for Seller to start locking", StateId::BuyerPostingSecondaryToMultisigAccount => { "Buyer posting Coins to lock account" } StateId::BuyerWaitingForLockConfirmations => "Waiting for funds to be locked", StateId::BuyerSendingInitRedeemMessage => "Send Init redeem message to Seller", StateId::BuyerWaitingForRespondRedeemMessage => { "Waiting for Redeem response form Seller" } StateId::BuyerRedeemMwc => "Redeem MWC", StateId::BuyerWaitForRedeemMwcConfirmations => { "Waiting for confirmations of Redeem transaction" } StateId::BuyerSwapComplete => "Complete", StateId::BuyerWaitingForRefundTime => "Waiting when refund Transaction can be posted", StateId::BuyerPostingRefundForSecondary => "Post Refund Transaction", StateId::BuyerWaitingForRefundConfirmations => { "Waiting for Refund transaction confirmations" } StateId::BuyerCancelledRefunded => "Buyer swap cancelled, refund was processed", StateId::BuyerCancelled => { 
"Buyer swap was cancelled, nothing was locked, no need to refund" } }; write!(f, "{}", disp) } } impl StateId { /// return true if this state is final and swap trade is done pub fn is_final_state(&self) -> bool { match self { StateId::SellerSwapComplete | StateId::BuyerSwapComplete | StateId::SellerCancelled | StateId::BuyerCancelled | StateId::SellerCancelledRefunded | StateId::BuyerCancelledRefunded => true, _ => false, } } /// Return true if trade is in initial state so it can be cancelled. Swap Marketplace feature. pub fn is_initial_state(&self) -> bool { match self { StateId::SellerOfferCreated | StateId::SellerSendingOffer | StateId::SellerWaitingForAcceptanceMessage | StateId::SellerWaitingForBuyerLock | StateId::BuyerOfferCreated | StateId::BuyerSendingAcceptOfferMessage | StateId::BuyerWaitingForSellerToLock => true, _ => false, } } /// Return true if trade is in initial state so it can be cancelled. Swap Marketplace feature. pub fn is_cancelled_no_refund(&self) -> bool { match self { StateId::SellerCancelled | StateId::BuyerCancelled => true, _ => false, } } /// Return state as a command. 
/// Convert this state id into its string name. It is easier to parse
/// on the next level (QT wallet).
pub fn to_cmd_str(&self) -> String {
    let cmd_str = match &self {
        StateId::SellerOfferCreated => "SellerOfferCreated",
        StateId::SellerSendingOffer => "SellerSendingOffer",
        StateId::SellerWaitingForAcceptanceMessage => "SellerWaitingForAcceptanceMessage",
        StateId::SellerWaitingForBuyerLock => "SellerWaitingForBuyerLock",
        StateId::SellerPostingLockMwcSlate => "SellerPostingLockMwcSlate",
        StateId::SellerWaitingForLockConfirmations => "SellerWaitingForLockConfirmations",
        StateId::SellerWaitingForInitRedeemMessage => "SellerWaitingForInitRedeemMessage",
        StateId::SellerSendingInitRedeemMessage => "SellerSendingInitRedeemMessage",
        StateId::SellerWaitingForBuyerToRedeemMwc => "SellerWaitingForBuyerToRedeemMwc",
        StateId::SellerRedeemSecondaryCurrency => "SellerRedeemSecondaryCurrency",
        StateId::SellerWaitingForRedeemConfirmations => "SellerWaitingForRedeemConfirmations",
        StateId::SellerSwapComplete => "SellerSwapComplete",
        StateId::SellerWaitingForRefundHeight => "SellerWaitingForRefundHeight",
        StateId::SellerPostingRefundSlate => "SellerPostingRefundSlate",
        StateId::SellerWaitingForRefundConfirmations => "SellerWaitingForRefundConfirmations",
        StateId::SellerCancelledRefunded => "SellerCancelledRefunded",
        StateId::SellerCancelled => "SellerCancelled",
        StateId::BuyerOfferCreated => "BuyerOfferCreated",
        StateId::BuyerSendingAcceptOfferMessage => "BuyerSendingAcceptOfferMessage",
        StateId::BuyerWaitingForSellerToLock => "BuyerWaitingForSellerToLock",
        StateId::BuyerPostingSecondaryToMultisigAccount => {
            "BuyerPostingSecondaryToMultisigAccount"
        }
        StateId::BuyerWaitingForLockConfirmations => "BuyerWaitingForLockConfirmations",
        StateId::BuyerSendingInitRedeemMessage => "BuyerSendingInitRedeemMessage",
        StateId::BuyerWaitingForRespondRedeemMessage => "BuyerWaitingForRespondRedeemMessage",
        StateId::BuyerRedeemMwc => "BuyerRedeemMwc",
        StateId::BuyerWaitForRedeemMwcConfirmations => "BuyerWaitForRedeemMwcConfirmations",
        StateId::BuyerSwapComplete => "BuyerSwapComplete",
        StateId::BuyerWaitingForRefundTime => "BuyerWaitingForRefundTime",
        StateId::BuyerPostingRefundForSecondary => "BuyerPostingRefundForSecondary",
        StateId::BuyerWaitingForRefundConfirmations => "BuyerWaitingForRefundConfirmations",
        StateId::BuyerCancelledRefunded => "BuyerCancelledRefunded",
        StateId::BuyerCancelled => "BuyerCancelled",
    };
    String::from(cmd_str)
}

/// Convert a string name back into a `StateId` instance.
/// The names must match `to_cmd_str` exactly; unknown input yields
/// `ErrorKind::Generic`.
pub fn from_cmd_str(str: &str) -> Result<Self, ErrorKind> {
    match str {
        "SellerOfferCreated" => Ok(StateId::SellerOfferCreated),
        "SellerSendingOffer" => Ok(StateId::SellerSendingOffer),
        "SellerWaitingForAcceptanceMessage" => Ok(StateId::SellerWaitingForAcceptanceMessage),
        "SellerWaitingForBuyerLock" => Ok(StateId::SellerWaitingForBuyerLock),
        "SellerPostingLockMwcSlate" => Ok(StateId::SellerPostingLockMwcSlate),
        "SellerWaitingForLockConfirmations" => Ok(StateId::SellerWaitingForLockConfirmations),
        "SellerWaitingForInitRedeemMessage" => Ok(StateId::SellerWaitingForInitRedeemMessage),
        "SellerSendingInitRedeemMessage" => Ok(StateId::SellerSendingInitRedeemMessage),
        "SellerWaitingForBuyerToRedeemMwc" => Ok(StateId::SellerWaitingForBuyerToRedeemMwc),
        "SellerRedeemSecondaryCurrency" => Ok(StateId::SellerRedeemSecondaryCurrency),
        "SellerWaitingForRedeemConfirmations" => {
            Ok(StateId::SellerWaitingForRedeemConfirmations)
        }
        "SellerSwapComplete" => Ok(StateId::SellerSwapComplete),
        "SellerWaitingForRefundHeight" => Ok(StateId::SellerWaitingForRefundHeight),
        "SellerPostingRefundSlate" => Ok(StateId::SellerPostingRefundSlate),
        "SellerWaitingForRefundConfirmations" => {
            Ok(StateId::SellerWaitingForRefundConfirmations)
        }
        "SellerCancelledRefunded" => Ok(StateId::SellerCancelledRefunded),
        "SellerCancelled" => Ok(StateId::SellerCancelled),
        "BuyerOfferCreated" => Ok(StateId::BuyerOfferCreated),
        "BuyerSendingAcceptOfferMessage" => Ok(StateId::BuyerSendingAcceptOfferMessage),
        "BuyerWaitingForSellerToLock" => Ok(StateId::BuyerWaitingForSellerToLock),
        "BuyerPostingSecondaryToMultisigAccount" => {
            Ok(StateId::BuyerPostingSecondaryToMultisigAccount)
        }
        "BuyerWaitingForLockConfirmations" => Ok(StateId::BuyerWaitingForLockConfirmations),
        "BuyerSendingInitRedeemMessage" => Ok(StateId::BuyerSendingInitRedeemMessage),
        "BuyerWaitingForRespondRedeemMessage" => {
            Ok(StateId::BuyerWaitingForRespondRedeemMessage)
        }
        "BuyerRedeemMwc" => Ok(StateId::BuyerRedeemMwc),
        "BuyerWaitForRedeemMwcConfirmations" => Ok(StateId::BuyerWaitForRedeemMwcConfirmations),
        "BuyerSwapComplete" => Ok(StateId::BuyerSwapComplete),
        "BuyerWaitingForRefundTime" => Ok(StateId::BuyerWaitingForRefundTime),
        "BuyerPostingRefundForSecondary" => Ok(StateId::BuyerPostingRefundForSecondary),
        "BuyerWaitingForRefundConfirmations" => Ok(StateId::BuyerWaitingForRefundConfirmations),
        "BuyerCancelledRefunded" => Ok(StateId::BuyerCancelledRefunded),
        "BuyerCancelled" => Ok(StateId::BuyerCancelled),
        _ => Err(ErrorKind::Generic(format!("Unknown state value {}", str))),
    }
}
}

/// State machine input: what the caller wants the state machine to do next.
#[derive(Debug)]
pub enum Input {
    /// User request to cancel the trade.
    Cancel,
    /// Check what actions are currently required.
    Check,
    /// Execute the currently pending action.
    Execute,
    /// Process an incoming swap message from the counterparty.
    IncomeMessage(Message),
}

/// Result of processing one `Input` through a state.
#[derive(Debug)]
pub struct StateProcessRespond {
    /// Next state (becomes the new current state)
    pub next_state_id: StateId,
    /// Next action that is expected from the user, if any
    pub action: Option<Action>,
    /// Time limit (seconds timestamp) for this action
    pub time_limit: Option<i64>,
    /// New swap journal records produced by this step
    pub journal: Vec<SwapJournalRecord>,
    /// Last error for this swap, if any
    pub last_error: Option<String>,
}

impl StateProcessRespond {
    /// Build a respond carrying only the next state — no action, no limits.
    pub fn new(next_state_id: StateId) -> Self {
        StateProcessRespond {
            next_state_id,
            action: None,
            time_limit: None,
            journal: Vec::new(),
            last_error: None,
        }
    }

    /// Builder: attach the action expected from the user.
    pub fn action(self, action: Action) -> Self {
        StateProcessRespond {
            next_state_id: self.next_state_id,
            action: Some(action),
            time_limit: self.time_limit,
            journal: self.journal,
            last_error: self.last_error,
        }
    }

    /// Builder: attach the time limit (seconds timestamp) for the action.
    pub fn time_limit(self, tl: i64) -> Self {
        StateProcessRespond {
            next_state_id: self.next_state_id,
            action: self.action,
            time_limit: Some(tl),
            journal: self.journal,
            last_error: self.last_error,
        }
    }
}

/// ETA or roadmap info for the state.
#[derive(Serialize, Deserialize)] pub struct StateEtaInfo { /// True if this is current active state pub active: bool, /// Name of the state to show for user pub name: String, /// Starting time pub start_time: Option<i64>, /// Expiration time pub end_time: Option<i64>, } impl StateEtaInfo { /// Create a new instance for the ETA state pub fn new(name: &str) -> Self { StateEtaInfo { active: false, name: name.to_string(), start_time: None, end_time: None, } } /// Define ETA start time pub fn start_time(self, time: i64) -> Self { StateEtaInfo { active: self.active, name: self.name, start_time: Some(time), end_time: self.end_time, } } /// Define ETA end time pub fn end_time(self, time: i64) -> Self { StateEtaInfo { active: self.active, name: self.name, start_time: self.start_time, end_time: Some(time), } } /// Mark it as active pub fn active(self) -> Self { StateEtaInfo { active: true, name: self.name, start_time: self.start_time, end_time: self.end_time, } } } /// State that is describe a finite state machine pub trait State { /// This state Id fn get_state_id(&self) -> StateId; /// Get a state eta. Return None for states that are never executed fn get_eta(&self, swap: &Swap) -> Option<StateEtaInfo>; /// Check if it is cancellable fn is_cancellable(&self) -> bool; /// Process the state. Result will be the next state fn process( &mut self, input: Input, swap: &mut Swap, context: &Context, tx_conf: &SwapTransactionsConfirmations, ) -> Result<StateProcessRespond, ErrorKind>; /// Get the prev happy path State. fn get_prev_swap_state(&self) -> Option<StateId>; /// Get the next happy path State. fn get_next_swap_state(&self) -> Option<StateId>; }
use crate::diagnostics::Diagnostics; use std::fs; use std::io::{self, Read}; use std::path::PathBuf; use std::rc::Rc; use super::error::Error; use super::semantics; use super::syntax::*; pub type Result<T> = ::std::result::Result<T, Error>; pub struct File { pub content: String, pub lines: Vec<String>, } pub type FileRef = Rc<File>; impl File { pub fn from_path(path: &PathBuf) -> io::Result<Self> { let mut file = fs::File::open(path)?; let mut content = String::new(); file.read_to_string(&mut content)?; let lines: Vec<String> = content.as_str().lines().map(|s| s.to_string()).collect(); Ok(File { content, lines }) } pub fn line_from_pos(&self, pos: &token::Position) -> &str { let line = pos.line(); self.lines[line - 1].as_str() } } pub struct Compiler { source: PathBuf, diagnostics: Diagnostics, } impl Compiler { pub fn new(source: PathBuf) -> Result<Self> { Ok(Self { source, diagnostics: Diagnostics::new(), }) } fn load_file<T: Into<PathBuf>>(&mut self, path: T) -> Result<FileRef> { let path: PathBuf = path.into(); let file = File::from_path(&path).map_err(|e| Error::IoError(e))?; let file = FileRef::new(file); self.diagnostics.files.insert(path, file.clone()); Ok(file) } pub fn compile_source(&mut self) -> Result<()> { let file = self.load_file(self.source.clone())?; let mut parser = match Parser::new( file.content.as_str(), self.source.clone(), &mut self.diagnostics, ) { Ok(parser) => parser, Err(err) => match err { parser::Error::ParseError => return Err(Error::ParseError), parser::Error::OtherError(val) => { println!("Error: {}", val); return Err(Error::ParseError); } }, }; if let Ok(mut parsed_file) = parser.parse_file() { parsed_file.render(0); let mut semantic = semantics::Semantics::new(&mut self.diagnostics, "prelude/mod.rs"); semantic .check_program(&mut parsed_file) .map_err(|_e| Error::TypeError)?; Ok(()) } else { Err(Error::ParseError) } } }
extern crate openssl; use openssl::rsa::{Rsa, Padding}; use openssl::pkey::Private; use std::io::{Error, ErrorKind}; pub trait CryptoService { fn pubkey_pem(&self) -> String; // fn privkey(&self) -> Vec<u8>; fn encrypt(&self, text: &[u8]) -> Result<Vec<u8>, Error>; fn decrypt(&self, text: &[u8]) -> Result<Vec<u8>, Error>; } pub struct RSACryptoService { rsa: Rsa<Private>, pad: Padding, } impl RSACryptoService { pub fn new() -> Self { let rsa = Rsa::generate(2048).unwrap(); //let pkey = PKey::from_rsa(rsa).unwrap(); RSACryptoService {rsa: rsa, pad: Padding::NONE} } } impl CryptoService for RSACryptoService { fn pubkey_pem(&self) -> String { let pub_key = self.rsa.public_key_to_pem().unwrap(); String::from_utf8(pub_key).unwrap() } fn encrypt(&self, text_bytes: &[u8]) -> Result<Vec<u8>, Error> { if (self.rsa.size() as usize) < text_bytes.len() { return Err(Error::new(ErrorKind::InvalidInput, "Text length to big :(")); } let pad_size = self.rsa.size() as usize - text_bytes.len(); let mut data: Vec<u8> = vec![0; pad_size]; data.extend_from_slice(text_bytes); let mut result: Vec<u8> = vec![0; self.rsa.size() as usize]; try!(self.rsa.public_encrypt(data.as_slice(), result.as_mut_slice(), self.pad)); Ok(result) } fn decrypt(&self, text_bytes: &[u8]) -> Result<Vec<u8>, Error> { if (self.rsa.size() as usize) < text_bytes.len() { return Err(Error::new(ErrorKind::InvalidInput, "Text length to big :(")); } let pad_size = self.rsa.size() as usize - text_bytes.len(); let mut data: Vec<u8> = vec![0; pad_size]; data.extend_from_slice(text_bytes); let mut result: Vec<u8> = vec![0; self.rsa.size() as usize]; try!(self.rsa.private_decrypt(data.as_slice(), result.as_mut_slice(), self.pad)); Ok(result) } }