text
stringlengths
8
4.13M
use caches;
use gfx;
use io::timer::{Timer, Prescaler};

/// Runs the C busy-wait loop and logs, via the supplied timer, how many
/// microseconds the executed instructions took.
fn bench_busyloop(timer: &Timer) {
    extern {
        fn cbench_busywait(start: u32, end: u32);
    }

    // Loop bound: one less than 2^20, so the reported instruction count
    // below (3 per iteration) comes out to a round 0x100000 iterations.
    let iterations = 0x100000 - 1;

    let start_us = timer.us_val();
    unsafe {
        cbench_busywait(0, iterations);
    }
    let end_us = timer.us_val();
    let elapsed_us = end_us - start_us;

    log!("{:#X} instructions took {}us", 3 * ((iterations as u64) + 1), elapsed_us);
}

/// Entry point: runs the busy-loop benchmark twice — once with caches
/// disabled and once with them enabled — so the two timings can be compared.
pub fn main() {
    gfx::clear_screen(0xFF, 0xFF, 0xFF);

    let timer = Timer::new(lease!(TIMER0), 0, 0, Prescaler::Div1, None);
    timer.start();

    log!("Caches disabled...");
    caches::disable_all();
    bench_busyloop(&timer);

    log!("Caches enabled...");
    caches::enable_all();
    bench_busyloop(&timer);
}
use super::mmap::ProtFlags;

/// Stub handler for the `mprotect` syscall.
///
/// Decodes the protection bits (panicking on unknown flags), logs the
/// request, and reports success (0) without touching any mappings.
pub fn mprotect(addr: u64, len: u64, prot: u64) -> u64 {
    let flags = ProtFlags::from_bits(prot as i32).expect("unknown prot flags");
    println!("Syscall: mprotect addr={:x} len={:x} prot={:?}", addr, len, flags);
    // Always report success to the caller.
    0
}
use std::i16;
use std::i32;
use lazy_static::lazy_static;
use sdl2::pixels::Color;
use sdl2::rect::Point;
use sdl2::rect::Rect;
use sdl2::render::Canvas;
use sdl2::render::TextureQuery;
use sdl2::ttf::Font;
use sdl2::video::Window;
use tetris::Board;
use tetris::Game;
use tetris::GameOver;
use tetris::HighScores;
use tetris::Piece;
use tetris::Pos;
use tetris::Rotation;
use tetris::Score;
use tetris::Shape;
use tetris::ShapeColor;
use tetris::State;

// Pixel geometry of one board cell: a 22px interior plus a 1px border on
// each side, giving a 24px cell pitch.
const INNER_BLOCK_SIZE: u8 = 22;
const BLOCK_BORDER: u8 = 1;
pub const BLOCK_SIZE: u8 = INNER_BLOCK_SIZE + BLOCK_BORDER * 2;
// Horizontal offset (px) used when laying out name/value score columns.
pub const SCORE_OFFSET: i32 = 100;
// Grey used for the board/preview frame blocks.
const BORDER_COLOR: Color = Color {
    r: 100,
    g: 100,
    b: 100,
    a: 255,
};

/// Owns the SDL canvas and font and renders every game state to the window.
pub struct Drawer<'a> {
    canvas: Canvas<Window>,
    font: Font<'a, 'a>,
}

/// Maps a tetromino's logical colour to its on-screen RGB value.
fn shape_color_to_rgb(color: ShapeColor) -> Color {
    match color {
        ShapeColor::O => Color::RGB(255, 255, 0),
        ShapeColor::I => Color::RGB(0, 255, 255),
        ShapeColor::J => Color::RGB(0, 0, 255),
        ShapeColor::L => Color::RGB(255, 165, 0),
        ShapeColor::S => Color::RGB(0, 255, 0),
        ShapeColor::T => Color::RGB(255, 0, 255),
        ShapeColor::Z => Color::RGB(255, 0, 0),
    }
}

impl<'a> Drawer<'a> {
    pub fn new(canvas: Canvas<Window>, font: Font<'a, 'a>) -> Self {
        Drawer { canvas, font }
    }

    /// Dispatches to the renderer for the current top-level game state.
    pub fn draw_state(&mut self, state: &State) {
        match state {
            State::Title(_) => self.title_draw(),
            State::Play(game) => self.draw_game(game),
            State::Paused(_) => self.pause_draw(),
            State::GameOver(game_over) => self.draw_game_over(game_over),
        }
    }

    // Title screen: big centred logo with a prompt underneath.
    fn title_draw(&mut self) {
        self.text()
            .size(4)
            .centered()
            .draw("Tetris")
            .size(1)
            .under()
            .offset(0, 10)
            .draw("[ Press Enter ]");
    }

    fn pause_draw(&mut self) {
        self.text().centered().draw("Paused");
    }

    /// Fills one cell at grid position `pos` (in blocks, relative to the
    /// current viewport) with `col`, inset by the border width.
    pub fn draw_block(&mut self, pos: Pos, col: Color) {
        let x = pos.x() as i16;
        let y = pos.y() as i16;
        self.canvas.set_draw_color(col);
        // Result deliberately ignored: a failed fill is not fatal.
        let _ = self.canvas.fill_rect(Rect::new(
            i32::from(x) * i32::from(BLOCK_SIZE) + i32::from(BLOCK_BORDER),
            i32::from(y) * i32::from(BLOCK_SIZE) + i32::from(BLOCK_BORDER),
            u32::from(BLOCK_SIZE) - u32::from(BLOCK_BORDER),
            u32::from(BLOCK_SIZE) - u32::from(BLOCK_BORDER),
        ));
    }

    /// Game-over screen: headline, final score, high-score table (via the
    /// `Drawable` impl on `GameOver`), and the appropriate prompt.
    pub fn draw_game_over(&mut self, game_over: &GameOver) {
        let mut text = self
            .text()
            .top()
            .offset(0, 50)
            .size(3)
            .draw("Game Over")
            .under()
            .offset(0, 10)
            .size(1)
            .draw("final score")
            .under()
            .size(3)
            .draw(&game_over.score.value.to_string());
        text = game_over.draw(text);
        if game_over.posting_hiscore() {
            text.size(1).draw("[ Enter Name and Press Enter ]");
        } else {
            text.size(1).draw("[ Press Enter ]");
        }
    }

    /// Active-play frame: board, falling piece, next-piece preview, scores.
    pub fn draw_game(&mut self, game: &Game) {
        self.draw_board(game.board());
        self.draw_piece(game.piece());
        self.draw_next(game.next_shape());
        self.draw_game_score(game);
    }

    fn draw_board(&mut self, board: &Board) {
        self.set_viewport(*BOARD_BORDER_VIEW);
        self.draw_border(Pos::new(
            i16::from(Board::WIDTH),
            i16::from(Board::VISIBLE_ROWS),
        ));
        self.set_viewport(*BOARD_VIEW);
        // Rows above HIDE_ROWS are the spawn area and are not rendered.
        for y in Board::HIDE_ROWS..Board::HEIGHT {
            for x in 0..Board::WIDTH {
                if let Some(color) = board.grid()[y as usize][x as usize] {
                    // Shift down so the first visible row draws at y = 0.
                    let y = y - Board::HIDE_ROWS;
                    let cell_pos = Pos::new(i16::from(x), i16::from(y));
                    self.draw_block(cell_pos, shape_color_to_rgb(color))
                }
            }
        }
    }

    // Next-piece preview box, drawn in its own bordered viewport.
    fn draw_next(&mut self, next: Shape) {
        self.set_viewport(*PREVIEW_VIEW);
        self.draw_border(Pos::new(i16::from(Shape::WIDTH), i16::from(Shape::HEIGHT)));
        self.draw_shape(next, Rotation::default(), Pos::new(1, 1));
    }

    fn draw_game_score(&mut self, game: &Game) {
        self.set_viewport(*SCORE_VIEW);
        self.text()
            .draw("lines")
            .size(2)
            .left()
            .draw(&game.lines_cleared().to_string())
            .size(1)
            .left()
            .offset(0, PAD)
            .draw("score")
            .size(2)
            .left()
            .draw(&game.score().to_string());
    }

    fn draw_piece(&mut self, piece: &Piece) {
        // Piece positions include the hidden spawn rows; translate into
        // visible-board coordinates before drawing.
        self.draw_shape(
            piece.shape,
            piece.rot,
            piece.pos + Pos::new(0, -i16::from(Board::HIDE_ROWS)),
        );
    }

    fn draw_shape(&mut self, shape: Shape, rot: Rotation, pos: Pos) {
        for block in shape.blocks(rot) {
            self.draw_block(pos + block, shape_color_to_rgb(shape.color));
        }
    }

    /// Draws a one-block-thick rectangular frame around an area of `size`
    /// blocks (frame cells sit just outside that area).
    fn draw_border(&mut self, size: Pos) {
        let size = size + Pos::new(1, 1);
        for y in 0..=size.y() {
            self.draw_block(Pos::new(0, y), BORDER_COLOR);
            self.draw_block(Pos::new(size.x(), y), BORDER_COLOR);
        }
        for x in 1..size.x() {
            self.draw_block(Pos::new(x, size.y()), BORDER_COLOR);
            self.draw_block(Pos::new(x, 0), BORDER_COLOR);
        }
    }

    /// Renders `text` at `text_pos`, scaled by `size`, and returns the
    /// rectangle it occupied (used for relative placement of later text).
    fn draw_text(&mut self, text_pos: &TextPos, text: &str, size: u32, color: Color) -> Rect {
        let surface = self.font.render(text).solid(color).unwrap();
        let texture_creator = self.canvas.texture_creator();
        let texture = texture_creator
            .create_texture_from_surface(&surface)
            .unwrap();
        let TextureQuery { width, height, .. } = texture.query();
        let target = text_pos.apply(width * size, height * size);
        self.canvas.copy(&texture, None, Some(target)).unwrap();
        target
    }

    /// Starts a fluent text-drawing chain with default position/size/colour.
    pub fn text<'b>(&'b mut self) -> TextDrawer<'b, 'a> {
        TextDrawer {
            last_rect: Rect::new(0, 0, WINDOW_WIDTH, WINDOW_HEIGHT),
            pos: TextPos::At(0, 0),
            size: 1,
            color: Color::RGB(255, 255, 255),
            drawer: self,
        }
    }

    pub fn set_viewport(&mut self, rect: Rect) {
        self.canvas.set_viewport(Some(rect));
    }

    /// Clears the whole window to the background colour.
    pub fn clear(&mut self) {
        self.canvas.set_viewport(None);
        self.canvas.set_draw_color(Color::RGB(32, 48, 32));
        self.canvas.clear();
    }

    pub fn present(&mut self) {
        self.canvas.present();
    }
}

/// Builder-style helper for chained text drawing; each `draw` records the
/// rectangle it used so following calls can position relative to it.
pub struct TextDrawer<'a, 'b: 'a> {
    last_rect: Rect,
    pos: TextPos,
    size: u32,
    color: Color,
    drawer: &'a mut Drawer<'b>,
}

impl TextDrawer<'_, '_> {
    /// Draws `text` at the current position and remembers its rectangle.
    pub fn draw(mut self, text: &str) -> Self {
        self.last_rect = self
            .drawer
            .draw_text(&self.pos, text, self.size, self.color);
        self
    }

    pub fn color(mut self, color: Color) -> Self {
        self.color = color;
        self
    }

    /// Restores the default white text colour.
    pub fn reset_color(mut self) -> Self {
        self.color = Color::RGB(255, 255, 255);
        self
    }

    pub fn size(mut self, size: u32) -> Self {
        self.size = size;
        self
    }

    // Position next draw horizontally centred on the last rect, at its top.
    pub fn top(mut self) -> Self {
        self.pos = TextPos::Top(self.last_rect);
        self
    }

    // Position next draw horizontally centred, directly below the last rect.
    pub fn under(mut self) -> Self {
        self.pos = TextPos::Under(self.last_rect);
        self
    }

    // Position next draw left-aligned with, and below, the last rect.
    pub fn left(mut self) -> Self {
        self.pos = TextPos::Left(self.last_rect);
        self
    }

    pub fn centered(mut self) -> Self {
        self.pos = TextPos::Centered;
        self
    }

    /// Offsets whatever position is currently selected by (x, y) pixels.
    pub fn offset(mut self, x: i32, y: i32) -> Self {
        self.pos = TextPos::Offset(Box::new(self.pos), x, y);
        self
    }
}

// Where the next piece of text should be placed, possibly relative to the
// previously drawn rectangle.
enum TextPos {
    At(i32, i32),
    Centered,
    Top(Rect),
    Under(Rect),
    Left(Rect),
    Offset(Box<TextPos>, i32, i32),
}

impl TextPos {
    /// Resolves this position into a concrete target rectangle for a
    /// rendered texture of the given width/height.
    fn apply(&self, width: u32, height: u32) -> Rect {
        match *self {
            TextPos::At(x, y) => Rect::new(x, y, width, height),
            TextPos::Centered => {
                let center_x = (WINDOW_WIDTH / 2) as i32;
                let center_y = (WINDOW_HEIGHT / 2) as i32;
                Rect::from_center(Point::new(center_x, center_y), width, height)
            }
            TextPos::Top(rect) => Rect::new(
                rect.center().x() - width as i32 / 2,
                rect.y(),
                width,
                height,
            ),
            TextPos::Under(rect) => Rect::new(
                rect.center().x() - width as i32 / 2,
                rect.bottom(),
                width,
                height,
            ),
            TextPos::Left(rect) => Rect::new(rect.x(), rect.bottom(), width, height),
            TextPos::Offset(ref pos, x, y) => {
                // Resolve the wrapped position first, then shift it.
                let mut rect = pos.apply(width, height);
                rect.offset(x, y);
                rect
            }
        }
    }
}

// Types that know how to append themselves to a text chain.
trait Drawable {
    fn draw<'a, 'b>(&self, text: TextDrawer<'a, 'b>) -> TextDrawer<'a, 'b>;
}

impl Drawable for Score {
    /// Draws one high-score row: name on the left, value on the right.
    fn draw<'a, 'b>(&self, text: TextDrawer<'a, 'b>) -> TextDrawer<'a, 'b> {
        // An empty name would collapse the rendered texture; draw a space.
        let name = if self.name.is_empty() { " " } else { &self.name };
        text.offset(-SCORE_OFFSET, 0)
            .draw(name)
            .offset(SCORE_OFFSET * 2, 0)
            .draw(&self.value.to_string())
            .under()
            .offset(-SCORE_OFFSET, 10)
    }
}

impl Drawable for GameOver {
    /// Draws the high-score table, highlighting the player's new entry in
    /// yellow when it made the table; falls back to an error line when the
    /// scores could not be fetched.
    fn draw<'a, 'b>(&self, text: TextDrawer<'a, 'b>) -> TextDrawer<'a, 'b> {
        match &self.hiscores {
            Some(HighScores {
                higher_scores,
                lower_scores,
                has_hiscore,
            }) => {
                let mut text = text.size(3).under().offset(0, 10).draw("High Scores");
                text = text.size(2).under().offset(0, 10);
                for score in higher_scores {
                    text = score.draw(text);
                }
                if *has_hiscore {
                    // The player's own score, drawn highlighted.
                    text = self
                        .score
                        .draw(text.color(Color::RGB(255, 255, 100)))
                        .reset_color();
                }
                for score in lower_scores {
                    text = score.draw(text);
                }
                text.under().offset(-SCORE_OFFSET, 10)
            }
            None => text
                .size(1)
                .under()
                .offset(0, 10)
                .draw("[ ERROR Failed to retrieve High Scores ]")
                .offset(0, 20),
        }
    }
}

// Viewports for each region of the window, derived from the pixel constants
// below. lazy_static because Rect::new is not a const fn.
lazy_static! {
    static ref PREVIEW_VIEW: Rect =
        Rect::new(PREVIEW_X, PREVIEW_Y, PREVIEW_WIDTH, PREVIEW_HEIGHT);
    static ref SCORE_VIEW: Rect = Rect::new(SCORE_X, PAD, PREVIEW_WIDTH, BOARD_HEIGHT);
    static ref BOARD_BORDER_VIEW: Rect = Rect::new(
        0,
        0,
        BOARD_WIDTH + BOARD_BORDER * 2,
        BOARD_HEIGHT + BOARD_BORDER * 2
    );
    static ref BOARD_VIEW: Rect = Rect::new(
        BOARD_BORDER as i32,
        BOARD_BORDER as i32,
        BOARD_WIDTH,
        BOARD_HEIGHT
    );
}

// Window layout in pixels: board on the left (with a one-block frame),
// preview and score column on the right.
const BOARD_BORDER: u32 = BLOCK_SIZE as u32;
const BOARD_WIDTH: u32 = Board::WIDTH as u32 * BLOCK_SIZE as u32;
const BOARD_HEIGHT: u32 = Board::VISIBLE_ROWS as u32 * BLOCK_SIZE as u32;
const TOTAL_BOARD_HEIGHT: u32 = BOARD_HEIGHT + BOARD_BORDER * 2;
const PREVIEW_X: i32 = BOARD_WIDTH as i32 + BOARD_BORDER as i32;
const PREVIEW_Y: i32 =
    TOTAL_BOARD_HEIGHT as i32 - (Shape::HEIGHT + 2) as i32 * BLOCK_SIZE as i32;
const PREVIEW_WIDTH: u32 = (Shape::WIDTH + 2) as u32 * BLOCK_SIZE as u32;
const PREVIEW_HEIGHT: u32 = (Shape::HEIGHT + 2) as u32 * BLOCK_SIZE as u32;
const SCORE_X: i32 = PREVIEW_X + BOARD_BORDER as i32 + PAD;
const PAD: i32 = BLOCK_SIZE as i32;
pub const WINDOW_WIDTH: u32 = BOARD_WIDTH + BOARD_BORDER + PREVIEW_WIDTH;
pub const WINDOW_HEIGHT: u32 = TOTAL_BOARD_HEIGHT;
pub const WINDOW_RATIO: f32 = WINDOW_HEIGHT as f32 / WINDOW_WIDTH as f32;
use std::mem;
use winapi::um::winbase::{
    ABOVE_NORMAL_PRIORITY_CLASS, BELOW_NORMAL_PRIORITY_CLASS, HIGH_PRIORITY_CLASS,
    IDLE_PRIORITY_CLASS, NORMAL_PRIORITY_CLASS, REALTIME_PRIORITY_CLASS,
};
use winapi::um::winnt::*;

/// Newtype wrapper over the raw Win32 extended job limit structure.
pub struct ExtendedLimitInfo(pub JOBOBJECT_EXTENDED_LIMIT_INFORMATION);

/// Process priority classes accepted by `limit_priority_class`; each variant
/// maps directly to the corresponding Win32 `*_PRIORITY_CLASS` constant.
#[repr(u32)]
pub enum PriorityClass {
    Normal = NORMAL_PRIORITY_CLASS,
    Idle = IDLE_PRIORITY_CLASS,
    High = HIGH_PRIORITY_CLASS,
    Realtime = REALTIME_PRIORITY_CLASS,
    BelowNormal = BELOW_NORMAL_PRIORITY_CLASS,
    AboveNormal = ABOVE_NORMAL_PRIORITY_CLASS,
}

impl Default for ExtendedLimitInfo {
    fn default() -> Self {
        Self::new()
    }
}

/// Contains basic and extended limit information for a job object, with helper methods for
/// easy limit manipulation. To apply limits, pass the instance of this struct to
/// `Job::create_with_limit_info` or `job.set_extended_limit_info`.
impl ExtendedLimitInfo {
    /// Return an empty extended info objects, without any limits.
    pub fn new() -> Self {
        // The Win32 struct is plain-old-data, so an all-zeroes value is a
        // valid "no limits set" state.
        let inner: JOBOBJECT_EXTENDED_LIMIT_INFORMATION = unsafe { mem::zeroed() };
        ExtendedLimitInfo(inner)
    }

    /// Causes all processes associated with the job
    /// to use the same minimum and maximum working set sizes
    pub fn limit_working_memory(&mut self, min: usize, max: usize) -> &mut Self {
        self.0.BasicLimitInformation.MinimumWorkingSetSize = min;
        self.0.BasicLimitInformation.MaximumWorkingSetSize = max;
        // Each limit_* helper sets its value and ORs in the matching flag;
        // the flag is what makes the kernel honour the field.
        self.0.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_WORKINGSET;
        self
    }

    /// Causes all processes associated with the job to terminate
    /// when the last handle to the job is closed.
    /// Note, that that `drop`ing the `Job` struct closes this handle, and if it's the only handle
    /// to the job **the current process will terminate** if it's assign to that job.
    pub fn limit_kill_on_job_close(&mut self) -> &mut Self {
        self.0.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
        self
    }

    /// Causes all processes associated with the job to use the same priority class.
    /// Note: Processes and threads cannot modify their priority class.
    /// The calling process must enable the `SE_INC_BASE_PRIORITY_NAME` privilege.
    pub fn limit_priority_class(&mut self, priority_class: PriorityClass) -> &mut Self {
        self.0.BasicLimitInformation.PriorityClass = priority_class as u32;
        self.0.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_PRIORITY_CLASS;
        self
    }

    /// Causes all processes in the job to use the same scheduling class.
    /// The valid values are 0 to 9.
    /// Use 0 for the least favorable scheduling class relative to other threads,
    /// and 9 for the most favorable scheduling class relative to other threads.
    /// By default, this value is 5.
    /// Note: To use a scheduling class greater than 5,
    /// the calling process must enable the `SE_INC_BASE_PRIORITY_NAME` privilege.
    pub fn limit_scheduling_class(&mut self, scheduling_class: u8) -> &mut Self {
        self.0.BasicLimitInformation.SchedulingClass = scheduling_class as u32;
        self.0.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_SCHEDULING_CLASS;
        self
    }

    /// Causes all processes associated with the job to use the same processor affinity.
    pub fn limit_affinity(&mut self, affinity: usize) -> &mut Self {
        self.0.BasicLimitInformation.Affinity = affinity;
        self.0.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_AFFINITY;
        self
    }

    /// Clear all limits.
    pub fn clear_limits(&mut self) -> &mut Self {
        // Clearing the flags disables every limit; the value fields are
        // left as-is since the kernel ignores them without their flag.
        self.0.BasicLimitInformation.LimitFlags = 0;
        self
    }
}

#[cfg(test)]
mod tests {
    use crate::utils::{get_current_process, get_process_affinity_mask, get_process_memory_info};
    use crate::{Job, PriorityClass};
    use rusty_fork::rusty_fork_test;

    // Each test runs in its own forked process (rusty_fork) because job
    // limits are applied to the *current* process and would otherwise leak
    // between tests.
    rusty_fork_test! {
        #[test]
        fn working_mem_limits() {
            let job = Job::create().unwrap();
            let mut info = job.query_extended_limit_info().unwrap();
            let min = 1 * 1024 * 1024;
            let max = 4 * 1024 * 1024;
            info.limit_working_memory(min, max);
            job.set_extended_limit_info(&mut info).unwrap();
            job.assign_current_process().unwrap();
            // Allocate well past the cap and check the working set was
            // actually constrained (some slack allowed: max * 2).
            let test_vec_size = max * 4;
            let mut big_vec: Vec<u8> = Vec::with_capacity(test_vec_size);
            big_vec.resize_with(test_vec_size, || 1);
            let memory_info = get_process_memory_info(get_current_process()).unwrap();
            assert!(memory_info.WorkingSetSize <= max * 2);
            info.clear_limits();
            job.set_extended_limit_info(&mut info).unwrap();
        }
    }

    rusty_fork_test! {
        #[test]
        fn kill_on_job_close_limits() {
            let job = Job::create().unwrap();
            let mut info = job.query_extended_limit_info().unwrap();
            info.limit_kill_on_job_close();
            job.set_extended_limit_info(&mut info).unwrap();
            job.assign_current_process().unwrap();
            // Dropping the only job handle must kill this (forked) process.
            drop(job);

            // Never reached.
            panic!();
        }
    }

    rusty_fork_test! {
        #[test]
        fn priority_class_limits() {
            let job = Job::create().unwrap();
            let mut info = job.query_extended_limit_info().unwrap();
            info.limit_priority_class(PriorityClass::BelowNormal);
            job.set_extended_limit_info(&mut info).unwrap();
            // Read the limit back and verify it round-trips.
            let info = job.query_extended_limit_info().unwrap();
            assert_eq!(info.0.BasicLimitInformation.PriorityClass,
                       PriorityClass::BelowNormal as u32);
        }
    }

    rusty_fork_test! {
        #[test]
        fn scheduling_class_limits() {
            let job = Job::create().unwrap();
            let mut info = job.query_extended_limit_info().unwrap();
            info.limit_scheduling_class(1);
            job.set_extended_limit_info(&mut info).unwrap();
            let info = job.query_extended_limit_info().unwrap();
            assert_eq!(info.0.BasicLimitInformation.SchedulingClass, 1);
        }
    }

    rusty_fork_test! {
        #[test]
        fn affinity_limits() {
            let job = Job::create().unwrap();
            let mut info = job.query_extended_limit_info().unwrap();
            info.limit_affinity(1);
            job.set_extended_limit_info(&mut info).unwrap();
            // Affinity only takes effect once the process is assigned.
            let (proc_affinity, _) = get_process_affinity_mask(get_current_process()).unwrap();
            assert_ne!(proc_affinity, 1);
            job.assign_current_process().unwrap();
            let (proc_affinity, _) = get_process_affinity_mask(get_current_process()).unwrap();
            assert_eq!(proc_affinity, 1);
        }
    }
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under both the MIT license found in the * LICENSE-MIT file in the root directory of this source tree and the Apache * License, Version 2.0 found in the LICENSE-APACHE file in the root directory * of this source tree. */ use crate::dupe::Dupe; /// Extension traits on [`Option`](Option) where it holds a ref. pub trait OptionRefExt { type Item; /// Like `cloned`, but with a `Dupe` constraint. /// /// ``` /// use gazebo::prelude::*; /// use std::rc::Rc; /// let rc = Rc::new("test"); /// assert_eq!(Some(&rc).duped(), Some(rc)); /// assert_eq!(None::<&Rc<String>>.duped(), None); /// ``` fn duped(self) -> Option<Self::Item> where Self::Item: Dupe; } /// Extension traits on [`Option`](Option) where it holds any value or ref. pub trait OptionExt { type Item; /// Like `map`, but as a `Result` /// /// ``` /// use gazebo::prelude::*; /// /// assert_eq!(Some("foo").into_try_map(|x| Ok::<_, ()>(x.len())), Ok(Some(3))); /// assert_eq!(Some("foo").into_try_map(|x| Err::<(), _>(())), Err(())); /// ``` fn into_try_map<U, E, F: FnOnce(Self::Item) -> Result<U, E>>( self, f: F, ) -> Result<Option<U>, E>; /// Like `map`, but as a `Result` /// /// ``` /// use gazebo::prelude::*; /// /// assert_eq!(Some("foo").try_map(|x| Ok::<_, ()>(x.len())), Ok(Some(3))); /// assert_eq!(Some("foo").try_map(|x| Err::<(), _>(())), Err(())); /// ``` fn try_map<U, E, F: FnOnce(&Self::Item) -> Result<U, E>>(self, f: F) -> Result<Option<U>, E>; } impl<'a, T> OptionRefExt for Option<&'a T> { type Item = T; fn duped(self) -> Option<T> where T: Dupe, { self.map(|x| x.dupe()) } } impl<T> OptionExt for Option<T> { type Item = T; fn into_try_map<U, E, F: FnOnce(Self::Item) -> Result<U, E>>( self, f: F, ) -> Result<Option<U>, E> { Ok(match self { None => None, Some(x) => Some(f(x)?), }) } fn try_map<U, E, F: FnOnce(&Self::Item) -> Result<U, E>>(self, f: F) -> Result<Option<U>, E> { Ok(match &self { None => None, Some(x) => 
Some(f(x)?), }) } }
use bytes::Bytes; use uuid::Uuid; #[derive(Debug, Clone, PartialEq, Eq)] pub struct Image { pub name: String, pub data: Bytes, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Format { Png, Jpeg, } #[derive(Error, Debug)] pub enum Error { #[error("format {0:?} is not supported")] UnsupportedFormat(image::ImageFormat), #[error("image error")] ImageError(#[from] image::error::ImageError), } /* * ====== * Query * ====== */ impl Image { pub fn format(&self) -> Format { match self.name.rsplit(".").next() { Some("png") => Format::Png, Some("jpeg") => Format::Jpeg, _ => unreachable!(), } } } /* * ======= * Command * ======= */ impl Image { pub fn new(data: Bytes) -> Result<Self, Error> { let format_str = match image::guess_format(&data)? { image::ImageFormat::Png => "png", image::ImageFormat::Jpeg => "jpeg", f => return Err(Error::UnsupportedFormat(f)), }; Ok(Image { name: format!("{}.{}", Uuid::new_v4(), format_str), data, }) } }
use api_client::ApiClient; use utils::decode_list; use errors::*; // Client Structure #[derive(Debug, Clone, Serialize, Deserialize, Default)] pub struct Client { #[serde(default)] pub name: String, #[serde(default)] clientname: String, #[serde(default)] validator: bool, #[serde(default)] orgname: String, #[serde(default)] json_class: String, #[serde(default)] chef_type: String, } impl Client { pub fn show(client: &ApiClient, name: String) -> Result<Client> { let org = &client.config.organization_path(); let path = format!("{}/clients/{}", org, name); client.get::<Client>(path.as_ref()) } pub fn delete(client: &ApiClient, name: String) -> Result<Client> { let org = &client.config.organization_path(); let path = format!("{}/clients/{}", org, name); client.delete::<Client>(path.as_ref()) } } pub fn delete_client(client: &ApiClient, name: &str) -> Result<Client> { let org = &client.config.organization_path(); let path = format!("{}/clients/{}", org, name); client.delete::<Client>(path.as_ref()) } // Clients Structure #[derive(Debug)] pub struct Clients { count: usize, clients: Vec<String>, client: ApiClient, } impl Clients { pub fn list(client: &ApiClient) -> Clients { let org = &client.config.organization_path(); let path = format!("{}/clients", org); client .get(path.as_ref()) .and_then(decode_list) .and_then(|list| { Ok(Clients { count: 0, clients: list, client: client.clone(), }) }) .unwrap() } pub fn show(client: &ApiClient, name: String) -> Result<Client> { let org = &client.config.organization_path(); let path = format!("{}/clients/{}", org, name); client.get(path.as_ref()) } pub fn delete(client: &ApiClient, name: String) -> Result<Client> { let org = &client.config.organization_path(); let path = format!("{}/clients/{}", org, name); client.delete(path.as_ref()) } } // Itenarator for Clients impl Iterator for Clients { type Item = Result<Client>; fn count(self) -> usize { self.clients.len() } fn next(&mut self) -> Option<Self::Item> { if self.clients.len() >= 
1 { Some(Client::show(&self.client, self.clients.remove(0))) } else { None } } }
use ocaml::{FromValue, ToValue};

// Test enum crossing the OCaml boundary: a unit variant, an int payload,
// and an OCaml array payload.
#[derive(ToValue, FromValue)]
enum Enum1 {
    Empty,
    First(ocaml::Int),
    Second(ocaml::Array<&'static str>),
}

/// Returns the unit variant.
#[ocaml::func]
pub fn enum1_empty() -> Enum1 {
    Enum1::Empty
}

/// Wraps an OCaml int in the `First` variant.
#[ocaml::func]
pub fn enum1_first(i: ocaml::Int) -> Enum1 {
    Enum1::First(i)
}

/// Builds a one-element OCaml array holding `s` and wraps it in `Second`.
#[ocaml::func]
pub fn enum1_make_second(s: &'static str) -> Enum1 {
    let mut arr = ocaml::Array::alloc(1);
    // Set failure is deliberately ignored in this test helper.
    let _ = arr.set(0, s);
    Enum1::Second(arr)
}

/// Extracts the array payload, or `None` for the other variants.
#[ocaml::func]
pub fn enum1_get_second_value(e: Enum1) -> Option<ocaml::Array<&'static str>> {
    match e {
        Enum1::Second(x) => Some(x),
        Enum1::Empty | Enum1::First(_) => None,
    }
}

/// True only for the `Empty` variant.
#[ocaml::func]
pub fn enum1_is_empty(e: Enum1) -> bool {
    match e {
        Enum1::Empty => true,
        _ => false,
    }
}

// Test struct with scalar, optional, and array fields.
#[derive(ToValue, FromValue, Default)]
struct Struct1 {
    a: ocaml::Int,
    b: ocaml::Float,
    c: Option<String>,
    d: Option<ocaml::Array<&'static str>>,
}

/// Returns a default-initialized `Struct1`.
#[ocaml::func]
pub fn struct1_empty() -> Struct1 {
    Struct1::default()
}

/// Accessor for the optional string field `c`.
#[ocaml::func]
pub fn struct1_get_c(s: Struct1) -> Option<String> {
    s.c
}

/// Accessor for the optional array field `d`.
#[ocaml::func]
pub fn struct1_get_d(s: Struct1) -> Option<ocaml::Array<&'static str>> {
    s.d
}

// NOTE(review): mutates a by-value copy that is dropped on return, so the
// caller never observes the assignment — presumably intentional for the
// test suite; confirm against the OCaml-side expectations.
#[ocaml::func]
pub fn struct1_set_c(mut s: Struct1, v: String) {
    s.c = Some(v);
}

/// Returns an OCaml value referencing the string without copying it.
#[ocaml::func]
pub unsafe fn string_non_copying(s: &str) -> ocaml::Value {
    ocaml::Value::of_str(s)
}
use std::cmp::Ordering;
use std::hash::{Hash, Hasher};
use std::collections::HashMap;

/// A read name paired with its recorded length.
///
/// Equality, ordering, and hashing deliberately consider only `name`, so two
/// `NameLen`s with different `len` values compare equal and collide in maps —
/// the name alone is the key.
#[derive(Debug)]
pub struct NameLen {
    pub name: String,
    pub len: u64,
}

impl PartialEq for NameLen {
    fn eq(&self, other: &NameLen) -> bool {
        self.name == other.name
    }
}

impl Eq for NameLen {}

impl Hash for NameLen {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Must stay consistent with PartialEq: hash only the name.
        self.name.hash(state);
    }
}

impl Ord for NameLen {
    fn cmp(&self, other: &NameLen) -> Ordering {
        self.name.cmp(&other.name)
    }
}

impl PartialOrd for NameLen {
    fn partial_cmp(&self, other: &NameLen) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

/// A numeric interval with a begin and end coordinate.
#[derive(Debug, Clone)]
pub struct Interval {
    pub begin: u64,
    pub end: u64,
}

impl Ord for Interval {
    /// Orders by `begin`, breaking ties with `end`
    /// (idiomatic `then_with` replaces the original `return match`).
    fn cmp(&self, other: &Interval) -> Ordering {
        self.begin
            .cmp(&other.begin)
            .then_with(|| self.end.cmp(&other.end))
    }
}

impl PartialOrd for Interval {
    fn partial_cmp(&self, other: &Interval) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl PartialEq for Interval {
    fn eq(&self, other: &Interval) -> bool {
        self.begin == other.begin && self.end == other.end
    }
}

impl Eq for Interval {}

/// Maps each read (keyed by name) to the intervals associated with it.
pub type Read2Mapping = HashMap<NameLen, Vec<Interval>>;
use core::mem; use typenum::consts; use typenum::uint::Unsigned; use port::Port; struct Pic<DataId: Unsigned, ControlId: Unsigned> { data: Port<DataId>, control: Port<ControlId> } pub struct ChainedPics { master: Pic<consts::U32, consts::U33>, // 0x20 and 0x21 slave: Pic<consts::U160, consts::U161> // 0xa0 and 0xa1 } impl ChainedPics { pub unsafe fn get() -> ChainedPics { ChainedPics { master: Pic { data: Port::get(), control: Port::get() }, slave: Pic { data: Port::get(), control: Port::get() } } } pub fn remap(&mut self, offset: u8) { unsafe { fn wait() { unsafe { Port::<consts::U128>::get().write_u8(mem::uninitialized()); } } let mask_master = self.master.data.read_u8(); let mask_slave = self.slave.data.read_u8(); self.master.control.write_u8(0x11); wait(); self.slave.control.write_u8(0x11); wait(); self.master.data.write_u8(offset); wait(); self.slave.data.write_u8(offset + 8); wait(); self.master.data.write_u8(4); wait(); self.slave.data.write_u8(2); wait(); self.master.data.write_u8(1); wait(); self.slave.data.write_u8(1); wait(); self.master.data.write_u8(mask_master); self.slave.data.write_u8(mask_slave); } } }
// For test decorators
#![feature(plugin, custom_attribute)]
#![plugin(adorn)]
#![plugin(docopt_macros)]

// Literal maps for test purposes
#[cfg(test)]
#[macro_use]
extern crate maplit;
#[cfg(test)]
#[macro_use]
extern crate lazy_static;

extern crate docopt;
extern crate env_logger;
extern crate mime;
extern crate reqwest;
extern crate rustc_serialize;
extern crate tar;

mod util;

use std::collections::{HashMap, HashSet};
use std::fs;
use std::fs::File;
use std::hash::{Hash, Hasher};
use std::io;
use std::io::{BufReader, BufWriter};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str;
use reqwest::Client;
use reqwest::Response;
use reqwest::header::{Accept, Authorization, Bearer, Headers, qitem};
use reqwest::Method;
use mime::{Mime, TopLevel, SubLevel};
use reqwest::StatusCode;
use reqwest::Url;
use rustc_serialize::json;
use tar::Archive;
use util::{find_common_keys, format_num_bytes, readers_identical, to_string_slices};

// Wrapper giving tar headers Hash/Eq over their raw 512-byte block, so they
// can key HashMaps. https://github.com/rust-lang/rust/issues/13721
#[derive(Clone)]
struct HashableHeader(tar::Header);

impl HashableHeader {
    pub fn new(srcheader: &tar::Header) -> HashableHeader {
        HashableHeader(srcheader.clone())
    }

    // stolen from tar-rs: reinterpret the header struct as its raw
    // 512-byte on-disk block.
    fn head_bytes(&self) -> &[u8; 512] {
        unsafe { &*(&self.0 as *const _ as *const [u8; 512]) }
    }
}

impl Hash for HashableHeader {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.head_bytes().hash(state);
    }
}

impl PartialEq for HashableHeader {
    fn eq(&self, other: &HashableHeader) -> bool {
        self.head_bytes()[..] == other.head_bytes()[..]
    }
}

impl Eq for HashableHeader {}

// octal_from in tar-rs: cut a NUL-terminated byte field at the first NUL.
fn truncate(slice: &[u8]) -> &[u8] {
    match slice.iter().position(|i| *i == 0) {
        Some(i) => &slice[..i],
        None => slice,
    }
}

/// Parses an ASCII-decimal byte field (as used in pax extended headers).
/// Panics on malformed input.
fn decimal_from(slice: &[u8]) -> io::Result<u64> {
    let num = match str::from_utf8(truncate(slice)) {
        Ok(n) => n,
        Err(_) => panic!("noo"),
    };
    match u64::from_str_radix(num.trim(), 10) {
        Ok(n) => Ok(n),
        Err(_) => panic!("noo"),
    }
}

/// Parses pax extended header records ("<len> <key>=<value>\n" repeated)
/// into a key/value map. Panics if a key appears twice.
fn parse_extended_header_data(extended_header: &[u8]) -> HashMap<&str, &str> {
    let mut data = extended_header;
    let mut outmap: HashMap<&str, &str> = HashMap::new();
    while data.len() != 0 {
        // Each record is prefixed by its total length, then a space.
        let spacepos: usize = data.iter().position(|c| *c == b' ').unwrap();
        let (sizeslice, restdata) = data.split_at(spacepos);
        let size = decimal_from(sizeslice).unwrap();
        let (spacekvslice, restdata2) = restdata.split_at(size as usize - sizeslice.len());
        // Strip the leading space and trailing newline around "key=value".
        let kvslice = &spacekvslice[1..spacekvslice.len() - 1];
        let eqpos: usize = kvslice.iter().position(|c| *c == b'=').unwrap();
        let (key, eqval) = kvslice.split_at(eqpos);
        let val = &eqval[1..];
        assert!(outmap.insert(str::from_utf8(key).unwrap(), str::from_utf8(val).unwrap())
            .is_none());
        data = restdata2
    }
    outmap
}

/// Indexes a list of tar entries by their (hashable) header.
fn get_header_map<'a, 'b>(arfiles: &'a mut Vec<tar::Entry<'b, fs::File>>)
                          -> HashMap<HashableHeader, &'a mut tar::Entry<'b, fs::File>> {
    let mut arfilemap: HashMap<HashableHeader, &'a mut tar::Entry<'b, _>> = HashMap::new();
    for file in arfiles.iter_mut() {
        arfilemap.insert(HashableHeader::new(file.header()), file);
    }
    arfilemap
}

/// Writes `outname` containing the entries selected by `headeriter`
/// (contents looked up in `headertofilemap`) plus the `verbatimiter`
/// entries, creating any missing parent directories via `mkdir`.
fn make_layer_tar<'a,
                  'b: 'a,
                  I1: Iterator<Item = &'a HashableHeader>,
                  I2: Iterator<Item = &'a mut tar::Entry<'b, fs::File>>,
                  F: Fn(&Path) -> tar::Header>
    (outname: &str,
     headeriter: I1,
     verbatimiter: I2,
     headertofilemap: &mut HashMap<HashableHeader, &mut tar::Entry<fs::File>>,
     mkdir: F) {
    let outfile = fs::File::create(outname).unwrap();
    let outar = Archive::new(outfile);
    // Alphabetical ordering, lets us make assumptions about directory traversal
    let mut headers: Vec<&HashableHeader> = headeriter.collect();
    headers.sort_by(|h1, h2| h1.0.path_bytes().cmp(&h2.0.path_bytes()));
    // Tracks the directory most recently guaranteed to exist in the output.
    let mut lastdir = PathBuf::new();
    // TODO: set trailing slash of dirs for belt and braces?
    for hheader in &headers {
        let header = &hheader.0;
        assert!(&header.ustar[..5] == b"ustar"); // TODO: get this as public?
        let path = header.path().unwrap();
        // Climb up to find common prefix
        while !path.starts_with(&lastdir) {
            lastdir = lastdir.parent().unwrap().to_path_buf();
        }
        // Climb down creating dirs as necessary
        let relpath = path.parent().unwrap().strip_prefix(&lastdir).unwrap().to_path_buf();
        for relcomponent in relpath.iter() {
            lastdir.push(relcomponent);
            let newdir = mkdir(&lastdir);
            outar.append(&newdir, &mut io::empty()).unwrap();
        }
        let file = headertofilemap.get_mut(&hheader).unwrap();
        outar.append(&header, file).unwrap();
        // Rewind so the entry can be read again by a later layer.
        file.seek(io::SeekFrom::Start(0)).unwrap();
        // Type '5' marks a directory entry; descend into it.
        if header.link[0] == b'5' {
            lastdir = path.to_path_buf();
        }
    }
    // Entries copied through unchanged (e.g. skipped extended headers).
    for af in verbatimiter {
        let hheader = HashableHeader::new(af.header()).clone();
        outar.append(&hheader.0, af).unwrap();
        af.seek(io::SeekFrom::Start(0)).unwrap();
    }
    outar.finish().unwrap();
}

/// Reads all entries of a tar, returning (usable entries, ignored entries).
/// Entries covered by pax extended headers (and anything under a skipped
/// directory) are ignored rather than processed.
fn get_archive_entries<'a>(ar: &'a Archive<fs::File>,
                           tname: &str)
                           -> (Vec<tar::Entry<'a, fs::File>>, Vec<tar::Entry<'a, fs::File>>) {
    println!("Loading {}", tname);
    let mut ignoredfiles: Vec<tar::Entry<_>> = vec![];
    // Can't handle extended headers at the moment - skip the next block if
    // prefixed by an extended header
    let mut skipnext = false;
    let mut extpath = PathBuf::new();
    let emptypath = PathBuf::new();
    // If we've skipped directories because of an extended header, exclude
    // anything under that
    let mut skipdirs: HashSet<PathBuf> = HashSet::new();
    let arfiles: Vec<_> = ar.entries().unwrap().filter_map(|res| {
        let af = res.unwrap();
        let ftype = af.header().link[0];
        // Handle extended headers, skip other headers if necessary
        if ftype == b'g' {
            panic!("Cannot handle global extended header")
        } else if ftype == b'x' {
            assert!(!skipnext && extpath == emptypath);
            skipnext = true;
            let mut extdata = vec![];
            unsafe {
                // TODO: just to dodge mutability requirement
                let afm = &mut *(&af as *const tar::Entry<fs::File> as *mut tar::Entry<fs::File>);
                afm.read_to_end(&mut extdata).unwrap();
                afm.seek(io::SeekFrom::Start(0)).unwrap();
            }
            let extheadmap = parse_extended_header_data(&extdata);
            if extheadmap.contains_key("path") {
                extpath = PathBuf::from(extheadmap["path"]);
            }
            ignoredfiles.push(af);
            None
            // http://stackoverflow.com/questions/2078778/what-exactly-is-the-gnu-tar-longlink-trick
            // https://golang.org/pkg/archive/tar/
        } else if b'A' <= ftype && ftype <= b'Z' {
            panic!("Unknown vendor-specific header: {}", ftype as char)
        } else if skipnext {
            if ftype == b'5' {
                // dir
                let headpath = af.header().path().unwrap().to_path_buf();
                // Either exactly one of the paths is set, or the extended
                // path extends the (truncated) header path.
                assert!(((headpath == emptypath) ^ (extpath == emptypath)) ||
                        extpath.to_str().unwrap().starts_with(headpath.to_str().unwrap()));
                let path = if extpath != emptypath {
                    &extpath
                } else {
                    &headpath
                };
                // Normalise it https://github.com/rust-lang/rust/issues/29008
                skipdirs.insert(path.components().as_path().to_path_buf());
            }
            skipnext = false;
            extpath = emptypath.clone();
            ignoredfiles.push(af);
            None
        } else {
            // Does the path need to be skipped because a parent is skipped?
            {
                let path = af.header().path().unwrap().to_path_buf();
                assert!(path != emptypath);
                let mut prefix = path.parent();
                while prefix != None {
                    let p = prefix.unwrap();
                    if skipdirs.contains(p) {
                        ignoredfiles.push(af);
                        return None
                    }
                    prefix = p.parent();
                }
            }
            Some(af)
        }
    }).collect();
    println!("Loading {}: found {} files, ignored {}",
             tname,
             arfiles.len(),
             ignoredfiles.len());
    (arfiles, ignoredfiles)
}

// TODO
// - check ustar at beginning
// - check paths are not absolute
// - be more intelligent about dirs - no point storing one child dir in common
//   tar because we have to store the parents as well, and then have to
//   overwrite the parents in specific tar
// - implement rebasing 'onto' an image, with deletes for irrelevant files etc
// - how do directory overwrites work in docker layers? e.g. if you chmod it,
//   presumably it will pull parent directories up from the previous layer, does
//   it grab children files as well?
// - assert not more than one of the same name
// - report files missed because of extended headers
// - assert sane sequence of headers (x is followed by a normal file)
// - handle extended headers
// - assert it's a posix archives (i.e. dirs use type 5 rather than 1)
// - ensure hard links don't get split across archives

// dayer export-image <imagetar>
docopt!(Args derive Debug, "
Usage: dayer commonise-tar <tarpath> <tarpath> [<tarpath>...]
       dayer download-image <imageurl> <targetdir>
       dayer --help

Options:
  --help       Show this message.
  <imageurl>   A fully qualified image url (e.g.
`ubuntu` would be specified as `https://registry-1.docker.io/library/ubuntu:latest`)
");

/// CLI entry point: dispatches on the docopt-parsed subcommand.
fn main() {
    let args: Args = Args::docopt().decode().unwrap_or_else(|e| e.exit());
    if args.cmd_commonise_tar {
        commonise_tars(&to_string_slices(&args.arg_tarpath))
    } else if args.cmd_download_image {
        download_image(&args.arg_imageurl, &args.arg_targetdir)
    } else {
        unreachable!("no cmd")
    }
}

/// Finds files common to all the given tars (first by header metadata, then
/// by content) and emits `common.tar` plus one `individual_<i>.tar` per
/// input with the remainder.
pub fn commonise_tars(tnames: &[&str]) {
    println!("Opening tars");
    let ars: Vec<tar::Archive<_>> = tnames.iter()
        .map(|tname| {
            let file = fs::File::open(tname).unwrap();
            Archive::new(file)
        })
        .collect();
    let mut arfiless: Vec<Vec<tar::Entry<_>>> = vec![];
    let mut ignoredfiless: Vec<Vec<tar::Entry<_>>> = vec![];
    for (ar, tname) in ars.iter().zip(tnames) {
        let (arfiles, ignoredfiles) = get_archive_entries(ar, tname);
        arfiless.push(arfiles);
        ignoredfiless.push(ignoredfiles);
    }
    println!("Phase 1: metadata compare");
    let mut arheadmaps: Vec<HashMap<HashableHeader, &mut tar::Entry<_>>> =
        arfiless.iter_mut().map(|arfiles| get_header_map(arfiles)).collect();
    // ideally would be &HashableHeader, but that borrows the maps as immutable
    // which then conflicts with the mutable borrow later because a borrow of
    // either keys or values applies to the whole hashmap
    // https://github.com/rust-lang/rfcs/issues/1215
    let commonheaders: Vec<HashableHeader> = find_common_keys(&arheadmaps);
    let p1commonsize = commonheaders.iter().fold(0, |sum, h| sum + h.0.size().unwrap());
    let p1commonsizestr = format_num_bytes(p1commonsize);
    println!("Phase 1 complete: possible {} files with {}",
             commonheaders.len(),
             p1commonsizestr);
    println!("Phase 2: data compare");
    let mut commonfiles: Vec<HashableHeader> = vec![];
    // TODO: sort by offset in archive? means not seeking backwards
    for (i, hheader) in commonheaders.iter().enumerate() {
        let mut files: Vec<&mut tar::Entry<_>> = arheadmaps.iter_mut()
            .map(|arhm| { &mut **arhm.get_mut(hheader).unwrap() })
            .collect();
        // Do the files have the same contents?
        // Note we've verified they have the same size by now
        if readers_identical(&mut files) {
            commonfiles.push(hheader.clone());
        }
        if i % 100 == 0 {
            print!(" Done {}\r", i);
            io::stdout().flush().unwrap();
        }
        // Reset the file - each entry keeps track of its own position
        for f in &mut files {
            f.seek(io::SeekFrom::Start(0)).unwrap();
        }
    }
    let p2commonsize = commonfiles.iter().fold(0, |sum, h| sum + h.0.size().unwrap());
    let p2commonsizestr = format_num_bytes(p2commonsize);
    println!("Phase 2 complete: actual {} files with {}",
             commonfiles.len(),
             p2commonsizestr);
    println!("Phase 3a: preparing for layer creation");
    let tonormpath = |h: &HashableHeader| {
        // Normalise it https://github.com/rust-lang/rust/issues/29008
        h.0.path().unwrap().components().as_path().to_path_buf()
    };
    let commonmap: HashMap<PathBuf, &HashableHeader> = commonfiles.iter()
        .map(|h| (tonormpath(h), h))
        .collect();
    println!("Phase 3a complete");
    println!("Phase 3b: common layer creation");
    // Create a holding-place directory for the common layer as it will be
    // overwritten by the layer above
    let minimalmkdir = |dirpath: &Path| {
        let mut newdir = tar::Header::new();
        newdir.set_path(&dirpath).unwrap();
        // https://github.com/docker/docker/issues/783
        newdir.set_mode(0o777);
        newdir.set_uid(0);
        newdir.set_gid(0);
        newdir.set_mtime(0);
        // cksum: calculated below
        newdir.link[0] = b'5'; // dir
        // linkname: irrelevant
        newdir.set_cksum();
        newdir
    };
    let outname = "common.tar";
    // It doesn't matter which head map, these are common files!
    make_layer_tar(outname,
                   commonfiles.iter(),
                   vec![].iter_mut(),
                   arheadmaps.get_mut(0).unwrap(),
                   &minimalmkdir);
    println!("Phase 3b complete: created {}", outname);
    println!("Phase 3c: individual layer creation");
    // Directories in individual layers reuse the exact header from the
    // common layer so metadata stays consistent across layers.
    let thievingmkdir = |dirpath: &Path| commonmap[dirpath].clone().0;
    for (i, (arheadmap, ignoredfiles)) in arheadmaps.iter_mut()
        .zip(ignoredfiless.iter_mut())
        .enumerate() {
        let outname = format!("individual_{}.tar", i);
        let outheads: Vec<_> = arheadmap.keys()
            .filter(|h| !commonmap.contains_key(&tonormpath(h)))
            .cloned()
            .collect();
        make_layer_tar(&outname,
                       outheads.iter(),
                       ignoredfiles.iter_mut(),
                       arheadmap,
                       &thievingmkdir);
    }
    println!("Phase 3c complete: created {} individual tars", arheadmaps.len());
}

// Performs a request, and on a 401 parses the `WWW-Authenticate: Bearer`
// challenge to fetch a token and retry (continues past this chunk).
fn req_maybe_bearer_auth(client: &Client, method: Method, url: Url, headers: Headers) -> Response {
    let res = client.request(Method::Get, url.clone()).headers(headers.clone()).send().unwrap();
    if *res.status() != StatusCode::Unauthorized {
        return res
    }
    let auth_challenge = res.headers().get_raw("www-authenticate").unwrap();
    assert!(auth_challenge.len() == 1);
    let mut auth_challenge = &auth_challenge[0][..];
    assert!(auth_challenge.starts_with(b"Bearer "));
    auth_challenge = &auth_challenge[b"Bearer ".len()..];
    let mut auth_challenge_realm = None;
    let mut auth_challenge_service = None;
    let mut auth_challenge_scope = None;
    // Parse comma-separated key="value" pairs from the challenge.
    loop {
        let eqpos = auth_challenge.iter().position(|&b| b == b'=').unwrap();
        let key = &auth_challenge[..eqpos];
        assert!(auth_challenge[eqpos + 1] == b'"');
        let valstart = eqpos + 2;
        let valend = valstart + auth_challenge.iter().skip(valstart).position(|&b| b == b'"').unwrap();
        let val = String::from_utf8(auth_challenge[valstart..valend].to_vec()).unwrap();
        match key {
            b"realm" => auth_challenge_realm = Some(val),
            b"service" => auth_challenge_service = Some(val),
            b"scope" => auth_challenge_scope = Some(val),
            _ => panic!(format!("unknown key in auth challenge {:?}", key)),
        }
        if auth_challenge.len() == valend + 1 {
            break
        }
assert!(auth_challenge[valend + 1] == b','); auth_challenge = &auth_challenge[valend + 2..]; } let mut authurl = Url::parse(&auth_challenge_realm.unwrap()).unwrap(); authurl.query_pairs_mut() .append_pair("service", &auth_challenge_service.unwrap()) .append_pair("scope", &auth_challenge_scope.unwrap()); let authreq = client.request(Method::Get, authurl); let mut authjson = String::new(); authreq.send().unwrap().read_to_string(&mut authjson).unwrap(); #[derive(RustcDecodable)] struct AuthToken { token: String } let authtoken = json::decode::<AuthToken>(&authjson).unwrap().token; let newreq = client.request(method, url).headers(headers).header(Authorization(Bearer { token: authtoken })); newreq.send().unwrap() } fn download_image(imageurlstr: &str, targetdir: &str) { env_logger::init().unwrap(); let imageurl = Url::parse(imageurlstr).unwrap(); let imagename = imageurl.path(); assert!(&imagename[0..1] == "/"); let imagetagstart = imagename.bytes().position(|b| b == b':').unwrap(); let imagetag = &imagename[imagetagstart+1..]; let imagename = &imagename[1..imagetagstart]; let mut registryurl = imageurl.clone(); registryurl.set_path("v2/"); fs::create_dir(targetdir).unwrap(); let client = &Client::new().unwrap(); let url = registryurl.join(&format!("{}/manifests/{}", imagename, imagetag)).unwrap(); // https://docs.docker.com/registry/spec/api/#/pulling-an-image // https://docs.docker.com/registry/spec/manifest-v2-1/ // https://docs.docker.com/registry/spec/manifest-v2-2/ // Should really verify manifest // Ordered from base to top fn manifest_blobs(client: &Client, url: Url) -> Vec<String> { let mut manifestheaders = Headers::new(); manifestheaders.set(Accept(vec![ qitem(Mime(TopLevel::Application, SubLevel::Ext("vnd.docker.distribution.manifest.v2+json".to_owned()), vec![])), qitem(Mime(TopLevel::Application, SubLevel::Ext("vnd.docker.distribution.manifest.v1+json".to_owned()), vec![])), ])); let mut manifestjson = String::new(); req_maybe_bearer_auth(client, 
Method::Get, url, manifestheaders).read_to_string(&mut manifestjson).unwrap(); #[allow(non_snake_case)] #[derive(RustcDecodable)] struct ImageManifestSchemaVersion { schemaVersion: usize, } let schemavsn = json::decode::<ImageManifestSchemaVersion>(&manifestjson).unwrap().schemaVersion; if schemavsn == 1 { #[allow(non_snake_case)] #[derive(RustcDecodable)] struct FsLayer { blobSum: String } #[allow(non_snake_case)] #[derive(RustcDecodable)] struct ImageManifest { fsLayers: Vec<FsLayer>, } let manifest: ImageManifest = json::decode(&manifestjson).unwrap(); // The reverse only happens for v1 let mut layers: Vec<_> = manifest.fsLayers.into_iter().map(|fl| fl.blobSum).collect(); layers.reverse(); layers } else if schemavsn == 2 { #[allow(non_snake_case)] #[derive(RustcDecodable)] struct Layer { digest: String } #[allow(non_snake_case)] #[derive(RustcDecodable)] struct ImageManifest { layers: Vec<Layer>, } let manifest: ImageManifest = json::decode(&manifestjson).unwrap(); manifest.layers.into_iter().map(|fl| fl.digest).collect() } else { panic!("unknown manifest schemaVersion {}", schemavsn) } } let blobs: Vec<String> = manifest_blobs(client, url); println!("Found {} blobs", blobs.len()); let mut blobheaders = Headers::new(); blobheaders.set(Accept(vec![ qitem(Mime(TopLevel::Application, SubLevel::Ext("vnd.docker.image.rootfs.diff.tar.gzip".to_owned()), vec![])), ])); for blob in &blobs { println!("Downloading blob {}", blob); let file = File::create(blob).unwrap(); let bloburl = registryurl.join(&format!("{}/blobs/{}", imagename, blob)).unwrap(); let res = req_maybe_bearer_auth(client, Method::Get, bloburl, blobheaders.clone()); io::copy(&mut BufReader::new(res), &mut BufWriter::new(file)).unwrap(); } for blob in blobs.iter() { println!("Extracting blob {}", blob); let exit = Command::new("tar").args(&["--anchored", "--exclude=dev/*", "--force-local", "-C", targetdir, "-xf", blob]) .spawn().unwrap().wait().unwrap(); assert!(exit.success()); let output = 
Command::new("find").args(&[targetdir, "-type", "f", "-name", ".wh.*", "-print0"]).output().unwrap(); assert!(output.status.success()); let mut whfilesstr = output.stdout; if whfilesstr.is_empty() { continue } assert!(*whfilesstr.last().unwrap() == b'\0'); whfilesstr.pop(); let whfiles: Vec<&str> = whfilesstr.split(|&b| b == b'\0').map(|bs| str::from_utf8(bs).unwrap()).collect(); for whfile in whfiles { let whpath = Path::new(whfile); let whname = whpath.file_name().unwrap().to_str().unwrap(); assert!(whname.starts_with(".wh.")); let fname = &whname[4..]; let mut fpath = whpath.parent().unwrap().to_path_buf(); fpath.push(fname); if fpath.is_file() { fs::remove_file(fpath).unwrap() } else { fs::remove_dir_all(fpath).unwrap() } fs::remove_file(whpath).unwrap() } } let mut blobs = blobs; blobs.sort(); blobs.dedup(); for blob in blobs { println!("Removing {}", blob); fs::remove_file(blob).unwrap() } } #[cfg(test)] mod tests { extern crate tempdir; use std::collections::HashMap; use std::env::set_current_dir; use std::fs; use std::io::prelude::*; use std::sync::Mutex; use self::tempdir::TempDir; use self::DirTreeEntry::*; use super::tar::Archive; use super::commonise_tars; macro_rules! t { ($e:expr) => (match $e { Ok(v) => v, Err(e) => panic!("{} returned {}", stringify!($e), e), }) } lazy_static! 
{ pub static ref TMPLOCK: Mutex<()> = Mutex::new(()); } // Does not put program back in original dir fn intmp<F>(f: F) where F: Fn() { let mut _guard = match TMPLOCK.lock() { // destroyed at end of fn Ok(guard) => guard, Err(poisoned) => poisoned.into_inner(), }; let td = TempDir::new("dayer").unwrap(); // destroyed at end of fn set_current_dir(td.path()).unwrap(); f(); } enum DirTreeEntry<'a> { F(&'a str), D, } #[test] #[adorn(intmp)] fn empty_tars() { let filetree = hashmap!{}; let infilelists = hashmap!{ "in0.tar" => vec![], "in1.tar" => vec![], }; let outfilelists = hashmap!{ "common.tar" => vec![], "individual_0.tar" => vec![], "individual_1.tar" => vec![], }; test_commonise(filetree, infilelists, outfilelists); } #[test] #[adorn(intmp)] fn simple_tars() { let filetree = hashmap!{ "0" => F("0content"), "1" => F("1content"), "common" => F("commoncontent"), }; let infilelists = hashmap!{ "in1.tar" => vec!["common", "0"], "in2.tar" => vec!["common", "1"], }; let outfilelists = hashmap!{ "common.tar" => vec!["common"], "individual_0.tar" => vec!["0"], "individual_1.tar" => vec!["1"], }; test_commonise(filetree, infilelists, outfilelists); } #[test] #[adorn(intmp)] fn leading_dirs() { let filetree = hashmap!{ "dir" => D, "dir/0" => F("0content"), "dir/1" => F("1content"), "common" => F("commoncontent"), }; let infilelists = hashmap!{ "in1.tar" => vec!["common", "dir", "dir/0"], "in2.tar" => vec!["common", "dir", "dir/1"], }; let outfilelists = hashmap!{ "common.tar" => vec!["common", "dir"], "individual_0.tar" => vec!["dir", "dir/0"], "individual_1.tar" => vec!["dir", "dir/1"], }; test_commonise(filetree, infilelists, outfilelists); } fn test_commonise(filetree: HashMap<&str, DirTreeEntry>, infilelists: HashMap<&str, Vec<&str>>, outfilelists: HashMap<&str, Vec<&str>>) { let mut fpaths: Vec<&str> = filetree.keys().map(|p| *p).collect(); fpaths.sort(); for path in fpaths.iter() { let entry = &filetree[path]; match entry { &F(content) => { let mut f = 
t!(fs::File::create(path)); t!(f.write_all(content.as_bytes())) } &D => t!(fs::create_dir(path)), } } for (inname, infilelist) in infilelists.iter() { let infile = t!(fs::File::create(inname)); let inar = Archive::new(infile); for fname in infilelist { t!(inar.append_path(fname)); } t!(inar.finish()); } let mut infilenames: Vec<_> = infilelists.keys().map(|s| *s).collect(); infilenames.sort(); commonise_tars(&infilenames[..]); for (outname, outfilelist) in outfilelists.iter() { let outfile = t!(fs::File::open(outname)); let outar = Archive::new(outfile); assert!(outfilelist.len() == t!(outar.entries()).count()); let acutalfilesiter = t!(outar.entries()).map(|rf| t!(rf)); for (expectedpathstr, actualfile) in outfilelist.iter().zip(acutalfilesiter) { let actualpath = t!(actualfile.header().path()).to_path_buf(); assert!(expectedpathstr == &actualpath.to_str().unwrap()); } } } }
/// The sixteen 4-bit condition codes, in encoding order (0b0000 = `Equal`
/// through 0b1111 = `Never`).
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum ConditionCode {
    Equal,
    NotEqual,
    CarrySet,
    CarryClear,
    Minus,
    Plus,
    OverflowSet,
    OverflowClear,
    UnsignedHigher,
    UnsignedLowerOrSame,
    SignedGreaterThanOrEqual,
    SignedLessThan,
    SignedGreaterThan,
    SignedLessThanOrEqual,
    Always,
    Never,
}

impl ConditionCode {
    /// Decode a 4-bit condition field into its `ConditionCode` variant.
    ///
    /// # Panics
    /// Panics if `bits` has any bit set above the low nibble.
    pub fn from_bits(bits: u8) -> ConditionCode {
        // Variants listed in encoding order, so the nibble indexes directly.
        const DECODE_TABLE: [ConditionCode; 16] = [
            ConditionCode::Equal,
            ConditionCode::NotEqual,
            ConditionCode::CarrySet,
            ConditionCode::CarryClear,
            ConditionCode::Minus,
            ConditionCode::Plus,
            ConditionCode::OverflowSet,
            ConditionCode::OverflowClear,
            ConditionCode::UnsignedHigher,
            ConditionCode::UnsignedLowerOrSame,
            ConditionCode::SignedGreaterThanOrEqual,
            ConditionCode::SignedLessThan,
            ConditionCode::SignedGreaterThan,
            ConditionCode::SignedLessThanOrEqual,
            ConditionCode::Always,
            ConditionCode::Never,
        ];
        if bits > 0b1111 {
            panic!("Only the least significant 4 bits may be set");
        }
        DECODE_TABLE[usize::from(bits)]
    }
}

impl std::fmt::Display for ConditionCode {
    /// Render the code as its two-letter assembly mnemonic suffix.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mnemonic = match *self {
            ConditionCode::Equal => "eq",
            ConditionCode::NotEqual => "ne",
            ConditionCode::CarrySet => "cs",
            ConditionCode::CarryClear => "cc",
            ConditionCode::Minus => "mi",
            ConditionCode::Plus => "pl",
            ConditionCode::OverflowSet => "vs",
            ConditionCode::OverflowClear => "vc",
            ConditionCode::UnsignedHigher => "hi",
            ConditionCode::UnsignedLowerOrSame => "ls",
            ConditionCode::SignedGreaterThanOrEqual => "ge",
            ConditionCode::SignedLessThan => "lt",
            ConditionCode::SignedGreaterThan => "gt",
            ConditionCode::SignedLessThanOrEqual => "le",
            ConditionCode::Always => "al",
            ConditionCode::Never => "nv",
        };
        f.write_str(mnemonic)
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn decode_condition_codes() {
        // Each 4-bit value must decode to the variant with that mnemonic,
        // in encoding order.
        let mnemonics = [
            "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc", "hi", "ls", "ge", "lt", "gt", "le",
            "al", "nv",
        ];
        for (bits, &expected) in mnemonics.iter().enumerate() {
            let code = ConditionCode::from_bits(bits as u8);
            assert_eq!(format!("{}", code), expected);
        }
        // Spot-check decoding at both ends of the range.
        assert_eq!(ConditionCode::from_bits(0b0000), ConditionCode::Equal);
        assert_eq!(ConditionCode::from_bits(0b1111), ConditionCode::Never);
    }
}
/// Reader of register CH0_DBG_CTDREQ
pub type R = crate::R<u32, super::CH0_DBG_CTDREQ>;
/// Reader of field `CH0_DBG_CTDREQ`
pub type CH0_DBG_CTDREQ_R = crate::R<u8, u8>;

impl R {
    #[doc = "Bits 0:5"]
    #[inline(always)]
    pub fn ch0_dbg_ctdreq(&self) -> CH0_DBG_CTDREQ_R {
        // The field occupies the low six bits of the register word.
        const FIELD_MASK: u32 = 0x3f;
        let field_bits = self.bits & FIELD_MASK;
        CH0_DBG_CTDREQ_R::new(field_bits as u8)
    }
}
use crate::{
    error::TapeLoadError,
    host::{LoadableAsset, SeekFrom, SeekableAsset},
    zx::tape::TapeImpl,
    Result,
};

// Pulse lengths below are expressed in the same clock units consumed by
// `process_clocks`.
const PILOT_LENGTH: usize = 2168;
const PILOT_PULSES_HEADER: usize = 8063;
const PILOT_PULSES_DATA: usize = 3223;
const SYNC1_LENGTH: usize = 667;
const SYNC2_LENGTH: usize = 735;
const BIT_ONE_LENGTH: usize = 1710;
const BIT_ZERO_LENGTH: usize = 855;
const PAUSE_LENGTH: usize = 3_500_000;
// Size of the chunk of the current block kept in memory at a time.
const BUFFER_SIZE: usize = 128;

// States of the playback state machine driven by `process_clocks`.
#[derive(PartialEq, Eq, Clone, Copy)]
enum TapeState {
    Stop,
    Play,
    Pilot { pulses_left: usize },
    Sync,
    NextByte,
    NextBit { mask: u8 },
    BitHalf { half_bit_delay: usize, mask: u8 },
    Pause,
}

/// A `.tap` tape image player backed by a seekable asset, streaming each
/// block through a small fixed-size buffer.
pub struct Tap<A: LoadableAsset + SeekableAsset> {
    asset: A,
    state: TapeState,
    prev_state: TapeState,
    buffer: [u8; BUFFER_SIZE],
    // Offset of `buffer`'s first byte within the current block.
    // NOTE(review): `bufer_offset` is a typo of `buffer_offset`.
    bufer_offset: usize,
    block_bytes_read: usize,
    current_block_size: Option<usize>,
    tape_ended: bool,
    // Non-fastload related fields
    curr_bit: bool,
    curr_byte: u8,
    delay: usize,
}

impl<A: LoadableAsset + SeekableAsset> Tap<A> {
    /// Wrap an asset as a stopped tape positioned at its start.
    pub fn from_asset(asset: A) -> Result<Self> {
        let tap = Self {
            prev_state: TapeState::Stop,
            state: TapeState::Stop,
            curr_bit: false,
            curr_byte: 0x00,
            buffer: [0u8; BUFFER_SIZE],
            bufer_offset: 0,
            block_bytes_read: 0,
            current_block_size: None,
            delay: 0,
            asset,
            tape_ended: false,
        };
        Ok(tap)
    }
}

impl<A: LoadableAsset + SeekableAsset> TapeImpl for Tap<A> {
    // Fast loading is only valid while playback is stopped.
    fn can_fast_load(&self) -> bool {
        self.state == TapeState::Stop
    }

    /// Return the next byte of the current block, refilling the window buffer
    /// as needed; `None` once the block (or tape) is exhausted.
    fn next_block_byte(&mut self) -> Result<Option<u8>> {
        if self.tape_ended {
            return Ok(None);
        }
        if let Some(block_size) = self.current_block_size {
            if self.block_bytes_read >= block_size {
                return Ok(None);
            }
            let mut buffer_read_pos = self.block_bytes_read - self.bufer_offset;
            // Read new buffer if required
            if buffer_read_pos >= BUFFER_SIZE {
                let bytes_to_read = (block_size - self.bufer_offset - BUFFER_SIZE).min(BUFFER_SIZE);
                self.asset.read_exact(&mut self.buffer[0..bytes_to_read])?;
                self.bufer_offset += BUFFER_SIZE;
                buffer_read_pos = 0;
            }
            // Check last byte in block
            // NOTE(review): this branch appears unreachable — the identical
            // condition already returned `Ok(None)` above, so the reset of
            // `current_block_size`/`block_bytes_read` here never runs (the
            // reset instead happens in `next_block`/`rewind`). Confirm before
            // removing.
            if self.block_bytes_read >= block_size {
                self.current_block_size = None;
                self.block_bytes_read = 0;
                return Ok(None);
            }
            // Perform actual read and advance position
            let result = self.buffer[buffer_read_pos];
            self.block_bytes_read += 1;
            return Ok(Some(result));
        }
        Ok(None)
    }

    /// Advance to the next block: drain any unread bytes of the current one,
    /// read the 2-byte little-endian block length, and prime the buffer.
    /// Returns `false` (and latches `tape_ended`) at end of tape.
    fn next_block(&mut self) -> Result<bool> {
        if self.tape_ended {
            return Ok(false);
        }
        // Skip leftovers from the previous block
        while self.next_block_byte()?.is_some() {}
        let mut block_size_buffer = [0u8; 2];
        if self.asset.read_exact(&mut block_size_buffer).is_err() {
            self.tape_ended = true;
            return Ok(false);
        }
        let block_size = u16::from_le_bytes(block_size_buffer) as usize;
        let block_bytes_to_read = block_size.min(BUFFER_SIZE);
        self.asset
            .read_exact(&mut self.buffer[0..block_bytes_to_read])?;
        self.bufer_offset = 0;
        self.block_bytes_read = 0;
        self.current_block_size = Some(block_size);
        Ok(true)
    }

    // Current level of the tape's output line (the "ear" bit).
    fn current_bit(&self) -> bool {
        self.curr_bit
    }

    /// Drive the playback state machine forward by `clocks` clock units.
    /// While `delay` is pending, only the countdown advances; once it hits
    /// zero the machine transitions and schedules the next pulse.
    fn process_clocks(&mut self, clocks: usize) -> Result<()> {
        if self.state == TapeState::Stop {
            return Ok(());
        }
        if self.delay > 0 {
            if clocks > self.delay {
                self.delay = 0;
            } else {
                self.delay -= clocks;
            }
            return Ok(());
        }
        'state_machine: loop {
            match self.state {
                TapeState::Stop => {
                    // Reset tape but leave in Stopped state
                    self.rewind()?;
                    self.state = TapeState::Stop;
                    break 'state_machine;
                }
                TapeState::Play => {
                    if !self.next_block()? {
                        // No more blocks: fall through to the Stop arm on the
                        // next loop iteration (no break here).
                        self.state = TapeState::Stop;
                    } else {
                        let first_byte = self
                            .next_block_byte()?
                            .ok_or(TapeLoadError::InvalidTapFile)?;
                        // Select appropriate pulse count for Pilot sequence
                        let pulses_left = if first_byte == 0x00 {
                            PILOT_PULSES_HEADER
                        } else {
                            PILOT_PULSES_DATA
                        };
                        self.curr_byte = first_byte;
                        self.curr_bit = true;
                        self.delay = PILOT_LENGTH;
                        self.state = TapeState::Pilot { pulses_left };
                        break 'state_machine;
                    }
                }
                TapeState::Pilot { mut pulses_left } => {
                    self.curr_bit = !self.curr_bit;
                    pulses_left -= 1;
                    if pulses_left == 0 {
                        self.delay = SYNC1_LENGTH;
                        self.state = TapeState::Sync;
                    } else {
                        self.delay = PILOT_LENGTH;
                        self.state = TapeState::Pilot { pulses_left };
                    }
                    break 'state_machine;
                }
                TapeState::Sync => {
                    self.curr_bit = !self.curr_bit;
                    self.delay = SYNC2_LENGTH;
                    // Data bits are emitted MSB-first, hence the 0x80 mask.
                    self.state = TapeState::NextBit { mask: 0x80 };
                    break 'state_machine;
                }
                TapeState::NextByte => {
                    // No break: immediately re-enter the loop in the new state.
                    self.state = if let Some(byte) = self.next_block_byte()? {
                        self.curr_byte = byte;
                        TapeState::NextBit { mask: 0x80 }
                    } else {
                        TapeState::Pause
                    }
                }
                TapeState::NextBit { mask } => {
                    // First half-pulse of the bit; length encodes 0 vs 1.
                    self.curr_bit = !self.curr_bit;
                    if (self.curr_byte & mask) == 0 {
                        self.delay = BIT_ZERO_LENGTH;
                        self.state = TapeState::BitHalf {
                            half_bit_delay: BIT_ZERO_LENGTH,
                            mask,
                        };
                    } else {
                        self.delay = BIT_ONE_LENGTH;
                        self.state = TapeState::BitHalf {
                            half_bit_delay: BIT_ONE_LENGTH,
                            mask,
                        };
                    };
                    break 'state_machine;
                }
                TapeState::BitHalf {
                    half_bit_delay,
                    mut mask,
                } => {
                    // Second half-pulse, then shift to the next bit (or byte).
                    self.curr_bit = !self.curr_bit;
                    self.delay = half_bit_delay;
                    mask >>= 1;
                    self.state = if mask == 0 {
                        TapeState::NextByte
                    } else {
                        TapeState::NextBit { mask }
                    };
                    break 'state_machine;
                }
                TapeState::Pause => {
                    self.curr_bit = !self.curr_bit;
                    self.delay = PAUSE_LENGTH;
                    // Next block or end of the tape
                    self.state = TapeState::Play;
                    break 'state_machine;
                }
            }
        }
        Ok(())
    }

    // Remember the live state so `play` can resume from where we stopped.
    fn stop(&mut self) {
        let state = self.state;
        self.prev_state = state;
        self.state = TapeState::Stop;
    }

    // Resume the remembered state, or start from the beginning if the tape
    // was never playing.
    fn play(&mut self) {
        if self.state == TapeState::Stop {
            if self.prev_state == TapeState::Stop {
                self.state = TapeState::Play;
            } else {
                self.state = self.prev_state;
            }
        }
    }

    /// Reset all playback/streaming state and seek the asset back to byte 0.
    fn rewind(&mut self) -> Result<()> {
        self.curr_bit = false;
        self.curr_byte = 0x00;
        self.block_bytes_read = 0;
        self.bufer_offset = 0;
        self.current_block_size = None;
        self.delay = 0;
        self.asset.seek(SeekFrom::Start(0))?;
        self.tape_ended = false;
        Ok(())
    }
}
extern crate serde_json;

use std::collections::HashMap;

use exonum::api::Api;
use exonum::blockchain::Blockchain;
use exonum::crypto::PublicKey;
use exonum::encoding::serialize::FromHex;
use hyper::header::ContentType;
use iron::headers::AccessControlAllowOrigin;
use iron::prelude::*;
use iron::status;
use prometheus::IntCounter;
use router::Router;

use currency::api::error::ApiError;
use currency::api::ServiceApi;
use currency::assets;
use currency::assets::{AssetBundle, AssetId, AssetInfo};
use currency::wallet;
use currency::wallet::Wallet;

// Query-string parameter names understood by the asset endpoints.
pub const PARAMETER_META_DATA_KEY: &str = "meta_data";
pub const PARAMETER_CREATORS_KEY: &str = "creators";

/// Read-only HTTP API over wallet state stored in the blockchain.
#[derive(Clone)]
pub struct WalletApi {
    pub blockchain: Blockchain,
}

/// Summary of a wallet: its balance and how many asset bundles it holds.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct WalletInfo {
    pub balance: u64,
    pub assets_count: u64,
}

impl WalletInfo {
    /// Build the summary from a full `Wallet`, consuming it.
    pub fn from(wallet: Wallet) -> Self {
        WalletInfo {
            balance: wallet.balance(),
            assets_count: wallet.assets().len() as u64,
        }
    }
}

/// An asset bundle optionally enriched with its on-chain `AssetInfo`
/// (included only when the client asked for `meta_data`).
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct ExtendedAsset {
    pub id: AssetId,
    pub amount: u64,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub meta_data: Option<AssetInfo>,
}

impl ExtendedAsset {
    /// Pair an asset bundle with optional metadata for serialization.
    pub fn from_asset(asset: &AssetBundle, info: Option<AssetInfo>) -> Self {
        ExtendedAsset {
            id: asset.id(),
            amount: asset.amount(),
            meta_data: info,
        }
    }

    /// Strip the metadata back off, yielding the plain bundle.
    pub fn into(&self) -> AssetBundle {
        AssetBundle::new(self.id, self.amount)
    }
}

/// Response body for the wallet-list endpoint.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
pub struct WalletsResponseBody {
    pub wallets: HashMap<PublicKey, WalletInfo>,
}

/// Response body for a wallet's (possibly paginated) asset list.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct WalletAssetsResponseBody {
    pub total: u64,
    pub count: u64,
    pub assets: Vec<ExtendedAsset>,
}

pub type WalletResponse = Result<WalletInfo, ApiError>;
pub type WalletsResponse = Result<WalletsResponseBody, ApiError>;
pub type WalletAssetsResponse = Result<WalletAssetsResponseBody, ApiError>;
pub type WalletAssetResponse = Result<ExtendedAsset, ApiError>;

impl WalletApi {
    // Fetch a single wallet from storage.
    // NOTE(review): all these read paths take a `fork()` of the blockchain;
    // presumably a read-only snapshot would suffice — confirm against the
    // exonum version in use.
    fn wallet(&self, pub_key: &PublicKey) -> Wallet {
        let view = &mut self.blockchain.fork();
        wallet::Schema(view).fetch(pub_key)
    }

    // Summarise every wallet in storage.
    fn wallets(&self) -> HashMap<PublicKey, WalletInfo> {
        let view = &mut self.blockchain.fork();
        let index = wallet::Schema(view).index();
        let mut result: HashMap<PublicKey, WalletInfo> = HashMap::new();
        for v in index.iter() {
            let wi = WalletInfo::from(v.1);
            result.insert(v.0, wi);
        }
        result
    }

    // Summarise the wallets in the half-open window [offset, offset + limit),
    // counted in the index's iteration order.
    fn pagination_wallets(
        &self,
        offset: u64,
        limit: u64,
    ) -> HashMap<PublicKey, WalletInfo> {
        let view = &mut self.blockchain.fork();
        let idx = wallet::Schema(view).index();
        let mut count: u64 = 0;
        let mut result: HashMap<PublicKey, WalletInfo> = HashMap::new();
        for v in idx.iter() {
            if offset <= count && count < offset + limit {
                let wi = WalletInfo::from(v.1);
                result.insert(v.0, wi);
            }
            count += 1;
        }
        result
    }

    // All wallet public keys currently in storage.
    fn wallets_balance(&self) -> Vec<PublicKey> {
        let view = &mut self.blockchain.fork();
        let index = wallet::Schema(view).index();
        let wallets = index.into_iter().map(|v| v.0).collect();
        wallets
    }

    // Assets held by `pub_key`; when `creators` is non-empty, only assets
    // whose registered creator appears in that list are returned.
    fn assets(&self, pub_key: &PublicKey, creators: Vec<PublicKey>) -> Vec<AssetBundle> {
        if creators.len() > 0 {
            let mut assets: Vec<AssetBundle> = vec![];
            for asset in self.wallet(pub_key).assets() {
                // Assets with no registered info are silently dropped from a
                // creator-filtered listing.
                match self.asset_info(&asset.id()) {
                    Some(info) => {
                        if creators.iter().find(|&x| x == info.creator()).is_some() {
                            assets.push(asset);
                        }
                    }
                    None => {}
                }
            }
            return assets;
        }
        self.wallet(pub_key).assets()
    }

    // Look up the registered metadata for an asset id, if any.
    fn asset_info(&self, asset_id: &AssetId) -> Option<AssetInfo> {
        let view = self.blockchain.fork();
        assets::Schema(view).fetch(asset_id)
    }
}

// Prometheus request/response counters, one pair per endpoint.
lazy_static! {
    static ref LIST_REQUESTS: IntCounter = register_int_counter!(
        "dmbc_wallet_api_list_requests_total",
        "Wallet list requests."
    ).unwrap();
    static ref LIST_RESPONSES: IntCounter = register_int_counter!(
        "dmbc_wallet_api_list_responses_total",
        "Wallet list responses."
    ).unwrap();
    static ref BALANCE_REQUESTS: IntCounter = register_int_counter!(
        "dmbc_wallet_api_balance_requests_total",
        "Balance requests."
    ).unwrap();
    static ref BALANCE_RESPONSES: IntCounter = register_int_counter!(
        "dmbc_wallet_api_balance_responses_total",
        "Balance responses."
    ).unwrap();
    static ref ASSETS_REQUESTS: IntCounter = register_int_counter!(
        "dmbc_wallet_api_assets_requests_total",
        "Wallet asset list requests."
    ).unwrap();
    static ref ASSETS_RESPONSES: IntCounter = register_int_counter!(
        "dmbc_wallet_api_assets_responses_total",
        "Wallet asset list responses."
    ).unwrap();
    static ref ASSET_REQUESTS: IntCounter = register_int_counter!(
        "dmbc_wallet_api_asset_requests_total",
        "Wallet asset counter requests."
    ).unwrap();
    static ref ASSET_RESPONSES: IntCounter = register_int_counter!(
        "dmbc_wallet_api_asset_responses_total",
        "Wallet asset counter responses."
    ).unwrap();
}

impl Api for WalletApi {
    /// Register all wallet routes on the Iron router. Every handler returns
    /// pretty-printed JSON with CORS `*` and maps `ApiError` to a status code.
    fn wire(&self, router: &mut Router) {
        // Gets status of the wallet corresponding to the public key.
        let self_ = self.clone();
        let wallet_info = move |req: &mut Request| -> IronResult<Response> {
            BALANCE_REQUESTS.inc();
            // The key is the last path segment (/v1/wallets/:pub_key).
            let path = req.url.path();
            let wallet_key = path.last().unwrap();
            let result: WalletResponse = match PublicKey::from_hex(wallet_key) {
                Ok(public_key) => {
                    let wallet = self_.wallet(&public_key);
                    Ok(WalletInfo::from(wallet))
                }
                Err(_) => Err(ApiError::WalletHexInvalid),
            };
            let mut res = Response::with((
                result
                    .clone()
                    .err()
                    .map(|e| e.to_status())
                    .unwrap_or(status::Ok),
                serde_json::to_string_pretty(&result).unwrap(),
            ));
            res.headers.set(ContentType::json());
            res.headers.set(AccessControlAllowOrigin::Any);
            BALANCE_RESPONSES.inc();
            Ok(res)
        };

        // Gets status of all wallets.
        let self_ = self.clone();
        let wallets_info = move |req: &mut Request| -> IronResult<Response> {
            LIST_REQUESTS.inc();
            let (offset, limit) = ServiceApi::pagination_params(req);
            let wallets = self_.pagination_wallets(offset, limit);
            let result: WalletsResponse = Ok(WalletsResponseBody { wallets });
            let mut res =
                Response::with((status::Ok, serde_json::to_string_pretty(&result).unwrap()));
            res.headers.set(ContentType::json());
            res.headers.set(AccessControlAllowOrigin::Any);
            LIST_RESPONSES.inc();
            Ok(res)
        };

        // Lists one wallet's assets, with optional `creators` filtering,
        // optional `meta_data` expansion and pagination.
        let self_ = self.clone();
        let wallet_assets_info = move |req: &mut Request| -> IronResult<Response> {
            ASSETS_REQUESTS.inc();
            let public_key_result = {
                let wallet_key = req
                    .extensions
                    .get::<Router>()
                    .unwrap()
                    .find("pub_key")
                    .unwrap();
                PublicKey::from_hex(wallet_key)
            };
            // Parse the optional comma-separated `creators` filter; reject
            // more than 10 entries or any non-hex key up front.
            let mut creators: Vec<PublicKey> = vec![];
            let creators_str = ServiceApi::read_parameter(req, PARAMETER_CREATORS_KEY, String::new());
            if creators_str.len() > 0 {
                let creators_results = creators_str.split(',').collect::<Vec<_>>().iter().map(|v| PublicKey::from_hex(v)).collect::<Vec<_>>();
                if creators_results.len() > 10 {
                    let result: WalletAssetsResponse = Err(ApiError::IncorrectRequest);
                    let mut res = Response::with((
                        ApiError::IncorrectRequest.to_status(),
                        serde_json::to_string_pretty(&result).unwrap(),
                    ));
                    res.headers.set(ContentType::json());
                    res.headers.set(AccessControlAllowOrigin::Any);
                    ASSETS_RESPONSES.inc();
                    return Ok(res);
                }
                for creator in creators_results {
                    if creator.is_err() {
                        let result: WalletAssetsResponse = Err(ApiError::WalletHexInvalid);
                        let mut res = Response::with((
                            ApiError::WalletHexInvalid.to_status(),
                            serde_json::to_string_pretty(&result).unwrap(),
                        ));
                        res.headers.set(ContentType::json());
                        res.headers.set(AccessControlAllowOrigin::Any);
                        ASSETS_RESPONSES.inc();
                        return Ok(res);
                    }
                    creators.push(creator.unwrap());
                }
            }
            let result: WalletAssetsResponse = match public_key_result {
                Ok(public_key) => {
                    let assets = self_.assets(&public_key, creators);
                    let extend_assets = ServiceApi::read_parameter(req, PARAMETER_META_DATA_KEY, false);
                    // apply pagination parameters if they exist
                    let assets_to_send = ServiceApi::apply_pagination(req, &assets);
                    let assets_list = if extend_assets {
                        // `meta_data=true`: attach each asset's info record.
                        let mut extended_assets = Vec::<ExtendedAsset>::new();
                        for asset in assets_to_send {
                            let info = self_.asset_info(&asset.id());
                            extended_assets.push(ExtendedAsset::from_asset(asset, info));
                        }
                        extended_assets
                    } else {
                        assets_to_send
                            .into_iter()
                            .map(|a| ExtendedAsset::from_asset(a, None))
                            .collect()
                    };

                    Ok(WalletAssetsResponseBody {
                        // `total` counts the filtered set; `count` the page.
                        total: assets.len() as u64,
                        count: assets_to_send.len() as u64,
                        assets: assets_list,
                    })
                }
                Err(_) => Err(ApiError::WalletHexInvalid),
            };
            let mut res = Response::with((
                result
                    .clone()
                    .err()
                    .map(|e| e.to_status())
                    .unwrap_or(status::Ok),
                serde_json::to_string_pretty(&result).unwrap(),
            ));
            res.headers.set(ContentType::json());
            res.headers.set(AccessControlAllowOrigin::Any);
            ASSETS_RESPONSES.inc();
            Ok(res)
        };

        // Fetches a single asset held by a wallet, by asset id.
        let self_ = self.clone();
        let wallet_asset_info = move |req: &mut Request| -> IronResult<Response> {
            ASSET_REQUESTS.inc();
            let public_key_result = {
                let wallet_key = req
                    .extensions
                    .get::<Router>()
                    .unwrap()
                    .find("pub_key")
                    .unwrap();
                PublicKey::from_hex(wallet_key)
            };
            let asset_id_result = {
                let id_hex = req
                    .extensions
                    .get::<Router>()
                    .unwrap()
                    .find("asset_id")
                    .unwrap();
                AssetId::from_hex(id_hex)
            };
            let result: WalletAssetResponse = match public_key_result {
                Ok(public_key) => match asset_id_result {
                    Ok(id) => {
                        let assets = self_.assets(&public_key, vec![]);
                        let info = if ServiceApi::read_parameter(req, PARAMETER_META_DATA_KEY, false) {
                            self_.asset_info(&id)
                        } else {
                            None
                        };
                        match assets.iter().find(|ref a| a.id() == id) {
                            Some(asset) => Ok(ExtendedAsset::from_asset(asset, info)),
                            None => Err(ApiError::AssetIdNotFound),
                        }
                    }
                    Err(_) => Err(ApiError::AssetIdInvalid),
                },
                Err(_) => Err(ApiError::WalletHexInvalid),
            };
            let mut res = Response::with((
                result
                    .clone()
                    .err()
                    .map(|e| e.to_status())
                    .unwrap_or(status::Ok),
                serde_json::to_string_pretty(&result).unwrap(),
            ));
            res.headers.set(ContentType::json());
            res.headers.set(AccessControlAllowOrigin::Any);
            ASSET_RESPONSES.inc();
            Ok(res)
        };

        router.get("/v1/wallets", wallets_info, "wallets_info");
        router.get("/v1/wallets/:pub_key", wallet_info, "get_balance");
        router.get(
            "/v1/wallets/:pub_key/assets",
            wallet_assets_info,
            "assets_info",
        );
        router.get(
            "/v1/wallets/:pub_key/assets/:asset_id",
            wallet_asset_info,
            "asset_info",
        );
    }
}
use super::common::input;

/// A person record with a name and an age, populated interactively.
#[derive(Debug)]
pub struct Person {
    first_name: String,
    last_name: String,
    age: u8,
}

impl Person {
    /// Prompt the user for each field on stdin and assemble a `Person`.
    ///
    /// Panics (via `expect`) when the entered age does not parse as a `u8`.
    pub fn create() -> Person {
        let first_name = input("Please enter first name");
        let last_name = input("Please enter last name");
        let age = input("Please enter age")
            .trim()
            .parse()
            .expect("Age must be a number");
        Person {
            first_name,
            last_name,
            age,
        }
    }

    /// Produce an independent deep copy of an existing `Person`.
    pub fn from(person: &Person) -> Person {
        Person {
            first_name: person.first_name.clone(),
            last_name: person.last_name.clone(),
            age: person.age,
        }
    }
}
use std::fmt; use crate::cpu::registers::Register; use crate::cpu::registers::RegisterPair; use crate::memory::Memory; use crate::timer; #[derive(Debug, PartialEq)] pub enum Instruction { Nop, Stop, Halt, Load16(Load16Target, Load16Source), Load8(Load8Operand, Load8Operand), Add(ArithmeticOperand), Sub(ArithmeticOperand), AddCarry(ArithmeticOperand), SubCarry(ArithmeticOperand), And(ArithmeticOperand), Or(ArithmeticOperand), Xor(ArithmeticOperand), Compare(ArithmeticOperand), Increment(ArithmeticOperand), Decrement(ArithmeticOperand), AddPtr(PtrArithOperand, PtrArithOperand), IncrementPtr(PtrArithOperand), DecrementPtr(PtrArithOperand), Jump(JumpKind), EnableInterrupts, DisableInterrupts, Pop(RegisterPair), Push(RegisterPair), Return(ReturnKind), Rotate(RotateKind), SetCarryFlag, DecimalAdjust, Restart(u8), Call(CallKind), Instruction16(Instruction16), Complement, FlipCarry, } impl fmt::Display for Instruction { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let instruction_string = match self { Instruction::Nop => String::from("NOP"), Instruction::Stop => String::from("STOP"), Instruction::Halt => String::from("HALT"), Instruction::Complement => String::from("CPL"), Instruction::FlipCarry => String::from("CCF"), Instruction::Load16(target, source) => std::format!("LD {},{}", target, source), Instruction::Load8(target, source) => std::format!("LD {},{}", target, source), Instruction::Add(operand) => std::format!("ADD {}", operand), Instruction::Sub(operand) => std::format!("SUB {}", operand), Instruction::AddCarry(operand) => std::format!("ADC {}", operand), Instruction::SubCarry(operand) => std::format!("SBC {}", operand), Instruction::And(operand) => std::format!("AND {}", operand), Instruction::Or(operand) => std::format!("OR {}", operand), Instruction::Xor(operand) => std::format!("XOR {}", operand), Instruction::Compare(operand) => std::format!("CP {}", operand), Instruction::Increment(operand) => std::format!("INC {}", operand), 
Instruction::Decrement(operand) => std::format!("DEC {}", operand), Instruction::AddPtr(operand1, operand2) => { std::format!("ADD {},{}", operand1, operand2) } Instruction::IncrementPtr(operand) => std::format!("INC {}", operand), Instruction::DecrementPtr(operand) => std::format!("DEC {}", operand), Instruction::Jump(kind) => std::format!("J{}", kind), Instruction::EnableInterrupts => String::from("EI"), Instruction::DisableInterrupts => String::from("DI"), Instruction::Pop(reg_pair) => std::format!("POP {}", reg_pair), Instruction::Push(reg_pair) => std::format!("PUSH {}", reg_pair), Instruction::Rotate(kind) => std::format!("R{}A", kind), Instruction::SetCarryFlag => String::from("SCF"), Instruction::DecimalAdjust => String::from("DAA"), Instruction::Call(kind) => std::format!("CALL {}", kind), Instruction::Restart(n) => std::format!("RST {}", n), Instruction::Return(kind) => std::format!("RET{}", kind), Instruction::Instruction16(instruction) => std::format!("{}", instruction), }; write!(f, "{}", instruction_string) } } impl Instruction { pub fn from_bytes(memory: &Memory, a: u16) -> Instruction { let byte = memory.read_byte(a); let high_bits = (byte & 0xF0) >> 4; let low_bits = byte & 0x0F; match (high_bits, low_bits) { (0x0, 0x0) => Instruction::Nop, (0x1, 0x0) => Instruction::Stop, (0x2..=0x3, 0x0) => { let offset = memory.read_byte(a + 1); match high_bits { 0x2 => Instruction::Jump(JumpKind::JumpRelativeConditional( JumpCondition::NonZero, offset as i8, )), 0x3 => Instruction::Jump(JumpKind::JumpRelativeConditional( JumpCondition::NonCarry, offset as i8, )), _ => panic!("Invalid opcode: {:#x}", byte), } } (0x0..=0x3, 0x1) => { let target = match high_bits { 0x0 => Load16Target::Register16(RegisterPair::Bc), 0x1 => Load16Target::Register16(RegisterPair::De), 0x2 => Load16Target::Register16(RegisterPair::Hl), 0x3 => Load16Target::StackPointer, _ => panic!("Invalid opcode: {:#x}", byte), }; let data: u16 = (memory.read_byte(a + 1) as u16) | 
(memory.read_byte(a + 2) as u16) << 8; Instruction::Load16(target, Load16Source::Data(data)) } (0x0..=0x3, 0x2) => { let target = match high_bits { 0x0 => Load8Operand::AtReg16(RegisterPair::Bc), 0x1 => Load8Operand::AtReg16(RegisterPair::De), 0x2 => Load8Operand::AtHli, 0x3 => Load8Operand::AtHld, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Load8(target, Load8Operand::Register(Register::A)) } (0x0..=0x3, 0x3) => { let operand = match high_bits { 0x0 => PtrArithOperand::Register16(RegisterPair::Bc), 0x1 => PtrArithOperand::Register16(RegisterPair::De), 0x2 => PtrArithOperand::Register16(RegisterPair::Hl), 0x3 => PtrArithOperand::StackPointer, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::IncrementPtr(operand) } (0x0..=0x3, 0x4..=0x5) => { let operand = match high_bits { 0x0 => ArithmeticOperand::Register(Register::B), 0x1 => ArithmeticOperand::Register(Register::D), 0x2 => ArithmeticOperand::Register(Register::H), 0x3 => ArithmeticOperand::AtHl, _ => panic!("Invalid opcode: {:#x}", byte), }; if let 0x4 = low_bits { Instruction::Increment(operand) } else { Instruction::Decrement(operand) } } (0x0..=0x3, 0x6) => { let data = memory.read_byte(a + 1); let target = match high_bits { 0x0 => Load8Operand::Register(Register::B), 0x1 => Load8Operand::Register(Register::D), 0x2 => Load8Operand::Register(Register::H), 0x3 => Load8Operand::AtReg16(RegisterPair::Hl), _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Load8(target, Load8Operand::Data(data)) } (0x0..=0x1, 0x7) => match high_bits { 0x0 => Instruction::Rotate(RotateKind::LeftCircular), 0x1 => Instruction::Rotate(RotateKind::Left), _ => panic!("Invalid opcode: {:#x}", byte), }, (0x2..=0x3, 0x7) => match high_bits { 0x2 => Instruction::DecimalAdjust, 0x3 => Instruction::SetCarryFlag, _ => panic!("Invalid opcode: {:#x}", byte), }, (0x0, 0x8) => { let address: u16 = (memory.read_byte(a + 1) as u16) | (memory.read_byte(a + 2) as u16) << 8; 
Instruction::Load16(Load16Target::Address(address), Load16Source::StackPointer) } (0x1..=0x3, 0x8) => { let offset = memory.read_byte(a + 1); match high_bits { 0x1 => Instruction::Jump(JumpKind::JumpRelative(offset as i8)), 0x2 => Instruction::Jump(JumpKind::JumpRelativeConditional( JumpCondition::Zero, offset as i8, )), 0x3 => Instruction::Jump(JumpKind::JumpRelativeConditional( JumpCondition::Carry, offset as i8, )), _ => panic!("Invalid opcode: {:#x}", byte), } } (0x0..=0x3, 0x9) => { let operand = match high_bits { 0x0 => PtrArithOperand::Register16(RegisterPair::Bc), 0x1 => PtrArithOperand::Register16(RegisterPair::De), 0x2 => PtrArithOperand::Register16(RegisterPair::Hl), 0x3 => PtrArithOperand::StackPointer, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::AddPtr(PtrArithOperand::Register16(RegisterPair::Hl), operand) } (0x0..=0x3, 0xA) => { let src = match high_bits { 0x0 => Load8Operand::AtReg16(RegisterPair::Bc), 0x1 => Load8Operand::AtReg16(RegisterPair::De), 0x2 => Load8Operand::AtHli, 0x3 => Load8Operand::AtHld, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Load8(Load8Operand::Register(Register::A), src) } (0x0..=0x3, 0xC..=0xD) => { let inc_dec_target = match high_bits { 0x0 => ArithmeticOperand::Register(Register::C), 0x1 => ArithmeticOperand::Register(Register::E), 0x2 => ArithmeticOperand::Register(Register::L), 0x3 => ArithmeticOperand::Register(Register::A), _ => panic!("Invalid opcode: {:#x}", byte), }; match low_bits { 0xC => Instruction::Increment(inc_dec_target), 0xD => Instruction::Decrement(inc_dec_target), _ => panic!("Invalid opcode: {:#x}", byte), } } (0x0..=0x3, 0xE) => { let data = memory.read_byte(a + 1); let target = match high_bits { 0x0 => Load8Operand::Register(Register::C), 0x1 => Load8Operand::Register(Register::E), 0x2 => Load8Operand::Register(Register::L), 0x3 => Load8Operand::Register(Register::A), _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Load8(target, Load8Operand::Data(data)) } 
(0x0..=0x3, 0xB) => { let operand = match high_bits { 0x0 => PtrArithOperand::Register16(RegisterPair::Bc), 0x1 => PtrArithOperand::Register16(RegisterPair::De), 0x2 => PtrArithOperand::Register16(RegisterPair::Hl), 0x3 => PtrArithOperand::StackPointer, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::DecrementPtr(operand) } (0x0..=0x1, 0xF) => match high_bits { 0x0 => Instruction::Rotate(RotateKind::RightCircular), 0x1 => Instruction::Rotate(RotateKind::Right), _ => panic!("Invalid opcode: {:#x}", byte), }, (0x2..=0x3, 0xF) => match high_bits { 0x2 => Instruction::Complement, 0x3 => Instruction::FlipCarry, _ => panic!("Invalid opcode: {:#x}", byte), }, (0x7, 0x6) => Instruction::Halt, (0x4..=0x7, 0x0..=0x7) => { let target = match high_bits { 0x4 => Load8Operand::Register(Register::B), 0x5 => Load8Operand::Register(Register::D), 0x6 => Load8Operand::Register(Register::H), 0x7 => Load8Operand::AtReg16(RegisterPair::Hl), _ => panic!("Invalid opcode: {:#x}", byte), }; let source = match low_bits { 0x0 => Load8Operand::Register(Register::B), 0x1 => Load8Operand::Register(Register::C), 0x2 => Load8Operand::Register(Register::D), 0x3 => Load8Operand::Register(Register::E), 0x4 => Load8Operand::Register(Register::H), 0x5 => Load8Operand::Register(Register::L), 0x6 => Load8Operand::AtReg16(RegisterPair::Hl), 0x7 => Load8Operand::Register(Register::A), _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Load8(target, source) } (0x4..=0x7, 0x8..=0xF) => { let target = match high_bits { 0x4 => Load8Operand::Register(Register::C), 0x5 => Load8Operand::Register(Register::E), 0x6 => Load8Operand::Register(Register::L), 0x7 => Load8Operand::Register(Register::A), _ => panic!("Invalid opcode: {:#x}", byte), }; let source = match low_bits { 0x8 => Load8Operand::Register(Register::B), 0x9 => Load8Operand::Register(Register::C), 0xA => Load8Operand::Register(Register::D), 0xB => Load8Operand::Register(Register::E), 0xC => Load8Operand::Register(Register::H), 0xD 
=> Load8Operand::Register(Register::L), 0xE => Load8Operand::AtReg16(RegisterPair::Hl), 0xF => Load8Operand::Register(Register::A), _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Load8(target, source) } (0x8..=0xB, 0x0..=0x7) => { let operand = match low_bits { 0x0 => ArithmeticOperand::Register(Register::B), 0x1 => ArithmeticOperand::Register(Register::C), 0x2 => ArithmeticOperand::Register(Register::D), 0x3 => ArithmeticOperand::Register(Register::E), 0x4 => ArithmeticOperand::Register(Register::H), 0x5 => ArithmeticOperand::Register(Register::L), 0x6 => ArithmeticOperand::AtHl, 0x7 => ArithmeticOperand::Register(Register::A), _ => panic!("Invalid opcode: {:#x}", byte), }; match high_bits { 0x8 => Instruction::Add(operand), 0x9 => Instruction::Sub(operand), 0xA => Instruction::And(operand), 0xB => Instruction::Or(operand), _ => panic!("Invalid opcode: {:#x}", byte), } } (0x8..=0xB, 0x8..=0xF) => { let operand = match low_bits { 0x8 => ArithmeticOperand::Register(Register::B), 0x9 => ArithmeticOperand::Register(Register::C), 0xA => ArithmeticOperand::Register(Register::D), 0xB => ArithmeticOperand::Register(Register::E), 0xC => ArithmeticOperand::Register(Register::H), 0xD => ArithmeticOperand::Register(Register::L), 0xE => ArithmeticOperand::AtHl, 0xF => ArithmeticOperand::Register(Register::A), _ => panic!("Invalid opcode: {:#x}", byte), }; match high_bits { 0x8 => Instruction::AddCarry(operand), 0x9 => Instruction::SubCarry(operand), 0xA => Instruction::Xor(operand), 0xB => Instruction::Compare(operand), _ => panic!("Invalid opcode: {:#x}", byte), } } (0xC..=0xD, 0x0) => { let cond = match high_bits { 0xC => JumpCondition::NonZero, 0xD => JumpCondition::NonCarry, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Return(ReturnKind::ReturnConditional(cond)) } (0xE..=0xF, 0x0) => { let a8 = memory.read_byte(a + 1); match high_bits { 0xE => Instruction::Load8( Load8Operand::AtAddress8(a8), Load8Operand::Register(Register::A), ), 0xF => 
Instruction::Load8( Load8Operand::Register(Register::A), Load8Operand::AtAddress8(a8), ), _ => panic!("Invalid opcode: {:#x}", byte), } } (0xC..=0xF, 0x1) => { let reg = match high_bits { 0xC => RegisterPair::Bc, 0xD => RegisterPair::De, 0xE => RegisterPair::Hl, 0xF => RegisterPair::Af, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Pop(reg) } (0xC..=0xD, 0x2) => { let a16 = (memory.read_byte(a + 1) as u16) | (memory.read_byte(a + 2) as u16) << 8; let cond = match high_bits { 0xC => JumpCondition::NonZero, 0xD => JumpCondition::NonCarry, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Jump(JumpKind::JumpConditional(cond, a16)) } (0xE..=0xF, 0x2) => match high_bits { 0xE => Instruction::Load8(Load8Operand::AtC, Load8Operand::Register(Register::A)), 0xF => Instruction::Load8(Load8Operand::Register(Register::A), Load8Operand::AtC), _ => panic!("Invalid opcode: {:#x}", byte), }, (0xC, 0x3) => { let a16: u16 = (memory.read_byte(a + 1) as u16) | (memory.read_byte(a + 2) as u16) << 8; Instruction::Jump(JumpKind::Jump(a16)) } (0xF, 0x3) => Instruction::DisableInterrupts, (0xF, 0xB) => Instruction::EnableInterrupts, (0xC..=0xD, 0x4) => { let a16: u16 = memory.read_2_bytes(a + 1); let cond = match high_bits { 0xC => JumpCondition::NonZero, 0xD => JumpCondition::NonCarry, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Call(CallKind::CallConditional(a16, cond)) } (0xC..=0xF, 0x5) => { let reg = match high_bits { 0xC => RegisterPair::Bc, 0xD => RegisterPair::De, 0xE => RegisterPair::Hl, 0xF => RegisterPair::Af, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Push(reg) } (0xC..=0xF, 0x6) => { let d8 = memory.read_byte(a + 1); match high_bits { 0xC => Instruction::Add(ArithmeticOperand::Data(d8)), 0xD => Instruction::Sub(ArithmeticOperand::Data(d8)), 0xE => Instruction::And(ArithmeticOperand::Data(d8)), 0xF => Instruction::Or(ArithmeticOperand::Data(d8)), _ => panic!("Invalid opcode: {:#x}", byte), } } (0xC..=0xF, 0x7) => match 
high_bits { 0xC => Instruction::Restart(0x0), 0xD => Instruction::Restart(0x10), 0xE => Instruction::Restart(0x20), 0xF => Instruction::Restart(0x30), _ => panic!("Invalid opcode: {:#x}", byte), }, (0xC..=0xD, 0x8) => { let cond = match high_bits { 0xC => JumpCondition::Zero, 0xD => JumpCondition::Carry, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Return(ReturnKind::ReturnConditional(cond)) } (0xE, 0x8) => { let s8: i8 = memory.read_byte(a + 1) as i8; Instruction::AddPtr(PtrArithOperand::StackPointer, PtrArithOperand::Data(s8)) } (0xC..=0xE, 0x9) => match high_bits { 0xC => Instruction::Return(ReturnKind::Return), 0xD => Instruction::Return(ReturnKind::ReturnInterrupt), 0xE => Instruction::Jump(JumpKind::JumpHl), _ => panic!("Invalid opcode: {:#x}", byte), }, (0xF, 0x8) => { let s8: i8 = memory.read_byte(a + 1) as i8; Instruction::Load16( Load16Target::Register16(RegisterPair::Hl), Load16Source::SpPlus(s8), ) } (0xF, 0x9) => Instruction::Load16(Load16Target::StackPointer, Load16Source::Hl), (0xC..=0xD, 0xA) => { let a16: u16 = memory.read_2_bytes(a + 1); let cond = match high_bits { 0xC => JumpCondition::Zero, 0xD => JumpCondition::Carry, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Jump(JumpKind::JumpConditional(cond, a16)) } (0xE..=0xF, 0xA) => { let a16: u16 = memory.read_2_bytes(a + 1); match high_bits { 0xE => Instruction::Load8( Load8Operand::AtAddress16(a16), Load8Operand::Register(Register::A), ), 0xF => Instruction::Load8( Load8Operand::Register(Register::A), Load8Operand::AtAddress16(a16), ), _ => panic!("Invalid opcode: {:#x}", byte), } } (0xC..=0xD, 0xC) => { let a16: u16 = memory.read_2_bytes(a + 1); let cond = match high_bits { 0xC => JumpCondition::Zero, 0xD => JumpCondition::Carry, _ => panic!("Invalid opcode: {:#x}", byte), }; Instruction::Call(CallKind::CallConditional(a16, cond)) } (0xC, 0xB) => { let suffix: u8 = memory.read_byte(a + 1); Instruction::Instruction16(Instruction::decode_i16_suffix(suffix)) } (0xC, 
0xD) => { let a16 = memory.read_2_bytes(a + 1); Instruction::Call(CallKind::Call(a16)) } (0xC..=0xF, 0xE) => { let d8 = memory.read_byte(a + 1); match high_bits { 0xC => Instruction::AddCarry(ArithmeticOperand::Data(d8)), 0xD => Instruction::SubCarry(ArithmeticOperand::Data(d8)), 0xE => Instruction::Xor(ArithmeticOperand::Data(d8)), 0xF => Instruction::Compare(ArithmeticOperand::Data(d8)), _ => panic!("Invalid opcode: {:#x}", byte), } } (0xC..=0xF, 0xF) => match high_bits { 0xC => Instruction::Restart(0x08), 0xD => Instruction::Restart(0x18), 0xE => Instruction::Restart(0x28), 0xF => Instruction::Restart(0x38), _ => panic!("Invalid opcode: {:#x}", byte), }, _ => panic!("Couldn't match opcode for {:#x}/{:#x}", high_bits, low_bits), } } fn decode_i16_suffix(suffix: u8) -> Instruction16 { let high_bits = (suffix & 0xF0) >> 4; let low_bits = suffix & 0x0F; let reg = match low_bits { 0x0 | 0x8 => ArithmeticOperand::Register(Register::B), 0x1 | 0x9 => ArithmeticOperand::Register(Register::C), 0x2 | 0xA => ArithmeticOperand::Register(Register::D), 0x3 | 0xB => ArithmeticOperand::Register(Register::E), 0x4 | 0xC => ArithmeticOperand::Register(Register::H), 0x5 | 0xD => ArithmeticOperand::Register(Register::L), 0x6 | 0xE => ArithmeticOperand::AtHl, 0x7 | 0xF => ArithmeticOperand::Register(Register::A), _ => panic!("Invalid 16 bit instruction suffix: {:#x}", suffix), }; match low_bits { 0x0..=0x7 => match high_bits { 0x0 => Instruction16::RotateLeftCircular(reg), 0x1 => Instruction16::RotateLeft(reg), 0x2 => Instruction16::ShiftLeft(reg), 0x3 => Instruction16::Swap(reg), 0x4 => Instruction16::BitComplement(0, reg), 0x5 => Instruction16::BitComplement(2, reg), 0x6 => Instruction16::BitComplement(4, reg), 0x7 => Instruction16::BitComplement(6, reg), 0x8 => Instruction16::Reset(0, reg), 0x9 => Instruction16::Reset(2, reg), 0xA => Instruction16::Reset(4, reg), 0xB => Instruction16::Reset(6, reg), 0xC => Instruction16::Set(0, reg), 0xD => Instruction16::Set(2, reg), 0xE => 
Instruction16::Set(4, reg), 0xF => Instruction16::Set(6, reg), _ => panic!("Invalid 16 bit instruction suffix: {:#x}", suffix), }, 0x8..=0xF => match high_bits { 0x0 => Instruction16::RotateRightCircular(reg), 0x1 => Instruction16::RotateRight(reg), 0x2 => Instruction16::ShiftRightArithmetic(reg), 0x3 => Instruction16::ShiftRightLogical(reg), 0x4 => Instruction16::BitComplement(1, reg), 0x5 => Instruction16::BitComplement(3, reg), 0x6 => Instruction16::BitComplement(5, reg), 0x7 => Instruction16::BitComplement(7, reg), 0x8 => Instruction16::Reset(1, reg), 0x9 => Instruction16::Reset(3, reg), 0xA => Instruction16::Reset(5, reg), 0xB => Instruction16::Reset(7, reg), 0xC => Instruction16::Set(1, reg), 0xD => Instruction16::Set(3, reg), 0xE => Instruction16::Set(5, reg), 0xF => Instruction16::Set(7, reg), _ => panic!("Invalid 16 bit instruction suffix: {:#x}", suffix), }, _ => panic!("Invalid 16 bit instruction suffix: {:#x}", suffix), } } pub fn size_and_cycles(i: &Instruction) -> (u8, timer::Cycles) { match i { Instruction::Nop => (1, timer::Cycles::Cycles(1)), Instruction::Stop => (2, timer::Cycles::Cycles(1)), Instruction::Halt => (1, timer::Cycles::Cycles(1)), Instruction::Complement => (1, timer::Cycles::Cycles(1)), Instruction::FlipCarry => (1, timer::Cycles::Cycles(1)), Instruction::Load16(target, source) => match (target, source) { (_, Load16Source::Data(_)) => (3, timer::Cycles::Cycles(3)), (Load16Target::Address(_), Load16Source::StackPointer) => { (3, timer::Cycles::Cycles(5)) } (_, Load16Source::SpPlus(_)) => (2, timer::Cycles::Cycles(3)), _ => (1, timer::Cycles::Cycles(2)), }, Instruction::Load8(target, source) => match (target, source) { (Load8Operand::Register(_), Load8Operand::Register(_)) => { (1, timer::Cycles::Cycles(1)) } (Load8Operand::AtHli, _) | (_, Load8Operand::AtHli) => { (1, timer::Cycles::Cycles(2)) } (Load8Operand::AtHld, _) | (_, Load8Operand::AtHld) => { (1, timer::Cycles::Cycles(2)) } (Load8Operand::Register(_), Load8Operand::Data(_)) 
=> (2, timer::Cycles::Cycles(2)), (Load8Operand::AtReg16(RegisterPair::Hl), Load8Operand::Data(_)) => { (2, timer::Cycles::Cycles(3)) } (Load8Operand::AtReg16(_), Load8Operand::Register(_)) => { (1, timer::Cycles::Cycles(2)) } (Load8Operand::Register(_), Load8Operand::AtReg16(_)) => { (1, timer::Cycles::Cycles(2)) } (Load8Operand::AtC, Load8Operand::Register(_)) => (1, timer::Cycles::Cycles(2)), (Load8Operand::Register(_), Load8Operand::AtC) => (1, timer::Cycles::Cycles(2)), (Load8Operand::AtAddress8(_), _) | (_, Load8Operand::AtAddress8(_)) => { (2, timer::Cycles::Cycles(3)) } (Load8Operand::AtAddress16(_), _) | (_, Load8Operand::AtAddress16(_)) => { (3, timer::Cycles::Cycles(4)) } _ => panic!("Can't figure out size of {}", i), }, // All arithmetic ops share size/cycle properties based on operand Instruction::Add(operand) | Instruction::Sub(operand) | Instruction::AddCarry(operand) | Instruction::SubCarry(operand) | Instruction::And(operand) | Instruction::Or(operand) | Instruction::Xor(operand) | Instruction::Compare(operand) => match operand { ArithmeticOperand::Register(_) => (1, timer::Cycles::Cycles(1)), ArithmeticOperand::AtHl => (1, timer::Cycles::Cycles(2)), ArithmeticOperand::Data(_) => (2, timer::Cycles::Cycles(2)), }, Instruction::Increment(target) | Instruction::Decrement(target) => match target { ArithmeticOperand::Register(_) => (1, timer::Cycles::Cycles(1)), ArithmeticOperand::AtHl => (1, timer::Cycles::Cycles(3)), _ => panic!("Can't figure out size of {}", i), }, Instruction::AddPtr(operand1, _) => match operand1 { PtrArithOperand::Register16(_) => (1, timer::Cycles::Cycles(2)), PtrArithOperand::StackPointer => (2, timer::Cycles::Cycles(4)), _ => panic!("Can't figure out size of {}", i), }, Instruction::IncrementPtr(_) | Instruction::DecrementPtr(_) => { (1, timer::Cycles::Cycles(2)) } Instruction::Jump(kind) => match kind { JumpKind::Jump(_) => (3, timer::Cycles::Cycles(4)), JumpKind::JumpConditional(_, _) => (3, 
timer::Cycles::ConditionalCycles(4, 3)), JumpKind::JumpRelative(_) => (2, timer::Cycles::Cycles(4)), JumpKind::JumpRelativeConditional(_, _) => { (2, timer::Cycles::ConditionalCycles(3, 2)) } JumpKind::JumpHl => (1, timer::Cycles::Cycles(1)), }, Instruction::EnableInterrupts => (1, timer::Cycles::Cycles(1)), Instruction::DisableInterrupts => (1, timer::Cycles::Cycles(1)), Instruction::Pop(_) => (1, timer::Cycles::Cycles(3)), Instruction::Push(_) => (1, timer::Cycles::Cycles(3)), Instruction::Rotate(_) => (1, timer::Cycles::Cycles(1)), Instruction::SetCarryFlag => (1, timer::Cycles::Cycles(1)), Instruction::DecimalAdjust => (1, timer::Cycles::Cycles(1)), Instruction::Call(kind) => match kind { CallKind::CallConditional(_, _) => (3, timer::Cycles::ConditionalCycles(6, 3)), CallKind::Call(_) => (3, timer::Cycles::Cycles(6)), }, Instruction::Restart(_) => (1, timer::Cycles::Cycles(4)), Instruction::Return(kind) => match kind { ReturnKind::Return | ReturnKind::ReturnConditional(_) => { (1, timer::Cycles::ConditionalCycles(5, 2)) } ReturnKind::ReturnInterrupt => (1, timer::Cycles::Cycles(4)), }, // TODO: Need to change this so (HL) 16 bit instructions take 4 cycles Instruction::Instruction16(_) => (2, timer::Cycles::Cycles(2)), } } } #[derive(Debug, PartialEq)] pub enum Load16Target { Register16(RegisterPair), StackPointer, Address(u16), } impl fmt::Display for Load16Target { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let operand_string = match self { Load16Target::Register16(reg_pair) => std::format!("{}", reg_pair), Load16Target::StackPointer => String::from("SP"), Load16Target::Address(data) => std::format!("${:#0x}", data), }; write!(f, "{}", operand_string) } } #[derive(Debug, PartialEq)] pub enum Load16Source { StackPointer, Data(u16), SpPlus(i8), Hl, } impl fmt::Display for Load16Source { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let operand_string = match self { Load16Source::Hl => String::from("HL"), Load16Source::StackPointer => 
String::from("SP"), Load16Source::SpPlus(r8) => std::format!("SP+{:#0x}", r8), Load16Source::Data(data) => std::format!("${:#0x}", data), }; write!(f, "{}", operand_string) } } #[derive(Debug, PartialEq)] pub enum Load8Operand { Register(Register), AtAddress16(u16), AtAddress8(u8), AtC, AtReg16(RegisterPair), AtHli, AtHld, Data(u8), } impl fmt::Display for Load8Operand { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let operand_string = match self { Load8Operand::Register(reg) => std::format!("{}", reg), Load8Operand::AtC => String::from("(C)"), Load8Operand::AtReg16(reg_pair) => std::format!("({})", reg_pair), Load8Operand::AtHli => String::from("(HL+)"), Load8Operand::AtHld => String::from("(HL-)"), Load8Operand::AtAddress16(a16) => std::format!("({:#0x})", a16), Load8Operand::AtAddress8(a8) => std::format!("({:#0x})", a8), Load8Operand::Data(data) => std::format!("${:#0x}", data), }; write!(f, "{}", operand_string) } } #[derive(Debug, PartialEq)] pub enum ArithmeticOperand { Register(Register), AtHl, Data(u8), } impl fmt::Display for ArithmeticOperand { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let operand_string = match self { ArithmeticOperand::Register(reg) => std::format!("{}", reg), ArithmeticOperand::AtHl => String::from("(HL)"), ArithmeticOperand::Data(d8) => std::format!("${:#0x}", d8), }; write!(f, "{}", operand_string) } } #[derive(Debug, PartialEq)] pub enum PtrArithOperand { Register16(RegisterPair), StackPointer, Data(i8), } impl fmt::Display for PtrArithOperand { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let operand_string = match self { PtrArithOperand::Register16(reg_pair) => std::format!("({})", reg_pair), PtrArithOperand::StackPointer => String::from("SP"), PtrArithOperand::Data(i8) => std::format!("${}", i8), }; write!(f, "{}", operand_string) } } #[derive(Debug, PartialEq)] pub enum JumpKind { JumpRelative(i8), JumpRelativeConditional(JumpCondition, i8), Jump(u16), JumpConditional(JumpCondition, u16), 
JumpHl, } impl fmt::Display for JumpKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let kind = match self { JumpKind::JumpRelative(offset) => std::format!("R {:#0x}", offset), JumpKind::JumpRelativeConditional(cond, offset) => { std::format!("R {} {:#0x}", cond, offset) } JumpKind::Jump(address) => std::format!("P {:#0x}", address), JumpKind::JumpConditional(cond, address) => std::format!("P {} {:#0x}", cond, address), JumpKind::JumpHl => String::from("P (HL)"), }; write!(f, "{}", kind) } } #[derive(Debug, PartialEq)] pub enum JumpCondition { Zero, NonZero, Carry, NonCarry, } impl fmt::Display for JumpCondition { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let operand_string = match self { JumpCondition::Zero => String::from("Z"), JumpCondition::NonZero => String::from("NZ"), JumpCondition::Carry => String::from("C"), JumpCondition::NonCarry => String::from("NC"), }; write!(f, "{}", operand_string) } } #[derive(Debug, PartialEq)] pub enum ReturnKind { Return, ReturnInterrupt, ReturnConditional(JumpCondition), } impl fmt::Display for ReturnKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let kind_string = match self { ReturnKind::Return => String::from(""), ReturnKind::ReturnInterrupt => String::from("I"), ReturnKind::ReturnConditional(cond) => std::format!(" {}", cond), }; write!(f, "{}", kind_string) } } #[derive(Debug, PartialEq)] pub enum Instruction16 { RotateLeftCircular(ArithmeticOperand), RotateLeft(ArithmeticOperand), RotateRightCircular(ArithmeticOperand), RotateRight(ArithmeticOperand), ShiftLeft(ArithmeticOperand), ShiftRightArithmetic(ArithmeticOperand), ShiftRightLogical(ArithmeticOperand), Swap(ArithmeticOperand), BitComplement(u8, ArithmeticOperand), Reset(u8, ArithmeticOperand), Set(u8, ArithmeticOperand), } impl fmt::Display for Instruction16 { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let kind_string = match self { Instruction16::RotateLeftCircular(reg) => std::format!("RLC {}", reg), 
Instruction16::RotateLeft(reg) => std::format!("RL {}", reg), Instruction16::RotateRightCircular(reg) => std::format!("RRC {}", reg), Instruction16::RotateRight(reg) => std::format!("RR {}", reg), Instruction16::ShiftLeft(reg) => std::format!("SLA {}", reg), Instruction16::ShiftRightArithmetic(reg) => std::format!("SRA {}", reg), Instruction16::ShiftRightLogical(reg) => std::format!("SRL {}", reg), Instruction16::Swap(reg) => std::format!("SWAP {}", reg), Instruction16::BitComplement(val, reg) => std::format!("BIT {} {}", val, reg), Instruction16::Reset(val, reg) => std::format!("BIT {} {}", val, reg), Instruction16::Set(val, reg) => std::format!("Set {} {}", val, reg), }; write!(f, "{}", kind_string) } } #[derive(Debug, PartialEq)] pub enum RotateKind { Left, LeftCircular, Right, RightCircular, } impl fmt::Display for RotateKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let kind_string = match self { RotateKind::Left => String::from("L"), RotateKind::LeftCircular => String::from("LC"), RotateKind::Right => String::from("R"), RotateKind::RightCircular => String::from("RC"), }; write!(f, "{}", kind_string) } } #[derive(Debug, PartialEq)] pub enum CallKind { Call(u16), CallConditional(u16, JumpCondition), } impl fmt::Display for CallKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let kind_string = match self { CallKind::Call(a16) => std::format!("{:#0x}", a16), CallKind::CallConditional(a16, cond) => std::format!("{:#0x} {}", a16, cond), }; write!(f, "{}", kind_string) } } #[cfg(test)] mod tests { use super::*; #[test] fn display_nop() { assert_eq!(std::format!("{}", Instruction::Nop), "NOP"); } #[test] fn display_halt() { assert_eq!(std::format!("{}", Instruction::Halt), "HALT"); } #[test] fn display_load8() { assert_eq!( std::format!( "{}", Instruction::Load8(Load8Operand::Register(Register::A), Load8Operand::AtC) ), "LD A,(C)" ); } #[test] fn display_load16() { assert_eq!( std::format!( "{}", Instruction::Load16( 
Load16Target::Register16(RegisterPair::Hl), Load16Source::SpPlus(15) ) ), "LD HL,SP+15" ); } #[test] fn display_add() { assert_eq!( std::format!( "{}", Instruction::Add(ArithmeticOperand::Register(Register::B)) ), "ADD B" ); } #[test] fn display_sub() { assert_eq!( std::format!("{}", Instruction::Sub(ArithmeticOperand::AtHl)), "SUB (HL)" ); } #[test] fn display_adc() { assert_eq!( std::format!( "{}", Instruction::AddCarry(ArithmeticOperand::Register(Register::B)) ), "ADC B" ); } #[test] fn display_sbc() { assert_eq!( std::format!( "{}", Instruction::SubCarry(ArithmeticOperand::Register(Register::D)) ), "SBC D" ); } #[test] fn display_and() { assert_eq!( std::format!( "{}", Instruction::And(ArithmeticOperand::Register(Register::B)) ), "AND B" ); } #[test] fn display_or() { assert_eq!( std::format!( "{}", Instruction::Or(ArithmeticOperand::Register(Register::C)) ), "OR C" ); } #[test] fn display_xor() { assert_eq!( std::format!("{}", Instruction::Xor(ArithmeticOperand::Data(10))), "XOR $10" ); } #[test] fn display_cp() { assert_eq!( std::format!("{}", Instruction::Compare(ArithmeticOperand::Data(10))), "CP $10" ); } #[test] fn display_inc() { assert_eq!( std::format!( "{}", Instruction::Increment(ArithmeticOperand::Register(Register::A)) ), "INC A" ); } #[test] fn display_dec() { assert_eq!( std::format!( "{}", Instruction::Decrement(ArithmeticOperand::Register(Register::D)) ), "DEC D" ); } #[test] fn display_addptr() { assert_eq!( std::format!( "{}", Instruction::AddPtr(PtrArithOperand::StackPointer, PtrArithOperand::Data(25)) ), "ADD SP,$25" ); } #[test] fn decode_nop() { let mut memory = Memory::initialize(); memory.rom_bank0[0] = 0; assert_eq!(Instruction::from_bytes(&memory, 0), Instruction::Nop); } #[test] fn decode_nop_fails() { let memory = Memory::initialize(); assert_ne!(Instruction::from_bytes(&memory, 0), Instruction::Stop); } #[test] fn decode_stop() { let mut memory = Memory::initialize(); memory.rom_bank0[0] = 0x10; 
assert_eq!(Instruction::from_bytes(&memory, 0), Instruction::Stop); } #[test] fn decode_ld8() { let mut memory = Memory::initialize(); memory.rom_bank0[0] = 0x02; assert_eq!( Instruction::from_bytes(&memory, 0), Instruction::Load8( Load8Operand::AtReg16(RegisterPair::Bc), Load8Operand::Register(Register::A) ) ); } #[test] fn decode_ld16() { let mut memory = Memory::initialize(); memory.rom_bank0[0] = 0x01; memory.rom_bank0[1] = 0xCD; memory.rom_bank0[2] = 0xAB; assert_eq!( Instruction::from_bytes(&memory, 0), Instruction::Load16( Load16Target::Register16(RegisterPair::Bc), Load16Source::Data(0xABCD) ) ); } #[test] fn decode_inc16() { let mut memory = Memory::initialize(); memory.rom_bank0[0] = 0x23; assert_eq!( Instruction::from_bytes(&memory, 0), Instruction::IncrementPtr(PtrArithOperand::Register16(RegisterPair::Hl)) ); } #[test] fn decode_inc() { let mut memory = Memory::initialize(); memory.rom_bank0[0] = 0x24; assert_eq!( Instruction::from_bytes(&memory, 0), Instruction::Increment(ArithmeticOperand::Register(Register::H)) ); } #[test] fn decode_dec() { let mut memory = Memory::initialize(); memory.rom_bank0[0] = 0x35; assert_eq!( Instruction::from_bytes(&memory, 0), Instruction::Decrement(ArithmeticOperand::AtHl) ); } #[test] pub fn decode_bootrom() { let memory = Memory::initialize(); assert_eq!( Instruction::from_bytes(&memory, 0), Instruction::Load16(Load16Target::StackPointer, Load16Source::Data(0xFFFE)) ); assert_eq!( Instruction::from_bytes(&memory, 3), Instruction::Xor(ArithmeticOperand::Register(Register::A)) ); assert_eq!( Instruction::from_bytes(&memory, 4), Instruction::Load16( Load16Target::Register16(RegisterPair::Hl), Load16Source::Data(0x9FFF) ) ); assert_eq!( Instruction::from_bytes(&memory, 7), Instruction::Load8(Load8Operand::AtHld, Load8Operand::Register(Register::A)) ); assert_eq!( Instruction::from_bytes(&memory, 8), Instruction::Instruction16(Instruction16::BitComplement( 7, ArithmeticOperand::Register(Register::H) )) ); } #[test] pub fn 
load16_metadat() { assert_eq!( (3, timer::Cycles::Cycles(3)), Instruction::size_and_cycles(&Instruction::Load16( Load16Target::Register16(RegisterPair::Hl), Load16Source::Data(0x9FFF) )) ); } }
// Our use case use super::stream; pub type OpenProc = fn(file_path: &path::Path, open_mode: i32) -> stream::FileReader; pub type ReadProc = fn(file_reader: stream::FileReader, read_buffer: i32, count: i32) -> i32; pub type FileLengthProc = fn(file_reader: stream::FileReader) -> i32; pub fn open() { let compressed = true; } pub fn read() -> i32 { 0 } pub fn file_length() -> i32 { 0 } pub fn tell() -> i32 { 0 } pub fn close() -> i32 { 0 }
// Criterion micro-benchmarks for glam's `Mat4` operations.
//
// `bench_unop!` / `bench_binop!` / `bench_from_ypr!` are defined in the local
// `support/macros.rs` (pulled in via `#[path]` below); each invocation expands
// to a criterion benchmark driving the named method. The `from => random_*`
// generators presumably produce randomized SRT (scale/rotation/translation)
// inputs — see `support` for their exact distributions.
#[path = "support/macros.rs"]
#[macro_use]
mod macros;
mod support;

use criterion::{criterion_group, criterion_main, Criterion};
use glam::Mat4;
use std::ops::Mul;
use support::*;

bench_unop!(
    mat4_transpose,
    "mat4 transpose",
    op => transpose,
    from => random_srt_mat4
);

bench_unop!(
    mat4_determinant,
    "mat4 determinant",
    op => determinant,
    from => random_srt_mat4
);

bench_unop!(
    mat4_inverse,
    "mat4 inverse",
    op => inverse,
    from => random_srt_mat4
);

bench_binop!(
    mat4_mul_vec4,
    "mat4 mul vec4",
    op => mul,
    from1 => random_srt_mat4,
    from2 => random_vec4
);

bench_binop!(
    mat4_transform_point3,
    "mat4 transform point3",
    op => transform_point3,
    from1 => random_srt_mat4,
    from2 => random_vec3
);

bench_binop!(
    mat4_transform_vector3,
    "mat4 transform vector3",
    op => transform_vector3,
    from1 => random_srt_mat4,
    from2 => random_vec3
);

bench_binop!(
    mat4_transform_point3a,
    "mat4 transform point3a",
    op => transform_point3a,
    from1 => random_srt_mat4,
    from2 => random_vec3a
);

bench_binop!(
    mat4_transform_vector3a,
    "mat4 transform vector3a",
    op => transform_vector3a,
    from1 => random_srt_mat4,
    from2 => random_vec3a
);

bench_binop!(
    mat4_mul_mat4,
    "mat4 mul mat4",
    op => mul,
    from => random_srt_mat4
);

bench_from_ypr!(
    mat4_from_ypr,
    "mat4 from ypr",
    ty => Mat4
);

/// Hand-written benchmark for `Mat4::from_scale_rotation_translation`,
/// cycling through a pre-generated table of (scale, rotation, translation)
/// triples so input generation is excluded from the timed region.
pub fn mat4_from_srt(c: &mut Criterion) {
    use glam::{Quat, Vec3};
    const SIZE: usize = 1 << 13;
    let mut rng = support::PCG32::default();
    // black_box keeps the optimizer from const-folding the input table away.
    let inputs = criterion::black_box(
        (0..SIZE)
            .map(|_| {
                (
                    random_nonzero_vec3(&mut rng),
                    random_quat(&mut rng),
                    random_vec3(&mut rng),
                )
            })
            .collect::<Vec<(Vec3, Quat, Vec3)>>(),
    );
    let mut outputs = vec![Mat4::default(); SIZE];
    let mut i = 0;
    c.bench_function("mat4 from srt", |b| {
        b.iter(|| {
            // SIZE is a power of two, so masking wraps `i` through 0..SIZE.
            i = (i + 1) & (SIZE - 1);
            unsafe {
                // SAFETY: `i` is masked into 0..SIZE, in bounds for both vecs.
                let data = inputs.get_unchecked(i);
                *outputs.get_unchecked_mut(i) =
                    Mat4::from_scale_rotation_translation(data.0, data.1, data.2)
            }
        })
    });
}

criterion_group!(
    benches,
    mat4_determinant,
    mat4_from_srt,
    mat4_from_ypr,
    mat4_inverse,
    mat4_mul_mat4,
    mat4_mul_vec4,
    mat4_transform_point3,
    mat4_transform_point3a,
    mat4_transform_vector3,
    mat4_transform_vector3a,
    mat4_transpose,
);
criterion_main!(benches);
use std::collections::HashMap;

/// LZW-compress `data` into a sequence of dictionary codes.
///
/// Codes 0..=255 decode to the corresponding single byte; new codes are
/// allocated consecutively from 256 as previously unseen sequences appear.
/// Returns an empty vector for empty input.
fn compress(data: &[u8]) -> Vec<u32> {
    // Build initial dictionary: one entry per possible byte value.
    let mut dictionary: HashMap<Vec<u8>, u32> =
        (0u32..=255).map(|i| (vec![i as u8], i)).collect();

    // `w` is the longest suffix of the input consumed so far that is still
    // present in the dictionary.
    let mut w: Vec<u8> = Vec::new();
    let mut compressed = Vec::new();

    for &b in data {
        // Extend the current match in place rather than cloning it every
        // iteration (the original allocated a fresh `wc = w.clone()` per byte).
        w.push(b);
        if !dictionary.contains_key(&w) {
            // `w` minus its last byte is guaranteed to be in the dictionary
            // (every single byte is, so misses only occur with w.len() >= 2).
            // HashMap<Vec<u8>, _> can be indexed by &[u8] via Borrow.
            compressed.push(dictionary[&w[..w.len() - 1]]);
            // `w` is a new sequence; add it to the dictionary.
            let code = dictionary.len() as u32;
            dictionary.insert(w.clone(), code);
            // Restart matching from the byte that broke the match.
            w.clear();
            w.push(b);
        }
    }

    // Write remaining output if necessary.
    if !w.is_empty() {
        compressed.push(dictionary[&w]);
    }
    compressed
}

/// Reverse of [`compress`].
///
/// # Panics
/// Panics on empty input (there is no first code) and on codes that could not
/// have been produced by the compressor ("Invalid dictionary!").
fn decompress(mut data: &[u32]) -> Vec<u8> {
    // Build the dictionary: codes 0..=255 decode to single bytes.
    let mut dictionary: HashMap<u32, Vec<u8>> =
        (0u32..=255).map(|i| (i, vec![i as u8])).collect();

    // The first code is always a literal single byte.
    let mut w = dictionary[&data[0]].clone();
    data = &data[1..];
    let mut decompressed = w.clone();

    for &k in data {
        // Single lookup via `get` instead of `contains_key` + index.
        let entry = match dictionary.get(&k) {
            Some(seq) => seq.clone(),
            // The "cScSc" special case: the compressor emitted a code it had
            // only just created; it decodes to `w` + first byte of `w`.
            None if k == dictionary.len() as u32 => {
                let mut entry = w.clone();
                entry.push(w[0]);
                entry
            }
            None => panic!("Invalid dictionary!"),
        };
        decompressed.extend_from_slice(&entry);

        // New sequence: previous match + first byte of the current entry.
        w.push(entry[0]);
        let code = dictionary.len() as u32;
        dictionary.insert(code, w);
        w = entry;
    }
    decompressed
}

fn main() {
    // Classic LZW demo string: compress, print the codes, then round-trip.
    let compressed = compress("TOBEORNOTTOBEORTOBEORNOT".as_bytes());
    println!("{:?}", compressed);
    let decompressed = decompress(&compressed);
    let decompressed = String::from_utf8(decompressed).unwrap();
    println!("{}", decompressed);
}
extern crate paper;
use paper::configuration::Configuration;
#[macro_use]
extern crate clap;
use clap::{App, Arg, ArgMatches};

/// CLI entry point: parse arguments, build a `Configuration` from the
/// username/password flags, and hand control to `paper::Paper`.
#[tokio::main]
async fn main() {
    let app = app();
    let matches = matches_for_app(app);
    // Start from an empty configuration and layer the CLI flags on top.
    let mut configuration = Configuration {
        username: None,
        password: None,
    };
    // NOTE(review): the config path is only printed, never loaded — the
    // `config` flag currently has no effect on `configuration`.
    if let Some(config) = matches.value_of("config") {
        println!("Using config: {}", config);
    }
    if let Some(username) = matches.value_of("username") {
        configuration = configuration.with_username(Some(username.to_string()));
    }
    if let Some(password) = matches.value_of("password") {
        configuration = configuration.with_password(Some(password.to_string()));
    }
    let paper = paper::Paper::with_config(configuration);
    paper.initiate_commands().await;
}

/// Base clap application: name, version/author pulled from Cargo metadata.
fn app<'a, 'b>() -> App<'a, 'b> {
    App::new("paper")
        .version(crate_version!())
        .author(crate_authors!())
        .about("List paper crashes")
}

/// Attach all flags to `app` and parse the process arguments.
/// `username` and `password` are required; `debug` and `config` are optional.
fn matches_for_app<'a>(app: App<'a, '_>) -> ArgMatches<'a> {
    app.arg(
        Arg::with_name("debug")
            .help("turn on debugging information")
            .long("debug")
            .short("d"),
    )
    .args(&[
        Arg::with_name("config")
            .help("sets a config file to use")
            .takes_value(true)
            .short("c")
            .long("config"),
        Arg::with_name("username")
            .help("the username to use")
            .takes_value(true)
            .short("u")
            .long("username")
            .required(true),
        Arg::with_name("password")
            .help("the password that matches the username")
            .takes_value(true)
            .short("p")
            .long("password")
            .required(true),
    ])
    .get_matches()
}
/// Adds three to `x`, then multiplies the result by four: `(x + 3) * 4`.
///
/// The original signature used the pre-1.0 `int` type, which was removed
/// from the language and no longer compiles; `i64` is used here to keep a
/// wide range close to the old machine-sized integer.
pub fn add_three_times_four(x: i64) -> i64 {
    (x + 3) * 4
}
// Copyright (c) 2019 - 2020 ESRLabs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use super::{Event, Notification, RepositoryId};
use crate::{
    api,
    runtime::{EventTx, ExitStatus},
};
use api::model;
use futures::{
    future::join_all,
    sink::SinkExt,
    stream::{self, FuturesUnordered},
    StreamExt,
};
use log::{debug, error, info, trace, warn};
use std::{path::PathBuf, unreachable};
use thiserror::Error;
use tokio::{
    fs,
    io::{self, AsyncRead, AsyncWrite},
    net::{TcpListener, UnixListener},
    select,
    sync::{self, broadcast, oneshot},
    task::{self},
    time,
};
use tokio_util::{either::Either, sync::CancellationToken};
use url::Url;

// Request from the main loop to the console
#[derive(Debug)]
pub(crate) enum Request {
    // Forward a plain protocol message to the runtime.
    Message(model::Message),
    // Install a payload already received into the given temp-file path,
    // into the named repository.
    Install(RepositoryId, PathBuf),
}

/// A console is responsible for monitoring and serving incoming client connections
/// It feeds relevant events back to the runtime and forwards responses and notifications
/// to connected clients
pub(crate) struct Console {
    /// Tx handle to the main loop
    event_tx: EventTx,
    /// Listening address/url
    url: Url,
    /// Broadcast channel passed to connections to forward notifications
    notification_tx: broadcast::Sender<Notification>,
    /// Shutdown the console by canceling this token
    stop: CancellationToken,
    /// Listener tasks. Currently there's just one task but when the console
    /// is exposed to containers via unix sockets this list will grow
    tasks: Vec<task::JoinHandle<()>>,
}

#[derive(Error, Debug)]
pub enum Error {
    #[error("Protocol error: {0}")]
    Protocol(String),
    #[error("IO error: {0} ({1})")]
    Io(String, #[source] io::Error),
}

impl Console {
    /// Construct a new console instance
    pub(super) fn new(url: &Url, event_tx: EventTx) -> Console {
        // Capacity 100: connections that lag more than this many
        // notifications behind are disconnected (see `connection`).
        let (notification_tx, _notification_rx) = sync::broadcast::channel(100);
        Self {
            event_tx,
            url: url.clone(),
            notification_tx,
            stop: CancellationToken::new(),
            tasks: Vec::new(),
        }
    }

    /// Open a TCP socket and listen for incoming connections
    /// spawn a task for each connection
    ///
    /// The URL scheme selects the transport: "tcp" (default port 4200) or
    /// "unix" (socket path taken from the URL path). Any other scheme is a
    /// programming error (`unreachable!`).
    pub(crate) async fn listen(&mut self) -> Result<(), Error> {
        let event_tx = self.event_tx.clone();
        let notification_tx = self.notification_tx.clone();
        // Stop token for self *and* the connections
        let stop = self.stop.clone();
        match self.url.scheme() {
            "tcp" => {
                let addresses = self
                    .url
                    .socket_addrs(|| Some(4200))
                    .map_err(|e| Error::Io("Invalid console address".into(), e))?;
                // Only the first resolved address is used.
                let address = addresses
                    .first()
                    .ok_or_else(|| {
                        Error::Io(
                            "Invalid console url".into(),
                            io::Error::new(io::ErrorKind::Other, ""),
                        )
                    })?
                    .to_owned();

                debug!("Starting console on {}", &address);

                let listener = TcpListener::bind(&address).await.map_err(|e| {
                    Error::Io(format!("Failed to open tcp listener on {}", &address), e)
                })?;

                debug!("Started console on {}", &address);

                let task = task::spawn(async move {
                    // Connection tasks
                    let mut connections = FuturesUnordered::new();
                    loop {
                        select! {
                            stream = listener.accept() => {
                                match stream {
                                    Ok(stream) => {
                                        connections.push(task::spawn(Self::connection(
                                            stream.0,
                                            stream.1.to_string(),
                                            stop.clone(),
                                            event_tx.clone(),
                                            notification_tx.subscribe(),
                                        )));
                                    }
                                    Err(e) => {
                                        warn!("Error listening: {}", e);
                                        break;
                                    }
                                }
                            }
                            // Reap finished connection tasks; guarded so the
                            // empty FuturesUnordered doesn't busy-complete.
                            _ = connections.next(), if !connections.is_empty() => {},
                            _ = stop.cancelled() => {
                                // Drop the listener first so no new
                                // connections are accepted while draining.
                                drop(listener);
                                debug!("Closed listener on {}", address);
                                if !connections.is_empty() {
                                    debug!("Waiting for connections to be closed");
                                    while connections.next().await.is_some() {}
                                }
                                break;
                            }
                        }
                    }
                });
                self.tasks.push(task);
            }
            "unix" => {
                let address = PathBuf::from(self.url.path());
                debug!("Starting console on {}", address.display());

                // Remove a stale socket file left from a previous run.
                if address.exists() {
                    fs::remove_file(&address)
                        .await
                        .map_err(|e| Error::Io("Failed to remove unix socket".into(), e))?;
                }

                let listener = UnixListener::bind(&address).map_err(|e| {
                    Error::Io(
                        format!("Failed to open unix listener on {}", address.display()),
                        e,
                    )
                })?;

                debug!("Started console on {}", address.display());

                let task = task::spawn(async move {
                    // Connection tasks
                    let mut connections = FuturesUnordered::new();
                    loop {
                        select! {
                            stream = listener.accept() => {
                                match stream {
                                    Ok(stream) => {
                                        connections.push(task::spawn(Self::connection(
                                            stream.0,
                                            // Unix peer addrs have no Display; Debug is used as the peer label.
                                            format!("{:?}", &stream.1),
                                            stop.clone(),
                                            event_tx.clone(),
                                            notification_tx.subscribe(),
                                        )));
                                    }
                                    Err(e) => {
                                        warn!("Error listening: {}", e);
                                        break;
                                    }
                                }
                            }
                            _ = connections.next(), if !connections.is_empty() => {},
                            _ = stop.cancelled() => {
                                drop(listener);
                                debug!("Closed listener on {}", address.display());
                                // Clean up the socket file on shutdown.
                                if address.exists() {
                                    fs::remove_file(&address)
                                        .await.expect("Failed to remove unix socket");
                                }
                                if !connections.is_empty() {
                                    debug!("Waiting for connections to be closed");
                                    while connections.next().await.is_some() {}
                                }
                                break;
                            }
                        }
                    }
                });
                self.tasks.push(task);
            }
            _ => unreachable!(),
        }
        Ok(())
    }

    /// Stop the listeners and wait for their shutdown
    pub async fn shutdown(self) -> Result<(), Error> {
        self.stop.cancel();
        join_all(self.tasks).await;
        Ok(())
    }

    /// Send a notification to the notification broadcast
    pub async fn notification(&self, notification: Notification) {
        // Send errors (no subscribers) are deliberately ignored.
        self.notification_tx.send(notification).ok();
    }

    /// Serve one client connection until it closes, errors, or `stop` fires.
    ///
    /// Protocol: the client must send Connect within 5s; after version check
    /// and ConnectAck, the loop multiplexes outgoing notifications and
    /// incoming requests (Install requests stream their payload into a
    /// temp file before being forwarded to the runtime).
    async fn connection<T: AsyncRead + AsyncWrite + Unpin>(
        stream: T,
        peer: String,
        stop: CancellationToken,
        event_tx: EventTx,
        mut notification_rx: broadcast::Receiver<Notification>,
    ) -> Result<(), Error> {
        debug!("Client {} connected", peer);

        // Get a framed stream and sink interface.
        let mut network_stream = api::codec::framed(stream);

        // Wait for a connect message within timeout
        let connect = network_stream.next();
        let connect = time::timeout(time::Duration::from_secs(5), connect);
        let (protocol_version, notifications, connect_message_id) = match connect.await {
            Ok(Some(Ok(m))) => match m.payload {
                model::Payload::Connect(model::Connect::Connect {
                    version,
                    subscribe_notifications,
                }) => (version, subscribe_notifications, m.id),
                _ => {
                    warn!("{}: Received {:?} instead of Connect", peer, m.payload);
                    return Ok(());
                }
            },
            Ok(Some(Err(e))) => {
                warn!("{}: Connection error: {}", peer, e);
                return Ok(());
            }
            Ok(None) => {
                info!("{}: Connection closed before connect", peer);
                return Ok(());
            }
            Err(_) => {
                info!("{}: Connection timed out", peer);
                return Ok(());
            }
        };

        // Check protocol version from connect message against local model version
        if protocol_version != model::version() {
            warn!(
                "{}: Client connected with invalid protocol version {}",
                peer, protocol_version
            );
            // Send a ConnectNack and return -> closes the connection
            let connack = model::ConnectNack::InvalidProtocolVersion(model::version());
            let connack = model::Connect::ConnectNack(connack);
            let message = model::Message {
                id: connect_message_id,
                payload: model::Payload::Connect(connack),
            };
            network_stream.send(message).await.ok();
            return Ok(());
        } else {
            // Send ConnectAck
            let conack = model::Connect::ConnectAck;
            let message = model::Message {
                id: connect_message_id,
                payload: model::Payload::Connect(conack),
            };
            if let Err(e) = network_stream.send(message).await {
                warn!("{}: Connection error: {}", peer, e);
                return Ok(());
            }
        }

        // Notification input: If the client subscribe create a stream from the broadcast
        // receiver and otherwise drop it
        let notifications = if notifications {
            debug!("Client {} subscribed to notifications", peer);
            let stream = async_stream::stream! {
                loop {
                    yield notification_rx.recv().await;
                }
            };
            Either::Left(stream)
        } else {
            drop(notification_rx);
            // A never-yielding stream keeps the select! arm shape identical.
            Either::Right(stream::pending())
        };
        tokio::pin!(notifications);

        loop {
            select! {
                _ = stop.cancelled() => {
                    info!("{}: Closing connection", peer);
                    break;
                }
                notification = notifications.next() => {
                    // Process notifications received via the notification
                    // broadcast channel
                    let notification = match notification {
                        Some(Ok(notification)) => notification.into(),
                        Some(Err(broadcast::error::RecvError::Closed)) => break,
                        Some(Err(broadcast::error::RecvError::Lagged(_))) => {
                            warn!("Client connection lagged notifications. Closing");
                            break;
                        }
                        None => break,
                    };

                    if let Err(e) = network_stream
                        .send(api::model::Message::new_notification(notification))
                        .await
                    {
                        warn!("{}: Connection error: {}", peer, e);
                        break;
                    }
                }
                item = network_stream.next() => {
                    let message = if let Some(Ok(msg)) = item {
                        msg
                    } else {
                        // Framing error or EOF: drop the connection.
                        break;
                    };
                    let message_id = message.id.clone();
                    trace!("{}: --> {:?}", peer, message);

                    // Holds the NamedTempFile guard so the received payload
                    // survives until the runtime has answered (see below).
                    let mut keep_file = None;
                    let request = if let api::model::Payload::Request(
                        api::model::Request::Install(repository, size)) = message.payload {
                        debug!(
                            "{}: Received installation request with size {}",
                            peer,
                            bytesize::ByteSize::b(size)
                        );
                        info!("{}: Using repository \"{}\"", peer, repository);

                        // Get a tmpfile name
                        let tmpfile = match tempfile::NamedTempFile::new() {
                            Ok(f) => f,
                            Err(e) => {
                                warn!("Failed to create tempfile: {}", e);
                                break;
                            }
                        };

                        // Create a tmpfile
                        let mut file = match fs::File::create(&tmpfile.path()).await {
                            Ok(f) => f,
                            Err(e) => {
                                warn!("Failed to open tempfile: {}", e);
                                break;
                            }
                        };

                        // Receive size bytes and dump to the tempfile
                        let start = time::Instant::now();
                        match io::copy(
                            &mut io::AsyncReadExt::take(&mut network_stream, size),
                            &mut file,
                        )
                        .await
                        {
                            Ok(n) => {
                                debug!(
                                    "{}: Received {} in {:?}",
                                    peer,
                                    bytesize::ByteSize::b(n),
                                    start.elapsed()
                                );
                            }
                            Err(e) => {
                                warn!("{}: Connection error: {}", peer, e);
                                break;
                            }
                        }

                        let tmpfile_path = tmpfile.path().to_owned();
                        keep_file = Some(tmpfile);
                        Request::Install(repository, tmpfile_path)
                    } else {
                        Request::Message(message)
                    };

                    // Create a oneshot channel for the runtimes reply
                    let (reply_tx, reply_rx) = oneshot::channel();

                    // Send the request to the runtime
                    event_tx
                        .send(Event::Console(request, reply_tx))
                        .await
                        .expect("Internal channel error on main");

                    // Wait for the reply from the runtime
                    select! {
                        // If the runtime shuts down the connection shall be closed and not wait for
                        // a reply
                        _ = stop.cancelled() => break,
                        Ok(response) = reply_rx => {
                            // Dropping the guard deletes the temp file now
                            // that the runtime is done with it.
                            keep_file.take();
                            // Report result to client
                            let message = api::model::Message {
                                id: message_id,
                                payload: api::model::Payload::Response(response),
                            };
                            trace!("{}: <-- {:?}", peer, message);
                            if let Err(e) = network_stream.send(message).await {
                                warn!("{}: Connection error: {}", peer, e);
                                break;
                            }
                        }
                        else => break,
                    }
                }
            }
        }
        info!("{}: Connection closed", peer);
        Ok(())
    }
}

// Map the runtime's exit status onto the wire-protocol representation.
impl From<ExitStatus> for model::ExitStatus {
    fn from(e: ExitStatus) -> Self {
        match e {
            ExitStatus::Exit(e) => model::ExitStatus::Exit(e),
            ExitStatus::Signaled(s) => model::ExitStatus::Signaled(s as u32),
        }
    }
}

// Map runtime notifications onto the wire-protocol representation.
impl From<Notification> for model::Notification {
    fn from(n: Notification) -> Self {
        match n {
            Notification::OutOfMemory(container) => model::Notification::OutOfMemory(container),
            Notification::Exit { container, status } => model::Notification::Exit {
                container,
                status: status.into(),
            },
            Notification::Started(container) => model::Notification::Started(container),
            Notification::Stopped(container) => model::Notification::Stopped(container),
        }
    }
}
use std::sync::Arc;

use base64;
use ring::constant_time::verify_slices_are_equal;
use ring::{digest, hmac, rand, signature};
use untrusted;

use algorithms::Algorithm;
use errors::{new_error, ErrorKind, Result};
use keys::Key;

/// The actual HS signing + encoding
///
/// Expects `key` to be `Key::Hmac`; any other variant is `InvalidKeyFormat`.
// NOTE(review): this arm uses `Err(ErrorKind::...)?` while sign_ecdsa uses
// `new_error(...)` — both construct the same error, but the style differs.
fn sign_hmac(alg: &'static digest::Algorithm, key: Key, signing_input: &str) -> Result<String> {
    let signing_key = match key {
        Key::Hmac(bytes) => hmac::SigningKey::new(alg, bytes),
        _ => return Err(ErrorKind::InvalidKeyFormat)?,
    };
    let digest = hmac::sign(&signing_key, signing_input.as_bytes());
    // JWT signatures are base64url without padding.
    Ok(base64::encode_config::<hmac::Signature>(&digest, base64::URL_SAFE_NO_PAD))
}

/// The actual ECDSA signing + encoding
///
/// Expects `key` to be a PKCS#8-encoded EC key pair (`Key::Pkcs8`).
fn sign_ecdsa(
    alg: &'static signature::EcdsaSigningAlgorithm,
    key: Key,
    signing_input: &str,
) -> Result<String> {
    let signing_key = match key {
        Key::Pkcs8(bytes) => {
            signature::EcdsaKeyPair::from_pkcs8(alg, untrusted::Input::from(bytes))?
        }
        _ => {
            return Err(new_error(ErrorKind::InvalidKeyFormat));
        }
    };
    let rng = rand::SystemRandom::new();
    let sig = signing_key.sign(&rng, untrusted::Input::from(signing_input.as_bytes()))?;
    Ok(base64::encode_config(&sig, base64::URL_SAFE_NO_PAD))
}

/// The actual RSA signing + encoding
/// Taken from Ring doc https://briansmith.org/rustdoc/ring/signature/index.html
///
/// Accepts DER (`Key::Der`) or PKCS#8 (`Key::Pkcs8`) encoded RSA key pairs;
/// any ring-level failure is reported as `InvalidRsaKey`.
fn sign_rsa(alg: &'static signature::RsaEncoding, key: Key, signing_input: &str) -> Result<String> {
    let key_pair = match key {
        Key::Der(bytes) => signature::RsaKeyPair::from_der(untrusted::Input::from(bytes))
            .map_err(|_| ErrorKind::InvalidRsaKey)?,
        Key::Pkcs8(bytes) => signature::RsaKeyPair::from_pkcs8(untrusted::Input::from(bytes))
            .map_err(|_| ErrorKind::InvalidRsaKey)?,
        _ => {
            return Err(ErrorKind::InvalidKeyFormat)?;
        }
    };
    let key_pair = Arc::new(key_pair);
    // ring requires the output buffer to be exactly modulus-sized.
    let mut signature = vec![0; key_pair.public_modulus_len()];
    let rng = rand::SystemRandom::new();
    key_pair
        .sign(alg, &rng, signing_input.as_bytes(), &mut signature)
        .map_err(|_| ErrorKind::InvalidRsaKey)?;
    Ok(base64::encode_config::<[u8]>(&signature, base64::URL_SAFE_NO_PAD))
}

/// Take the payload of a JWT, sign it using the algorithm given and return
/// the base64 url safe encoded of the result.
///
/// Only use this function if you want to do something other than JWT.
pub fn sign(signing_input: &str, key: Key, algorithm: Algorithm) -> Result<String> {
    match algorithm {
        Algorithm::HS256 => sign_hmac(&digest::SHA256, key, signing_input),
        Algorithm::HS384 => sign_hmac(&digest::SHA384, key, signing_input),
        Algorithm::HS512 => sign_hmac(&digest::SHA512, key, signing_input),
        Algorithm::ES256 => {
            sign_ecdsa(&signature::ECDSA_P256_SHA256_FIXED_SIGNING, key, signing_input)
        }
        Algorithm::ES384 => {
            sign_ecdsa(&signature::ECDSA_P384_SHA384_FIXED_SIGNING, key, signing_input)
        }
        Algorithm::RS256 => sign_rsa(&signature::RSA_PKCS1_SHA256, key, signing_input),
        Algorithm::RS384 => sign_rsa(&signature::RSA_PKCS1_SHA384, key, signing_input),
        Algorithm::RS512 => sign_rsa(&signature::RSA_PKCS1_SHA512, key, signing_input),
        Algorithm::PS256 => sign_rsa(&signature::RSA_PSS_SHA256, key, signing_input),
        Algorithm::PS384 => sign_rsa(&signature::RSA_PSS_SHA384, key, signing_input),
        Algorithm::PS512 => sign_rsa(&signature::RSA_PSS_SHA512, key, signing_input),
    }
}

/// See Ring docs for more details
///
/// Decodes the base64url signature and asks ring to verify it against the
/// DER-encoded public key. Verification failure is `Ok(false)`, not an error.
fn verify_ring(
    alg: &dyn signature::VerificationAlgorithm,
    signature: &str,
    signing_input: &str,
    key: &[u8],
) -> Result<bool> {
    let signature_bytes = base64::decode_config(signature, base64::URL_SAFE_NO_PAD)?;
    let public_key_der = untrusted::Input::from(key);
    let message = untrusted::Input::from(signing_input.as_bytes());
    let expected_signature = untrusted::Input::from(signature_bytes.as_slice());

    let res = signature::verify(alg, public_key_der, message, expected_signature);

    Ok(res.is_ok())
}

/// ECDSA verification: only `Key::Pkcs8` is accepted.
fn verify_ring_es(
    alg: &dyn signature::VerificationAlgorithm,
    signature: &str,
    signing_input: &str,
    key: Key,
) -> Result<bool> {
    let bytes = match key {
        Key::Pkcs8(bytes) => bytes,
        _ => {
            return Err(ErrorKind::InvalidKeyFormat)?;
        }
    };
    verify_ring(alg, signature, signing_input, bytes)
}

/// RSA verification: `Key::Der` and `Key::Pkcs8` are both accepted.
fn verify_ring_rsa(
    alg: &dyn signature::VerificationAlgorithm,
    signature: &str,
    signing_input: &str,
    key: Key,
) -> Result<bool> {
    let bytes = match key {
        Key::Der(bytes) | Key::Pkcs8(bytes) => bytes,
        _ => {
            return Err(ErrorKind::InvalidKeyFormat)?;
        }
    };
    verify_ring(alg, signature, signing_input, bytes)
}

/// Compares the signature given with a re-computed signature for HMAC or using the public key
/// for RSA.
///
/// Only use this function if you want to do something other than JWT.
///
/// `signature` is the signature part of a jwt (text after the second '.')
///
/// `signing_input` is base64(header) + "." + base64(claims)
pub fn verify(
    signature: &str,
    signing_input: &str,
    key: Key,
    algorithm: Algorithm,
) -> Result<bool> {
    match algorithm {
        Algorithm::HS256 | Algorithm::HS384 | Algorithm::HS512 => {
            // we just re-sign the data with the key and compare if they are equal
            // (constant-time comparison to avoid timing side channels)
            let signed = sign(signing_input, key, algorithm)?;
            Ok(verify_slices_are_equal(signature.as_ref(), signed.as_ref()).is_ok())
        }
        Algorithm::ES256 => {
            verify_ring_es(&signature::ECDSA_P256_SHA256_FIXED, signature, signing_input, key)
        }
        Algorithm::ES384 => {
            verify_ring_es(&signature::ECDSA_P384_SHA384_FIXED, signature, signing_input, key)
        }
        Algorithm::RS256 => {
            verify_ring_rsa(&signature::RSA_PKCS1_2048_8192_SHA256, signature, signing_input, key)
        }
        Algorithm::RS384 => {
            verify_ring_rsa(&signature::RSA_PKCS1_2048_8192_SHA384, signature, signing_input, key)
        }
        Algorithm::RS512 => {
            verify_ring_rsa(&signature::RSA_PKCS1_2048_8192_SHA512, signature, signing_input, key)
        }
        Algorithm::PS256 => {
            verify_ring_rsa(&signature::RSA_PSS_2048_8192_SHA256, signature, signing_input, key)
        }
        Algorithm::PS384 => {
            verify_ring_rsa(&signature::RSA_PSS_2048_8192_SHA384, signature, signing_input, key)
        }
        Algorithm::PS512 => {
            verify_ring_rsa(&signature::RSA_PSS_2048_8192_SHA512, signature, signing_input, key)
        }
    }
}
extern crate clap; use clap::{crate_version, App, Arg}; use std::io::{self, BufRead}; mod math; type MathFunction = for<'r> fn(&'r Vec<f64>) -> f64; enum MathFunctionLabel { Sum, Mean, } fn parse_args() -> MathFunctionLabel { let matches = App::new("pipemath") .about( "Run math functions on numbers from stdin. Takes all input from stdin and applies a statistical function It ignores invalid input.", ) .version(crate_version!()) .arg( Arg::with_name("FUNCTION") .help("What function to compute") .index(1) .possible_values(&["mean", "sum"]) .required(true), ) .get_matches(); // Note, it's safe to call unwrap() because the arg is required match matches.value_of("FUNCTION").unwrap() { "mean" => MathFunctionLabel::Mean, "sum" => MathFunctionLabel::Sum, _ => unreachable!(), } } fn main() { let funcname = parse_args(); let mut nums: Vec<f64> = vec![]; let stdin = io::stdin(); for line in stdin.lock().lines() { match line { Err(e) => eprintln!("ERROR reading stdin: {}", e), // with ^Z Ok(s) => match s.as_str().parse() { Err(e) => eprintln!("ERROR cannot parse '{}': {}", s, e), Ok(f) => nums.push(f), }, } } if nums.len() == 0 { std::process::exit(1); } let func: MathFunction; match funcname { MathFunctionLabel::Mean => func = math::mean, MathFunctionLabel::Sum => func = math::sum, } println!("{}", func(&nums)); std::process::exit(0); }
// Copyright 2021 The Matrix.org Foundation C.I.C. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use parse_display::{Display, FromStr}; use serde::{Deserialize, Serialize}; #[derive( Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Display, FromStr, Serialize, Deserialize, )] pub enum CodeChallengeMethod { #[serde(rename = "plain")] #[display("plain")] Plain, #[serde(rename = "S256")] #[display("S256")] S256, } #[derive(Serialize, Deserialize)] pub struct Request { pub code_challenge_method: CodeChallengeMethod, pub code_challenge: String, }
// Janken (rock-paper-scissors) demo of the Strategy pattern, using the
// pre-0.5 `rand` API (`rand::ThreadRng` at the crate root, two-argument
// `gen_range`) and pre-2018 bare trait objects (`Box<Strategy>`).
extern crate rand;

use rand::{thread_rng, Rng};
use std::cell::RefCell;
use std::fmt;

// Guu = rock, Cho(ki) = scissors, Paa = paper.
#[derive(PartialEq, Clone, Copy)]
enum HandValue {
    Guu,
    Cho,
    Paa,
}

// A playable hand: its value plus a display name (Japanese, kept verbatim —
// these strings are printed at runtime via the hand's `name`).
#[derive(PartialEq, Clone)]
struct Hand {
    hand_value: HandValue,
    name: String,
}

impl Hand {
    // Build a hand from its enum value.
    fn new(hv: HandValue) -> Hand {
        match hv {
            HandValue::Guu => Hand { hand_value: hv, name: "グー".to_string() },
            HandValue::Cho => Hand { hand_value: hv, name: "チョキ".to_string() },
            HandValue::Paa => Hand { hand_value: hv, name: "パー".to_string() },
        }
    }
    // Build a hand from a 0..=2 index (0=Guu, 1=Cho, 2=Paa).
    // Panics on anything else — callers only pass RNG output in 0..3.
    fn get_hand(hv: u32) -> Hand {
        match hv {
            0 => Hand { hand_value: HandValue::Guu, name: "グー".to_string() },
            1 => Hand { hand_value: HandValue::Cho, name: "チョキ".to_string() },
            2 => Hand { hand_value: HandValue::Paa, name: "パー".to_string() },
            _ => panic!("random number is over raange."),
        }
    }
    fn is_stronger_than(&self, h: &Hand) -> bool {
        self.fight(h) == 1
    }
    fn is_weaker_than(&self, h: &Hand) -> bool {
        self.fight(h) == -1
    }
    // Returns 1 if self wins, -1 if self loses, 0 on a draw.
    // With the 0/1/2 ordering, each value beats its successor mod 3
    // (rock beats scissors, scissors beats paper, paper beats rock).
    fn fight(&self, h: &Hand) -> i32 {
        let self_hv = self.hand_value as u32;
        let vs_hv = h.hand_value as u32;
        if self_hv == vs_hv {
            0
        } else if ((self_hv + 1) % 3) == vs_hv {
            1
        } else {
            -1
        }
    }
}

// Strategy interface: produce the next hand, then learn from the outcome.
trait Strategy {
    fn next_hand(&mut self) -> Hand;
    fn study(&mut self, win: bool);
}

// Strategy 1: repeat the previous hand after a win, otherwise play randomly.
#[derive(Clone)]
struct WinningStrategy {
    won: bool,
    prev_hand: Hand,
    rng: rand::ThreadRng,
}

impl WinningStrategy {
    fn new() -> WinningStrategy {
        WinningStrategy {
            won: false,
            prev_hand: Hand::new(HandValue::Guu),
            rng: thread_rng(),
        }
    }
}

impl Strategy for WinningStrategy {
    fn next_hand(&mut self) -> Hand {
        // Only re-roll when the last game was not won.
        if !self.won {
            let n = self.rng.gen_range(0, 3);
            self.prev_hand = Hand::get_hand(n);
        }
        self.prev_hand.clone()
    }
    fn study(&mut self, win: bool) {
        self.won = win;
    }
}

// Strategy 2: pick the next hand with probability proportional to how often
// each hand has won following the previous one.
#[derive(Clone)]
struct ProbStrategy {
    prev_hand: Hand,
    current_hand: Hand,
    rng: rand::ThreadRng,
    // history[prev][next] counts (pseudo-)wins of `next` played after `prev`;
    // seeded with 1s so every hand always has nonzero probability.
    history: [[u32; 3]; 3],
}

impl ProbStrategy {
    fn new() -> ProbStrategy {
        ProbStrategy {
            prev_hand: Hand::new(HandValue::Guu),
            current_hand: Hand::new(HandValue::Guu),
            rng: thread_rng(),
            history: [
                [1, 1, 1],
                [1, 1, 1],
                [1, 1, 1],
            ],
        }
    }
    // Total weight of the history row for the current hand.
    fn get_sum(&self) -> u32 {
        let prev_hv = self.current_hand.hand_value as usize;
        self.history[prev_hv].iter().fold(0, |sum, hv| sum + hv)
    }
}

impl Strategy for ProbStrategy {
    fn next_hand(&mut self) -> Hand {
        let current_hv = self.current_hand.hand_value as usize;
        let sum = self.get_sum();
        // Weighted roulette-wheel selection over the current history row.
        let bet = self.rng.gen_range(0, sum);
        let hv = if bet < self.history[current_hv][0] {
            0
        } else if bet < (self.history[current_hv][0] + self.history[current_hv][1]) {
            1
        } else {
            2
        };
        self.prev_hand = Hand::get_hand(current_hv as u32);
        self.current_hand = Hand::get_hand(hv);
        Hand::get_hand(hv)
    }
    fn study(&mut self, win: bool) {
        let prev_hv = self.prev_hand.hand_value as usize;
        let current_hv = self.current_hand.hand_value as usize;
        if win {
            self.history[prev_hv][current_hv] += 1;
        } else {
            // On a loss, reward the two hands we did NOT play.
            self.history[prev_hv][(current_hv + 1) % 3] += 1;
            self.history[prev_hv][(current_hv + 2) % 3] += 1;
        }
    }
}

// A player owns a boxed strategy plus win/lose/game counters.
struct Player {
    name: String,
    // RefCell gives interior mutability to the boxed strategy.
    strategy: RefCell<Box<Strategy>>,
    win_count: u32,
    lose_count: u32,
    game_count: u32,
}

impl Player {
    fn new(name: String, strategy: Box<Strategy>) -> Player {
        Player {
            name: name,
            strategy: RefCell::new(strategy),
            win_count: 0,
            lose_count: 0,
            game_count: 0,
        }
    }
    // Delegate the choice of hand to the strategy.
    fn next_hand(&mut self) -> Hand {
        self.strategy.borrow_mut().next_hand()
    }
    fn win(&mut self) {
        self.strategy.borrow_mut().study(true);
        self.win_count += 1;
        self.game_count += 1;
    }
    fn lose(&mut self) {
        self.strategy.borrow_mut().study(false);
        self.lose_count += 1;
        self.game_count += 1;
    }
    // Draws count as games but are not fed back to the strategy.
    fn even(&mut self) {
        self.game_count += 1;
    }
}

impl fmt::Display for Player {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[{}:{} games, {} win, {} lose]",
               self.name, self.game_count, self.win_count, self.lose_count)
    }
}

// Play 10000 rounds between the two strategies and print running results.
fn main() {
    let mut player1 = Player::new("Taro".to_string(), Box::new(WinningStrategy::new()));
    let mut player2 = Player::new("Hana".to_string(), Box::new(ProbStrategy::new()));
    for _ in 0..10000 {
        let next_hand1 = player1.next_hand();
        let next_hand2 = player2.next_hand();
        if next_hand1.is_stronger_than(&next_hand2) {
            println!("Winner:{}", player1);
            player1.win();
            player2.lose();
        } else if next_hand1.is_weaker_than(&next_hand2) {
            println!("Winner:{}", player2);
            player2.win();
            player1.lose();
        } else {
            println!("Even...");
            player1.even();
            player2.even();
        }
    }
    println!("Total result:");
    println!("{}", player1);
    println!("{}", player2);
}
use crate::{Context, Middleware, Response};
use futures::future::BoxFuture;
use std::time::Instant;

/// Request-logging middleware: traces each request on entry and logs
/// method, path, status and elapsed milliseconds after the downstream
/// handlers respond. Stateless, so `Clone`/`Default` are free.
#[derive(Debug, Clone, Default)]
pub struct Logger;

impl Logger {
    /// Create a new `Logger` (equivalent to `Logger::default()`).
    pub fn new() -> Self {
        Self::default()
    }
}

impl<State: Send + Sync + 'static> Middleware<Context<State>> for Logger {
    fn call<'a>(&'a self, cx: Context<State>) -> BoxFuture<'a, Response> {
        Box::pin(async move {
            let start = Instant::now();
            // Own the path/method strings: `cx` is consumed by `cx.next()`.
            let path = cx.uri().path().to_owned();
            let method = cx.method().as_str().to_owned();
            log::trace!("IN => {} {}", method, path);
            // Run the rest of the middleware/handler chain.
            let res = cx.next().await;
            log::info!(
                "{} {} {} {}ms",
                method,
                path,
                res.status().as_str(),
                start.elapsed().as_millis()
            );
            res
        })
    }
}
// Demonstrates that a bare `{ ... }` block is itself an expression:
// the block's final expression becomes its value.
fn main() {
    let radius = 5.3;
    let area = {
        // This whole block evaluates to the area of the circle.
        let pi = std::f64::consts::PI;
        pi * radius * radius
    };
    println!("area: {:?}", area);
}
//! NOTE: this crate is really just a shim for testing
//! the other no-std crate.

mod multi_thread;
mod single_thread;

#[cfg(test)]
mod tests {
    use bbqueue::{consts::*, BBBuffer, ConstBBBuffer, Error as BBQError};

    /// Write one byte through a write grant (DerefMut) and read it back
    /// through a read grant (Deref).
    #[test]
    fn deref_deref_mut() {
        let bb: BBBuffer<U6> = BBBuffer::new();
        let (mut prod, mut cons) = bb.try_split().unwrap();

        let mut wgr = prod.grant_exact(1).unwrap();

        // deref_mut
        wgr[0] = 123;

        assert_eq!(wgr.len(), 1);

        wgr.commit(1);

        // deref
        let rgr = cons.read().unwrap();

        assert_eq!(rgr[0], 123);

        rgr.release(1);
    }

    /// Two `static` buffers must be independent: data committed to one
    /// must not be observable from the other.
    #[test]
    fn static_allocator() {
        // Check we can make multiple static items...
        static BBQ1: BBBuffer<U6> = BBBuffer(ConstBBBuffer::new());
        static BBQ2: BBBuffer<U6> = BBBuffer(ConstBBBuffer::new());
        let (mut prod1, mut cons1) = BBQ1.try_split().unwrap();
        let (mut _prod2, mut cons2) = BBQ2.try_split().unwrap();

        // ... and they aren't the same
        let mut wgr1 = prod1.grant_exact(3).unwrap();
        wgr1.copy_from_slice(&[1, 2, 3]);
        wgr1.commit(3);

        // no data here...
        assert!(cons2.read().is_err());

        // ...data is here!
        let rgr1 = cons1.read().unwrap();
        assert_eq!(&*rgr1, &[1, 2, 3]);
    }

    /// Walks the grant/commit/read/release cycle step by step, including
    /// wrap-around: data committed after the ring wraps only becomes
    /// visible once the pre-wrap bytes are released.
    #[test]
    fn direct_usage_sanity() {
        // Initialize
        let bb: BBBuffer<U6> = BBBuffer::new();
        let (mut prod, mut cons) = bb.try_split().unwrap();
        assert_eq!(cons.read(), Err(BBQError::InsufficientSize));

        // Initial grant, shouldn't roll over
        let mut x = prod.grant_exact(4).unwrap();

        // Still no data available yet
        assert_eq!(cons.read(), Err(BBQError::InsufficientSize));

        // Add full data from grant
        x.copy_from_slice(&[1, 2, 3, 4]);

        // Still no data available yet (writes are invisible until commit)
        assert_eq!(cons.read(), Err(BBQError::InsufficientSize));

        // Commit data
        x.commit(4);

        ::std::sync::atomic::fence(std::sync::atomic::Ordering::SeqCst);

        let a = cons.read().unwrap();
        assert_eq!(&*a, &[1, 2, 3, 4]);

        // Release the first two bytes
        a.release(2);

        let r = cons.read().unwrap();
        assert_eq!(&*r, &[3, 4]);
        r.release(0);

        // Grant two more
        let mut x = prod.grant_exact(2).unwrap();
        let r = cons.read().unwrap();
        assert_eq!(&*r, &[3, 4]);
        r.release(0);

        // Add more data
        x.copy_from_slice(&[11, 12]);
        let r = cons.read().unwrap();
        assert_eq!(&*r, &[3, 4]);
        r.release(0);

        // Commit
        x.commit(2);

        let a = cons.read().unwrap();
        assert_eq!(&*a, &[3, 4, 11, 12]);

        a.release(2);
        let r = cons.read().unwrap();
        assert_eq!(&*r, &[11, 12]);
        r.release(0);

        // This grant wraps around the end of the 6-byte ring.
        let mut x = prod.grant_exact(3).unwrap();
        let r = cons.read().unwrap();
        assert_eq!(&*r, &[11, 12]);
        r.release(0);

        x.copy_from_slice(&[21, 22, 23]);

        let r = cons.read().unwrap();
        assert_eq!(&*r, &[11, 12]);
        r.release(0);
        x.commit(3);

        let a = cons.read().unwrap();

        // NOTE: The data we just added isn't available yet,
        // since it has wrapped around
        assert_eq!(&*a, &[11, 12]);

        a.release(2);

        // And now we can see it
        let r = cons.read().unwrap();
        assert_eq!(&*r, &[21, 22, 23]);
        r.release(0);

        // Ask for something way too big
        assert!(prod.grant_exact(10).is_err());
    }

    /// Zero-length grants must succeed and commit cleanly, even when the
    /// buffer is already completely full.
    #[test]
    fn zero_sized_grant() {
        let bb: BBBuffer<U1000> = BBBuffer::new();
        let (mut prod, mut _cons) = bb.try_split().unwrap();

        let size = 1000;
        let grant = prod.grant_exact(size).unwrap();
        grant.commit(size);

        let grant = prod.grant_exact(0).unwrap();
        grant.commit(0);
    }
}
// Copyright (c) 2019 Chaintope Inc.

use crate::errors::Error;
use log::warn;
use std::sync::mpsc::{channel, sync_channel, Receiver, Sender, SyncSender};
use std::sync::{Arc, Mutex, RwLock};
use std::thread::JoinHandle;
use std::time::Duration;

// Receiver wrapped so it can be moved into (and locked by) the timer thread.
type ThreadSafeReceiver<T> = Arc<Mutex<Receiver<T>>>;

fn to_thread_safe<T>(r: Receiver<T>) -> ThreadSafeReceiver<T> {
    Arc::new(Mutex::new(r))
}

/// One-shot round timer. `start()` spawns a thread that waits `timelimit`;
/// on timeout it sends `()` on `receiver`, unless a `Command::Stop` arrives
/// first via the internal command channel.
pub struct RoundTimeOutObserver {
    name: String,
    timelimit: Duration,
    // Sender half of the timeout-notification channel (moved into the thread).
    sender: Sender<()>,
    // Callers listen here for the timeout signal.
    pub receiver: Receiver<()>,
    // Bounded (capacity 1) channel used to ask the timer thread to stop.
    command_sender: SyncSender<Command>,
    command_receiver: ThreadSafeReceiver<Command>,
    thread: Option<JoinHandle<()>>,
    // Shared started/stopped flag, also written by the timer thread.
    state: Arc<RwLock<State>>,
}

pub enum Command {
    Stop,
}

#[derive(Debug)]
pub struct State {
    started: bool,
}

impl RoundTimeOutObserver {
    /// Build an observer named `name` that times out after `timelimit_secs`
    /// seconds. No thread is spawned until `start()` is called.
    pub fn new(name: &str, timelimit_secs: u64) -> Self {
        let (sender, receiver): (Sender<()>, Receiver<()>) = channel();
        let (command_sender, command_receiver): (SyncSender<Command>, Receiver<Command>) =
            sync_channel(1);
        RoundTimeOutObserver {
            name: name.to_string(),
            timelimit: Duration::from_secs(timelimit_secs),
            thread: None,
            sender,
            receiver,
            command_sender,
            command_receiver: to_thread_safe(command_receiver),
            state: Arc::new(RwLock::new(State { started: false })),
        }
    }

    /// Whether the timer thread is currently considered running.
    /// NOTE(review): `try_read().expect(...)` panics if the lock is contended
    /// at that instant, rather than blocking — appears intentional here.
    pub fn is_started(&self) -> bool {
        let guard = self
            .state
            .try_read()
            .expect("Can't read started state. 
is Locked.");
        guard.started
        // self.thread.try_lock().unwrap().is_some()
    }

    // Flip the shared started flag (panics if the RwLock is contended).
    fn set_started_state(&self, flag: bool) {
        let mut state_writer = self
            .state
            .try_write()
            .expect("Can't state change to started!");
        state_writer.started = flag;
    }

    /// Spawn the timer thread. Returns `Error::TimerAlreadyStarted` when a
    /// round is already in flight. The thread waits for either a `Stop`
    /// command or a timeout; only the timeout sends the `()` signal.
    pub fn start(&mut self) -> Result<(), Error> {
        if self.is_started() {
            return Err(Error::TimerAlreadyStarted);
        }
        let sender = self.sender.clone();
        let command_receiver = self.command_receiver.clone();
        let timelimit = self.timelimit;
        self.set_started_state(true);

        let thread_in_started = self.state.clone();
        // Closure run inside the thread to mark the timer as stopped.
        let stop = move || {
            let mut state = thread_in_started
                .try_write()
                .expect("State can not change to stop.");
            state.started = false;
        };
        let name = self.name.clone();
        let handler = std::thread::Builder::new()
            .name("RoundTimeoutObserverThread".to_string())
            .spawn(move || {
                // TODO: should we retry when the lock cannot be acquired?
                // Treating a failed lock as an error is probably fine for now.
                let receiver = command_receiver
                    .try_lock()
                    .expect("Command_receiver can not have lock.");
                log::trace!("Start Timer name={} timelimit={:?}", name, timelimit);
                match receiver.recv_timeout(timelimit) {
                    Ok(Command::Stop) => {
                        // Stopped explicitly: no timeout signal is emitted.
                        log::trace!("Stop Timer by Stop command name={}", name);
                        stop();
                    }
                    Err(_e) => {
                        log::trace!("Stop Timer by time out name={}", name);
                        stop();
                        // time out, send timeout signal.
                        match sender.send(()) {
                            Ok(_) => {}
                            Err(e) => log::warn!(
                                "Round timeouted, but receiver not handle signal!: {:?}",
                                e
                            ),
                        };
                    }
                }
                log::trace!("RoundTimeoutObserverThread finished.");
            })
            .unwrap();
        self.thread = Some(handler);
        Ok(())
    }

    /// Ask the timer thread to stop (if running) and join it; always clears
    /// the started flag afterwards.
    pub fn stop(&mut self) {
        if self.is_started() {
            match self.command_sender.try_send(Command::Stop) {
                Ok(_) => {
                    // Should be wait to thread stopped.
                    match self.thread.take() {
                        Some(handler) => handler.join().expect("Timer thread invalid state."),
                        None => {}
                    }
                }
                Err(e) => {
                    // try_send fails when the thread already exited (e.g. it
                    // timed out on its own); this is non-fatal.
                    warn!(
                        "RoundTimeoutObserver thread maybe already dead. 
error:{:?}",
                        e
                    );
                }
            }
        };
        self.set_started_state(false);
    }

    /// Stop the current round (if any) and start a fresh one.
    pub fn restart(&mut self) -> Result<(), Error> {
        self.stop();
        self.start()?;
        Ok(())
    }
}

impl Drop for RoundTimeOutObserver {
    fn drop(&mut self) {
        // wait thread finished.
        if let Some(handler) = self.thread.take() {
            handler.join().unwrap();
        }
    }
}

#[cfg(test)]
mod tests {
    use super::RoundTimeOutObserver;
    use std::time::Duration;

    /// A zero-second timer must deliver the timeout signal promptly.
    #[test]
    fn test_timeout_signal() {
        let mut observer = RoundTimeOutObserver::new("test timer", 0);
        observer.start().unwrap();
        match observer.receiver.recv_timeout(Duration::from_millis(300)) {
            Ok(_) => assert_eq!(observer.is_started(), false),
            Err(e) => panic!("Timeout signal not received. {:?}", e),
        }
    }

    /// Stopping before the deadline must suppress the timeout signal.
    #[test]
    fn test_timer_stop() {
        let mut observer = RoundTimeOutObserver::new("test timer", 1);
        observer.start().unwrap();
        observer.stop();
        match observer.receiver.recv_timeout(Duration::from_millis(1100)) {
            Ok(_) => panic!("Should not send stop signal."),
            Err(_e) => assert_eq!(observer.is_started(), false), // Observer thread should did stop.
        }
    }

    /// Starting twice without stopping must fail with TimerAlreadyStarted.
    #[test]
    fn test_prevent_duplicate_start() {
        let mut observer = RoundTimeOutObserver::new("test timer", 1);
        observer.start().unwrap();
        match observer.start() {
            Ok(_) => panic!("Should be Error!"),
            Err(e) => {
                let error = format!("{:?}", e);
                assert_eq!(error, "TimerAlreadyStarted");
            }
        }
    }

    /// After a natural timeout, restart() must run a full second round.
    #[test]
    fn test_timeout_and_restart() {
        let mut observer = RoundTimeOutObserver::new("test timer", 1);
        observer.start().unwrap();
        assert_eq!(observer.is_started(), true);
        match observer.receiver.recv_timeout(Duration::from_millis(1100)) {
            Ok(_) => assert_eq!(observer.is_started(), false),
            Err(e) => panic!("Timeout signal not received. {:?}", e),
        }
        println!("2nd round start.");
        observer.restart().unwrap();
        match observer.receiver.recv_timeout(Duration::from_millis(1500)) {
            Ok(_) => assert_eq!(observer.is_started(), false),
            Err(e) => panic!("Timeout signal not received. {:?}", e),
        }
    }
}
/* 
 * Sliding window min/max test (Rust)
 * 
 * Copyright (c) 2022 Project Nayuki. (MIT License)
 * https://www.nayuki.io/page/sliding-window-minimum-maximum-algorithm
 * 
 * Permission is hereby granted, free of charge, to any person obtaining a copy of
 * this software and associated documentation files (the "Software"), to deal in
 * the Software without restriction, including without limitation the rights to
 * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
 * the Software, and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 * - The above copyright notice and this permission notice shall be included in
 *   all copies or substantial portions of the Software.
 * - The Software is provided "as is", without warranty of any kind, express or
 *   implied, including but not limited to the warranties of merchantability,
 *   fitness for a particular purpose and noninfringement. In no event shall the
 *   authors or copyright holders be liable for any claim, damages or other
 *   liability, whether in an action of contract, tort or otherwise, arising from,
 *   out of or in connection with the Software or the use or other dealings in the
 *   Software.
 */

// NOTE: uses the pre-0.5 `rand` API (IndependentSample / Range::ind_sample).
extern crate rand;
use rand::Rng;
use rand::distributions::IndependentSample;
use rand::distributions::range::Range;
mod slidingwindowminmax;

/// Run both randomized test suites, panicking on the first mismatch.
fn main() {
    test_randomly();
    test_incremental();
    println!("Test passed");
}

/// Fuzz the batch API: compare the optimized implementation against the
/// brute-force reference on random arrays, window sizes, and min/max mode.
fn test_randomly() {
    let trials = 100_000;
    let rng = &mut rand::thread_rng();
    let valuedist = Range::new(0u32, 100);
    let arraylendist = Range::new(0usize, 1000);
    let windowdist = Range::new(1usize, 31);
    for _ in 0 .. trials {
        let arraylen = arraylendist.ind_sample(rng);
        let array: Vec<u32> = (0 .. arraylen).map(|_| valuedist.ind_sample(rng)).collect();
        let window = windowdist.ind_sample(rng);
        let maximize: bool = rng.gen();
        let expect: Vec<u32> = compute_sliding_window_min_or_max_naive(&array, window, maximize);
        let actual: Vec<u32> = slidingwindowminmax::compute_sliding_window_min_or_max(&array, window, maximize);
        assert_eq!(actual, expect, "Array mismatch");
    }
}

/// Fuzz the incremental API: grow/shrink a random window over a random array
/// and check get_minimum()/get_maximum() against iterator min/max of the
/// current sub-slice after every step.
fn test_incremental() {
    let trials = 10_000;
    let rng = &mut rand::thread_rng();
    let valuedist = Range::new(0i8, 100);
    for _ in 0 .. trials {
        let arraylen: usize = 1000;
        let array: Vec<i8> = (0 .. arraylen).map(|_| valuedist.ind_sample(rng)).collect();
        let mut swm = slidingwindowminmax::SlidingWindowMinMax::new();
        let mut start: usize = 0;
        let mut end: usize = 0;
        while start < array.len() {
            // Randomly either extend the window's tail or retract its head;
            // an empty window is always extended first.
            if start == end || (end < array.len() && rng.gen::<bool>()) {
                swm.add_tail(&array[end]);
                end += 1;
            } else {
                swm.remove_head(&array[start]);
                start += 1;
            }
            assert!(start <= end);
            if start < end {
                let subarr = &array[start .. end];
                assert_eq!(*swm.get_minimum(), *subarr.iter().min().unwrap());
                assert_eq!(*swm.get_maximum(), *subarr.iter().max().unwrap());
            }
        }
    }
}

/// Brute-force reference: for every length-`window` slice, take the min (or
/// max when `maximize`) directly. O(n * window), used only for verification.
fn compute_sliding_window_min_or_max_naive<E: Ord + Clone>(array: &[E], window: usize, maximize: bool) -> Vec<E> {
    assert!(window > 0, "Window size must be positive");
    array.windows(window).map(|subarr| {
        let iter = subarr.iter();
        (if maximize { iter.max() } else { iter.min() }).unwrap().clone()
    }).collect()
}
use bytes::{BufMut, BytesMut};
use rsocket_rust::extension::{self, CompositeMetadata, CompositeMetadataEntry, MimeType};
use rsocket_rust::utils::Writeable;

/// Round-trip a two-entry composite metadata value through encode/decode and
/// verify both entries survive intact.
#[test]
fn test_encode_and_decode() {
    // Shared assertions, applied both to the freshly built value and to the
    // copy re-parsed from its wire bytes.
    fn verify(entries: Vec<&CompositeMetadataEntry>) {
        assert_eq!(2, entries.len());
        assert_eq!(
            extension::MimeType::TEXT_PLAIN,
            *entries[0].get_mime_type()
        );
        assert_eq!("Hello World!", entries[0].get_metadata_utf8().unwrap());
        assert_eq!(
            MimeType::from("application/not_well"),
            *entries[1].get_mime_type()
        );
        assert_eq!(b"Not Well!", entries[1].get_metadata().as_ref());
    }

    let original = CompositeMetadata::builder()
        .push(MimeType::from("text/plain"), b"Hello World!")
        .push(MimeType::from("application/not_well"), "Not Well!")
        .build();
    verify(original.iter().collect());

    // Serialize to bytes, decode them back, and re-check.
    let mut buffer = BytesMut::new();
    original.write_to(&mut buffer);
    let reparsed = CompositeMetadata::decode(&mut buffer).unwrap();
    verify(reparsed.iter().collect());
}

/// Arbitrary garbage bytes must be rejected by the decoder.
#[test]
fn test_bad() {
    let mut buffer = BytesMut::new();
    buffer.put_slice(b"must bad");
    assert!(
        CompositeMetadata::decode(&mut buffer).is_err(),
        "should be error"
    )
}
use std;

// Two-argument Ackermann function, written in the pre-1.0 Rust dialect
// (`int`, `ret`, `[str]` vectors, `u` integer suffixes, `#fmt` extension).
fn ack(m: int, n: int) -> int {
    if m == 0 {
        ret n + 1
    } else {
        if n == 0 {
            ret ack(m - 1, 1);
        } else {
            ret ack(m - 1, ack(m, n - 1));
        }
    }
}

// Prints Ack(3, n) where n is the optional single CLI argument (default 8).
fn main(args: [str]) {
    // FIXME: #1527
    // Deep recursion needs a large stack; raise the minimum before recursing.
    sys::set_min_stack(1000000u);
    let n = if vec::len(args) == 2u {
        int::from_str(args[1])
    } else {
        8
    };
    std::io::println(#fmt("Ack(3,%d): %d\n", n, ack(3, n)));
}
//! the example for how to decode hex string to message struct
//!
//! Conversion flow: a String is turned into a Vec<u8> via the hex crate;
//! the u8 array is then turned into the corresponding Address type via
//! bitcoin::deserialize.
//! Address must implement the Decode and Encode traits, otherwise
//! serialize and deserialize cannot work.
//!
//!

/// Decode a fixed hex string into an `Address` and print both forms.
fn decode_address() {
    use hex::decode as hex_decode;
    use bitcoin::consensus::{deserialize, serialize};
    use crate::message::address::Address;
    use std::net::{SocketAddr, SocketAddrV4, IpAddr, Ipv4Addr};
    //test for address hex decode to structures
    let from_sat = hex_decode("000000000000000000000000000000000000ffff7f000001208d").unwrap();
    println!("The byte is {:?}", &from_sat);
    let decode: Result<Address, _> = deserialize(&from_sat);
    assert!(decode.is_ok());
    let address = &decode.unwrap();
    println!("The Address is {:?}", address);
}

// Converting an Address value back into a hex string:
// build the value first, then turn it into bytes with the bitcoin crate's
// serialize, then into a hex string with hex::encode.
// The serialize method requires the Encode trait.
//
// hex_decode   0xxxxx ----> Vec
// deserialize  Vec    ----> struct
//
// serialize    struct ----> Vec
// hex_encode   Vec    ----> 0xxxxxxxx
//
// The Vec form is what actually travels over the network.
//
/// Build an `Address` from a socket address and print its hex encoding.
fn encode_address(){
    //test for address to encode hex
    let s4 = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8333);
    let a4 = Address::new(&s4, 0);
    let v4c = serialize(&a4);
    let res = hex::encode(v4c);
    println!("The hex string is {:?}", res);
}

// The main flow: how the message data below was assembled.
/// Mostly commented-out walkthrough documenting how each wire message was
/// derived; only the length-serialization demo at the bottom still runs.
fn example(){
    // This message is from my satoshi node, morning of May 27 2014
    // let from_sat = hex_decode("721101000100000000000000e6e0845300000000010000000000000000000000000000000000ffff0000000000000100000000000000fd87d87eeb4364f22cf54dca59412db7208d47d920cffce83ee8102f5361746f7368693a302e392e39392f2c9f040001").unwrap();
    //
    // let decode: Result<VersionMessage, _> = deserialize(&from_sat);
    // println!("The version is {:#?}", decode.unwrap());
    // assert!(decode.is_ok());
    // let real_decode = decode.unwrap();
    // assert_eq!(real_decode.version, 70002);
    // assert_eq!(real_decode.services, 1);
    // assert_eq!(real_decode.timestamp, 1401217254);
    // // address decodes should be covered by Address tests
    // assert_eq!(real_decode.nonce, 16735069437859780935);
    // assert_eq!(real_decode.user_agent, "/Satoshi:0.9.99/".to_string());
    // assert_eq!(real_decode.start_height, 302892);
    // assert_eq!(real_decode.relay, true);
    //
    // assert_eq!(serialize(&real_decode), from_sat);

    // Now start assembling messages, following the example's send calls.
    // First is the command string, which is already implemented; the goal is
    // to encode it into a hex byte array shaped like the Bitcoin network
    // protocol wiki shows.
    // Build the struct first, then serialize it.
    // let cs = CommandString("version".to_owned());
    // let s= serialize(&cs);
    // println!("The s is {:0x?}", s);

    // Next, encode the payload itself. The first message to send is Version,
    // so encode version first. The steps follow the example above.
    //
    // version first:
    // addr_recv is the address receiving this message: 192.168.1.7 port 8332
    // addr_from is the sender's address; this machine's IP is 192.168.101.61,
    // with port 8332 chosen arbitrarily.
    //
    //
    // let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() as i64;
    // let remote = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(192, 168, 1, 7)), 8333);
    //
    // let version = VersionMessage {
    //     version: 70001,
    //     services: 0000000000000000,//spv only
    //     timestamp,
    //     receiver: Address::new(&remote, 1),
    //     // sender is only dummy
    //     sender: Address::new(&remote, 1),
    //     nonce: 0000000000000000,//not used here
    //     user_agent: "/Alvin Example:0.9.3/".to_string(),
    //     start_height: 329107,
    //     relay: true,
    // };
    // let serialized_version = serialize(&version);
    // println!("The serialized_version is {:02x?}", &serialized_version);
    // println!("The len of serialized_version is {:?}", &serialized_version.len());

    // Next, build the checksum. The example contains this sentence:
    // Checksum is first 4 bytes of SHA256(SHA256(<payload>))
    // [https://en.bitcoin.it/wiki/Protocol_documentation#version]
    //
    // To work that out, Verack was defined and its checksum computed.
    //
    // Verified: computing it with hmac_sha256 gives the same result as the
    // example.
    // [https://en.bitcoin.it/wiki/Protocol_documentation#verack]
    // Note the checksum value.
    //
    // let hash_m1 = hmac_sha256::Hash::hash("".as_bytes());
    // let hash_m2 = hmac_sha256::Hash::hash(&hash_m1);
    // println!("I got hash_m SHA256(SHA256(verack)[0..4] {:0x?}", &hash_m2[0..4]);

    // Now serialize the length; it is simple, just serialize it directly.
    let length = serialize(&12);
    println!("The length is {:02x?}", length);
    // filterload uses a fixed value; see message/filterload.
    // getdata needs serializing too, but that turned out to be basically
    // implemented already, so nothing to do by hand.
}

use hex::decode as hex_decode;
use crate::message::address::Address;
use std::net::{SocketAddr, IpAddr, Ipv4Addr, TcpStream};
use crate::message::version::VersionMessage;
use crate::message::command::CommandString;
use crate::message::{RawMessage, Payload, Magic};
use crate::message::filterload::FilterLoad;
use crate::message::getdata::GetData;
use std::io::{Write, Read};
use std::thread::sleep;
use std::time;
use log::info;

// Size of the buffer used to receive the peer's reply (1 MiB).
const IO_BUFFER_SIZE: usize = 1024 * 1024;

/// Assemble version/verack/filterload/getdata messages, send them to a local
/// node at 192.168.1.7:8333, and log whatever single reply comes back.
fn main(){
    let remote = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(192, 168, 1, 7)), 8333);
    // assemble a version message
    let version_message = VersionMessage {
        version: 70001,
        services: 0,
        timestamp: 1415484102,
        receiver: Address::new(&remote, 1),
        // sender is only dummy
        sender: Address::new(&remote, 1),
        nonce: 0,
        bytes: 0x1b,
        user_agent: "/Bitcoin.org Example:0.9.3/".to_string(),
        start_height: 329107,
        relay: true,
    };
    let raw_version = RawMessage::new(Magic::Main,
                                      CommandString("version".to_owned()),
                                      Payload::Version(version_message), );
    let vec_version = raw_version.combine();
    info!("vec_version {:02x?}", &vec_version);

    // assemble a verack; verack carries no payload
    let raw_verack = RawMessage::new(Magic::Main, CommandString("verack".to_owned()), Payload::Verack);
    let vec_verack = raw_verack.combine();
    info!("vec_verack {:02x?}", &vec_verack);

    // assemble a filterload
    // The mechanism is not fully understood, so build one from fixed data:
    //send("filterload",
    //"02"  # ........ Filter bytes: 2
    //+ "b50f"  # ....... Filter: 1010 1101 1111 0000
    //+ "0b000000"  # ... nHashFuncs: 11
    //+ "00000000"  # ... nTweak: 0/none
    //+ "00"  # ......... nFlags: BLOOM_UPDATE_NONE
    //)
    let filterload_hex = "02b50f0b0000000000000000";
    let filterload_vec = hex_decode(filterload_hex).unwrap();
    info!("filterload_vec {:?}", filterload_vec);
    let raw_filterload = RawMessage::new(Magic::Main, CommandString("filterload".to_owned()), Payload::FilterLoad(FilterLoad(filterload_vec)));
    let vec_filterload = raw_filterload.combine();
    info!("vec_verack {:02x?}", &vec_filterload);

    // assemble a getdata; for a quick result, the same fixed-data approach is used
    //send("getdata",
    // "01"  # ................................. Number of inventories: 1
    // + "03000000"  # ........................... Inventory type: filtered block
    // + "a4deb66c0d726b0aefb03ed51be407fb"
    // + "ad7331c6e8f9eef231b7000000000000"  # ... Block header hash
    //)
    let getdata_hex = "0103000000a4deb66c0d726b0aefb03ed51be407fbad7331c6e8f9eef231b7000000000000";
    let getdata_vec = hex_decode(getdata_hex).unwrap();
    info!("getdata_vec {:?}", getdata_vec);
    let raw_getdata = RawMessage::new(Magic::Main, CommandString("getdata".to_owned()), Payload::GetData(GetData(getdata_vec)));
    let vec_getdata = raw_getdata.combine();
    info!("vec_verack {:02x?}", &vec_getdata);

    match TcpStream::connect("192.168.1.7:8333") {
        Ok(mut stream) => {
            info!("Successfully connected to server in port 8333");
            stream.write(&vec_version).unwrap();
            info!("Sent version, awaiting reply...");
            sleep(time::Duration::from_secs(1));
            stream.write(&vec_verack).unwrap();
            info!("Sent vec_verack, awaiting reply...");
            stream.write(&vec_filterload).unwrap();
            info!("Sent vec_filterload, awaiting reply...");
            stream.write(&vec_getdata).unwrap();
            info!("Sent vec_getdata, awaiting reply...");

            let mut iobuf = vec!(0u8; IO_BUFFER_SIZE);
            match stream.read(iobuf.as_mut_slice()) {
                Ok(len) => {
                    info!("{}", &len);
                    info!("{:02x?}", iobuf[0..len].to_vec())
                }
                Err(e) => {
                    info!("Failed to receive data: {}", e);
                }
            }
        }
        Err(e) => {
            info!("Failed to connect: {}", e);
        }
    }
    info!("Terminated.");
}
use std::collections::HashMap;
use log::{error, debug};
use std::time::{UNIX_EPOCH};
use crate::zone::Zone;
use std::fs::{metadata, File};
use std::io::{Error, ErrorKind, BufReader};
use std::cell::{RefCell};
use serde::{Serialize, Deserialize};
use derive_new::{new};

// Control nodes and heating zones, keyed by their names.
pub type ControlNodes = HashMap<String, ControlNode>;
pub type Zones = HashMap<String, Zone>;

/// Top-level shape of the YAML configuration file.
#[derive(Serialize, Deserialize)]
pub struct FullConfig {
    pub general: Config,
    pub controls: ControlNodes
}

/// One control node with its GPIO pin and the zones it manages.
#[derive(Debug, new, Serialize, Deserialize)]
pub struct ControlNode {
    #[serde(default)]
    pub name: String,
    #[serde(default)]
    pub control_pin: u8,
    pub zones: Zones
}

/// Interior-mutable wrapper around `Config`, so the active configuration can
/// be swapped (e.g. after a reload) behind shared references.
#[derive(Debug)]
pub struct Settings {
    config: RefCell<Config>
}

impl Settings {
    pub fn new(config: Config) -> Settings {
        Settings { config: RefCell::new(config) }
    }

    /// Swap in a freshly loaded configuration.
    pub fn replace(&self, config: Config) {
        self.config.replace(config);
    }

    // Accessors below return clones/copies of the current config values.

    pub fn name(&self) -> String {
        self.config.borrow().name.clone()
    }

    pub fn host(&self) -> String {
        self.config.borrow().host.clone()
    }

    pub fn heater_control_name(&self) -> String {
        self.config.borrow().heater_control_name.clone()
    }

    pub fn heater_control_pin(&self) -> u8 {
        self.config.borrow().heater_control_pin
    }

    pub fn acctuator_warmup_time(&self) -> u16 {
        self.config.borrow().acctuator_warmup_time
    }

    pub fn heater_pump_stop_time(&self) -> u16 {
        self.config.borrow().heater_pump_stop_time
    }

    pub fn constant_temperature_expected(&self) -> f32 {
        self.config.borrow().constant_temperature_expected
    }

    pub fn min_pwm_state(&self) -> u8 {
        self.config.borrow().min_pwm_state
    }

    pub fn min_temperature_diff_for_pwm(&self) -> f32 {
        self.config.borrow().min_temperature_diff_for_pwm
    }

    pub fn temperature_drop_wait(&self) -> f32 {
        self.config.borrow().temperature_drop_wait
    }

    /// Config version; set from the file's modification timestamp on load.
    pub fn version(&self) -> u64 {
        self.config.borrow().version
    }
}

/// The `general` section. `#[new(value = ...)]` supplies `derive_new`
/// defaults for the tunable fields.
#[derive(Debug, new, Serialize, Deserialize, Clone)]
pub struct Config {
    name: String,
    host: String,
    heater_control_name: String,
    heater_control_pin: u8,
    // how long it takes for the acctuator to warm up, in seconds
    #[new(value = "300")]
    acctuator_warmup_time: u16,
    // how long it takes for the pump to stop working, in seconds
    #[new(value = "600")]
    heater_pump_stop_time: u16,
    // ignore zone config and expect this temperature when enabled
    #[new(value = "20.0")]
    constant_temperature_expected: f32,
    // min value for the pwm pin, in percent
    #[new(value = "30")]
    min_pwm_state: u8,
    // below this temperature difference, fall back to min_pwm_state
    #[new(value = "0.3")]
    min_temperature_diff_for_pwm: f32,
    // how far the temperature must drop before re-enabling the acctuator
    #[new(value = "0.7")]
    temperature_drop_wait: f32,
    // stamped from the config file's mtime; not usually present in the YAML
    #[new(value = "0")]
    #[serde(default)]
    version: u64
}

/// Load and parse the YAML config at `config_path`. The general section's
/// `version` is stamped with the file's modification time (secs since the
/// UNIX epoch) so later `has_config_changed` checks can compare against it.
/// I/O and parse failures are logged and mapped to `InvalidData` errors.
pub fn load_config(config_path: &str, verbosity: u8) -> Result<(Config, ControlNodes), Error> {
    let yaml_file = File::open(&config_path)
        .map_err(|err| error!("{:?}", err))
        .map_err(|_| Error::new(ErrorKind::InvalidData, "Unable to open yaml file"))?;
    let reader = BufReader::new(yaml_file);
    let mut full_config: FullConfig = serde_yaml::from_reader(reader)
        .map_err(|err| error!("{:?}", err))
        .map_err(|_| Error::new(ErrorKind::InvalidData, "Unable to parse yaml file"))?;

    debug!("Config loaded: {} Verbosity: {}", config_path, verbosity);

    // Use the file's mtime as the version; 0 when metadata is unavailable.
    let version = metadata(config_path)
        .and_then(|meta| meta.modified())
        .map(|stime| if let Ok(dur) = stime.duration_since(UNIX_EPOCH) { dur.as_secs() } else { 0 })
        .unwrap_or(0);

    if full_config.general.version != version {
        full_config.general.version = version;
    }

    Ok((full_config.general, full_config.controls))
}

/// True when the file at `config_path` has been modified after the mtime
/// recorded in `version`; false on any metadata error.
pub fn has_config_changed(config_path: &str, version: u64) -> bool {
    if let Ok(meta) = metadata(config_path) {
        if let Ok(stime) = meta.modified() {
            if let Ok(dur) = stime.duration_since(UNIX_EPOCH) {
                return version < dur.as_secs();
            }
        }
    }
    false
}

#[cfg(test)]
mod tests {
    use speculate::speculate;
    use super::*;
    use serde_yaml;
    use serde_json;

    speculate! {
        describe "config serialization" {
            it "should serialize full config" {
                let contents = "
general:
    host: 192.168.0.140
    name: sildymas
    # how long it takes for acctuator to warm up in secs
    acctuator_warmup_time: 180
    # how long it takes for pump to stop working in secs
    heater_pump_stop_time: 600
    # ignore zone config and expect this temperature when enabled
    constant_temperature_expected: 18.0
    # min value for pwm pin in percent
    min_pwm_state: 30
    # if the temperature difference is less then min_temperature_diff_for_pwm use min_pwm_state
    min_temperature_diff_for_pwm: 0.5
    # when temperature reaches its expected value wait for it to drop temperature_drop_wait to turn acctuator back on
    temperature_drop_wait: 0.7
    heater_control_name: main_control
    heater_control_pin: 83
controls:
    main_control:
        path: sildymas/nodes/main
        control_pin: 83
        zones:
            salionas:
                times:
                - start: 4:00
                  end: 21:00
                  expected_temperature: 21.0
                - start: 4:00
                  end: 21:00
                  expected_temperature: 21.0
                sensor_pin: 2
                control_pin: 4
    slave_control:
        path: sildymas/nodes/slave
        zones:
            miegamasis:
                times:
                - start: 2:00
                  end: 23:00
                  expected_temperature: 20.5
                control_pin: 10
                sensor_pin: 2
            vaiku:
                times:
                - start: 2:00
                  end: 23:00
                  expected_temperature: 20.5
                control_pin: 9
                sensor_pin: 2
";
                // Round-trip: YAML -> FullConfig -> JSON must not panic.
                let config: FullConfig = serde_yaml::from_str(&contents).unwrap();
                let json = serde_json::to_string(&config).unwrap();
            }
        }
    }
}
use anyhow::*;
use derive_more::*;
use serde::{Deserialize, Serialize};
use smart_default::SmartDefault;
use std::{fmt, str::FromStr};

/// A length that is either an absolute pixel count or a percentage of some
/// container dimension. Defaults to `Pixels(0)` (via `SmartDefault`).
#[derive(Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Display, DebugCustom, SmartDefault)]
pub enum NumWithUnit {
    #[display(fmt = "{}%", .0)]
    #[debug(fmt = "{}%", .0)]
    Percent(i32),
    #[display(fmt = "{}px", .0)]
    #[debug(fmt = "{}px", .0)]
    #[default]
    Pixels(i32),
}

impl NumWithUnit {
    /// Resolve against a container size: percentages are taken relative to
    /// `max` (truncated toward zero), pixel values pass through unchanged.
    pub fn relative_to(&self, max: i32) -> i32 {
        match *self {
            NumWithUnit::Percent(n) => ((max as f64 / 100.0) * n as f64) as i32,
            NumWithUnit::Pixels(n) => n,
        }
    }
}

impl FromStr for NumWithUnit {
    type Err = anyhow::Error;

    /// Parse strings like `"55"`, `"-20px"` or `"50%"`. No unit means
    /// pixels; any suffix other than `px`/`%` is an error.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        lazy_static::lazy_static! {
            // Signed integer followed by an arbitrary suffix (validated below).
            static ref PATTERN: regex::Regex = regex::Regex::new("^(-?\\d+)(.*)$").unwrap();
        };

        let captures = PATTERN.captures(s).with_context(|| format!("could not parse '{}'", s))?;
        let value = captures.get(1).unwrap().as_str().parse::<i32>()?;
        let value = match captures.get(2).unwrap().as_str() {
            "px" | "" => NumWithUnit::Pixels(value),
            "%" => NumWithUnit::Percent(value),
            _ => bail!("couldn't parse {}, unit must be either px or %", s),
        };
        Ok(value)
    }
}

/// An (x, y) pair of [`NumWithUnit`] values, e.g. a position or a size.
#[derive(Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Display, Default)]
#[display(fmt = "{}*{}", x, y)]
pub struct Coords {
    pub x: NumWithUnit,
    pub y: NumWithUnit,
}

impl FromStr for Coords {
    type Err = anyhow::Error;

    /// Parse `"200x500"`-style strings; `x`, `X` and `*` all work as the
    /// separator between the two components.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let (x, y) = s
            .split_once(|x: char| x.to_ascii_lowercase() == 'x' || x.to_ascii_lowercase() == '*')
            .ok_or_else(|| anyhow!("must be formatted like 200x500"))?;
        Coords::from_strs(x, y)
    }
}

impl fmt::Debug for Coords {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "CoordsWithUnits({}, {})", self.x, self.y)
    }
}

impl Coords {
    /// Construct from two absolute pixel values.
    pub fn from_pixels(x: i32, y: i32) -> Self {
        Coords { x: NumWithUnit::Pixels(x), y: NumWithUnit::Pixels(y) }
    }

    /// parse a string for x and a string for y into a [`Coords`] object.
    pub fn from_strs(x: &str, y: &str) -> Result<Coords> {
        Ok(Coords {
            x: x.parse().with_context(|| format!("Failed to parse '{}'", x))?,
            y: y.parse().with_context(|| format!("Failed to parse '{}'", y))?,
        })
    }

    /// resolve the possibly relative coordinates relative to a given containers size
    pub fn relative_to(&self, width: i32, height: i32) -> (i32, i32) {
        (self.x.relative_to(width), self.y.relative_to(height))
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use pretty_assertions::assert_eq;

    /// Bare numbers and `px` parse to Pixels, `%` to Percent, others fail.
    #[test]
    fn test_parse_num_with_unit() {
        assert_eq!(NumWithUnit::Pixels(55), NumWithUnit::from_str("55").unwrap());
        assert_eq!(NumWithUnit::Pixels(55), NumWithUnit::from_str("55px").unwrap());
        assert_eq!(NumWithUnit::Percent(55), NumWithUnit::from_str("55%").unwrap());
        assert!(NumWithUnit::from_str("55pp").is_err());
    }

    /// A separator is required between the two coordinate components.
    #[test]
    fn test_parse_coords() {
        assert_eq!(Coords { x: NumWithUnit::Pixels(50), y: NumWithUnit::Pixels(60) }, Coords::from_str("50x60").unwrap());
        assert!(Coords::from_str("5060").is_err());
    }
}
/// Metric used when measuring how far apart two grid points are.
#[derive(Copy, Clone)]
pub enum Distance {
    Manhattan, Diagonal, Circle
}

/// Walk a Bresenham line from (x1, y1) to (x2, y2), invoking `f` once per
/// visited cell (both endpoints included). `f` returns `false` to abort the
/// walk early.
pub fn draw_line<F>(x1: i32, y1: i32, x2: i32, y2: i32, mut f: F)
where
    F: FnMut(i32, i32) -> bool,
{
    let span_x = (x2 - x1).abs();
    let span_y = (y2 - y1).abs();
    let step_x = if x1 < x2 { 1 } else { -1 };
    let step_y = if y1 < y2 { 1 } else { -1 };
    // Classic Bresenham error term, seeded from the dominant axis.
    let mut error = if span_x > span_y { span_x / 2 } else { -span_y / 2 };
    let (mut cx, mut cy) = (x1, y1);
    if !f(cx, cy) {
        return;
    }
    while cx != x2 || cy != y2 {
        let error_before = error;
        if error_before > -span_x {
            error -= span_y;
            cx += step_x;
            if !f(cx, cy) {
                return;
            }
        }
        if error_before < span_y {
            error += span_x;
            cy += step_y;
            if !f(cx, cy) {
                return;
            }
        }
    }
}

/// Rasterize a circle outline centred on (x, y), stepping along one octant
/// and choosing the next offset by whichever candidate stays closest to
/// `radius` under the `algo` metric. `_width`/`_height` are accepted for
/// interface compatibility but unused.
pub fn draw_circle_algo<F>(x: i32, y: i32, radius: i32, _width: i32, _height: i32, algo: Distance, mut f: F)
where
    F: FnMut(i32, i32) -> bool,
{
    let mut dx = 0;
    let mut dy = radius;
    while dx <= dy {
        // Mirror the current offset into all eight octants (same emission
        // order as the midpoint variant below).
        let mirrored = [
            (x + dy, y + dx),
            (x + dx, y + dy),
            (x - dx, y + dy),
            (x - dy, y + dx),
            (x - dy, y - dx),
            (x - dx, y - dy),
            (x + dx, y - dy),
            (x + dy, y - dx),
        ];
        for &(px, py) in mirrored.iter() {
            if !f(px, py) {
                return;
            }
        }
        // Advance to whichever candidate keeps us nearest the ideal radius.
        let straight = distance(x, y, x + dx + 1, y + dy, algo);
        let diagonal = distance(x, y, x + dx + 1, y + dy - 1, algo);
        if (straight - radius as f32).abs() < (diagonal - radius as f32).abs() {
            dx += 1;
        } else {
            dx += 1;
            dy -= 1;
        }
    }
}

/// Rasterize a circle outline using the integer midpoint decision variable
/// (no floating point). Emission order matches `draw_circle_algo`.
pub fn draw_circle<F>(x: i32, y: i32, radius: i32, _width: i32, _height: i32, mut f: F)
where
    F: FnMut(i32, i32) -> bool,
{
    let mut dx = 0;
    let mut dy = radius;
    let mut decision = 3 - 2 * radius;
    while dx <= dy {
        let mirrored = [
            (x + dy, y + dx),
            (x + dx, y + dy),
            (x - dx, y + dy),
            (x - dy, y + dx),
            (x - dy, y - dx),
            (x - dx, y - dy),
            (x + dx, y - dy),
            (x + dy, y - dx),
        ];
        for &(px, py) in mirrored.iter() {
            if !f(px, py) {
                return;
            }
        }
        if decision < 0 {
            decision = decision + 4 * dx + 6;
            dx += 1;
        } else {
            decision = decision + 4 * (dx - dy) + 10;
            dx += 1;
            dy -= 1;
        }
    }
}

/// Distance between two points under the chosen metric, as `f32`.
pub fn distance(x1: i32, y1: i32, x2: i32, y2: i32, algo: Distance) -> f32 {
    match algo {
        Distance::Manhattan => (x1 - x2).abs() as f32 + (y1 - y2).abs() as f32,
        Distance::Diagonal => (x1 - x2).abs().max((y1 - y2).abs()) as f32,
        Distance::Circle => {
            let run = x1 - x2;
            let rise = y1 - y2;
            ((run * run) as f32 + (rise * rise) as f32).sqrt()
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_draw_line() {
        let cases = [
            [[5, 5], [10, 10]],
            [[0, 0], [4, 12]],
            [[10, 5], [2, 2]],
            [[10, 5], [2, 10]],
            [[-10, 5], [2, 2]]
        ];
        for case in cases.iter() {
            let from = case[0];
            let to = case[1];
            let mut trace = Vec::new();
            draw_line(from[0], from[1], to[0], to[1], |x, y| {
                trace.push([x, y]);
                true
            });
            println!("{:?}", trace);
            // A completed walk must terminate exactly on the requested endpoint.
            assert_eq!(Some(&to), trace.last());
        }
    }
}
// Link checker for an exported bookmarks CSV: issues an HTTP HEAD request for
// every bookmark in parallel (rayon) and logs failures to ./error.log.
// Uses the pre-2018 ecosystem: hyper 0.x synchronous client, csv with
// rustc_serialize decoding.
extern crate csv;
extern crate rustc_serialize;
#[macro_use]
extern crate hyper;
extern crate rayon;

use std::io;
use std::io::Write;
use std::fs::OpenOptions;
use std::thread;
use std::sync::Arc;

use csv::Reader;
use hyper::{Url, Client};
use hyper::client::response::Response;
use hyper::header::{Headers, UserAgent};
use rayon::prelude::*;

// Declare typed wrappers for headers hyper 0.x doesn't ship built-in.
header! { (Accept, "Accept") => [String] }
header! { (AcceptLanguage, "AcceptLanguage") => [String] }

// One CSV row of the bookmark export; field order must match the file.
#[derive(RustcDecodable)]
struct Bookmark {
    title: String,
    url: String,
    tags: String,
    description: String,
    comments: String,
    annotations: String,
    created_at: String,
}

const BOOKMARKS_PATH: &'static str = "../3070477_csv_2017_01_08_56075.csv";
const ERROR_LOG_PATH: &'static str = "./error.log";

// Write `message` to stdout and append it to the error log. All write
// failures are deliberately ignored: logging is best-effort and must not
// abort the crawl.
fn log_error(message: &String) {
    let output = message.to_string() + "\n";
    let output_bytes = output.as_bytes();
    match io::stdout().write_all(&output_bytes) {
        Ok(_) => (),
        Err(_) => (),
    }
    let error_log_result = OpenOptions::new()
        .create(true)
        .append(true)
        .open(ERROR_LOG_PATH);
    if let Ok(mut error_log) = error_log_result {
        match error_log.write_all(&output_bytes) {
            Ok(_) => (),
            Err(_) => (),
        }
    }
}

// HEAD-request one bookmark's URL with browser-like headers; print successes
// to stdout, route parse errors / non-2xx statuses / transport errors to
// log_error.
fn check_bookmark(bookmark: &Bookmark) {
    // println!("Start {}", bookmark.url);
    let url_result = Url::parse(&bookmark.url);
    if let Err(err) = url_result {
        log_error(&format!("Could not parse {}: {}", bookmark.url, err));
        return;
    }
    let url = url_result.unwrap();
    // Impersonate a desktop Chrome browser; some servers reject unknown agents.
    let mut headers = Headers::new();
    headers.set(UserAgent("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36".to_string()));
    headers.set(Accept("text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8".to_string()));
    headers.set(AcceptLanguage("en,de;q=0.8".to_string()));
    let result = Client::new()
        .head(url)
        .headers(headers)
        .send();
    match result {
        Ok(response) => {
            let status = response.status;
            if status.is_success() {
                println!("{} {}", bookmark.url, response.status);
            } else {
                log_error(&format!("{} {}", bookmark.url, status));
            }
        }
        Err(err) => {
            log_error(&format!("{} {:?}", bookmark.url, err));
        }
    };
}

// Decode the whole CSV up front, then check every bookmark in parallel.
// The map/collect into Vec<()> exists only to drive the parallel iterator.
fn main() {
    Reader::from_file(BOOKMARKS_PATH)
        .unwrap()
        .decode()
        .collect::<csv::Result<Vec<Bookmark>>>()
        .unwrap()
        .par_iter()
        .map(|bookmark| { check_bookmark(bookmark) })
        .collect::<Vec<_>>();
}
mod controller;

use crate::controller::{
    app::{ApplicationController, ANNOTATION_APP_NAME},
    ControllerConfig,
};
use anyhow::{anyhow, Context};
use async_std::sync::{Arc, Mutex};
use dotenv::dotenv;
use drogue_client::registry;
use drogue_cloud_operator_common::{
    controller::base::{
        queue::WorkQueueConfig, BaseController, EventDispatcher, FnEventProcessor,
        ResourceProcessor,
    },
    watcher::RunStream,
};
use drogue_cloud_registry_events::{
    stream::{KafkaEventStream, KafkaStreamConfig},
    Event,
};
use drogue_cloud_service_common::{
    config::ConfigFromEnv,
    defaults,
    health::{HealthServer, HealthServerConfig},
    openid::TokenConfig,
};
use futures::FutureExt;
use k8s_openapi::api::core::v1::Secret;
use kube::{api::ListParams, core::DynamicObject, discovery, Api};
use kube_runtime::watcher;
use serde::Deserialize;
use std::fmt::Debug;
use url::Url;

/// Service configuration, deserialized from environment variables via
/// `ConfigFromEnv` in `main`.
#[derive(Clone, Debug, Deserialize)]
struct Config {
    #[serde(default = "defaults::max_json_payload_size")]
    pub max_json_payload_size: usize,
    #[serde(default = "defaults::bind_addr")]
    pub bind_addr: String,
    #[serde(default)]
    pub registry: RegistryConfig,
    #[serde(default)]
    pub health: HealthServerConfig,
    pub controller: ControllerConfig,
    pub work_queue: WorkQueueConfig,
    pub kafka_source: KafkaStreamConfig,
}

/// Location of the device registry service.
#[derive(Clone, Debug, Deserialize)]
pub struct RegistryConfig {
    #[serde(default = "defaults::registry_url")]
    pub url: Url,
}

impl Default for RegistryConfig {
    fn default() -> Self {
        Self {
            url: defaults::registry_url(),
        }
    }
}

/// Event filter for the registry stream: returns the application name for
/// events this controller should process, `None` for everything else.
fn is_relevant(event: &Event) -> Option<String> {
    match event {
        Event::Application {
            path, application, ..
        } if
            // watch the creation of a new application
            path == "."
                ||
            // watch the finalizer addition
            path == ".metadata" => Some(application.clone()),
        _ => None,
    }
}

const GROUP_KAFKA_STRIMZI_IO: &str = "kafka.strimzi.io";
const KIND_KAFKA_TOPIC: &str = "KafkaTopic";
const KIND_KAFKA_USER: &str = "KafkaUser";

/// Wires up the application controller: discovers the Strimzi CRDs, builds
/// the registry client, and runs all event sources plus the health server
/// concurrently until the first of them terminates.
#[actix::main]
async fn main() -> anyhow::Result<()> {
    env_logger::init();
    dotenv().ok();

    let config = Config::from_env()?;

    let kube = kube::client::Client::try_default()
        .await
        .context("Failed to create Kubernetes client")?;

    // k8s resources — discover the Strimzi kinds at runtime rather than
    // compiling against a fixed CRD version.
    let group = discovery::group(&kube, GROUP_KAFKA_STRIMZI_IO).await?;
    let (kafka_topic_resource, _caps) = group
        .recommended_kind(KIND_KAFKA_TOPIC)
        .ok_or_else(|| anyhow!("Unable to discover '{}'", KIND_KAFKA_TOPIC))?;
    let kafka_topics = Api::<DynamicObject>::namespaced_with(
        kube.clone(),
        &config.controller.topic_namespace,
        &kafka_topic_resource,
    );
    let (kafka_user_resource, _caps) = group
        .recommended_kind(KIND_KAFKA_USER)
        .ok_or_else(|| anyhow!("Unable to discover '{}'", KIND_KAFKA_USER))?;
    let kafka_users = Api::<DynamicObject>::namespaced_with(
        kube.clone(),
        &config.controller.topic_namespace,
        &kafka_user_resource,
    );
    let secrets = Api::<Secret>::namespaced(kube.clone(), &config.controller.topic_namespace);

    // client
    let client = reqwest::Client::new();

    let registry = registry::v1::Client::new(
        client.clone(),
        config.registry.url,
        Some(
            TokenConfig::from_env_prefix("REGISTRY")?
                .amend_with_env()
                .discover_from(client.clone())
                .await?,
        ),
    );

    // controller — shared (Arc<Mutex>) because every event source below
    // feeds the same BaseController.
    let controller = Arc::new(Mutex::new(BaseController::new(
        config.work_queue,
        "app",
        ApplicationController::new(
            config.controller,
            registry,
            kafka_topic_resource,
            kafka_topics.clone(),
            kafka_user_resource,
            kafka_users.clone(),
            secrets.clone(),
        ),
    )?));

    // event source - device registry
    let registry_dispatcher =
        EventDispatcher::one(FnEventProcessor::new(controller.clone(), is_relevant));
    let registry = KafkaEventStream::new(config.kafka_source)?;
    let registry = registry.run(registry_dispatcher);

    // event source - KafkaTopic
    let watcher_topics = watcher(kafka_topics, ListParams::default());
    let watcher_topics = watcher_topics.run_stream(EventDispatcher::one(ResourceProcessor::new(
        controller.clone(),
        ANNOTATION_APP_NAME,
    )));

    // event source - KafkaUser
    let watcher_users = watcher(kafka_users, ListParams::default());
    let watcher_users = watcher_users.run_stream(EventDispatcher::one(ResourceProcessor::new(
        controller.clone(),
        ANNOTATION_APP_NAME,
    )));

    // event source - Secret
    let watcher_secret = watcher(secrets, ListParams::default());
    let watcher_secret = watcher_secret.run_stream(EventDispatcher::one(ResourceProcessor::new(
        controller,
        ANNOTATION_APP_NAME,
    )));

    // health server
    let health = HealthServer::new(config.health, vec![]);

    // run — the service ends as soon as any one of these futures completes.
    log::info!("Running service ...");
    futures::select! {
        _ = health.run().fuse() => {},
        _ = registry.fuse() => {},
        _ = watcher_topics.fuse() => {},
        _ = watcher_users.fuse() => {},
        _ = watcher_secret.fuse() => {},
    };

    // exiting

    Ok(())
}
use crate::tokens::Expr; #[derive(Debug, PartialEq)] pub(crate) enum Type { Number, Bool, Str, Expression, Var, }
// use crate::components::comparisons::{get_comparisons, Comparison, FeatureSupport}; use crate::components::container::{Container, ContainerProps}; use perseus::{t, GenericNode, Template}; use std::rc::Rc; use sycamore::prelude::Template as SycamoreTemplate; use sycamore::prelude::*; #[component(ComparisonsPage<G>)] pub fn comparisons_page() -> SycamoreTemplate<G> { template! { Container(ContainerProps { title: t!("perseus"), children: template! { div(class = "flex flex-col justify-center text-center dark:text-white mt-14 xs:mt-16 sm:mt-20 lg:mt-25") { div { h1(class = "text-5xl xs:text-7xl sm:text-8xl md:text-9xl p-2 font-extrabold") { "Comparisons" } br() p(class = "text-lg") { "See how Perseus compares to other web development frameworks." } p(class = "italic") { "Is there anything we're missing here? Please " a(href = "https://github.com/arctic-hen7/perseus/issues/new/choose") { "open an issue" } " and let us know!" } } br(class = "mb-24") p(class = "text-xl") { (t!("comparisons-todo")) } } } }) } } pub fn get_template<G: GenericNode>() -> Template<G> { Template::new("comparisons") .template(Rc::new(|_| { template! { ComparisonsPage() } })) .head(Rc::new(|_| { template! { title { (format!("{} | {}", t!("comparisons-title"), t!("perseus"))) } } })) }
#[macro_use] extern crate serde_derive; extern crate clap; use std::fs::File; use std::io::Error as IoError; use std::io::Read; use clap::{App, Arg}; mod execute; mod parse_config; mod task_output; fn parentpath(path: String) -> String { let mut v: Vec<&str> = path.split("/").collect(); let len = v.len(); v.remove(len - 1); let retval: String = v.join("/"); retval } fn read_sirenfile(sirenfile_path: String) -> Result<String, IoError> { let mut sirenfile = File::open(sirenfile_path)?; let mut string_json = String::new(); sirenfile.read_to_string(&mut string_json)?; Ok(string_json) } fn main() { let matches = App::new("Siren") .version("1.5.1") .author("Alessio Biancalana <dottorblaster@gmail.com>") .about("Your tiny friendly rusty neighborhood monitoring CLI tool") .arg( Arg::new("file") .short('f') .long("file") .value_name("FILE") .help("Sets a custom Sirenfile") .takes_value(true), ) .arg( Arg::new("json-output") .short('j') .long("json-output") .value_name("JSON") .help("Enable JSON output") .takes_value(false), ) .get_matches(); let sirenfile_path = matches .value_of("file") .unwrap_or("./Sirenfile.json") .to_owned(); let output_json = matches.is_present("json-output"); let configstring = match read_sirenfile(sirenfile_path) { Ok(jsoncontent) => jsoncontent, Err(err) => { println!("Error! Probably Sirenfile is missing\n{}", err.to_string()); String::new() } }; let conf = parse_config::string_to_config(configstring); let cwd_path = match conf.switch_cwd { true => parentpath( matches .value_of("file") .unwrap_or("./Sirenfile.json") .to_owned(), ), false => String::from("."), }; execute::run(conf.tasks, cwd_path, output_json); }
mod world; /// Use the 'cover' sizing strategy that ensure the rendered area covers the /// entire viewport. struct PerspectiveCamera { /// Aspect-ratio from width by height. screen: (u32, u32), /// Field of view in angle. Normal human vision is limited to `pi / 6` /// radians vertically. /// /// See https://en.wikipedia.org/wiki/Human_eye#Field_of_view. fov: f32, clip_range: f32, } impl ToMatrix for PerspectiveCamera { fn to_matrix(&self) -> Matrix { let tan = (self.fov / 2.0).tan(); let (near, far) = self.clip_range; let scale = (tan * near).recip(); let (screen_w, screen_h) = self.screen; let (m11, m22) = if screen_w < screen_h { // Normalize portrait coordinates. (scale, scale * (screen_w / screen_h)) } else { // Normalize landscape coordinates. (scale * (screen_h / screen_w), scale) }; // Unit depth. let m33 = (near - far).recip(); // Depth offset for near plain, which is the origin of our frustum. let m34 = near * m33; // Keep a copy of the actual depth. let m43 = -1.0; Matrix( _mm_set_ps(0.0, 0.0, 0.0, m11), _mm_set_ps(0.0, 0.0, m22, 0.0), _mm_set_ps(m34, m33, 0.0, 0.0), _mm_set_ps(0.0, m43, 0.0, 0.0), ) } }
use lazy_static::lazy_static; use std::collections::HashSet; lazy_static! { static ref FREQUENCY_CHANGES: Vec<i32> = include_str!("input.txt") .lines() .map(|line| line.parse().unwrap()) .collect(); } fn part1() { println!("{}", FREQUENCY_CHANGES.iter().sum::<i32>()); } fn part2() { let mut set = HashSet::new(); let mut freq = 0; for n in FREQUENCY_CHANGES.iter().cycle() { if set.contains(&freq) { break; } set.insert(freq); freq += n; } println!("{}", freq); } fn main() { part1(); part2(); }
use std::collections::HashMap; use super::super::utils::http_get; use crate::error::Result; use serde::{Deserialize, Serialize}; use serde_json::Value; #[derive(Clone, Serialize, Deserialize)] #[allow(non_snake_case)] struct FutureMarket { name: String, underlying: String, cycle: String, #[serde(rename = "type")] type_: String, in_delisting: bool, #[serde(flatten)] extra: HashMap<String, Value>, } // See https://www.gateio.pro/docs/apiv4/zh_CN/index.html#595cd9fe3c-2 fn fetch_future_markets_raw(settle: &str) -> Result<Vec<FutureMarket>> { let txt = http_get( format!("https://api.gateio.ws/api/v4/delivery/{}/contracts", settle).as_str(), None, )?; let markets = serde_json::from_str::<Vec<FutureMarket>>(&txt)?; Ok(markets .into_iter() .filter(|x| !x.in_delisting) .collect::<Vec<FutureMarket>>()) } pub(super) fn fetch_inverse_future_symbols() -> Result<Vec<String>> { let symbols = fetch_future_markets_raw("btc")? .into_iter() .map(|m| m.name) .collect::<Vec<String>>(); Ok(symbols) } pub(super) fn fetch_linear_future_symbols() -> Result<Vec<String>> { let symbols = fetch_future_markets_raw("usdt")? .into_iter() .map(|m| m.name) .collect::<Vec<String>>(); Ok(symbols) }
pub fn remove_duplicate_letters(s: String) -> String { let mut remaining = vec![0; 26]; let mut visited = vec![false; 26]; let mut st = vec![]; let ind = |c| (c as u8 - 'a' as u8) as usize; for c in s.chars() { remaining[ind(c)] += 1; } for c in s.chars() { remaining[ind(c)] -= 1; if visited[ind(c)] { continue; } while let Some(&l) = st.last() { if l > c && remaining[ind(l)] > 0 { st.pop(); visited[ind(l)] = false; } else { break; } } st.push(c); visited[ind(c)] = true; } st.into_iter().collect() }
use amethyst::{
    core::{
        nalgebra::{base::Matrix, Vector2},
        timing::Time,
        transform::components::Transform,
    },
    ecs::{Join, Read, ReadStorage, System, WriteStorage},
    input::InputHandler,
};

use crate::components::{
    for_characters::{Engine, FuelTank},
    physics::Dynamics,
};

/// Per-frame system that turns the "right"/"up" input axes into an engine
/// force applied to each entity's `Dynamics`, draining its `FuelTank`.
#[derive(Default)]
pub struct EngineForceSystem;

impl<'s,> System<'s,> for EngineForceSystem {
    type SystemData = (
        ReadStorage<'s, Transform,>,
        WriteStorage<'s, Dynamics,>,
        WriteStorage<'s, FuelTank,>,
        ReadStorage<'s, Engine,>,
        Read<'s, Time,>,
        Read<'s, InputHandler<String, String,>,>,
    );

    fn run(
        &mut self,
        (transforms, mut dynamics, mut fuel_tanks, engines, time, input,): Self::SystemData,
    ) {
        let dt = time.delta_seconds();
        for (transform, mut dynamic, mut tank, engine,) in
            (&transforms, &mut dynamics, &mut fuel_tanks, &engines,).join()
        {
            // Input gathering ( !! not multiplayer friendly, add playerID's, e.g. in ship_base !! )
            // Axis values default to 0.0 when the axis is unbound/absent.
            let mut engine_scaling = Vector2::new(0.0, 0.0,);
            {
                let engine_scaling_x = input.axis_value("right",);
                let engine_scaling_y = input.axis_value("up",);
                if let Some(engine_scaling_x_temp,) = engine_scaling_x {
                    engine_scaling[0] = engine_scaling_x_temp as f32;
                };
                if let Some(engine_scaling_y_temp,) = engine_scaling_y {
                    engine_scaling[1] = engine_scaling_y_temp as f32;
                };
            }

            // Requested force: the engine's max force scaled per-component by
            // the input axes; its magnitude drives fuel consumption.
            let mut engine_force_vec = engine.max_force;
            engine_force_vec.component_mul_assign(&engine_scaling);
            let engine_force_attempt = engine_force_vec.magnitude();
            {
                let fuel_consumption =
                    (engine_force_attempt * engine.consumption * dt as f32) / engine.efficiency;
                if fuel_consumption > tank.fuel_level {
                    // Provide as much force as the fuel allows and set the tank empty.
                    let engine_force_actual =
                        engine.efficiency * tank.fuel_level / (engine.consumption * dt as f32);
                    tank.fuel_level = 0.0;
                    let scaling = engine_force_actual / engine_force_attempt;
                    engine_force_vec *= scaling;
                } else {
                    // If enough fuel is present, only reduce fuel level.
                    tank.fuel_level -= fuel_consumption;
                }
            }

            // Add engine force (player input) to *natural*/physical forces, e.g. gravitational force.
            // TODO: Player rotation, as soon as its implemented
            // NOTE(review): these dot products project onto fixed axes (x,
            // and negated y) — presumably a placeholder until rotation lands.
            let world_force_x = Matrix::dot(
                &engine_force_vec,
                &Vector2::new(
                    1.0, 0.0,
                )
            );
            let world_force_y = Matrix::dot(
                &engine_force_vec,
                &Vector2::new(
                    0.0, -1.0,
                )
            );
            let world_force_vec = Vector2::new(world_force_x, world_force_y,);
            dynamic.force += world_force_vec;
        }
    }
}
use pipers::Pipe; use regex::Regex; use std::str::FromStr; use std::process::Command; fn main() { // let bla = "pacmd set-card-profile $index off"; // let bla2 = "pacmd set-card-profile $index a2dp_sink_ldac"; // pacmd list-cards | grep -e 'index:' -e 'active profile' let out = Pipe::new("pacmd list-cards") .then("grep bluez_card -B1") .then("grep index") .finally() .expect("Commands dit not pipe") .wait_with_output() .expect("failed to wait on child"); let output = &String::from_utf8(out.stdout).unwrap(); let mut indexes: Vec<u32> = vec![]; let re = Regex::new(r"\d+").unwrap(); for cap in re.captures_iter(output) { println!("bluetooth profile index is {}", &cap[0]); indexes.push(FromStr::from_str(&cap[0]).unwrap()); } if indexes.len() > 1 { panic!("More than one bluetooth profile found"); } println!("Ok, one device found, changing card profile to LDAC.."); //let _foo = format!("{} off", indexes[0]); //let foo2 = format!("{} a2dp_sink_ldac", indexes[0]); //Command::new("pacmd") //.args(&["set-card-profile", &foo2]) //.spawn() //.expect("failed"); }
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 */

//! A drop-in replacement for `std::process::Command` that provides the ability
//! to set up namespaces, a seccomp filter, and more.

#![deny(missing_docs)]
#![deny(rustdoc::broken_intra_doc_links)]
#![cfg_attr(feature = "nightly", feature(internal_output_capture))]

mod builder;
mod child;
mod clone;
mod container;
mod env;
mod error;
mod exit_status;
mod fd;
mod id_map;
mod mount;
mod namespace;
mod net;
mod pid;
mod pty;
pub mod seccomp;
mod spawn;
mod stdio;
mod util;

use std::ffi::CString;

pub use child::Child;
pub use child::Output;
pub use container::Container;
pub use container::RunError;
pub use error::Context;
pub use error::Error;
pub use exit_status::ExitStatus;
pub use mount::Bind;
pub use mount::Mount;
pub use mount::MountFlags;
pub use mount::MountParseError;
pub use namespace::Namespace;
// Re-export Signal since it is used by `Child::signal`.
pub use nix::sys::signal::Signal;
pub use pid::Pid;
pub use pty::Pty;
pub use pty::PtyChild;
pub use stdio::ChildStderr;
pub use stdio::ChildStdin;
pub use stdio::ChildStdout;
pub use stdio::Stdio;
use syscalls::Errno;

/// A builder for spawning a process.
// See the builder.rs for documentation of each field.
pub struct Command {
    program: CString,
    args: util::CStringArray,
    pre_exec: Vec<Box<dyn FnMut() -> Result<(), Errno> + Send + Sync>>,
    container: Container,
}

impl Command {
    /// Converts [`std::process::Command`] into [`Command`]. Note that this is a
    /// very basic and *lossy* conversion.
    ///
    /// This only preserves the
    /// - program path,
    /// - arguments,
    /// - environment variables,
    /// - and working directory.
    ///
    /// # Caveats
    ///
    /// Since [`std::process::Command`] is rather opaque and doesn't provide
    /// access to all fields, this will *not* preserve:
    /// - stdio handles,
    /// - `env_clear`,
    /// - any `pre_exec` callbacks,
    /// - `arg0` (if not the same as `program`),
    /// - `uid`, `gid`, or `groups`.
    pub fn from_std_lossy(cmd: &std::process::Command) -> Command {
        let mut result = Command::new(cmd.get_program());
        result.args(cmd.get_args());

        for (key, value) in cmd.get_envs() {
            // A `None` value means the variable is explicitly removed.
            match value {
                Some(value) => result.env(key, value),
                None => result.env_remove(key),
            };
        }

        if let Some(dir) = cmd.get_current_dir() {
            result.current_dir(dir);
        }

        result
    }

    /// This provides a *lossy* conversion to [`std::process::Command`]. The
    /// features that are not supported by [`std::process::Command`] but *are*
    /// supported by [`Command`] cannot be converted. For example, namespace and
    /// mount configurations cannot be converted since they are not supported by
    /// [`std::process::Command`].
    pub fn into_std_lossy(self) -> std::process::Command {
        let mut result = std::process::Command::new(self.get_program());
        result.args(self.get_args());

        if self.container.env.is_cleared() {
            result.env_clear();
        }

        for (key, value) in self.get_envs() {
            match value {
                Some(value) => result.env(key, value),
                None => result.env_remove(key),
            };
        }

        if let Some(dir) = self.get_current_dir() {
            result.current_dir(dir);
        }

        // arg0 and pre_exec callbacks only exist on Unix in the std API.
        #[cfg(unix)]
        {
            use std::os::unix::process::CommandExt;

            result.arg0(self.get_arg0());

            for mut f in self.pre_exec {
                unsafe {
                    result.pre_exec(move || f().map_err(Into::into));
                }
            }
        }

        result.stdin(self.container.stdin);
        result.stdout(self.container.stdout);
        result.stderr(self.container.stderr);

        result
    }
}

#[cfg(test)]
mod tests {
    use std::collections::BTreeMap;
    use std::fs;
    use std::path::Path;
    use std::str::from_utf8;

    use super::*;
    use crate::ExitStatus;

    #[tokio::test]
    async fn spawn() {
        assert_eq!(
            Command::new("true").spawn().unwrap().wait().await.unwrap(),
            ExitStatus::Exited(0)
        );
        assert_eq!(
            Command::new("false").spawn().unwrap().wait().await.unwrap(),
            ExitStatus::Exited(1)
        );
    }

    #[test]
    fn wait_blocking() {
        assert_eq!(
            Command::new("true")
                .spawn()
                .unwrap()
                .wait_blocking()
                .unwrap(),
            ExitStatus::Exited(0)
        );
        assert_eq!(
            Command::new("false")
                .spawn()
                .unwrap()
                .wait_blocking()
                .unwrap(),
            ExitStatus::Exited(1)
        );
    }

    #[tokio::test]
    async fn spawn_fail() {
        assert_eq!(
            Command::new("/iprobablydonotexist").spawn().unwrap_err(),
            Error::new(Errno::ENOENT, Context::Exec)
        );
    }

    #[tokio::test]
    async fn double_wait() {
        let mut child = Command::new("true").spawn().unwrap();
        assert_eq!(child.wait().await.unwrap(), ExitStatus::Exited(0));
        assert_eq!(child.wait().await.unwrap(), ExitStatus::Exited(0));
    }

    #[tokio::test]
    async fn output() {
        let output = Command::new("echo")
            .arg("foo")
            .arg("bar")
            .output()
            .await
            .unwrap();
        assert_eq!(output.stdout, b"foo bar\n");
        assert_eq!(output.stderr, b"");
        assert_eq!(output.status, ExitStatus::Exited(0));
    }

    // Parses the contents of a `/proc/<pid>/status` file into key/value pairs.
    fn parse_proc_status(stdout: &[u8]) -> BTreeMap<&str, &str> {
        from_utf8(stdout)
            .unwrap()
            .trim_end()
            .split('\n')
            .map(|line| {
                let mut items = line.splitn(2, ':');
                let first = items.next().unwrap();
                let second = items.next().unwrap();
                (first, second.trim())
            })
            .collect()
    }

    #[tokio::test]
    async fn uid_namespace() {
        let output = Command::new("cat")
            .arg("/proc/self/status")
            .map_root()
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0));

        let proc_status = parse_proc_status(&output.stdout);

        // We should be root user inside of the container.
        assert_eq!(proc_status["Uid"], "0\t0\t0\t0");
    }

    #[tokio::test]
    async fn pid_namespace() {
        let output = Command::new("cat")
            .arg("/proc/self/status")
            .map_root()
            .unshare(Namespace::PID)
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0));

        let proc_status = parse_proc_status(&output.stdout);

        assert_eq!(proc_status["NSpid"].split('\t').nth(1), Some("1"),);

        // Note that, since we haven't mounted a fresh /proc into the container,
        // the child still sees what the parent sees and so the PID will *not*
        // be 1.
        assert_ne!(proc_status["Pid"], "1");
    }

    #[tokio::test]
    async fn mount_proc() {
        let output = Command::new("cat")
            .arg("/proc/self/status")
            .map_root()
            .unshare(Namespace::PID)
            .mount(Mount::proc())
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0));

        let proc_status = parse_proc_status(&output.stdout);

        // With /proc mounted, the child really believes it is the root process.
        assert_eq!(proc_status["NSpid"], "1");
        assert_eq!(proc_status["Pid"], "1");
    }

    #[tokio::test]
    async fn hostname() {
        let output = Command::new("cat")
            .arg("/proc/sys/kernel/hostname")
            .map_root()
            .hostname("foobar.local")
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0));

        let hostname = from_utf8(&output.stdout).unwrap().trim();
        assert_eq!(hostname, "foobar.local");
    }

    #[tokio::test]
    async fn domainname() {
        let output = Command::new("cat")
            .arg("/proc/sys/kernel/domainname")
            .map_root()
            .domainname("foobar")
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0));

        let domainname = from_utf8(&output.stdout).unwrap().trim();
        assert_eq!(domainname, "foobar");
    }

    #[tokio::test]
    async fn pty() {
        use tokio::io::AsyncReadExt;

        let mut pty = Pty::open().unwrap();
        let pty_child = pty.child().unwrap();

        let mut tty = pty_child.terminal_params().unwrap();
        // Prevent post-processing of output so `\n` isn't translated to `\r\n`.
        tty.c_oflag &= !libc::OPOST;
        pty_child.set_terminal_params(&tty).unwrap();

        pty_child.set_window_size(40, 80).unwrap();

        // stty is in coreutils and should be available on most systems.
        let mut child = Command::new("stty")
            .arg("size")
            .pty(pty_child)
            .spawn()
            .unwrap();

        // NOTE: read_to_end returns an EIO error once the child has exited.
        let mut buf = Vec::new();
        assert!(pty.read_to_end(&mut buf).await.is_err());

        assert_eq!(from_utf8(&buf).unwrap(), "40 80\n");

        assert_eq!(child.wait().await.unwrap(), ExitStatus::SUCCESS);
    }

    #[tokio::test]
    async fn mount_devpts_basic() {
        let output = Command::new("ls")
            .arg("/dev/pts")
            .map_root()
            .mount(Mount::devpts("/dev/pts"))
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0));

        // Should be totally empty except for `/dev/pts/ptmx` since we mounted a
        // new devpts.
        assert_eq!(output.stderr, b"");
        assert_eq!(output.stdout, b"ptmx\n");
    }

    #[tokio::test]
    async fn mount_devpts_isolated() {
        let output = Command::new("ls")
            .arg("/dev/pts")
            .map_root()
            .mount(Mount::devpts("/dev/pts").data("newinstance,ptmxmode=0666"))
            .mount(Mount::bind("/dev/pts/ptmx", "/dev/ptmx"))
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0));

        // Should be totally empty except for `/dev/pts/ptmx` since we mounted a
        // new devpts.
        assert_eq!(output.stderr, b"");
        assert_eq!(output.stdout, b"ptmx\n");
    }

    #[tokio::test]
    async fn mount_tmpfs() {
        let output = Command::new("ls")
            .arg("/tmp")
            .map_root()
            .mount(Mount::tmpfs("/tmp"))
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0));

        // Should be totally empty since we mounted a new tmpfs.
        assert_eq!(output.stderr, b"");
        assert_eq!(output.stdout, b"");
    }

    #[tokio::test]
    async fn mount_and_move_tmpfs() {
        let tmpfs = tempfile::tempdir().unwrap();

        // Create a temporary directory that will be the only thing to remain in
        // the `/tmp` mount.
        let persistent = tempfile::tempdir().unwrap();
        fs::write(persistent.path().join("foobar"), b"").unwrap();

        let output = Command::new("ls")
            .arg("/tmp")
            .map_root()
            .mount(Mount::tmpfs(tmpfs.path()))
            // Bind-mount a directory from our upper /tmp to our new /tmp.
            .mount(Mount::bind(persistent.path(), &tmpfs.path().join("my-dir")).touch_target())
            // Move our newly-created tmpfs to hide the upper /tmp folder.
            .mount(Mount::rename(tmpfs.path(), Path::new("/tmp")))
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0));

        // The only thing there should be our bind-mounted directory.
        assert_eq!(output.stderr, b"");
        assert_eq!(output.stdout, b"my-dir\n");
    }

    #[tokio::test]
    async fn mount_bind() {
        let temp = tempfile::tempdir().unwrap();

        let a = temp.path().join("a");
        let b = temp.path().join("b");
        fs::create_dir(&a).unwrap();
        fs::create_dir(&b).unwrap();

        fs::write(a.join("foobar"), "im a test").unwrap();

        let output = Command::new("ls")
            .arg(&b)
            .map_root()
            .mount(Mount::bind(&a, &b))
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0));
        assert_eq!(output.stdout, b"foobar\n");
        assert_eq!(output.stderr, b"");
    }

    #[tokio::test]
    async fn local_networking_ping() {
        let output = Command::new("ping")
            .arg("-c1")
            .arg("::1")
            .map_root()
            .local_networking_only()
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0), "{:?}", output);
    }

    #[tokio::test]
    async fn local_networking_loopback_flags() {
        let output = Command::new("cat")
            .arg("/sys/class/net/lo/flags")
            .map_root()
            .local_networking_only()
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0), "{:?}", output);
        assert_eq!(output.stdout, b"0x9\n", "{:?}", output);
    }

    /// Show that processes in two separate network namespaces can bind to the
    /// same port.
    #[tokio::test]
    async fn port_isolation() {
        use std::thread::sleep;
        use std::time::Duration;

        let mut command = Command::new("nc");
        command
            .arg("-l")
            .arg("127.0.0.1")
            // Can bind to a low port without real root inside the namespace.
            .arg("80")
            .stdin(Stdio::null())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .map_root()
            .local_networking_only();

        let server1 = match command.spawn() {
            // If netcat is not installed just exit successfully.
            Err(error) if error.errno() == Errno::ENOENT => return,
            other => other,
        }
        .unwrap();

        let server2 = command.spawn().unwrap();

        // Give them both time to start up.
        sleep(Duration::from_millis(100));

        // Signal them to shut down. Otherwise, they will wait forever for a
        // connection that will never come.
        server1.signal(Signal::SIGINT).unwrap();
        server2.signal(Signal::SIGINT).unwrap();

        let output1 = server1.wait_with_output().await.unwrap();
        let output2 = server2.wait_with_output().await.unwrap();

        // Without network isolation, one of the servers would exit with an
        // "Address already in use" (exit status 2) error.
        assert_eq!(
            output1.status,
            ExitStatus::Signaled(Signal::SIGINT, false),
            "{:?}",
            output1
        );
        assert_eq!(
            output2.status,
            ExitStatus::Signaled(Signal::SIGINT, false),
            "{:?}",
            output2
        );
    }

    /// Make sure we can call `.local_networking_only` more than once.
    #[tokio::test]
    async fn local_networking_there_can_be_only_one() {
        let output = Command::new("true")
            .map_root()
            .local_networking_only()
            // If calling this twice mounted /sys twice, then we'd get a "Device
            // or resource busy" error.
            .local_networking_only()
            .output()
            .await
            .unwrap();
        assert_eq!(output.status, ExitStatus::Exited(0), "{:?}", output);
        assert_eq!(output.stdout, b"", "{:?}", output);
        assert_eq!(output.stderr, b"", "{:?}", output);
    }

    #[test]
    fn from_std_lossy() {
        let mut stdcmd = std::process::Command::new("echo");
        stdcmd.args(["arg1", "arg2"]);
        stdcmd.current_dir("/foo/bar");
        stdcmd.env_clear();
        stdcmd.env("FOO", "1");
        stdcmd.env("BAR", "2");

        let cmd = Command::from_std_lossy(&stdcmd);
        assert_eq!(cmd.get_program(), "echo");
        assert_eq!(cmd.get_arg0(), "echo");
        assert_eq!(cmd.get_args().collect::<Vec<_>>(), ["arg1", "arg2"]);

        let envs = cmd
            .get_envs()
            .filter_map(|(k, v)| Some((k.to_str()?, v.and_then(|v| v.to_str()))))
            .collect::<Vec<_>>();
        assert_eq!(envs, [("BAR", Some("2")), ("FOO", Some("1"))]);
    }

    #[test]
    fn into_std_lossy() {
        let mut cmd = Command::new("env");
        cmd.args(["-0"]);
        cmd.current_dir("/foo/bar");
        cmd.env_clear();
        cmd.env("FOO", "1");
        cmd.env("BAR", "2");

        let stdcmd = cmd.into_std_lossy();
        assert_eq!(stdcmd.get_program(), "env");
        assert_eq!(stdcmd.get_args().collect::<Vec<_>>(), ["-0"]);

        let envs = stdcmd
            .get_envs()
            .filter_map(|(k, v)| Some((k.to_str()?, v.and_then(|v| v.to_str()))))
            .collect::<Vec<_>>();
        assert_eq!(envs, [("BAR", Some("2")), ("FOO", Some("1"))]);
    }

    #[tokio::test]
    async fn seccomp() {
        use syscalls::Sysno;

        use super::seccomp::*;

        let filter = FilterBuilder::new()
            .default_action(Action::Allow)
            .syscalls([(Sysno::brk, Action::KillProcess)])
            .build();

        let output = Command::new("cat")
            .arg("/proc/self/status")
            .seccomp(filter)
            .output()
            .await
            .unwrap();

        assert_eq!(
            output.status,
            ExitStatus::Signaled(Signal::SIGSYS, true),
            "{:?}",
            output
        );
    }

    #[tokio::test]
    async fn seccomp_notify() {
        use std::collections::HashMap;

        use futures::future::select;
        use futures::future::Either;
        use futures::stream::TryStreamExt;
        use syscalls::Sysno;

        use super::seccomp::*;

        let filter = FilterBuilder::new()
            .default_action(Action::Notify)
            .syscalls([
                // FIXME: Because the first execve happens when the child is
                // spawned, we must allow this through. Otherwise, the
                // `.spawn()` below will deadlock because we can't process
                // seccomp notifications until after it returns.
                (Sysno::execve, Action::Allow),
            ])
            .build();

        let mut child = Command::new("cat")
            .arg("/proc/self/status")
            .seccomp(filter)
            .seccomp_notify()
            .spawn()
            .unwrap();

        let mut summary = HashMap::new();

        let exit_status = {
            let seccomp_notif = child.seccomp_notif.take();

            // Drive the notifier and the child exit concurrently: the
            // notifier loop only ends once the child stops making syscalls.
            let notifier = async {
                if let Some(mut notifier) = seccomp_notif {
                    while let Some(notif) = notifier.try_next().await.unwrap() {
                        *summary.entry(Sysno::from(notif.data.nr)).or_insert(0u64) += 1;

                        // Simply let the syscall through.
                        let resp = seccomp_notif_resp {
                            id: notif.id,
                            val: 0,
                            error: 0,
                            flags: SECCOMP_USER_NOTIF_FLAG_CONTINUE,
                        };
                        notifier.send(&resp).unwrap();
                    }
                }
            };

            let exit_status = child.wait();

            futures::pin_mut!(notifier);
            futures::pin_mut!(exit_status);

            match select(notifier, exit_status).await {
                Either::Left((_, _)) => unreachable!(),
                Either::Right((exit_status, _)) => exit_status.unwrap(),
            }
        };

        assert_eq!(exit_status, ExitStatus::SUCCESS);

        assert!(summary[&Sysno::read] > 0);
        assert!(summary[&Sysno::write] > 0);
        assert!(summary[&Sysno::close] > 0);
    }
}
#![allow(dead_code)] extern crate env_logger; extern crate freetype; #[macro_use] extern crate log; #[macro_use] extern crate objc; pub mod buffer; pub mod pane; pub mod platform; pub mod project; pub mod workspace; use std::env; use std::path::Path; use std::sync::Arc; fn main() { env_logger::init().unwrap(); // println!("Hello, world!"); // let file = buffer::File::from_path(Path::new("./Cargo.toml")); // let lines = file.read_lines(); // println!("{:?}", lines); let application = Arc::new(platform::Application::new()); let project = project::Project { directory: env::current_dir().unwrap(), }; let mut workspace = workspace::Workspace::new(application.clone(), project); workspace.render(); let path = Path::new("./src/main.rs"); workspace.open_path(path); workspace.render(); application.run(); }
mod function; mod primitive; mod record; mod record_body; mod type_; pub use function::*; pub use primitive::*; pub use record::*; pub use record_body::*; pub use type_::*;
// Copyright (c) The Starcoin Core Contributors // SPDX-License-Identifier: Apache-2.0 use actix::clock::Duration; use actix::Message; use starcoin_types::account_address::AccountAddress; use starcoin_types::transaction::{RawUserTransaction, SignedUserTransaction}; use starcoin_wallet_api::{WalletAccount, WalletResult}; #[derive(Debug, Clone)] pub enum WalletRequest { CreateAccount(String), GetDefaultAccount(), GetAccounts(), GetAccount(AccountAddress), SignTxn { txn: Box<RawUserTransaction>, signer: AccountAddress, }, UnlockAccount(AccountAddress, String, Duration), ImportAccount { address: AccountAddress, private_key: Vec<u8>, password: String, }, ExportAccount { address: AccountAddress, password: String, }, } impl Message for WalletRequest { type Result = WalletResult<WalletResponse>; } #[derive(Debug, Clone)] pub enum WalletResponse { WalletAccount(Box<WalletAccount>), WalletAccountOption(Box<Option<WalletAccount>>), AccountList(Vec<WalletAccount>), SignedTxn(Box<SignedUserTransaction>), UnlockAccountResponse, ExportAccountResponse(Vec<u8>), None, }
use core::sync::atomic::{AtomicPtr, Ordering};

use core::fmt::{self, Debug, Formatter};

use crate::uses::*;

// a non locking, synchronous vec
//
// Writers clone the whole backing `Vec<*const T>`, mutate the clone, and
// publish it with a CAS loop; readers simply dereference the current pointer.
//
// NOTE(review): a reader inside `read`/`get` may still be dereferencing the
// old backing `Vec` at the instant a successful writer frees it in `write`
// below — that looks like a use-after-free window unless callers guarantee
// external synchronization. Confirm the intended usage contract.
pub struct NLVec<T>(AtomicPtr<Vec<*const T>>);

impl<T> NLVec<T> {
    /// Creates an empty `NLVec`; the backing pointer vec lives on the heap.
    pub fn new() -> Self {
        let vec: Vec<*const T> = Vec::new();
        let ptr = to_heap(vec);
        NLVec(AtomicPtr::new(ptr))
    }

    /// Number of elements in the current snapshot.
    pub fn len(&self) -> usize {
        self.read(|vec| vec.len())
    }

    pub fn is_empty(&self) -> bool {
        self.read(|vec| vec.is_empty())
    }

    /// Borrow the element at `index` from the current snapshot, if any.
    pub fn get(&self, index: usize) -> Option<&T> {
        unsafe {
            self.read(|vec| vec.get(index).map(|ref_to_ptr| *ref_to_ptr))
                .map(|ptr| ptr.as_ref().unwrap())
        }
    }

    /// Heap-allocates `element` and inserts its pointer at `index`.
    pub fn insert(&self, index: usize, element: T) {
        let ptr = to_heap(element);
        self.write(|vec| vec.insert(index, ptr));
    }

    /// Heap-allocates `element` and appends its pointer.
    pub fn push(&self, element: T) {
        let ptr = to_heap(element);
        self.write(|vec| vec.push(ptr));
    }

    /// Removes the element at `index`, reclaiming ownership from the heap.
    pub fn remove(&self, index: usize) -> T {
        unsafe { from_heap(self.write(|vec| vec.remove(index))) }
    }

    /// Removes and returns the last element, if any.
    pub fn pop(&self) -> Option<T> {
        unsafe { self.write(|vec| vec.pop()).map(|ptr| from_heap(ptr)) }
    }

    /// Runs `f` against the current snapshot of the backing vec.
    pub fn read<F, V>(&self, f: F) -> V
    where
        F: FnOnce(&Vec<*const T>) -> V,
    {
        unsafe { f(self.0.load(Ordering::Acquire).as_ref().unwrap()) }
    }

    /// Clones the backing vec, applies `f`, and CASes the new vec in.
    ///
    /// `f` is `FnMut` because it may be re-run on CAS failure — it must be
    /// idempotent with respect to external side effects.
    pub fn write<F, V>(&self, mut f: F) -> V
    where
        F: FnMut(&mut Vec<*const T>) -> V,
    {
        loop {
            let ptr = self.0.load(Ordering::Acquire);
            let mut vec = unsafe { ptr.as_ref().unwrap().clone() };
            let out = f(&mut vec);
            let new_ptr = to_heap(vec);
            let result = self
                .0
                .compare_exchange(ptr, new_ptr, Ordering::AcqRel, Ordering::Acquire);
            match result {
                Ok(old_ptr) => {
                    unsafe {
                        // drop the superseded backing vec (pointer vec only;
                        // the element allocations are still referenced)
                        drop(Box::from_raw(old_ptr));
                    }
                    return out;
                },
                // lost the race: free our candidate vec and retry
                Err(_) => unsafe { drop(Box::from_raw(new_ptr)) },
            }
        }
    }
}

impl<T: Debug> Debug for NLVec<T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        self.read(|vec| {
            write!(f, "[").unwrap();
            for (i, ptr) in vec.iter().enumerate() {
                let elem = unsafe { ptr.as_ref().unwrap() };
                write!(f, "{:?}", elem).unwrap();
                // comma-separate every element except the last
                if i < vec.len() - 1 {
                    write!(f, ", ").unwrap();
                }
            }
            write!(f, "]").unwrap();
        });
        Ok(())
    }
}
use std::rc::Rc; use hyper; use super::configuration::Configuration; pub struct APIClient<C: hyper::client::Connect> { configuration: Rc<Configuration<C>>, ingredients_api: Box<::apis::IngredientsApi>, meal_planning_api: Box<::apis::MealPlanningApi>, menu_items_api: Box<::apis::MenuItemsApi>, misc_api: Box<::apis::MiscApi>, products_api: Box<::apis::ProductsApi>, recipes_api: Box<::apis::RecipesApi>, wine_api: Box<::apis::WineApi>, } impl<C: hyper::client::Connect> APIClient<C> { pub fn new(configuration: Configuration<C>) -> APIClient<C> { let rc = Rc::new(configuration); APIClient { configuration: rc.clone(), ingredients_api: Box::new(::apis::IngredientsApiClient::new(rc.clone())), meal_planning_api: Box::new(::apis::MealPlanningApiClient::new(rc.clone())), menu_items_api: Box::new(::apis::MenuItemsApiClient::new(rc.clone())), misc_api: Box::new(::apis::MiscApiClient::new(rc.clone())), products_api: Box::new(::apis::ProductsApiClient::new(rc.clone())), recipes_api: Box::new(::apis::RecipesApiClient::new(rc.clone())), wine_api: Box::new(::apis::WineApiClient::new(rc.clone())), } } pub fn ingredients_api(&self) -> &::apis::IngredientsApi{ self.ingredients_api.as_ref() } pub fn meal_planning_api(&self) -> &::apis::MealPlanningApi{ self.meal_planning_api.as_ref() } pub fn menu_items_api(&self) -> &::apis::MenuItemsApi{ self.menu_items_api.as_ref() } pub fn misc_api(&self) -> &::apis::MiscApi{ self.misc_api.as_ref() } pub fn products_api(&self) -> &::apis::ProductsApi{ self.products_api.as_ref() } pub fn recipes_api(&self) -> &::apis::RecipesApi{ self.recipes_api.as_ref() } pub fn wine_api(&self) -> &::apis::WineApi{ self.wine_api.as_ref() } }
/*
Primitive Types
Integers: u8, i8, u16, i16, u32, i32, u64, i64, u128, i128, u256, i256
Floats: f32, f64
Boolean: bool
Characters: char
Tuples
Arrays
*/
// Rust is statically typed, yet the compiler usually infers the type from the
// value and its usage, so most bindings below carry no annotation.
use std;

/// Demonstrates the primitive scalar types and prints them all at the end.
pub fn run() {
    let default_int = 1; // inferred as i32, the integer default
    let default_float = 2.5; // inferred as f64, the float default
    let explicit_i64: i64 = 32423234235345234; // annotated explicitly

    // Largest representable values for the two common integer widths.
    println!("Max i32: {}", std::i32::MAX);
    println!("Max i64: {}", std::i64::MAX);

    let flag = true; // boolean literal
    let greater = 5 > 3; // boolean from a comparison

    let letter = 'a'; // plain char
    let smiley = '\u{1F600}'; // any Unicode scalar fits in a char

    println!(
        "{:?}",
        (
            default_int,
            default_float,
            explicit_i64,
            flag,
            greater,
            letter,
            smiley
        )
    );
    println!("-----------------------");
}
mod integration_tests;
mod tests;
extern crate brotli;
extern crate core;
#[macro_use]
extern crate alloc_no_stdlib;
use core::ops;

use brotli::CustomRead;

/// Boxed-slice wrapper so heap allocations can satisfy the
/// `alloc_no_stdlib` slice traits that brotli uses for scratch memory.
pub struct Rebox<T> {
    b: Box<[T]>,
}

impl<T> core::default::Default for Rebox<T> {
    /// An empty boxed slice.
    fn default() -> Self {
        let v: Vec<T> = Vec::new();
        let b = v.into_boxed_slice();
        return Rebox::<T> { b: b };
    }
}

impl<T> ops::Index<usize> for Rebox<T> {
    type Output = T;
    fn index(&self, index: usize) -> &T {
        return &(*self.b)[index];
    }
}

impl<T> ops::IndexMut<usize> for Rebox<T> {
    fn index_mut(&mut self, index: usize) -> &mut T {
        return &mut (*self.b)[index];
    }
}

impl<T> alloc_no_stdlib::SliceWrapper<T> for Rebox<T> {
    fn slice(&self) -> &[T] {
        return &*self.b;
    }
}

impl<T> alloc_no_stdlib::SliceWrapperMut<T> for Rebox<T> {
    fn slice_mut(&mut self) -> &mut [T] {
        return &mut *self.b;
    }
}

/// Allocator backed by the global heap; new cells are filled with
/// `default_value` (safe build) or left uninitialized (`unsafe` feature).
pub struct HeapAllocator<T: core::clone::Clone> {
    pub default_value: T,
}

#[cfg(not(feature="unsafe"))]
impl<T: core::clone::Clone> alloc_no_stdlib::Allocator<T> for HeapAllocator<T> {
    type AllocatedMemory = Rebox<T>;
    /// Allocates `len` cells, each a clone of `default_value`.
    fn alloc_cell(self: &mut HeapAllocator<T>, len: usize) -> Rebox<T> {
        let v: Vec<T> = vec![self.default_value.clone();len];
        let b = v.into_boxed_slice();
        return Rebox::<T> { b: b };
    }
    /// Dropping the `Rebox` frees the memory; nothing else to do.
    fn free_cell(self: &mut HeapAllocator<T>, _data: Rebox<T>) {}
}

#[cfg(feature="unsafe")]
impl<T: core::clone::Clone> alloc_no_stdlib::Allocator<T> for HeapAllocator<T> {
    type AllocatedMemory = Rebox<T>;
    /// Allocates `len` cells without initializing them.
    ///
    /// NOTE(review): `set_len` over uninitialized memory is undefined
    /// behavior for types with validity invariants or `Drop`; this appears
    /// to rely on `T` being plain-old-data (u8/u32/HuffmanCode) — confirm.
    fn alloc_cell(self: &mut HeapAllocator<T>, len: usize) -> Rebox<T> {
        let mut v: Vec<T> = Vec::with_capacity(len);
        unsafe {
            v.set_len(len);
        }
        let b = v.into_boxed_slice();
        return Rebox::<T> { b: b };
    }
    fn free_cell(self: &mut HeapAllocator<T>, _data: Rebox<T>) {}
}

#[allow(unused_imports)]
use alloc_no_stdlib::{Allocator, SliceWrapperMut, SliceWrapper};
//use alloc::{SliceWrapper,SliceWrapperMut, StackAllocator, AllocatedStackMemory, Allocator};
use brotli::{HuffmanCode};

use std::io::{self, Read, Write, ErrorKind, Error};

use std::env;

use std::fs::File;
use std::path::Path;
//declare_stack_allocator_struct!(MemPool, 4096, global);

/// Adapts a borrowed `std::io::Write` to brotli's `CustomWrite` interface.
struct IoWriterWrapper<'a, OutputType: Write + 'a>(&'a mut OutputType);

/// Adapts a borrowed `std::io::Read` to brotli's `CustomRead` interface.
struct IoReaderWrapper<'a, OutputType: Read + 'a>(&'a mut OutputType);

impl<'a, OutputType: Write> brotli::CustomWrite<io::Error> for IoWriterWrapper<'a, OutputType> {
    fn write(self: &mut Self, buf: &[u8]) -> Result<usize, io::Error> {
        // Retry writes interrupted by a signal; propagate any other error.
        loop {
            match self.0.write(buf) {
                Err(e) => {
                    match e.kind() {
                        ErrorKind::Interrupted => continue,
                        _ => return Err(e),
                    }
                }
                Ok(cur_written) => return Ok(cur_written),
            }
        }
    }
}

impl<'a, InputType: Read> brotli::CustomRead<io::Error> for IoReaderWrapper<'a, InputType> {
    fn read(self: &mut Self, buf: &mut [u8]) -> Result<usize, io::Error> {
        // Retry reads interrupted by a signal; propagate any other error.
        loop {
            match self.0.read(buf) {
                Err(e) => {
                    match e.kind() {
                        ErrorKind::Interrupted => continue,
                        _ => return Err(e),
                    }
                }
                Ok(cur_read) => return Ok(cur_read),
            }
        }
    }
}

/// Owning variant of `IoReaderWrapper`, used by `BrotliDecompressor` below.
struct IntoIoReader<OutputType: Read>(OutputType);

impl<InputType: Read> brotli::CustomRead<io::Error> for IntoIoReader<InputType> {
    fn read(self: &mut Self, buf: &mut [u8]) -> Result<usize, io::Error> {
        // Same EINTR-retry policy as IoReaderWrapper::read above.
        loop {
            match self.0.read(buf) {
                Err(e) => {
                    match e.kind() {
                        ErrorKind::Interrupted => continue,
                        _ => return Err(e),
                    }
                }
                Ok(cur_read) => return Ok(cur_read),
            }
        }
    }
}

/// Streams a full brotli decompression from `r` to `w` using two
/// heap-allocated scratch buffers of `buffer_size` bytes each.
pub fn decompress<InputType, OutputType>(r: &mut InputType,
                                         mut w: &mut OutputType,
                                         buffer_size: usize)
                                         -> Result<(), io::Error>
    where InputType: Read,
          OutputType: Write
{
    let mut alloc_u8 = HeapAllocator::<u8> { default_value: 0 };
    let mut input_buffer = alloc_u8.alloc_cell(buffer_size);
    let mut output_buffer = alloc_u8.alloc_cell(buffer_size);
    return brotli::BrotliDecompressCustomIo(&mut IoReaderWrapper::<InputType>(r),
                                            &mut IoWriterWrapper::<OutputType>(w),
                                            input_buffer.slice_mut(),
                                            output_buffer.slice_mut(),
                                            alloc_u8,
                                            HeapAllocator::<u32> { default_value: 0 },
                                            HeapAllocator::<HuffmanCode> {
                                                default_value: HuffmanCode::default(),
                                            },
                                            Error::new(ErrorKind::UnexpectedEof,
                                                       "Unexpected EOF"));
}

// This decompressor is defined unconditionally on whether no-stdlib is defined
// so we can exercise the code in any case
pub struct BrotliDecompressor<R: Read>(brotli::DecompressorCustomIo<io::Error,
                                                                    IntoIoReader<R>,
                                                                    Rebox<u8>,
                                                                    HeapAllocator<u8>,
                                                                    HeapAllocator<u32>,
                                                                    HeapAllocator<HuffmanCode>>);

impl<R: Read> BrotliDecompressor<R> {
    /// Wraps reader `r` in a streaming decompressor with an internal
    /// scratch buffer of `buffer_size` bytes.
    pub fn new(r: R, buffer_size: usize) -> Self {
        let mut alloc_u8 = HeapAllocator::<u8> { default_value: 0 };
        let buffer = alloc_u8.alloc_cell(buffer_size);
        let alloc_u32 = HeapAllocator::<u32> { default_value: 0 };
        let alloc_hc = HeapAllocator::<HuffmanCode> { default_value: HuffmanCode::default() };
        return BrotliDecompressor::<R>(
          brotli::DecompressorCustomIo::<Error,
                                         IntoIoReader<R>,
                                         Rebox<u8>,
                                         HeapAllocator<u8>,
                                         HeapAllocator<u32>,
                                         HeapAllocator<HuffmanCode>>::new(IntoIoReader::<R>(r),
                                                                          buffer,
                                                                          alloc_u8,
                                                                          alloc_u32,
                                                                          alloc_hc,
                                                                          io::Error::new(
                                                                              ErrorKind::InvalidData,
                                                                              "Invalid Data")));
    }
}

impl<R: Read> Read for BrotliDecompressor<R> {
    /// Reads decompressed bytes into `buf`, pulling compressed input as needed.
    fn read(&mut self, mut buf: &mut [u8]) -> Result<usize, Error> {
        return self.0.read(buf);
    }
}

#[cfg(test)]
fn writeln0<OutputType: Write>(strm: &mut OutputType,
                               data: &str)
                               -> core::result::Result<(), io::Error> {
    writeln!(strm, "{:}", data)
}

#[cfg(test)]
fn writeln_time<OutputType: Write>(strm: &mut OutputType,
                                   data: &str,
                                   v0: u64,
                                   v1: u64,
                                   v2: u32)
                                   -> core::result::Result<(), io::Error> {
    writeln!(strm, "{:} {:} {:}.{:09}", v0, data, v1, v2)
}

/// CLI entry point: with file arguments, decompresses each `FILE` into
/// `FILE.original`; with no arguments, decompresses stdin to stdout.
fn main() {
    if env::args_os().len() > 1 {
        let mut first = true;
        for argument in env::args() {
            // skip argv[0] (the program name)
            if first {
                first = false;
                continue;
            }
            let mut input = match File::open(&Path::new(&argument)) {
                Err(why) => panic!("couldn't open {}: {:?}", argument, why),
                Ok(file) => file,
            };
            let oa = argument + ".original";
            let mut output = match File::create(&Path::new(&oa)) {
                Err(why) => panic!("couldn't open file for writing: {:} {:?}", oa, why),
                Ok(file) => file,
            };
            match decompress(&mut input, &mut output, 65536) {
                Ok(_) => {}
                Err(e) => panic!("Error {:?}", e),
            }
            drop(output);
            drop(input);
        }
    } else {
        match decompress(&mut io::stdin(), &mut io::stdout(), 65536) {
            Ok(_) => return,
            Err(e) => panic!("Error {:?}", e),
        }
    }
}
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

/// Generic error payload returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}

/// One page of a test-result file; `next_link` continues the pagination.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TestResultFileResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub data: Option<String>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// Paged list of available REST operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}

/// A single REST operation descriptor.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationInfo>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}

/// Human-readable details of an [`Operation`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationInfo {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
use crate::{ bitcoin::{ hashes::{sha512, Hash, HashEngine, Hmac, HmacEngine}, util::bip32::ExtendedPrivKey, Network, }, party::Proposal, }; use olivia_secp256k1::schnorr_fun::fun::{marker::*, Point, Scalar, G}; pub struct Keychain { seed: [u8; 64], proposal_hmac: HmacEngine<sha512::Hash>, offer_hmac: HmacEngine<sha512::Hash>, } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)] pub struct KeyPair { pub public_key: Point<EvenY>, pub secret_key: Scalar, } impl KeyPair { pub fn from_slice(bytes: &[u8]) -> Option<Self> { let mut secret_key = Scalar::from_slice_mod_order(&bytes[..32]) .expect("is 32 bytes long") .mark::<NonZero>()?; let public_key = Point::<EvenY>::from_scalar_mul(G, &mut secret_key); Some(KeyPair { public_key, secret_key, }) } } impl Keychain { pub fn new(seed: [u8; 64]) -> Self { let proposal_hmac = { let mut hmac = HmacEngine::<sha512::Hash>::new(b"gun-proposal"); hmac.input(&seed[..]); let res = Hmac::from_engine(hmac); HmacEngine::<sha512::Hash>::new(&res[..]) }; let offer_hmac = { let mut hmac = HmacEngine::<sha512::Hash>::new(b"gun-offer"); hmac.input(&seed[..]); let res = Hmac::from_engine(hmac); HmacEngine::<sha512::Hash>::new(&res[..]) }; Self { seed, proposal_hmac, offer_hmac, } } pub fn main_wallet_xprv(&self, network: Network) -> ExtendedPrivKey { ExtendedPrivKey::new_master(network, &self.seed).unwrap() } pub fn get_key_for_proposal(&self, proposal: &Proposal) -> KeyPair { let mut proposal = proposal.clone(); proposal.public_key = crate::placeholder_point(); let mut proposal_hmac = self.proposal_hmac.clone(); let bin = crate::encode::serialize(&proposal); proposal_hmac.input(&bin[..]); let res = Hmac::from_engine(proposal_hmac); let keypair = KeyPair::from_slice(&res[..]).expect("computationally unreachable"); proposal.public_key = keypair.public_key; keypair } pub fn keypair_for_offer(&self, proposal: &Proposal) -> KeyPair { let mut offer_hmac = self.offer_hmac.clone(); let bin = crate::encode::serialize(proposal); 
offer_hmac.input(&bin[..]); let res = Hmac::from_engine(offer_hmac); KeyPair::from_slice(&res[..]).expect("computationally unreachable") } }
use std::future::Future;
use std::time::{Duration, SystemTime};

// use crate::application::APPLICATION;
use crate::prelude::*;

use abscissa_core::tracing::{debug, error, info, warn};
use abscissa_core::{Command, Options, Runnable};

use tendermint::lite::types::Header;

use relayer::chain::tendermint::TendermintChain;
use relayer::chain::Chain;
use relayer::client::Client;
use relayer::config::ChainConfig;
use relayer::store::Store;

/// `start` subcommand: spawns one light client per configured chain, then
/// runs the relayer heartbeat loop on the current thread forever.
#[derive(Command, Debug, Options)]
pub struct StartCmd {}

impl Runnable for StartCmd {
    fn run(&self) {
        let config = app_config().clone();

        // FIXME: This just hangs and never runs the given future
        // abscissa_tokio::run(&APPLICATION, ...).unwrap();

        debug!("launching 'start' command");

        block_on(async {
            // One detached task per chain keeps that chain's headers fresh;
            // the JoinHandle is intentionally dropped (fire-and-forget).
            for chain_config in config.chains {
                info!(chain.id = %chain_config.id, "spawning light client");

                let _handle = tokio::spawn(async move {
                    let client = create_client(chain_config).await;
                    let trusted_state = client.last_trusted_state().unwrap();

                    info!(
                        chain.id = %client.chain().id(),
                        "Spawned new client now at trusted state: {} at height {}",
                        trusted_state.last_header().header().hash(),
                        trusted_state.last_header().header().height(),
                    );

                    update_headers(client).await;
                });
            }

            start_relayer().await
        })
    }
}

/// Relayer heartbeat: logs a liveness message every 3 seconds; never returns.
async fn start_relayer() {
    let mut interval = tokio::time::interval(Duration::from_secs(3));

    loop {
        info!(target: "relayer_cli::relayer", "Relayer is running");

        interval.tick().await;
    }
}

/// Polls `client.update` every 3 seconds, logging each outcome.
/// Errors are logged and retried on the next tick; never returns.
async fn update_headers<C: Chain, S: Store<C>>(mut client: Client<C, S>) {
    debug!(chain.id = %client.chain().id(), "updating headers");

    let mut interval = tokio::time::interval(Duration::from_secs(3));

    loop {
        let result = client.update(SystemTime::now()).await;

        match result {
            Ok(Some(trusted_state)) => info!(
                chain.id = %client.chain().id(),
                "Updated to trusted state: {} at height {}",
                trusted_state.header().hash(),
                trusted_state.header().height()
            ),

            Ok(None) => {
                warn!(chain.id = %client.chain().id(), "Ignoring update to a previous state")
            }

            Err(err) => {
                error!(chain.id = %client.chain().id(), "Error when updating headers: {}", err)
            }
        }

        interval.tick().await;
    }
}

/// Builds a light client for `chain_config` backed by an on-disk store
/// (`store_<chain-id>.db`), initialized from the stored trust options.
async fn create_client(
    chain_config: ChainConfig,
) -> Client<TendermintChain, impl Store<TendermintChain>> {
    let chain = TendermintChain::from_config(chain_config).unwrap();

    let store = relayer::store::persistent(format!("store_{}.db", chain.id())).unwrap(); //FIXME: unwrap
    let trust_options = store.get_trust_options().unwrap(); // FIXME: unwrap

    Client::new(chain, store, trust_options).await.unwrap()
}

/// Blocks the current thread on `future` using a fresh single-threaded
/// ("basic scheduler") tokio runtime.
fn block_on<F: Future>(future: F) -> F::Output {
    tokio::runtime::Builder::new()
        .basic_scheduler()
        .enable_all()
        .build()
        .unwrap()
        .block_on(future)
}
//! Simulation parameters. use random::Seed; #[derive(Clone, Debug)] pub struct Params { /// Seed for the random number generator. pub seed: Seed, /// Number of simulation iterations. pub num_iterations: u64, /// Number of nodes to form a complete group. pub group_size: usize, /// Age of newly joined node. pub init_age: u8, /// Age at which a node becomes adult. pub adult_age: u8, /// Maximum number of nodes a section can have before the simulation fails. pub max_section_size: usize, /// Maximum number of reocation attempts after a `Live` event. pub max_relocation_attempts: usize, /// Maximum number of infants allowed in one section. pub max_infants_per_section: usize, /// Print statistics every Nth iteration (supress if 0) pub stats_frequency: u64, /// File to store network structure data. pub file: Option<String>, /// Log veribosity pub verbosity: usize, /// Disable colored output pub disable_colors: bool, } impl Params { /// Quorum size - a simple majority of the group. pub fn quorum(&self) -> usize { self.group_size / 2 + 1 } }
use crate::{common::Solution, reparse}; use itertools::Itertools; use lazy_static::lazy_static; use regex::Regex; lazy_static! { static ref SEAT_PATTERN: Regex = Regex::new(r"(.{7})(.{3})").unwrap(); } struct Ticket { code: String, _row: String, _column: String, } fn _bst_code(code: &String, limits: (u32, u32), mapping: (char, char)) -> u32 { let (mut min, mut max) = limits; let (lower, upper) = mapping; for id in code.chars() { match id { u if u == upper => min = (min + max) / 2 + 1, l if l == lower => max = (min + max) / 2, _ => {} } } max } fn shift_code(code: &String) -> u32 { let mut res = 0; for id in code.bytes() { res <<= 1; if id == b'B' || id == b'R' { res |= 0x1; } } res } // P1: 70us fn _count_seat_id2(seat_code: &Ticket) -> u32 { let row = _bst_code(&seat_code._row, (0, 127), ('F', 'B')); let column = _bst_code(&seat_code._column, (0, 7), ('L', 'R')); row * 8 + column } // P1: 60us fn count_seat_id(seat_code: &Ticket) -> u32 { shift_code(&seat_code.code) } // P2: 110us fn _find_seat2(input: &Vec<Ticket>) -> u32 { input .iter() .map(|x| &x.code) .map(shift_code) .sorted() .tuple_windows() .find(|(before, after)| after - before == 2) .map(|(before, _)| before + 1) .unwrap() } // P2: 65us fn find_seat(input: &Vec<Ticket>) -> u32 { let mut seats = [false; 128 * 8]; input .iter() .map(|x| &x.code) .for_each(|x| seats[shift_code(x) as usize] = true); seats .iter() .enumerate() .skip_while(|(_, &x)| !x) .skip_while(|(_, &x)| x) .next() .unwrap() .0 as u32 } fn part1(input: &InputType) -> String { input.iter().map(count_seat_id).max().unwrap().to_string() } fn part2(input: &InputType) -> String { find_seat(input).to_string() } type InputType = Vec<Ticket>; fn parse_input(raw_input: &[String]) -> InputType { raw_input .iter() .map(|x| { let (_row, _column) = reparse!(x, SEAT_PATTERN, String, String).unwrap(); Ticket { code: x.to_string(), _row, _column, } }) .collect() } pub fn solve(raw_input: &[String]) -> Solution { let input = parse_input(raw_input); use 
std::time::Instant; let now = Instant::now(); let solution = (part1(&input), part2(&input)); let elapsed = now.elapsed(); (solution, elapsed) }
use crate::convertor::img_to_txt;
use crate::convertor::txt_to_img;
use crate::opts::Opts;
use image::io::Reader as ImageReader;
use structopt::StructOpt;

/// CLI entry point: if the input file decodes as an image, render it to text;
/// otherwise treat it as text and render it to an image.
pub fn run() {
    let opts: Opts = Opts::from_args();

    // Borrow the path instead of cloning it — `open` only needs `AsRef<Path>`.
    // The panic message is now built lazily (only on the error path) and uses
    // `display()` so a non-UTF-8 path cannot itself cause a panic.
    let reader = ImageReader::open(&opts.input).unwrap_or_else(|err| {
        panic!(
            "Can't open input file {}: {}",
            opts.input.display(),
            err
        )
    });

    match reader.decode() {
        Ok(img) => img_to_txt::img_to_txt(img, opts.output),
        // Not a decodable image: assume text input and go the other way.
        Err(_) => txt_to_img::txt_to_img(opts.input, opts.output),
    }
}
extern crate web3; use web3::futures::Future; use web3::types::BlockId; use web3::types::BlockNumber; use web3::types::U64; use structopt::StructOpt; use std::thread; use std::time::Duration; #[derive(StructOpt)] struct Arguments { start: u32, end: u32, node: String, } fn main() { let _args = Arguments::from_args(); let node = _args.node; let (_eloop, http) = web3::transports::Http::new(&node).unwrap(); let web3 = web3::Web3::new(http); let accounts = web3.eth().accounts().wait().unwrap(); println!("Account is : {:?}", accounts); let mut blk = _args.start; let mut total = 0 as u32; loop { thread::sleep(Duration::from_secs(1)); println!("block number is {}", blk); let index = blk.to_string(); let blk_index = U64::from_dec_str(index.as_str()).unwrap(); let counts = web3.eth().block_transaction_count(BlockId::Number(BlockNumber::Number(blk_index))).wait().unwrap(); let txs = match counts { Some(value) => value.as_u32(), // This prints "/root", if you run this in Rust playground None => 0 as u32, }; total = total + txs; println!("total is {}", total); if blk > _args.end { break; } blk += 1; } }
use crate::hal::{
    clocks::LFCLK_FREQ,
    pac::WDT,
    wdt::{self, Parts, Watchdog as HalWatchdog, WatchdogHandle},
};
use rtic::time::duration::Milliseconds;
use rtt_target::rprintln;

/// Wrapper around the hardware watchdog, holding the single pet handle.
pub struct Watchdog {
    handle: WatchdogHandle<wdt::handles::Hdl0>,
}

impl Watchdog {
    /// Watchdog timeout in LFCLK (32.768 kHz) ticks — 3 seconds.
    pub const PERIOD: u32 = 3 * LFCLK_FREQ;
    /// Suggested interval between `pet()` calls, well inside `PERIOD`.
    pub const PER_INTERVAL_MS: Milliseconds<u32> = Milliseconds(500);

    /// Takes the WDT peripheral and returns an armed, already-petted watchdog.
    pub fn new(wdt: WDT) -> Self {
        // In case the watchdog is already running, just spin and let it expire, since
        // we can't configure it anyway. This usually happens when we first program
        // the device and the watchdog was previously active
        let (handle,) = match HalWatchdog::try_new(wdt) {
            Ok(mut watchdog) => {
                // Arm with Self::PERIOD ticks — 3 seconds of 32.768 kHz LFCLK.
                // (An earlier comment said 5 seconds; PERIOD is 3 * LFCLK_FREQ.)
                watchdog.set_lfosc_ticks(Self::PERIOD);

                let Parts {
                    watchdog: _watchdog,
                    mut handles,
                } = watchdog.activate::<wdt::count::One>();
                handles.0.pet();
                handles
            }
            // Already running: try to recover a handle with a matching config.
            Err(wdt) => match HalWatchdog::try_recover::<wdt::count::One>(wdt) {
                Ok(Parts { mut handles, .. }) => {
                    rprintln!("Watchdog already active, recovering");
                    handles.0.pet();
                    handles
                }
                Err(_wdt) => {
                    rprintln!("Watchdog already set, can't recovery, resetting");
                    // Spin until the incompatible watchdog resets the chip.
                    loop {
                        cortex_m::asm::nop();
                    }
                }
            },
        };

        Watchdog { handle }
    }

    /// Feed the watchdog; must be called more often than once per `PERIOD`.
    pub fn pet(&mut self) {
        self.handle.pet();
    }
}
extern crate subprocess;
extern crate log;

use log::info;
use std::io::Read;
use subprocess::{Exec, Redirection};

/// Wrapper function for using ropebwt2 with collection of strings.
/// This is primarily for performing easy tests within the Rust environment.
/// For production, we recommend running the `ropebwt2` command separately.
/// **IMPORTANT**: This requires that `ropebwt2` is installed locally and on the `PATH`.
/// # Arguments
/// * `data` - a string instance where individual strings are separated by newline characters ('\n')
/// # Examples
/// ```rust
/// use fmlrc::ropebwt2_util::create_bwt_from_strings;
/// let data: Vec<&str> = vec!["CCGT", "ACG"];
/// assert_eq!(create_bwt_from_strings(&data).unwrap(), "GT$$ACCCG\n".to_string());
/// ```
pub fn create_bwt_from_strings(data: &[&str]) -> Result<String, Box<dyn std::error::Error>> {
    info!("Concatenating string vector...");
    let join_data = data.join("\n");
    info!("Running BWT construction pipeline from strings...");
    // Pipeline: sort | tr NT TN | ropebwt2 -LR | tr NT TN.
    // NOTE(review): the tr calls swap N<->T around ropebwt2, presumably to
    // adjust symbol ordering for the downstream consumer — confirm.
    let out = (
        Exec::cmd("sort") |
        Exec::cmd("tr").arg("NT").arg("TN") |
        Exec::cmd("ropebwt2").arg("-LR") |
        Exec::cmd("tr").arg("NT").arg("TN")
    ).stdin(join_data.as_str()).stdout(Redirection::Pipe).capture()?.stdout_str();
    info!("BWT construction from strings complete.");
    Ok(out)
}

/// Wrapper function for using ropebwt2 with a collection of gzipped FASTQ files.
/// Returns a streamable result from the ropebwt2 command.
/// # Argument
/// * `fastqs` - a vector of fastq filenames
pub fn stream_bwt_from_fastqs(fastqs: &[&str]) -> Result<Box<dyn Read>, Box<dyn std::error::Error>> {
    info!("Streaming BWT construction pipeline from gzipped FASTQ files:\n{:?}", fastqs);
    // gunzip -c <every fastq file> ...
    let mut initial_command = Exec::cmd("gunzip").arg("-c");
    for fq in fastqs {
        initial_command = initial_command.arg(fq);
    }
    // ... | awk 'NR % 4 == 2' (keep only the sequence line of each record)
    // | sort | tr NT TN | ropebwt2 -LR | tr NT TN, streamed to the caller.
    let out = (
        initial_command |
        Exec::cmd("awk").arg("NR % 4 == 2") |
        Exec::cmd("sort") |
        Exec::cmd("tr").arg("NT").arg("TN") |
        Exec::cmd("ropebwt2").arg("-LR") |
        Exec::cmd("tr").arg("NT").arg("TN")
    ).stdout(Redirection::Pipe).stream_stdout()?;
    Ok(Box::new(out))
}

/*
TODO: seems like we need a function that will take a list of compressed FASTQ files and write the BWT out
will also need a way to test it; looks like a combination of tempfile and flate2 to create gzipped files,
then rust-bio to write fastq records easily
Follow up links:
https://docs.rs/tempfile/3.1.0/tempfile/struct.Builder.html
https://docs.rs/bio/0.31.0/bio/io/fastq/index.html
https://docs.rs/flate2/1.0.14/flate2/struct.GzBuilder.html
https://rust-cli.github.io/book/tutorial/testing.html
*/

#[cfg(test)]
mod tests {
    use super::*;
    use flate2::{Compression, GzBuilder};
    use std::io::Write;
    use tempfile::{Builder, NamedTempFile};

    //these tests are marked as ignore because if someone doesn't have ropebwt2 installed (or any of the command really),
    // then these will obviously fail inexplicable
    #[test]
    #[ignore]
    fn test_from_strings() {
        let data: Vec<&str> = vec!["CCGT", "ACG", "N"];
        assert_eq!(create_bwt_from_strings(&data).unwrap(), "GTN$$ACCC$G\n".to_string());
    }

    /// Writes each string as one FASTQ record into a fresh gzipped temp file.
    fn write_strings_to_fqgz(data: Vec<&str>) -> NamedTempFile {
        let file: NamedTempFile = Builder::new().prefix("temp_data_").suffix(".fq.gz").tempfile().unwrap();
        let mut gz = GzBuilder::new().write(file, Compression::default());
        let mut i: u64 = 0;
        for s in data {
            writeln!(gz, "@seq_{}\n{}\n+\n{}", i, s, "F".repeat(s.len())).unwrap();
            i += 1;
        }
        //have to keep the file handle or everything blows up
        gz.finish().unwrap()
    }

    #[test]
    #[ignore]
    fn test_from_fastqs() {
        //create a temporary file and get the filename out
        let data: Vec<&str> = vec!["CCGT", "N"];
        let file = write_strings_to_fqgz(data);
        let data2: Vec<&str> = vec!["ACG"];
        let file2 = write_strings_to_fqgz(data2);
        let fastq_filenames: Vec<&str> = vec![
            &file.path().to_str().unwrap(),
            &file2.path().to_str().unwrap()
        ];

        //now read the string in and verify correctness
        let mut bwt_stream = stream_bwt_from_fastqs(&fastq_filenames).unwrap();
        let mut buffer: String = String::new();
        let num_bytes = bwt_stream.read_to_string(&mut buffer).unwrap();
        assert_eq!(num_bytes, 12);
        assert_eq!(buffer, "GTN$$ACCC$G\n".to_string());
    }
}
use std::{ cmp::Ordering, collections::{BinaryHeap, HashMap}, fmt, usize, }; #[derive(Eq, PartialEq)] enum Tile { Wall, Floor, } #[derive(Eq, PartialEq, Clone, Copy)] enum Allegiance { Elf, Goblin, } #[derive(Eq, PartialEq, Clone, Copy)] struct Unit { allegiance: Allegiance, pos: (usize, usize), hp: usize, } impl Unit { fn new(allegiance: Allegiance, pos: (usize, usize)) -> Self { Unit { allegiance, pos, hp: 300, } } } struct Board { tiles: Vec<Vec<Tile>>, units: Vec<Unit>, rounds_completed: usize, } impl Board { fn new(input: &str) -> Self { let mut units = Vec::new(); let tiles = input .lines() .enumerate() .map(|(y, line)| { line.chars() .enumerate() .map(|(x, c)| match c { '#' => Tile::Wall, '.' => Tile::Floor, 'E' => { units.push(Unit::new(Allegiance::Elf, (x, y))); Tile::Floor } 'G' => { units.push(Unit::new(Allegiance::Goblin, (x, y))); Tile::Floor } _ => unreachable!(), }) .collect() }) .collect(); Board { tiles, units, rounds_completed: 0, } } fn is_open(&self, (x, y): (usize, usize)) -> bool { self.tiles[y][x] == Tile::Floor && self.units.iter().all(|u| u.pos != (x, y)) } fn adjacent_tiles(&self, (x, y): (usize, usize)) -> Vec<(usize, usize)> { let mut candidates = vec![(x + 1, y), (x, y + 1)]; if x > 0 { candidates.push((x - 1, y)); } if y > 0 { candidates.push((x, y - 1)); } candidates.sort_by_key(|&(x, y)| (y, x)); candidates } fn adjacent_open_tiles(&self, pos: (usize, usize)) -> Vec<(usize, usize)> { let mut candidates = self.adjacent_tiles(pos); candidates.retain(|&pos| self.is_open(pos)); candidates } fn round(&mut self) -> bool { for unit in &self.units { if let Some(enemy) = self .adjacent_tiles(unit.pos) .iter() .filter_map(|&pos| { self.units .iter() .find(|u| u.pos == pos && u.allegiance != unit.allegiance) }) .next() { // attack enemy } else { let (_, (_, next_pos)) = self .units .iter() .filter(|u| u.allegiance != unit.allegiance) .flat_map(|u| self.adjacent_open_tiles(u.pos)) .filter_map(|pos| self.shortest_path(unit.pos, pos).map(|sp| 
(pos, sp))) .min_by_key(|&((x, y), (len, _))| (len, y, x)) .unwrap(); // move to new position } } self.rounds_completed += 1; true } fn shortest_path( &self, start: (usize, usize), goal: (usize, usize), ) -> Option<(usize, (usize, usize))> { #[derive(Copy, Clone, Eq, PartialEq)] struct State { dist: usize, pos: (usize, usize), first_step: Option<(usize, usize)>, } impl Ord for State { fn cmp(&self, other: &State) -> Ordering { other .dist .cmp(&self.dist) .then_with(|| self.pos.cmp(&other.pos)) .then_with(|| flip(other.pos).cmp(&flip(self.pos))) } } impl PartialOrd for State { fn partial_cmp(&self, other: &State) -> Option<Ordering> { Some(self.cmp(other)) } } let mut distances = HashMap::new(); let mut heap = BinaryHeap::new(); distances.insert(start, 0); heap.push(State { dist: 0, pos: start, first_step: None, }); while let Some(State { dist, pos, first_step, }) = heap.pop() { if pos == goal { return Some((dist, first_step.unwrap())); } if dist > *distances.entry(pos).or_insert(usize::MAX) { continue; } for next_step in self.adjacent_open_tiles(pos) { let next = State { dist: dist + 1, pos: next_step, first_step: first_step.or(Some(next_step)), }; if next.dist < *distances.entry(pos).or_insert(usize::MAX) { heap.push(next); distances.insert(next.pos, next.dist); } } } None } fn outcome(&self) -> usize { self.rounds_completed * self.units.iter().map(|u| u.hp).sum::<usize>() } } fn flip((x, y): (usize, usize)) -> (usize, usize) { (y, x) } impl fmt::Display for Board { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for (y, row) in self.tiles.iter().enumerate() { for (x, tile) in row.iter().enumerate() { if let Some(unit) = self.units.iter().find(|u| u.pos == (x, y)) { match unit.allegiance { Allegiance::Elf => 'E'.fmt(f)?, Allegiance::Goblin => 'G'.fmt(f)?, } } else { match tile { Tile::Wall => '#'.fmt(f)?, Tile::Floor => '.'.fmt(f)?, } }; } '\n'.fmt(f)?; } Ok(()) } } fn main() { let mut board = Board::new(include_str!("input.txt")); while board.round() { 
println!("{}", board); } println!("{}", board.outcome()); }
use crate::core::Client; use crate::ContinuationToken; use crate::{ entity_path, get_batch_mime, Batch, MetadataDetail, PaginatedResponse, TableClient, TableEntity, }; use azure_core::errors::{check_status_extract_body, AzureError}; use futures::stream::Stream; use hyper::{header, Method, StatusCode}; use serde::{de::DeserializeOwned, Serialize}; use std::convert::TryFrom; use std::convert::TryInto; use url::Position; /// Represents a table in the Microsoft Azure Table service. #[derive(Clone)] pub struct CloudTable<C> where C: Client, { client: TableClient<C>, table_name: String, } impl<C> CloudTable<C> where C: Client, { /// Creates an CloadTable using the specified client and table name pub fn new<T: Into<String>>(client: TableClient<C>, table: T) -> Self { CloudTable { client, table_name: table.into(), } } /// Creates the table in the storage service with default request options. pub async fn create(&self) -> Result<(), AzureError> { self.client.create_table(&self.table_name).await } /// Creates the table in the storage service using default request options if it does not already exist. 
pub async fn create_if_not_exists(&self) -> Result<(), AzureError> { self.create().await.or_else(|err| match err { AzureError::UnexpectedHTTPResult(e) if e.status_code() == 409 => Ok(()), e => Err(e), }) } pub async fn get<T>( &self, partition_key: &str, row_key: &str, etag: Option<&str>, ) -> Result<Option<TableEntity<T>>, AzureError> where T: DeserializeOwned, { let path = &entity_path(&self.table_name, partition_key, row_key); let future_response = self.client.request_with_default_header( path, &Method::GET, None, MetadataDetail::None, // etag is provided through header, no extra meta info is required &|mut request| { if let Some(etag) = etag { request = request.header(header::IF_MATCH, etag); } request }, )?; let (headers, body) = match future_response .check_status_extract_headers_and_body(StatusCode::OK) .await { Err(AzureError::UnexpectedHTTPResult(e)) if e.status_code() == 404 => return Ok(None), x => x, }?; let entity = TableEntity::try_from((&headers, &body as &[u8]))?; Ok(Some(entity)) } /// Insert a new entity into the table. If entity already exists, the operation fails. 
/// See https://docs.microsoft.com/en-us/rest/api/storageservices/insert-entity pub async fn insert<T>( &self, partition_key: &str, row_key: &str, payload: T, ) -> Result<TableEntity<T>, AzureError> where T: Serialize + DeserializeOwned, { let entity: TableEntity<T> = TableEntity { partition_key: partition_key.to_owned(), row_key: row_key.to_owned(), etag: None, timestamp: None, payload, }; let obj_ser = serde_json::to_string(&entity)?.to_owned(); let future_response = self.client.request_with_default_header( &self.table_name, &Method::POST, Some(&obj_ser), MetadataDetail::None, &|req| req, )?; let (headers, body) = future_response .check_status_extract_headers_and_body(StatusCode::CREATED) .await?; let entity = TableEntity::try_from((&headers, &body as &[u8]))?; Ok(entity) } pub async fn insert_entity<T>( &self, entity: TableEntity<T>, ) -> Result<TableEntity<T>, AzureError> where T: Serialize + DeserializeOwned, { self.insert(&entity.partition_key, &entity.row_key, entity.payload) .await } /// Insert or updates an entity. Even if the entity is already present the operation succeeds and the /// entity is replaced. 
/// See https://docs.microsoft.com/en-us/rest/api/storageservices/insert-or-replace-entity pub async fn insert_or_update<T>( &self, partition_key: &str, row_key: &str, payload: T, ) -> Result<TableEntity<T>, AzureError> where T: Serialize + DeserializeOwned + std::fmt::Debug, { let mut entity: TableEntity<T> = TableEntity { partition_key: partition_key.to_owned(), row_key: row_key.to_owned(), etag: None, timestamp: None, payload, }; let obj_ser = serde_json::to_string(&entity)?.to_owned(); let path = &entity_path(&self.table_name, &entity.partition_key, &entity.row_key); let future_response = self.client.request_with_default_header( &path, &Method::PUT, Some(&obj_ser), MetadataDetail::None, &|req| req, )?; let (headers, _body) = future_response .check_status_extract_headers_and_body(StatusCode::NO_CONTENT) .await?; // only header values are returned in the response, thus timestamp cannot be extracted without // an explicit query entity.etag = match headers.get(header::ETAG) { Some(etag) => Some(etag.to_str()?.to_owned()), None => None, }; Ok(entity) } pub async fn insert_or_update_entity<T>( &self, entity: TableEntity<T>, ) -> Result<TableEntity<T>, AzureError> where T: Serialize + DeserializeOwned + std::fmt::Debug, { self.insert_or_update(&entity.partition_key, &entity.row_key, entity.payload) .await } /// Update an existing entity. 
/// See https://docs.microsoft.com/en-us/rest/api/storageservices/update-entity2 pub async fn update_entity<T>( &self, mut entity: TableEntity<T>, ) -> Result<TableEntity<T>, AzureError> where T: Serialize + DeserializeOwned, { let obj_ser = serde_json::to_string(&entity)?.to_owned(); let path = &entity_path(&self.table_name, &entity.partition_key, &entity.row_key); let etag = entity.etag; let future_response = self.client.request_with_default_header( path, &Method::PUT, Some(&obj_ser), MetadataDetail::None, &|mut request| { if let Some(etag) = &etag { request = request.header(header::IF_MATCH, etag); } request }, )?; let (headers, _body) = future_response .check_status_extract_headers_and_body(StatusCode::NO_CONTENT) .await?; // only header values are returned in the response, thus timestamp cannot be extracted without // an explicit query entity.etag = match headers.get(header::ETAG) { Some(etag) => Some(etag.to_str()?.to_owned()), None => None, }; // another option is to extract timestamp from etag entity.timestamp = None; // if there is no up to date timestamp, clear the old Ok(entity) } pub async fn delete( &self, partition_key: &str, row_key: &str, etag: Option<&str>, ) -> Result<(), AzureError> { let path = &entity_path(&self.table_name, partition_key, row_key); let etag = etag.unwrap_or("*"); let future_response = self .client .request_with_default_header( path, &Method::DELETE, None, MetadataDetail::None, &|request| request.header(header::IF_MATCH, etag), )? 
.response_future; check_status_extract_body(future_response, StatusCode::NO_CONTENT).await?; Ok(()) } pub async fn delete_entity<'a, T>(&self, entity: TableEntity<T>) -> Result<(), AzureError> { self.delete( &entity.partition_key, &entity.row_key, entity.etag.as_deref(), ) .await } pub async fn begin_get_all<T>(&self) -> Result<PaginatedResponse<T>, AzureError> where T: DeserializeOwned, { log::debug!("begin_get_all()"); self.begin_get_request(None).await } pub async fn begin_query<T>(&self, query: &str) -> Result<PaginatedResponse<T>, AzureError> where T: DeserializeOwned, { log::debug!("begin_query(query = {:?})", query); self.begin_get_request(Some(query)).await } async fn begin_get_request<T>( &self, query: Option<&str>, ) -> Result<PaginatedResponse<T>, AzureError> where T: DeserializeOwned, { log::debug!("begin_get_request(query = {:?})", query); let mut path = self.table_name.to_owned(); if let Some(query) = query { path.push_str(&format!("?{}", query)); } let perform_request_response = self.client.request_with_default_header( path.as_str(), &Method::GET, None, MetadataDetail::Full, // etag is provided through metadata only &|req| req, )?; let url = perform_request_response.url.clone(); let (headers, body) = perform_request_response .check_status_extract_headers_and_body(StatusCode::OK) .await?; Ok((url, &headers, &body).try_into()?) } pub async fn continue_execution<T>( &self, continuation_token: ContinuationToken, ) -> Result<PaginatedResponse<T>, AzureError> where T: DeserializeOwned, { log::debug!( "continue_execution(continuation_token = {:?})", continuation_token ); let path = &continuation_token.new_url[Position::BeforePath..][1..]; let future_response = self.client.request_with_default_header( path, &Method::GET, None, MetadataDetail::Full, // etag is provided through metadata only &|req| req, )?; let (headers, body) = future_response .check_status_extract_headers_and_body(StatusCode::OK) .await?; Ok((continuation_token, &headers, &body).try_into()?) 
} pub fn stream_get_all<'a, T>( &'a self, ) -> impl Stream<Item = Result<PaginatedResponse<T>, AzureError>> + 'a where T: Serialize + DeserializeOwned + 'a, { futures::stream::unfold( Some(States::Init), move |state: Option<States>| async move { log::debug!("state == {:?}", state); let response = match state { Some(States::Init) => self.begin_get_all().await, Some(States::Continuation(continuation_token)) => { self.continue_execution(continuation_token).await } None => return None, }; let response = match response { Ok(response) => response, Err(err) => return Some((Err(err), None)), }; let continuation_token = response .continuation_token .clone() .map(States::Continuation); Some((Ok(response), continuation_token)) }, ) } pub fn stream_query<'a, T>( &'a self, query: &'a str, ) -> impl Stream<Item = Result<PaginatedResponse<T>, AzureError>> + 'a where T: Serialize + DeserializeOwned + 'a, { futures::stream::unfold( Some(States::Init), move |state: Option<States>| async move { log::debug!("state == {:?}", state); let response = match state { Some(States::Init) => self.begin_query(query).await, Some(States::Continuation(continuation_token)) => { self.continue_execution(continuation_token).await } None => return None, }; let response = match response { Ok(response) => response, Err(err) => return Some((Err(err), None)), }; let continuation_token = response .continuation_token .clone() .map(States::Continuation); Some((Ok(response), continuation_token)) }, ) } pub async fn execute_batch(&self, batch: Batch) -> Result<(), AzureError> { let payload = batch.into_payload(self.client.get_uri_prefix().as_str(), &self.table_name); let future_response = self .client .request("$batch", &Method::POST, Some(&payload), &|request| { request.header( header::CONTENT_TYPE, header::HeaderValue::from_static(get_batch_mime()), ) })? .response_future; check_status_extract_body(future_response, StatusCode::ACCEPTED).await?; // TODO deal with body response, handle batch failure. 
// let ref body = get_response_body(&mut response)?; // info!("{}", body); Ok(()) } } #[derive(Debug, Clone)] enum States { Init, Continuation(ContinuationToken), }
use crate::SerdeColor;

/// Styling attributes for one theme element: a colour plus italic/bold
/// font flags.
#[derive(Serialize, Deserialize, Clone, PartialEq, Debug)]
pub struct ThemeConfig {
    color: SerdeColor,
    italic: bool,
    bold: bool,
}

impl ThemeConfig {
    /// Bundles the given colour and font flags into a `ThemeConfig`.
    pub fn new(color: SerdeColor, italic: bool, bold: bool) -> Self {
        Self { color, italic, bold }
    }

    /// Borrows the configured colour.
    pub fn color(&self) -> &SerdeColor {
        &self.color
    }

    /// Whether the element is rendered in italics.
    pub fn italic(&self) -> bool {
        self.italic
    }

    /// Whether the element is rendered in bold.
    pub fn bold(&self) -> bool {
        self.bold
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn assert_color() {
        let config = ThemeConfig::new(SerdeColor::new(29, 20, 45, 72), true, false);
        let actual = config.color().clone();
        assert_eq!(actual, SerdeColor::new(29, 20, 45, 72));
    }

    #[test]
    fn assert_italic() {
        let config = ThemeConfig::new(SerdeColor::new(29, 20, 45, 72), true, false);
        assert!(config.italic());
    }

    #[test]
    fn assert_bold() {
        let config = ThemeConfig::new(SerdeColor::new(29, 20, 45, 72), false, true);
        assert!(config.bold());
    }
}
use std::str::FromStr;

use clap::{ App, SubCommand, Arg, ArgMatches };
use futures::Future;
use maddr::MultiAddr;

use util;
use context::Context;

/// Builds the clap definition for the `disconnect` subcommand.
///
/// Accepts one or more multiaddresses, each validated up front by
/// `util::multiaddr_validator` so `run` can assume they parse.
pub fn subcommand() -> App<'static, 'static> {
    SubCommand::with_name("disconnect")
        .about("\
Close connection(s) to the given address(es)\
\n\
\n\
The disconnect is not permanent; if ipfs needs to talk \
to that address later, it will reconnect.")
        .args(&[
            Arg::with_name("address")
                .help("Address(es) of peer(s) to disconnect from")
                .required(true)
                .takes_value(true)
                .multiple(true)
                .validator(util::multiaddr_validator),
        ])
}

/// Executes `disconnect`: issues one swarm-disconnect request per address
/// and prints each result (or a per-address error) as it completes.
pub fn run(context: &mut Context, matches: &ArgMatches) {
    let Context { ref client, ref mut event_loop, .. } = *context;
    // Build one lazy future per address; nothing runs until `run_all`
    // drives them on the event loop below.
    let disconnections = matches.values_of("address")
        .expect("This argument is required")
        .map(|addr| {
            // Safe to expect: the clap validator already accepted the input.
            let addr = MultiAddr::from_str(addr).expect("This is validated");
            client.swarm().disconnect(&addr)
                .map(move |result| {
                    match result {
                        Ok(msgs) => {
                            for msg in msgs {
                                println!("{}", msg);
                            }
                        }
                        Err(msg) => {
                            println!("{}: {}", addr, msg);
                        }
                    }
                })
        });
    util::run_all(event_loop, disconnections);
}
pub(crate) mod command;
mod constants;

use std::path::{Path, PathBuf};

use structopt::clap::AppSettings::*;
use structopt::StructOpt;

/// Implemented by argument types that may carry an explicit
/// configuration-directory override.
pub(crate) trait ConfigPath {
    fn config_path(&self) -> Option<&Path>;
}

/// Implemented by argument types that may carry an explicit target platform.
pub(crate) trait Platform {
    fn platform(&self) -> Option<&str>;
}

use constants::*;

/// Top-level CLI: one variant per `pahkat` subcommand.
#[derive(Debug, StructOpt)]
#[structopt(
    name = "pahkat",
    bin_name = "pahkat",
    about = "The last package manager.",
    global_settings(&[UnifiedHelpMessage, DisableHelpSubcommand]),
    template(MAIN_TEMPLATE),
    long_version(VERSION),
    version_message("Prints version and license information"),
    usage("pahkat <command>")
)]
pub(crate) enum Args {
    #[structopt(template(SUB_TEMPLATE))]
    Init(command::Init),

    #[structopt(template(SUB_TEMPLATE))]
    Download(command::Download),

    #[structopt(template(SUB_TEMPLATE))]
    Install(command::Install),

    #[structopt(template(SUB_TEMPLATE))]
    Uninstall(command::Uninstall),

    #[structopt(template(SUB_TEMPLATE))]
    Status(command::Status),

    #[structopt(template(SUBC_TEMPLATE))]
    Config(command::Config),
}

impl ConfigPath for Args {
    /// Delegates to the selected subcommand's configuration-path override.
    #[inline]
    fn config_path(&self) -> Option<&Path> {
        // Arms kept in enum declaration order for easier auditing.
        match self {
            Args::Init(x) => x.config_path(),
            Args::Download(x) => x.config_path(),
            Args::Install(x) => x.config_path(),
            Args::Uninstall(x) => x.config_path(),
            Args::Status(x) => x.config_path(),
            Args::Config(x) => x.config_path(),
        }
    }
}

impl Platform for Args {
    /// Delegates to the selected subcommand's target-platform override.
    /// `config` has no platform notion, so it always yields `None`.
    #[inline]
    fn platform(&self) -> Option<&str> {
        match self {
            Args::Init(x) => x.platform(),
            Args::Download(x) => x.platform(),
            Args::Install(x) => x.platform(),
            Args::Uninstall(x) => x.platform(),
            Args::Status(x) => x.platform(),
            // Fix: the old `Args::Config(x)` arm bound `x` without using it,
            // triggering an `unused_variables` warning.
            Args::Config(_) => None,
        }
    }
}

/// Options shared by every subcommand.
#[derive(Debug, StructOpt)]
struct GlobalOpts {
    #[structopt(
        short,
        long,
        parse(from_os_str),
        help = "Path to configuration directory [default: TODO]"
    )]
    config_path: Option<PathBuf>,

    // The default shown in `--help` tracks the host OS; the final cfg_attr
    // arm covers every other compilation target.
    #[cfg_attr(
        windows,
        structopt(short = "P", long, help = "Target platform [default: windows]")
    )]
    #[cfg_attr(
        target_os = "macos",
        structopt(short = "P", long, help = "Target platform [default: macos]")
    )]
    #[cfg_attr(
        target_os = "linux",
        structopt(short = "P", long, help = "Target platform [default: linux]")
    )]
    #[cfg_attr(
        not(any(target_os = "linux", target_os = "macos", windows)),
        structopt(short = "P", long, help = "Target platform")
    )]
    platform: Option<String>,

    #[structopt(short = "C", long, help = "Target channel [default: none]")]
    channel: Option<String>,
}
/// Formats each argument as `|expr text|: |Debug value| ` and returns the
/// concatenated `String` — a quick name/value dump for debugging.
#[macro_export]
macro_rules! nv {
    ( $( $x:expr ),* ) => {
        {
            let mut temp_string = String::new();
            $(
                temp_string.push_str(&format!("|{}|: |{:?}| ", stringify!($x), $x));
            )*
            temp_string
        }
    };
}

/// Prints the output of `nv!` for the same arguments via `println!`.
#[macro_export]
macro_rules! pnv {
    ( $( $x:expr ),* ) => {
        {
            println!("{}", nv!( $( $x ),* ));
        }
    };
}

#[test]
fn test_nv() {
    let a = 10;
    println!("{}", nv!(a, a, a));
    pnv!(a, a, a, 1 + 1, "haha")
}

/// Times the wrapped statements, printing elapsed nanoseconds via `dbg!`.
///
/// NOTE: the expansion is deliberately not wrapped in a block, so `let`
/// bindings made inside `time!(...)` stay visible to the caller — see
/// `test_time`, which uses `x` after the macro call.
#[macro_export]
macro_rules! time {
    ($($x:tt)*) => {
        let now = std::time::Instant::now();
        $($x)*
        dbg!(now.elapsed().as_nanos());
    };
}

#[test]
fn test_time() {
    // Imports kept for the commented-out sleep experiments below.
    use std::thread::sleep;
    use std::time::Duration;
    time!(
        let x = 3;
        3 + 4;
        // sleep(Duration::new(2, 0));
        // sleep(Duration::new(2, 0));
    );
    x;
    time!(
        x;
    );
}
pub mod begin; pub mod manager; use crate::parser::ast::ExprKind; use crate::parser::ast::*; pub trait Folder { fn fold(&mut self, ast: Vec<ExprKind>) -> Vec<ExprKind> { ast.into_iter().map(|x| self.visit(x)).collect() } // Whether or not the pass modified the input AST fn modified(&self) -> bool { false } fn visit(&mut self, expr: ExprKind) -> ExprKind { match expr { ExprKind::If(f) => self.visit_if(f), ExprKind::Define(d) => self.visit_define(d), ExprKind::LambdaFunction(l) => self.visit_lambda_function(l), ExprKind::Begin(b) => self.visit_begin(b), ExprKind::Return(r) => self.visit_return(r), ExprKind::Apply(a) => self.visit_apply(a), ExprKind::Panic(p) => self.visit_panic(p), ExprKind::Transduce(t) => self.visit_transduce(t), ExprKind::Read(r) => self.visit_read(r), ExprKind::Execute(e) => self.visit_execute(e), ExprKind::Quote(q) => self.visit_quote(q), ExprKind::Struct(s) => self.visit_struct(s), ExprKind::Macro(m) => self.visit_macro(m), ExprKind::Eval(e) => self.visit_eval(e), ExprKind::Atom(a) => self.visit_atom(a), ExprKind::List(l) => self.visit_list(l), ExprKind::SyntaxRules(s) => self.visit_syntax_rules(s), ExprKind::Set(s) => self.visit_set(s), ExprKind::Require(r) => self.visit_require(r), ExprKind::CallCC(cc) => self.visit_callcc(cc), } } #[inline] fn visit_if(&mut self, mut f: Box<If>) -> ExprKind { f.test_expr = self.visit(f.test_expr); f.then_expr = self.visit(f.then_expr); f.else_expr = self.visit(f.else_expr); ExprKind::If(f) } #[inline] fn visit_define(&mut self, mut define: Box<Define>) -> ExprKind { define.body = self.visit(define.body); ExprKind::Define(define) } #[inline] fn visit_lambda_function(&mut self, mut lambda_function: Box<LambdaFunction>) -> ExprKind { lambda_function.body = self.visit(lambda_function.body); ExprKind::LambdaFunction(lambda_function) } #[inline] fn visit_begin(&mut self, mut begin: Begin) -> ExprKind { begin.exprs = begin.exprs.into_iter().map(|e| self.visit(e)).collect(); ExprKind::Begin(begin) } #[inline] fn 
visit_return(&mut self, mut r: Box<Return>) -> ExprKind { r.expr = self.visit(r.expr); ExprKind::Return(r) } #[inline] fn visit_apply(&mut self, mut apply: Box<Apply>) -> ExprKind { apply.func = self.visit(apply.func); apply.list = self.visit(apply.list); ExprKind::Apply(apply) } #[inline] fn visit_panic(&mut self, mut p: Box<Panic>) -> ExprKind { p.message = self.visit(p.message); ExprKind::Panic(p) } #[inline] fn visit_transduce(&mut self, mut transduce: Box<Transduce>) -> ExprKind { transduce.transducer = self.visit(transduce.transducer); transduce.func = self.visit(transduce.func); transduce.initial_value = self.visit(transduce.initial_value); transduce.iterable = self.visit(transduce.iterable); ExprKind::Transduce(transduce) } #[inline] fn visit_read(&mut self, mut read: Box<Read>) -> ExprKind { read.expr = self.visit(read.expr); ExprKind::Read(read) } #[inline] fn visit_execute(&mut self, mut execute: Box<Execute>) -> ExprKind { execute.transducer = self.visit(execute.transducer); execute.collection = self.visit(execute.collection); execute.output_type = execute.output_type.map(|x| self.visit(x)); ExprKind::Execute(execute) } #[inline] fn visit_quote(&mut self, mut quote: Box<Quote>) -> ExprKind { quote.expr = self.visit(quote.expr); ExprKind::Quote(quote) } #[inline] fn visit_struct(&mut self, s: Box<Struct>) -> ExprKind { ExprKind::Struct(s) } #[inline] fn visit_macro(&mut self, m: Macro) -> ExprKind { ExprKind::Macro(m) } #[inline] fn visit_eval(&mut self, mut e: Box<Eval>) -> ExprKind { e.expr = self.visit(e.expr); ExprKind::Eval(e) } #[inline] fn visit_atom(&mut self, a: Atom) -> ExprKind { ExprKind::Atom(a) } #[inline] fn visit_list(&mut self, mut l: List) -> ExprKind { l.args = l.args.into_iter().map(|e| self.visit(e)).collect(); ExprKind::List(l) } #[inline] fn visit_syntax_rules(&mut self, l: SyntaxRules) -> ExprKind { ExprKind::SyntaxRules(l) } #[inline] fn visit_set(&mut self, mut s: Box<Set>) -> ExprKind { s.variable = self.visit(s.variable); 
s.expr = self.visit(s.expr); ExprKind::Set(s) } #[inline] fn visit_require(&mut self, s: Require) -> ExprKind { ExprKind::Require(s) } #[inline] fn visit_callcc(&mut self, mut cc: Box<CallCC>) -> ExprKind { cc.expr = self.visit(cc.expr); ExprKind::CallCC(cc) } } pub trait VisitorMutUnit { fn visit(&mut self, expr: &ExprKind) { match expr { ExprKind::If(f) => self.visit_if(f), ExprKind::Define(d) => self.visit_define(d), ExprKind::LambdaFunction(l) => self.visit_lambda_function(l), ExprKind::Begin(b) => self.visit_begin(b), ExprKind::Return(r) => self.visit_return(r), ExprKind::Apply(a) => self.visit_apply(a), ExprKind::Panic(p) => self.visit_panic(p), ExprKind::Transduce(t) => self.visit_transduce(t), ExprKind::Read(r) => self.visit_read(r), ExprKind::Execute(e) => self.visit_execute(e), ExprKind::Quote(q) => self.visit_quote(q), ExprKind::Struct(s) => self.visit_struct(s), ExprKind::Macro(m) => self.visit_macro(m), ExprKind::Eval(e) => self.visit_eval(e), ExprKind::Atom(a) => self.visit_atom(a), ExprKind::List(l) => self.visit_list(l), ExprKind::SyntaxRules(s) => self.visit_syntax_rules(s), ExprKind::Set(s) => self.visit_set(s), ExprKind::Require(r) => self.visit_require(r), ExprKind::CallCC(cc) => self.visit_callcc(cc), } } #[inline] fn visit_if(&mut self, f: &If) { self.visit(&f.test_expr); self.visit(&f.then_expr); self.visit(&f.else_expr); } #[inline] fn visit_define(&mut self, define: &Define) { self.visit(&define.body); } #[inline] fn visit_lambda_function(&mut self, lambda_function: &LambdaFunction) { self.visit(&lambda_function.body); } #[inline] fn visit_begin(&mut self, begin: &Begin) { for expr in &begin.exprs { self.visit(expr); } } #[inline] fn visit_return(&mut self, r: &Return) { self.visit(&r.expr); } #[inline] fn visit_apply(&mut self, apply: &Apply) { self.visit(&apply.func); self.visit(&apply.list); } #[inline] fn visit_panic(&mut self, p: &Panic) { self.visit(&p.message); } #[inline] fn visit_transduce(&mut self, transduce: &Transduce) { 
self.visit(&transduce.transducer); self.visit(&transduce.func); self.visit(&transduce.initial_value); self.visit(&transduce.iterable); } #[inline] fn visit_read(&mut self, read: &Read) { self.visit(&read.expr); } #[inline] fn visit_execute(&mut self, execute: &Execute) { self.visit(&execute.transducer); self.visit(&execute.collection); execute.output_type.as_ref().map(|x| self.visit(x)); } #[inline] fn visit_quote(&mut self, quote: &Quote) { self.visit(&quote.expr); } #[inline] fn visit_struct(&mut self, _s: &Struct) {} #[inline] fn visit_macro(&mut self, _m: &Macro) {} #[inline] fn visit_eval(&mut self, e: &Eval) { self.visit(&e.expr); } #[inline] fn visit_atom(&mut self, _a: &Atom) {} #[inline] fn visit_list(&mut self, l: &List) { for expr in &l.args { self.visit(expr); } } #[inline] fn visit_syntax_rules(&mut self, _l: &SyntaxRules) {} #[inline] fn visit_set(&mut self, s: &Set) { self.visit(&s.variable); self.visit(&s.expr); } #[inline] fn visit_require(&mut self, _s: &Require) {} #[inline] fn visit_callcc(&mut self, cc: &CallCC) { self.visit(&cc.expr); } }
fn main() {
    one_param(5);
    two_params(-9, 2);

    let mut x = return_five();
    println!("return_five evaluates to {}", x);

    // Four successive increments, reporting the value after each step.
    for _ in 0..4 {
        x = increment(x);
        println!("now x is {}", x);
    }
}

/// Demonstrates a single-parameter function.
fn one_param(x: i32) {
    println!("one_param called with {}", x);
}

/// Demonstrates a two-parameter function.
fn two_params(x: i32, y: i32) {
    println!("two_params called with ({}, {})", x, y);
}

/// The trailing expression (no `;`, no `return`) is the function's value.
fn return_five() -> i32 {
    5
}

/// Returns its argument plus one.
fn increment(x: i32) -> i32 {
    x + 1
}
//! Internal types for passing data around. Overly verbose //! and not useful to the user, thus not publicly visible. use crate::time::Timestamp; use crate::transfer::TransferKind; use crate::types::*; use crate::Priority; /// Internal representation of a received frame. /// /// This is public so externally-defined SessionManagers can use it. #[derive(Debug)] pub struct InternalRxFrame<'a, C: embedded_time::Clock> { pub timestamp: Timestamp<C>, pub priority: Priority, pub transfer_kind: TransferKind, pub port_id: PortId, pub source_node_id: Option<NodeId>, pub destination_node_id: Option<NodeId>, pub transfer_id: TransferId, pub is_svc: bool, pub start_of_transfer: bool, pub end_of_transfer: bool, pub payload: &'a [u8], } impl<'a, C: embedded_time::Clock> InternalRxFrame<'a, C> { /// Construct internal frame as a message type #[allow(clippy::too_many_arguments)] pub fn as_message( timestamp: Timestamp<C>, priority: Priority, subject_id: PortId, source_node_id: Option<NodeId>, transfer_id: TransferId, start: bool, end: bool, payload: &'a [u8], ) -> Self { Self { timestamp, priority, transfer_kind: TransferKind::Message, port_id: subject_id, source_node_id, destination_node_id: None, transfer_id, is_svc: false, start_of_transfer: start, end_of_transfer: end, payload, } } /// Construct internal frame as a service type #[allow(clippy::too_many_arguments)] pub fn as_service( timestamp: Timestamp<C>, priority: Priority, transfer_kind: TransferKind, service_id: PortId, source_node_id: NodeId, destination_node_id: NodeId, transfer_id: TransferId, start: bool, end: bool, payload: &'a [u8], ) -> Self { Self { timestamp, priority, transfer_kind, port_id: service_id, source_node_id: Some(source_node_id), destination_node_id: Some(destination_node_id), transfer_id, is_svc: true, start_of_transfer: start, end_of_transfer: end, payload, } } }
use hilbert_qexp::elements::{div_mut, HmfGen}; use hilbert_qexp::bignum::BigNumber; use parallel_wt::*; use flint::fmpq::Fmpq; use std::cmp::max; use std::ops::*; pub fn star_op<T>(res: &mut HmfGen<T>, f: &HmfGen<T>) where T: BigNumber, { let (k1, k2) = f.weight.unwrap(); v_u_bd_iter!((f.m, f.u_bds, v, u, bd) { res.fcvec.fc_ref_mut(v, u, bd).set_g(f.fcvec.fc_ref(v, -u, bd)); }); if !is_even!((k1 + k2) >> 1) { res.negate(); } res.weight = Some((k2, k1)); } fn proj_s9_part<T>(f: &HmfGen<T>, s9: &HmfGen<T>) -> HmfGen<T> where T: BigNumber + Clone + ShrAssign<u64>, for<'a> T: SubAssign<&'a T>, { let mut tmp = HmfGen::new(f.m, f.prec); star_op(&mut tmp, f); let g = f - &tmp; let mut tmp1 = HmfGen::new(f.m, f.prec); div_mut(&mut tmp1, &g, s9); tmp1 >>= 1_u64; tmp1 } pub fn bracket_inner_prod<T>(f: &HmfGen<T>, g: &HmfGen<T>) -> HmfGen<T> where T: BigNumber + Clone + ShrAssign<u64>, for<'a> T: AddAssign<&'a T>, for<'a> T: SubAssign<&'a T>, for<'a> T: From<&'a Fmpq>, { let prec = max(f.prec, g.prec); let mut tmp = HmfGen::new(f.m, prec); star_op(&mut tmp, g); tmp *= f; let s9 = s9_form(prec); proj_s9_part(&tmp, &Into::<HmfGen<T>>::into(&s9)) } #[cfg(test)] mod tests { use super::*; #[test] fn test_proj_s9() { let prec = 5; let s4 = s4_form(prec); let s6 = s6_form(prec); let mut s5 = s5_form(prec); let s9 = s9_form(prec); let f = &(&(s4.pow(2)) * &s6) + &(&s5 * &s9); let g = proj_s9_part(&f, &s9); s5.decrease_prec(prec - 1); assert_eq!(g, s5); } }
mod server;

use server::{start_server, GenericServerError};

/// Port used when `PORT` is unset or not a valid `u16`.
const DEFAULT_PORT: u16 = 3001;

#[tokio::main]
async fn main() -> Result<(), GenericServerError> {
    // Honour the PORT environment variable, falling back to the default
    // when it is missing or fails to parse.
    let port = std::env::var("PORT")
        .ok()
        .and_then(|raw| raw.parse::<u16>().ok())
        .unwrap_or(DEFAULT_PORT);

    // Bind on all interfaces.
    let socket_address = std::net::SocketAddr::from(([0, 0, 0, 0], port));
    start_server(socket_address).await?;
    Ok(())
}
use std::any::Any;

use super::Provider;

/// Pairs a boxed `Provider` with a type-erased pointer into the same heap
/// allocation, so callers can use the value both as a trait object and,
/// via `Any`, downcast back to its concrete type.
pub(super) struct CastProvider {
    // Raw pointer to the payload owned by `provider` below. Kept separately
    // because `Box<dyn Provider>` alone cannot be downcast; the `&*provider`
    // coercion in `new` implies the concrete type is `Any + Send + Sync`
    // (presumably via `Provider`'s supertraits — confirm at the trait).
    downcast: *const (dyn Any + Send + Sync),
    provider: Box<dyn Provider>,
}

impl CastProvider {
    /// Boxes `p` and records a type-erased pointer to the boxed value.
    pub fn new(p: impl Provider) -> Self {
        let provider = Box::new(p);
        Self {
            // Points into the Box's heap allocation, which does not move
            // when the `CastProvider` itself is moved.
            downcast: &*provider,
            provider,
        }
    }

    /// Views the stored provider as concrete type `T`, or `None` if `T`
    /// is not the type originally passed to `new`.
    pub fn downcast<T: 'static>(&self) -> Option<&T> {
        // SAFETY: `self.downcast` points into the allocation owned by
        // `self.provider`, which lives as long as `&self` and is never
        // replaced after construction.
        unsafe { &*self.downcast }.downcast_ref()
    }

    /// Borrows the provider as a trait object.
    pub fn provider(&self) -> &dyn Provider {
        self.provider.as_ref()
    }
}

// SAFETY(review): the raw pointer is the only reason auto-derivation fails;
// it merely aliases `provider`. Soundness rests on the pointee actually
// being `Send + Sync` (it is stored as `dyn Any + Send + Sync`) — confirm
// `Provider` users cannot observe data races through `downcast`.
unsafe impl Send for CastProvider {}
unsafe impl Sync for CastProvider {}
extern crate rustypy; use rustypy::*; #[test] fn unpack_pylist_macro() { use std::iter::FromIterator; let nested = PyList::from_iter(vec![ pytuple!( PyArg::PyList(PyList::from_iter(vec![1i32, 2, 3]).into_raw()), PyArg::F32(0.1) ), pytuple!( PyArg::PyList(PyList::from_iter(vec![3i32, 2, 1]).into_raw()), PyArg::F32(0.2) ), ]) .into_raw(); let unpacked = unpack_pytype!(nested; PyList{PyTuple{({PyList{I32 => i32}}, F32,)}}); assert_eq!(vec![(vec![1, 2, 3], 0.1), (vec![3, 2, 1], 0.2)], unpacked); } #[test] fn pytuple_macros() { let pytuple = pytuple!( PyArg::PyBool(PyBool::from(false)), PyArg::PyString(PyString::from("test")), PyArg::I64(55i64) ) .into_raw(); let unpacked = unpack_pytype!(pytuple; (PyBool, PyString, I64,)); assert_eq!((false, String::from("test"), 55i64), unpacked); }
fn main() {
    call_me();
}

/// Prints a numbered "Ring!" line for each of three calls.
fn call_me() {
    let total_calls = 3;
    for ring in 1..=total_calls {
        println!("Ring! Call number {}", ring);
    }
}
use chrono::{DateTime, Utc, Duration};

/// Returns the moment exactly one gigasecond (10^9 seconds) after `start`.
///
/// `Duration::seconds` takes an `i64`; the literal fits directly, so the
/// old `i64::from(1_000_000_000)` wrapper (widening an `i32` literal) was
/// redundant and has been dropped.
pub fn after(start: DateTime<Utc>) -> DateTime<Utc> {
    start + Duration::seconds(1_000_000_000)
}
use crate::*; use serde::{Deserialize, Serialize}; use std::collections::BTreeMap; /// Holds a set of reusable objects for different aspects of the OAS. /// All objects defined within the components object will have no effect /// on the API unless they are explicitly referenced from properties /// outside the components object. #[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq)] #[serde(rename_all = "camelCase")] pub struct Components { /// An object to hold reusable Security Scheme Objects. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub security_schemes: BTreeMap<String, ReferenceOr<SecurityScheme>>, /// An object to hold reusable Response Objects. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub responses: BTreeMap<String, ReferenceOr<Response>>, /// An object to hold reusable Parameter Objects. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub parameters: BTreeMap<String, ReferenceOr<Parameter>>, /// An object to hold reusable Example Objects. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub examples: BTreeMap<String, ReferenceOr<Example>>, /// An object to hold reusable Request Body Objects. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub request_bodies: BTreeMap<String, ReferenceOr<RequestBody>>, /// An object to hold reusable Header Objects. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub headers: BTreeMap<String, ReferenceOr<Header>>, /// An object to hold reusable Schema Objects. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub schemas: BTreeMap<String, ReferenceOr<Schema>>, /// An object to hold reusable Link Objects. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub links: BTreeMap<String, ReferenceOr<Link>>, /// An object to hold reusable Callback Objects. #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] pub callbacks: BTreeMap<String, ReferenceOr<Callback>>, }
use std::ops::Deref; use thiserror::Error; #[cfg(feature = "passthrough-decoder")] mod passthrough_decoder; #[cfg(feature = "passthrough-decoder")] pub use passthrough_decoder::PassthroughDecoder; mod symphonia_decoder; pub use symphonia_decoder::SymphoniaDecoder; #[derive(Error, Debug)] pub enum DecoderError { #[error("Passthrough Decoder Error: {0}")] PassthroughDecoder(String), #[error("Symphonia Decoder Error: {0}")] SymphoniaDecoder(String), } pub type DecoderResult<T> = Result<T, DecoderError>; #[derive(Error, Debug)] pub enum AudioPacketError { #[error("Decoder Raw Error: Can't return Raw on Samples")] Raw, #[error("Decoder Samples Error: Can't return Samples on Raw")] Samples, } pub type AudioPacketResult<T> = Result<T, AudioPacketError>; pub enum AudioPacket { Samples(Vec<f64>), Raw(Vec<u8>), } impl AudioPacket { pub fn samples(&self) -> AudioPacketResult<&[f64]> { match self { AudioPacket::Samples(s) => Ok(s), AudioPacket::Raw(_) => Err(AudioPacketError::Raw), } } pub fn raw(&self) -> AudioPacketResult<&[u8]> { match self { AudioPacket::Raw(d) => Ok(d), AudioPacket::Samples(_) => Err(AudioPacketError::Samples), } } pub fn is_empty(&self) -> bool { match self { AudioPacket::Samples(s) => s.is_empty(), AudioPacket::Raw(d) => d.is_empty(), } } } #[derive(Debug, Clone)] pub struct AudioPacketPosition { pub position_ms: u32, pub skipped: bool, } impl Deref for AudioPacketPosition { type Target = u32; fn deref(&self) -> &Self::Target { &self.position_ms } } pub trait AudioDecoder { fn seek(&mut self, position_ms: u32) -> Result<u32, DecoderError>; fn next_packet(&mut self) -> DecoderResult<Option<(AudioPacketPosition, AudioPacket)>>; } impl From<DecoderError> for librespot_core::error::Error { fn from(err: DecoderError) -> Self { librespot_core::error::Error::aborted(err) } } impl From<symphonia::core::errors::Error> for DecoderError { fn from(err: symphonia::core::errors::Error) -> Self { Self::SymphoniaDecoder(err.to_string()) } }
use std::io::prelude::*; use std::fs::File; use std::path; use chrono::{DateTime, Utc}; use clap::App; use ureq; const DEFAULT_DIR: &str = "/mnt/data"; const BASE_URL: &str = "https://dataplane.org"; const ENDPOINTS: (&str, &str, &str) = ( "/sshpwauth.txt", "/telnetlogin.txt", "/vncrfb.txt", ); #[derive(Debug, Clone)] enum Category { Ssh, Telnet, Vnc, } impl ToString for Category { fn to_string(&self) -> String { let s = match self { Category::Ssh => "ssh", Category::Telnet => "telnet", Category::Vnc => "vnc", }; String::from(s) } } #[derive(Debug)] struct Meta { ts: DateTime<Utc>, origin: String, origin_id: u64, category: Category, } impl Meta { fn new(category: Category) -> Meta { Meta { ts: Utc::now(), origin: String::from("dataplane"), origin_id: 1, category, } } } #[derive(Debug)] struct Data { body: String, meta: Meta, } impl Data { fn new(body: String, meta: Meta) -> Data { Data { body, meta, } } } fn get_url(c: &Category) -> String { match c { Category::Ssh => String::from(BASE_URL) + ENDPOINTS.0, Category::Telnet => String::from(BASE_URL) + ENDPOINTS.1, Category::Vnc => String::from(BASE_URL) + ENDPOINTS.2, } } fn get_filename(dir: &String, c: &Category, ts: &DateTime<Utc>) -> String { let mut path = String::from(dir); if path.chars().last().unwrap() != '/' { path += "/"; } let path = path + &c.to_string() + "/dataplane_01_" + &ts.timestamp().to_string() + ".log"; path } fn fetch(c: Category) -> Result<Data, ureq::Error> { let meta = Meta::new(c); let url = get_url(&meta.category); let body: String = ureq::get(&url) .call()? 
.into_string()?; Ok(Data::new(body, meta)) } fn write_f(dir: &String, data: Data) -> std::io::Result<String> { let path = get_filename(dir, &data.meta.category, &data.meta.ts); // Create dir let prefix = path::Path::new(&path).parent().unwrap(); std::fs::create_dir_all(prefix)?; let mut f = File::create(path.clone())?; f.write(data.body.as_bytes())?; Ok(path) } fn main() { let args = App::new("fetch_dp") .version("0.1.0") .about("Fetch data feeds of dataplane.org") .author("Max Resing") .arg("-o, --output=[DIR] 'Define an output directory'") .arg("-v, --verbose 'Set the output to verbose'") .get_matches(); let verbose = args.is_present("verbose"); let dir = args.value_of("output").unwrap_or(DEFAULT_DIR); let dir = String::from(dir); if verbose { println!("Check if output directory exists...") } if !path::Path::new(&dir).is_dir() { println!("Given output directory '{}' is not a directory. Exiting", dir); return; } let cats = vec![Category::Ssh, Category::Telnet, Category::Vnc]; for cat in &cats { let c = cat.clone(); if verbose { println!("Fetching data for {:?}", cat); } let res = fetch(c); let res = match res { Ok(data) => data, Err(error) => { panic!("Problem fetching the file: {:?}", error) }, }; if verbose { println!("Writing data of feed to file: {:?}", cat); } match write_f(&dir, res) { Ok(f) => { if verbose { println!("Successfully wrote data to {}", f); } }, Err(error) => { println!("Problem writing the file: {:?}", error) }, } } if verbose { println!("Done") } }
use super::{Server, ServerResult};
use actix_web::{
    http::header,
    middleware::cors::Cors,
    pred,
    server::{HttpHandler, HttpHandlerTask},
    App,
};
use std::{net::SocketAddr, path::PathBuf, sync::Arc};

/// Accumulates actix-web `App` factories and produces a running `Server`.
///
/// Each registered handler is a factory closure (handlers must be
/// re-creatable per worker thread), stored boxed because every `App<S>`
/// is erased to a `Box<HttpHandler>`.
pub struct ServerBuilder {
    // NOTE(review): presumably a PKCS#12 bundle enabling TLS when Some —
    // confirm against `Server::start`.
    pkcs12: Option<PathBuf>,
    address: SocketAddr,
    // Shared so each handler-factory closure can borrow the prefix.
    prefix: Arc<String>,
    handlers: Vec<Box<Fn() -> Box<HttpHandler<Task = Box<HttpHandlerTask>>> + Send + Sync>>,
}

impl ServerBuilder {
    /// Create a builder; an OPTIONS-consuming handler is pre-registered
    /// so CORS preflight requests under `prefix` are always answered.
    pub fn new(pkcs12: Option<PathBuf>, address: SocketAddr, prefix: impl Into<String>) -> Self {
        Self {
            pkcs12,
            address,
            prefix: Arc::new(prefix.into()),
            handlers: vec![],
        }
        .add_handler(create_options_handler())
    }

    /// Warning! App will consume every request which passes filtering and matches prefix.
    /// The consumed request will not be passed to other handlers, so make sure that app
    /// consumes only request, which are not valid for other handlers.
    /// The prefix passed to a handler function must be added on the beginning of an app prefix.
    pub fn add_handler<F, S: 'static>(mut self, handler: F) -> Self
    where
        F: Fn(&str) -> App<S> + Send + Sync + Clone + 'static,
    {
        let prefix = self.prefix.clone();
        // Wrap the user factory: build the App with the shared prefix,
        // attach the CORS middleware, then type-erase it via `boxed()`.
        let wrapped_handler = move || {
            handler(&*prefix)
                .middleware(create_cors_middleware())
                .boxed()
        };
        self.handlers.push(Box::new(wrapped_handler));
        self
    }

    /// Consume the builder and start the server with all handlers.
    pub fn build(self) -> ServerResult<Server> {
        let handlers = Arc::new(self.handlers);
        // Per-worker factory: instantiate every registered handler.
        let multi_handler = move || handlers.iter().map(|handler| handler()).collect::<Vec<_>>();
        Server::start(self.pkcs12, self.address, multi_handler)
    }
}

/// Handler that consumes (and thereby answers) all OPTIONS requests
/// under the given prefix.
fn create_options_handler() -> impl Fn(&str) -> App<()> + Send + Sync + Clone + 'static {
    |prefix| App::new().filter(pred::Options()).prefix(prefix)
}

/// Permissive CORS: wildcard origin, Content-Type header allowed.
fn create_cors_middleware() -> Cors {
    Cors::build()
        .send_wildcard()
        .allowed_header(header::CONTENT_TYPE)
        .finish()
}
use std::collections::HashMap;
use std::env;
use std::error::Error;
use std::fmt;

use chrono::{DateTime, Utc};
use dynomite::{
    attr_map,
    dynamodb::{DynamoDb, DynamoDbClient, GetItemInput, PutItemInput},
    AttributeValue, Attributes, FromAttributes, Item,
};
use nanoid::nanoid;
use serde::{Deserialize, Serialize};

/// A participant in a lobby, identified by name and by the WebSocket
/// connection id that joined.
#[derive(Attributes, Debug, Serialize, Deserialize, Clone)]
pub struct Player {
    pub name: String,
    pub connection_id: String,
}

pub type LobbyId = String;

/// A game lobby persisted in DynamoDB, keyed by its short lobby code.
#[derive(Item, Debug, Serialize, Deserialize, Clone)]
pub struct Lobby {
    #[dynomite(partition_key)]
    pub id: LobbyId,
    pub created_at: DateTime<Utc>,
    /// Also serves as the optimistic-concurrency token for updates.
    pub updated_at: DateTime<Utc>,
    pub players: Vec<Player>,
}

/// Simple message-carrying error for lobby operations.
#[derive(Debug)]
struct LobbyServiceError {
    details: String,
}

impl LobbyServiceError {
    fn new(msg: &str) -> LobbyServiceError {
        LobbyServiceError {
            details: msg.to_string(),
        }
    }
}

impl fmt::Display for LobbyServiceError {
    fn fmt(
        &self,
        f: &mut fmt::Formatter,
    ) -> fmt::Result {
        write!(f, "{}", self.details)
    }
}

// `Display` + `Debug` are sufficient; the deprecated `description()`
// override was removed (its default implementation delegates to Display).
impl Error for LobbyServiceError {}

pub struct LobbyService;

impl LobbyService {
    /// Create a new lobby hosted by `host_name` and persist it.
    ///
    /// Generates a 4-character lowercase alphanumeric lobby code.
    ///
    /// # Errors
    ///
    /// Propagates repository/environment failures as boxed errors.
    pub async fn create(
        ddb: &DynamoDbClient,
        now: &DateTime<Utc>,
        host_name: &str,
        connection_id: &str,
    ) -> Result<Lobby, Box<dyn std::error::Error + Sync + Send + 'static>> {
        log::info!("Create: {}", host_name);

        let players = vec![Player {
            name: host_name.to_string(),
            connection_id: connection_id.to_string(),
        }];

        // Lowercase alphanumeric alphabet for short, shareable codes.
        let lobby_code_alphabet: [char; 36] = [
            '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', 'a', 'b', 'c', 'd', 'e', 'f', 'g',
            'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x',
            'y', 'z',
        ];
        let lobby_code = nanoid!(4, &lobby_code_alphabet);

        let lobby = Lobby {
            id: lobby_code,
            created_at: *now, // DateTime<Utc> is Copy; no clone needed
            updated_at: *now,
            players,
        };
        log::info!("Lobby: {:?}", lobby);

        LobbyRepo::put(ddb, &lobby).await?;

        Ok(lobby)
    }

    /// Add `player_name` to an existing lobby using an optimistic
    /// concurrency check on `updated_at`.
    ///
    /// # Errors
    ///
    /// Returns an error when the lobby does not exist (previously this
    /// path `unwrap()`ed and crashed the process) or when the
    /// conditional update fails.
    pub async fn join(
        ddb: &DynamoDbClient,
        now: &DateTime<Utc>,
        lobby_id: &LobbyId,
        player_name: &str,
        connection_id: &str,
    ) -> Result<Lobby, Box<dyn std::error::Error + Sync + Send + 'static>> {
        log::info!("Join: {} {}", lobby_id, player_name);

        let maybe_lobby = LobbyRepo::get(ddb, lobby_id).await?;
        log::info!("LobbyService::join get result: {:?}", &maybe_lobby);

        // Propagate a missing lobby as an error instead of panicking.
        let current_lobby =
            maybe_lobby.ok_or_else(|| LobbyServiceError::new("Could not get Lobby"))?;

        let mut modified_lobby = current_lobby.clone();
        modified_lobby.players.push(Player {
            name: player_name.to_string(),
            connection_id: connection_id.to_string(),
        });
        modified_lobby.updated_at = *now;

        let new_lobby = LobbyRepo::update(
            ddb,
            &current_lobby.id,
            &current_lobby.updated_at,
            &modified_lobby,
        )
        .await?;

        Ok(new_lobby)
    }

    /// Fetch a lobby by id, failing with an error (not a panic) when it
    /// does not exist.
    pub async fn get(
        ddb: &DynamoDbClient,
        lobby_id: &LobbyId,
    ) -> Result<Lobby, Box<dyn std::error::Error + Sync + Send + 'static>> {
        log::info!("Get: {}", lobby_id);

        let lobby = LobbyRepo::get(ddb, lobby_id)
            .await?
            .ok_or_else(|| LobbyServiceError::new("Could not get lobby"))?;
        log::info!("LobbyService::get result: {:?}", &lobby);

        Ok(lobby)
    }
}

struct LobbyRepo;

impl LobbyRepo {
    /// Retrieve a single Lobby by Id.
    ///
    /// Returns `Ok(None)` when no item exists under the given code.
    pub async fn get(
        ddb: &DynamoDbClient,
        lobby_code: &LobbyId,
    ) -> Result<Option<Lobby>, Box<dyn std::error::Error + Sync + Send + 'static>> {
        let table_name = env::var("tableName")?;

        let maybe_lobby = ddb
            .get_item(GetItemInput {
                table_name,
                key: {
                    let mut x = HashMap::new();
                    x.insert(
                        "id".to_owned(),
                        AttributeValue {
                            s: Some(lobby_code.to_string()),
                            ..AttributeValue::default()
                        },
                    );
                    x
                },
                ..GetItemInput::default()
            })
            .await?
            .item
            // Fallible attribute decoding: Option<Result<_>> -> Result<Option<_>>
            .map(|attrs| Lobby::from_attrs(attrs))
            .transpose()?;

        log::info!("LobbyRepo::get result: {:?}", &maybe_lobby);

        Ok(maybe_lobby)
    }

    /// Put new Lobby into a table.
    pub async fn put(
        ddb: &DynamoDbClient,
        lobby: &Lobby,
    ) -> Result<(), Box<dyn std::error::Error + Sync + Send + 'static>> {
        // `Item` derive provides the Lobby -> attribute-map conversion.
        let item = lobby.clone().into();
        let table_name = env::var("tableName")?;

        // Unconditional put: overwrites any existing item with this id.
        let result = ddb
            .put_item(PutItemInput {
                table_name: table_name.clone(),
                item,
                ..PutItemInput::default()
            })
            .await?;

        log::info!("LobbyRepo::put result: {:?}", result);

        return Ok(());
    }

    /// Update an existing Lobby.
    ///
    /// This function uses the `timestamp` of the passed lobby as the most recent timestamp and the
    /// function will error if the table has a different value.
    pub async fn update(
        ddb: &DynamoDbClient,
        lobby_id: &LobbyId,
        previous_updated_at: &DateTime<Utc>,
        lobby: &Lobby,
    ) -> Result<Lobby, Box<dyn std::error::Error + Sync + Send + 'static>> {
        let item = lobby.clone().into();
        let table_name = env::var("tableName")?;

        log::debug!(
            "LobbyRepo::update updated_at ({}), previous ({})",
            &lobby.updated_at,
            previous_updated_at,
        );

        // Optimistic concurrency: the put only succeeds when the stored
        // `updated_at` still equals the caller's `previous_updated_at`
        // (DynamoDB rejects it with a conditional-check failure otherwise).
        let result = ddb
            .put_item(PutItemInput {
                table_name: table_name.clone(),
                condition_expression: Some("updated_at = :previousUpdatedAt".to_string()),
                expression_attribute_values: Some(
                    attr_map! {
                        ":previousUpdatedAt" => previous_updated_at
                    },
                ),
                item,
                ..PutItemInput::default()
            })
            .await?;

        log::info!("LobbyRepo::update result: {:?}", result);

        // NOTE(review): put-then-get is not atomic; a concurrent write can
        // land between the two calls, so the returned record may differ
        // from `lobby` — confirm whether callers rely on read-back.
        Ok(LobbyRepo::get(ddb, lobby_id)
            .await?
            .expect("Could not get the record that was just updated"))
    }
}
use super::data::{ServerData};
use std::sync::{Arc, Mutex};
use super::network::{
    Stream,
    encode::stream_to_raw,
    packet::Packet
};

// Starts ping thread A in a new thread and consumes active thread for ping thread B. These threads
// are used to maintain connections to clients and check in on them.
pub fn start_ping_threads(handle: Arc<Mutex<ServerData>>) -> ! {
    // double our data handle
    let tmp_handle = Arc::clone(&handle);
    std::thread::spawn(move || {
        ping_thread_a(tmp_handle);
    });

    // ping_thread_b returns ! so we don't have to do anything else
    ping_thread_b(handle);
}

/// Ping thread A sends repeated pings to every client listed as connected to the server.
///
/// Runs every 250ms; works on a clone of the server data so the mutex is
/// released before any network I/O happens.
fn ping_thread_a(handle: Arc<Mutex<ServerData>>) -> ! {
    loop {
        // Grab the server data as a clone, and drop the handle because we don't need it anymore.
        let tmp_data = handle.lock().unwrap();
        let data = tmp_data.clone();
        drop(tmp_data);

        let mut loc = 0;

        // for every connection
        for ip in data.clone().get_connections() {
            // send ping carrying the connection's current index; a failed
            // send is only logged (thread B handles actual disconnects)
            ip.send(
                stream_to_raw(
                    Stream::new_with_packets(vec![
                        Packet::PingClient(loc)
                    ])
                )
            ).unwrap_or_else(|e| {
                println!("WARN: unable to ping client: `{}`", e);
            });

            loc += 1;
        }

        // Rest thread before next iteration to not use 100% of thread additionally, don't ping the
        // client infinite times per second, clogging up the client's inbound packets.
        std::thread::sleep(std::time::Duration::from_millis(250));
    }
}

/// Ping thread B checks incoming packets and disconnects idle or unresponsive clients. (no pong)
///
/// Runs every 5s while thread A pings every 250ms, so a responsive client
/// has many chances to produce a pong within the 2-second window below.
fn ping_thread_b(handle: Arc<Mutex<ServerData>>) -> ! {
    loop {
        // Grab the server data. Keep the handled data because we need to change it. Other threads
        // resume when data is dropped, either at the end of the loop or earlier if possible.
        let mut data = handle.lock().unwrap();

        // for every unprocessed packet in the last two seconds, grab and store the usid and server
        // id if it's related to ping/pong processes.
        let mut clientpairs: Vec<(usize, usize)> = vec![];

        for i in data.packets.clone().get_pings_after(std::time::SystemTime::now() - std::time::Duration::from_secs(2)) {
            match i.packet {
                Packet::PongClient(a, b) => {
                    clientpairs.push((a, b));
                },
                _ => {}
            }
        }

        // A connection index with no recorded pong in the window is dropped.
        for i in 0..data.clone().amount_connected() {
            let mut passed = false;

            for j in clientpairs.clone() {
                if j.1 == i {
                    passed = true;
                }
            }

            if !passed {
                // if we remove a connection we have to immediately break and pause for 5 seconds
                // because this changes indexes invalidating packets for a short while
                println!("Disconnecting a client due to inactivity.");
                data.remove_connection(i);
                break;
            }
        }

        // clear out old packets to prevent infinite ram expansion
        data.packets.pings = vec![];

        // Rest thread before next iteration to not use 100% of thread. Drop data beforehand so that
        // other threads can use it.
        drop(data);
        std::thread::sleep(std::time::Duration::from_millis(5000));
    }
}
/*!

Implementation of [the WTF-8 encoding](https://simonsapin.github.io/wtf-8/).

This library uses Rust’s type system to maintain
[well-formedness](https://simonsapin.github.io/wtf-8/#well-formed),
like the `String` and `&str` types do for UTF-8.

Since [WTF-8 must not be used
for interchange](https://simonsapin.github.io/wtf-8/#intended-audience),
this library deliberately does not provide access to the underlying bytes
of WTF-8 strings,
nor can it decode WTF-8 from arbitrary bytes.
WTF-8 strings can be obtained from UTF-8, UTF-16, or code points.

*/

#![no_std]

extern crate alloc;

use alloc::borrow::Cow;
use alloc::string::String;
use alloc::vec::Vec;
use core::str;
use core::cmp::Ordering;
use core::fmt;
use core::hash;
use core::iter::{FromIterator, IntoIterator};
use core::mem::transmute;
use core::ops::Deref;
use core::slice;

mod not_quite_std;

// The UTF-8 encoding of U+FFFD REPLACEMENT CHARACTER ("�"),
// substituted for lone surrogates in the lossy conversions below.
static UTF8_REPLACEMENT_CHARACTER: &'static [u8] = b"\xEF\xBF\xBD";

/// A Unicode code point: from U+0000 to U+10FFFF.
///
/// Compare with the `char` type,
/// which represents a Unicode scalar value:
/// a code point that is not a surrogate (U+D800 to U+DFFF).
#[derive(Eq, PartialEq, Ord, PartialOrd, Clone)]
pub struct CodePoint {
    value: u32
}

impl Copy for CodePoint {}

/// Format the code point as `U+` followed by four to six hexadecimal digits.
/// Example: `U+1F4A9`
impl fmt::Debug for CodePoint {
    #[inline]
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(formatter, "U+{:04X}", self.value)
    }
}

impl CodePoint {
    /// Unsafely create a new `CodePoint` without checking the value.
    ///
    /// Only use when `value` is known to be less than or equal to 0x10FFFF.
    #[inline]
    pub unsafe fn from_u32_unchecked(value: u32) -> CodePoint {
        CodePoint { value: value }
    }

    /// Create a new `CodePoint` if the value is a valid code point.
    ///
    /// Return `None` if `value` is above 0x10FFFF.
    #[inline]
    pub fn from_u32(value: u32) -> Option<CodePoint> {
        match value {
            0 ..= 0x10FFFF => Some(CodePoint { value: value }),
            _ => None
        }
    }

    /// Create a new `CodePoint` from a `char`.
    ///
    /// Since all Unicode scalar values are code points, this always succeeds.
    #[inline]
    pub fn from_char(value: char) -> CodePoint {
        CodePoint { value: value as u32 }
    }

    /// Return the numeric value of the code point.
    #[inline]
    pub fn to_u32(&self) -> u32 {
        self.value
    }

    /// Optionally return a Unicode scalar value for the code point.
    ///
    /// Return `None` if the code point is a surrogate (from U+D800 to U+DFFF).
    #[inline]
    pub fn to_char(&self) -> Option<char> {
        match self.value {
            0xD800 ..= 0xDFFF => None,
            // SAFETY-relevant: `value` is <= 0x10FFFF (type invariant) and
            // not a surrogate (checked above), so it is a valid `char`.
            _ => Some(unsafe { transmute(self.value) })
        }
    }

    /// Return a Unicode scalar value for the code point.
    ///
    /// Return `'\u{FFFD}'` (the replacement character “�”)
    /// if the code point is a surrogate (from U+D800 to U+DFFF).
    #[inline]
    pub fn to_char_lossy(&self) -> char {
        self.to_char().unwrap_or('\u{FFFD}')
    }
}

/// An owned, growable string of well-formed WTF-8 data.
///
/// Similar to `String`, but can additionally contain surrogate code points
/// if they’re not in a surrogate pair.
#[derive(Eq, PartialEq, Ord, PartialOrd, Clone)]
pub struct Wtf8Buf {
    bytes: Vec<u8>
}

impl Deref for Wtf8Buf {
    type Target = Wtf8;

    fn deref(&self) -> &Wtf8 {
        // Wtf8 is a newtype over [u8], so &[u8] -> &Wtf8 is a
        // representation-preserving cast of the fat pointer.
        unsafe { transmute(&*self.bytes) }
    }
}

/// Format the string with double quotes,
/// and surrogates as `\u` followed by four hexadecimal digits.
/// Example: `"a\u{D800}"` for a string with code points [U+0061, U+D800]
impl fmt::Debug for Wtf8Buf {
    #[inline]
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        Wtf8::fmt(self, formatter)
    }
}

impl Wtf8Buf {
    /// Create a new, empty WTF-8 string.
    #[inline]
    pub fn new() -> Wtf8Buf {
        Wtf8Buf { bytes: Vec::new() }
    }

    /// Create a new, empty WTF-8 string with pre-allocated capacity for `n` bytes.
    #[inline]
    pub fn with_capacity(n: usize) -> Wtf8Buf {
        Wtf8Buf { bytes: Vec::with_capacity(n) }
    }

    /// Create a WTF-8 string from an UTF-8 `String`.
    ///
    /// This takes ownership of the `String` and does not copy.
    ///
    /// Since WTF-8 is a superset of UTF-8, this always succeeds.
    #[inline]
    pub fn from_string(string: String) -> Wtf8Buf {
        Wtf8Buf { bytes: string.into_bytes() }
    }

    /// Create a WTF-8 string from an UTF-8 `&str` slice.
    ///
    /// This copies the content of the slice.
    ///
    /// Since WTF-8 is a superset of UTF-8, this always succeeds.
    #[inline]
    pub fn from_str(str: &str) -> Wtf8Buf {
        Wtf8Buf { bytes: str.as_bytes().to_vec() }
    }

    /// Create a WTF-8 string from a potentially ill-formed UTF-16 slice of 16-bit code units.
    ///
    /// This is lossless: calling `.to_ill_formed_utf16()` on the resulting string
    /// will always return the original code units.
    pub fn from_ill_formed_utf16(v: &[u16]) -> Wtf8Buf {
        // Lower bound: each u16 produces at least one WTF-8 byte.
        let mut string = Wtf8Buf::with_capacity(v.len());
        for item in not_quite_std::decode_utf16(v.iter().cloned()) {
            match item {
                Ok(c) => string.push_char(c),
                Err(s) => {
                    // Surrogates are known to be in the code point range.
                    let code_point = unsafe { CodePoint::from_u32_unchecked(s as u32) };
                    // Skip the WTF-8 concatenation check,
                    // surrogate pairs are already decoded by utf16_items
                    not_quite_std::push_code_point(&mut string, code_point)
                }
            }
        }
        string
    }

    /// Reserves capacity for at least `additional` more bytes to be inserted
    /// in the given `Wtf8Buf`.
    /// The collection may reserve more space to avoid frequent reallocations.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows `usize`.
    ///
    /// # Example
    ///
    /// ```
    /// let mut s = Wtf8Buf::new();
    /// s.reserve(10);
    /// assert!(s.capacity() >= 10);
    /// ```
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        self.bytes.reserve(additional)
    }

    /// Returns the number of bytes that this string buffer can hold without reallocating.
    ///
    /// # Example
    ///
    /// ```
    /// let s = Wtf8Buf::with_capacity(10);
    /// assert!(s.capacity() >= 10);
    /// ```
    #[inline]
    pub fn capacity(&self) -> usize {
        self.bytes.capacity()
    }

    /// Append an UTF-8 slice at the end of the string.
    #[inline]
    pub fn push_str(&mut self, other: &str) {
        self.bytes.extend_from_slice(other.as_bytes())
    }

    /// Append a WTF-8 slice at the end of the string.
    ///
    /// This replaces newly paired surrogates at the boundary
    /// with a supplementary code point,
    /// like concatenating ill-formed UTF-16 strings effectively would.
    #[inline]
    pub fn push_wtf8(&mut self, other: &Wtf8) {
        match ((&*self).final_lead_surrogate(), other.initial_trail_surrogate()) {
            // Replace newly paired surrogates by a supplementary code point.
            // Each lone surrogate occupies exactly 3 WTF-8 bytes.
            (Some(lead), Some(trail)) => {
                let len_without_lead_surrogate = self.len() - 3;
                self.bytes.truncate(len_without_lead_surrogate);
                let other_without_trail_surrogate = &other.bytes[3..];
                // 4 bytes for the supplementary code point
                self.bytes.reserve(4 + other_without_trail_surrogate.len());
                self.push_char(decode_surrogate_pair(lead, trail));
                self.bytes.extend_from_slice(other_without_trail_surrogate);
            }
            _ => self.bytes.extend_from_slice(&other.bytes)
        }
    }

    /// Append a Unicode scalar value at the end of the string.
    #[inline]
    pub fn push_char(&mut self, c: char) {
        not_quite_std::push_code_point(self, CodePoint::from_char(c))
    }

    /// Append a code point at the end of the string.
    ///
    /// This replaces newly paired surrogates at the boundary
    /// with a supplementary code point,
    /// like concatenating ill-formed UTF-16 strings effectively would.
    #[inline]
    pub fn push(&mut self, code_point: CodePoint) {
        match code_point.to_u32() {
            // A trail surrogate after a lead surrogate at the buffer's
            // end: join them into one supplementary code point.
            trail @ 0xDC00..=0xDFFF => {
                match (&*self).final_lead_surrogate() {
                    Some(lead) => {
                        let len_without_lead_surrogate = self.len() - 3;
                        self.bytes.truncate(len_without_lead_surrogate);
                        self.push_char(decode_surrogate_pair(lead, trail as u16));
                        return
                    }
                    _ => {}
                }
            }
            _ => {}
        }

        // No newly paired surrogates at the boundary.
        not_quite_std::push_code_point(self, code_point)
    }

    /// Shortens a string to the specified length.
    ///
    /// # Failure
    ///
    /// Fails if `new_len` > current length,
    /// or if `new_len` is not a code point boundary.
    #[inline]
    pub fn truncate(&mut self, new_len: usize) {
        assert!(not_quite_std::is_code_point_boundary(self, new_len));
        self.bytes.truncate(new_len)
    }

    /// Consume the WTF-8 string and try to convert it to UTF-8.
    ///
    /// This does not copy the data.
    ///
    /// If the contents are not well-formed UTF-8
    /// (that is, if the string contains surrogates),
    /// the original WTF-8 string is returned instead.
    pub fn into_string(self) -> Result<String, Wtf8Buf> {
        match self.next_surrogate(0) {
            None => Ok(unsafe { String::from_utf8_unchecked(self.bytes) }),
            Some(_) => Err(self),
        }
    }

    /// Consume the WTF-8 string and convert it lossily to UTF-8.
    ///
    /// This does not copy the data (but may overwrite parts of it in place).
    ///
    /// Surrogates are replaced with `"\u{FFFD}"` (the replacement character “�”)
    pub fn into_string_lossy(mut self) -> String {
        let mut pos = 0;
        loop {
            match self.next_surrogate(pos) {
                Some((surrogate_pos, _)) => {
                    pos = surrogate_pos + 3;
                    // In-place overwrite is possible because a surrogate
                    // and the replacement character are both 3 bytes.
                    self.bytes[surrogate_pos..pos].copy_from_slice(UTF8_REPLACEMENT_CHARACTER);
                },
                None => return unsafe { String::from_utf8_unchecked(self.bytes) }
            }
        }
    }
}

/// Create a new WTF-8 string from an iterator of code points.
///
/// This replaces surrogate code point pairs with supplementary code points,
/// like concatenating ill-formed UTF-16 strings effectively would.
impl FromIterator<CodePoint> for Wtf8Buf {
    fn from_iter<T: IntoIterator<Item = CodePoint>>(iterable: T) -> Wtf8Buf {
        let mut string = Wtf8Buf::new();
        string.extend(iterable);
        string
    }
}

/// Append code points from an iterator to the string.
///
/// This replaces surrogate code point pairs with supplementary code points,
/// like concatenating ill-formed UTF-16 strings effectively would.
impl Extend<CodePoint> for Wtf8Buf {
    fn extend<T: IntoIterator<Item = CodePoint>>(&mut self, iterable: T) {
        let iterator = iterable.into_iter();
        let (low, _high) = iterator.size_hint();
        // Lower bound of one byte per code point (ASCII only)
        self.bytes.reserve(low);
        for code_point in iterator {
            self.push(code_point);
        }
    }
}

/// A borrowed slice of well-formed WTF-8 data.
///
/// Similar to `&str`, but can additionally contain surrogate code points
/// if they’re not in a surrogate pair.
///
/// NOTE: a newtype over `[u8]`; `&Wtf8` values are produced by
/// transmuting `&[u8]` (see `Wtf8Buf::deref` and `Wtf8::from_str`).
pub struct Wtf8 {
    bytes: [u8]
}

// FIXME: https://github.com/rust-lang/rust/issues/18805
impl PartialEq for Wtf8 {
    fn eq(&self, other: &Wtf8) -> bool {
        self.bytes.eq(&other.bytes)
    }
}

// FIXME: https://github.com/rust-lang/rust/issues/18805
impl Eq for Wtf8 {}

// FIXME: https://github.com/rust-lang/rust/issues/18738
impl PartialOrd for Wtf8 {
    #[inline]
    fn partial_cmp(&self, other: &Wtf8) -> Option<Ordering> {
        self.bytes.partial_cmp(&other.bytes)
    }
    #[inline]
    fn lt(&self, other: &Wtf8) -> bool {
        self.bytes.lt(&other.bytes)
    }
    #[inline]
    fn le(&self, other: &Wtf8) -> bool {
        self.bytes.le(&other.bytes)
    }
    #[inline]
    fn gt(&self, other: &Wtf8) -> bool {
        self.bytes.gt(&other.bytes)
    }
    #[inline]
    fn ge(&self, other: &Wtf8) -> bool {
        self.bytes.ge(&other.bytes)
    }
}

// FIXME: https://github.com/rust-lang/rust/issues/18738
impl Ord for Wtf8 {
    #[inline]
    fn cmp(&self, other: &Wtf8) -> Ordering {
        self.bytes.cmp(&other.bytes)
    }
}

/// Format the slice with double quotes,
/// and surrogates as `\u` followed by four hexadecimal digits.
/// Example: `"a\u{D800}"` for a slice with code points [U+0061, U+D800]
impl fmt::Debug for Wtf8 {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        formatter.write_str("\"")?;
        let mut pos = 0;
        loop {
            match self.next_surrogate(pos) {
                None => break,
                Some((surrogate_pos, surrogate)) => {
                    // Bytes between surrogates are well-formed UTF-8.
                    formatter.write_str(unsafe {
                        str::from_utf8_unchecked(&self.bytes[pos..surrogate_pos])
                    })?;
                    write!(formatter, "\\u{{{:X}}}", surrogate)?;
                    pos = surrogate_pos + 3;
                }
            }
        }
        formatter.write_str(unsafe {
            str::from_utf8_unchecked(&self.bytes[pos..])
        })?;
        formatter.write_str("\"")
    }
}

impl Wtf8 {
    /// Create a WTF-8 slice from a UTF-8 `&str` slice.
    ///
    /// Since WTF-8 is a superset of UTF-8, this always succeeds.
    #[inline]
    pub fn from_str(value: &str) -> &Wtf8 {
        unsafe { transmute(value.as_bytes()) }
    }

    /// Return the length, in WTF-8 bytes.
    #[inline]
    pub fn len(&self) -> usize {
        self.bytes.len()
    }

    /// Return a slice of the given string for the byte range [`begin`..`end`).
    ///
    /// # Failure
    ///
    /// Fails when `begin` and `end` do not point to code point boundaries,
    /// or point beyond the end of the string.
    #[inline]
    pub fn slice(&self, begin: usize, end: usize) -> &Wtf8 {
        // is_code_point_boundary checks that the index is in [0, .len()]
        if begin <= end &&
           not_quite_std::is_code_point_boundary(self, begin) &&
           not_quite_std::is_code_point_boundary(self, end) {
            unsafe { not_quite_std::slice_unchecked(self, begin, end) }
        } else {
            not_quite_std::slice_error_fail(self, begin, end)
        }
    }

    /// Return a slice of the given string from byte `begin` to its end.
    ///
    /// # Failure
    ///
    /// Fails when `begin` is not at a code point boundary,
    /// or is beyond the end of the string.
    #[inline]
    pub fn slice_from(&self, begin: usize) -> &Wtf8 {
        // is_code_point_boundary checks that the index is in [0, .len()]
        if not_quite_std::is_code_point_boundary(self, begin) {
            unsafe { not_quite_std::slice_unchecked(self, begin, self.len()) }
        } else {
            not_quite_std::slice_error_fail(self, begin, self.len())
        }
    }

    /// Return a slice of the given string from its beginning to byte `end`.
    ///
    /// # Failure
    ///
    /// Fails when `end` is not at a code point boundary,
    /// or is beyond the end of the string.
    #[inline]
    pub fn slice_to(&self, end: usize) -> &Wtf8 {
        // is_code_point_boundary checks that the index is in [0, .len()]
        if not_quite_std::is_code_point_boundary(self, end) {
            unsafe { not_quite_std::slice_unchecked(self, 0, end) }
        } else {
            not_quite_std::slice_error_fail(self, 0, end)
        }
    }

    /// Return the code point at `position` if it is in the ASCII range,
    /// or `b'\xFF'` otherwise.
    ///
    /// # Failure
    ///
    /// Fails if `position` is beyond the end of the string.
    #[inline]
    pub fn ascii_byte_at(&self, position: usize) -> u8 {
        match self.bytes[position] {
            ascii_byte @ 0x00 ..= 0x7F => ascii_byte,
            _ => 0xFF
        }
    }

    /// Return an iterator for the string’s code points.
    #[inline]
    pub fn code_points(&self) -> Wtf8CodePoints {
        Wtf8CodePoints { bytes: self.bytes.iter() }
    }

    /// Try to convert the string to UTF-8 and return a `&str` slice.
    ///
    /// Return `None` if the string contains surrogates.
    ///
    /// This does not copy the data.
    #[inline]
    pub fn as_str(&self) -> Option<&str> {
        // Well-formed WTF-8 is also well-formed UTF-8
        // if and only if it contains no surrogate.
        match self.next_surrogate(0) {
            None => Some(unsafe { str::from_utf8_unchecked(&self.bytes) }),
            Some(_) => None,
        }
    }

    /// Lossily convert the string to UTF-8.
    /// Return an UTF-8 `&str` slice if the contents are well-formed in UTF-8.
    ///
    /// Surrogates are replaced with `"\u{FFFD}"` (the replacement character “�”).
    ///
    /// This only copies the data if necessary (if it contains any surrogate).
    pub fn to_string_lossy(&self) -> Cow<str> {
        // Fast path: no surrogate anywhere means the bytes are already
        // valid UTF-8 and can be borrowed without copying.
        let surrogate_pos = match self.next_surrogate(0) {
            None => return Cow::Borrowed(unsafe { str::from_utf8_unchecked(&self.bytes) }),
            Some((pos, _)) => pos,
        };
        let wtf8_bytes = &self.bytes;
        let mut utf8_bytes = Vec::with_capacity(self.len());
        utf8_bytes.extend_from_slice(&wtf8_bytes[..surrogate_pos]);
        utf8_bytes.extend_from_slice(UTF8_REPLACEMENT_CHARACTER);
        let mut pos = surrogate_pos + 3;
        loop {
            match self.next_surrogate(pos) {
                Some((surrogate_pos, _)) => {
                    utf8_bytes.extend_from_slice(&wtf8_bytes[pos..surrogate_pos]);
                    utf8_bytes.extend_from_slice(UTF8_REPLACEMENT_CHARACTER);
                    pos = surrogate_pos + 3;
                },
                None => {
                    utf8_bytes.extend_from_slice(&wtf8_bytes[pos..]);
                    return Cow::Owned(unsafe { String::from_utf8_unchecked(utf8_bytes) })
                }
            }
        }
    }

    /// Convert the WTF-8 string to potentially ill-formed UTF-16
    /// and return an iterator of 16-bit code units.
    ///
    /// This is lossless:
    /// calling `Wtf8Buf::from_ill_formed_utf16` on the resulting code units
    /// would always return the original WTF-8 string.
    #[inline]
    pub fn to_ill_formed_utf16(&self) -> IllFormedUtf16CodeUnits {
        IllFormedUtf16CodeUnits { code_points: self.code_points(), extra: 0 }
    }

    /// Find the byte position and value of the first lone surrogate at or
    /// after `pos`, walking the string one WTF-8 sequence at a time.
    #[inline]
    fn next_surrogate(&self, mut pos: usize) -> Option<(usize, u16)> {
        let mut iter = self.bytes[pos..].iter();
        loop {
            let b = match iter.next() {
                None => return None,
                Some(&b) => b,
            };
            if b < 0x80 {
                // 1-byte (ASCII) sequence
                pos += 1;
            } else if b < 0xE0 {
                // 2-byte sequence
                iter.next();
                pos += 2;
            } else if b == 0xED {
                // 0xED leads U+D000..=U+DFFF; a second byte >= 0xA0
                // puts it in the surrogate range U+D800..=U+DFFF.
                match (iter.next(), iter.next()) {
                    (Some(&b2), Some(&b3)) if b2 >= 0xA0 => {
                        return Some((pos, decode_surrogate(b2, b3)))
                    }
                    _ => pos += 3
                }
            } else if b < 0xF0 {
                // other 3-byte sequence
                iter.next();
                iter.next();
                pos += 3;
            } else {
                // 4-byte sequence
                iter.next();
                iter.next();
                iter.next();
                pos += 4;
            }
        }
    }

    /// If the string ends with a lone lead surrogate
    /// (encoded ED A0..AF xx), return its value.
    #[inline]
    fn final_lead_surrogate(&self) -> Option<u16> {
        let len = self.len();
        if len < 3 {
            return None
        }
        let seq = &self.bytes[len - 3..];
        if seq[0] == 0xED && 0xA0 <= seq[1] && seq[1] <= 0xAF {
            Some(decode_surrogate(seq[1], seq[2]))
        } else {
            None
        }
    }

    /// If the string starts with a lone trail surrogate
    /// (encoded ED B0..BF xx), return its value.
    #[inline]
    fn initial_trail_surrogate(&self) -> Option<u16> {
        let len = self.len();
        if len < 3 {
            return None
        }
        let seq = &self.bytes[..3];
        if seq[0] == 0xED && 0xB0 <= seq[1] && seq[1] <= 0xBF {
            Some(decode_surrogate(seq[1], seq[2]))
        } else {
            None
        }
    }
}

#[inline]
fn decode_surrogate(second_byte: u8, third_byte: u8) -> u16 {
    // The first byte is assumed to be 0xED
    0xD800 | (second_byte as u16 & 0x3F) << 6 | third_byte as u16 & 0x3F
}

#[inline]
fn decode_surrogate_pair(lead: u16, trail: u16) -> char {
    let code_point = 0x10000 + (((lead as u32 - 0xD800) << 10) | (trail as u32 - 0xDC00));
    // SAFETY-relevant: for lead in D800..=DBFF and trail in DC00..=DFFF
    // the formula yields 0x10000..=0x10FFFF, which is a valid `char`.
    unsafe { transmute(code_point) }
}

/// Iterator for the code points of a WTF-8 string.
///
/// Created with the method `.code_points()`.
#[derive(Clone)]
pub struct Wtf8CodePoints<'a> {
    bytes: slice::Iter<'a, u8>
}

impl<'a> Iterator for Wtf8CodePoints<'a> {
    type Item = CodePoint;

    #[inline]
    fn next(&mut self) -> Option<CodePoint> {
        match not_quite_std::next_code_point(&mut self.bytes) {
            None => None,
            Some(value) => {
                // Wtf8 invariant says `value` is a valid code point
                unsafe { Some(CodePoint::from_u32_unchecked(value)) }
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // A code point is 1 to 4 WTF-8 bytes.
        let (len, _) = self.bytes.size_hint();
        (len.saturating_add(3) / 4, Some(len))
    }
}

#[derive(Clone)]
pub struct IllFormedUtf16CodeUnits<'a> {
    code_points: Wtf8CodePoints<'a>,
    // Pending trail unit of a surrogate pair already started (0 = none).
    extra: u16
}

impl<'a> Iterator for IllFormedUtf16CodeUnits<'a> {
    type Item = u16;

    #[inline]
    fn next(&mut self) -> Option<u16> {
        not_quite_std::next_utf16_code_unit(self)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let (low, high) = self.code_points.size_hint();
        // every code point gets either one u16 or two u16,
        // so this iterator is between 1 or 2 times as
        // long as the underlying iterator.
// NOTE(review): the first three lines are the tail of a method whose opening
// lies above this excerpt; they are reproduced unchanged.
        (low, high.and_then(|n| n.checked_mul(2)))
    }
}

/// Allow `Wtf8Buf == &Wtf8` comparisons by deferring to slice equality.
impl<'a> PartialEq<&'a Wtf8> for Wtf8Buf {
    fn eq(&self, other: &&Wtf8) -> bool {
        **self == **other
    }
}

/// Allow `&Wtf8 == Wtf8Buf` comparisons by deferring to slice equality.
impl<'a> PartialEq<Wtf8Buf> for &'a Wtf8 {
    fn eq(&self, other: &Wtf8Buf) -> bool {
        **self == **other
    }
}

impl hash::Hash for CodePoint {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.value.hash(state)
    }
}

/// A `Wtf8Buf` hashes exactly like the `Wtf8` slice it dereferences to, so
/// equal owned/borrowed values produce equal hashes.
impl hash::Hash for Wtf8Buf {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        Wtf8::hash(self, state)
    }
}

impl hash::Hash for Wtf8 {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        state.write(&self.bytes);
        // Trailing sentinel byte after the raw bytes, mirroring how the
        // standard library hashes `str`, so adjacent hashed fields cannot
        // collide by concatenation.
        0xfeu8.hash(state)
    }
}

#[cfg(test)]
mod tests {
    use alloc::format;
    use alloc::vec;
    use core::mem::transmute;
    use super::*;

    #[test]
    fn code_point_from_u32() {
        assert!(CodePoint::from_u32(0).is_some());
        // Unlike `char`, a CodePoint may be a surrogate...
        assert!(CodePoint::from_u32(0xD800).is_some());
        assert!(CodePoint::from_u32(0x10FFFF).is_some());
        // ...but must still lie inside the Unicode range.
        assert!(CodePoint::from_u32(0x110000).is_none());
    }

    #[test]
    fn code_point_to_u32() {
        fn c(value: u32) -> CodePoint {
            CodePoint::from_u32(value).unwrap()
        }
        assert_eq!(c(0).to_u32(), 0);
        assert_eq!(c(0xD800).to_u32(), 0xD800);
        assert_eq!(c(0x10FFFF).to_u32(), 0x10FFFF);
    }

    #[test]
    fn code_point_from_char() {
        assert_eq!(CodePoint::from_char('a').to_u32(), 0x61);
        assert_eq!(CodePoint::from_char('💩').to_u32(), 0x1F4A9);
    }

    #[test]
    fn code_point_to_string() {
        assert_eq!(format!("{:?}", CodePoint::from_char('a')), "U+0061");
        assert_eq!(format!("{:?}", CodePoint::from_char('💩')), "U+1F4A9");
    }

    #[test]
    fn code_point_to_char() {
        fn c(value: u32) -> CodePoint {
            CodePoint::from_u32(value).unwrap()
        }
        assert_eq!(c(0x61).to_char(), Some('a'));
        assert_eq!(c(0x1F4A9).to_char(), Some('💩'));
        // Surrogates have no `char` representation.
        assert_eq!(c(0xD800).to_char(), None);
    }

    #[test]
    fn code_point_to_char_lossy() {
        fn c(value: u32) -> CodePoint {
            CodePoint::from_u32(value).unwrap()
        }
        assert_eq!(c(0x61).to_char_lossy(), 'a');
        assert_eq!(c(0x1F4A9).to_char_lossy(), '💩');
        // Surrogates degrade to U+FFFD REPLACEMENT CHARACTER.
        assert_eq!(c(0xD800).to_char_lossy(), '\u{FFFD}');
    }

    #[test]
    fn wtf8buf_new() {
        assert_eq!(Wtf8Buf::new().bytes, b"");
    }

    #[test]
    fn wtf8buf_from_str() {
        assert_eq!(Wtf8Buf::from_str("").bytes, b"");
        assert_eq!(Wtf8Buf::from_str("aé 💩").bytes, b"a\xC3\xA9 \xF0\x9F\x92\xA9");
    }

    #[test]
    fn wtf8buf_from_string() {
        assert_eq!(Wtf8Buf::from_string(String::from("")).bytes, b"");
        assert_eq!(Wtf8Buf::from_string(String::from("aé 💩")).bytes,
                   b"a\xC3\xA9 \xF0\x9F\x92\xA9");
    }

    #[test]
    fn wtf8buf_from_ill_formed_utf16() {
        assert_eq!(Wtf8Buf::from_ill_formed_utf16(&[]).bytes, b"");
        // The unpaired lead 0xD83D is kept as a 3-byte surrogate encoding;
        // the paired 0xD83D 0xDCA9 becomes a real 4-byte code point.
        assert_eq!(Wtf8Buf::from_ill_formed_utf16(
                      &[0x61, 0xE9, 0x20, 0xD83D, 0xD83D, 0xDCA9]).bytes,
                   b"a\xC3\xA9 \xED\xA0\xBD\xF0\x9F\x92\xA9");
    }

    #[test]
    fn wtf8buf_push_str() {
        let mut string = Wtf8Buf::new();
        assert_eq!(string.bytes, b"");
        string.push_str("aé 💩");
        assert_eq!(string.bytes, b"a\xC3\xA9 \xF0\x9F\x92\xA9");
    }

    #[test]
    fn wtf8buf_push_char() {
        let mut string = Wtf8Buf::from_str("aé ");
        assert_eq!(string.bytes, b"a\xC3\xA9 ");
        string.push_char('💩');
        assert_eq!(string.bytes, b"a\xC3\xA9 \xF0\x9F\x92\xA9");
    }

    #[test]
    fn wtf8buf_push() {
        let mut string = Wtf8Buf::from_str("aé ");
        assert_eq!(string.bytes, b"a\xC3\xA9 ");
        string.push(CodePoint::from_char('💩'));
        assert_eq!(string.bytes, b"a\xC3\xA9 \xF0\x9F\x92\xA9");

        fn c(value: u32) -> CodePoint {
            CodePoint::from_u32(value).unwrap()
        }

        // Pushing a trail surrogate right after a lead surrogate must fuse
        // them into a single 4-byte code point.
        let mut string = Wtf8Buf::new();
        string.push(c(0xD83D)); // lead
        string.push(c(0xDCA9)); // trail
        assert_eq!(string.bytes, b"\xF0\x9F\x92\xA9"); // Magic!

        // Every other adjacency keeps the surrogates encoded separately.
        let mut string = Wtf8Buf::new();
        string.push(c(0xD83D)); // lead
        string.push(c(0x20)); // not surrogate
        string.push(c(0xDCA9)); // trail
        assert_eq!(string.bytes, b"\xED\xA0\xBD \xED\xB2\xA9");

        let mut string = Wtf8Buf::new();
        string.push(c(0xD800)); // lead
        string.push(c(0xDBFF)); // lead
        assert_eq!(string.bytes, b"\xED\xA0\x80\xED\xAF\xBF");

        let mut string = Wtf8Buf::new();
        string.push(c(0xD800)); // lead
        string.push(c(0xE000)); // not surrogate
        assert_eq!(string.bytes, b"\xED\xA0\x80\xEE\x80\x80");

        let mut string = Wtf8Buf::new();
        string.push(c(0xD7FF)); // not surrogate
        string.push(c(0xDC00)); // trail
        assert_eq!(string.bytes, b"\xED\x9F\xBF\xED\xB0\x80");

        let mut string = Wtf8Buf::new();
        string.push(c(0x61)); // not surrogate, < 3 bytes
        string.push(c(0xDC00)); // trail
        assert_eq!(string.bytes, b"\x61\xED\xB0\x80");

        let mut string = Wtf8Buf::new();
        string.push(c(0xDC00)); // trail
        assert_eq!(string.bytes, b"\xED\xB0\x80");
    }

    #[test]
    fn wtf8buf_push_wtf8() {
        let mut string = Wtf8Buf::from_str("aé");
        assert_eq!(string.bytes, b"a\xC3\xA9");
        string.push_wtf8(Wtf8::from_str(" 💩"));
        assert_eq!(string.bytes, b"a\xC3\xA9 \xF0\x9F\x92\xA9");

        fn w(value: &[u8]) -> &Wtf8 {
            unsafe { transmute(value) }
        }

        // Concatenating a lead-surrogate tail with a trail-surrogate head
        // must fuse them into one 4-byte code point.
        let mut string = Wtf8Buf::new();
        string.push_wtf8(w(b"\xED\xA0\xBD")); // lead
        string.push_wtf8(w(b"\xED\xB2\xA9")); // trail
        assert_eq!(string.bytes, b"\xF0\x9F\x92\xA9"); // Magic!

        let mut string = Wtf8Buf::new();
        string.push_wtf8(w(b"\xED\xA0\xBD")); // lead
        string.push_wtf8(w(b" ")); // not surrogate
        string.push_wtf8(w(b"\xED\xB2\xA9")); // trail
        assert_eq!(string.bytes, b"\xED\xA0\xBD \xED\xB2\xA9");

        let mut string = Wtf8Buf::new();
        string.push_wtf8(w(b"\xED\xA0\x80")); // lead
        string.push_wtf8(w(b"\xED\xAF\xBF")); // lead
        assert_eq!(string.bytes, b"\xED\xA0\x80\xED\xAF\xBF");

        let mut string = Wtf8Buf::new();
        string.push_wtf8(w(b"\xED\xA0\x80")); // lead
        string.push_wtf8(w(b"\xEE\x80\x80")); // not surrogate
        assert_eq!(string.bytes, b"\xED\xA0\x80\xEE\x80\x80");

        let mut string = Wtf8Buf::new();
        string.push_wtf8(w(b"\xED\x9F\xBF")); // not surrogate
        string.push_wtf8(w(b"\xED\xB0\x80")); // trail
        assert_eq!(string.bytes, b"\xED\x9F\xBF\xED\xB0\x80");

        let mut string = Wtf8Buf::new();
        string.push_wtf8(w(b"a")); // not surrogate, < 3 bytes
        string.push_wtf8(w(b"\xED\xB0\x80")); // trail
        assert_eq!(string.bytes, b"\x61\xED\xB0\x80");

        let mut string = Wtf8Buf::new();
        string.push_wtf8(w(b"\xED\xB0\x80")); // trail
        assert_eq!(string.bytes, b"\xED\xB0\x80");
    }

    #[test]
    fn wtf8buf_truncate() {
        let mut string = Wtf8Buf::from_str("aé");
        string.truncate(1);
        assert_eq!(string.bytes, b"a");
    }

    #[test]
    #[should_panic]
    fn wtf8buf_truncate_fail_code_point_boundary() {
        let mut string = Wtf8Buf::from_str("aé");
        // Byte 2 is in the middle of the 2-byte 'é'.
        string.truncate(2);
    }

    #[test]
    #[should_panic]
    fn wtf8buf_truncate_fail_longer() {
        let mut string = Wtf8Buf::from_str("aé");
        string.truncate(4);
    }

    #[test]
    fn wtf8buf_into_string() {
        let mut string = Wtf8Buf::from_str("aé 💩");
        assert_eq!(string.clone().into_string(), Ok(String::from("aé 💩")));
        // An unpaired surrogate makes the buffer non-UTF-8; the buffer is
        // handed back in the error.
        string.push(CodePoint::from_u32(0xD800).unwrap());
        assert_eq!(string.clone().into_string(), Err(string));
    }

    #[test]
    fn wtf8buf_into_string_lossy() {
        let mut string = Wtf8Buf::from_str("aé 💩");
        assert_eq!(string.clone().into_string_lossy(), String::from("aé 💩"));
        string.push(CodePoint::from_u32(0xD800).unwrap());
        assert_eq!(string.clone().into_string_lossy(), String::from("aé 💩�"));
    }

    #[test]
    fn wtf8buf_from_iterator() {
        fn f(values: &[u32]) -> Wtf8Buf {
            values.iter().map(|&c| CodePoint::from_u32(c).unwrap()).collect::<Wtf8Buf>()
        }
        assert_eq!(f(&[0x61, 0xE9, 0x20, 0x1F4A9]).bytes, b"a\xC3\xA9 \xF0\x9F\x92\xA9");

        // Collected surrogate pairs fuse exactly like `push` does.
        assert_eq!(f(&[0xD83D, 0xDCA9]).bytes, b"\xF0\x9F\x92\xA9"); // Magic!
        assert_eq!(f(&[0xD83D, 0x20, 0xDCA9]).bytes, b"\xED\xA0\xBD \xED\xB2\xA9");
        assert_eq!(f(&[0xD800, 0xDBFF]).bytes, b"\xED\xA0\x80\xED\xAF\xBF");
        assert_eq!(f(&[0xD800, 0xE000]).bytes, b"\xED\xA0\x80\xEE\x80\x80");
        assert_eq!(f(&[0xD7FF, 0xDC00]).bytes, b"\xED\x9F\xBF\xED\xB0\x80");
        assert_eq!(f(&[0x61, 0xDC00]).bytes, b"\x61\xED\xB0\x80");
        assert_eq!(f(&[0xDC00]).bytes, b"\xED\xB0\x80");
    }

    #[test]
    fn wtf8buf_extend() {
        fn e(initial: &[u32], extended: &[u32]) -> Wtf8Buf {
            fn c(value: &u32) -> CodePoint {
                CodePoint::from_u32(*value).unwrap()
            }
            let mut string = initial.iter().map(c).collect::<Wtf8Buf>();
            string.extend(extended.iter().map(c));
            string
        }

        assert_eq!(e(&[0x61, 0xE9], &[0x20, 0x1F4A9]).bytes, b"a\xC3\xA9 \xF0\x9F\x92\xA9");

        // A pair split across the extend boundary must still fuse.
        assert_eq!(e(&[0xD83D], &[0xDCA9]).bytes, b"\xF0\x9F\x92\xA9"); // Magic!
        assert_eq!(e(&[0xD83D, 0x20], &[0xDCA9]).bytes, b"\xED\xA0\xBD \xED\xB2\xA9");
        assert_eq!(e(&[0xD800], &[0xDBFF]).bytes, b"\xED\xA0\x80\xED\xAF\xBF");
        assert_eq!(e(&[0xD800], &[0xE000]).bytes, b"\xED\xA0\x80\xEE\x80\x80");
        assert_eq!(e(&[0xD7FF], &[0xDC00]).bytes, b"\xED\x9F\xBF\xED\xB0\x80");
        assert_eq!(e(&[0x61], &[0xDC00]).bytes, b"\x61\xED\xB0\x80");
        assert_eq!(e(&[], &[0xDC00]).bytes, b"\xED\xB0\x80");
    }

    #[test]
    fn wtf8buf_debug() {
        let mut string = Wtf8Buf::from_str("aé 💩");
        string.push(CodePoint::from_u32(0xD800).unwrap());
        assert_eq!(format!("{:?}", string), r#""aé 💩\u{D800}""#);
    }

    #[test]
    fn wtf8buf_as_slice() {
        assert_eq!(Wtf8Buf::from_str("aé"), Wtf8::from_str("aé"));
    }

    #[test]
    fn wtf8_debug() {
        let mut string = Wtf8Buf::from_str("aé 💩");
        string.push(CodePoint::from_u32(0xD800).unwrap());
        assert_eq!(format!("{:?}", &*string), r#""aé 💩\u{D800}""#);
    }

    #[test]
    fn wtf8_from_str() {
        assert_eq!(&Wtf8::from_str("").bytes, b"");
        assert_eq!(&Wtf8::from_str("aé 💩").bytes, b"a\xC3\xA9 \xF0\x9F\x92\xA9");
    }

    #[test]
    fn wtf8_len() {
        assert_eq!(Wtf8::from_str("").len(), 0);
        assert_eq!(Wtf8::from_str("aé 💩").len(), 8);
    }

    #[test]
    fn wtf8_slice() {
        assert_eq!(&Wtf8::from_str("aé 💩").slice(1, 4).bytes, b"\xC3\xA9 ");
    }

    #[test]
    #[should_panic]
    fn wtf8_slice_not_code_point_boundary() {
        Wtf8::from_str("aé 💩").slice(2, 4);
    }

    #[test]
    fn wtf8_slice_from() {
        assert_eq!(&Wtf8::from_str("aé 💩").slice_from(1).bytes,
                   b"\xC3\xA9 \xF0\x9F\x92\xA9");
    }

    #[test]
    #[should_panic]
    fn wtf8_slice_from_not_code_point_boundary() {
        Wtf8::from_str("aé 💩").slice_from(2);
    }

    #[test]
    fn wtf8_slice_to() {
        assert_eq!(&Wtf8::from_str("aé 💩").slice_to(4).bytes, b"a\xC3\xA9 ");
    }

    #[test]
    #[should_panic]
    fn wtf8_slice_to_not_code_point_boundary() {
        // FIX: this test previously called `slice_from(5)`, so `slice_to` was
        // never exercised here. Byte 5 is inside the 4-byte '💩', so
        // `slice_to(5)` must panic.
        Wtf8::from_str("aé 💩").slice_to(5);
    }

    #[test]
    fn wtf8_ascii_byte_at() {
        let slice = Wtf8::from_str("aé 💩");
        assert_eq!(slice.ascii_byte_at(0), b'a');
        // Non-ASCII positions report 0xFF.
        assert_eq!(slice.ascii_byte_at(1), b'\xFF');
        assert_eq!(slice.ascii_byte_at(2), b'\xFF');
        assert_eq!(slice.ascii_byte_at(3), b' ');
        assert_eq!(slice.ascii_byte_at(4), b'\xFF');
    }

    #[test]
    fn wtf8_code_points() {
        fn c(value: u32) -> CodePoint {
            CodePoint::from_u32(value).unwrap()
        }
        fn cp(string: &Wtf8Buf) -> Vec<Option<char>> {
            string.code_points().map(|c| c.to_char()).collect::<Vec<_>>()
        }
        let mut string = Wtf8Buf::from_str("é ");
        assert_eq!(cp(&string), vec![Some('é'), Some(' ')]);
        string.push(c(0xD83D));
        assert_eq!(cp(&string), vec![Some('é'), Some(' '), None]);
        string.push(c(0xDCA9));
        assert_eq!(cp(&string), vec![Some('é'), Some(' '), Some('💩')]);
    }

    #[test]
    fn wtf8_as_str() {
        assert_eq!(Wtf8::from_str("").as_str(), Some(""));
        assert_eq!(Wtf8::from_str("aé 💩").as_str(), Some("aé 💩"));
        let mut string = Wtf8Buf::new();
        string.push(CodePoint::from_u32(0xD800).unwrap());
        assert_eq!(string.as_str(), None);
    }

    #[test]
    fn wtf8_to_string_lossy() {
        assert_eq!(Wtf8::from_str("").to_string_lossy(), Cow::Borrowed(""));
        assert_eq!(Wtf8::from_str("aé 💩").to_string_lossy(), Cow::Borrowed("aé 💩"));
        let mut string = Wtf8Buf::from_str("aé 💩");
        string.push(CodePoint::from_u32(0xD800).unwrap());
        assert_eq!(string.to_string_lossy(), {
            let o: Cow<str> = Cow::Owned(String::from("aé 💩�"));
            o
        });
    }

    #[test]
    fn wtf8_to_ill_formed_utf16() {
        let mut string = Wtf8Buf::from_str("aé ");
        string.push(CodePoint::from_u32(0xD83D).unwrap());
        string.push_char('💩');
        assert_eq!(string.to_ill_formed_utf16().collect::<Vec<_>>(),
                   vec![0x61, 0xE9, 0x20, 0xD83D, 0xD83D, 0xDCA9]);
    }
}
const DEFAULT_PER_PAGE: i64 = 10; const DEFAULT_PAGE: i64 = 1; use actix_web::{error::ResponseError, HttpResponse}; use failure::Fail; use serde::{Deserialize, Serialize}; #[derive(Deserialize, Debug, Clone, Default)] pub struct Params { #[serde(default = "default_page")] pub page: i64, #[serde(default = "default_per_page")] pub per_page: i64, } fn default_page() -> i64 { DEFAULT_PAGE } fn default_per_page() -> i64 { DEFAULT_PER_PAGE } #[derive(Debug, Fail, Serialize, Default)] pub struct ValidationErrors { #[serde(skip_serializing_if = "Vec::is_empty")] page: Vec<String>, #[serde(skip_serializing_if = "Vec::is_empty")] per_page: Vec<String>, } impl std::fmt::Display for ValidationErrors { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "{:?}", self) } } impl ResponseError for ValidationErrors { fn error_response(&self) -> HttpResponse { HttpResponse::BadRequest().json(self) } } impl ValidationErrors { fn is_empty(&self) -> bool { self.page.is_empty() && self.per_page.is_empty() } } #[cfg_attr(test, derive(Debug))] pub struct Data { pub page: i64, pub per_page: i64, } impl Params { pub fn validate(&self) -> Result<Data, ValidationErrors> { let Self { page, per_page } = self; let mut errors = ValidationErrors::default(); if page.is_negative() { errors.page.push("Must be a positive number".to_string()); } if per_page.is_negative() { errors .per_page .push("Must be a positive number".to_string()); } if errors.is_empty() { Ok(Data { page: *page, per_page: *per_page, }) } else { Err(errors) } } } #[cfg(test)] mod tests { use super::*; fn errors_json(params: Params) -> String { serde_json::to_string(&params.validate().unwrap_err()).expect("Failed to convert to json") } #[test] fn it_is_ok_when_valid() { let params = Params { page: 0, per_page: 10 }; assert!(params.validate().is_ok()); } #[test] fn data_is_correct_when_valid() { let params = Params { page: 3, per_page: 10 }; let data = params.validate().expect("is expected to be valid"); 
assert_eq!(3, data.page); assert_eq!(10, data.per_page); } #[test] fn invalid_when_page_number_is_negative() { let params = Params { page: -1, per_page: 123 }; assert_eq!("{\"page\":[\"Must be a positive number\"]}", errors_json(params)); } #[test] fn invalid_when_per_page_is_negative() { let params = Params { page: 0, per_page: -1 }; assert_eq!("{\"per_page\":[\"Must be a positive number\"]}", errors_json(params)); } }
use std::cmp; use std::ops::{AddAssign, Neg}; use ieee754::Ieee754; use num::{One, Zero}; use openssl::rand::rand_bytes; #[cfg(feature="use-mpfr")] use rug::{Float, rand::{ThreadRandGen, ThreadRandState}}; pub fn fill_bytes(mut buffer: &mut [u8]) -> Fallible<()> { if let Err(e) = rand_bytes(&mut buffer) { fallible!(FailedFunction, "OpenSSL error: {:?}", e) } else { Ok(()) } } use crate::error::Fallible; #[cfg(not(feature="use-mpfr"))] use statrs::function::erf; #[cfg(not(feature="use-mpfr"))] use rand::Rng; #[cfg(feature="use-mpfr")] struct GeneratorOpenSSL; #[cfg(feature="use-mpfr")] impl ThreadRandGen for GeneratorOpenSSL { fn gen(&mut self) -> u32 { let mut buffer = [0u8; 4]; // impossible not to panic here // cannot ignore errors with .ok(), because the buffer will remain 0 fill_bytes(&mut buffer).unwrap(); u32::from_ne_bytes(buffer) } } // SAMPLERS pub trait SampleBernoulli: Sized { fn sample_standard_bernoulli() -> Fallible<Self>; /// Sample a single bit with arbitrary probability of success /// /// Uses only an unbiased source of coin flips. /// The strategy for doing this with 2 flips in expectation is described [here](https://web.archive.org/web/20160418185834/https://amakelov.wordpress.com/2013/10/10/arbitrarily-biasing-a-coin-in-2-expected-tosses/). /// /// # Arguments /// * `prob`- The desired probability of success (bit = 1). 
/// * `enforce_constant_time` - Whether or not to enforce the algorithm to run in constant time /// /// # Return /// A bit that is 1 with probability "prob" /// /// # Examples /// /// ``` /// // returns a bit with Pr(bit = 1) = 0.7 /// use opendp::samplers::SampleBernoulli; /// let n = bool::sample_bernoulli(0.7, false); /// # use opendp::error::ExplainUnwrap; /// # n.unwrap_test(); /// ``` /// ```should_panic /// // fails because 1.3 not a valid probability /// use opendp::samplers::SampleBernoulli; /// let n = bool::sample_bernoulli(1.3, false); /// # use opendp::error::ExplainUnwrap; /// # n.unwrap_test(); /// ``` /// ```should_panic /// // fails because -0.3 is not a valid probability /// use opendp::samplers::SampleBernoulli; /// let n = bool::sample_bernoulli(-0.3, false); /// # use opendp::error::ExplainUnwrap; /// # n.unwrap_test(); /// ``` fn sample_bernoulli(prob: f64, enforce_constant_time: bool) -> Fallible<Self>; } impl SampleBernoulli for bool { fn sample_standard_bernoulli() -> Fallible<Self> { let mut buffer = [0u8; 1]; fill_bytes(&mut buffer)?; Ok(buffer[0] & 1 == 1) } fn sample_bernoulli(prob: f64, enforce_constant_time: bool) -> Fallible<Self> { // ensure that prob is a valid probability if !(0.0..=1.0).contains(&prob) {return fallible!(FailedFunction, "probability is not within [0, 1]")} // decompose probability into mantissa and exponent integers to quickly identify the value in the first_heads_index let (_sign, exponent, mantissa) = prob.decompose_raw(); // repeatedly flip fair coin (up to 1023 times) and identify index (0-based) of first heads let first_heads_index = sample_i10_geometric(enforce_constant_time)?; // if prob == 1., return after retrieving censored_specific_geom, to protect constant time if exponent == 1023 { return Ok(true) } // number of leading zeros in binary representation of prob // cast is non-saturating because exponent only uses first 11 bits // exponent is bounded within [0, 1022] by check for valid probability let 
num_leading_zeros = 1022_i16 - exponent as i16; // 0 is the most significant/leftmost implicit bit in the mantissa/fraction/significand // 52 is the least significant/rightmost Ok(match first_heads_index - num_leading_zeros { // index into the leading zeros of the binary representation i if i < 0 => false, // bit index 0 is implicitly set in ieee-754 when the exponent is nonzero i if i == 0 => exponent != 0, // all other digits out-of-bounds are not float-approximated/are-implicitly-zero i if i > 52 => false, // retrieve the bit at `i` slots shifted from the left i => mantissa & (1_u64 << (52 - i as usize)) != 0 }) } } pub trait SampleRademacher: Sized { fn sample_standard_rademacher() -> Fallible<Self>; fn sample_rademacher(prob: f64, enforce_constant_time: bool) -> Fallible<Self>; } impl<T: Neg<Output=T> + One> SampleRademacher for T { fn sample_standard_rademacher() -> Fallible<Self> { Ok(if bool::sample_standard_bernoulli()? {T::one()} else {T::one().neg()}) } fn sample_rademacher(prob: f64, enforce_constant_time: bool) -> Fallible<Self> { Ok(if bool::sample_bernoulli(prob, enforce_constant_time)? {T::one()} else {T::one().neg()}) } } pub trait SampleUniform: Sized { /// Returns a random sample from Uniform[0,1). /// /// This algorithm is taken from [Mironov (2012)](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.366.5957&rep=rep1&type=pdf) /// and is important for making some of the guarantees in the paper. /// /// The idea behind the uniform sampling is to first sample a "precision band". /// Each band is a range of floating point numbers with the same level of arithmetic precision /// and is situated between powers of two. /// A band is sampled with probability relative to the unit of least precision using the Geometric distribution. /// That is, the uniform sampler will generate the band [1/2,1) with probability 1/2, [1/4,1/2) with probability 1/4, /// and so on. 
/// /// Once the precision band has been selected, floating numbers numbers are generated uniformly within the band /// by generating a 52-bit mantissa uniformly at random. /// /// # Arguments /// /// `min`: f64 minimum of uniform distribution (inclusive) /// `max`: f64 maximum of uniform distribution (non-inclusive) /// /// # Return /// Random draw from Unif[min, max). /// /// # Example /// ``` /// // valid draw from Unif[0,1) /// use opendp::samplers::SampleUniform; /// let unif = f64::sample_standard_uniform(false); /// # use opendp::error::ExplainUnwrap; /// # unif.unwrap_test(); /// ``` fn sample_standard_uniform(enforce_constant_time: bool) -> Fallible<Self>; } impl SampleUniform for f64 { fn sample_standard_uniform(enforce_constant_time: bool) -> Fallible<Self> { // A saturated mantissa with implicit bit is ~2 let exponent: i16 = -(1 + sample_i10_geometric(enforce_constant_time)?); let mantissa: u64 = { let mut mantissa_buffer = [0u8; 8]; // mantissa bit index zero is implicit fill_bytes(&mut mantissa_buffer[1..])?; // limit the buffer to 52 bits mantissa_buffer[1] %= 16; // convert mantissa to integer u64::from_be_bytes(mantissa_buffer) }; // Generate uniform random number from [0,1) Ok(Self::recompose(false, exponent, mantissa)) } } impl SampleUniform for f32 { fn sample_standard_uniform(enforce_constant_time: bool) -> Fallible<Self> { f64::sample_standard_uniform(enforce_constant_time).map(|v| v as f32) } } /// Return sample from a censored Geometric distribution with parameter p=0.5 without calling to sample_bit_prob. /// /// The algorithm generates 1023 bits uniformly at random and returns the /// index of the first bit with value 1. If all 1023 bits are 0, then /// the algorithm acts as if the last bit was a 1 and returns 1022. /// /// This is a less general version of the sample_geometric function. 
/// The major difference is that this function does not /// call sample_geometric itself (whereas sample_geometric does), so having this more specialized /// version allows us to avoid an infinite dependence loop. fn sample_i10_geometric(enforce_constant_time: bool) -> Fallible<i16> { Ok(if enforce_constant_time { let mut buffer = vec![0_u8; 128]; fill_bytes(&mut buffer)?; cmp::min(buffer.into_iter().enumerate() // ignore samples that contain no events .filter(|(_, sample)| sample > &0) // compute the index of the smallest event in the batch .map(|(i, sample)| 8 * i + sample.leading_zeros() as usize) // retrieve the smallest index .min() // return 1022 if no events occurred (slight dp violation w.p. ~2^-52) .unwrap_or(1022) as i16, 1022) } else { // retrieve up to 128 bytes, each containing 8 trials for i in 0..128 { let mut buffer = vec![0_u8; 1]; fill_bytes(&mut buffer)?; if buffer[0] > 0 { return Ok(cmp::min(i * 8 + buffer[0].leading_zeros() as i16, 1022)) } } 1022 }) } pub trait SampleGeometric: Sized { /// Sample from the censored geometric distribution with parameter "prob" and maximum /// number of trials "max_trials". /// /// # Arguments /// * `prob` - Parameter for the geometric distribution, the probability of success on any given trials. /// * `max_trials` - The maximum number of trials allowed. /// * `enforce_constant_time` - Whether or not to enforce the algorithm to run in constant time; if true, /// it will always run for "max_trials" trials. /// /// # Return /// A draw from the censored geometric distribution. 
/// /// # Example /// ``` /// use opendp::samplers::SampleGeometric; /// let geom = u8::sample_geometric(0.1, 20, false); /// # use opendp::error::ExplainUnwrap; /// # geom.unwrap_test(); /// ``` fn sample_geometric(prob: f64, max_trials: Self, enforce_constant_time: bool) -> Fallible<Self>; } impl<T: Zero + One + PartialOrd + AddAssign + Clone> SampleGeometric for T { fn sample_geometric(prob: f64, max_trials: Self, enforce_constant_time: bool) -> Fallible<Self> { // ensure that prob is a valid probability if !(0.0..=1.0).contains(&prob) {return fallible!(FailedFunction, "probability is not within [0, 1]")} let mut n_trials: Self = T::zero(); let mut geom_return: Self = T::zero(); // generate bits until we find a 1 // if enforcing the runtime of the algorithm to be constant, the while loop // continues after the 1 is found and just stores the first location of a 1 bit. while n_trials < max_trials { n_trials += T::one(); // If we haven't seen a 1 yet, set the return to the current number of trials if bool::sample_bernoulli(prob, enforce_constant_time)? && geom_return.is_zero() { geom_return = n_trials.clone(); if !enforce_constant_time { return Ok(geom_return); } } } // set geom_return to max if we never saw a bit equaling 1 if geom_return.is_zero() { geom_return = max_trials; // could also set this equal to n_trials - 1. } Ok(geom_return) } } pub trait SampleLaplace: SampleRademacher + Sized { fn sample_laplace(shift: Self, scale: Self, enforce_constant_time: bool) -> Fallible<Self>; } pub trait SampleGaussian: Sized { /// Generates a draw from a Gaussian(loc, scale) distribution using the MPFR library. /// /// If shift = 0 and scale = 1, sampling is done in a way that respects exact rounding. /// Otherwise, the return will be the result of a composition of two operations that /// respect exact rounding (though the result will not necessarily). /// /// # Arguments /// * `shift` - The expectation of the Gaussian distribution. 
/// * `scale` - The scaling parameter (standard deviation) of the Gaussian distribution. /// * `enforce_constant_time` - Force underlying computations to run in constant time. /// /// # Return /// Draw from Gaussian(loc, scale) /// /// # Example /// ``` /// use opendp::samplers::SampleGaussian; /// let gaussian = f64::sample_gaussian(0.0, 1.0, false); /// ``` fn sample_gaussian(shift: Self, scale: Self, enforce_constant_time: bool) -> Fallible<Self>; } pub trait MantissaDigits { const MANTISSA_DIGITS: u32; } impl MantissaDigits for f32 { const MANTISSA_DIGITS: u32 = f32::MANTISSA_DIGITS; } impl MantissaDigits for f64 { const MANTISSA_DIGITS: u32 = f64::MANTISSA_DIGITS; } #[cfg(feature = "use-mpfr")] pub trait CastRug: MantissaDigits + Sized { fn from_rug(v: Float) -> Self; fn into_rug(self) -> Float; } #[cfg(feature = "use-mpfr")] impl CastRug for f64 { fn from_rug(v: Float) -> Self { v.to_f64() } fn into_rug(self) -> Float { rug::Float::with_val(Self::MANTISSA_DIGITS, self) } } #[cfg(feature = "use-mpfr")] impl CastRug for f32 { fn from_rug(v: Float) -> Self { v.to_f32() } fn into_rug(self) -> Float { rug::Float::with_val(Self::MANTISSA_DIGITS, self) } } #[cfg(feature = "use-mpfr")] impl<T: CastRug + SampleRademacher> SampleLaplace for T { fn sample_laplace(shift: Self, scale: Self, enforce_constant_time: bool) -> Fallible<Self> { if enforce_constant_time { return fallible!(FailedFunction, "mpfr samplers do not support constant time execution") } let shift = shift.into_rug(); let scale = scale.into_rug() * T::sample_standard_rademacher()?.into_rug(); let standard_exponential_sample = { let mut rng = GeneratorOpenSSL {}; let mut state = ThreadRandState::new_custom(&mut rng); rug::Float::with_val(Self::MANTISSA_DIGITS, rug::Float::random_exp(&mut state)) }; Ok(Self::from_rug(standard_exponential_sample.mul_add(&scale, &shift))) } } #[cfg(not(feature = "use-mpfr"))] impl<T: num::Float + rand::distributions::uniform::SampleUniform + SampleRademacher> SampleLaplace for 
T { fn sample_laplace(shift: Self, scale: Self, _enforce_constant_time: bool) -> Fallible<Self> { let mut rng = rand::thread_rng(); let _1_ = T::from(1.0).unwrap(); let _2_ = T::from(2.0).unwrap(); let u: T = rng.gen_range(T::from(-0.5).unwrap(), T::from(0.5).unwrap()); Ok(shift - u.signum() * (_1_ - _2_ * u.abs()).ln() * scale) } } #[cfg(feature = "use-mpfr")] impl<T: CastRug> SampleGaussian for T { fn sample_gaussian(shift: Self, scale: Self, enforce_constant_time: bool) -> Fallible<Self> { if enforce_constant_time { return fallible!(FailedFunction, "mpfr samplers do not support constant time execution") } // initialize randomness let mut rng = GeneratorOpenSSL {}; let mut state = ThreadRandState::new_custom(&mut rng); // generate Gaussian(0,1) according to mpfr standard let gauss = rug::Float::with_val(Self::MANTISSA_DIGITS, Float::random_normal(&mut state)); // initialize floats within mpfr/rug let shift = shift.into_rug(); let scale = scale.into_rug(); Ok(Self::from_rug(gauss.mul_add(&scale, &shift))) } } #[cfg(not(feature = "use-mpfr"))] impl SampleGaussian for f64 { fn sample_gaussian(shift: Self, scale: Self, enforce_constant_time: bool) -> Fallible<Self> { let uniform_sample = f64::sample_standard_uniform(enforce_constant_time)?; Ok(shift + scale * std::f64::consts::SQRT_2 * erf::erfc_inv(2.0 * uniform_sample)) } } #[cfg(not(feature = "use-mpfr"))] impl SampleGaussian for f32 { fn sample_gaussian(shift: Self, scale: Self, enforce_constant_time: bool) -> Fallible<Self> { let uniform_sample = f64::sample_standard_uniform(enforce_constant_time)?; Ok(shift + scale * std::f32::consts::SQRT_2 * (erf::erfc_inv(2.0 * uniform_sample) as f32)) } }
use criterion::{criterion_group, criterion_main, Criterion}; const UPDATE_RATE: f32 = 1.0 / 60.0; const NUM_OBJECTS: usize = 1 << 13; #[macro_export] macro_rules! bench_euler { ($b: ident, ty => $t: ty, zero => $zero: expr) => {{ let accel_data = <$t as mathbench::RandomVec>::random_vec(0, NUM_OBJECTS); let mut vel_data: Vec<$t> = vec![$zero; NUM_OBJECTS]; let mut pos_data: Vec<$t> = vec![$zero; NUM_OBJECTS]; $b.iter(|| { let dt = UPDATE_RATE; for ((position, acceleration), velocity) in pos_data.iter_mut().zip(&accel_data).zip(&mut vel_data) { *velocity += *acceleration * dt; *position += *velocity * dt; } }) }}; } fn bench_euler_3d(c: &mut Criterion) { use criterion::Benchmark; c.bench( "euler 3d", Benchmark::new("glam", |b| { use glam::Vec3; bench_euler!(b, ty => Vec3, zero => Vec3::zero()) }) .with_function("cgmath", |b| { use cgmath::{prelude::*, Vector3}; bench_euler!(b, ty => Vector3<f32>, zero => Vector3::zero()) }) .with_function("nalgebra", |b| { use nalgebra::{zero, Vector3}; bench_euler!(b, ty => Vector3<f32>, zero => zero()); }) .with_function("euclid", |b| { use euclid::{UnknownUnit, Vector3D}; bench_euler!(b, ty => Vector3D<f32, UnknownUnit>, zero => Vector3D::zero()); }), ); } fn bench_euler_2d(c: &mut Criterion) { use criterion::Benchmark; c.bench( "euler 2d", Benchmark::new("glam", |b| { use glam::Vec2; bench_euler!(b, ty => Vec2, zero => Vec2::zero()) }) .with_function("cgmath", |b| { use cgmath::{prelude::*, Vector2}; bench_euler!(b, ty => Vector2<f32>, zero => Vector2::zero()) }) .with_function("nalgebra", |b| { use nalgebra::{zero, Vector2}; bench_euler!(b, ty => Vector2<f32>, zero => zero()); }) .with_function("euclid", |b| { use euclid::{UnknownUnit, Vector2D}; bench_euler!(b, ty => Vector2D<f32, UnknownUnit>, zero => Vector2D::zero()); }), ); } criterion_group!(benches, bench_euler_2d, bench_euler_3d,); criterion_main!(benches);
use crate::{ error::{MqttResponse, ServerError}, service::{App, Session}, }; use drogue_cloud_endpoint_common::sink::Sink as DownstreamSink; use ntex::router::Path; use ntex::util::{ByteString, Bytes}; use ntex_mqtt::{ types::QoS, v3, v5::{ self, codec::{Auth, ConnectAckReason, DisconnectReasonCode}, }, }; use std::{fmt::Debug, num::NonZeroU32}; pub async fn connect_v3<Io, S>( connect: v3::Handshake<Io>, app: App<S>, ) -> Result<v3::HandshakeAck<Io, Session<S>>, ServerError> where S: DownstreamSink, { match app.connect(Connect::V3(&connect)).await { Ok(session) => Ok(connect.ack(session, false)), Err(_) => Ok(connect.bad_username_or_pwd()), } } pub async fn connect_v5<Io, S>( connect: v5::Handshake<Io>, app: App<S>, ) -> Result<v5::HandshakeAck<Io, Session<S>>, ServerError> where S: DownstreamSink, { match app.connect(Connect::V5(&connect)).await { Ok(session) => Ok(connect.ack(session).with(|ack| { ack.retain_available = Some(false); ack.shared_subscription_available = Some(true); ack.subscription_identifiers_available = Some(true); ack.wildcard_subscription_available = Some(false); })), Err(_) => Ok(connect.failed(ConnectAckReason::BadUserNameOrPassword)), } } pub async fn publish_v3<S>( session: v3::Session<Session<S>>, publish: v3::Publish, ) -> Result<(), ServerError> where S: DownstreamSink, { session.publish(Publish::V3(&publish)).await } pub async fn publish_v5<S>( session: v5::Session<Session<S>>, publish: v5::Publish, ) -> Result<v5::PublishAck, ServerError> where S: DownstreamSink, { match session.publish(Publish::V5(&publish)).await { Ok(_) => Ok(publish.ack()), Err(err) => Ok(err.ack(publish.ack())), } } pub async fn control_v3<S>( session: v3::Session<Session<S>>, control: v3::ControlMessage, ) -> Result<v3::ControlResult, ServerError> where S: DownstreamSink, { match control { v3::ControlMessage::Ping(p) => Ok(p.ack()), v3::ControlMessage::Disconnect(d) => Ok(d.ack()), v3::ControlMessage::Subscribe(mut s) => { session.subscribe(Subscribe::V3(&mut 
s)).await?; Ok(s.ack()) } v3::ControlMessage::Unsubscribe(u) => { match session.unsubscribe(Unsubscribe::V3(&u)).await { Ok(_) => Ok(u.ack()), Err(err) => Err(err), } } v3::ControlMessage::Closed(c) => { session.closed().await?; Ok(c.ack()) } } } pub async fn control_v5<E: Debug, S>( session: v5::Session<Session<S>>, control: v5::ControlMessage<E>, ) -> Result<v5::ControlResult, ServerError> where S: DownstreamSink, { match control { v5::ControlMessage::Auth(a) => { // we don't do extended authentication (yet?) Ok(a.ack(Auth::default())) } v5::ControlMessage::Error(e) => Ok(e.ack(DisconnectReasonCode::UnspecifiedError)), v5::ControlMessage::ProtocolError(pe) => Ok(pe.ack()), v5::ControlMessage::Ping(p) => Ok(p.ack()), v5::ControlMessage::Disconnect(d) => Ok(d.ack()), v5::ControlMessage::Subscribe(mut s) => { session.subscribe(Subscribe::V5(&mut s)).await?; Ok(s.ack()) } v5::ControlMessage::Unsubscribe(mut u) => { session.unsubscribe(Unsubscribe::V5(&mut u)).await?; Ok(u.ack()) } v5::ControlMessage::Closed(c) => { session.closed().await?; Ok(c.ack()) } } } #[derive(Clone, Debug)] pub enum Sink { V3(v3::MqttSink), V5(v5::MqttSink), } impl Sink { pub fn close(&self) { match self { Self::V3(sink) => sink.close(), Self::V5(sink) => sink.close(), } } } pub enum Connect<'a, Io> { V3(&'a v3::Handshake<Io>), V5(&'a v5::Handshake<Io>), } impl<'a, Io> Connect<'a, Io> { /// Return "clean session" for v3 and "clean start" for v5. pub fn clean_session(&self) -> bool { match self { Self::V3(connect) => connect.packet().clean_session, Self::V5(connect) => connect.packet().clean_start, } } /// Return the MQTT sink. 
// NOTE(review): the three methods below are the tail of an `impl` block whose
// header (a v3/v5 protocol-version wrapper type) is defined earlier in this file.

    /// Returns a version-tagged sink for sending packets back to this client.
    pub fn sink(&self) -> Sink {
        match self {
            Self::V3(connect) => Sink::V3(connect.sink()),
            Self::V5(connect) => Sink::V5(connect.sink()),
        }
    }

    /// Returns the `(username, password)` pair from the CONNECT packet;
    /// either side may be absent.
    pub fn credentials(&self) -> (Option<&ByteString>, Option<&Bytes>) {
        match self {
            Self::V3(connect) => (
                connect.packet().username.as_ref(),
                connect.packet().password.as_ref(),
            ),
            Self::V5(connect) => (
                connect.packet().username.as_ref(),
                connect.packet().password.as_ref(),
            ),
        }
    }

    /// Returns the client identifier from the CONNECT packet.
    pub fn client_id(&self) -> &ByteString {
        match self {
            Self::V3(connect) => &connect.packet().client_id,
            Self::V5(connect) => &connect.packet().client_id,
        }
    }
}

/// Protocol-version-agnostic view of an incoming PUBLISH packet.
pub enum Publish<'a> {
    V3(&'a v3::Publish),
    V5(&'a v5::Publish),
}

impl<'a> Publish<'a> {
    /// Topic the message was published to.
    pub fn topic(&self) -> &Path<ByteString> {
        match self {
            Self::V3(publish) => publish.topic(),
            Self::V5(publish) => publish.topic(),
        }
    }

    /// Raw message payload bytes.
    pub fn payload(&self) -> &Bytes {
        match self {
            Self::V3(publish) => publish.payload(),
            Self::V5(publish) => publish.payload(),
        }
    }
}

/// Protocol-version-agnostic view of a SUBSCRIBE control packet.
pub enum Subscribe<'a> {
    V3(&'a mut v3::control::Subscribe),
    V5(&'a mut v5::control::Subscribe),
}

impl<'a> Subscribe<'a> {
    /// MQTT 5 user properties; always `None` for v3, which has no such field.
    pub fn user_properties(&self) -> Option<&v5::codec::UserProperties> {
        match self {
            Self::V3(_) => None,
            Self::V5(sub) => Some(&sub.packet().user_properties),
        }
    }
}

impl<'a> Subscribe<'a> {
    /// MQTT 5 subscription identifier; always `None` for v3.
    pub fn id(&self) -> Option<NonZeroU32> {
        match self {
            Self::V3(_) => None,
            Self::V5(sub) => sub.packet().id,
        }
    }
}

impl<'a> IntoIterator for Subscribe<'a> {
    type Item = Subscription<'a>;
    type IntoIter = SubscriptionIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        match self {
            Self::V3(sub) => SubscriptionIter::V3(sub.iter_mut()),
            Self::V5(sub) => SubscriptionIter::V5(sub.iter_mut()),
        }
    }
}

/// Iterator over the individual topic-filter entries of a SUBSCRIBE packet.
pub enum SubscriptionIter<'a> {
    V3(v3::control::SubscribeIter<'a>),
    V5(v5::control::SubscribeIter<'a>),
}

impl<'a> Iterator for SubscriptionIter<'a> {
    type Item = Subscription<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        match self {
            Self::V3(iter) => iter.next().map(Subscription::V3),
            Self::V5(iter) => iter.next().map(Subscription::V5),
        }
    }
}

/// A single topic-filter entry of a SUBSCRIBE packet, which the handler must
/// either `confirm` or `fail`.
pub enum Subscription<'a> {
    V3(v3::control::Subscription<'a>),
    V5(v5::control::Subscription<'a>),
}

impl<'a> Subscription<'a> {
    /// Requested topic filter.
    pub fn topic(&self) -> &'a ByteString {
        match self {
            Subscription::V3(sub) => sub.topic(),
            Subscription::V5(sub) => sub.topic(),
        }
    }

    /// Requested quality-of-service level.
    #[allow(dead_code)]
    pub fn qos(&self) -> QoS {
        match self {
            Subscription::V3(sub) => sub.qos(),
            Subscription::V5(sub) => sub.options().qos,
        }
    }

    /// Rejects this subscription entry. The `reason` is only forwarded for v5;
    /// the v3 `fail` takes no reason code.
    pub fn fail(&mut self, reason: v5::codec::SubscribeAckReason) {
        match self {
            Subscription::V3(sub) => sub.fail(),
            Subscription::V5(sub) => sub.fail(reason),
        }
    }

    /// Accepts this subscription entry at the granted QoS.
    pub fn confirm(&mut self, qos: QoS) {
        match self {
            Subscription::V3(sub) => sub.confirm(qos),
            Subscription::V5(sub) => sub.confirm(qos),
        }
    }
}

/// Protocol-version-agnostic view of an UNSUBSCRIBE control packet.
pub enum Unsubscribe<'a> {
    V3(&'a v3::control::Unsubscribe),
    V5(&'a mut v5::control::Unsubscribe),
}

impl<'a> IntoIterator for Unsubscribe<'a> {
    type Item = Unsubscription<'a>;
    type IntoIter = UnsubscriptionIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        match self {
            Self::V3(unsub) => {
                // The v3 packet only exposes a borrowing iterator, so the topics
                // are buffered and reversed up front; `next` then pops from the
                // back, restoring the original packet order.
                let mut topics = unsub.iter().collect::<Vec<_>>();
                topics.reverse();
                UnsubscriptionIter::V3(topics)
            }
            Self::V5(unsub) => UnsubscriptionIter::V5(unsub.iter_mut()),
        }
    }
}

/// Iterator over the individual topic entries of an UNSUBSCRIBE packet.
pub enum UnsubscriptionIter<'a> {
    V3(Vec<&'a ByteString>),
    V5(v5::control::UnsubscribeIter<'a>),
}

impl<'a> Iterator for UnsubscriptionIter<'a> {
    type Item = Unsubscription<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        match self {
            // Popping from the back of the reversed buffer yields topics in
            // their original order.
            Self::V3(iter) => iter.pop().map(Unsubscription::V3),
            Self::V5(iter) => iter.next().map(Unsubscription::V5),
        }
    }
}

/// A single topic entry of an UNSUBSCRIBE packet.
pub enum Unsubscription<'a> {
    V3(&'a ByteString),
    V5(v5::control::UnsubscribeItem<'a>),
}

impl<'a> Unsubscription<'a> {
    /// Topic filter being unsubscribed from.
    pub fn topic(&self) -> &'a ByteString {
        match self {
            Self::V3(topic) => topic,
            Self::V5(unsub) => unsub.topic(),
        }
    }
}
// q0113_path_sum_ii struct Solution; use crate::util::TreeNode; use std::cell::RefCell; use std::rc::Rc; impl Solution { pub fn path_sum(root: Option<Rc<RefCell<TreeNode>>>, sum: i32) -> Vec<Vec<i32>> { let mut tvd = vec![]; let mut ret = vec![]; Solution::path_traveral(&mut tvd, sum, &mut ret, root); ret } fn path_traveral( tvd: &mut Vec<i32>, sum: i32, ret: &mut Vec<Vec<i32>>, tree: Option<Rc<RefCell<TreeNode>>>, ) { if tree.is_none() { return; } let tree = tree.unwrap(); let tn = tree.borrow(); match (&tn.left, &tn.right) { (Some(ln), Some(rn)) => { let new_sum = sum - tn.val; let new_tree1 = Some(Rc::clone(ln)); tvd.push(tn.val); Solution::path_traveral(tvd, new_sum, ret, new_tree1); tvd.pop(); let new_tree2 = Some(Rc::clone(rn)); tvd.push(tn.val); Solution::path_traveral(tvd, new_sum, ret, new_tree2); tvd.pop(); } (Some(ln), None) => { let new_sum = sum - tn.val; let new_tree = Some(Rc::clone(ln)); tvd.push(tn.val); Solution::path_traveral(tvd, new_sum, ret, new_tree); tvd.pop(); } (None, Some(rn)) => { let new_sum = sum - tn.val; let new_tree = Some(Rc::clone(rn)); tvd.push(tn.val); Solution::path_traveral(tvd, new_sum, ret, new_tree); tvd.pop(); } (None, None) => { if sum == tn.val { let mut t = tvd.clone(); t.push(sum); ret.push(t); } } } } } #[cfg(test)] mod tests { use super::Solution; use crate::util::{self, TreeNode}; #[test] fn it_works() { assert_eq!( util::vec_2_set(vec![vec![5, 4, 11, 2], vec![5, 8, 4, 5]]), util::vec_2_set(Solution::path_sum( TreeNode::build_with_str("[5,4,8,11,null,13,4,7,2,null,null,5,1]"), 22 )) ); } }
extern crate proc_macro;

use proc_macro::TokenStream;
use syn::{parse_macro_input, AttributeArgs, DeriveInput, Item};

mod from_args;
mod pyclass;

/// Derive macro for `FromArgs`; field behavior is customized with
/// `#[pyarg(...)]` attributes. Delegates to `from_args::impl_from_args`.
#[proc_macro_derive(FromArgs, attributes(pyarg))]
pub fn derive_from_args(input: TokenStream) -> TokenStream {
    // Use `parse_macro_input!` (as the attribute macros below already do) so a
    // parse failure becomes a proper compile error instead of a macro panic.
    let ast = parse_macro_input!(input as DeriveInput);
    from_args::impl_from_args(ast).into()
}

/// Attribute macro: parses its arguments and the annotated item, then
/// delegates to `pyclass::impl_pyclass`.
#[proc_macro_attribute]
pub fn pyclass(attr: TokenStream, item: TokenStream) -> TokenStream {
    let attr = parse_macro_input!(attr as AttributeArgs);
    let item = parse_macro_input!(item as Item);
    pyclass::impl_pyclass(attr, item).into()
}

/// Attribute macro: parses its arguments and the annotated item, then
/// delegates to `pyclass::impl_pyimpl`.
#[proc_macro_attribute]
pub fn pyimpl(attr: TokenStream, item: TokenStream) -> TokenStream {
    let attr = parse_macro_input!(attr as AttributeArgs);
    let item = parse_macro_input!(item as Item);
    pyclass::impl_pyimpl(attr, item).into()
}
use crate::{
    bit_set,
    bit_set::ops::{Access, Capacity, Count},
    bit_set::{word, Word},
};

/// `Rank` is a generalization of `Count`.
///
/// `rank1` and `rank0` have default implementations, but they are defined in
/// terms of each other (calling neither would recurse forever), so an
/// implementor must redefine at least one of them.
pub trait Rank: Count {
    /// Returns the number of non-zero bit in `[0, i)`, for `i <= size`.
    ///
    /// - `i == size`: rank1 is equal to count1.
    /// - `i > size`: rank1 should panic to satisfy the following rule, `rank1(i) + rank0(i) == i`.
    fn rank1(&self, i: u64) -> u64 {
        assert!(i <= self.size(), "index out of bounds");
        // Every position in [0, i) is either a one or a zero.
        i - self.rank0(i)
    }

    /// Returns the number of zero bit in `[0, i)`, for `i <= size`.
    ///
    /// - `i == size`: rank0 is equal to count0.
    /// - `i > size`: rank0 should panic to satisfy the following rule, `rank1(i) + rank0(i) == i`.
    fn rank0(&self, i: u64) -> u64 {
        assert!(i <= self.size(), "index out of bounds");
        i - self.rank1(i)
    }

    /// Returns `|rank1(i) - rank0(i)|`.
    #[doc(hidden)]
    fn excess(&self, i: u64) -> u64 {
        let rank1 = self.rank1(i);
        let rank0 = self.rank0(i);
        if rank1 >= rank0 {
            rank1 - rank0
        } else {
            rank0 - rank1
        }
    }

    /// Returns `rank1(i) - rank0(i)`.
    ///
    /// # Panics
    /// Panics if `rank1(i) < rank0(i)`.
    #[doc(hidden)]
    fn excess1(&self, i: u64) -> u64 {
        let rank1 = self.rank1(i);
        let rank0 = self.rank0(i);
        assert!(rank1 >= rank0);
        rank1 - rank0
    }

    /// Returns `rank0(i) - rank1(i)`.
    ///
    /// # Panics
    /// Panics if `rank0(i) < rank1(i)`.
    #[doc(hidden)]
    fn excess0(&self, i: u64) -> u64 {
        let rank1 = self.rank1(i);
        let rank0 = self.rank0(i);
        assert!(rank0 >= rank1);
        rank0 - rank1
    }

    /// Select1 by binary search: the smallest `k` with `rank1(k) > n`, minus one.
    ///
    /// # Panics
    /// Panics if `n >= self.count1()`.
    #[doc(hidden)]
    fn search1(&self, n: u64) -> u64 {
        assert!(n < self.count1());
        Word::search(self.size(), |k| self.rank1(k) > n) - 1
    }

    /// Select0 by binary search.
    ///
    /// # Panics
    ///
    /// Panic if `n >= self.count0()`.
    #[doc(hidden)]
    fn search0(&self, n: u64) -> u64 {
        assert!(n < self.count0());
        Word::search(self.size(), |k| self.rank0(k) > n) - 1
    }
}

// Implements `Rank` for the primitive unsigned integer types, treating each
// value as a fixed-size bit vector of `CAPACITY` bits.
macro_rules! impl_Rank_for_words {
    ($($ty:ty),*) => ($(
        impl Rank for $ty {
            fn rank1(&self, i: u64) -> u64 {
                assert!(i <= Self::CAPACITY, "index out of bounds");
                if i == Self::CAPACITY {
                    // A full-width prefix is just the total popcount; this also
                    // avoids building a mask for `i == CAPACITY`.
                    self.count1()
                } else {
                    let mask = *self & Self::mask(word::cast(i));
                    mask.count1()
                }
            }
            fn rank0(&self, i: u64) -> u64 {
                assert!(i <= Self::CAPACITY, "index out of bounds");
                // The zeros of `self` are exactly the ones of `!self`.
                (!self).rank1(i)
            }
        }
    )*)
}
impl_Rank_for_words!(u8, u16, u32, u64, u128, usize);

impl<T: Capacity + Rank> Rank for [T] {
    fn rank1(&self, i: u64) -> u64 {
        assert!(i <= self.size(), "index out of bounds");
        let (index, offset) = bit_set::address(i, T::CAPACITY);
        // Whole words before `index` contribute their full popcount; the word
        // at `index` (if present) contributes a partial rank up to `offset`.
        let c = self.iter().take(index).map(|b| b.count1()).sum::<u64>();
        let r = self.get(index).map_or(0, |b| b.rank1(offset));
        c + r
    }
}
use crate::{domain, value};
use std::fmt::{Display, Formatter, Result as FmtResult};
use strum_macros::{EnumIter, EnumString};

/// Test property identifiers, one per supported datatype.
#[derive(PartialEq, Clone, Copy, Hash, Eq, Debug, EnumIter, EnumString)]
pub enum Property {
    Bool,
    Int,
    Str,
}

impl Property {
    /// Canonical label for a variant. Shared by `Display` and
    /// `domain::Property::name`, which previously duplicated this table.
    fn label(&self) -> &'static str {
        match self {
            Property::Bool => "Property::Bool",
            Property::Int => "Property::Int",
            Property::Str => "Property::Str",
        }
    }
}

impl Display for Property {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        write!(f, "{}", self.label())
    }
}

impl domain::DomainEnum for Property {}

impl domain::Property for Property {
    /// Stable name of the property; identical to its `Display` output.
    fn name(&self) -> &'static str {
        self.label()
    }

    /// Datatype of the values this property holds.
    fn datatype(&self) -> value::Datatype {
        match &self {
            Property::Bool => value::Datatype::Bool,
            Property::Int => value::Datatype::Int,
            Property::Str => value::Datatype::Str,
        }
    }
}
use sqlx::decode::Decode;
use sqlx::encode::Encode;
use sqlx::sqlite::SqliteTypeInfo;
use sqlx::SqlitePool;
use sqlx::{FromRow, Sqlite, Type};
use std::string::ToString;

/// One row of the `roles` table.
#[derive(FromRow, Debug)]
pub struct Role {
    pub id: i64,
    pub role_name: RoleName,
}

/// The known role names; the discriminant doubles as the role id.
#[derive(Decode, Encode, Debug, Copy, Clone, Display)]
pub enum RoleName {
    ADMIN = 1,
    USER,
}

impl From<String> for RoleName {
    /// Case-insensitive conversion from a role-name string.
    /// Panics if the input matches no known role.
    fn from(input: String) -> RoleName {
        let normalized = input.to_uppercase();
        if normalized == RoleName::ADMIN.to_string() {
            RoleName::ADMIN
        } else if normalized == RoleName::USER.to_string() {
            RoleName::USER
        } else {
            panic!("input invalid !")
        }
    }
}

impl Type<Sqlite> for RoleName {
    /// Role names are stored as TEXT in SQLite.
    fn type_info() -> SqliteTypeInfo {
        <str as Type<Sqlite>>::type_info()
    }
}

impl Role {
    /// Builds a `Role` in memory; the id mirrors the enum discriminant.
    pub async fn new(role_name: RoleName) -> anyhow::Result<Role> {
        let id = role_name as i64;
        Ok(Role { id, role_name })
    }

    /// Fetches the role with the given name.
    pub async fn find_by_name(pool: &SqlitePool, role_name: RoleName) -> anyhow::Result<Role> {
        let role =
            sqlx::query_as::<_, Role>("SELECT id,role_name FROM roles WHERE role_name = $1")
                .bind(role_name.to_string())
                .fetch_one(pool)
                .await?;
        Ok(role)
    }

    /// Fetches the role with the given primary key.
    pub async fn find_by_id(pool: &SqlitePool, user_id: i64) -> anyhow::Result<Role> {
        let role = sqlx::query_as::<_, Role>("SELECT id,role_name FROM roles WHERE id = $1")
            .bind(user_id)
            .fetch_one(pool)
            .await?;
        Ok(role)
    }

    /// Fetches every role.
    pub async fn find_all(pool: &SqlitePool) -> anyhow::Result<Vec<Role>> {
        let roles = sqlx::query_as::<_, Role>("SELECT id,role_name FROM roles")
            .fetch_all(pool)
            .await?;
        Ok(roles)
    }
}
/* Copyright (c) 2023 Uber Technologies, Inc.

 <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
 except in compliance with the License. You may obtain a copy of the License at
 <p>http://www.apache.org/licenses/LICENSE-2.0

 <p>Unless required by applicable law or agreed to in writing, software distributed under the
 License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 express or implied. See the License for the specific language governing permissions and
 limitations under the License.
*/

//! Defines utility functions that interface with tree-sitter.

use super::eq_without_whitespace;
use crate::{
    models::{
        edit::Edit,
        matches::{Match, Range},
    },
    utilities::MapOfVec,
};
use itertools::Itertools;
use log::debug;
use std::collections::HashMap;
use tree_sitter::{InputEdit, Node, Parser, Query, QueryCapture, QueryCursor};
use tree_sitter_traversal::{traverse, Order};

/// Applies the query upon the given `node`, and gets all matches
/// # Arguments
/// * `node` - the root node to apply the query upon
/// * `source_code` - the corresponding source code string for the node.
/// * `query` - the query to be applied
/// * `recursive` - if `true` it matches the query to `self` and `self`'s sub-ASTs, else it matches the `query` only to `self`.
/// * `replace_node` - optional tag name; when present, the reported match range is that capture's range instead of the outermost capture's.
/// * `replace_node_idx` - optional index of a named child of the `replace_node` capture to use as the range instead.
///
/// # Returns
/// The range of each match in the source code and the corresponding mapping from tags to code snippets.
pub(crate) fn get_all_matches_for_query(
    node: &Node,
    source_code: String,
    query: &Query,
    recursive: bool,
    replace_node: Option<String>,
    replace_node_idx: Option<u8>,
) -> Vec<Match> {
    let query_capture_groups = _get_query_capture_groups(node, &source_code, query);
    // In the below code, we get the code snippet corresponding to each tag for each QueryMatch.
    // It could happen that we have multiple occurrences of the same tag (in queries
    // that use the quantifier operator (*/+)). Therefore for each query match, we have to
    // group (join) the code snippets corresponding to the same tag.
    let mut output = vec![];
    for (captured_node_range, query_matches) in query_capture_groups {
        // This ensures that each query pattern in rule.query matches the same node.
        if query_matches.len() != query.pattern_count() {
            continue;
        }
        // Check if the range of the self (node), and the range of outermost node captured by the query are equal.
        let range_matches_self = node.start_byte() == *captured_node_range.start_byte()
            && node.end_byte() == *captured_node_range.end_byte();
        // If `recursive` it allows matches to the subtree of self (Node)
        // Else it ensures that the query perfectly matches the node (`self`).
        if recursive || range_matches_self {
            let mut replace_node_range = captured_node_range;
            if let Some(replace_node_name) = &replace_node {
                if let Some(r) = get_range_for_replace_node(
                    query,
                    &query_matches,
                    replace_node_name,
                    replace_node_idx,
                ) {
                    replace_node_range = r;
                } else {
                    // The requested named-child index was out of bounds; skip this match.
                    continue;
                }
            }
            let code_snippet_by_tag = accumulate_repeated_tags(query, query_matches, &source_code);
            output.push(Match {
                matched_string: source_code
                    [*replace_node_range.start_byte()..*replace_node_range.end_byte()]
                    .to_string(),
                range: replace_node_range,
                matches: code_snippet_by_tag,
                ..Default::default()
            });
        }
    }
    // This sorts the matches from bottom to top (ascending by start byte, then reversed).
    output.sort_by(|a, b| a.range().start_byte().cmp(b.range().start_byte()));
    output.reverse();
    output
}

/// Applies the query upon the given `node`, and groups the captures by the
/// range of the outermost node they matched
/// # Arguments
/// * `node` - the root node to apply the query upon
/// * `source_code` - the corresponding source code string for the node.
/// * `query` - the query to be applied
///
/// # Returns
/// List of matches (list of captures), grouped by the outermost tag of the query
fn _get_query_capture_groups<'a>(
    node: &'a Node<'a>,
    source_code: &'a str,
    query: &'a Query,
) -> HashMap<Range, Vec<Vec<QueryCapture<'a>>>> {
    let mut cursor = QueryCursor::new();
    // Match the query to the node to get a list of QueryMatch instances.
    // A QueryMatch is like a Map<tag, Node>
    let query_matches = cursor.matches(query, *node, source_code.as_bytes());

    // Since a node can be a part of multiple QueryMatch instances,
    // we group the query match instances based on the range of the outermost node they matched.
    let mut query_matches_by_node_range: HashMap<Range, Vec<Vec<QueryCapture>>> = HashMap::new();
    for query_match in query_matches {
        // The first capture in any query match is its outermost tag.
        // Ensure the outermost s-expression for this tree-sitter query is tagged.
        if let Some(captured_node) = query_match.captures.first() {
            // `collect` here is the `MapOfVec` helper, which appends this
            // capture list to the vector keyed by the outermost node's range.
            query_matches_by_node_range.collect(
                captured_node.node.range().into(),
                query_match.captures.iter().cloned().collect_vec(),
            );
        }
    }
    query_matches_by_node_range
}

// Joins code snippets corresponding to the same tag with `\n`.
// This scenario occurs when we use the `*` or the `+` quantifier in the tree-sitter query
// Look at - cleanup_rules/java/rules:remove_unnecessary_nested_block
// If a tag name did not match a code snippet, an empty string is added.
// Returns the mapping between the tag and source code snippet (accumulated).
fn accumulate_repeated_tags( query: &Query, query_matches: Vec<Vec<tree_sitter::QueryCapture>>, source_code: &str, ) -> HashMap<String, String> { let mut code_snippet_by_tag: HashMap<String, String> = HashMap::new(); let tag_names_by_index: HashMap<usize, &String> = query.capture_names().iter().enumerate().collect(); // Iterate over each tag name in the query for tag_name in query.capture_names().iter() { // Iterate over each query match for this range of code snippet for captures in query_matches.clone() { // Iterate over each capture for capture in captures { if tag_names_by_index[&(capture.index as usize)].eq(tag_name) { let code_snippet = &capture.node.utf8_text(source_code.as_bytes()).unwrap(); code_snippet_by_tag .entry(tag_name.clone()) .and_modify(|x| x.push_str(format!("\n{code_snippet}").as_str())) .or_insert_with(|| code_snippet.to_string()); } } } // If tag name did not match a code snippet, add an empty string. code_snippet_by_tag.entry(tag_name.clone()).or_default(); } code_snippet_by_tag } // In some queries, the `rule.query` matches a larger node, while the rewrite rule replaces the a sub-AST with a new pattern // For instance: cleanup_riles/java/rules:remove_unnecessary_nested_block (here the outermost tag is @block while the // replace_node is @nested.block) // If parameter `replace_node` is provided we group the captures by this replace node and not the // outermost node captured by the query. // This function gets the range of the ast corresponding to the `replace_node` tag of the query. 
fn get_range_for_replace_node( query: &Query, query_matches: &[Vec<tree_sitter::QueryCapture>], replace_node_name: &String, replace_node_idx: Option<u8>, ) -> Option<Range> { let tag_names_by_index: HashMap<usize, &String> = query.capture_names().iter().enumerate().collect(); // Iterate over each tag name in the query for tag_name in query.capture_names().iter() { // Iterate over each query match for this range of code snippet for captures in query_matches.iter().cloned() { // Iterate over each capture for capture in captures { if tag_names_by_index[&(capture.index as usize)].eq(tag_name) && tag_name.eq(replace_node_name) { if let Some(child_index) = replace_node_idx { let c_usize = child_index as usize; if c_usize >= capture.node.named_child_count() { return None; } return Some(capture.node.named_child(c_usize).unwrap().range().into()); } return Some(capture.node.range().into()); } } } } panic!( "Could not fetch range or node for replace_node {}. Context: {:?}", replace_node_name, query.capture_names() ); } /// Replaces the given byte range (`replace_range`) with the `replacement`. /// Returns tree-sitter's edit representation along with updated source code. /// Note: This method does not update `self`. pub(crate) fn get_tree_sitter_edit(code: String, edit: &Edit) -> (String, InputEdit) { // Log the edit let replace_range: Range = *edit.p_match().range(); let replacement = edit.replacement_string(); debug!("{}", edit); // Create the new source code content by appropriately // replacing the range with the replacement string. 
let new_source_code = [ &code[..*replace_range.start_byte()], replacement, &code[*replace_range.end_byte()..], ] .concat(); let len_of_replacement = replacement.as_bytes().len(); let old_source_code_bytes = code.as_bytes(); let new_source_code_bytes = new_source_code.as_bytes(); let start_byte = *replace_range.start_byte(); let old_end_byte = *replace_range.end_byte(); let new_end_byte = start_byte + len_of_replacement; ( new_source_code.to_string(), // Tree-sitter edit InputEdit { start_byte, old_end_byte, new_end_byte, start_position: position_for_offset(old_source_code_bytes, start_byte), old_end_position: position_for_offset(old_source_code_bytes, old_end_byte), new_end_position: position_for_offset(new_source_code_bytes, new_end_byte), }, ) } // Finds the position (col and row number) for a given offset. fn position_for_offset(input: &[u8], offset: usize) -> tree_sitter::Point { let mut result = tree_sitter::Point { row: 0, column: 0 }; for c in &input[0..offset] { if *c as char == '\n' { result.row += 1; result.column = 0; } else { result.column += 1; } } result } // Creates the InputEdit as per the tree-sitter api documentation. fn _get_tree_sitter_edit( replace_range: Range, len_of_replacement: usize, old_source_code_bytes: &[u8], new_source_code_bytes: &[u8], ) -> InputEdit { let start_byte = *replace_range.start_byte(); let old_end_byte = *replace_range.end_byte(); let new_end_byte = start_byte + len_of_replacement; InputEdit { start_byte, old_end_byte, new_end_byte, start_position: position_for_offset(old_source_code_bytes, start_byte), old_end_position: position_for_offset(old_source_code_bytes, old_end_byte), new_end_position: position_for_offset(new_source_code_bytes, new_end_byte), } } /// Get the smallest node within `self` that spans the given range. 
pub(crate) fn get_node_for_range(root_node: Node, start_byte: usize, end_byte: usize) -> Node { root_node .descendant_for_byte_range(start_byte, end_byte) .unwrap() } fn get_non_str_eq_parent(node: Node, source_code: String) -> Option<Node> { if let Some(parent) = node.parent() { if !eq_without_whitespace( parent.utf8_text(source_code.as_bytes()).unwrap(), node.utf8_text(source_code.as_bytes()).unwrap(), ) { return Some(parent); } else { return get_non_str_eq_parent(parent, source_code); } } None } /// Returns the node, its parent, grand parent and great grand parent pub(crate) fn get_context(prev_node: Node<'_>, source_code: String, count: u8) -> Vec<Node<'_>> { let mut output = Vec::new(); if count > 0 { output.push(prev_node); if let Some(parent) = get_non_str_eq_parent(prev_node, source_code.to_string()) { output.extend(get_context(parent, source_code, count - 1)); } } output } pub(crate) fn get_replace_range(input_edit: InputEdit) -> Range { Range { start_byte: input_edit.start_byte, end_byte: input_edit.new_end_byte, start_point: input_edit.start_position.into(), end_point: input_edit.new_end_position.into(), } } /// Returns the (tree-sitter) parser for the tree-sitter query DSL pub(crate) fn get_ts_query_parser() -> Parser { let mut parser = Parser::new(); parser .set_language(tree_sitter_query::language()) .expect("Could not set the language for the parser."); parser } /// Returns the number of errors in the AST pub(crate) fn number_of_errors(node: &Node) -> usize { traverse(node.walk(), Order::Post) .filter(|node| node.is_error() || node.is_missing()) .count() } #[cfg(test)] #[path = "unit_tests/tree_sitter_utilities_test.rs"] mod tree_sitter_utilities_test;
use crate::custom_var::CustomVar;
use crate::looping::{IterResult, NativeIterator};
use crate::method::StdMethod;
use crate::name::Name;
use crate::operator::Operator;
use crate::runtime::Runtime;
use crate::stack_frame::StackFrame;
use crate::std_type::Type;
use crate::variable::{FnResult, Variable};
use crate::{executor, looping};
use std::cell::Cell;
use std::fmt::{self, Debug, Formatter};
use std::rc::Rc;

/// A suspended generator: the saved stack frame and value stack of a paused
/// function.
///
/// Both fields live in `Cell`s so the runtime can temporarily move them out
/// while the generator executes and put them back afterwards (see
/// `take_frame`/`take_stack`/`replace_vars`).
pub struct Generator {
    // `None` while the frame has been taken out for execution.
    frame: Cell<Option<StackFrame>>,
    stack: Cell<Vec<Variable>>,
}

impl Generator {
    pub fn new(frame: StackFrame, stack: Vec<Variable>) -> Generator {
        Generator {
            frame: Cell::new(Option::Some(frame)),
            stack: Cell::new(stack),
        }
    }

    /// Stores a new frame/stack pair after a suspension.
    ///
    /// Asserts that no frame is currently present — the old one must have been
    /// removed with `take_frame` first.
    pub fn replace_vars(&self, frame: StackFrame, stack: Vec<Variable>) {
        assert!(self.frame.take().is_none());
        self.frame.replace(Option::Some(frame));
        self.stack.replace(stack);
    }

    /// Removes and returns the saved frame, leaving `None` in its place.
    pub fn take_frame(&self) -> Option<StackFrame> {
        self.frame.take()
    }

    /// Removes and returns the saved value stack, leaving an empty one.
    pub fn take_stack(&self) -> Vec<Variable> {
        self.stack.take()
    }

    /// Type-constructor stub: generators are never created through their type,
    /// so reaching this is a bug in the runtime.
    pub fn create(_args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        panic!(
            "Should not be creating generators\n{}",
            runtime.frame_strings()
        )
    }

    fn gen_type() -> Type {
        custom_class!(Generator, create, "Generator")
    }

    /// Implementation of the `next` attribute: resumes this generator inside a
    /// fresh native frame and runs the executor until it returns.
    fn next_fn(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.is_empty());
        runtime.push_native();
        runtime.add_generator(self)?;
        let result = executor::execute(runtime);
        runtime.pop_native();
        result
    }

    /// Implementation of `operator iter`: a generator is its own iterator.
    fn ret_self(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.is_empty());
        runtime.return_1(self.into())
    }
}

impl CustomVar for Generator {
    fn set(self: Rc<Self>, _name: Name, _object: Variable) {
        unimplemented!()
    }

    fn get_type(&self) -> Type {
        Self::gen_type()
    }

    fn get_operator(self: Rc<Self>, op: Operator) -> Variable {
        match op {
            Operator::Iter => StdMethod::new_native(self, Self::ret_self).into(),
            _ => unimplemented!("Generator.{}", op.name()),
        }
    }

    fn get_attribute(self: Rc<Self>, name: &str) -> Variable {
        match name {
            "next" => StdMethod::new_native(self, Self::next_fn).into(),
            _ => unimplemented!("Generator.{}", name),
        }
    }

    fn into_iter(self: Rc<Self>) -> looping::Iterator {
        looping::Iterator::Native(self)
    }
}

impl NativeIterator for Generator {
    /// Resumes the generator and expects it to leave a `Variable::Option` in
    /// the runtime's return slot; anything else is a runtime bug.
    fn next(self: Rc<Self>, runtime: &mut Runtime) -> IterResult {
        runtime.add_generator(self)?;
        match executor::execute(runtime) {
            FnResult::Ok(_) => match runtime.pop_return() {
                Variable::Option(var) => IterResult::Ok(var.into()),
                _ => panic!("Expected option to be returned from generator"),
            },
            FnResult::Err(_) => IterResult::Err(()),
        }
    }
}

impl Debug for Generator {
    /// `Cell` contents cannot be borrowed for formatting, so they are taken
    /// out, formatted, and put back before returning.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let frame = self.frame.take();
        let stack = self.stack.take();
        let result = f
            .debug_struct("Generator")
            .field("frame", &frame)
            .field("stack", &stack)
            .finish();
        self.frame.replace(frame);
        self.stack.replace(stack);
        result
    }
}
use num_traits::{FromPrimitive, ToPrimitive}; use crate::lexer::SyntaxKind; /// Second, implementing the `Language` trait teaches rowan to convert between /// these two SyntaxKind types, allowing for a nicer SyntaxNode API where /// "kinds" are values from our `enum SyntaxKind`, instead of plain u16 values. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum Lang {} pub type SyntaxNode = rowan::SyntaxNode<Lang>; #[allow(unused)] pub type SyntaxToken = rowan::SyntaxToken<Lang>; #[allow(unused)] pub type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>; impl rowan::Language for Lang { type Kind = SyntaxKind; fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind { Self::Kind::from_u16(raw.0).unwrap() } fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind { rowan::SyntaxKind(kind.to_u16().unwrap()) } }