text
stringlengths
8
4.13M
use crate::hittable::{aabb::Aabb, HitRecord, Hittable, Hittables};
use crate::ray::{face_normal, Ray};
use crate::vec::Vec3;
use rand::rngs::SmallRng;
use std::sync::Arc;

/// Wraps another hittable and displaces it by a constant offset.
///
/// Instead of moving the wrapped object, incoming rays are shifted into the
/// object's local frame for the intersection test, and the resulting hit
/// point is shifted back into world space.
#[derive(Debug, Clone)]
pub struct Translate {
    /// The wrapped object, intersected in its own (untranslated) frame.
    pub object: Arc<Hittables>,
    /// World-space displacement applied to the wrapped object.
    pub offset: Vec3,
}

impl Translate {
    /// Builds a `Translate` wrapper and converts it into a `Hittables` value.
    pub fn new(object: Arc<Hittables>, offset: Vec3) -> Hittables {
        // Field-init shorthand replaces the redundant `object: object` form.
        Hittables::from(Translate { object, offset })
    }
}

impl Hittable for Translate {
    fn hit(&self, ray: &Ray, t_min: f64, t_max: f64, rng: &mut SmallRng) -> Option<HitRecord> {
        // Shift the ray into the object's local frame rather than moving the
        // object itself; direction and time are unaffected by translation.
        let moved = Ray {
            origin: ray.origin - self.offset,
            direction: ray.direction,
            time: ray.time,
        };
        // `Option::map` replaces the manual `match Some/None` pass-through.
        self.object.hit(&moved, t_min, t_max, rng).map(|hit| {
            // Recompute front-face orientation against the moved ray.
            let (front_face, normal) = face_normal(&moved, hit.normal);
            HitRecord {
                t: hit.t,
                u: hit.u,
                v: hit.v,
                // Translate the hit point back into world space.
                point: hit.point + self.offset,
                normal,
                front_face,
                mat: hit.mat.clone(),
            }
        })
    }

    fn bounding_box(&self, time0: f64, time1: f64) -> Option<Aabb> {
        // The translated object's box is the wrapped box shifted by `offset`;
        // if the wrapped object has no box, neither do we.
        self.object
            .bounding_box(time0, time1)
            .map(|bbox| Aabb::new(bbox.minimum + self.offset, bbox.maximum + self.offset))
    }
}
use std::env;
use std::error;
use std::ffi::OsStr;
use std::path::Path;
use wallpaper_windows_shobjidl::desktop_wallpaper;

// `'static` is implied on `static` items, so the explicit lifetime was
// redundant (clippy: redundant_static_lifetimes).
static USAGE: &str = r#"Usage: wallpaper-shobjidl - get wallpaper wallpaper-shobjidl [path] - set wallpaper "#;

/// CLI entry point: with one argument, sets the desktop wallpaper to that
/// path; with no arguments, prints the current wallpaper path; otherwise
/// prints usage. Errors from the COM wrapper propagate via `?`.
fn main() -> Result<(), Box<dyn error::Error>> {
    let args: Vec<_> = env::args().collect();
    match args.len() {
        // Program name + one argument: set the wallpaper.
        2 => {
            let path: &Path = Path::new(&args[1]);
            let mut dw = desktop_wallpaper().lock()?;
            // `None` monitor id applies the wallpaper to all monitors.
            dw.set_wallpaper::<&OsStr, _>(None, path)?;
            Ok(())
        }
        // Program name only: query and print the current wallpaper path.
        1 => {
            let mut dw = desktop_wallpaper().lock()?;
            let path = dw.get_wallpaper::<&OsStr>(None)?;
            // `map_err` avoids eagerly constructing the error value the way
            // `.or(Err(...))` did, and is the conventional form.
            let path = path
                .into_os_string()
                .into_string()
                .map_err(|_| "Path is not a valid UTF-8 string")?;
            println!("{}", path);
            Ok(())
        }
        // Any other argument count: show usage (not an error exit).
        _ => {
            eprintln!("{}", USAGE);
            Ok(())
        }
    }
}
// Constructor helpers: declares the `new_transaction` and
// `new_validated_block` submodules and re-exports their factory functions so
// callers can reach them directly from this module.
mod new_transaction; mod new_validated_block; pub use new_transaction::new_transaction; pub use new_validated_block::new_validated_block;
// Crate root for Cuticula: declares the `transformer`, `image`, and `word`
// modules, pulls in the external `image` and `murmurhash3` crates under local
// aliases, and re-exports the main public types (`Image`, `Word`,
// `Transformer`, `Set`). The `modifier` module re-exports the external
// `modifier` crate.
// NOTE(review): `#![plugin(clippy)]` is the old compiler-plugin form of
// clippy and requires a matching nightly; modern toolchains use `cargo
// clippy` instead — confirm the pinned toolchain before changing this.
//! Cuticula provides convenient and universal Machine Learning Transformer //! for non-numeric data types such as: `Strings`, `Images` and `Audio`. #![feature(plugin)] #![plugin(clippy)] extern crate image as image_lib; extern crate murmurhash3 as murmur3; pub use image::Image; pub use word::Word; pub use transformer::Transformer; pub use modifier::Set; pub mod transformer; pub mod image; pub mod word; /// Re-exports from the Modifier crate. pub mod modifier { extern crate modifier as modifier_lib; pub use self::modifier_lib::*; }
// Recurrence-rule (RFC 5545 style) iteration helpers:
// - `decrement_date_until_valid`: applies a new month/year to a date, walking
//   the day backwards until chrono accepts the combination (handles e.g.
//   day 31 landing in a shorter month).
// - `increment_counter_date`: advances the iteration cursor by one interval
//   of the rule's frequency (yearly/monthly/weekly/daily/hourly/minutely/
//   secondly), honouring `wkst` for weekly and the BYHOUR/BYMINUTE/BYSECOND
//   masks for sub-daily frequencies.
// - `is_filtered` / `remove_filtered_days`: test a day index against the
//   precomputed BY* masks in `IterInfo` and blank out filtered entries.
// - `build_timeset` / `make_timeset`: expand byhour x byminute x bysecond
//   into the per-day `Time` list.
mod iterinfo; mod monthinfo; mod yearinfo; pub use iterinfo::IterInfo; mod poslist; pub use poslist::build_poslist; mod easter; mod masks; use crate::datetime::{get_weekday_val, DTime, Time}; use crate::options::*; use crate::utils::{includes, not_empty}; use chrono::prelude::*; use chrono::Duration; pub fn decrement_date_until_valid(date: DTime, new_month: u32, new_year: Option<i32>) -> DTime { let new_date = if let Some(new_year) = new_year { let mut new_date = date.with_year(new_year); let mut day_i = 1; while new_date.is_none() { new_date = date.with_day(date.day() - day_i); new_date = new_date.unwrap().with_year(new_year); day_i += 1; } new_date.unwrap() } else { date }; let mut new_date = new_date.with_month(new_month); let mut day_i = 1; while new_date.is_none() { new_date = date .with_day(date.day() - day_i) .unwrap() .with_month(new_month); day_i += 1; } new_date.unwrap() } pub fn increment_counter_date( counter_date: DTime, options: &ParsedOptions, filtered: bool, ) -> DTime { match options.freq { Frequenzy::Yearly => counter_date .with_year(counter_date.year() + options.interval as i32) .unwrap(), Frequenzy::Monthly => { let new_month = counter_date.month() + options.interval as u32; if new_month > 12 { let mut year_div = new_month / 12; let mut new_month = new_month % 12; if new_month == 0 { new_month = 12; year_div -= 1; } let new_year = counter_date.year() + year_div as i32; decrement_date_until_valid(counter_date, new_month, Some(new_year)) } else { decrement_date_until_valid(counter_date, new_month, None) } } Frequenzy::Weekly => { let mut day_delta = 0; let weekday = get_weekday_val(&counter_date.weekday()); if options.wkst > weekday { day_delta += -((weekday + 1 + (6 - options.wkst)) as isize) + (options.interval as isize) * 7; } else { day_delta += -((weekday - options.wkst) as isize) + (options.interval as isize) * 7; } counter_date + Duration::days(day_delta as i64) } Frequenzy::Daily => counter_date + Duration::days(options.interval as i64), 
// Sub-daily frequencies: advance by `interval` units in a loop until the
// corresponding BY* mask accepts the new value. When `filtered` is set, the
// cursor first jumps to just before the next day to skip useless iterations.
Frequenzy::Hourly => { let mut new_hours = counter_date.hour() as usize; if filtered { new_hours += ((23 - new_hours) as f32 / options.interval as f32).floor() as usize * options.interval; } loop { new_hours += options.interval; if options.byhour.is_empty() || options .byhour .iter() .any(|bh| *bh == (new_hours % 24) as usize) { break; } } counter_date.with_hour(0).unwrap() + Duration::hours(new_hours as i64) } Frequenzy::Minutely => { let mut minutes_inc = 0; let minutes = counter_date.minute() as usize; let hours = counter_date.hour() as usize; if filtered { // Jump to one iteration before next day minutes_inc = (1439. - ((hours * 60 + minutes) as f32 / options.interval as f32)) .floor() as usize * options.interval; } let mut counter_date = counter_date + Duration::minutes(minutes_inc as i64); loop { counter_date = counter_date + Duration::minutes(options.interval as i64); let minutes = counter_date.minute() as usize; let hours = counter_date.hour() as usize; if (options.byhour.is_empty() || includes(&options.byhour, &hours)) && (options.byminute.is_empty() || includes(&options.byminute, &minutes)) { break; } } counter_date } Frequenzy::Secondly => { let mut seconds_inc = 0; let seconds = counter_date.second() as usize; let minutes = counter_date.minute() as usize; let hours = counter_date.hour() as usize; if filtered { // Jump to one iteration before next day seconds_inc = (86399. 
- ((hours * 3600 + minutes * 60 + seconds) as f32 / options.interval as f32)) .floor() as usize * options.interval; } let mut counter_date = counter_date + Duration::seconds(seconds_inc as i64); loop { counter_date = counter_date + Duration::seconds(options.interval as i64); let seconds = counter_date.second() as usize; let minutes = counter_date.minute() as usize; let hours = counter_date.hour() as usize; if (options.byhour.is_empty() || includes(&options.byhour, &hours)) && (options.byminute.is_empty() || includes(&options.byminute, &minutes)) && (options.bysecond.is_empty() || includes(&options.bysecond, &seconds)) { break; } } counter_date } } } pub fn is_filtered(ii: &IterInfo, current_day: usize, options: &ParsedOptions) -> bool { return (!options.bymonth.is_empty() && !options.bymonth.contains(&ii.mmask()[current_day])) || (not_empty(&options.byweekno) && (ii.wnomask().unwrap()[current_day]) == 0) || (not_empty(&options.byweekday) && !includes(&options.byweekday, &ii.wdaymask()[current_day])) || (ii.nwdaymask().is_some() && not_empty(ii.nwdaymask().unwrap()) && (ii.nwdaymask().unwrap()[current_day]) == 0) || (options.byeaster.is_some() && !(includes(ii.eastermask().unwrap(), &(current_day as isize)))) || ((not_empty(&options.bymonthday) || not_empty(&options.bynmonthday)) && !includes(&options.bymonthday, &ii.mdaymask()[current_day]) && !includes(&options.bynmonthday, &ii.nmdaymask()[current_day])) || (not_empty(&options.byyearday) && ((current_day < ii.yearlen().unwrap() && !includes(&options.byyearday, &(current_day as isize + 1)) && !includes( &options.byyearday.iter().map(|v| *v as isize).collect(), &(-(ii.yearlen().unwrap() as isize) + current_day as isize), )) || (current_day >= ii.yearlen().unwrap() && !includes( &options.byyearday, &((current_day + 1 - ii.yearlen().unwrap()) as isize), ) && !includes( &options.byyearday.iter().map(|v| *v as isize).collect(), &(-(ii.nextyearlen().unwrap() as isize) + current_day as isize - ii.yearlen().unwrap() as 
isize), )))); } pub fn remove_filtered_days( dayset: &mut Vec<Option<isize>>, start: usize, end: usize, ii: &IterInfo, ) -> bool { let mut filtered = false; for daycounter in start..end { match dayset[daycounter] { Some(current_day) => { filtered = is_filtered(ii, current_day as usize, &ii.options); if filtered { dayset[daycounter] = None; } } None => continue, } } filtered } pub fn build_timeset(options: &ParsedOptions) -> Vec<Time> { let millisecond_mod = (options.dtstart.timestamp_millis() % 1000) as usize; if options.freq > Frequenzy::Daily { return vec![]; } let mut timeset = Vec::with_capacity(options.byhour.len() * options.byminute.len() * options.bysecond.len()); for hour in &options.byhour { for minute in &options.byminute { for second in &options.bysecond { timeset.push(Time::new(*hour, *minute, *second, millisecond_mod)); } } } timeset } pub fn make_timeset(ii: &IterInfo, counter_date: &DTime, options: &ParsedOptions) -> Vec<Time> { if options.freq < Frequenzy::Hourly { return build_timeset(options); } if (options.freq >= Frequenzy::Hourly && !options.byhour.is_empty() && !options .byhour .iter() .any(|&h| h == counter_date.hour() as usize)) || (options.freq >= Frequenzy::Minutely && !options.byminute.is_empty() && !options .byminute .iter() .any(|&m| m == counter_date.minute() as usize)) || (options.freq >= Frequenzy::Secondly && !options.bysecond.is_empty() && !options .bysecond .iter() .any(|&s| s == counter_date.second() as usize)) { return vec![]; } ii.gettimeset( &options.freq, counter_date.hour() as usize, counter_date.minute() as usize, counter_date.second() as usize, counter_date.timestamp_subsec_millis() as usize, ) }
// Limn image demo: opens a 100x100-minimum window titled "Limn image demo",
// registers the bundled rust.png bytes with the image loader under the key
// "rust", and displays that image centered in the root widget with 50px
// padding before entering the app's main loop.
#[allow(unused_imports)] #[macro_use] extern crate limn; mod util; use limn::prelude::*; fn main() { let window_builder = glutin::WindowBuilder::new() .with_title("Limn image demo") .with_min_dimensions(100, 100); let app = util::init(window_builder); resources().image_loader.load_image("rust", include_bytes!("../assets/images/rust.png").to_vec()); let mut root = Widget::new("root"); let mut image_widget = Widget::from_modifier(Image::new(ImageSource::bundled("rust"))); image_widget.layout().add(constraints![ center(&root), bound_by(&root).padding(50.0), ]); root.add_child(image_widget); app.main_loop(root); }
// Hex-grid dungeon generator and viewer world.
//
// `World<R: HexRenderer>` holds a `CubicRangeShape` play area, a hash storage
// of per-hex `(HexData, R::Hex)` pairs, the rooms carved so far, and an
// optional `HexPointer` (with a field-of-view mode). Generation proceeds in
// phases, each driven by a resumable state struct so it can be stepped from
// the UI: `add_room` (random non-overlapping rooms), `grow_maze`
// (windy depth-first corridor carving on odd-coordinate cells), `connect`
// (join regions through random wall connectors), `remove_dead_ends` /
// `remove_angles` (corridor cleanup), and `clean_walls` (drop walls with no
// open neighbour). `update_renderer_world` recomputes visibility from the
// pointer's field of view and pushes the result to the renderer.
use crate::{ dispose::Dispose, hex::{ pointer::HexPointer, render::renderer::HexRenderer, shape::cubic_range::CubicRangeShape, }, world::RhombusViewerWorld, }; use amethyst::{ecs::prelude::*, prelude::*}; use rand::{thread_rng, Rng}; use rhombus_core::hex::{ coordinates::{ axial::AxialVector, cubic::CubicVector, direction::{HexagonalDirection, NUM_DIRECTIONS}, }, field_of_view::FieldOfView, storage::hash::RectHashStorage, }; use smallvec::SmallVec; use std::{collections::HashSet, sync::Arc}; #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum HexState { Open(usize), Wall, } pub struct HexData { state: HexState, } impl Dispose for HexData { fn dispose(&mut self, _data: &mut StateData<'_, GameData<'_, '_>>) {} } #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum FovState { Partial, Full, } #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum MoveMode { StraightAhead, StrafeLeftAhead, StrafeLeftBack, StrafeRightAhead, StrafeRightBack, StraightBack, } const CELL_RADIUS_RATIO_DEN: usize = 42; pub struct World<R: HexRenderer> { shape: CubicRangeShape, shape_positions: Vec<AxialVector>, hexes: RectHashStorage<(HexData, R::Hex)>, renderer: R, renderer_dirty: bool, rooms: Vec<CubicRangeShape>, next_region: usize, pointer: Option<(HexPointer, FovState)>, } impl<R: HexRenderer> World<R> { pub fn new(renderer: R) -> Self { Self { shape: CubicRangeShape::default(), shape_positions: Vec::new(), hexes: RectHashStorage::new(), renderer, renderer_dirty: false, rooms: Vec::new(), next_region: 0, pointer: None, } } pub fn set_shape_and_reset_world( &mut self, shape: CubicRangeShape, data: &mut StateData<'_, GameData<'_, '_>>, ) { self.shape = shape; let cell_radius = Self::compute_cell_radius(&self.shape, CELL_RADIUS_RATIO_DEN); let mut r = 0; loop { let mut end = true; for pos in self.shape.center().big_ring_iter(cell_radius, r) { let mut one_inside = false; for v in pos.ring_iter(cell_radius) { if self.shape.contains_position(v) { self.shape_positions.push(v); one_inside = 
true; } } if !one_inside { continue; } end = false; for s in 0..cell_radius { for v in pos.ring_iter(s) { if self.shape.contains_position(v) { self.shape_positions.push(v); } } } } if end { break; } r += 1; } self.reset_world(data); } pub fn reset_world(&mut self, data: &mut StateData<'_, GameData<'_, '_>>) { let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone(); self.clear(data, &world); for v in &self.shape_positions { self.hexes.insert( *v, ( HexData { state: HexState::Wall, }, self.renderer.new_hex(true, true), ), ); } self.renderer_dirty = true; } fn compute_cell_radius(shape: &CubicRangeShape, cell_radius_ratio_den: usize) -> usize { let mut deltas = [ shape.range_x().end() - shape.range_x().start(), shape.range_y().end() - shape.range_y().start(), shape.range_z().end() - shape.range_z().start(), ]; deltas.sort(); deltas[1] as usize / cell_radius_ratio_den } pub fn clear( &mut self, data: &mut StateData<'_, GameData<'_, '_>>, world: &RhombusViewerWorld, ) { self.delete_pointer(data, world); self.rooms.clear(); self.renderer.clear(data); self.hexes.dispose(data); self.next_region = 0; } fn delete_pointer( &mut self, data: &mut StateData<'_, GameData<'_, '_>>, world: &RhombusViewerWorld, ) { if let Some((mut pointer, _)) = self.pointer.take() { pointer.delete_entities(data, world); } } pub fn add_room(&mut self) { let mut deltas = [ self.shape.range_x().end() - self.shape.range_x().start(), self.shape.range_y().end() - self.shape.range_y().start(), self.shape.range_z().end() - self.shape.range_z().start(), ]; deltas.sort(); let radius = deltas[1] / 10; let mut rng = thread_rng(); let mut new_room = CubicRangeShape::new((-radius, radius), (-radius, radius), (-radius, radius)); let funcs: [( fn(&mut CubicRangeShape, usize) -> bool, fn(&mut CubicRangeShape, usize) -> bool, ); 6] = [ ( CubicRangeShape::shrink_x_start, CubicRangeShape::stretch_x_start, ), ( CubicRangeShape::shrink_x_end, CubicRangeShape::stretch_x_end, ), ( 
CubicRangeShape::shrink_y_start, CubicRangeShape::stretch_y_start, ), ( CubicRangeShape::shrink_y_end, CubicRangeShape::stretch_y_end, ), ( CubicRangeShape::shrink_z_start, CubicRangeShape::stretch_z_start, ), ( CubicRangeShape::shrink_z_end, CubicRangeShape::stretch_z_end, ), ]; for (st, sh) in funcs.iter() { let d = rng.gen_range(-radius / 3, radius / 3 + 1); for _ in 0..d.abs() { if d > 0 { st(&mut new_room, 2); } else if d < 0 { sh(&mut new_room, 2); } } } let random_pos = CubicVector::from(self.shape_positions[rng.gen_range(0, self.shape_positions.len())]); let mut start_x = new_room.range_x().start() + random_pos.x(); let delta_x = (start_x - self.shape.range_x().start() + 1) % 2; start_x += delta_x; let end_x = new_room.range_x().end() + random_pos.x() + delta_x; let mut start_z = new_room.range_z().start() + random_pos.z(); let delta_z = (start_z - self.shape.range_z().start() + 1) % 2; start_z += delta_z; let end_z = new_room.range_z().end() + random_pos.z() + delta_z; let start_y = new_room.range_y().start() + random_pos.y() - delta_x - delta_z; let end_y = new_room.range_y().end() + random_pos.y() - delta_x - delta_z; let is_inside_shape = self.shape.range_x().start() < start_x && self.shape.range_x().end() > end_x && self.shape.range_y().start() < start_y && self.shape.range_y().end() > end_y && self.shape.range_z().start() < start_z && self.shape.range_z().end() > end_z; let new_room = CubicRangeShape::new((start_x, end_x), (start_y, end_y), (start_z, end_z)); if is_inside_shape && !self.rooms.iter().any(|room| room.intersects(&new_room)) { let mut r = 0; loop { let mut end = true; for pos in new_room.center().ring_iter(r) { if new_room.contains_position(pos) { self.hexes.get_mut(pos).expect("new room cell").0.state = HexState::Open(self.next_region); end = false; } } if end { break; } r += 1; } self.rooms.push(new_room); self.next_region += 1; self.renderer_dirty = true; } } pub fn start_maze(&self) -> MazeState { MazeState { next_pos: 0, cells: 
Vec::new(), region: 0, } } pub fn grow_maze(&mut self, state: &mut MazeState) -> bool { loop { let mut rng = thread_rng(); if state.cells.is_empty() { let mut pos = state.next_pos; loop { if pos < self.shape_positions.len() { let cell = self.shape_positions[pos]; if self.can_carve(cell) { state.next_pos = pos + 1; state.cells.push((cell, None)); state.region = self.next_region; self.next_region += 1; break; } else { pos += 1; } } else { return true; } } } if let Some((cell, via)) = state.cells.pop() { if self.can_carve(cell) { if let Some((via, _)) = via { self.hexes.get_mut(via).expect("via cell").0.state = HexState::Open(state.region); } self.hexes.get_mut(cell).expect("carve cell").0.state = HexState::Open(state.region); self.renderer_dirty = true; let mut directions = Vec::new(); let mut wind_d = None; for dir in 0..NUM_DIRECTIONS { let neighbour = cell + AxialVector::direction(dir) * 2; if self.can_carve(neighbour) { if let Some((_, wind_dir)) = via { if wind_dir == dir { wind_d = Some(directions.len()) } } directions.push(dir); } } if !directions.is_empty() && wind_d.is_some() { debug_assert_eq!(directions[wind_d.unwrap()], via.unwrap().1); } if !directions.is_empty() { let d = wind_d .and_then(|d| { let windy = rng.gen_bool(0.6); if windy { Some(d) } else { None } }) .unwrap_or_else(|| rng.gen_range(0, directions.len())); let dir = directions[d]; for (i, dir) in directions.into_iter().enumerate() { if i != d { let via = cell + AxialVector::direction(dir); let neighbour = cell + AxialVector::direction(dir) * 2; state.cells.push((neighbour, Some((via, dir)))); } } let via = cell + AxialVector::direction(dir); let neighbour = cell + AxialVector::direction(dir) * 2; state.cells.push((neighbour, Some((via, dir)))); } return false; } } else { break; } } true } fn can_carve(&self, position: AxialVector) -> bool { let cubic = CubicVector::from(position); let is_inside_shape = self.shape.range_x().start() < cubic.x() && self.shape.range_x().end() > cubic.x() && 
self.shape.range_y().start() < cubic.y() && self.shape.range_y().end() > cubic.y() && self.shape.range_z().start() < cubic.z() && self.shape.range_z().end() > cubic.z(); is_inside_shape && ((cubic.x() - self.shape.range_x().start()) % 2 == 1) && ((cubic.z() - self.shape.range_z().start()) % 2 == 1) && self .hexes .get(position) .map_or(false, |(data, _)| data.state == HexState::Wall) } pub fn start_connect(&self) -> ConnectState { if self.next_region <= 1 { return ConnectState { connectors: Vec::new(), regions_to_connect: HashSet::new(), }; } let connectors = self .hexes .positions_and_hexes_with_adjacents() .filter_map(|(pos, hex_with_adjacents)| { if hex_with_adjacents.hex().0.state != HexState::Wall { return None; } let mut regions: SmallVec<[usize; 3]> = (0..NUM_DIRECTIONS) .filter_map(|dir| { hex_with_adjacents .adjacent(dir) .and_then(|(data, _)| match data.state { HexState::Open(region) => Some(region), HexState::Wall => None, }) }) .collect(); regions.sort(); regions.dedup(); debug_assert!(regions.len() <= 3); if regions.len() > 1 { Some((pos, regions)) } else { None } }) .collect(); let mut rng = thread_rng(); let first_region = rng.gen_range(0, self.next_region); let regions_to_connect = (0..self.next_region) .filter(|region| *region != first_region) .collect(); ConnectState { connectors, regions_to_connect, } } pub fn connect(&mut self, state: &mut ConnectState) -> bool { if state.regions_to_connect.is_empty() { return true; } let indices = state .connectors .iter() .enumerate() .filter_map(|(index, (_, connector_regions))| { let one_in = connector_regions .iter() .any(|cr| !state.regions_to_connect.contains(cr)); let one_out = connector_regions .iter() .any(|cr| state.regions_to_connect.contains(cr)); if one_in && one_out { Some(index) } else { None } }) .collect::<Vec<usize>>(); let mut rng = thread_rng(); let (pos, regions) = &state.connectors[indices[rng.gen_range(0, indices.len())]]; self.hexes.get_mut(*pos).expect("connector cell").0.state = 
HexState::Open(0); for r in regions { state.regions_to_connect.remove(r); } let connected_regions = regions.clone(); let mut connectors = Vec::new(); std::mem::swap(&mut state.connectors, &mut connectors); let (drained, remaining) = connectors.into_iter().partition(|(_, connector_regions)| { connector_regions .iter() .filter(|r1| connected_regions.iter().any(|r2| *r1 == r2)) .count() >= 2 }); state.connectors = remaining; for (pos, _) in drained { let carve = rng.gen_range(0, 50) == 0; if carve { self.hexes.get_mut(pos).expect("connector cell").0.state = HexState::Open(0); } } self.renderer_dirty = true; false } pub fn start_remove_dead_ends(&self) -> RemoveDeadEndsState { RemoveDeadEndsState { tests: self .hexes .positions() .filter(|pos| { let cubic = CubicVector::from(*pos); ((cubic.x() - self.shape.range_x().start()) % 2 == 1) && ((cubic.z() - self.shape.range_z().start()) % 2 == 1) }) .collect(), next: 0, redo_tests: Vec::new(), } } pub fn remove_dead_ends(&mut self, state: &mut RemoveDeadEndsState) -> bool { loop { while state.next < state.tests.len() { let pos = state.tests[state.next]; state.next += 1; let hex = self.hexes.get(pos); if let Some(( HexData { state: HexState::Open(..), }, _, )) = hex { } else { continue; } let mut redo = SmallVec::<[usize; NUM_DIRECTIONS]>::new(); for dir in 0..NUM_DIRECTIONS { let via = self.hexes.get(pos + AxialVector::direction(dir)); let adj = self.hexes.get(pos + AxialVector::direction(dir) * 2); if let ( Some(( HexData { state: HexState::Open(..), }, _, )), Some(( HexData { state: HexState::Open(..), }, _, )), ) = (via, adj) { redo.push(dir); } } if redo.len() <= 1 { state.redo_tests.extend( redo.into_iter() .map(|dir| pos + AxialVector::direction(dir) * 2), ); let mut haa = self.hexes.hex_with_adjacents_mut(pos); haa.hex().as_mut().expect("dead end cell").0.state = HexState::Wall; for dir in 0..NUM_DIRECTIONS { if let Some(adj) = haa.adjacent(dir) { if let HexState::Open(..) 
= adj.0.state { adj.0.state = HexState::Wall; } }; } self.renderer_dirty = true; return false; } } if !state.redo_tests.is_empty() { std::mem::swap(&mut state.tests, &mut state.redo_tests); state.redo_tests.clear(); state.next = 0; } else { break; } } true } pub fn start_remove_angles(&self) -> RemoveAnglesState { RemoveAnglesState { tests: self .hexes .positions() .filter(|pos| { let cubic = CubicVector::from(*pos); ((cubic.x() - self.shape.range_x().start()) % 2 == 1) && ((cubic.z() - self.shape.range_z().start()) % 2 == 1) }) .collect(), next: 0, redo_tests: Vec::new(), } } pub fn remove_angles(&mut self, state: &mut RemoveAnglesState) -> bool { loop { while state.next < state.tests.len() { let pos = state.tests[state.next]; state.next += 1; let hex = self.hexes.get(pos); if let Some(( HexData { state: HexState::Open(..), }, _, )) = hex { } else { continue; } let mut redo = SmallVec::<[usize; NUM_DIRECTIONS]>::new(); for dir in 0..NUM_DIRECTIONS { let adj = self.hexes.get(pos + AxialVector::direction(dir)); if let Some(( HexData { state: HexState::Open(..), }, _, )) = adj { redo.push(dir); } } if redo.len() == 2 && (redo[0] + 1 == redo[1] || redo[0] == 0 && redo[1] == 5) { let hex = self.hexes.get_mut(pos); hex.expect("angle cell").0.state = HexState::Wall; } } if !state.redo_tests.is_empty() { std::mem::swap(&mut state.tests, &mut state.redo_tests); state.redo_tests.clear(); state.next = 0; } else { break; } } true } pub fn clean_walls(&mut self, data: &mut StateData<'_, GameData<'_, '_>>) { let mut remove = Vec::new(); for (pos, haa) in self.hexes.positions_and_hexes_with_adjacents() { let mut keep = false; for dir in 0..NUM_DIRECTIONS { if let Some(( HexData { state: HexState::Open(..), }, _, )) = haa.adjacent(dir) { keep = true; break; } } if !keep { remove.push(pos); } } if !remove.is_empty() { for pos in remove { self.hexes.remove(pos).map(|mut hex| hex.dispose(data)); } self.renderer_dirty = true; } } fn find_open_hex(&self) -> Option<AxialVector> { let 
mut r = 0; loop { let mut end = true; for pos in self.shape.center().ring_iter(r) { let hex_data = self.hexes.get(pos).map(|hex| &hex.0); match hex_data { Some(HexData { state: HexState::Open(..), .. }) => return Some(pos), Some(..) => end = false, None => { if self.shape.contains_position(pos) { end = false } } } } if end { return None; } r += 1; } } pub fn create_pointer( &mut self, fov_state: FovState, data: &mut StateData<'_, GameData<'_, '_>>, ) { let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone(); self.delete_pointer(data, &world); if let Some(hex) = self.find_open_hex() { let mut pointer = HexPointer::new_with_level_height(1.0); pointer.set_position(hex, 0, data, &world); pointer.create_entities(data, &world); self.pointer = Some((pointer, fov_state)); self.renderer_dirty = true; } } pub fn increment_direction(&mut self, data: &StateData<'_, GameData<'_, '_>>) { if let Some((pointer, _)) = &mut self.pointer { let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone(); pointer.increment_direction(data, &world); } } pub fn decrement_direction(&mut self, data: &StateData<'_, GameData<'_, '_>>) { if let Some((pointer, _)) = &mut self.pointer { let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone(); pointer.decrement_direction(data, &world); } } pub fn next_position(&mut self, mode: MoveMode, data: &mut StateData<'_, GameData<'_, '_>>) { if let Some((pointer, _)) = &mut self.pointer { let direction = match mode { MoveMode::StraightAhead => pointer.direction(), MoveMode::StrafeLeftAhead => (pointer.direction() + 5) % 6, MoveMode::StrafeLeftBack => (pointer.direction() + 4) % 6, MoveMode::StrafeRightAhead => (pointer.direction() + 1) % 6, MoveMode::StrafeRightBack => (pointer.direction() + 2) % 6, MoveMode::StraightBack => (pointer.direction() + 3) % 6, }; let next = pointer.position().neighbor(direction); if let Some(HexData { state: HexState::Open(..), .. 
}) = self.hexes.get(next).map(|hex| &hex.0) { let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone(); pointer.set_position(next, 0, data, &world); self.renderer_dirty = true; } } } pub fn change_field_of_view(&mut self, fov_state: FovState) { if let Some((_, pointer_fov_state)) = &mut self.pointer { *pointer_fov_state = fov_state; self.renderer_dirty = true; } } pub fn update_renderer_world( &mut self, force: bool, data: &mut StateData<'_, GameData<'_, '_>>, ) { if !self.renderer_dirty { return; } let (visible_positions, visible_only) = if let Some((pointer, fov_state)) = &self.pointer { let mut visible_positions = HashSet::new(); visible_positions.insert(pointer.position()); let mut fov = FieldOfView::default(); fov.start(pointer.position()); let is_obstacle = |pos| { let hex_data = self.hexes.get(pos).map(|hex| &hex.0); match hex_data { Some(HexData { state: HexState::Open(..), .. }) => false, Some(HexData { state: HexState::Wall, .. }) => true, None => false, } }; loop { let prev_len = visible_positions.len(); for pos in fov.iter() { let key = pointer.position() + pos; if self.hexes.contains_position(key) { let inserted = visible_positions.insert(key); debug_assert!(inserted); } } if visible_positions.len() == prev_len { break; } fov.next_radius(&is_obstacle); } ( Some(visible_positions), match fov_state { FovState::Partial => false, FovState::Full => true, }, ) } else { (None, false) }; let world = (*data.world.read_resource::<Arc<RhombusViewerWorld>>()).clone(); self.renderer.update_world( &mut self.hexes, |_, hex| !matches!(hex.0.state, HexState::Open(..)), |pos, _| { visible_positions .as_ref() .map_or(true, |vp| vp.contains(&pos)) }, |hex| &mut hex.1, visible_only, force, data, &world, ); self.renderer_dirty = false; } } #[derive(Debug)] pub struct MazeState { next_pos: usize, cells: Vec<(AxialVector, Option<(AxialVector, usize)>)>, region: usize, } #[derive(Debug)] pub struct ConnectState { connectors: Vec<(AxialVector, 
SmallVec<[usize; 3]>)>, regions_to_connect: HashSet<usize>, } #[derive(Debug)] pub struct RemoveDeadEndsState { tests: Vec<AxialVector>, next: usize, redo_tests: Vec<AxialVector>, } #[derive(Debug)] pub struct RemoveAnglesState { tests: Vec<AxialVector>, next: usize, redo_tests: Vec<AxialVector>, }
// Demo for the `debug_rs` crate: exercises the `debug!` macro once with
// multiple mixed-type arguments and once with a vector expression.
#[macro_use] extern crate debug_rs; fn main() { debug!(666, 33, "aaa"); debug!(vec![1, 2, 3]); }
// Data-model (TR-069/TR-181 style) XML spec types: per-type syntax
// descriptors (`StringSyntax` with patterns/enumerations/pathRef,
// `IntSyntax<T>` with inclusive bounds and unit, `HexSyntax`/`Base64Syntax`
// with length bounds, `BooleanSyntax` with a default), the `ParameterSyntax`
// enum tying them together, `ParseState` for the reader's string/boolean
// context, and the `DataType` / `ParameterAttributes` records populated while
// walking the spec with an XML event reader.
use std::collections::HashMap; use std::fs::File; use std::io::BufReader; use xml::reader::{EventReader, XmlEvent}; /// Pathref contains the special pathRef string syntax #[derive(Debug, Eq, PartialEq, Hash, Default, Clone)] struct PathRef { ref_type: String, target_parent: String, target_type: String, } #[derive(Debug, Eq, PartialEq, Hash, Default, Clone)] struct StringEnumeration { value: String, access: String, optional: bool, } impl StringEnumeration { fn new(value: String, access: String, optional: bool) -> StringEnumeration { StringEnumeration { value: value, access: access, optional: optional, } } } /// Contains information on the syntax of a parameter #[derive(Debug, Eq, PartialEq, Hash, Default, Clone)] struct StringSyntax { min_length: usize, max_length: usize, patterns: Vec<String>, // must match one of enumerations: Vec<StringEnumeration>, // string - enumeration type pathref: Option<PathRef>, } impl StringSyntax { fn set_min_length(&mut self, min_length: usize) { self.min_length = min_length; } fn set_max_length(&mut self, max_length: usize) { self.max_length = max_length; } fn push_pattern(&mut self, new_pattern: String) { self.patterns.push(new_pattern); } fn push_enumeration(&mut self, value: String, access: String, optional: bool) { self .enumerations .push(StringEnumeration::new(value, access, optional)); } } /// Contains information on the syntax of a parameter #[derive(Debug, Eq, PartialEq, Hash, Default, Clone)] struct BooleanSyntax { default: bool, } impl BooleanSyntax { fn set_default(&mut self, default: bool) { self.default = default; } } #[derive(Debug, Eq, PartialEq, Hash, Default, Clone)] struct IntSyntax<T> { min_inclusive: T, max_inclusive: T, unit: String, } impl<T> IntSyntax<T> { fn set_min_inclusive(&mut self, min_inclusive: T) { self.min_inclusive = min_inclusive; } fn set_max_inclusive(&mut self, max_inclusive: T) { self.max_inclusive = max_inclusive; } fn set_unit(&mut self, unit: String) { self.unit = unit; } } #[derive(Debug, 
Eq, PartialEq, Hash, Default, Clone)] struct HexSyntax { min_length: usize, max_length: usize, } impl HexSyntax { fn set_min_length(&mut self, min_length: usize) { self.min_length = min_length; } fn set_max_length(&mut self, max_length: usize) { self.max_length = max_length; } } #[derive(Debug, Eq, PartialEq, Hash, Default, Clone)] struct Base64Syntax { min_length: usize, max_length: usize, } impl Base64Syntax { fn set_min_length(&mut self, min_length: usize) { self.min_length = min_length; } fn set_max_length(&mut self, max_length: usize) { self.max_length = max_length; } } #[derive(Debug, Eq, PartialEq, Hash, Clone)] enum ParameterSyntax { None, String(StringSyntax), HexBinary(HexSyntax), Base64(Base64Syntax), DateTime, Integer(IntSyntax<i32>), UnsignedInt(IntSyntax<u32>), Long(IntSyntax<i64>), UnsignedLong(IntSyntax<u64>), Boolean(BooleanSyntax), } #[derive(Debug, Eq, PartialEq, Hash, Clone)] enum ParseState { None, InString, InBoolean, } #[derive(Debug, Eq, PartialEq, Hash, Clone)] struct DataType { name: Option<String>, description: Option<String>, is_list: bool, list_min: usize, list_max: usize, syntax: ParameterSyntax, } impl DataType { fn new(name: Option<String>, description: Option<String>) -> Self { DataType { name: name, description: description, syntax: ParameterSyntax::None, is_list: false, list_min: 0, list_max: 0, } } fn set_description(&mut self, description: Option<String>) { self.description = description; } fn set_name(&mut self, name: String) { self.name = Some(name); } fn get_name(&mut self) -> Option<String> { return self.name.clone(); } fn set_list(&mut self, list: bool) { self.is_list = list; } fn set_list_min_items(&mut self, min: usize) { self.list_min = min; } fn set_list_max_items(&mut self, max: usize) { self.list_max = max; } fn set_syntax(&mut self, syntax: ParameterSyntax) { self.syntax = syntax; } fn get_syntax(&mut self) -> ParameterSyntax { return self.syntax.clone(); } } /// Contains the parameter attributes from the xml spec 
#[derive(Debug, Eq, PartialEq, Hash, Clone)] pub struct ParameterAttributes { access: Option<String>, version: Option<String>, active_notify: Option<String>, description: Option<String>, forced_inform: bool, is_list: bool, list_min: usize, list_max: usize, syntax: ParameterSyntax, } impl ParameterAttributes { /// Used to create a new ParameterAttribute struct fn new(access: Option<String>, version: Option<String>, active_notify: Option<String>) -> Self { ParameterAttributes { access: access, version: version, active_notify: active_notify, description: None, syntax: ParameterSyntax::None, forced_inform: false, is_list: false, list_min: 0, list_max: 0, } } /// Used when a description for a parameter is found in the spec fn set_description(&mut self, description: Option<String>) { self.description = description; } /// used when a syntax has been compiled from the spec to attach it to the /// attribute for which it was collected fn set_syntax(&mut self, syntax: ParameterSyntax) { self.syntax = syntax; } fn set_forced_inform(&mut self, forced_inform: bool) { self.forced_inform = forced_inform; } /// used when a "list" tag is found within a syntax tag fn set_list(&mut self, list: bool) { self.is_list = list; } fn set_list_min_items(&mut self, min: usize) { self.list_min = min; } fn set_list_max_items(&mut self, max: usize) { self.list_max = max; } fn set_access(&mut self, access: String) { self.access = Some(access); } fn set_version(&mut self, version: String) { self.version = Some(version); } fn set_active_notify(&mut self, active_notify: String) { self.active_notify = Some(active_notify); } } /// Used to parse the specification given by the filename /// Returns a parsed set of ParameterAttributes. ie. 
/// "model_version", HashMap<"parametername", ParameterAttributes> pub fn parse_spec( spec_filename: String, ) -> Result<HashMap<String, HashMap<String, ParameterAttributes>>, Box<dyn std::error::Error>> { let file = File::open(spec_filename).unwrap(); let file_buf = BufReader::new(file); let mut result: HashMap<String, HashMap<String, ParameterAttributes>> = HashMap::new(); let mut data_types: HashMap<String, DataType> = HashMap::new(); let parser = EventReader::new(file_buf); let mut current_model_name: Option<String> = None; let mut current_object_name: Option<String> = None; let mut current_parameter_name: Option<String> = None; let mut current_syntax: ParameterSyntax = ParameterSyntax::None; let mut current_parameter_attributes: ParameterAttributes = ParameterAttributes::new(None, None, None); let mut current_string_syntax: StringSyntax = StringSyntax::default(); let mut current_boolean_syntax: BooleanSyntax = BooleanSyntax::default(); let mut state = ParseState::None; let mut path = vec![]; let mut current_datatype = DataType::new(None, None); for e in parser { match e { Ok(XmlEvent::StartElement { ref name, ref attributes, .. }) => { path.push(name.local_name.to_string()); let path_pattern: Vec<&str> = path.iter().map(AsRef::as_ref).collect(); match &path_pattern[..] 
{ ["document", "dataType"] => { current_datatype = DataType::new(None, None); current_datatype.syntax = ParameterSyntax::None; if let Some(datatype_name) = extract_attribute(attributes, "name") { current_datatype.set_name(datatype_name); } if let Some(datatype_base) = extract_attribute(attributes, "base") { // use syntax from base // find base dataType and grab the syntax from there if data_types.contains_key(&datatype_base) { if let Some(base_type) = data_types.get(&datatype_base) { let syn = base_type.syntax.clone(); current_datatype.set_syntax(syn); } } } } ["document", "dataType", "list"] => { current_datatype.set_list(true); if let Some(min_items) = extract_attribute(attributes, "minItems") { current_datatype.set_list_min_items(min_items.parse::<usize>().unwrap()); } if let Some(max_items) = extract_attribute(attributes, "maxItems") { current_datatype.set_list_max_items(max_items.parse::<usize>().unwrap()); } } ["document", "dataType", "dateTime"] => { current_datatype.syntax = ParameterSyntax::DateTime; } ["document", "dataType", "long"] => { current_datatype.syntax = ParameterSyntax::Long(IntSyntax::<i64>::default()); } ["document", "dataType", "unsignedLong"] => { current_datatype.syntax = ParameterSyntax::UnsignedLong(IntSyntax::<u64>::default()); } ["document", "dataType", "int"] => { current_datatype.syntax = ParameterSyntax::Integer(IntSyntax::<i32>::default()); } ["document", "dataType", "unsignedInt"] => { current_datatype.syntax = ParameterSyntax::UnsignedInt(IntSyntax::<u32>::default()); } ["document", "dataType", "string"] => { current_string_syntax = StringSyntax::default(); state = ParseState::InString; } ["document", "dataType", "string", "size"] => { if let Some(min_length) = extract_attribute(attributes, "minLength") { if state == ParseState::InString { current_string_syntax.set_min_length(min_length.parse::<usize>().unwrap()); } } if let Some(max_length) = extract_attribute(attributes, "maxLength") { if state == ParseState::InString { 
current_string_syntax.set_max_length(max_length.parse::<usize>().unwrap()); } } } ["document", "dataType", "string", "pattern"] => { if let Some(pattern) = extract_attribute(attributes, "value") { if state == ParseState::InString { current_string_syntax.push_pattern(pattern); } } } ["document", "dataType", "string", "enumeration"] => { if let Some(enum_value) = extract_attribute(attributes, "value") { let a = extract_attribute(attributes, "access"); let access = if a.is_some() { a.unwrap() } else { "readWrite".to_string() }; let o = extract_attribute(attributes, "optional"); let optional = if o.is_some() { o.unwrap() == "true".to_string() } else { false }; current_string_syntax.push_enumeration(enum_value, access, optional); } } ["document", "dataType", "size"] => { // override of existing string syntax if let Some(min_length) = extract_attribute(attributes, "minLength") { let cur_syn = current_datatype.get_syntax(); match cur_syn { ParameterSyntax::String(css) => { let mut new_css = css.clone(); new_css.set_min_length(min_length.parse::<usize>().unwrap()); current_datatype.set_syntax(ParameterSyntax::String(new_css)); } _ => {} } } if let Some(max_length) = extract_attribute(attributes, "maxLength") { let cur_syn = current_datatype.get_syntax(); match cur_syn { ParameterSyntax::String(css) => { let mut new_css = css.clone(); new_css.set_max_length(max_length.parse::<usize>().unwrap()); current_datatype.set_syntax(ParameterSyntax::String(new_css)); } _ => {} } } } ["document", "dataType", "pattern"] => { // override of existing string patterns, just add this to the existing list of patterns if let Some(pattern) = extract_attribute(attributes, "value") { let cur_syn = current_datatype.get_syntax(); match cur_syn { ParameterSyntax::String(css) => { let mut new_css = css.clone(); new_css.push_pattern(pattern); current_datatype.set_syntax(ParameterSyntax::String(new_css)); } _ => {} } } } ["document", "model"] => { if let Some(current_model) = 
extract_attribute(attributes, "name") { current_model_name = Some(current_model.clone()); result.insert(current_model, HashMap::new()); } } ["document", "model", "object"] => { current_object_name = extract_attribute(attributes, "name"); } ["document", "model", "object", "parameter"] => { current_parameter_attributes = ParameterAttributes::new(None, None, None); // now extract the attribute values if let Some(parameter_name) = extract_attribute(attributes, "name") { current_parameter_name = Some(format!( "{}{}", current_object_name.clone().unwrap(), parameter_name )); } if let Some(parameter_access) = extract_attribute(attributes, "access") { current_parameter_attributes.set_access(parameter_access); } if let Some(parameter_version) = extract_attribute(attributes, "version") { current_parameter_attributes.set_version(parameter_version); } if let Some(parameter_active_notify) = extract_attribute(attributes, "active_notify") { current_parameter_attributes.set_active_notify(parameter_active_notify); } if let Some(forced_inform) = extract_attribute(attributes, "forcedInform") { current_parameter_attributes.set_forced_inform(forced_inform == String::from("true")); } } ["document", "model", "object", "parameter", "syntax"] => { current_syntax = ParameterSyntax::None; } ["document", "model", "object", "parameter", "syntax", "dateTime"] => { current_syntax = ParameterSyntax::DateTime; } ["document", "model", "object", "parameter", "syntax", "long"] => { current_syntax = ParameterSyntax::Long(IntSyntax::<i64>::default()); } ["document", "model", "object", "parameter", "syntax", "unsignedLong"] => { current_syntax = ParameterSyntax::UnsignedLong(IntSyntax::<u64>::default()); } ["document", "model", "object", "parameter", "syntax", "int"] => { current_syntax = ParameterSyntax::Integer(IntSyntax::<i32>::default()); } ["document", "model", "object", "parameter", "syntax", "unsignedInt"] => { current_syntax = ParameterSyntax::UnsignedInt(IntSyntax::<u32>::default()); } 
["document", "model", "object", "parameter", "syntax", "int", "range"] => { let mut int_syn = IntSyntax::<i32>::default(); if let Some(min) = extract_attribute(attributes, "minInclusive") { int_syn.set_min_inclusive(min.parse::<i32>().unwrap()); } if let Some(max) = extract_attribute(attributes, "maxInclusive") { int_syn.set_max_inclusive(max.parse::<i32>().unwrap()); } current_syntax = ParameterSyntax::Integer(int_syn); } ["document", "model", "object", "parameter", "syntax", "long", "range"] => { match current_syntax { ParameterSyntax::Long(long_syntax) => { let mut long_syn = long_syntax.clone(); if let Some(min) = extract_attribute(attributes, "minInclusive") { long_syn.set_min_inclusive(min.parse::<i64>().unwrap()); } if let Some(max) = extract_attribute(attributes, "maxInclusive") { long_syn.set_max_inclusive(max.parse::<i64>().unwrap()); } current_syntax = ParameterSyntax::Long(long_syn); } _ => {} } } ["document", "model", "object", "parameter", "syntax", "long", "units"] => { match current_syntax { ParameterSyntax::Long(long_syntax) => { let mut long_syn = long_syntax.clone(); if let Some(unit_name) = extract_attribute(attributes, "value") { long_syn.set_unit(unit_name); } current_syntax = ParameterSyntax::Long(long_syn); } _ => {} } } ["document", "model", "object", "parameter", "syntax", "hexBinary"] => { current_syntax = ParameterSyntax::HexBinary(HexSyntax::default()); } ["document", "model", "object", "parameter", "syntax", "hexBinary", "size"] => { let mut hex_syn = HexSyntax::default(); if let Some(min) = extract_attribute(attributes, "minLength") { hex_syn.set_min_length(min.parse::<usize>().unwrap()); } if let Some(max) = extract_attribute(attributes, "maxLength") { hex_syn.set_max_length(max.parse::<usize>().unwrap()); } current_syntax = ParameterSyntax::HexBinary(hex_syn); } ["document", "model", "object", "parameter", "syntax", "base64"] => { current_syntax = ParameterSyntax::Base64(Base64Syntax::default()); } ["document", "model", "object", 
"parameter", "syntax", "base64", "size"] => { let mut b64_syn = Base64Syntax::default(); if let Some(min) = extract_attribute(attributes, "minLength") { b64_syn.set_min_length(min.parse::<usize>().unwrap()); } if let Some(max) = extract_attribute(attributes, "maxLength") { b64_syn.set_max_length(max.parse::<usize>().unwrap()); } current_syntax = ParameterSyntax::Base64(b64_syn); } ["document", "model", "object", "parameter", "syntax", "boolean"] => { current_boolean_syntax = BooleanSyntax::default(); state = ParseState::InBoolean; } ["document", "model", "object", "parameter", "syntax", "default"] => { if let Some(val) = extract_attribute(attributes, "value") { if state == ParseState::InBoolean { current_boolean_syntax.set_default(val == "true"); } } } ["document", "model", "object", "parameter", "syntax", "string"] => { current_string_syntax = StringSyntax::default(); state = ParseState::InString; } ["document", "model", "object", "parameter", "syntax", "string", "size"] => { if let Some(min_length) = extract_attribute(attributes, "minLength") { if state == ParseState::InString { current_string_syntax.set_min_length(min_length.parse::<usize>().unwrap()); } } if let Some(max_length) = extract_attribute(attributes, "maxLength") { if state == ParseState::InString { current_string_syntax.set_max_length(max_length.parse::<usize>().unwrap()); } } } ["document", "model", "object", "parameter", "syntax", "string", "pattern"] => { if let Some(pattern) = extract_attribute(attributes, "value") { if state == ParseState::InString { current_string_syntax.push_pattern(pattern); } } } ["document", "model", "object", "parameter", "syntax", "string", "enumeration"] => { if let Some(enum_value) = extract_attribute(attributes, "value") { let a = extract_attribute(attributes, "access"); let access = if a.is_some() { a.unwrap() } else { "readWrite".to_string() }; let o = extract_attribute(attributes, "optional"); let optional = if o.is_some() { o.unwrap() == "true".to_string() } else { 
false }; current_string_syntax.push_enumeration(enum_value, access, optional); } } ["document", "model", "object", "parameter", "syntax", "list"] => { current_parameter_attributes.set_list(true); if let Some(min_items) = extract_attribute(attributes, "minItems") { current_parameter_attributes.set_list_min_items(min_items.parse::<usize>().unwrap()); } if let Some(max_items) = extract_attribute(attributes, "maxItems") { current_parameter_attributes.set_list_max_items(max_items.parse::<usize>().unwrap()); } } ["document", "model", "object", "parameter", "syntax", "dataType"] => { // a reference to a dataType entry if let Some(r) = extract_attribute(attributes, "ref") { if data_types.contains_key(&r) { if let Some(base_type) = data_types.get(&r) { current_syntax = base_type.syntax.clone(); } } } } _ => {} } } Ok(XmlEvent::EndElement { name: _ }) => { let path_pattern: Vec<&str> = path.iter().map(AsRef::as_ref).collect(); match &path_pattern[..] { ["document", "dataType"] => { if state == ParseState::InString { current_datatype.syntax = ParameterSyntax::String(current_string_syntax.clone()); state = ParseState::None; } // we are done collecting elements for the current_datatype, now // stuff it into the data_types HashMap data_types.insert( current_datatype.get_name().unwrap(), current_datatype.clone(), ); } ["document", "model", "object", "parameter"] => { if let Some(m) = result.get_mut(&current_model_name.clone().unwrap()) { if let Some(pn) = current_parameter_name { // println!("matched pn {:?}", current_parameter_attributes); m.insert(pn, current_parameter_attributes.clone()); } } current_parameter_name = None; } ["document", "model"] => { current_model_name = None; } ["document", "model", "object", "parameter", "syntax"] => { current_syntax = match state { ParseState::InBoolean => ParameterSyntax::Boolean(current_boolean_syntax.clone()), ParseState::InString => ParameterSyntax::String(current_string_syntax.clone()), _ => current_syntax, }; 
current_parameter_attributes.set_syntax(current_syntax.clone()); state = ParseState::None; } _ => {} } path.pop(); } Ok(XmlEvent::Characters(ref s)) => { let path_pattern: Vec<&str> = path.iter().map(AsRef::as_ref).collect(); match &path_pattern[..] { ["document", "dataType", "description"] => { current_datatype.set_description(Some(s.to_string())); } ["document", "model", "object", "parameter", "description"] => { current_parameter_attributes.set_description(Some(s.to_string())); } _ => {} } } Err(e) => { println!("XML parse error: {}", e); break; } _ => {} } } Ok(result) } /// Extracts the value of a given attributes fn extract_attribute( attributes: &Vec<xml::attribute::OwnedAttribute>, attrib_name: &str, ) -> Option<String> { let f = attributes .iter() .filter(|&x| x.name.local_name == attrib_name) .next(); match f { Some(e) => Some(e.value.to_string()), None => None, } } #[cfg(test)] #[macro_use] extern crate lazy_static; #[cfg(test)] mod tests { use super::*; lazy_static! { static ref SPEC: HashMap<String, HashMap<String, ParameterAttributes>> = parse_spec("./tr-181-2-13-0-cwmp-full.xml".to_string()).unwrap(); } #[test] fn parse_string_list_1() { assert_eq!(SPEC.len(), 1); // check specific syntaxes, as many as you have energy for... 
assert_eq!(SPEC.contains_key("Device:2.13"), true); let a = SPEC.get("Device:2.13"); assert_eq!(a.is_some(), true); if let Some(attr) = a { assert_eq!(attr.keys().len(), 4525); // start asserting syntaxes assert!(attr.contains_key("Device.DeviceInfo.DeviceCategory")); if let Some(dc) = attr.get("Device.DeviceInfo.DeviceCategory") { assert!(dc.is_list); assert_eq!(dc.syntax, ParameterSyntax::String(StringSyntax::default())); } } } #[test] fn basetype_reference_parsed_1() { let mut vca_ssyn = StringSyntax::default(); vca_ssyn.set_max_length(64); perform_test( "Device.DeviceInfo.VendorConfigFile.{i}.Alias", ParameterSyntax::String(vca_ssyn), ); } #[test] fn string_enumeration_parsed_1() { let mut exp_tssyn = StringSyntax::default(); exp_tssyn.push_enumeration("Disabled".to_string(), "readWrite".to_string(), false); exp_tssyn.push_enumeration("Enabled".to_string(), "readWrite".to_string(), false); exp_tssyn.push_enumeration("Error".to_string(), "readWrite".to_string(), false); perform_test( "Device.DeviceInfo.TemperatureStatus.TemperatureSensor.{i}.Status", ParameterSyntax::String(exp_tssyn), ); } #[test] fn boolean_parsed_1() { perform_test( "Device.Security.Certificate.{i}.Enable", ParameterSyntax::Boolean(BooleanSyntax::default()), ); } #[test] fn long_parsed_1() { let mut exp_syn = IntSyntax::<i64>::default(); exp_syn.set_min_inclusive(-1); exp_syn.set_unit("seconds".to_string()); perform_test( "Device.XMPP.Connection.{i}.KeepAliveInterval", ParameterSyntax::Long(exp_syn), ); } #[test] fn ipaddress_parsed_1() { // an IP address, ie. 
a string with a specific length and pattern let mut exp_ssyn = StringSyntax::default(); exp_ssyn.push_pattern("".to_string()); exp_ssyn.push_pattern( "((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])" .to_string(), ); exp_ssyn.set_max_length(15); perform_test( "Device.DHCPv4.Server.Pool.{i}.MinAddress", ParameterSyntax::String(exp_ssyn), ); } fn perform_test(param_name: &str, expected_syntax: ParameterSyntax) { let a = SPEC.get("Device:2.13"); if let Some(attr) = a { assert!(attr.contains_key(&param_name.to_string())); if let Some(pa) = attr.get(&param_name.to_string()) { assert_eq!(pa.syntax, expected_syntax); } } } }
// file: max_parabole.rs
//
// Copyright 2015-2017 The RsGenetic Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! This simple example shows how to use a simulator
//! that finds the maximum of the function f(x) = 10-(x+3)^2 (which is (-3,10)).

extern crate rand;
extern crate rsgenetic;

use rsgenetic::sim::*;
use rsgenetic::sim::seq::Simulator;
use rsgenetic::sim::select::*;
use rsgenetic::pheno::*;
use rand::distributions::{IndependentSample, Range};
use std::cmp::Ordering;

// Fitness wrapper around an f64 so it can implement rsgenetic's `Fitness`
// trait (which needs total ordering and a zero/abs_diff).
struct MyFitness {
    f: f64,
}

impl Eq for MyFitness {}

impl PartialEq for MyFitness {
    // Approximate equality: two fitness values within 1e-4 compare equal.
    fn eq(&self, other: &MyFitness) -> bool {
        (self.f - other.f).abs() < 0.0001
    }
}

impl PartialOrd for MyFitness {
    fn partial_cmp(&self, other: &MyFitness) -> Option<Ordering> {
        self.f.partial_cmp(&other.f)
    }
}

impl Ord for MyFitness {
    // f64 has no total order (NaN); treat incomparable values as Equal.
    fn cmp(&self, other: &MyFitness) -> Ordering {
        self.partial_cmp(other).unwrap_or(Ordering::Equal)
    }
}

impl Fitness for MyFitness {
    fn zero() -> MyFitness {
        MyFitness { f: 0.0 }
    }

    fn abs_diff(&self, other: &MyFitness) -> MyFitness {
        MyFitness {
            f: (self.f - other.f).abs(),
        }
    }
}

// Phenotype: a single candidate x value.
struct MyData {
    x: f64,
}

impl Phenotype<MyFitness> for MyData {
    fn fitness(&self) -> MyFitness {
        // Calculate the function here, because it's what we wish to maximize.
        MyFitness {
            f: 10.0 - ((self.x + 3.0) * (self.x + 3.0)),
        }
    }

    fn crossover(&self, other: &MyData) -> MyData {
        // We take the average for crossover.
        MyData {
            x: (self.x + other.x) / 2.0,
        }
    }

    fn mutate(&self) -> MyData {
        // Shift x with a random number.
        // (This RNG code should reside somewhere else, not in this function, but it's just an
        // example).
        // Because we don't want to have too big mutations, we limit the range to -1, +1.
        // Smaller values can cause slower convergence, but larger values may cause completely
        // wrong values.
        let between = Range::new(-1.0, 1.0);
        let mut rng = rand::thread_rng();
        let offset = between.ind_sample(&mut rng);
        MyData { x: self.x + offset }
    }
}

impl Clone for MyData {
    fn clone(&self) -> MyData {
        MyData { x: self.x }
    }
}

fn main() {
    // Initial population: integer x values in [-300, 300).
    let mut population = (-300..300).map(|i| MyData { x: f64::from(i) }).collect();
    let mut s = Simulator::builder(&mut population)
        .set_selector(Box::new(StochasticSelector::new(10)))
        .set_max_iters(50)
        .build();
    s.run();
    // Best phenotype found after the run.
    let result = s.get().unwrap();
    let time = s.time();
    println!("Execution time: {} ns.", time.unwrap());
    println!("Expected result: (-3, 10).");
    println!("Result: ({}, {}).", result.x, result.fitness().f);
}
use std;
use std::slice;
use std::ptr;
use std::marker::PhantomData;
use std::cmp;
use std::io::Cursor;
use std::collections::VecDeque;
use bytes::Buf;
use bytes::BufMut;
use bytes::BigEndian;
use bytes::ByteOrder;
use bincode;
use mio;
use serde::Serialize;
use serde::de::DeserializeOwned;
use iovec::IoVec;

// Default capacity of a freshly allocated segment (see `bytes_mut`).
const DEFAULT_CAP: usize = 1024;
// Smallest segment `reserve` will allocate; sizes are rounded up to powers of two.
const MIN_CAP: usize = 32;
// Upper bound on iovec entries passed to a single writev call.
const IOVEC_MAX_LEN: usize = 128;

/// IO Buffer implementing `bytes::{Buf,BufMut}`.
///
/// Internally a deque of `Cursor<Vec<u8>>` segments: reads consume from the
/// front cursor, writes append to the back segment's spare capacity.
#[derive(Default)]
pub struct IoBuffer {
    inner: VecDeque<Cursor<Vec<u8>>>,
    /// amount of data in the buffer
    remaining: usize,
}

impl IoBuffer {
    /// Creates an empty buffer with no segments allocated.
    pub fn new() -> Self {
        IoBuffer {
            inner: VecDeque::new(),
            remaining: 0,
        }
    }

    /// Creates an empty buffer and pre-reserves `cap` bytes of writable space.
    pub fn with_capacity(cap: usize) -> Self {
        let mut cb = IoBuffer::new();
        cb.reserve(cap);
        cb
    }

    /// Ensures at least `cnt` writable bytes exist in the back segment,
    /// either growing an empty back segment or appending a new one.
    pub fn reserve(&mut self, cnt: usize) {
        // mutually exclusive
        let mut alloc = false;
        let mut resize = false;
        if let Some(cur) = self.inner.back_mut() {
            if cur.get_mut().capacity() - cur.get_mut().len() < cnt {
                if cur.get_mut().is_empty() {
                    resize = true;
                } else {
                    alloc = true;
                }
            }
        } else {
            alloc = true;
        }
        if alloc {
            // add a new vec
            let mut size = MIN_CAP;
            while size < cnt {
                size <<= 1;
            }
            let new = Cursor::new(Vec::with_capacity(size));
            self.inner.push_back(new);
        } else if resize {
            // resize last vec
            let mut size = MIN_CAP;
            while size < cnt {
                size <<= 1;
            }
            self.inner.back_mut().unwrap().get_mut().reserve(size);
        }
    }

    /// Number of unread bytes stored in the buffer.
    pub fn len(&self) -> usize {
        self.remaining
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Writes up to IOVEC_MAX_LEN segments to `stream` with a single
    /// vectored write; returns the number of bytes written.
    pub fn writev_to(&self, stream: &mio::net::TcpStream) -> std::io::Result<usize> {
        // NOTE(review): mem::uninitialized for an array of references is
        // undefined behavior in modern Rust (references must be non-null);
        // MaybeUninit is the sound replacement — confirm and migrate.
        let mut iovecs: [&IoVec; IOVEC_MAX_LEN] = unsafe { std::mem::uninitialized() };
        let cnt = cmp::min(IOVEC_MAX_LEN, self.inner.len());
        for i in 0 .. cnt {
            iovecs[i] = self.inner[i].bytes().into();
        }
        stream.write_bufs(&iovecs[.. cnt])
    }

    /// Peeks the next cnt bytes if available. It doesn't modify the
    /// buffer, but may need to create a temporary vec if the
    /// data is not contiguous.
    pub fn with_peek<F, T>(&mut self, cnt: usize, mut f: F) -> T
    where
        F: FnMut(Option<&[u8]>) -> T,
    {
        if self.remaining < cnt {
            // not enough available data
            f(None)
        } else {
            // available contiguous data
            if let Some(cur) = self.inner.front() {
                if cur.remaining() >= cnt {
                    let pos = cur.position() as usize;
                    return f(Some(&cur.get_ref()[pos..pos+cnt]));
                }
            }
            // data available but not contiguous
            if cnt <= 32 {
                // avoid allocation if peek is small
                let mut tmp = [0; 32];
                let tmp_ptr = tmp.as_mut_ptr();
                let mut off: isize = 0;
                let mut curidx: usize = 0;
                // Copy from successive segments until `cnt` bytes are gathered.
                while (off as usize) < cnt {
                    let cur = &mut self.inner[curidx];
                    let pos = cur.position() as isize;
                    let ptr = cur.get_mut().as_mut_ptr();
                    unsafe {
                        let to_copy = cmp::min(cnt - off as usize, cur.remaining());
                        ptr::copy_nonoverlapping(ptr.offset(pos), tmp_ptr.offset(off), to_copy);
                        off += to_copy as isize;
                        curidx += 1;
                    }
                }
                f(Some(&tmp[..cnt]))
            } else {
                // NOTE(review): tmp is only reserved, never length-set; the
                // raw copies write into spare capacity and the later
                // `&tmp[..cnt]` slice of a zero-length Vec looks unsound —
                // verify against Miri.
                let mut tmp = Vec::with_capacity(cnt);
                let tmp_ptr: *mut u8 = tmp.as_mut_ptr();
                let mut off: isize = 0;
                let mut curidx: usize = 0;
                while (off as usize) < cnt {
                    let cur = &mut self.inner[curidx];
                    let pos = cur.position() as isize;
                    let ptr: *const u8 = cur.get_mut().as_ptr();
                    unsafe {
                        let to_copy = cmp::min(cnt - off as usize, cur.remaining());
                        ptr::copy_nonoverlapping(ptr.offset(pos), tmp_ptr.offset(off), to_copy);
                        off += to_copy as isize;
                        curidx += 1;
                    }
                }
                f(Some(&tmp[..cnt]))
            }
        }
    }

    /// Appends one length-prefixed (big-endian u32) bincode frame.
    pub fn put_frame_bincode<M: Serialize>(&mut self, msg: &M) -> Result<(), bincode::Error> {
        let size = bincode::serialized_size(msg)? as usize;
        // write the length header
        self.put_u32::<BigEndian>(size as u32);
        bincode::serialize_into(&mut self.writer(), msg)?;
        Ok(())
    }

    /// Returns an iterator that decodes and consumes complete frames;
    /// partial frames are left untouched in the buffer.
    pub fn drain_frames_bincode<M: DeserializeOwned>(&mut self) -> BincodeFrameIterator<M> {
        BincodeFrameIterator {
            inner: self,
            phantom: PhantomData,
        }
    }
}

/// Iterator over complete bincode frames in an `IoBuffer`; on drop it
/// drains any remaining complete frames.
pub struct BincodeFrameIterator<'a, M: DeserializeOwned> {
    inner: &'a mut IoBuffer,
    // Marker tying the iterator to the message type it decodes.
    phantom: PhantomData<M>,
}

impl<'a, M: DeserializeOwned> Iterator for BincodeFrameIterator<'a, M> {
    type Item = Result<M, bincode::Error>;

    fn next(&mut self) -> Option<Self::Item> {
        // do we have a header?
        let hdr = self.inner.with_peek(4, |hdr| {
            hdr.map(|bytes| {
                BigEndian::read_u32(bytes) as usize
            })
        });
        if let Some(size) = hdr {
            // Only consume once the whole frame (header + payload) is buffered.
            if self.inner.remaining() >= 4 + size {
                self.inner.advance(4);
                return Some(bincode::deserialize_from(&mut self.inner.reader()));
            }
        }
        None
    }
}

impl<'a, M: DeserializeOwned> Drop for BincodeFrameIterator<'a, M> {
    fn drop(&mut self) {
        // Drain remaining complete frames so partially-consumed buffers
        // don't desynchronize the frame stream.
        while let Some(_) = self.next() {}
    }
}

impl BufMut for IoBuffer {
    fn remaining_mut(&self) -> usize {
        // Growable buffer: effectively unbounded writable space.
        std::usize::MAX
    }

    unsafe fn advance_mut(&mut self, cnt: usize) {
        // advancing more than what can be written in the last vec does not make sense
        if let Some(cur) = self.inner.back_mut() {
            let curvec = cur.get_mut();
            let len = curvec.len();
            let remaining_mut = curvec.capacity() - len;
            assert!(remaining_mut >= cnt);
            // SAFETY-relevant: caller guarantees `cnt` bytes past `len`
            // were initialized via bytes_mut().
            curvec.set_len(len + cnt);
            self.remaining += cnt;
        } else if cnt > 0 {
            panic!("not enough remaining_mut");
        }
    }

    unsafe fn bytes_mut(&mut self) -> &mut [u8] {
        // Allocate a fresh segment when the back one is full (or absent).
        let mut alloc = false;
        if let Some(cur) = self.inner.back_mut() {
            if cur.get_mut().capacity() == cur.get_mut().len() {
                alloc = true;
            }
        } else {
            alloc = true;
        }
        if alloc {
            let new = Cursor::new(Vec::with_capacity(DEFAULT_CAP));
            self.inner.push_back(new);
        }
        // Expose the back segment's spare capacity as a writable slice.
        let backvec = self.inner.back_mut().unwrap().get_mut();
        let len = backvec.len();
        let cap = backvec.capacity();
        let ptr = backvec.as_mut_ptr();
        let slice = slice::from_raw_parts_mut(ptr, cap);
        &mut slice[len .. cap]
    }
}

impl Buf for IoBuffer {
    fn remaining(&self) -> usize {
        self.remaining
    }

    fn bytes(&self) -> &[u8] {
        // Only the front segment's unread bytes are contiguous.
        if let Some(cur) = self.inner.front() {
            cur.bytes()
        } else {
            std::default::Default::default() // empty slice
        }
    }

    fn advance(&mut self, cnt: usize) {
        assert!(cnt <= self.remaining);
        self.remaining -= cnt;
        let mut adv = 0;
        // Pop fully-consumed segments; push back a partially-consumed one.
        while adv < cnt {
            let mut cur = self.inner.pop_front().unwrap();
            let to_adv = cmp::min(cur.remaining(), cnt - adv);
            cur.advance(to_adv);
            adv += to_adv;
            if cur.remaining() > 0 {
                assert!(adv == cnt);
                self.inner.push_front(cur);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use bytes::BigEndian;

    #[test]
    fn buffer_new() {
        let mut cb = IoBuffer::new();
        assert_eq!(cb.remaining(), 0);
        unsafe {
            assert_eq!(cb.bytes_mut().len(), DEFAULT_CAP);
        }
        let mut cb = IoBuffer::with_capacity(100);
        assert_eq!(cb.remaining(), 0);
        unsafe {
            // 100 rounds up to the next power of two >= MIN_CAP.
            assert_eq!(cb.bytes_mut().len(), 128);
        }
    }

    #[test]
    fn buffer_put() {
        let mut cb = IoBuffer::new();
        cb.put_i64::<BigEndian>(42);
        cb.put_i64::<BigEndian>(123);
        cb.put_i64::<BigEndian>(-21444);
        cb.put_i64::<BigEndian>(88);
        assert_eq!(cb.remaining(), 8*4);
        unsafe {
            assert_eq!(cb.bytes_mut().len(), DEFAULT_CAP - 8*4);
        }
        assert_eq!(cb.bytes().len(), 8*4);
        assert_eq!(cb.get_i64::<BigEndian>(), 42);
        assert_eq!(cb.get_i64::<BigEndian>(), 123);
        assert_eq!(cb.bytes().len(), 8*2);
        assert_eq!(cb.get_i64::<BigEndian>(), -21444);
        assert_eq!(cb.get_i64::<BigEndian>(), 88);
        assert_eq!(cb.bytes().len(), 0);
        unsafe {
            assert_eq!(cb.bytes_mut().len(), DEFAULT_CAP);
        }
    }

    #[test]
    fn buffer_peek() {
        let mut cb = IoBuffer::with_capacity(10);
        cb.with_peek(0, |buf| {
            assert!(buf.is_some());
        });
        cb.with_peek(1, |buf| {
            assert!(buf.is_none());
        });
        cb.put_u64::<BigEndian>(0x1122334455667788);
        cb.with_peek(8, |buf| {
            let bytes = buf.unwrap();
            assert_eq!(BigEndian::read_u64(bytes), 0x1122334455667788);
        });
        cb.with_peek(9, |buf| {
            assert!(buf.is_none());
        });
        assert_eq!(cb.get_u32::<BigEndian>(), 0x11223344); // read 4
        assert_eq!(cb.get_u16::<BigEndian>(), 0x5566); // read 2
        cb.with_peek(2, |buf| {
            let bytes = buf.unwrap();
            assert_eq!(BigEndian::read_u16(bytes), 0x7788);
        });
        cb.with_peek(3, |buf| {
            assert!(buf.is_none());
        });
        cb.put_u64::<BigEndian>(0x1122334455667788); // put 8 => buffer full and starting at 6
        // Exercises the non-contiguous (multi-segment) peek path.
        cb.with_peek(10, |buf| {
            let bytes = buf.unwrap();
            assert_eq!(BigEndian::read_u16(&bytes[..2]), 0x7788);
            assert_eq!(BigEndian::read_u64(&bytes[2..10]), 0x1122334455667788);
        });
    }

    #[test]
    fn buffer_reserve() {
        let mut cb = IoBuffer::new();
        unsafe {
            cb.reserve(1);
            assert_eq!(cb.bytes_mut().len(), 32);
            cb.reserve(32);
            assert_eq!(cb.bytes_mut().len(), 32);
            cb.reserve(33);
            assert_eq!(cb.bytes_mut().len(), 64);
        }
    }

    #[test]
    fn buffer_bincode() {
        let mut cb = IoBuffer::with_capacity(3);
        let msg0 = (1u64, Some(true), "foobar".to_owned());
        let msg1 = (1234u64, Some(false), "bladsf lakds jfkjsa kfdjds".to_owned());
        cb.put_frame_bincode(&msg0).unwrap();
        cb.put_frame_bincode(&msg1).unwrap();
        let msgs: Vec<Result<(u64, Option<bool>, String), bincode::Error>> =
            cb.drain_frames_bincode().collect();
        assert_eq!(msgs[0].as_ref().expect("deserialization failure"), &msg0);
        assert_eq!(msgs[1].as_ref().expect("deserialization failure"), &msg1);
    }
}
/* This is part of mktcb - which is under the MIT License ********************/ use snafu::{ResultExt, OptionExt}; use std::path::PathBuf; use crate::error::Result; use crate::error; use log::*; /// Retrieve the last path component of an URL, as a PathBuf pub fn url_last(url: &url::Url) -> Result<PathBuf> { let filename = url.path_segments() .context(error::URLExtractError{url: url.clone()})? .last() .context(error::URLExtractError{url: url.clone()})?; Ok(std::path::PathBuf::from(filename)) } pub fn copy_config(opt_cfg: &Option<PathBuf>, build_dir: &PathBuf) -> Result<()> { // Let create the build directory. We will need it anyway. std::fs::create_dir_all(build_dir).context( error::CreateDirError{ path: build_dir.clone() })?; if let Some(cfg) = opt_cfg { let mut build_cfg = build_dir.clone(); build_cfg.push(".config"); info!("Copying configuration {:#?} to {:#?}", cfg, build_cfg); std::fs::copy(cfg, &build_cfg).context(error::CopyFailed{ from: cfg.clone(), to: build_cfg, })?; } else { debug!("No configuration selected"); } Ok(()) } pub fn getenv(var: &str) -> Result<String> { std::env::var(var).context(error::MaintainerError{ var: var.to_string() }) } pub fn read_file(path: &std::path::PathBuf) -> Result<String> { let contents = std::fs::read(&path).context( error::FailedToReadVersion { path: path.clone() } )?; let mut data = std::string::String::from_utf8(contents) .context(error::FailedToDecodeUTF8{})?; // Right-trim the string from any whitespaces (including newlines) while let Some(idx) = data.rfind(char::is_whitespace) { data.truncate(idx); } Ok(data) }
pub fn nyt_archive_urls() -> Vec<String> { let mut urls = vec![]; for i in 1853..2019 { for j in 1..=12 { let url = format!( "https://api.nytimes.com/svc/archive/v1/{}/{}.json?api-key={}", i, j, crate::keys::NYT_KEY.to_string() ); urls.push(url); } } urls } // https://api.nytimes.com/svc/news/v3/content/all/all.json #[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] #[serde(rename_all = "camelCase")] pub struct NYTFeed { pub status: String, pub copyright: Option<String>, #[serde(rename = "num_results")] pub num_results: i64, pub results: Vec<NYTFeedArticle>, } impl crate::HasRecs for NYTFeed { fn to_recs(&self) -> Vec<Vec<String>> { let mut recs: Vec<Vec<String>> = Vec::new(); for article in self.results.iter() { recs.push(NYTFeedArticle::to_rec(article)); } return recs; } } #[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] #[serde(rename_all = "camelCase")] pub struct NYTFeedArticle { #[serde(rename = "slug_name")] pub slug_name: String, pub section: String, pub subsection: String, pub title: String, #[serde(rename = "abstract")] pub abstract_field: String, pub url: String, pub byline: String, #[serde(rename = "item_type")] pub item_type: String, pub source: String, #[serde(rename = "updated_date")] pub updated_date: String, #[serde(rename = "created_date")] pub created_date: String, #[serde(rename = "published_date")] pub published_date: String, #[serde(rename = "first_published_date")] pub first_published_date: String, #[serde(rename = "material_type_facet")] pub material_type_facet: String, pub kicker: String, pub subheadline: String, #[serde(rename = "des_facet")] #[serde(default)] pub des_facet: Option<Vec<String>>, #[serde(rename = "org_facet")] #[serde(default)] pub org_facet: Option<Vec<String>>, #[serde(rename = "per_facet")] #[serde(default)] pub per_facet: Option<Vec<String>>, #[serde(rename = "geo_facet")] #[serde(default)] pub geo_facet: Option<Vec<String>>, 
#[serde(rename = "related_urls")] pub related_urls: ::serde_json::Value, pub multimedia: Option<Vec<NYTFeedMultimedia>>, #[serde(rename = "thumbnail_standard")] pub thumbnail_standard: Option<String>, } impl NYTFeedArticle { pub fn to_rec(&self) -> Vec<String> { //limiting 1 for tags //let thumbnail_url = utils::lilmatcher(self.thumbnail_standard.clone()); let rec: Vec<String> = vec![ self.slug_name.to_string(), self.first_published_date.to_string(), self.section.to_string(), self.subsection.to_string(), self.byline.to_string(), self.title.to_string(), self.subheadline.to_string(), self.abstract_field.to_string(), self.material_type_facet.to_string(), //self.geo_facet.unwrap_or(""), //self.org_facet[0].unwrap_or(""), //self.des_facet[0].unwrap_or(""), //self.per_facet[0].unwrap_or(""), self.source.to_string(), self.published_date.to_string(), self.created_date.to_string(), self.updated_date.to_string(), self.url.to_string(), //self.thumbnail_standard.unwrap_or("").to_string(), self.kicker.to_string(), self.item_type.to_string(), ]; return rec; } } #[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] #[serde(rename_all = "camelCase")] pub struct NYTFeedMultimedia { pub url: Option<String>, pub format: Option<String>, pub height: Option<i64>, pub width: Option<i64>, #[serde(rename = "type")] pub type_field: Option<String>, pub subtype: Option<String>, pub caption: Option<String>, pub copyright: Option<String>, } // https://api.nytimes.com/svc/archive/v1/1926/1.json //#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] //#[serde(rename_all = "camelCase")] //pub struct NYTArchive { // pub copyright: Option<String>, // pub response: NYTArchiveResponse, //} // //impl NYTArchive { // pub fn to_recs(&self) -> Vec<Vec<String>> { // let mut recs: Vec<Vec<String>> = Vec::new(); // for article in self.response.docs.iter() { // recs.push(NYTArchiveArticle::to_rec(article)); // } // return 
recs; // } //} // //#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] //#[serde(rename_all = "camelCase")] //pub struct NYTArchiveResponse { // pub meta: NYTArchiveMeta, // pub docs: Vec<NYTArchiveArticle>, //} // //#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] //#[serde(rename_all = "camelCase")] //pub struct NYTArchiveMeta { // pub hits: i64, //} // //#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)] //#[serde(rename_all = "camelCase")] //pub struct NYTArchiveArticle { // #[serde(rename = "web_url")] // pub web_url: String, // pub snippet: Option<String>, // #[serde(rename = "lead_paragraph")] // pub lead_paragraph: Option<String>, // #[serde(rename = "abstract")] // pub abstract_field: Option<String>, // #[serde(rename = "print_page")] // pub print_page: Option<String>, // pub blog: Option<Vec<::serde_json::Value>>, // pub source: String, // pub multimedia: Vec<::serde_json::Value>, // pub headline: NYTArchiveHeadline, // pub keywords: Vec<Keyword>, // #[serde(rename = "pub_date")] // pub pub_date: String, // #[serde(rename = "document_type")] // pub document_type: String, // #[serde(rename = "news_desk")] // pub news_desk: Option<serde_json::Value>, // #[serde(rename = "section_name")] // pub section_name: Option<serde_json::Value>, // #[serde(rename = "subsection_name")] // pub subsection_name: Option<serde_json::Value>, // pub byline: Option<Byline>, // #[serde(rename = "type_of_material")] // pub type_of_material: Option<String>, // #[serde(rename = "_id")] // pub id: String, // #[serde(rename = "word_count")] // pub word_count: i64, // #[serde(rename = "slideshow_credits")] // pub slideshow_credits: Option<serde_json::Value>, //} // //impl NYTArchiveArticle { // pub fn to_rec(&self) -> Vec<String> { // // let first_name = lilmatcher(self.byline.person.firstname); // // let first_name = 
// lilmatcher(self.byline.person.middlename);
//        // let first_name = lilmatcher(self.byline.person.lastname);
//
//        let rec: Vec<String> = vec![
//            self.id.to_string(),
//            self.word_count.to_string(),
//            orig.replace(",", ";").to_string(),
//            self.pub_date.to_string(),
//            self.document_type.to_string(),
//            page.to_string(),
//            self.headline.main.replace(",", ";").to_string(),
//            kicker.replace(",", ";").to_string(),
//            snip.replace(",", ";").to_string(),
//            abs_field.replace(",", ";").to_string(),
//            self.web_url.to_string(),
//            self.source.to_string(),
//        ];
//        return rec;
//    }
//}

/// Headline block of an NYT Archive article: the main headline plus a
/// number of optional variants (kicker, print headline, …).
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct NYTArchiveHeadline {
    /// The primary headline text.
    pub main: String,
    pub kicker: Option<String>,
    #[serde(rename = "content_kicker")]
    pub content_kicker: Option<String>,
    #[serde(rename = "print_headline")]
    pub print_headline: Option<String>,
    // The API emits these with inconsistent types, hence the raw
    // serde_json::Value fallbacks.
    pub name: Option<serde_json::Value>,
    pub seo: Option<serde_json::Value>,
    pub sub: Option<serde_json::Value>,
}

/// One keyword/tag attached to an archive article
/// (e.g. subject, person, location), with its relevance rank.
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Keyword {
    pub name: Option<String>,
    pub value: Option<String>,
    pub rank: Option<i64>,
    // NOTE(review): "major" arrives as a string flag, not a bool — confirm
    // expected values against the API before converting.
    pub major: Option<String>,
}

/// The byline of an archive article: the original display string plus
/// the structured list of contributing people, if provided.
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Byline {
    pub original: Option<String>,
    // May be missing entirely, hence the default.
    #[serde(default)]
    pub person: Option<Vec<Person>>,
    pub organization: Option<String>,
}

/// A single contributor named in a [`Byline`].
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Person {
    pub firstname: Option<String>,
    pub middlename: Option<String>,
    pub lastname: Option<String>,
    pub qualifier: Option<String>,
    pub title: Option<serde_json::Value>,
    pub role: String,
    pub organization: String,
    pub rank: i64,
}

//pub fn byline_orig(byline: Option<Byline>) -> String {
//    if let Some(byline) = byline {
//        return utils::lilmatcher(byline.original);
//    }
//    return "".to_string();
//}

// Known values of the feed's "section" field, kept for reference:
/*
admin
arts
automobiles
books
briefing
business
climate
corrections
crosswords \u0026 games
education
en español
fashion
food
guides
health
home \u0026 garden
home page
job market
lens
magazine
movies
multimedia/photos
new york
obituaries
opinion
parenting
podcasts
reader center
real estate
science
smarter living
sports
style
sunday review
t brand
t magazine
technology
the learning network
the upshot
theater
times insider
today’s paper
travel
u.s.
universal
video
well
world
your money
*/
//! WebSocket signaling relay: pairs up two clients that connect to the
//! same `{channel}` path and forwards their messages to each other.
//! The first client on a channel becomes the LISTENER, the second the
//! DIALER; messages sent before the peer arrives are buffered.

use actix::{Actor, ActorContext, Addr, Handler, Message, StreamHandler};
use actix_web::{middleware, web, App, Error, HttpRequest, HttpResponse, HttpServer, Resource};
use actix_web_actors::ws;
use serde::Deserialize;
use std::{
    collections::HashMap,
    sync::{Arc, Mutex},
};

/// Extracted from the route pattern `{channel}`.
#[derive(Deserialize)]
struct PathChannel {
    channel: String,
}

/// Actor-to-actor messages used to wire two [`Connection`]s together.
enum ControlMessage {
    /// Sent to each side once a peer exists; carries the peer's address.
    Connected(Addr<Connection>),
    /// A raw WebSocket frame (or protocol error) relayed from the peer.
    Transmit(Result<ws::Message, ws::ProtocolError>),
    /// Role notification: this side was first on the channel.
    Listener,
    /// Role notification: this side joined an occupied channel.
    Dialer,
}

impl Message for ControlMessage {
    type Result = ();
}

/// Per-client actor. State machine with two states:
/// buffering frames until a peer shows up, then relaying to that peer.
enum Connection {
    /// No peer yet; frames received so far are queued for replay.
    WaitingPeer(Vec<Result<ws::Message, ws::ProtocolError>>),
    /// Paired; every incoming frame is forwarded to this address.
    Connected(Addr<Connection>),
}

impl Connection {
    fn new() -> Connection {
        Connection::WaitingPeer(Vec::new())
    }
}

impl Actor for Connection {
    type Context = ws::WebsocketContext<Self>;
}

/// Frames arriving from this client's own WebSocket.
impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for Connection {
    fn handle(&mut self, item: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
        match self {
            Connection::WaitingPeer(pending) => {
                // Stop on close/error, but still queue the frame so the
                // eventual peer sees the close too.
                match item {
                    Ok(ws::Message::Close(_)) | Err(_) => {
                        ctx.stop();
                    }
                    _ => {}
                }
                pending.push(item);
            }
            Connection::Connected(peer) => {
                match item {
                    Ok(ws::Message::Close(_)) | Err(_) => {
                        ctx.stop();
                    }
                    _ => {}
                }
                // Relay everything (including the close) to the peer actor.
                peer.do_send(ControlMessage::Transmit(item));
            }
        }
    }
}

/// Control messages arriving from the signaling logic or the peer actor.
impl Handler<ControlMessage> for Connection {
    type Result = ();
    fn handle(&mut self, msg: ControlMessage, ctx: &mut Self::Context) -> Self::Result {
        match msg {
            ControlMessage::Connected(peer) => {
                // Drain any frames buffered while we had no peer…
                let mut pending = Vec::new();
                match self {
                    Connection::WaitingPeer(pending2) => {
                        std::mem::swap(&mut pending, pending2);
                    }
                    Connection::Connected(_) => {}
                }
                // …replay them to the new peer in arrival order…
                pending
                    .into_iter()
                    .for_each(|msg| peer.do_send(ControlMessage::Transmit(msg)));
                // …then switch to relaying mode.
                let mut new_self = Connection::Connected(peer);
                std::mem::swap(self, &mut new_self);
            }
            // A frame relayed from the peer: write it out on our socket.
            ControlMessage::Transmit(msg) => match msg {
                Ok(ws::Message::Binary(data)) => ctx.binary(data),
                Ok(ws::Message::Text(text)) => ctx.text(text),
                Ok(ws::Message::Ping(msg)) => ctx.ping(&msg),
                Ok(ws::Message::Pong(msg)) => ctx.pong(&msg),
                Ok(ws::Message::Close(reason)) => {
                    ctx.close(reason);
                    ctx.stop();
                }
                // Anything else (protocol error, continuation, nop):
                // tear the connection down.
                _ => {
                    ctx.stop();
                }
            },
            // Role announcements are sent as plain text frames.
            ControlMessage::Listener => ctx.text("LISTENER"),
            ControlMessage::Dialer => ctx.text("DIALER"),
        }
    }
}

/// Shared registry of channels that currently have exactly one waiting
/// client. A channel entry is removed as soon as a second client pairs.
struct Signaling {
    half_channels: HashMap<String, Addr<Connection>>,
}

impl Signaling {
    /// Builds the `{channel}` GET route backed by the shared registry.
    fn scope(obj: Arc<Mutex<Self>>) -> Resource {
        web::resource("{channel}").route(web::get().to(
            move |req, stream, path: web::Path<PathChannel>| {
                let obj = obj.clone();
                async move {
                    obj.lock()
                        .unwrap()
                        .ws_index(req, stream, &path.channel)
                        .await
                }
            },
        ))
    }

    fn new() -> Arc<Mutex<Self>> {
        Arc::new(Mutex::new(Signaling {
            half_channels: Default::default(),
        }))
    }

    /// Upgrades the request to a WebSocket, spawns the [`Connection`]
    /// actor, and registers it on the requested channel.
    async fn ws_index(
        &mut self,
        req: HttpRequest,
        stream: web::Payload,
        channel: &str,
    ) -> Result<HttpResponse, Error> {
        let conn = Connection::new();
        let (addr, res) = ws::start_with_addr(conn, &req, stream)?;
        self.new_connection(addr, channel);
        Ok(res)
    }

    /// Pairs `user` with a waiting client on `channel`, or parks it as
    /// the waiting client if the channel is empty.
    fn new_connection(&mut self, user: Addr<Connection>, channel: &str) {
        match self.half_channels.entry(channel.into()) {
            std::collections::hash_map::Entry::Occupied(entry) => {
                let peer = entry.remove();
                // The waiting actor may have died since it registered;
                // if so, retry — the entry is gone now, so this recursion
                // takes the Vacant branch (at most one level deep).
                if !peer.connected() {
                    return self.new_connection(user, channel);
                }
                peer.do_send(ControlMessage::Connected(user.clone()));
                user.do_send(ControlMessage::Connected(peer));
                user.do_send(ControlMessage::Dialer);
            }
            std::collections::hash_map::Entry::Vacant(entry) => {
                user.do_send(ControlMessage::Listener);
                entry.insert(user);
            }
        }
    }
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    std::env::set_var("RUST_LOG", "actix_server=info,actix_web=info");
    env_logger::init();
    // Bind address is overridable via $BIND.
    let bind = std::env::var("BIND").unwrap_or("127.0.0.1:8080".to_string());
    let signal = Signaling::new();
    HttpServer::new(move || {
        App::new()
            .wrap(middleware::Logger::default())
            // Both spellings are served against the same registry.
            .service(web::scope("/signaling").service(Signaling::scope(signal.clone())))
            .service(web::scope("/signalling").service(Signaling::scope(signal.clone())))
    })
    .bind(&bind)?
    .run()
    .await
}
use hyper; use hyper::{Client, Url}; use hyper::status::StatusCode; use serde_json; use serde_json::Deserializer; use ::sources::GithubSource; #[derive(Deserialize, Debug)] pub struct Package { pub name: String, pub url: String } #[derive(Debug)] pub enum PackageError { Http(hyper::Error), HttpStatus(StatusCode), Parse(serde_json::Error), } impl From<hyper::Error> for PackageError { fn from(error: hyper::Error) -> PackageError { PackageError::Http(error) } } #[derive(Deserialize, Debug)] pub struct PackageInfo { pub name: String, pub title: Option<String>, pub description: Option<String>, pub version: Option<String>, pub main: Option<String>, //Option<Vec<String>> pub author: Option<String>, //Option<Person>, pub contributors: Option<Vec<Person>>, pub homepage: Option<String>, pub keywords: Option<Vec<String>> } #[derive(Deserialize, Debug)] pub struct Person { pub name: Option<String>, pub url: Option<String>, pub email: Option<String> } impl Package { pub fn find(name: &str) -> Result<Package, PackageError> { let client = Client::with_http_proxy("localhost", 12345); let request: Url = format!("https://bower.herokuapp.com/packages/{}", name).parse().unwrap(); let response = try!(client.get(request).send()); match response.status { StatusCode::Ok => Ok(serde_json::from_reader(response).unwrap()), _other => Err(PackageError::HttpStatus(response.status)) } } pub fn search(query: &str) -> Result<Vec<Package>, PackageError> { let client = Client::with_http_proxy("localhost", 12345); let request: Url = format!("https://bower.herokuapp.com/packages/search/{}", query).parse().unwrap(); let response = client.get(request).send().unwrap(); match response.status { StatusCode::Ok => match serde_json::from_reader(response) { Ok(packages) => Ok(packages), Err(error) => Err(PackageError::Parse(error)) }, _other => Err(PackageError::HttpStatus(response.status)) } } pub fn info(self) -> PackageInfo { let source = GithubSource::new(self); source.info() } }
// Copyright 2018 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. extern crate fidl; extern crate fidl_fidl_examples_echo; extern crate failure; extern crate fuchsia_app as component; extern crate fuchsia_async as async; extern crate fuchsia_zircon as zx; extern crate futures; #[macro_use] extern crate structopt; use component::client::Launcher; use failure::{Error, ResultExt}; use futures::prelude::*; use fidl_fidl_examples_echo::EchoMarker; use structopt::StructOpt; fn main() -> Result<(), Error> { let mut executor = async::Executor::new().context("Error creating executor")?; #[derive(StructOpt, Debug)] #[structopt(name = "echo_client_rust")] struct Opt { #[structopt(long = "server", help = "URL of echo server", default_value = "echo2_server_rust")] server_url: String, } // Launch the server and connect to the echo service. let Opt { server_url } = Opt::from_args(); let launcher = Launcher::new().context("Failed to open launcher service")?; let app = launcher.launch(server_url, None) .context("Failed to launch echo service")?; let echo = app.connect_to_service(EchoMarker) .context("Failed to connect to echo service")?; let fut = echo.echo_string(Some("hello world!")) .map(|res| println!("response: {:?}", res)); executor.run_singlethreaded(fut).context("failed to execute echo future")?; Ok(()) }
//! ## Data for the [`Character` component](https://docs.lu-dev.net/en/latest/components/004-character.html)
use serde::{Deserialize, Serialize};

#[derive(Default, Debug, PartialEq, Deserialize, Serialize)]
/// Data for the [`Character` component][c004]
///
/// [c004]: https://docs.lu-dev.net/en/latest/components/004-character.html
pub struct Character {
    /// Account ID
    #[serde(rename = "acct")]
    account: u32,
    /// Current amount of currency
    #[serde(rename = "cc")]
    currency_current: u32,
    /// GM level
    #[serde(rename = "gm")]
    gm_level: u32,
    /// FreeToPlay status
    #[serde(rename = "ft")]
    free_to_play: u32,
    /// Timestamp of last login as this character
    #[serde(rename = "llog")]
    last_login: u64,
    /// LEGO score / Uscore
    #[serde(rename = "ls")]
    lego_score: u32,
    /// Last world position X-coordinate
    lzx: f32,
    /// Last world position Y-coordinate
    lzy: f32,
    /// Last world position Z-coordinate
    lzz: f32,
    /// Last world rotation X component
    lzrx: f32,
    /// Last world rotation Y component
    lzry: f32,
    /// Last world rotation Z component
    lzrz: f32,
    /// Last world rotation W component
    lzrw: f32,
    /// Player stats
    stt: String,
    /// Last zone ID (packed)
    lzid: u32,
    /// ??? (unknown — name suggests "last non-? zone id"; TODO confirm)
    lnzid: u32,
    /// Last world ID
    lwid: u32,
    /// ??? (unknown; TODO confirm semantics)
    tscene: String,
    /// ??? (unknown; TODO confirm semantics)
    lrid: u64,
    /// Total time played, in seconds
    time: u32,
    /// Unlocked emotes
    #[serde(rename = "ue")]
    pub unlocked_emotes: UnlockedEmotes,
    /// Visited levels (was mislabelled "Zone summaries" — copy-paste)
    #[serde(default, rename = "vl")]
    pub visited_levels: VisitedLevels,
    /// Zone summaries
    #[serde(rename = "zs")]
    pub zone_summaries: ZoneSummaries,
}

#[derive(Default, Debug, PartialEq, Deserialize, Serialize)]
/// Unlocked emotes
pub struct UnlockedEmotes {
    /// List of unlocked emotes
    #[serde(rename = "e")]
    pub children: Vec<UnlockedEmote>,
}

#[derive(Default, Debug, PartialEq, Deserialize, Serialize)]
/// A single unlocked emote
pub struct UnlockedEmote {
    /// The ID from the [`Emotes` tables](https://docs.lu-dev.net/en/latest/database/Emotes.html)
    pub id: u32,
}

#[derive(Default, Debug, PartialEq, Deserialize, Serialize)]
/// List of zone summaries
pub struct ZoneSummaries {
    /// The list of summaries
    #[serde(rename = "s")]
    pub children: Vec<ZoneSummary>,
}

#[derive(Default, Debug, PartialEq, Deserialize, Serialize)]
/// A single zone summary
pub struct ZoneSummary {
    /// The relevant map ID from the [`ZoneTable`](https://docs.lu-dev.net/en/latest/database/ZoneTable.html)
    map: u32,
    /// Number of achievements
    #[serde(rename = "ac")]
    pub achievement_count: u32,
    /// Number of bricks collected
    #[serde(rename = "bc")]
    pub bricks_collected: u32,
    /// Number of coins collected
    #[serde(rename = "cc")]
    pub coins_collected: u32,
    /// Number of enemies smashed
    #[serde(rename = "es")]
    pub enemies_smashed: u32,
    /// Number of quick-builds constructed
    #[serde(rename = "qbc")]
    pub quick_builds_constructed: u32,
}

#[derive(Default, Debug, PartialEq, Deserialize, Serialize)]
/// List of levels the player has visited
/// (doc fixed: was a copy-paste of the zone-summaries text)
pub struct VisitedLevels {
    /// The list of visited levels
    #[serde(rename = "v")]
    pub children: Vec<VisitedLevel>,
}

#[derive(Default, Debug, PartialEq, Deserialize, Serialize)]
/// A level the player visited
pub struct VisitedLevel {
    /// Clone ID (used for properties, 0 if not a property)
    #[serde(rename = "cid")]
    clone_id: u32,
    /// World ID.
    id: u32,
}
/* MIT License Copyright (c) 2017 Frederik Delaere Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ extern crate rand; use rand::Rng; use std::panic; #[macro_use] mod macros; #[macro_use] extern crate lazy_static; #[path = "rsmodules.rs"] mod rsmod; mod wizard; use rsmod::Rsmodule; extern crate bincode; extern crate dirs; extern crate rustc_serialize; extern crate users; extern crate walkdir; extern crate ansi_term; extern crate getopts; extern crate glob; extern crate gumdrop; extern crate gumdrop_derive; extern crate is_executable; extern crate mdcat; extern crate pbr; extern crate pulldown_cmark; extern crate regex; extern crate shellexpand; extern crate syntect; use ansi_term::Style; use std::collections::HashMap; use std::env; use std::fs::{remove_file, File}; use std::io::Write; use std::path::PathBuf; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, Mutex}; lazy_static! 
{ static ref TMPFILE_INITIALIZED: Arc<AtomicBool> = Arc::new(AtomicBool::new(false)); static ref TMPFILE_PATH: Mutex<String> = Mutex::new(String::new()); static ref OUTPUT_BUFFER: Mutex<Vec<String>> = Mutex::new(vec![]); } static CRASH_UNSUPPORTED_SHELL: i32 = 1; static CRASH_FAILED_TO_CREATE_TEMPORARY_FILE: i32 = 2; static CRASH_FAILED_TO_WRITE_TO_TEMPORARY_FILE: i32 = 3; static CRASH_NO_CACHE_FILES_FOUND: i32 = 4; static CRASH_MODULE_NOT_FOUND: i32 = 5; static CRASH_COULDNT_OPEN_CACHE_FILE: i32 = 5; //static CRASH_NO_ARGS: i32 = 6; static CRASH_MODULEPATH_IS_FILE: i32 = 7; static CRASH_CANNOT_ADD_TO_ENV: i32 = 8; static CRASH_MISSING_INIT_FILES: i32 = 9; static CRASH_GET_SHELL: i32 = 10; static CRASH_CREATE_ERROR: i32 = 11; static CRASH_INVALID_REGEX: i32 = 12; const VERSION: &str = env!("CARGO_PKG_VERSION"); const AUTHORS: &str = env!("CARGO_PKG_AUTHORS"); fn is_shell_supported(shell: &str) -> bool { // when noshell is selected, all output is printed // to stdout instead of the temp file // noshell is also useful for debugging purposes let shell_list = vec!["tcsh", "csh", "bash", "zsh", "noshell", "python", "perl", "progressbar", "r"]; if shell_list.contains(&shell) { return true; } false } #[cfg(debug_assertions)] fn release_debug() -> String { String::from(" (debug)") } #[cfg(not(debug_assertions))] fn release_debug() -> String { String::from("") } fn usage(in_eval: bool, subcommand_help: bool) { // SubCommandHelp let mut sch = HashMap::new(); // the \t is part of an advanced markup codebase ^^ // \t doesn't show up in the full usage text // but in the partial usage text its replaced by a newline // this makes the partial usage text look nicer sch.insert("load".to_owned(), "load [(partial) module name(s)]"); sch.insert( "unload".to_owned(), "unload [(partial) module name(s)]\t A partial module name is the part of the modulename before a slash, eg: you have module name 'rsmodules/2.0.0' the partial name is 'rsmodules'.", ); sch.insert( "switch".to_owned(), 
"switch [(partial) module name from] [(partial) module name to]\t Switches between two version of modules. This does the same as module load blast/1.2.3 when blast/1.2.5 was already loaded. This feature was added for compatibility reasons.", ); sch.insert( "list".to_owned(), "list\t Lists all the loaded modules.", ); sch.insert( "purge".to_owned(), "purge\t Unloads all loaded modules.", ); sch.insert( "refurbish".to_owned(), "refurbish\t Unloads all loaded modules. And loads the autoloaded modules.", ); sch.insert( "refresh".to_owned(), "refresh\t Reloads all loaded modules.", ); sch.insert( "available".to_owned(), "available [--default] [--regex] [search string]\t Lists all the available modules. If a [search string] is given then all modules which match the search string will be listed. The search string can also contain multiple items separated by spaces. When --default, -d is specified then only default modules will be listed. When --deprecated, -R is specified then only deprecated modules will be listed. When --regex or -r is specified the search term can be a regular expression.", ); sch.insert( "info".to_owned(), "info [(partial) module name(s)]\t Gives more info about a module. Description, which variables it modifies and/or which commands are executed upon launch.", ); sch.insert( "undo".to_owned(), "undo\t Undo the previous module command, only works for load, unload, switch and purge.", ); sch.insert( "cache".to_owned(), "cache [--help] [make|add|edit|delete]\t Manipulate the contents of a .modulecache file. make\t Updates the .modulecache file in all the paths that are found in the $MODULEPATH variable. This will only work if you have the correct permissions. If you want a progress bar use the command: update_modules_cache instead of module cache make add --modulepath [path] --name [modulename] --description \"description\" [--default] [--deprecated] \t Adds a module to the .modulecache file This will only work if you have the correct permissions. 
edit --modulepath [path] --name [modulename] --new-name [new modulename] --description \"description\" [--default] [--deprecated] \t Edits an existing module in the .modulecache file This will only work if you have the correct permissions. The modulename is used for matching the existing one. Updating deprecated and description only updates this values in the cache not in the modulefile itself. ", ); /* sch.insert( "addtocache".to_owned(), "addtocache [path from $MODULEPATH] [modulename] [default: true/false] [deprecated: true/false ][\"description\"]\t Adds a module to the .modulecache file This will only work if you have the correct permissions.", ); sch.insert( "makecache".to_owned(), "makecache\t Updates the .modulecache file in all the paths that are found in the $MODULEPATH variable. This will only work if you have the correct permissions. If you want a progress bar use the command: update_modules_cache instead of module makecache", ); */ sch.insert( "create".to_owned(), "create [--help] [modulename]\t Starts a wizard to create a modulefile.", ); sch.insert( "delete".to_owned(), "delete\t Deletes a modulefile. This only works if you have the correct permissions.", ); sch.insert( "autoload".to_owned(), "autoload append|prepend|remove|list|purge [module name(s)]\t Manages the autoloading of modules when opening a new terminal.", ); sch.insert( "readme".to_owned(), "readme [(partial)modulename]\t Looks for a manpage or a README file in the module installation folder and displays the contents of this file.", ); sch.insert( "cd".to_owned(), "cd [(partial)modulename]\t Changes your current working directory to the module installation folder. 
When you don't provide a modulename the working directory is changed to the module installation folder of the last loaded module.", ); sch.insert( "edit".to_owned(), "edit [(partial)modulename]\t Opens the modulefile in your $EDITOR or if this variable is not present in vi -e.", ); let long_help: &str = &format!( " RSModules manages your user environment on linux and macOS. The RSModules package is a tool to help users modifying their environment during a session by using modulefiles. A modulefile contains all the settings needed to configure the shell for using a certain application. A modulefile sets or alters environment variables such as PATH, LD_LIBRARY_PATH, MANPATH, PYTHONPATH, PERL5LIB, ... Modulefiles can be shared by many users or can be used by individuals by setting up paths in the MODULEPATH environment variable. Once a new modulepath is created and added to MODULEPATH, the cache needs to be updated by invoking the command: module cache make. Modulefiles can be loaded and unloaded by the user whenever the module command is available. 
* module [subcommand] <module name> subcommands ----------- * {} * {} * {} * {} * {} * {} * {} * {} * {} * {} * {} * {} * {} * {} * {} * {} * {} ", help!(sch, "load"), help!(sch, "unload"), help!(sch, "switch"), help!(sch, "list"), help!(sch, "purge"), help!(sch, "refurbish"), help!(sch, "refresh"), help!(sch, "available"), help!(sch, "info"), help!(sch, "undo"), help!(sch, "cache"), //help!(sch, "addtocache"), //help!(sch, "makecache"), help!(sch, "create"), help!(sch, "delete"), help!(sch, "autoload"), help!(sch, "readme"), help!(sch, "cd"), help!(sch, "edit") ); let error_msg: &str; let args: Vec<String> = std::env::args().collect(); if args.len() == 3 && subcommand_help { let (shell, _) = rsmod::get_shell_info(); eprintln!(""); eprintln!( " {}: module {}", bold(&shell, "Usage"), help!(sch, &args[2]).replace(" ", " ").replace("\t", "\n") ); eprintln!(""); return; } eprintln!(" RSModules {}{} - {}", VERSION, release_debug(), AUTHORS); eprintln!(""); eprintln!(" 2017 - Ghent University / VIB"); eprintln!(" http://www.psb.ugent.be - http://www.ugent.be - http://www.vib.be"); eprintln!(""); eprintln!(""); if in_eval { //<load|unload|list|switch|purge|refurbish|refresh|available|undo|info|addtocache|makecache|delete|autoload|readme|cd|edit> [module \ error_msg = " Usage: module \ <load|unload|list|switch|purge|refurbish|refresh|available|undo|info|delete|autoload|readme|cd|edit> [module \ name]"; } else { //<load|unload|list|switch|purge|refurbish|refresh|available|undo|info|addtocache|makecache|delete|autoload|readme|cd|edit> [module \ error_msg = " Usage: rsmodules <shell> \ <load|unload|list|switch|purge|refurbish|refresh|available|undo|info|delete|autoload|readme|cd|edit> [module \ name]"; } eprintln!("{}", &error_msg); if !in_eval { eprintln!( " Supported shells: bash, zsh, csh, tcsh, python, perl and \ noshell" ); eprintln!(""); eprintln!(" When noshell is selected all output is printed to stdout,"); eprintln!(" module available will then print a nice list 
without gaps,"); eprintln!(" which makes your life easier when you want to parse this output."); } eprintln!("{}", &long_help); } fn set_global_tmpfile(tmp_file_path: String) { let mut tmp = lu!(TMPFILE_PATH); *tmp = tmp_file_path; TMPFILE_INITIALIZED.store(true, Ordering::Relaxed); } fn run(args: &[String]) { let command: &str; let tmp: String; let mut modulename: &str = ""; let (shell, shell_width) = rsmod::get_shell_info(); //// if !is_shell_supported(&shell) { usage(false, false); rsmod::crash(CRASH_UNSUPPORTED_SHELL, &format!("{} is not a supported shell", shell)); } let modulepaths = rsmod::get_module_paths(false); // create temporary file in the home folder // if the file cannot be created try to create it // in /tmp, if that fails, the program exits // // ~/.rsmodulestmpXXXXXXXX // /tmp/.rsmodulestmpXXXXXXXX let mut tmpfile: File; let rstr: String = rand::thread_rng().gen_ascii_chars().take(8).collect(); let mut tmp_file_path: PathBuf; match dirs::home_dir() { Some(path) => tmp_file_path = path, None => { show_warning!( "We were unable to find your home directory, checking if /tmp is an \ option" ); // this is wrong, as we try to use temp again a bit later tmp_file_path = env::temp_dir(); // return; } }; let filename: String = format!(".rsmodulestmp{}", rstr); let filename: &str = filename.as_ref(); tmp_file_path.push(filename); match File::create(&tmp_file_path) { Ok(file) => { tmpfile = file; set_global_tmpfile(tmp_file_path.to_str().unwrap().to_string()); } Err(_) => { // home exists but we can't create the temp file in it or // worst case, /tmp exists but we can't create the temp file in it tmp_file_path = env::temp_dir(); let filename: String = format!(".rsmodulestmp{}", rstr); let filename: &str = filename.as_ref(); tmp_file_path.push(filename); match File::create(&tmp_file_path) { Ok(newfile) => { tmpfile = newfile; set_global_tmpfile(tmp_file_path.to_str().unwrap().to_string()); } Err(e) => { rsmod::crash( CRASH_FAILED_TO_CREATE_TEMPORARY_FILE, 
&format!("Failed to create temporary file: {}", e), ); return; } }; } }; panic::set_hook(Box::new(|_| { let tmp = lu!(TMPFILE_PATH); let tmp_file_path = &*tmp; remove_file(tmp_file_path).unwrap(); })); let filename = tmp_file_path.to_str().unwrap().to_string(); let mut quoted_string: String; let mut command_hit: &str = ""; if args.len() >= 3 { command = &args[2]; let matches: bool; let mut modulenames: Vec<String> = Vec::new(); if args.len() > 3 { for arg in args.iter().skip(3) { let whitespace: Vec<&str> = arg.split_whitespace().collect(); if whitespace.len() > 1 { quoted_string = format!("\"{}\"", arg); modulenames.push(quoted_string); } else { modulenames.push(arg.clone()); } } //modulename = &args[3]; tmp = modulenames.join(" "); modulename = &tmp; } let mut command_list: Vec<&str> = Vec::new(); command_list.push("load"); command_list.push("add"); command_list.push("unload"); command_list.push("rm"); command_list.push("available"); command_list.push("list"); command_list.push("purge"); command_list.push("refurbish"); command_list.push("refresh"); command_list.push("info"); command_list.push("display"); command_list.push("show"); command_list.push("switch"); command_list.push("cache"); //command_list.push("addtocache"); //command_list.push("makecache"); command_list.push("help"); command_list.push("undo"); command_list.push("autoload"); command_list.push("readme"); command_list.push("delete"); command_list.push("create"); command_list.push("cd"); command_list.push("edit"); command_list.push("--help"); command_list.push("-h"); // TODO // "create" -> wizard to create a new mdoule // "addmodulepath" -> wizard to add a path to $MODULEPATH // "removemodulepath" -> wizard to remove a path from $MODULEPATH // ask to update /etc/profile.d or bashrc or personal_cshrc // "delete" -> deletes a modulefile // "update" -> when you have blast/12.3 as module // module update blast 13.3 or module update blast/12.3 13.3 // will copy that module file to a new file blast/13.3 // 
and it will replace all instances of 12.3 in the file with // 13.3 // if command == "help" || command == "--help" || command == "-h" { usage(true, false); return; } let mut num_hits: i32 = 0; for cmd in command_list { if cmd.starts_with(command) { num_hits += 1; command_hit = cmd; } } let loadedmodules: String; if num_hits != 1 { usage(true, false); return; } else { matches = true; if command_hit == "cd" { modulename = if modulename.is_empty() { match env::var(rsmod::ENV_LOADEDMODULES) { Ok(list) => loadedmodules = list, Err(_) => { loadedmodules = String::from(""); } }; let mut loadedmodules: Vec<&str> = loadedmodules.split(':').collect(); loadedmodules.retain(|&x| x != ""); let loadedmodule: &str = if !loadedmodules.is_empty() { loadedmodules[0] } else { "" }; loadedmodule } else { modulename }; } if command_hit == "add" { command_hit = "load"; } if command_hit == "rm" { command_hit = "unload"; } if command_hit == "display" || command_hit == "show" { command_hit = "info"; } if command_hit == "load" || command_hit == "unload" { // undo doesn't work for dependency loaded modules let data = setenv( "RSMODULES_UNDO", &format!("{} {}", command_hit, modulename.to_string()), &shell, ); crash_cleanup_if_err!( CRASH_FAILED_TO_WRITE_TO_TEMPORARY_FILE, tmpfile.write_all(data.as_bytes()), filename ); } if (command_hit == "load" || command_hit == "unload" || command_hit == "info" || command_hit == "delete" || command_hit == "readme" || command_hit == "edit") && args.len() == 3 { usage(true, true); return; } if command_hit == "switch" && args.len() != 5 { usage(true, true); return; } if command_hit == "switch" { modulenames.reverse(); let data = setenv( "RSMODULES_UNDO", &format!("{} {}", command_hit, modulenames.join(" ")), &shell, ); crash_cleanup_if_err!( CRASH_FAILED_TO_WRITE_TO_TEMPORARY_FILE, tmpfile.write_all(data.as_bytes()), filename ); } if command_hit == "purge" { let loaded_list = rsmod::get_loaded_list(); let mut args: Vec<String> = Vec::new(); for (argument, _, 
_) in loaded_list { args.push(argument); } let loadedmodules = args.join(" "); let data = setenv("RSMODULES_UNDO", &format!("unload {}", loadedmodules), &shell); crash_cleanup_if_err!( CRASH_FAILED_TO_WRITE_TO_TEMPORARY_FILE, tmpfile.write_all(data.as_bytes()), filename ); } let mut rsmod_command: Rsmodule = Rsmodule { cmd: command_hit, typed_command: command, arg: modulename, search_path: &modulepaths, shell: &shell, //shell_width: shell_width, shell_width, }; rsmod::command(&mut rsmod_command); } if !matches { usage(false, false); } } // when noshell is choosen, we just output to stdout // this is used for scripts that want to parse the module av output // for example for tab completion if shell != "noshell" && shell != "python" && shell != "perl" && shell != "progressbar" && shell != "r" { // we want a self destructing tmpfile // so it must delete itself at the end of the run // if it crashes it will be deleted after the source stuff // if the code that writes the file crashes it should clean up let cmd = format!("\\rm -f {}\n", tmp_file_path.display()); let mut output_buffer = lu!(OUTPUT_BUFFER); let output_buffer = &mut (*output_buffer); output_buffer.push(cmd); for line in output_buffer { crash_cleanup_if_err!( CRASH_FAILED_TO_WRITE_TO_TEMPORARY_FILE, tmpfile.write_all(line.as_bytes()), filename ); } // source tmpfile println!("source {}", tmp_file_path.display()); // doesn't this make more sense than creating a // self destructing file ? 
println!("rm -f {}", tmp_file_path.display()); } else { remove_file(tmp_file_path.to_str().unwrap().to_string()).unwrap(); } } pub fn setenv(var: &str, val: &str, shell: &str) -> String { let mut data: String = String::new(); if shell == "bash" || shell == "zsh" { data = format!("export {}=\"{}\"\n", var, val); } else if shell == "tcsh" || shell == "csh" { data = format!("setenv {} \"{}\"\n", var, val); } else if shell == "python" { data = format!("os.environ[\"{}\"] = \"{}\";\n", var, val); } else if shell == "r" { data = format!( "old_path <- Sys.getenv(\"{}\") Sys.setenv({} = paste(old_path, \"{}\", sep = \":\"))", var, var, val ); } else if shell == "perl" { data = format!("$ENV{{{}}}=\"{}\";\n", var, val); } data } fn bold<'a>(shell: &str, msg: &'a str) -> ansi_term::ANSIGenericString<'a, str> { if shell == "noshell" || shell == "perl" || shell == "r" || shell == "python" || env::var("TERM") == Ok(String::from("")) || env::var("NO_COLOR").is_ok() { return Style::new().paint(msg); } Style::new().bold().paint(msg) } pub fn output(line: String) { let mut output_buffer = lu!(OUTPUT_BUFFER); let output_buffer = &mut (*output_buffer); output_buffer.push(line); } fn init() { TMPFILE_INITIALIZED.store(false, Ordering::Relaxed); } fn main() { init(); let args: Vec<String> = std::env::args().collect(); if args.len() == 1 { if !wizard::run(false) { usage(false, false); } return; } if args.len() == 2 { usage(true, false); } if args.len() >= 2 && (args.get(1) == Some(&String::from("-h")) || args.get(1) == Some(&String::from("--help"))) { usage(false, false); return; } run(&args); } #[cfg(test)] mod tests { use super::is_shell_supported; #[test] fn supported_shells() { assert_eq!(false, is_shell_supported("randomshellname")); assert_eq!(true, is_shell_supported("bash")); assert_eq!(true, is_shell_supported("zsh")); assert_eq!(true, is_shell_supported("tcsh")); assert_eq!(true, is_shell_supported("csh")); assert_eq!(true, is_shell_supported("r")); } }
use super::{ Expr, ExprValue, UnaryOp, BinOp, IfExpr };
use crate::parser::token::Token;
use std::borrow::Borrow;

/// A concrete AST node: the source `Token` the node was parsed from, paired
/// with the `ExprValue` payload describing which kind of expression it is.
#[derive(Clone, Debug)]
pub struct BasicExpr<'a> {
    token: Token<'a>,
    value: ExprValue<BasicExpr<'a>>,
}

impl<'a> Expr for BasicExpr<'a> {}

impl<'a> BasicExpr<'a> {
    /// The token this expression was parsed from.
    pub fn token(&self) -> &Token<'a> {
        &self.token
    }

    /// Internal helper: all public constructors funnel through here so the
    /// boxing and the struct literal live in exactly one place.
    fn boxed(token: Token<'a>, value: ExprValue<BasicExpr<'a>>) -> Box<BasicExpr<'a>> {
        Box::new(BasicExpr { token, value })
    }

    /// A unary-operator application.
    pub fn unary_op(token: Token<'a>, op: UnaryOp, expr: Box<BasicExpr<'a>>) -> Box<BasicExpr<'a>> {
        Self::boxed(token, ExprValue::UnaryOp(op, expr))
    }

    /// A binary-operator application with left- and right-hand operands.
    pub fn bin_op(
        token: Token<'a>,
        op: BinOp,
        lhs: Box<BasicExpr<'a>>,
        rhs: Box<BasicExpr<'a>>,
    ) -> Box<BasicExpr<'a>> {
        Self::boxed(token, ExprValue::BinOp(op, lhs, rhs))
    }

    /// A function application: `iden(args...)`.
    pub fn function_app(
        token: Token<'a>,
        iden: String,
        args: Vec<Box<BasicExpr<'a>>>,
    ) -> Box<BasicExpr<'a>> {
        Self::boxed(token, ExprValue::FunctionApp(iden, args))
    }

    /// A block of sequenced expressions.
    pub fn block(token: Token<'a>, exprs: Vec<Box<BasicExpr<'a>>>) -> Box<BasicExpr<'a>> {
        Self::boxed(token, ExprValue::Block(exprs))
    }

    /// An `if cond { then_expr } else { else_expr }` conditional.
    pub fn conditional(
        token: Token<'a>,
        cond: Box<BasicExpr<'a>>,
        then_expr: Box<BasicExpr<'a>>,
        else_expr: Box<BasicExpr<'a>>,
    ) -> Box<BasicExpr<'a>> {
        Self::boxed(token, ExprValue::If(IfExpr { cond, then_expr, else_expr }))
    }

    /// An integer literal.
    pub fn const_int(token: Token<'a>, value: i64) -> Box<BasicExpr<'a>> {
        Self::boxed(token, ExprValue::ConstInt(value))
    }

    /// A boolean literal.
    pub fn const_bool(token: Token<'a>, value: bool) -> Box<BasicExpr<'a>> {
        Self::boxed(token, ExprValue::ConstBool(value))
    }

    /// A floating-point literal.
    pub fn const_double(token: Token<'a>, value: f64) -> Box<BasicExpr<'a>> {
        Self::boxed(token, ExprValue::ConstDouble(value))
    }

    /// The unit literal `()`.
    pub fn const_unit(token: Token<'a>) -> Box<BasicExpr<'a>> {
        Self::boxed(token, ExprValue::ConstUnit)
    }

    /// A variable reference by identifier.
    pub fn var(token: Token<'a>, iden: String) -> Box<BasicExpr<'a>> {
        Self::boxed(token, ExprValue::Var(iden))
    }
}

// Allows code generic over `Borrow<ExprValue<_>>` to look straight at the
// expression payload without caring about the wrapping node.
impl<'a> Borrow<ExprValue<BasicExpr<'a>>> for BasicExpr<'a> {
    fn borrow(&self) -> &ExprValue<BasicExpr<'a>> {
        &self.value
    }
}
// FIXED fn foo<T: Copy>(v: &[T], idx: &[usize]) { let _ = idx.iter().map(|_| v[0]); } fn main() {}
mod vm;
mod compiler;

#[cfg(test)]
mod tests {
    use crate::compiler::{*, Expr::*};
    use crate::vm::{*, Instruction::*};

    /// Pushing a single integer leaves it on the stack.
    #[test]
    fn vm_works() {
        let program = [PushI(1)];
        let mut machine = VmState::new(&program);
        assert_eq!(machine.run(), Ok(()));
        assert_eq!(machine.get_int(0), Ok(1));
    }

    /// Integer addition: 1 + 4 = 5.
    #[test]
    fn vm_adds() {
        let program = [PushI(1), PushI(4), AddI];
        let mut machine = VmState::new(&program);
        assert_eq!(machine.run(), Ok(()));
        assert_eq!(machine.get_int(0), Ok(5));
    }

    /// Integer subtraction: 1 - 4 = -3.
    #[test]
    fn vm_subs() {
        let program = [PushI(1), PushI(4), SubI];
        let mut machine = VmState::new(&program);
        assert_eq!(machine.run(), Ok(()));
        assert_eq!(machine.get_int(0), Ok(-3));
    }

    /// Float addition. The exact comparison is stable here because
    /// 1.1 + 4.5 rounds to exactly the f64 representation of 5.6.
    #[test]
    fn vm_adds_float() {
        let program = [PushF(1.1), PushF(4.5), AddF];
        let mut machine = VmState::new(&program);
        assert_eq!(machine.run(), Ok(()));
        assert_eq!(machine.get_float(0), Ok(5.6));
    }

    /// A bare literal compiles to a single push.
    #[test]
    fn compiles_lit() {
        assert_eq!(compile(LitI(42)), vec![PushI(42)]);
    }

    /// Addition compiles to push-push-add (postfix order).
    #[test]
    fn compiles_add() {
        assert_eq!(
            compile(Add(Box::new(LitI(2)), Box::new(LitI(3)))),
            vec![PushI(2), PushI(3), AddI]
        );
    }

    /// End-to-end: compile an addition, run it, inspect the stack.
    #[test]
    fn vm_adds_compiled() {
        let program = compile(Add(Box::new(LitI(2)), Box::new(LitI(3))));
        let mut machine = VmState::new(&program);
        assert_eq!(machine.run(), Ok(()));
        assert_eq!(machine.get_int(0), Ok(5));
    }

    /// End-to-end: compile a subtraction, run it, inspect the stack.
    #[test]
    fn vm_subs_compiled() {
        let program = compile(Sub(Box::new(LitI(2)), Box::new(LitI(3))));
        let mut machine = VmState::new(&program);
        assert_eq!(machine.run(), Ok(()));
        assert_eq!(machine.get_int(0), Ok(-1));
    }
}
//! Protection Buffer Size //! //! To protect the data channel as well, the PBSZ command, followed by the PROT command //! sequence, MUST be used. The PBSZ (protection buffer size) command, as detailed //! in [RFC-2228], is compulsory prior to any PROT command. //! //! For FTP-TLS, which appears to the FTP application as a streaming protection mechanism, this //! is not required. Thus, the PBSZ command MUST still be issued, but must have a parameter //! of '0' to indicate that no buffering is taking place and the data connection should //! not be encapsulated. use crate::{ auth::UserDetail, server::controlchan::{ error::ControlChanError, handler::{CommandContext, CommandHandler}, Reply, ReplyCode, }, storage::{Metadata, StorageBackend}, }; use async_trait::async_trait; #[derive(Debug)] pub struct Pbsz; #[async_trait] impl<Storage, User> CommandHandler<Storage, User> for Pbsz where User: UserDetail + 'static, Storage: StorageBackend<User> + 'static, Storage::Metadata: Metadata, { #[tracing_attributes::instrument] async fn handle(&self, _args: CommandContext<Storage, User>) -> Result<Reply, ControlChanError> { Ok(Reply::new(ReplyCode::CommandOkay, "OK")) } }
mod api;
mod app;

/// Entry point: delegate all work to `app::run`, and on failure print the
/// error to stderr and exit with a non-zero status.
fn main() {
    match app::run() {
        Ok(_) => {}
        Err(error) => {
            eprintln!("{}", error);
            std::process::exit(1);
        }
    }
}
// Piston-window "dodge" environment: an agent-controlled square (`machine`)
// must avoid obstacle squares that spawn at the window edges. Reward
// accumulates per update tick; an episode ends on collision.
// NOTE(review): `Object` and `Side` come from the `object` submodule, which
// is not visible here — statements about them are inferred from usage and
// should be confirmed against that module.
use piston_window::*;
use std::thread;
use std::sync::mpsc;
use std::time::Duration;
use rand::{thread_rng, Rng};
use self::object::*;

pub mod object;

// Per-episode spawn budget: once `count` reaches this, no more obstacles are
// created. (Sic: "MAXINUM" is the original spelling; it is code, so kept.)
const MAXINUM: u32 = 10;
// Milliseconds to wait for a freshly spawned obstacle on the channel.
const TIME_LIMIT: u64 = 2000;
// Number of finished episodes after which `start` returns.
const MAX_EPISODE: u32 = 100;
// NOTE(review): unused in this file — possibly intended for batched training
// logic elsewhere; confirm before removing.
const BATCH_SIZE: u32 = 200;

// Mutable episode state: running reward, episode counter, live obstacles.
pub struct Environment {
    reward: f64,
    episode: u32,
    obstacles: Vec<Object>,
}

impl Environment {
    // Fresh environment: zero reward, episode 0, no obstacles.
    pub fn new() -> Self {
        Environment {
            reward: 0.0,
            episode: 0,
            obstacles: Vec::new(),
        }
    }

    // Opens the game window and runs up to MAX_EPISODE episodes, driving
    // `machine` through the Piston event loop until the window closes or the
    // episode limit is hit.
    pub fn start(&mut self, machine: &mut Object) {
        let opengls = OpenGL::V4_5;
        let mut window: PistonWindow = WindowSettings::new("machine_dodge", [400, 400])
            .opengl(opengls)
            .exit_on_esc(true)
            .build()
            .unwrap();
        // Channel used to hand obstacles built on worker threads back to the
        // event loop.
        let (tx, rx) = mpsc::channel();
        // `start`: first render of an episode pending; `game_end`: collision
        // detected; `count`: obstacles spawned so far this episode.
        let mut start: bool = true;
        let mut game_end: bool = false;
        let mut count = 0;
        let time_limit = Duration::from_millis(TIME_LIMIT);
        while let Some(e) = window.next() {
            if let Some(r) = e.render_args() {
                // First frame of an episode: center the machine and give it a
                // random direction.
                if start {
                    machine.set_pos(&r, Side::Center);
                    machine.random_arrow_set();
                    start = false;
                }
                // Collision happened on the previous update tick: reset
                // per-episode state and start the next episode (or stop once
                // MAX_EPISODE is exceeded).
                if game_end {
                    self.episode += 1;
                    if self.episode > MAX_EPISODE {
                        break;
                    }
                    self.reward = 0.0;
                    start = true;
                    game_end = false;
                    count = 0;
                    self.obstacles.clear();
                    continue;
                }
                // Spawn 1..=3 obstacles per frame until the budget is spent
                // (`count` may overshoot MAXINUM by up to 2).
                if count < MAXINUM {
                    let rng = thread_rng().gen_range(1, 4);
                    count += rng;
                    for _ in 0..rng {
                        let tx = tx.clone();
                        // NOTE(review): each obstacle is built on a worker
                        // thread but received synchronously right below, so
                        // this adds no real parallelism.
                        thread::spawn(move || {
                            let mut obstacle = Object::new(10.0, 10.0);
                            // Pick a random window edge to spawn from.
                            let position = match thread_rng().gen_range(0, 4) {
                                0 => Side::Up,
                                1 => Side::Right,
                                2 => Side::Down,
                                3 => Side::Left,
                                _ => {
                                    panic!("system error");
                                }
                            };
                            obstacle.set_pos(&r, position);
                            obstacle.set_speed(30.0);
                            obstacle.set_color([0.0, 0.0, 1.0, 1.0]);
                            obstacle.random_arrow_set();
                            tx.send(obstacle).unwrap();
                        });
                        // Blocks until the worker delivers, or TIME_LIMIT
                        // elapses. NOTE(review): the "send fail" message is
                        // really a receive timeout/disconnect (message kept —
                        // it is runtime output).
                        let temp = match rx.recv_timeout(time_limit) {
                            Ok(result) => result,
                            Err(_) => {
                                panic!("send fail");
                            }
                        };
                        self.obstacles.push(temp);
                    }
                }
                // Render pass: clear, draw the machine, then every obstacle.
                window.draw_2d(&e, |c, g| {
                    let transform = c.transform
                        .trans(machine.current_state.0, machine.current_state.1);
                    machine.set_place(&r);
                    clear([0.0, 0.0, 0.0, 1.0], g);
                    Rectangle::new(machine.color).draw([0.0, 0.0, machine.size.0, machine.size.1], &c.draw_state, transform, g);
                    for obstacle in self.obstacles.iter_mut() {
                        let transform = c.transform
                            .trans(obstacle.current_state.0, obstacle.current_state.1);
                        obstacle.set_place(&r);
                        Rectangle::new(obstacle.color)
                            .draw([0.0, 0.0, obstacle.size.0, obstacle.size.1], &c.draw_state, transform, g);
                    }
                });
            }
            if let Some(u) = e.update_args() {
                // Physics tick: move the machine, check collisions (stop at
                // the first hit), move surviving obstacles, pay out a small
                // survival reward.
                machine.update(&u);
                for obstacle in self.obstacles.iter_mut() {
                    {
                        game_end = machine.is_hit(&obstacle);
                        if game_end {
                            break;
                        }
                    }
                    obstacle.update(&u);
                }
                self.reward += 0.01;
            }
        }
    }
}
use super::{Backend, Component, State};
use tui::{
    layout::Rect,
    style::{Modifier, Style},
    text::{Span, Spans},
    widgets::{Block, Borders, Paragraph},
    Frame,
};

/// The help bar widget: lists the available actions, with the first letter
/// of each entry underlined as its hotkey hint.
pub struct HelpBar;

const MENU_ITEMS: [&str; 4] = ["Select", "Filter", "Delete", "Quit"];

impl Component for HelpBar {
    fn render_to(frame: &mut Frame<Backend>, layout: Rect, _state: &mut State) {
        // Build one flat list of spans: for every entry, a separator (except
        // before the first), the underlined first character, then the rest.
        let mut pieces = Vec::new();
        for (idx, item) in MENU_ITEMS.iter().enumerate() {
            if idx > 0 {
                pieces.push(Span::from(" "));
            }
            let (hotkey, tail) = item.split_at(1);
            pieces.push(Span::styled(
                hotkey,
                Style::default().add_modifier(Modifier::UNDERLINED),
            ));
            pieces.push(Span::from(tail));
        }

        let help_bar = Paragraph::new(Spans::from(pieces))
            .block(Block::default().title("Help").borders(Borders::ALL));
        frame.render_widget(help_bar, layout)
    }
}
/*! * Rust bindings for [LevelDB](https://code.google.com/p/leveldb/), a fast and * lightweight key/value database library from Google. * * Warning: Some portions of this library are still unsafe to use, in that it * is possible to call methods from LevelDB with stale pointers, or otherwise * cause memory-unsafety. If you'd like to avoid this, and until I fix them, * please don't use: * * - Custom comparators * - DB snapshots * * And please be careful with write batches. Patches are welcome! */ #![crate_type = "lib"] #![warn(missing_docs)] #![warn(non_upper_case_globals)] #![warn(unused_qualifications)] extern crate libc; extern crate leveldb_sys; use std::cmp::Ordering; use std::ffi::{CStr,CString}; use std::path::Path; use std::mem::transmute; use std::ptr; use std::slice; use std::sync::Arc; use std::str; use libc::{c_char, c_int, c_uchar, c_void, size_t}; use leveldb_sys as cffi; /// Our error type #[derive(Clone, Hash, PartialEq, Eq, Debug)] pub enum LevelDBError { /// An error from the LevelDB C library. LibraryError(String), /// Out of memory. OutOfMemoryError, } impl std::fmt::Display for LevelDBError { fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { match *self { LevelDBError::LibraryError(ref msg) => msg.fmt(f), LevelDBError::OutOfMemoryError => write!(f, "Out of memory"), } } } impl LevelDBError { fn lib_error(errptr: *mut c_char) -> LevelDBError { // Convert to a rust String, then free the LevelDB string. let p = errptr as *const c_char; let slice = unsafe { CStr::from_ptr(p) }; let err = str::from_utf8(slice.to_bytes()).unwrap_or("Invalid error message").to_string(); unsafe { cffi::leveldb_free(errptr as *mut c_void) }; LevelDBError::LibraryError(err) } } /// An alias for `Result<T, LevelDBError>` pub type LevelDBResult<T> = Result<T, LevelDBError>; // Provides an errptr for use with LevelDB, and properly returns a Result if // it's non-null. 
fn with_errptr<F, T>(mut f: F) -> LevelDBResult<T> where F: FnMut(*mut *mut c_char) -> T { let mut errptr: *mut c_char = ptr::null_mut(); let ret = f(&mut errptr as *mut *mut c_char); if !errptr.is_null() { Err(LevelDBError::lib_error(errptr)) } else { Ok(ret) } } fn bool_to_uchar(val: bool) -> c_uchar { if val { 1 as c_uchar } else { 0 as c_uchar } } fn uchar_to_bool(val: c_uchar) -> bool { if val == 0 { false } else { true } } /** * This structure represents options that can be used when constructing a * LevelDB instance. */ pub struct DBOptions { opts: *mut cffi::leveldb_options_t, // An (optional) comparator. We hold on to a pointer to this to keep it // alive for the lifetime of this options struct. comparator: Option<DBComparator>, } impl DBOptions { /** * Create and return a new DBOptions instance. Returns `None` if the * underlying library call returns a null pointer. */ pub fn new() -> Option<DBOptions> { let opts = unsafe { cffi::leveldb_options_create() }; if opts.is_null() { None } else { Some(DBOptions { opts: opts, comparator: None, }) } } /** * Set the comparator to use for this database. By default, LevelDB uses * a comparator does a lexicographic comparison. * Note also that a comparator must be thread-safe. */ pub fn set_comparator(&mut self, cmp: DBComparator) -> &mut DBOptions { unsafe { cffi::leveldb_options_set_comparator(self.opts, cmp.state.ptr); } // Hold on to this comparator so it gets dropped (and thus destroyed/ // freed) when we do. self.comparator = Some(cmp); self } /** * Create the database if it's missing when we try to open it. * * Default: false */ pub fn set_create_if_missing(&mut self, val: bool) -> &mut DBOptions { unsafe { cffi::leveldb_options_set_create_if_missing(self.opts, bool_to_uchar(val)); } self } /** * Return an error if the database already exists. 
* * Default: false */ pub fn set_error_if_exists(&mut self, val: bool) -> &mut DBOptions { unsafe { cffi::leveldb_options_set_error_if_exists(self.opts, bool_to_uchar(val)); } self } /** * If set to true, the library will do aggressive checking of all data * that it is processing and will stop early if it detects any errors. * * Default: false */ pub fn set_paranoid_checks(&mut self, val: bool) -> &mut DBOptions { unsafe { cffi::leveldb_options_set_paranoid_checks(self.opts, bool_to_uchar(val)); } self } /** * Amount of data to build up in memory (backed by an unsorted log on-disk) * before converting to a sorted on-disk file. * * Default: 4MiB */ pub fn set_write_buffer_size(&mut self, val: usize) -> &mut DBOptions { unsafe { cffi::leveldb_options_set_write_buffer_size(self.opts, val as size_t); } self } /** * Number of open files that can be used by the DB. This value should be * approximately one open file per 2MB of working set. * * Default: 1000 */ pub fn set_max_open_files(&mut self, val: isize) -> &mut DBOptions { unsafe { cffi::leveldb_options_set_max_open_files(self.opts, val as c_int); } self } /** * Approximate size of user data packed per block. Note that this * corresponds to uncompressed data. * * Default: 4KB */ pub fn set_block_size(&mut self, val: usize) -> &mut DBOptions { unsafe { cffi::leveldb_options_set_block_size(self.opts, val as size_t); } self } /** * Number of keys between restart points for delta encoding of keys. Most * clients should not change this parameter. * * Default: 16 */ pub fn set_block_restart_interval(&mut self, val: isize) -> &mut DBOptions { unsafe { cffi::leveldb_options_set_block_restart_interval(self.opts, val as c_int); } self } /** * Enable or disable compression. Note that the default compression * algorithm, Snappy, is significantly faster than most persistent storage. * Thus, it's typically never worth switching this off. 
* * Default: true */ #[cfg(feature = "snappy")] pub fn set_compression(&mut self, val: bool) -> &mut DBOptions { let val = if val { cffi::Compression::Snappy } else { cffi::Compression::No }; unsafe { cffi::leveldb_options_set_compression(self.opts, val); } self } unsafe fn ptr(&self) -> *const cffi::leveldb_options_t { self.opts as *const cffi::leveldb_options_t } } impl Drop for DBOptions { fn drop(&mut self) { unsafe { cffi::leveldb_options_destroy(self.opts) }; } } /** * An internal structure to represent the comparator state. Note that, since * the comparator struct is movable, we can't take the address of it and pass * that to leveldb_comparator_create. So, we keep the internal state in a Box, * and can move the outer struct around freely. */ struct DBComparatorState { name: &'static str, cmp: Box<Fn(&[u8], &[u8]) -> Ordering + 'static>, ptr: *mut cffi::leveldb_comparator_t, } impl Drop for DBComparatorState { fn drop(&mut self) { unsafe { cffi::leveldb_comparator_destroy(self.ptr) }; } } /** * This structure represents a comparator for use in LevelDB. */ pub struct DBComparator { state: Box<DBComparatorState>, } impl DBComparator { /** * Create a new comparator with the given name and comparison function. 
*/ pub fn new<F: 'static>(name: &'static str, cmp: F) -> DBComparator where F: Fn(&[u8], &[u8]) -> Ordering { let mut state = Box::new(DBComparatorState { name: name, cmp: Box::new(cmp), ptr: ptr::null_mut(), }); let ptr = unsafe { cffi::leveldb_comparator_create( transmute(&*state), comparator_destructor_callback, comparator_compare_callback, comparator_name_callback, ) }; state.ptr = ptr; DBComparator { state: state, } } } #[allow(dead_code)] extern "C" fn comparator_destructor_callback(_state: *mut c_void) { // Do nothing } #[allow(dead_code)] extern "C" fn comparator_compare_callback(state: *mut c_void, a: *const c_char, alen: size_t, b: *const c_char, blen: size_t) -> c_int { unsafe { // This is only safe since Box<T> is implemented as `struct Box(*mut T)` let cmp: *const DBComparatorState = transmute(state); let a_slice = slice::from_raw_parts::<u8>(a as *const u8, alen as usize); let b_slice = slice::from_raw_parts::<u8>(b as *const u8, blen as usize); // Comment from include/leveldb/comparator.h: // Three-way comparison. Returns value: // < 0 iff "a" < "b", // == 0 iff "a" == "b", // > 0 iff "a" > "b" match ((*cmp).cmp)(a_slice, b_slice) { Ordering::Less => -1, Ordering::Equal => 0, Ordering::Greater => 1, } } } #[allow(dead_code)] extern "C" fn comparator_name_callback(state: *mut c_void) -> *const c_char { unsafe { // This is only safe since Box<T> is implemented as `struct Box(*mut T)` let cmp: *const DBComparatorState = transmute(state); // This is safe to return, since the string has a static lifetime (*cmp).name.as_ptr() as *const c_char } } /** * This structure represents options that can be used when reading from a * LevelDB instance. */ pub struct DBReadOptions { opts: *mut cffi::leveldb_readoptions_t, } impl DBReadOptions { /** * Create and return a new DBReadOptions instance. Returns `None` if the * underlying library call returns a null pointer. 
*/ pub fn new() -> Option<DBReadOptions> { let opts = unsafe { cffi::leveldb_readoptions_create() }; if opts.is_null() { None } else { Some(DBReadOptions { opts: opts, }) } } /** * If set to 'true', all data read from the underlying storage will be * verified against corresponding checksums. * * Defaults to 'false'. */ pub fn set_verify_checksums(&mut self, val: bool) -> &mut DBReadOptions { unsafe { cffi::leveldb_readoptions_set_verify_checksums(self.opts, bool_to_uchar(val)); } self } /** * Set whether the data read for this iteration should be cached in memory. * * Defaults to 'true'. */ pub fn set_fill_cache(&mut self, val: bool) -> &mut DBReadOptions { unsafe { cffi::leveldb_readoptions_set_fill_cache(self.opts, bool_to_uchar(val)); } self } /** * Set the snapshot to use when reading from the database. If this is not * set, then an implicit snapshot - of the state as of the beginning of the * read operation - will be used. * * Note: currently private, since all access should be performed through * DBSnapshot */ fn set_snapshot(&mut self, snap: *const cffi::leveldb_snapshot_t) -> &mut DBReadOptions { unsafe { cffi::leveldb_readoptions_set_snapshot(self.opts, snap); } self } unsafe fn ptr(&self) -> *const cffi::leveldb_readoptions_t { self.opts as *const cffi::leveldb_readoptions_t } } impl Drop for DBReadOptions { fn drop(&mut self) { unsafe { cffi::leveldb_readoptions_destroy(self.opts) }; } } /** * This structure represents options that can be used when writing to a LevelDB * instance. */ pub struct DBWriteOptions { opts: *mut cffi::leveldb_writeoptions_t, } impl DBWriteOptions { /** * Create and return a new DBWriteOptions instance. Returns `None` if the * underlying library call returns a null pointer. 
*/ pub fn new() -> Option<DBWriteOptions> { let opts = unsafe { cffi::leveldb_writeoptions_create() }; if opts.is_null() { None } else { Some(DBWriteOptions { opts: opts, }) } } /** * Set whether the write will be flushed to disk before the write is * considered "complete". Essentially, if a write is performed without * this value set, it has the same semantics as the `write()` syscall. If * sync is set, the semantics are the same as a `write()` followed by a * `fsync()` call. * * The default value is false. */ pub fn set_sync(&mut self, val: bool) -> &mut DBWriteOptions { unsafe { cffi::leveldb_writeoptions_set_sync(self.opts, bool_to_uchar(val)); } self } unsafe fn ptr(&self) -> *const cffi::leveldb_writeoptions_t { self.opts as *const cffi::leveldb_writeoptions_t } } impl Drop for DBWriteOptions { fn drop(&mut self) { unsafe { cffi::leveldb_writeoptions_destroy(self.opts) }; } } /** * A write batch holds a collection of updates to apply atomically to a * database. Updates are applied in the order in which they are added to the * write batch. */ pub struct DBWriteBatch { batch: *mut cffi::leveldb_writebatch_t, } impl DBWriteBatch { /** * Create a new, empty write batch. Returns None if the underlying library * call returns a null pointer. */ pub fn new() -> Option<DBWriteBatch> { let batch = unsafe { cffi::leveldb_writebatch_create() }; if batch.is_null() { None } else { Some(DBWriteBatch { batch: batch, }) } } /** * Set the database entry for "key" to "value". See `put()` on `DB` for * more information. */ pub fn put(&mut self, key: &[u8], val: &[u8]) { // TODO: does the API copy the underlying key/value, or do we need to // ensure it lives long enough? unsafe { cffi::leveldb_writebatch_put( self.batch, key.as_ptr() as *const c_char, key.len() as size_t, val.as_ptr() as *const c_char, val.len() as size_t ) } } /** * Clear all updates buffered in this write batch. 
*/ pub fn clear(&mut self) { unsafe { cffi::leveldb_writebatch_clear(self.batch) }; } /** * If the database contains the given key, erase it. Otherwise, do * nothing. */ pub fn delete(&mut self, key: &[u8]) { unsafe { cffi::leveldb_writebatch_delete( self.batch, key.as_ptr() as *const c_char, key.len() as size_t ) }; } /** * Iterate over the contents of the write batch by calling callbacks for * each operation in the batch. */ pub fn iterate<'a, F: 'a, G: 'a>(&'a self, put: F, delete: G) where F: FnMut(&'a [u8], &'a [u8]) + 'a, G: FnMut(&'a [u8]) + 'a, { let mut it = DBWriteBatchIter { put: Box::new(put), delete: Box::new(delete), }; unsafe { cffi::leveldb_writebatch_iterate( self.batch, &mut it as *mut _ as *mut c_void, writebatch_put_callback, writebatch_delete_callback ); }; } } struct DBWriteBatchIter<'a> { pub put: Box<FnMut(&'a [u8], &'a [u8]) + 'a>, pub delete: Box<FnMut(&'a [u8]) + 'a>, } // Callback for DBWriteBatchIter extern "C" fn writebatch_put_callback(state: *mut c_void, key: *const c_char, klen: size_t, val: *const c_char, vlen: size_t) { let it = state as *mut DBWriteBatchIter; let key_slice = unsafe { slice::from_raw_parts::<u8>(key as *const u8, klen as usize) }; let val_slice = unsafe { slice::from_raw_parts::<u8>(val as *const u8, vlen as usize) }; unsafe { ((*it).put)(key_slice, val_slice) }; } // Callback for DBWriteBatchIter extern "C" fn writebatch_delete_callback(state: *mut c_void, key: *const c_char, klen: size_t) { let it = state as *mut DBWriteBatchIter; let key_slice = unsafe { slice::from_raw_parts::<u8>(key as *const u8, klen as usize) }; unsafe { ((*it).delete)(key_slice) }; } impl Drop for DBWriteBatch { fn drop(&mut self) { unsafe { cffi::leveldb_writebatch_destroy(self.batch) }; } } /** * This structure represents an iterator over the database. Note that since * the next() function is bounded by a lifetime, it does not (quite) conform * to the Iterator trait. To get this, use the alloc() helper. 
*/ pub struct DBIterator { iter: *mut cffi::leveldb_iterator_t, } impl DBIterator { // Note: deliberately not public fn new(i: *mut cffi::leveldb_iterator_t) -> DBIterator { unsafe { cffi::leveldb_iter_seek_to_first(i) }; DBIterator { iter: i, } } /** * Return the next key/value pair from this iterator. */ pub fn next<'a>(&'a mut self) -> Option<(&'a [u8], &'a [u8])> { if !uchar_to_bool(unsafe { cffi::leveldb_iter_valid(self.ptr()) }) { return None; } let key_slice = unsafe { let mut keylen: size_t = 0; let key = cffi::leveldb_iter_key(self.ptr(), &mut keylen as *mut size_t); slice::from_raw_parts::<u8>(key as *const u8, keylen as usize) }; let val_slice = unsafe { let mut vallen: size_t = 0; let val = cffi::leveldb_iter_value( self.ptr(), &mut vallen as *mut size_t); slice::from_raw_parts::<u8>(val as *const u8, vallen as usize) }; unsafe { cffi::leveldb_iter_next(self.iter) }; Some((key_slice, val_slice)) } /** * Return an instance of DBIteratorAlloc, an iterator that implements the * Iterator trait, but allocates new Vec<u8>s for each item. Note that * this consumes the DBIterator instance, so it can't be used again. */ pub fn alloc(self) -> DBIteratorAlloc { DBIteratorAlloc::new(self) } /** * Seek to the beginning of the database. */ pub fn seek_to_first(&mut self) { unsafe { cffi::leveldb_iter_seek_to_first(self.iter) }; } /** * Seek to the end of the database. */ pub fn seek_to_last(&mut self) { unsafe { cffi::leveldb_iter_seek_to_last(self.iter) }; } /** * Seek to the first key in the database that is at or past the given * target key. */ pub fn seek(&mut self, key: &[u8]) { unsafe { cffi::leveldb_iter_seek( self.iter, key.as_ptr() as *const c_char, key.len() as size_t ); } } /** * Move to the previous item in the database. 
*/ pub fn prev(&mut self) { unsafe { cffi::leveldb_iter_prev(self.iter) }; } fn ptr(&self) -> *const cffi::leveldb_iterator_t { self.iter as *const cffi::leveldb_iterator_t } } impl Drop for DBIterator { fn drop(&mut self) { unsafe { cffi::leveldb_iter_destroy(self.iter) }; } } /** * An iterator over a database that implements the standard library's Iterator * trait. */ pub struct DBIteratorAlloc { underlying: DBIterator, } impl DBIteratorAlloc { // Note: deliberately not public fn new(i: DBIterator) -> DBIteratorAlloc { DBIteratorAlloc { underlying: i, } } /** * Wraps the underlying `seek_to_first` call. */ pub fn seek_to_first(&mut self) { self.underlying.seek_to_first() } /** * Wraps the underlying `seek_to_last` call. */ pub fn seek_to_last(&mut self) { self.underlying.seek_to_last() } /** * Wrap the underlying `seek` call. */ pub fn seek(&mut self, key: &[u8]) { self.underlying.seek(key) } } impl Iterator for DBIteratorAlloc { type Item = (Vec<u8>, Vec<u8>); fn next(&mut self) -> Option<(Vec<u8>, Vec<u8>)> { match self.underlying.next() { Some((key, val)) => { Some((key.to_vec(), val.to_vec())) }, None => None, } } } /** * An immutable snapshot of the database at a point in time. */ pub struct DBSnapshot { sn: *mut cffi::leveldb_snapshot_t, // We can't save a pointer to the underlying DB itself, since that would // prevent any further mutation (something that we want to allow). db: DBImplPtr, } impl DBSnapshot { // Note: deliberately not public fn new_from(db: &DBImplPtr) -> DBSnapshot { // Clone the underlying database to ensure that it doesn't go away. let db = db.clone(); let sn = unsafe { cffi::leveldb_create_snapshot(db.db) }; DBSnapshot { sn: sn, db: db, } } /** * As `DB.get`, except operating on the state of this snapshot. 
*/ pub fn get(&self, key: &[u8]) -> LevelDBResult<Option<Vec<u8>>> { // TODO: proper return code for OOM let opts = match DBReadOptions::new() { Some(o) => o, None => return Err(LevelDBError::OutOfMemoryError), }; self.get_opts(key, opts) } /** * As `DB.get_opts`, except operating on the state of this snapshot. */ pub fn get_opts(&self, key: &[u8], opts: DBReadOptions) -> LevelDBResult<Option<Vec<u8>>> { let mut opts = opts; opts.set_snapshot(self.sn as *const cffi::leveldb_snapshot_t); self.db.get(key, opts) } /** * As `DB.iter`, except operating on the state of this snapshot. */ pub fn iter(&self) -> LevelDBResult<DBIterator> { // TODO: proper return code for OOM let mut opts = match DBReadOptions::new() { Some(o) => o, None => return Err(LevelDBError::OutOfMemoryError), }; opts.set_snapshot(self.sn as *const cffi::leveldb_snapshot_t); Ok(self.db.iter(opts)) } } impl Drop for DBSnapshot { fn drop(&mut self) { unsafe { cffi::leveldb_release_snapshot( self.db.db, self.sn as *const cffi::leveldb_snapshot_t, ) }; } } /* * This internal structure keeps a reference to the underlying database, along * with any other information that needs to be freed when the database goes out * of scope (e.g. DB options). It also provides the base interfaces for * reading/writing to the database. * * WARNING: The methods on this take &self, since LevelDB itself is safe for * concurrent access without any synchronization. Note that this *does* mutate * the database! We enforce synchronization at the DB / DBSnapshot level, as * opposed to this level. */ struct DBImpl { // The DB handle db: *mut cffi::leveldb_t, // The options used to open the database. We need to keep this alive for // the lifetime of the database. 
#[allow(dead_code)] opts: DBOptions, } impl DBImpl { fn open(path: &Path, opts: DBOptions) -> LevelDBResult<DBImplPtr> { let res = with_errptr(|errptr| { let c_string = CString::new(path.to_str().unwrap()).unwrap(); unsafe { cffi::leveldb_open(opts.ptr(), c_string.as_ptr(), errptr) } }); let db = match res { Ok(db) => db, Err(v) => return Err(v), }; Ok(Arc::new(DBImpl { db: db, opts: opts, })) } fn put(&self, key: &[u8], val: &[u8], opts: DBWriteOptions) -> LevelDBResult<()> { try!(with_errptr(|errptr| { unsafe { cffi::leveldb_put( self.db, opts.ptr(), key.as_ptr() as *const c_char, key.len() as size_t, val.as_ptr() as *const c_char, val.len() as size_t, errptr ) } })); Ok(()) } fn delete(&self, key: &[u8], opts: DBWriteOptions) -> LevelDBResult<()> { try!(with_errptr(|errptr| { unsafe { cffi::leveldb_delete( self.db, opts.ptr(), key.as_ptr() as *const c_char, key.len() as size_t, errptr ) } })); Ok(()) } fn write(&self, batch: DBWriteBatch, opts: DBWriteOptions) -> LevelDBResult<()> { try!(with_errptr(|errptr| { unsafe { cffi::leveldb_write( self.db, opts.ptr(), batch.batch, errptr ) } })); Ok(()) } fn get(&self, key: &[u8], opts: DBReadOptions) -> LevelDBResult<Option<Vec<u8>>> { let mut size: size_t = 0; let buff = try!(with_errptr(|errptr| { unsafe { cffi::leveldb_get( self.db, opts.ptr(), key.as_ptr() as *const c_char, key.len() as size_t, &mut size as *mut size_t, errptr ) } })); if buff.is_null() { return Ok(None) } let size = size as usize; let vec: Vec<u8> = unsafe { slice::from_raw_parts(buff as *mut u8, size).to_vec() }; Ok(Some(vec)) } fn iter(&self, opts: DBReadOptions) -> DBIterator { let it = unsafe { cffi::leveldb_create_iterator( self.db, opts.ptr() ) }; DBIterator::new(it) } } impl Drop for DBImpl { fn drop(&mut self) { unsafe { cffi::leveldb_close(self.db) } } } // A reference-counted pointer to a database implementation. We need to use // this, since snapshots can hold on to a reference to a DB. 
type DBImplPtr = Arc<DBImpl>; /** * This struct represents an open instance of the database. */ #[derive(Clone)] pub struct DB { db: DBImplPtr, } unsafe impl Send for DB {} unsafe impl Sync for DB {} impl DB { /** * Open a database at the given path. Returns a Result indicating whether * the database could be opened. Note that this function will not create * the database at the given location if it does not exist. */ pub fn open(path: &Path) -> LevelDBResult<DB> { // TODO: proper return code for OOM let opts = match DBOptions::new() { Some(o) => o, None => return Err(LevelDBError::OutOfMemoryError), }; DB::open_with_opts(path, opts) } /** * Create and returns a database at the given path. */ pub fn create(path: &Path) -> LevelDBResult<DB> { // TODO: proper return code for OOM let mut opts = match DBOptions::new() { Some(o) => o, None => return Err(LevelDBError::OutOfMemoryError), }; // TODO: can we remove a previously-existing database? opts.set_create_if_missing(true); DB::open_with_opts(path, opts) } /** * Open a database at the given path, using the provided options to control * the open behaviour. Returns a Result indicating whether or not the * database could be opened. */ pub fn open_with_opts(path: &Path, opts: DBOptions) -> LevelDBResult<DB> { match DBImpl::open(path, opts) { Ok(x) => Ok(DB { db: x }), Err(why) => Err(why), } } /** * Set the database entry for "key" to "value". Returns a result indicating * the success or failure of the operation. */ pub fn put(&mut self, key: &[u8], val: &[u8]) -> LevelDBResult<()> { // TODO: proper return code for OOM let opts = match DBWriteOptions::new() { Some(o) => o, None => return Err(LevelDBError::OutOfMemoryError), }; self.put_opts(key, val, opts) } /** * Set the database entry for "key" to "value". Allows specifying the * write options to use for this operaton. 
*/ pub fn put_opts(&mut self, key: &[u8], val: &[u8], opts: DBWriteOptions) -> LevelDBResult<()> { self.db.put(key, val, opts) } /** * Remove the database entry (if any) for "key". Returns a result * indicating the success of the operation. It is not an error if "key" * did not exist in the database. */ pub fn delete(&mut self, key: &[u8]) -> LevelDBResult<()> { // TODO: proper return code for OOM let opts = match DBWriteOptions::new() { Some(o) => o, None => return Err(LevelDBError::OutOfMemoryError), }; self.delete_opts(key, opts) } /** * Remove the database entry (if any) for "key". As `delete()`, but allows * specifying the write options to use for this operation. */ pub fn delete_opts(&mut self, key: &[u8], opts: DBWriteOptions) -> LevelDBResult<()> { self.db.delete(key, opts) } /** * Apply the specified updates to the database, as given in the provided * DBWriteBatch. Returns a result indicating the success of the operation. */ pub fn write(&mut self, batch: DBWriteBatch) -> LevelDBResult<()> { // TODO: proper return code for OOM let opts = match DBWriteOptions::new() { Some(o) => o, None => return Err(LevelDBError::OutOfMemoryError), }; self.write_opts(batch, opts) } /** * Apply the given write batch. As `write()`, but allows specifying the * write options to use for this operation. */ pub fn write_opts(&mut self, batch: DBWriteBatch, opts: DBWriteOptions) -> LevelDBResult<()> { self.db.write(batch, opts) } /** * If the database contains an entry for "key", return the associated value * - otherwise, return None. This value is wrapped in a Result to indicate * if an error occurred. */ pub fn get(&self, key: &[u8]) -> LevelDBResult<Option<Vec<u8>>> { // TODO: proper return code for OOM let opts = match DBReadOptions::new() { Some(o) => o, None => return Err(LevelDBError::OutOfMemoryError), }; self.get_opts(key, opts) } /** * Get the value for a given key. As `get()`, but allows specifying the * options to use when reading. 
*/ pub fn get_opts(&self, key: &[u8], opts: DBReadOptions) -> LevelDBResult<Option<Vec<u8>>> { self.db.get(key, opts) } /** * Return an iterator over the database. */ pub fn iter(&mut self) -> LevelDBResult<DBIterator> { // TODO: proper return code for OOM let opts = match DBReadOptions::new() { Some(o) => o, None => return Err(LevelDBError::OutOfMemoryError), }; Ok(self.db.iter(opts)) } /** * Return a snapshot of the database. */ pub fn snapshot(&self) -> DBSnapshot { DBSnapshot::new_from(&self.db) } // TODO: // - static `destroy()` and `repair` // - set caching // - approximate size / compact range // - property values // - filter policy (what's it do?) // - solve various memory leaks / lifetime issues } #[cfg(test)] mod tests { #![allow(unused_imports)] extern crate tempdir; use self::tempdir::TempDir; use leveldb_sys as ffi; use super::{DB, DBComparator, DBOptions, DBReadOptions, DBWriteBatch}; fn new_temp_db(name: &str) -> DB { let tdir = match TempDir::new(name) { Ok(t) => t, Err(why) => panic!("Error creating temp dir: {:?}", why), }; match DB::create(tdir.path()) { Ok(db) => db, Err(why) => panic!("Error creating DB: {:?}", why), } } #[test] fn test_can_get_version() { let major_ver = unsafe { ffi::leveldb_major_version() }; let minor_ver = unsafe { ffi::leveldb_minor_version() }; assert!(major_ver >= 1); assert!(minor_ver >= 0); } #[test] fn test_can_create() { let tdir = match TempDir::new("create") { Ok(t) => t, Err(why) => panic!("Error creating temp dir: {:?}", why), }; let _db = match DB::create(tdir.path()) { Ok(db) => db, Err(why) => panic!("Error creating DB: {:?}", why), }; } #[test] fn test_put() { let mut db = new_temp_db("put"); match db.put(b"foo", b"bar") { Ok(_) => {}, Err(why) => panic!("Error putting into DB: {:?}", why), }; } #[test] fn test_put_and_get() { let mut db = new_temp_db("put-and-get"); match db.put(b"foo", b"bar") { Ok(_) => {}, Err(why) => panic!("Error putting into DB: {:?}", why), }; match db.get(b"foo") { Ok(v) => 
assert_eq!(v.expect("Value not found"), b"bar"), Err(why) => panic!("Error getting from DB: {:?}", why), }; } #[test] fn test_delete() { let mut db = new_temp_db("delete"); db.put(b"foo", b"bar").unwrap(); db.put(b"abc", b"123").unwrap(); // Note: get --> unwrap Result --> expect Option --> convert Vec to slice assert_eq!(db.get(b"foo").unwrap().expect("Value not found"), b"bar"); assert_eq!(db.get(b"abc").unwrap().expect("Value not found"), b"123"); match db.delete(b"foo") { Ok(_) => {}, Err(why) => panic!("Error deleting from DB: {:?}", why), } assert_eq!(db.get(b"foo").unwrap(), None); assert_eq!(db.get(b"abc").unwrap().expect("Value not found"), b"123"); } #[test] fn test_write_batch() { let mut db = new_temp_db("write-batch"); db.put(b"foo", b"bar").unwrap(); db.put(b"abc", b"123").unwrap(); // Test putting into a write batch let mut batch = DBWriteBatch::new().expect("Error creating batch"); batch.put(b"def", b"456"); batch.put(b"zzz", b"asdfgh"); batch.delete(b"abc"); batch.put(b"zzz", b"qwerty"); // Test iteration let mut puts: Vec<(Vec<u8>, Vec<u8>)> = vec![]; let mut deletes: Vec<Vec<u8>> = vec![]; batch.iterate(|k, v| { puts.push((k.to_vec(), v.to_vec())); }, |k| { deletes.push(k.to_vec()); }); assert_eq!(puts.len(), 3); assert_eq!(deletes.len(), 1); // Test writing match db.write(batch) { Ok(_) => {}, Err(why) => panic!("Error writing to DB: {:?}", why), }; assert_eq!(db.get(b"foo").unwrap().expect("Value not found"), b"bar"); assert_eq!(db.get(b"def").unwrap().expect("Value not found"), b"456"); assert_eq!(db.get(b"zzz").unwrap().expect("Value not found"), b"qwerty"); } #[test] fn test_iteration() { let mut db = new_temp_db("iteration"); db.put(b"foo", b"bar").unwrap(); db.put(b"abc", b"123").unwrap(); let mut it = db.iter().unwrap(); let t1 = match it.next() { Some((key, val)) => { (key.to_vec(), val.to_vec()) }, None => panic!("Expected item 1"), }; let t2 = match it.next() { Some((key, val)) => { (key.to_vec(), val.to_vec()) }, None => 
panic!("Expected item 2"), }; let t3 = it.next(); // Keys are stored ordered, despite the fact we inserted unordered. assert_eq!((&t1.0), b"abc"); assert_eq!((&t1.1), b"123"); assert_eq!((&t2.0), b"foo"); assert_eq!((&t2.1), b"bar"); assert!(t3.is_none()); } #[test] fn test_iteration_alloc() { let mut db = new_temp_db("iteration"); db.put(b"foo", b"bar").unwrap(); db.put(b"abc", b"123").unwrap(); let items: Vec<(Vec<u8>, Vec<u8>)> = db.iter().unwrap().alloc().collect(); assert_eq!(items.len(), 2); assert_eq!((&items[0].0), b"abc"); assert_eq!((&items[0].1), b"123"); assert_eq!((&items[1].0), b"foo"); assert_eq!((&items[1].1), b"bar"); } #[test] fn test_comparator_create() { let _c = DBComparator::new("comparator-create", |a, b| { a.cmp(b) }); } #[test] fn test_comparator() { let c = DBComparator::new("foo", |a, b| { // Compare inverse b.cmp(a) }); let mut opts = DBOptions::new().expect("error creating options"); opts.set_comparator(c).set_create_if_missing(true); let tdir = match TempDir::new("comparator") { Ok(t) => t, Err(why) => panic!("Error creating temp dir: {:?}", why), }; let mut db = match DB::open_with_opts(tdir.path(), opts) { Ok(db) => db, Err(why) => panic!("Error creating DB: {:?}", why), }; // Insert into the DB some values. db.put(b"aaaa", b"foo").unwrap(); db.put(b"zzzz", b"bar").unwrap(); // Extract the values as an ordered vector. let items: Vec<(Vec<u8>, Vec<u8>)> = db.iter().unwrap().alloc().collect(); // Values should be in reverse order. 
assert_eq!(items.len(), 2); assert_eq!((&items[0].0), b"zzzz"); assert_eq!((&items[0].1), b"bar"); assert_eq!((&items[1].0), b"aaaa"); assert_eq!((&items[1].1), b"foo"); } #[test] fn test_snapshot() { let mut db = new_temp_db("snapshot"); db.put(b"foo", b"bar").unwrap(); db.put(b"abc", b"123").unwrap(); let snap = db.snapshot(); db.put(b"abc", b"456").unwrap(); let snap_val = match snap.get(b"abc") { Ok(val) => val.expect("Expected to find key 'abc'"), Err(why) => panic!("Error getting from DB: {:?}", why), }; assert_eq!(snap_val, b"123"); let val = match db.get(b"abc") { Ok(val) => val.expect("Expected to find key 'abc'"), Err(why) => panic!("Error getting from DB: {:?}", why), }; assert!(val == b"456"); let iter_items: Vec<(Vec<u8>, Vec<u8>)> = snap.iter().unwrap().alloc().collect(); let db_items: Vec<(Vec<u8>, Vec<u8>)> = db.iter().unwrap().alloc().collect(); assert_eq!(iter_items.len(), 2); assert_eq!(db_items.len(), 2); assert_eq!((&iter_items[0].0), b"abc"); assert_eq!((&iter_items[0].1), b"123"); assert_eq!((&iter_items[1].0), b"foo"); assert_eq!((&iter_items[1].1), b"bar"); assert_eq!((&db_items[0].0), b"abc"); assert_eq!((&db_items[0].1), b"456"); assert_eq!((&db_items[1].0), b"foo"); assert_eq!((&db_items[1].1), b"bar"); } }
use std::ops::{Index, IndexMut}; use std::fmt::Debug; use std::slice; use std::iter; const ORDER: usize = 4; // Must be at least 2 const BRANCHING_FACTOR: usize = ORDER - 1; #[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)] struct InnerIdx(usize); #[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)] struct LeafIdx(usize); #[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)] enum NodeIndex { Inner(InnerIdx), Leaf(LeafIdx), } impl From<InnerIdx> for NodeIndex { fn from(idx: InnerIdx) -> Self { NodeIndex::Inner(idx) } } impl From<LeafIdx> for NodeIndex { fn from(idx: LeafIdx) -> Self { NodeIndex::Leaf(idx) } } // None can safely act like its copy macro_rules! option_arr { ( $ty: ty; $size: ident ) => { unsafe { let mut data: [Option<$ty>; $size] = std::mem::uninitialized(); for d in &mut data { std::ptr::write(d, None); } data } }; } #[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] struct Inner<K> { parent: Option<InnerIdx>, keys: [Option<K>; BRANCHING_FACTOR], pointers: [Option<NodeIndex>; BRANCHING_FACTOR], right: Option<NodeIndex>, } impl<K: Ord + Copy + Debug> Inner<K> { fn new() -> Inner<K> { Inner { parent: None, keys: [None; BRANCHING_FACTOR], pointers: [None; BRANCHING_FACTOR], right: None, } } fn find_node_index(&self, key: K) -> NodeIndex { for (k, p) in self.keys.iter().zip(self.pointers.iter()) { match (*k, *p) { (Some(k), Some(p)) if key <= k => return p, (None, Some(p)) => panic!(format!("pointer: {:?} is missing its key", p)), (Some(k), None) => panic!(format!("key: {:?} is missing its pointer", k)), (_, _) => continue, } } self.right.expect("key is greater than all contained keys") } // splits the node and returns a new one if there was no room in the // node fn insert(&mut self, key: K, pointer: NodeIndex) -> Option<Inner<K>> { let mut insert_key = Some(key); let insert_ptr = Some(pointer); // TODO push key into right location for (k, p) in self.keys.iter_mut().zip(self.pointers.iter_mut()) { if 
k.is_none() { *k = insert_key; *p = insert_ptr; return None; } else if k >= &mut insert_key { *p = insert_ptr; *k = insert_key; } } match (insert_ptr, insert_key) { (Some(ptr), Some(key)) => { let mut new = self.split(insert_ptr); assert!(new.insert(key, ptr).is_none(), "Split should always have room for new key"); Some(new) } (Some(ptr), None) => panic!("{:?} has no associated data", ptr), (None, key @ Some(_)) => { self.keys.last_mut().map(|k| *k = key); None } (None, None) => None, } } fn split(&mut self, ptr: Option<NodeIndex>) -> Inner<K> { let mut new = Inner::new(); new.parent = self.parent; let len = self.keys.len() + 1; // Non-Lexical Lifetime's can't come fast enough let (_, ref mut old_keys) = self.keys.split_at_mut(len / 2); for (old, new) in old_keys.iter_mut().zip(new.keys.iter_mut()) { *new = old.take(); } let (_, ref mut old_ptrs) = self.pointers.split_at_mut(len / 2); for (old, new) in old_ptrs.iter_mut().zip(new.pointers.iter_mut()) { let old = old.take(); // Leaf most inner nodes have all keys pointing to the same leaf if let Some(NodeIndex::Leaf(_)) = ptr { *new = ptr; } else { *new = old; } } new } } #[derive(Default, Debug, PartialEq, PartialOrd, Ord, Eq, Hash)] struct Inners<K>(Vec<Inner<K>>); impl<K: Ord + Copy + Debug> Inners<K> { /// Finds the index of leaf node that most closely matches key fn find_leaf_index(&self, prev: InnerIdx, key: K) -> LeafIdx { if self.0.is_empty() { return LeafIdx(0); // to handle lazy alloc of inners } let mut prev = prev; loop { match self[prev].find_node_index(key) { NodeIndex::Inner(x) => prev = x, NodeIndex::Leaf(x) => return x, }; } } fn get(&self, InnerIdx(idx): InnerIdx) -> Option<&Inner<K>> { self.0.get(idx) } fn get_mut(&mut self, InnerIdx(idx): InnerIdx) -> Option<&mut Inner<K>> { self.0.get_mut(idx) } } impl<K: Copy + Ord + Debug> Index<InnerIdx> for Inners<K> { type Output = Inner<K>; fn index(&self, idx: InnerIdx) -> &Self::Output { self.get(idx).unwrap() } } impl<K: Copy + Ord + Debug> 
IndexMut<InnerIdx> for Inners<K> { fn index_mut(&mut self, idx: InnerIdx) -> &mut Self::Output { self.get_mut(idx).unwrap() } } #[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash)] struct Leaf<K, V> { parent: Option<InnerIdx>, keys: [Option<K>; BRANCHING_FACTOR], data: [Option<V>; BRANCHING_FACTOR], next: Option<LeafIdx>, } type LeafIter<'a, K: 'a, V: 'a> = iter::Zip<slice::Iter<'a, Option<K>>, slice::Iter<'a, Option<V>>>; type LeafIterMut<'a, K: 'a, V: 'a> = iter::Zip<slice::Iter<'a, Option<K>>, slice::IterMut<'a, Option<V>>>; enum LeafInsertResult<K, V> { Split(Leaf<K, V>), Inserted(Option<V>), } impl<K: Ord + Copy + Debug, V: Debug> Leaf<K, V> { fn new(next: Option<LeafIdx>) -> Leaf<K, V> { Leaf { parent: None, keys: [None; BRANCHING_FACTOR], data: option_arr![V; BRANCHING_FACTOR], next: next, } } fn get(&self, key: K) -> Option<&V> { self.keys .iter() .position(|k| &Some(key) == k) .and_then(|i| self.data.get(i).and_then(|v| v.as_ref())) } fn get_mut(&mut self, key: K) -> Option<&mut V> { self.keys .iter() .position(|k| &Some(key) == k) .and_then(move |i| self.data.get_mut(i).and_then(|v| v.as_mut())) } fn insert(&mut self, key: K, data: V, index: Option<LeafIdx>) -> LeafInsertResult<K, V> { let mut insert_key = Some(key); let mut insert_data = Some(data); for (k, p) in self.keys.iter_mut().zip(self.data.iter_mut()) { if k.is_none() { *k = insert_key; *p = insert_data; return LeafInsertResult::Inserted(None); } else if k == &mut insert_key { std::mem::swap(p, &mut insert_data); assert!(insert_data.is_some(), "Key missing data pair"); return LeafInsertResult::Inserted(insert_data); } } match (insert_key, insert_data) { (Some(key), Some(data)) => { let mut new = self.split(); self.next = index; match new.insert(key, data, None) { LeafInsertResult::Inserted(None) => LeafInsertResult::Split(new), _ => panic!("New leaf shouldn't have to split"), } } (None, None) => LeafInsertResult::Inserted(None), (_, _) => panic!("Incomplete key data pair"), } } fn split(&mut 
self) -> Leaf<K, V> { let mut new = Leaf::new(self.next); new.parent = self.parent; let len = self.keys.len() + 1; // Non-Lexical Lifetime's can't come fast enough let (_, ref mut old_keys) = self.keys.split_at_mut(len / 2); for (old, new) in old_keys.iter_mut().zip(new.keys.iter_mut()) { *new = old.take(); } let (_, ref mut old_data) = self.data.split_at_mut(len / 2); for (old, new) in old_data.iter_mut().zip(new.data.iter_mut()) { *new = old.take(); } new } fn iter(&self) -> LeafIter<K, V> { self.keys.iter().zip(self.data.iter()) } fn iter_mut(&mut self) -> LeafIterMut<K, V> { self.keys.iter().zip(self.data.iter_mut()) } } #[derive(Debug, Hash, Eq, PartialEq, Ord, PartialOrd)] struct Leaves<K, V>(Vec<Leaf<K, V>>); enum LeavesInsertResult<V> { Inserted(Option<V>), Split(Option<InnerIdx>, LeafIdx), } impl<K: Ord + Copy + Debug, V: Debug> Leaves<K, V> { // if the leaf is full the key of the inserted value must be inserted into the parent // returns (parent, new node idx, replaced) fn insert(&mut self, key: K, value: V, target: LeafIdx) -> LeavesInsertResult<V> { if self.0.is_empty() { assert_eq!(target, LeafIdx(0), "should only be empty and receive a target 0"); let mut leaf = Leaf::new(None); match leaf.insert(key, value, None) { LeafInsertResult::Inserted(_) => (), _ => panic!("New leaf shouldn't have to split"), }; // let parent = leaf.parent; self.0.push(leaf); return LeavesInsertResult::Inserted(None); } let last = LeafIdx(self.0.len()); match self[target].insert(key, value, Some(last)) { LeafInsertResult::Inserted(r) => LeavesInsertResult::Inserted(r), LeafInsertResult::Split(new) => { let parent = new.parent; self.0.push(new); LeavesInsertResult::Split(parent, last) } } } fn get(&self, LeafIdx(idx): LeafIdx) -> Option<&Leaf<K, V>> { self.0.get(idx) } fn get_mut(&mut self, LeafIdx(idx): LeafIdx) -> Option<&mut Leaf<K, V>> { self.0.get_mut(idx) } fn iter(&self, start: LeafIdx) -> LeavesIter<K, V> { LeavesIter { leaves: &self.0, current: Some(start), } } fn 
iter_mut(&mut self, start: LeafIdx) -> LeavesIterMut<K, V> {
        LeavesIterMut {
            leaves: &mut self.0,
            current: Some(start),
        }
    }
}

/// Immutable iterator over a chain of leaves, following each leaf's `next`
/// link starting from `current`. Terminates when a leaf has no successor.
struct LeavesIter<'a, K: 'a, V: 'a> {
    leaves: &'a [Leaf<K, V>],
    current: Option<LeafIdx>,
}

impl<'a, K, V> Iterator for LeavesIter<'a, K, V> {
    type Item = &'a Leaf<K, V>;

    fn next(&mut self) -> Option<Self::Item> {
        if let Some(i) = self.current {
            let next = &self.leaves[i.0];
            // Advance to the sibling leaf so the iteration terminates.
            self.current = next.next;
            Some(next)
        } else {
            None
        }
    }
}

/// Mutable counterpart of `LeavesIter`.
struct LeavesIterMut<'a, K: 'a, V: 'a> {
    leaves: &'a mut [Leaf<K, V>],
    current: Option<LeafIdx>,
}

impl<'a, K, V> Iterator for LeavesIterMut<'a, K, V> {
    type Item = &'a mut Leaf<K, V>;

    fn next(&mut self) -> Option<Self::Item> {
        if let Some(LeafIdx(i)) = self.current {
            unsafe {
                let leaf_ptr = self.leaves.as_mut_ptr();
                if i < self.leaves.len() {
                    // SAFETY: `i < self.leaves.len()` was just checked, so the
                    // offset pointer is in bounds and non-null.
                    // NOTE(review): soundness additionally relies on the `next`
                    // chain never revisiting a leaf (no aliasing &mut) — TODO
                    // confirm the tree maintains that invariant.
                    let leaf = leaf_ptr.offset(i as isize).as_mut().expect("no null elems");
                    // BUG FIX: the original never updated `self.current`, so
                    // this iterator yielded the same leaf forever — an infinite
                    // loop for any consumer (e.g. `BPlusTree::iter_mut`).
                    // Follow the leaf's `next` link, mirroring
                    // `LeavesIter::next` above.
                    self.current = leaf.next;
                    Some(leaf)
                } else {
                    None
                }
            }
        } else {
            None
        }
    }
}

impl<K: Copy + Ord + Debug, V: Debug> Index<LeafIdx> for Leaves<K, V> {
    type Output = Leaf<K, V>;

    fn index(&self, idx: LeafIdx) -> &Self::Output {
        self.get(idx).unwrap()
    }
}

impl<K: Copy + Ord + Debug, V: Debug> IndexMut<LeafIdx> for Leaves<K, V> {
    fn index_mut(&mut self, idx: LeafIdx) -> &mut Self::Output {
        self.get_mut(idx).unwrap()
    }
}

/// A B+ tree: inner nodes hold only keys/pointers, leaves hold the data and
/// are chained via `next` links for in-order traversal.
#[derive(Debug, Hash, Ord, PartialOrd, Eq, PartialEq)]
pub struct BPlusTree<K, V> {
    inners: Inners<K>,
    leaves: Leaves<K, V>,
    root: NodeIndex,
}

impl<K: Copy + Ord + Debug, V: Debug> Default for BPlusTree<K, V> {
    fn default() -> BPlusTree<K, V> {
        Self::new()
    }
}

impl<K: Copy + Ord + Debug, V: Debug> BPlusTree<K, V> {
    /// Creates an empty tree; node storage is allocated lazily on first insert.
    pub fn new() -> BPlusTree<K, V> {
        BPlusTree {
            root: NodeIndex::Leaf(LeafIdx(0)),
            inners: Inners(Vec::new()),
            leaves: Leaves(Vec::new()),
        }
    }

    // Walks from the root down to the leaf that should contain `key`.
    fn find_leaf_index(&self, key: K) -> LeafIdx {
        match self.root {
            NodeIndex::Leaf(x) => x,
            NodeIndex::Inner(x) => self.inners.find_leaf_index(x, key),
        }
    }

    /// Returns a reference to the value stored for `key`, if present.
    pub fn get(&self, key: K) -> Option<&V> {
        self.leaves[self.find_leaf_index(key)].get(key)
    }

    /// Returns a mutable reference to the value stored for `key`, if present.
    pub fn get_mut(&mut self, key: K) ->
Option<&mut V> { let idx = self.find_leaf_index(key); self.leaves[idx].get_mut(key) } pub fn clear(&mut self) { self.inners.0.clear(); self.leaves.0.clear(); } pub fn contains_key(&self, key: K) -> bool { // TODO can short circuit -- keys can be duplicated self.get(key).is_some() } pub fn insert(&mut self, key: K, value: V) -> Option<V> { let leaf_idx = self.find_leaf_index(key); let (mut parent, new_node_idx) = match self.leaves.insert(key, value, leaf_idx) { LeavesInsertResult::Inserted(r) => return r, LeavesInsertResult::Split(p, n) => (p, n), }; let mut last = NodeIndex::Leaf(leaf_idx); let mut new_node_idx = NodeIndex::Leaf(new_node_idx); while let Some(p) = parent { let new = self.inners[p].insert(key, new_node_idx); if let Some(new) = new { // we had a split, shuffle to the parent last = new_node_idx; new_node_idx = NodeIndex::Inner(InnerIdx(self.inners.0.len())); parent = new.parent; self.inners.0.push(new); } else { // it fit, we are done inserting return None; } } // single leaf is the root if self.leaves.0.len() == 1 { return None; } let mut new_root = Inner::new(); new_root.right = Some(new_node_idx); self.inners.0.push(new_root); let new_root_idx = InnerIdx(self.inners.0.len() - 1); self.root = NodeIndex::Inner(new_root_idx); match (new_node_idx, last) { (NodeIndex::Leaf(new), NodeIndex::Leaf(last)) => { self.leaves[last].parent = Some(new_root_idx); self.leaves[new].parent = Some(new_root_idx); } (NodeIndex::Inner(new), NodeIndex::Inner(last)) => { self.inners[last].parent = Some(new_root_idx); self.inners[new].parent = Some(new_root_idx); } (_, _) => panic!("mismatched node index types"), } None } fn leftmost_leaf(&self) -> LeafIdx { let mut first_leaf = self.root; while let NodeIndex::Inner(x) = first_leaf { first_leaf = self.inners[x].pointers[0].expect("should always have at least one key"); } if let NodeIndex::Leaf(x) = first_leaf { x } else { unreachable!() } } pub fn iter(&self) -> Iter<K, V> { 
Iter(self.leaves.iter(self.leftmost_leaf()).flat_map(Leaf::iter)) } pub fn iter_mut(&mut self) -> IterMut<K, V> { let start = self.leftmost_leaf(); IterMut(self.leaves.iter_mut(start).flat_map(Leaf::iter_mut)) } } impl<K: Copy + Ord + Debug, V: Debug> Index<K> for BPlusTree<K, V> { type Output = V; fn index(&self, key: K) -> &Self::Output { self.get(key).unwrap() } } impl<K: Copy + Ord + Debug, V: Debug> IndexMut<K> for BPlusTree<K, V> { fn index_mut(&mut self, key: K) -> &mut Self::Output { self.get_mut(key).unwrap() } } type PageIter<'a, K, V> = iter::FlatMap<LeavesIter<'a, K, V>, LeafIter<'a, K, V>, fn(&'a Leaf<K, V>) -> LeafIter<'a, K, V>>; // impl trait methods would be killer here pub struct Iter<'a, K: 'a, V: 'a>(PageIter<'a, K, V>); impl<'a, K, V> Iterator for Iter<'a, K, V> where K: 'a + Copy + Ord + Debug, V: 'a + Debug { type Item = (&'a K, &'a V); fn next(&mut self) -> Option<Self::Item> { self.0.next().and_then(|(k, d)| { match (k, d) { (&Some(ref k), &Some(ref d)) => Some((k, d)), (_, _) => None, } }) } } type PageIterMut<'a, K, V> = iter::FlatMap<LeavesIterMut<'a, K, V>, LeafIterMut<'a, K, V>, fn(&'a mut Leaf<K, V>) -> LeafIterMut<'a, K, V>>; pub struct IterMut<'a, K: 'a, V: 'a>(PageIterMut<'a, K, V>); impl<'a, K, V> Iterator for IterMut<'a, K, V> where K: 'a + Copy + Ord + Debug, V: 'a + Debug { type Item = (&'a K, &'a mut V); fn next(&mut self) -> Option<Self::Item> { self.0.next().and_then(|(k, d)| { match (k, d) { (&Some(ref k), &mut Some(ref mut d)) => Some((k, d)), (_, _) => None, } }) } } impl<K, V> std::iter::FromIterator<(K, V)> for BPlusTree<K, V> where K: Copy + Ord + Debug, V: Debug { fn from_iter<I: IntoIterator<Item = (K, V)>>(iter: I) -> Self { let mut tree = BPlusTree::new(); for (k, v) in iter { tree.insert(k, v); } tree } } impl<K, V> Extend<(K, V)> for BPlusTree<K, V> where K: Copy + Ord + Debug, V: Debug { fn extend<T>(&mut self, iter: T) where T: IntoIterator<Item = (K, V)> { for (k, v) in iter { self.insert(k, v); } } } 
#[cfg(test)]
mod test {
    use super::*;

    /// Exercises the basic insert/lookup/overwrite cycle with enough keys
    /// (10, against ORDER = 4) to push the tree past a single leaf.
    #[test]
    fn smoke() {
        let mut tree = BPlusTree::new();
        for key in 0..10 {
            // A freshly inserted key must be readable back through Index.
            tree.insert(key, key);
            assert_eq!(tree[key], key);
            // Overwriting the same key hands back the previous value...
            assert_eq!(tree.insert(key, key + 1), Some(key));
            // ...and subsequent lookups observe the replacement.
            assert_eq!(tree[key], key + 1);
        }
    }
}
/// Returns the product of `a` and `b`.
///
/// Uses the `i32` `*` operator directly, so an overflowing product panics in
/// debug builds and wraps in release builds.
fn multiply(a: i32, b: i32) -> i32 {
    // Idiomatic Rust: the final expression is the return value; an explicit
    // `return` on the last line is redundant.
    a * b
}
#[cfg(feature = "test-util")] use std::sync::atomic::AtomicBool; use std::{ any::TypeId, collections::HashMap, fmt::Debug, sync::{ atomic::{AtomicU32, Ordering}, Arc, }, }; use async_lock::Mutex; use async_trait::async_trait; use bonsaidb_core::{ connection::{Database, ServerConnection}, custodian_password::{ ClientConfig, ClientFile, ClientLogin, LoginFinalization, LoginRequest, LoginResponse, }, custom_api::CustomApi, networking::{self, Payload, Request, Response, ServerRequest, ServerResponse}, permissions::Permissions, schema::{NamedReference, Schema, SchemaName, Schematic}, PASSWORD_CONFIG, }; use flume::Sender; #[cfg(not(target_arch = "wasm32"))] use tokio::task::JoinHandle; use url::Url; pub use self::remote_database::RemoteDatabase; #[cfg(feature = "pubsub")] pub use self::remote_database::RemoteSubscriber; use crate::{error::Error, Builder}; #[cfg(not(target_arch = "wasm32"))] mod quic_worker; mod remote_database; #[cfg(all(feature = "websockets", not(target_arch = "wasm32")))] mod tungstenite_worker; #[cfg(all(feature = "websockets", target_arch = "wasm32"))] mod wasm_websocket_worker; #[cfg(feature = "pubsub")] type SubscriberMap = Arc<Mutex<HashMap<u64, flume::Sender<Arc<Message>>>>>; #[cfg(feature = "pubsub")] use bonsaidb_core::{circulate::Message, networking::DatabaseRequest}; #[cfg(all(feature = "websockets", not(target_arch = "wasm32")))] pub type WebSocketError = tokio_tungstenite::tungstenite::Error; #[cfg(all(feature = "websockets", target_arch = "wasm32"))] pub type WebSocketError = wasm_websocket_worker::WebSocketError; /// Client for connecting to a `BonsaiDb` server. 
#[derive(Debug)] pub struct Client<A: CustomApi = ()> { pub(crate) data: Arc<Data<A>>, } impl<A: CustomApi> Clone for Client<A> { fn clone(&self) -> Self { Self { data: self.data.clone(), } } } #[allow(type_alias_bounds)] // Causes compilation errors without it type BackendPendingRequest<A: CustomApi> = PendingRequest<<A as CustomApi>::Request, <A as CustomApi>::Response>; #[derive(Debug)] pub struct Data<A: CustomApi> { request_sender: Sender<BackendPendingRequest<A>>, #[cfg(not(target_arch = "wasm32"))] worker: CancellableHandle<Result<(), Error>>, effective_permissions: Mutex<Option<Permissions>>, schemas: Mutex<HashMap<TypeId, Arc<Schematic>>>, request_id: AtomicU32, #[cfg(feature = "pubsub")] subscribers: SubscriberMap, #[cfg(feature = "test-util")] background_task_running: Arc<AtomicBool>, } impl Client<()> { /// Initialize a client connecting to `url`. This client can be shared by /// cloning it. All requests are done asynchronously over the same /// connection. /// /// If the client has an error connecting, the first request made will /// present that error. If the client disconnects while processing requests, /// all requests being processed will exit and return /// [`Error::Disconnected`]. The client will automatically try reconnecting. /// /// The goal of this design of this reconnection strategy is to make it /// easier to build resilliant apps. By allowing existing Client instances /// to recover and reconnect, each component of the apps built can adopt a /// "retry-to-recover" design, or "abort-and-fail" depending on how critical /// the database is to operation. pub async fn new(url: Url) -> Result<Self, Error> { Self::new_from_parts( url, #[cfg(not(target_arch = "wasm32"))] None, None, ) .await } /// Returns a builder for a new client connecting to `url`. 
pub fn build(url: Url) -> Builder<()> {
        Builder::new(url)
    }
}

impl<A: CustomApi> Client<A> {
    /// Initialize a client connecting to `url` with `certificate` being used to
    /// validate and encrypt the connection. This client can be shared by
    /// cloning it. All requests are done asynchronously over the same
    /// connection.
    ///
    /// If the client has an error connecting, the first request made will
    /// present that error. If the client disconnects while processing requests,
    /// all requests being processed will exit and return
    /// [`Error::Disconnected`]. The client will automatically try reconnecting.
    ///
    /// The goal of this design of this reconnection strategy is to make it
    /// easier to build resilient apps. By allowing existing Client instances
    /// to recover and reconnect, each component of the apps built can adopt a
    /// "retry-to-recover" design, or "abort-and-fail" depending on how critical
    /// the database is to operation.
    // NOTE(review): the doc comment above reads like public-constructor docs,
    // yet this function is `pub(crate)` — confirm whether it belongs on the
    // public builder entry point instead.
    pub(crate) async fn new_from_parts(
        url: Url,
        custom_api_callback: Option<Arc<dyn CustomApiCallback<A::Response>>>,
        #[cfg(not(target_arch = "wasm32"))] certificate: Option<fabruic::Certificate>,
    ) -> Result<Self, Error> {
        // Transport is selected by URL scheme: QUIC ("bonsaidb") on native
        // targets, WebSockets ("ws"/"wss") when that feature is enabled.
        match url.scheme() {
            #[cfg(not(target_arch = "wasm32"))]
            "bonsaidb" => Ok(Self::new_bonsai_client(
                url,
                certificate,
                custom_api_callback,
            )),
            #[cfg(feature = "websockets")]
            "wss" | "ws" => Self::new_websocket_client(url, custom_api_callback).await,
            other => {
                return Err(Error::InvalidUrl(format!("unsupported scheme {}", other)));
            }
        }
    }

    /// Spawns the QUIC worker loop and returns a client wired to it through an
    /// unbounded request channel.
    #[cfg(not(target_arch = "wasm32"))]
    fn new_bonsai_client(
        url: Url,
        certificate: Option<fabruic::Certificate>,
        custom_api_callback: Option<Arc<dyn CustomApiCallback<A::Response>>>,
    ) -> Self {
        let (request_sender, request_receiver) = flume::unbounded();
        #[cfg(feature = "pubsub")]
        let subscribers = SubscriberMap::default();
        // The worker owns the connection and reconnects on failure.
        let worker = tokio::task::spawn(quic_worker::reconnecting_client_loop(
            url,
            certificate,
            request_receiver,
            custom_api_callback,
            #[cfg(feature = "pubsub")]
            subscribers.clone(),
        ));

        #[cfg(feature = "test-util")]
        let background_task_running = Arc::new(AtomicBool::new(true));

        Self {
            data: Arc::new(Data {
                request_sender,
                // Dropping this handle aborts the worker task (see the Drop
                // impl on CancellableHandle below).
                worker: CancellableHandle {
                    worker,
                    #[cfg(feature = "test-util")]
                    background_task_running: background_task_running.clone(),
                },
                schemas: Mutex::default(),
                request_id: AtomicU32::default(),
                effective_permissions: Mutex::default(),
                #[cfg(feature = "pubsub")]
                subscribers,
                #[cfg(feature = "test-util")]
                background_task_running,
            }),
        }
    }

    /// Native (tokio) WebSocket variant of the constructor; mirrors
    /// `new_bonsai_client` but without a certificate.
    #[cfg(all(feature = "websockets", not(target_arch = "wasm32")))]
    async fn new_websocket_client(
        url: Url,
        custom_api_callback: Option<Arc<dyn CustomApiCallback<A::Response>>>,
    ) -> Result<Self, Error> {
        let (request_sender, request_receiver) = flume::unbounded();
        #[cfg(feature = "pubsub")]
        let subscribers = SubscriberMap::default();

        let worker = tokio::task::spawn(tungstenite_worker::reconnecting_client_loop(
            url,
            request_receiver,
            custom_api_callback,
            #[cfg(feature = "pubsub")]
            subscribers.clone(),
        ));

        #[cfg(feature = "test-util")]
        let background_task_running = Arc::new(AtomicBool::new(true));

        let client = Self {
            data: Arc::new(Data {
                request_sender,
                #[cfg(not(target_arch = "wasm32"))]
                worker: CancellableHandle {
                    worker,
                    #[cfg(feature = "test-util")]
                    background_task_running: background_task_running.clone(),
                },
                schemas: Mutex::default(),
                request_id: AtomicU32::default(),
                effective_permissions: Mutex::default(),
                #[cfg(feature = "pubsub")]
                subscribers,
                #[cfg(feature = "test-util")]
                background_task_running,
            }),
        };

        Ok(client)
    }

    /// Browser (wasm) WebSocket variant: the worker is spawned onto the JS
    /// event loop instead of a tokio task, so there is no join handle.
    #[cfg(all(feature = "websockets", target_arch = "wasm32"))]
    async fn new_websocket_client(
        url: Url,
        custom_api_callback: Option<Arc<dyn CustomApiCallback<A::Response>>>,
    ) -> Result<Self, Error> {
        let (request_sender, request_receiver) = flume::unbounded();
        #[cfg(feature = "pubsub")]
        let subscribers = SubscriberMap::default();

        wasm_websocket_worker::spawn_client(
            Arc::new(url),
            request_receiver,
            custom_api_callback.clone(),
            #[cfg(feature = "pubsub")]
            subscribers.clone(),
        );

        #[cfg(feature = "test-util")]
        let background_task_running = Arc::new(AtomicBool::new(true));

        let client = Self {
            data: Arc::new(Data {
                request_sender,
                // NOTE(review): this initializer names `worker`, which does not
                // exist in this function; it only compiles because the field is
                // cfg'd out on wasm32. Confirm this is intentional.
                #[cfg(not(target_arch = "wasm32"))]
                worker: CancellableHandle {
                    worker,
                    #[cfg(feature = "test-util")]
                    background_task_running: background_task_running.clone(),
                },
                schemas: Mutex::default(),
                request_id: AtomicU32::default(),
                effective_permissions: Mutex::default(),
                #[cfg(feature = "pubsub")]
                subscribers,
                #[cfg(feature = "test-util")]
                background_task_running,
            }),
        };

        Ok(client)
    }

    /// Returns a structure representing a remote database. No validations are
    /// done when this method is executed. The server will validate the schema
    /// and database name when a [`Connection`](bonsaidb_core::connection::Connection) function is called.
    pub async fn database<DB: Schema>(&self, name: &str) -> Result<RemoteDatabase<DB, A>, Error> {
        // Schematics are cached per schema type so repeated opens are cheap.
        let mut schemas = self.data.schemas.lock().await;
        let type_id = TypeId::of::<DB>();
        let schematic = if let Some(schematic) = schemas.get(&type_id) {
            schematic.clone()
        } else {
            let schematic = Arc::new(DB::schematic()?);
            schemas.insert(type_id, schematic.clone());
            schematic
        };
        Ok(RemoteDatabase::new(
            self.clone(),
            name.to_string(),
            schematic,
        ))
    }

    /// Logs in as a user with a password, using `custodian-password` to login using `OPAQUE-PAKE`.
    pub async fn login_with_password(
        &self,
        username: &str,
        login_request: LoginRequest,
    ) -> Result<LoginResponse, bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::LoginWithPassword {
                username: username.to_string(),
                login_request,
            }))
            .await?
        {
            Response::Server(ServerResponse::PasswordLoginResponse { response }) => Ok(*response),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    /// Finishes logging in by completing the `OPAQUE-PAKE` exchange started by
    /// [`login_with_password`](Self::login_with_password). On success the
    /// server-granted permissions are stored on this client.
    pub async fn finish_login_with_password(
        &self,
        login_finalization: LoginFinalization,
    ) -> Result<(), bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::FinishPasswordLogin {
                login_finalization,
            }))
            .await?
        {
            Response::Server(ServerResponse::LoggedIn { permissions }) => {
                let mut effective_permissions = self.data.effective_permissions.lock().await;
                *effective_permissions = Some(permissions);
                Ok(())
            }
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    /// Authenticates as a user with a provided password. The password provided
    /// will never leave the machine that is calling this function. Internally
    /// uses `login_with_password` and `finish_login_with_password` in
    /// conjunction with `custodian-password`.
    pub async fn login_with_password_str(
        &self,
        username: &str,
        password: &str,
        previous_file: Option<ClientFile>,
    ) -> Result<ClientFile, bonsaidb_core::Error> {
        let (login, request) = ClientLogin::login(
            &ClientConfig::new(PASSWORD_CONFIG, None)?,
            previous_file,
            password,
        )?;
        let response = self.login_with_password(username, request).await?;
        let (new_file, login_finalization, _export_key) = login.finish(response)?;
        self.finish_login_with_password(login_finalization).await?;
        Ok(new_file)
    }

    /// Sends `request` to the background worker and awaits its response.
    /// Each request gets a fresh id from an atomic counter so the worker can
    /// route the matching response back through `result_sender`.
    async fn send_request(
        &self,
        request: Request<<A as CustomApi>::Request>,
    ) -> Result<Response<<A as CustomApi>::Response>, Error> {
        let (result_sender, result_receiver) = flume::bounded(1);
        let id = self.data.request_id.fetch_add(1, Ordering::SeqCst);
        self.data.request_sender.send(PendingRequest {
            request: Payload {
                id: Some(id),
                wrapped: request,
            },
            responder: result_sender.clone(),
        })?;

        result_receiver.recv_async().await?
    }

    /// Sends an api `request`.
    pub async fn send_api_request(
        &self,
        request: <A as CustomApi>::Request,
    ) -> Result<<A as CustomApi>::Response, Error> {
        match self.send_request(Request::Api(request)).await? {
            Response::Api(response) => Ok(response),
            Response::Error(err) => Err(Error::Core(err)),
            other => Err(Error::Network(networking::Error::UnexpectedResponse(
                format!("{:?}", other),
            ))),
        }
    }

    /// Returns the current effective permissions for the client. Returns None
    /// if unauthenticated.
    pub async fn effective_permissions(&self) -> Option<Permissions> {
        let effective_permissions = self.data.effective_permissions.lock().await;
        effective_permissions.clone()
    }

    #[cfg(feature = "test-util")]
    #[doc(hidden)]
    #[must_use]
    pub fn background_task_running(&self) -> Arc<AtomicBool> {
        self.data.background_task_running.clone()
    }

    /// Registers `sender` to receive pub/sub messages for subscriber `id`.
    #[cfg(feature = "pubsub")]
    pub(crate) async fn register_subscriber(&self, id: u64, sender: flume::Sender<Arc<Message>>) {
        let mut subscribers = self.data.subscribers.lock().await;
        subscribers.insert(id, sender);
    }

    /// Unregisters subscriber `id`, best-effort notifying the server first.
    /// The server call's result is intentionally discarded: local cleanup must
    /// happen even if the connection is down.
    #[cfg(feature = "pubsub")]
    pub(crate) async fn unregister_subscriber(&self, database: String, id: u64) {
        drop(
            self.send_request(Request::Database {
                database,
                request: DatabaseRequest::UnregisterSubscriber { subscriber_id: id },
            })
            .await,
        );
        let mut subscribers = self.data.subscribers.lock().await;
        subscribers.remove(&id);
    }
}

// Server-administration API: each method wraps one ServerRequest round-trip
// and unwraps the expected ServerResponse variant.
#[async_trait]
impl ServerConnection for Client {
    async fn create_database_with_schema(
        &self,
        name: &str,
        schema: SchemaName,
    ) -> Result<(), bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::CreateDatabase(Database {
                name: name.to_string(),
                schema,
            })))
            .await?
        {
            Response::Server(ServerResponse::DatabaseCreated { .. }) => Ok(()),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    async fn delete_database(&self, name: &str) -> Result<(), bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::DeleteDatabase {
                name: name.to_string(),
            }))
            .await?
        {
            Response::Server(ServerResponse::DatabaseDeleted { .. }) => Ok(()),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    async fn list_databases(&self) -> Result<Vec<Database>, bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::ListDatabases))
            .await?
        {
            Response::Server(ServerResponse::Databases(databases)) => Ok(databases),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    async fn list_available_schemas(&self) -> Result<Vec<SchemaName>, bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::ListAvailableSchemas))
            .await?
        {
            Response::Server(ServerResponse::AvailableSchemas(schemas)) => Ok(schemas),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    async fn create_user(&self, username: &str) -> Result<u64, bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::CreateUser {
                username: username.to_string(),
            }))
            .await?
        {
            Response::Server(ServerResponse::UserCreated { id }) => Ok(id),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    async fn set_user_password<'user, U: Into<NamedReference<'user>> + Send + Sync>(
        &self,
        user: U,
        password_request: bonsaidb_core::custodian_password::RegistrationRequest,
    ) -> Result<bonsaidb_core::custodian_password::RegistrationResponse, bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::SetPassword {
                user: user.into().into_owned(),
                password_request,
            }))
            .await?
        {
            // NOTE(review): the *initial* SetPassword request is answered with
            // a `FinishSetPassword` variant (and the field name carries a
            // "reponse" typo) — confirm this matches the protocol definition.
            Response::Server(ServerResponse::FinishSetPassword { password_reponse }) => {
                Ok(*password_reponse)
            }
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    async fn finish_set_user_password<'user, U: Into<NamedReference<'user>> + Send + Sync>(
        &self,
        user: U,
        password_finalization: bonsaidb_core::custodian_password::RegistrationFinalization,
    ) -> Result<(), bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::FinishSetPassword {
                user: user.into().into_owned(),
                password_finalization,
            }))
            .await?
        {
            Response::Ok => Ok(()),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    async fn add_permission_group_to_user<
        'user,
        'group,
        U: Into<NamedReference<'user>> + Send + Sync,
        G: Into<NamedReference<'group>> + Send + Sync,
    >(
        &self,
        user: U,
        permission_group: G,
    ) -> Result<(), bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(
                ServerRequest::AlterUserPermissionGroupMembership {
                    user: user.into().into_owned(),
                    group: permission_group.into().into_owned(),
                    should_be_member: true,
                },
            ))
            .await?
        {
            Response::Ok => Ok(()),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    async fn remove_permission_group_from_user<
        'user,
        'group,
        U: Into<NamedReference<'user>> + Send + Sync,
        G: Into<NamedReference<'group>> + Send + Sync,
    >(
        &self,
        user: U,
        permission_group: G,
    ) -> Result<(), bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(
                ServerRequest::AlterUserPermissionGroupMembership {
                    user: user.into().into_owned(),
                    group: permission_group.into().into_owned(),
                    should_be_member: false,
                },
            ))
            .await?
        {
            Response::Ok => Ok(()),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    async fn add_role_to_user<
        'user,
        'group,
        U: Into<NamedReference<'user>> + Send + Sync,
        G: Into<NamedReference<'group>> + Send + Sync,
    >(
        &self,
        user: U,
        role: G,
    ) -> Result<(), bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::AlterUserRoleMembership {
                user: user.into().into_owned(),
                role: role.into().into_owned(),
                should_be_member: true,
            }))
            .await?
        {
            Response::Ok => Ok(()),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }

    async fn remove_role_from_user<
        'user,
        'group,
        U: Into<NamedReference<'user>> + Send + Sync,
        G: Into<NamedReference<'group>> + Send + Sync,
    >(
        &self,
        user: U,
        role: G,
    ) -> Result<(), bonsaidb_core::Error> {
        match self
            .send_request(Request::Server(ServerRequest::AlterUserRoleMembership {
                user: user.into().into_owned(),
                role: role.into().into_owned(),
                should_be_member: false,
            }))
            .await?
        {
            Response::Ok => Ok(()),
            Response::Error(err) => Err(err),
            other => Err(bonsaidb_core::Error::Networking(
                networking::Error::UnexpectedResponse(format!("{:?}", other)),
            )),
        }
    }
}

// Requests awaiting responses, keyed by the request id assigned in
// `send_request`.
type OutstandingRequestMap<R, O> = HashMap<u32, PendingRequest<R, O>>;
type OutstandingRequestMapHandle<R, O> = Arc<Mutex<OutstandingRequestMap<R, O>>>;

/// A request queued for the worker, paired with the channel on which its
/// response (or error) must be delivered.
#[derive(Debug)]
pub struct PendingRequest<R, O> {
    request: Payload<Request<R>>,
    responder: Sender<Result<Response<O>, Error>>,
}

/// Wraps the worker's join handle so that dropping the last client aborts the
/// background task.
#[cfg(not(target_arch = "wasm32"))]
#[derive(Debug)]
struct CancellableHandle<T> {
    worker: JoinHandle<T>,
    #[cfg(feature = "test-util")]
    background_task_running: Arc<AtomicBool>,
}

#[cfg(not(target_arch = "wasm32"))]
impl<T> Drop for CancellableHandle<T> {
    fn drop(&mut self) {
        self.worker.abort();
        #[cfg(feature = "test-util")]
        self.background_task_running.store(false, Ordering::Release);
    }
}

/// Routes a response payload from the transport worker: payloads with an id
/// answer an outstanding request; id-less payloads are out-of-band (custom API
/// pushes or pub/sub messages).
async fn process_response_payload<R: Send + Sync + 'static, O: Send + Sync + 'static>(
    payload: Payload<Response<O>>,
    outstanding_requests: &OutstandingRequestMapHandle<R, O>,
    custom_api_callback: Option<&Arc<dyn CustomApiCallback<O>>>,
    #[cfg(feature = "pubsub")] subscribers: &SubscriberMap,
) {
    if let Some(payload_id) = payload.id {
        // Give the callback a peek before handing the response to the caller.
        if let Response::Api(response) = &payload.wrapped {
            if let Some(custom_api_callback) = custom_api_callback {
                custom_api_callback
                    .request_response_received(response)
                    .await;
            }
        }

        let request = {
            let mut outstanding_requests = outstanding_requests.lock().await;
            outstanding_requests
                .remove(&payload_id)
                .expect("missing responder")
        };
        drop(request.responder.send(Ok(payload.wrapped)));
    } else {
        match payload.wrapped {
            Response::Api(response) => {
                if let Some(custom_api_callback) = custom_api_callback {
                    custom_api_callback.response_received(response).await;
                }
            }
            #[cfg(feature = "pubsub")]
            Response::Database(bonsaidb_core::networking::DatabaseResponse::MessageReceived {
                subscriber_id,
                topic,
                payload,
            }) => {
                let mut subscribers = subscribers.lock().await;
                if let Some(sender) = subscribers.get(&subscriber_id) {
                    // A closed receiver means the subscriber is gone; prune it.
                    if sender
                        .send(std::sync::Arc::new(bonsaidb_core::circulate::Message {
                            topic,
                            payload,
                        }))
                        .is_err()
                    {
                        subscribers.remove(&subscriber_id);
                    }
                }
            }
            _ => {
                log::error!("unexpected adhoc response");
            }
        }
    }
}

/// A handler of [`CustomApi`] responses.
#[async_trait]
pub trait CustomApiCallback<T: Send + Sync>: Send + Sync + 'static {
    /// An out-of-band `response` was received. This happens when the server
    /// sends a response that isn't in response to a request.
    async fn response_received(&self, response: T);

    /// A response was received. Unlike in `response_received` this response
    /// will be returned to the original requestor. This is invoked before the
    /// requestor receives the response.
    #[allow(unused_variables)]
    async fn request_response_received(&self, response: &T) {
        // This is provided in case you'd like to see a response always, even if
        // it is also being handled by the code that made the request.
    }
}

// Convenience: any matching closure can serve as a callback.
#[async_trait]
impl<F, T> CustomApiCallback<T> for F
where
    F: Fn(T) + Send + Sync + 'static,
    T: Send + Sync + 'static,
{
    async fn response_received(&self, response: T) {
        self(response);
    }
}

// No-op callback for clients that ignore out-of-band responses.
#[async_trait]
impl<T> CustomApiCallback<T> for ()
where
    T: Send + Sync + 'static,
{
    async fn response_received(&self, _response: T) {}
}
use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::process;

/// Decodes a boarding pass (e.g. "FBFBBFFRLR") into its seat ID.
///
/// The ten characters are a binary number: 'B'/'R' are 1 bits, 'F'/'L' are 0
/// bits, and `row * 8 + col` equals the value of all ten bits read in order —
/// so a single fold replaces the separate row/col string parses.
///
/// Panics on malformed input (shorter than 10 chars or unexpected letters),
/// matching the original `unwrap` behavior but with a clearer message.
fn seat_val(line: &str) -> u32 {
    line[..10].chars().fold(0, |id, c| {
        let bit = match c {
            'F' | 'L' => 0,
            'B' | 'R' => 1,
            other => panic!("invalid boarding-pass character: {}", other),
        };
        (id << 1) | bit
    })
}

/// Reads boarding passes (one per line) from the file named by the single CLI
/// argument and prints every pair of occupied seats with exactly one empty
/// seat between them — the gap where our seat is.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        eprintln!("Invalid arguments, expected 1 args");
        process::exit(1);
    }

    let file = File::open(&args[1]).unwrap();
    let reader = BufReader::new(file);

    let mut seats: Vec<u32> = reader
        .lines()
        .map(|line| seat_val(&line.unwrap()))
        .collect();
    // Order is all that matters; sort_unstable avoids the stable sort's
    // allocation.
    seats.sort_unstable();

    // windows(2) replaces the manual index loop (and its bounds checks).
    for pair in seats.windows(2) {
        if pair[0] == pair[1] - 2 {
            println!("seat = {} {} ", pair[0], pair[1]);
        }
    }
}
use super::*;
use chrono::*;

/// Records newly reported symptoms for a case and indexes the case under the
/// current hour bucket so it can be found by `get_cases`.
pub mod report_new_symptoms {
    use super::*;

    #[derive(Serialize, Deserialize, Debug)]
    pub struct Req<'a> {
        #[serde(borrow)]
        symptoms: Vec<&'a str>,
        #[serde(borrow)]
        case_id: &'a str,
    }

    #[derive(Serialize, Deserialize, Debug)]
    pub struct Resp {
        // Whether the redis pipeline committed; errors are swallowed into
        // `success == false` rather than propagated.
        success: bool,
        ts: chrono::DateTime<Utc>,
    }

    impl<'a> Req<'a> {
        pub async fn handle(self) -> Result<Resp, Error> {
            let Req { symptoms, case_id } = self;
            let ts = chrono::Utc::now();
            let time_key = &time_key(ts);
            let mut pipe = redis::pipe();
            // Appends each (timestamp, symptom) pair to the case's list and
            // registers the case in the hourly set — all in one pipeline.
            // (The commented loop suggests a planned multi-case variant.)
            // for case_id in case_ids {
            let case_key = &case_key(case_id);
            for symptom in symptoms {
                pipe.rpush(case_key, (time_to_str(ts), symptom));
            }
            pipe.sadd(time_key, case_id);
            // }
            let mut conn = POOL.get().await?;
            let success = pipe
                .query_async::<_, ()>(conn.deref_mut().deref_mut())
                .await
                .is_ok();
            Ok(Resp { success, ts })
        }
    }
}

/// Returns the full symptom history for a single case.
pub mod get_symptoms {
    use super::*;

    #[derive(Serialize, Deserialize, Debug)]
    pub struct Req<'a> {
        case_id: &'a str,
    }

    #[derive(Serialize, Deserialize, Debug)]
    pub struct Resp {
        symptoms: Vec<(chrono::DateTime<Utc>, String)>,
    }

    impl<'a> Req<'a> {
        pub async fn handle(self) -> Result<Resp, Error> {
            let Req { case_id } = self;
            let case_key = case_key(case_id);
            // LRANGE 0 -1 fetches the whole list in insertion order.
            let cmd = redis::Cmd::lrange(case_key, 0, -1);
            let mut conn = POOL.get().await?;
            let reports: Vec<(String, String)> =
                cmd.query_async(conn.deref_mut().deref_mut()).await?;
            Ok(Resp {
                // Re-parse the stored RFC 3339 timestamps; a malformed entry
                // fails the whole request.
                symptoms: reports
                    .into_iter()
                    .map(|(ts, sym)| Ok((str_to_time(&ts)?, sym)))
                    .collect::<Result<Vec<_>, Error>>()?,
            })
        }
    }
}

/// Returns the ids of all cases that reported symptoms since a given time, by
/// unioning the hourly index sets.
pub mod get_cases {
    use super::*;

    #[derive(Serialize, Deserialize, Debug)]
    pub struct Req {
        since: chrono::DateTime<Utc>,
    }

    #[derive(Serialize, Deserialize, Debug)]
    pub struct Resp {
        case_ids: HashSet<String>,
    }

    impl Req {
        pub async fn handle(self) -> Result<Resp, Error> {
            let Req { since } = self;
            let keys = time_keys_since(since);
            let mut pipe = redis::pipe();
            for key in keys {
                pipe.smembers(key);
            }
            let mut conn = POOL.get().await?;
            let cases: Vec<Vec<String>> = pipe.query_async(conn.deref_mut().deref_mut()).await?;
            Ok(Resp {
                // HashSet collection deduplicates cases seen in several hours.
                case_ids: cases.into_iter().flatten().collect(),
            })
        }
    }
}

/// Redis key holding a case's symptom list.
fn case_key(id: &str) -> String {
    format!("case:{}", id)
}

/// Hour-granularity index key, e.g. "time:2020:123:07" (year:day-of-year:hour).
fn time_key(ts: chrono::DateTime<Utc>) -> String {
    format!("{}", ts.format("time:%Y:%j:%H"))
}

fn time_to_str<Tz>(ts: chrono::DateTime<Tz>) -> String
where
    Tz: TimeZone,
    Tz::Offset: std::fmt::Display,
{
    ts.to_rfc3339()
}

fn str_to_time(st: &str) -> Result<chrono::DateTime<Utc>, Error> {
    Ok(DateTime::parse_from_rfc3339(st)?.into())
}

/// Enumerates the hourly bucket keys from `ts` up to (roughly) now.
/// NOTE(review): `num_hours` truncates, so the current partial hour appears to
/// be excluded when `ts` is less than an hour old — confirm intended.
fn time_keys_since(ts: chrono::DateTime<Utc>) -> impl Iterator<Item = String> {
    let num_hours = Utc::now().signed_duration_since(ts).num_hours();
    (0..num_hours)
        .map(chrono::Duration::hours)
        .map(move |dur| time_key(ts + dur))
}
mod renderer; pub use renderer::*; mod renderer2d; pub use renderer2d::*; mod ortho_camera; pub use ortho_camera::*; mod quad; pub use quad::*;
use proconio::input;
use std::collections::HashMap;

/// Reads `n` strings and, for each one, prints "YES" when it is an anagram of
/// "indeednow" (same characters with the same multiplicities) and "NO"
/// otherwise.
fn main() {
    input! {
        n: u8,
        s: [String; n],
    }

    // Character histogram of the target word, built once up front.
    let mut target = HashMap::new();
    for c in "indeednow".chars() {
        *target.entry(c).or_insert(0) += 1;
    }

    for word in &s {
        // Histogram of the candidate; map equality checks both the character
        // set and the counts at once.
        let mut hist = HashMap::new();
        for c in word.chars() {
            *hist.entry(c).or_insert(0) += 1;
        }
        println!("{}", if target == hist { "YES" } else { "NO" });
    }
}
// Copyright 2018-2019 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

use lmdb::Transaction;

use super::{DatabaseImpl, ErrorImpl, RoCursorImpl, WriteFlagsImpl};
use crate::backend::traits::{
    BackendRoCursorTransaction, BackendRoTransaction, BackendRwCursorTransaction,
    BackendRwTransaction,
};

/// Read-only transaction: a newtype over `lmdb::RoTransaction` that adapts it
/// to the backend traits, mapping every LMDB error into `ErrorImpl::LmdbError`.
#[derive(Debug)]
pub struct RoTransactionImpl<'t>(pub(crate) lmdb::RoTransaction<'t>);

impl<'t> BackendRoTransaction for RoTransactionImpl<'t> {
    type Database = DatabaseImpl;
    type Error = ErrorImpl;

    fn get(&self, db: &Self::Database, key: &[u8]) -> Result<&[u8], Self::Error> {
        self.0.get(db.0, &key).map_err(ErrorImpl::LmdbError)
    }

    // Consumes the transaction; LMDB aborts it without flushing anything.
    fn abort(self) {
        self.0.abort()
    }
}

impl<'t> BackendRoCursorTransaction<'t> for RoTransactionImpl<'t> {
    type RoCursor = RoCursorImpl<'t>;

    fn open_ro_cursor(&'t self, db: &Self::Database) -> Result<Self::RoCursor, Self::Error> {
        self.0
            .open_ro_cursor(db.0)
            .map(RoCursorImpl)
            .map_err(ErrorImpl::LmdbError)
    }
}

/// Read-write transaction: same adapter pattern as `RoTransactionImpl`, with
/// the mutating operations (`put`/`del`/`clear_db`/`commit`) added.
#[derive(Debug)]
pub struct RwTransactionImpl<'t>(pub(crate) lmdb::RwTransaction<'t>);

impl<'t> BackendRwTransaction for RwTransactionImpl<'t> {
    type Database = DatabaseImpl;
    type Error = ErrorImpl;
    type Flags = WriteFlagsImpl;

    fn get(&self, db: &Self::Database, key: &[u8]) -> Result<&[u8], Self::Error> {
        self.0.get(db.0, &key).map_err(ErrorImpl::LmdbError)
    }

    fn put(
        &mut self,
        db: &Self::Database,
        key: &[u8],
        value: &[u8],
        flags: Self::Flags,
    ) -> Result<(), Self::Error> {
        self.0
            .put(db.0, &key, &value, flags.0)
            .map_err(ErrorImpl::LmdbError)
    }

    // Without dup-sort, delete removes the key's single value.
    #[cfg(not(feature = "db-dup-sort"))]
    fn del(&mut self, db: &Self::Database, key: &[u8]) -> Result<(), Self::Error> {
        self.0.del(db.0, &key, None).map_err(ErrorImpl::LmdbError)
    }

    // With dup-sort, `value` selects one duplicate; `None` removes them all.
    #[cfg(feature = "db-dup-sort")]
    fn del(
        &mut self,
        db: &Self::Database,
        key: &[u8],
        value: Option<&[u8]>,
    ) -> Result<(), Self::Error> {
        self.0.del(db.0, &key, value).map_err(ErrorImpl::LmdbError)
    }

    fn clear_db(&mut self, db: &Self::Database) -> Result<(), Self::Error> {
        self.0.clear_db(db.0).map_err(ErrorImpl::LmdbError)
    }

    fn commit(self) -> Result<(), Self::Error> {
        self.0.commit().map_err(ErrorImpl::LmdbError)
    }

    fn abort(self) {
        self.0.abort()
    }
}

impl<'t> BackendRwCursorTransaction<'t> for RwTransactionImpl<'t> {
    type RoCursor = RoCursorImpl<'t>;

    fn open_ro_cursor(&'t self, db: &Self::Database) -> Result<Self::RoCursor, Self::Error> {
        self.0
            .open_ro_cursor(db.0)
            .map(RoCursorImpl)
            .map_err(ErrorImpl::LmdbError)
    }
}
use crate::lapoint::LazPoint;

/// A predicate over LAZ points; `apply` decides whether a point is kept.
pub trait LazFilter {
    fn apply(&self, point: &LazPoint) -> bool;
}

/// Keeps points whose intensity lies within optional inclusive bounds.
/// A bound that is `None` never rejects a point.
pub struct IntensityFilter {
    pub min: Option<u16>,
    pub max: Option<u16>,
}

impl LazFilter for IntensityFilter {
    fn apply(&self, point: &LazPoint) -> bool {
        let within_max = self.max.map_or(true, |limit| point.intensity <= limit);
        let within_min = self.min.map_or(true, |limit| point.intensity >= limit);
        within_max && within_min
    }
}
use serde::{Deserialize, Serialize};

/// A (line, column) pair; both components are zero-based.
#[derive(PartialEq, Debug, Clone, Copy, Serialize, Deserialize)]
pub struct Position(pub usize, pub usize);

impl Default for Position {
    fn default() -> Self {
        Self(0, 0)
    }
}

// impl std::fmt::Display for Position{
//     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
//         write!(
//             f,
//             "start: line {}, col {}\n\tend: line {} col {}",
//             self.start.0, self.start.1, self.end.0, self.end.1
//         )
//     }
// }

///A position consisting of two positions. This is for tracking start and end for complex data.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq)]
pub struct BiPos {
    pub start: Position,
    pub end: Position,
    // NOTE(review): `offset` appears to reuse Position as a (start, end) pair
    // of byte/char offsets rather than (line, col) — confirm.
    pub offset: Position,
    ///This is a region in source code where the previous line and the next line are memoized for source snipping purposes
    pub line_region: Position
}

impl std::fmt::Display for BiPos {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Displayed 1-based for humans, e.g. {{1,1}, {1, 4}}.
        write!(f, "{{{{{},{}}}, {{{}, {}}}}}", self.start.0 + 1, self.start.1 + 1, self.end.0 + 1, self.end.1 + 1)
    }
}

impl Default for BiPos {
    fn default() -> Self {
        Self {
            start: Position::default(),
            end: Position::default(),
            offset: Position::default(),
            line_region: Position(0, 2)
        }
    }
}

impl BiPos {
    /// Advances to the start of the next line: line numbers bump, columns
    /// reset, and the memoized line region shifts down by one.
    pub fn next_line(&mut self) {
        self.start.0 += 1;
        self.start.1 = 0;
        self.end = self.start;
        // NOTE(review): offset end is advanced, then start is snapped to it —
        // the increment order differs from next_col below; confirm intended.
        self.offset.1 += 1;
        self.offset.0 = self.offset.1;
        self.line_region.0 += 1;
        self.line_region.1 += 1;
    }

    /// Advances one column: the span collapses onto the previous end and both
    /// edges (and both offsets) move forward by one.
    pub fn next_col(&mut self) {
        self.start = self.end;
        self.start.1 += 1;
        self.end.1 += 1;
        self.offset.0 = self.offset.1;
        self.offset.0 += 1;
        self.offset.1 += 1;
    }

    /// Extends only the end of the span by one column (start stays put),
    /// growing the current token.
    pub fn next_col_end(&mut self) {
        self.end.1 += 1;
        self.offset.1 += 1;
    }

    /// Merges two positions into one span: this position's start through
    /// `other`'s end.
    pub fn meet(&self, other: &BiPos) -> Self{
        BiPos{
            start: self.start,
            end: other.end,
            offset: Position(self.offset.0, other.offset.1),
            line_region: Position(self.line_region.0, other.line_region.1)
        }
    }

    /// Produces the component-wise *difference* between `other` and `self`
    /// for start/end (a relative distance), while offset and line_region are
    /// merged as in `meet`.
    /// NOTE(review): the subtractions underflow (panic in debug) when `other`
    /// precedes `self` — confirm callers guarantee ordering.
    pub fn range_to(&self, other: &BiPos) -> Self{
        let start = Position(other.start.0 - self.start.0, other.start.1 - self.start.1);
        let end = Position(other.end.0 - self.end.0, other.end.1 - self.end.1);
        BiPos{
            start,
            end,
            offset: Position(self.offset.0, other.offset.1),
            line_region: Position(self.line_region.0, other.line_region.1)
        }
    }

    /// The (start column, end column) pair of this span.
    pub fn col_range(&self) -> (usize, usize){
        (self.start.1, self.end.1)
    }
}
use crate::linefile::LineFile;
use crate::notebook::parse;
use serde::Deserialize;
use std::error::Error;

/// A single notebook page: its metadata plus the parsed stroke data.
#[derive(Debug)]
pub struct Page {
    pub metadata: Metadata,
    pub linefile: LineFile,
}

impl Page {
    /// Loads page `id` from `path`, reading `<id>.rm` for stroke data and
    /// `<id>-metadata.json` for metadata. A failed stroke-file parse aborts
    /// the load; a failed metadata parse is tolerated — a warning is printed
    /// and empty metadata is substituted.
    pub fn load(path: &str, id: &str) -> Result<Page, Box<dyn Error>> {
        let page_path = format!("{}/{}", path, id);
        let linefile = LineFile::parse(&format!("{}.rm", page_path))?;
        let metadata = parse::<Metadata>(&page_path, "-metadata.json").unwrap_or_else(|_| {
            eprintln!("WARNING: Failed to load metadata for page {}", id);
            Metadata { layers: vec![] }
        });
        Ok(Page { metadata, linefile })
    }
}

/// Per-page metadata as stored in the `-metadata.json` sidecar file.
#[derive(Debug, Deserialize)]
pub struct Metadata {
    layers: Vec<Layer>,
}

#[derive(Debug, Deserialize)]
struct Layer {
    name: String,
}
use oxygengine::prelude::*;

/// Remaining lifetime of an entity (presumably in seconds — the unit is not
/// visible here; confirm against the system that decrements it).
#[derive(Debug, Copy, Clone)]
pub struct Lifetime(pub Scalar);

impl Component for Lifetime {
    // Dense vector storage: appropriate when most entities carry this
    // component.
    type Storage = VecStorage<Self>;
}
#![allow(dead_code)]

use super::devkit;
use super::memory::TestMemory;
use arm::{Cpu, CpuMode, Isa};

/// An opcode that is actually an undefined instruction that is
/// used for signaling the end of execution in ARM mode.
const ARM_END_OPCODE: u32 = 0xF777F777;

/// An opcode that is used to signal the end of execution in THUMB mode.
/// By itself this is an undefined instruction. (2 of them make a branch with link but w/e)
const THUMB_END_OPCODE: u16 = 0xF777;

/// Assembles and runs `source` in ARM mode, returning the final CPU and
/// memory state for inspection by tests.
pub fn execute_arm(name: &str, source: &str) -> (Cpu, TestMemory) {
    let mut exec = Executor::new(name, arm::Isa::Arm);
    exec.push(source);
    (exec.cpu, exec.mem)
}

/// Incrementally assembles test source, executes it on an emulated CPU, and
/// keeps the CPU/memory state across `push` calls.
pub struct Executor {
    pub cpu: Cpu,
    pub mem: TestMemory,
    pub name: String,
    // Accumulated `.data` section source.
    data: String,
    // Accumulated `.text` section source.
    source: String,
    base_isa: Isa,
    // Number of pushes so far; used to make per-run assembly names unique.
    count: u32,
}

impl Executor {
    pub fn new(name: impl Into<String>, base_isa: Isa) -> Self {
        let mut mem = TestMemory::with_padding(Vec::new(), 8);
        let cpu = Cpu::new(base_isa, CpuMode::System, &mut mem);
        Executor {
            cpu,
            mem,
            name: name.into(),
            source: String::new(),
            data: String::new(),
            base_isa,
            count: 0,
        }
    }

    pub fn clear_source(&mut self) {
        self.source.clear();
    }

    /// Appends to the `.data` section used by subsequent executions.
    pub fn data(&mut self, data_source: &str) {
        self.data.push_str(data_source);
        self.data.push('\n');
    }

    /// Appends `source` to the program without running it yet.
    pub fn push_no_exec(&mut self, source: &str) {
        self.source.push_str(source);
        self.source.push('\n');
        self.count += 1;
    }

    /// Appends `source` and immediately (re)executes the whole accumulated
    /// program from address 0.
    pub fn push(&mut self, source: &str) {
        self.push_no_exec(source);
        self.execute();
    }

    /// Assembles the accumulated data + text, loads it at address 0, and steps
    /// the CPU until the next instruction is the end-marker opcode.
    fn execute(&mut self) {
        let name = format!("{}-{}", self.name, self.count);

        let mut source = String::new();
        if !self.data.is_empty() {
            source.push_str(".data\n");
            source.push_str(&self.data);
        }
        source.push_str(".text\n");
        source.push_str(&self.source);
        // Terminator: the undefined word 0xF777F777 doubles as two THUMB
        // end markers, so it stops execution in either ISA.
        source.push_str(".text\n");
        source.push_str("_exit:\n");
        source.push_str(".word 0xF777F777\n");

        let bin = devkit::assemble(self.base_isa, &name, &source).unwrap();
        let min_len = bin.len() + 8;
        self.mem.set_memory_with_padding(bin, min_len);
        // Seed the T (THUMB) flag from the base ISA and branch to the entry
        // point at address 0.
        self.cpu.registers.putf_t(self.base_isa == Isa::Thumb);
        self.cpu.branch(0, &mut self.mem);

        loop {
            let next_pc = self.cpu.next_exec_pc();

            // break in ARM mode
            if !self.cpu.registers.getf_t() && self.mem.view32(next_pc) == ARM_END_OPCODE {
                break;
            }

            // break in THUMB mode
            if self.cpu.registers.getf_t() && self.mem.view16(next_pc) == THUMB_END_OPCODE {
                break;
            }

            self.cpu.step(&mut self.mem);
        }
    }
}
use super::*;

impl IndexBuilder for PostgresQueryBuilder {
    /// Renders an inline index clause for use inside a CREATE TABLE statement.
    /// NOTE(review): `KEY <name> (<cols>)` is MySQL-style syntax; Postgres
    /// does not accept inline KEY clauses — confirm whether this path is ever
    /// reachable for the Postgres builder.
    fn prepare_table_index_expression(&self, create: &IndexCreateStatement, sql: &mut SqlWriter) {
        self.prepare_index_prefix(create, sql);
        write!(sql, "KEY ").unwrap();

        self.prepare_index_name(&create.index.name, sql);

        // Index type (USING ...) is intentionally skipped here; it is only
        // emitted for standalone CREATE INDEX statements below.
        // self.prepare_index_type(&create.index_type, sql);

        self.prepare_index_columns(&create.index.columns, sql);
    }

    /// Renders a standalone `CREATE [UNIQUE] INDEX name ON table [USING ...]
    /// (columns)` statement.
    fn prepare_index_create_statement(&self, create: &IndexCreateStatement, sql: &mut SqlWriter) {
        write!(sql, "CREATE ").unwrap();
        self.prepare_index_prefix(create, sql);
        write!(sql, "INDEX ").unwrap();

        self.prepare_index_name(&create.index.name, sql);

        write!(sql, " ON ").unwrap();
        if let Some(table) = &create.table {
            table.prepare(sql, '"');
        }

        self.prepare_index_type(&create.index_type, sql);
        self.prepare_index_columns(&create.index.columns, sql);
    }

    /// Renders `DROP INDEX "name"`; unlike MySQL, Postgres drops indexes
    /// without naming the table.
    fn prepare_index_drop_statement(&self, drop: &IndexDropStatement, sql: &mut SqlWriter) {
        write!(sql, "DROP INDEX ").unwrap();
        if let Some(name) = &drop.index.name {
            write!(sql, "\"{}\"", name).unwrap();
        }
    }

    /// Renders the optional ` USING <method>` clause; FullText maps to
    /// Postgres's GIN access method.
    fn prepare_index_type(&self, col_index_type: &Option<IndexType>, sql: &mut SqlWriter) {
        if let Some(index_type) = col_index_type {
            write!(
                sql,
                " USING {}",
                match index_type {
                    IndexType::BTree => "BTREE".to_owned(),
                    IndexType::FullText => "GIN".to_owned(),
                    IndexType::Hash => "HASH".to_owned(),
                    IndexType::Custom(custom) => custom.to_string(),
                }
            )
            .unwrap();
        }
    }

    /// Renders the `PRIMARY ` / `UNIQUE ` prefix flags (both may appear).
    fn prepare_index_prefix(&self, create: &IndexCreateStatement, sql: &mut SqlWriter) {
        if create.primary {
            write!(sql, "PRIMARY ").unwrap();
        }
        if create.unique {
            write!(sql, "UNIQUE ").unwrap();
        }
    }
}
// See README for example gdb commands to generate logs for this program.

// TODO: This currently assumes if an object is not moved in a GC it dies, which is not correct.
// E.g. an object in the oldest generation is not moved in minor GCs.

// TODO: The gdb script below does not print x->x when compacting GC skips an object because it's
// new location is the same as the current one.

// NOTE: All indices below are 0-based, but when printing GC indices we print 1-based, so the first
// GC is printed as "1".

#![feature(or_patterns)]

use std::collections::HashMap;
use std::fmt;
use std::fs::File;
use std::io::{BufRead, BufReader};

use ansi_term::{Color, Style};
use clap::{App, Arg};
use rustyline::error::ReadlineError;
use rustyline::Editor;

// Prefix marking the gdb-log lines this tool consumes.
static LINE_START: &str = ">>> ";

/// A heap address; Debug prints it in hex.
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
struct Addr(u64);

impl fmt::Debug for Addr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::LowerHex::fmt(&self.0, f)
    }
}

/// An address paired with the size of the object that lives there.
#[derive(Debug, PartialEq, Eq)]
struct AddrSize {
    addr: Addr,
    size: u64,
}

#[derive(Debug)]
struct GC {
    /// Is this a major GC?
    major: bool,

    /// All moves in this GC. Note that in compacting GC we can see moves that are normally invalid
    /// in two-space copying GC, e.g. `y -> z; x -> y`.
    moves_fwd: HashMap<Addr, AddrSize>,

    /// Reverse of `moves_fwd`. It's possible to revert `moves_fwd` because in a single GC we can't
    /// move two objects to the same location.
    /// E.g. we'll never see something like `x -> y; z -> y`.
    ///
    /// In other words, in a GC, an object is moved at most once, and a location gets at most one
    /// object.
    moves_bwd: HashMap<Addr, AddrSize>,
}

impl GC {
    fn new(major: bool) -> GC {
        GC {
            major,
            moves_fwd: HashMap::new(),
            moves_bwd: HashMap::new(),
        }
    }
}

/// The full movement history of one object that at some point occupied `loc`.
#[derive(Debug, PartialEq, Eq)]
struct Moves {
    /// The location we searched for.
    loc: Addr,

    /// The first GC in which we've made a move `x -> y`, and the moves `y -> z`, ... eventually
    /// reached `loc`.
    first_move: usize,

    /// All the moves of the objects. First move happens at `gc`th GC.
    moves: Vec<Addr>,
}

/// Parses the gdb log given on the command line, then starts an interactive
/// address-lookup prompt.
fn main() {
    let args = App::new("obj-loc")
        .arg(
            Arg::with_name("gdb-out-file")
                .help("Path to gdb output")
                .takes_value(true)
                .required(true),
        )
        .get_matches();

    let path = args.value_of("gdb-out-file").unwrap();
    let file = File::open(path).unwrap_or_else(|_| panic!("Unable to open file: {}", path));
    let reader = BufReader::new(file);
    let gcs = parse(reader);
    repl(&gcs);
}

/// Parses the log into per-GC move tables. Lines look like either
/// `>>> GC <0|1>` (a new GC; 1 = major) or `>>> <from> -> <to> size: <n>`.
fn parse<B: BufRead>(reader: B) -> Vec<GC> {
    let mut gcs: Vec<GC> = vec![];
    let mut current_gc: Option<GC> = None;

    for line in reader.lines() {
        let line = line.unwrap();
        // Skip anything before (and lines without) the ">>> " marker.
        let start_idx = match line.find(LINE_START) {
            None => {
                continue;
            }
            Some(start_idx) => start_idx,
        };
        let line = &line[start_idx..];
        let line = &line[LINE_START.len()..];

        let words: Vec<&str> = line.split_whitespace().collect();
        if words[0] == "GC" {
            if let Some(gc) = current_gc.take() {
                gcs.push(gc);
            }
            let major = words[1].parse::<u8>().unwrap() == 1;
            current_gc = Some(GC::new(major));
        } else {
            assert!(current_gc.is_some());
            // from '->' to 'size:' size
            let from = Addr(parse_hex_fail(words[0]));
            let to = Addr(parse_hex_fail(words[2]));
            let size = str::parse::<u64>(words[4])
                .unwrap_or_else(|_| panic!("Unable to parse size: {}", words[4]));
            let current_gc = current_gc.as_mut().unwrap();
            insert_new(&mut current_gc.moves_fwd, from, AddrSize { addr: to, size });
            insert_new(&mut current_gc.moves_bwd, to, AddrSize { addr: from, size });
        }
    }

    // Flush the GC that was in progress when the log ended.
    if let Some(gc) = current_gc.take() {
        gcs.push(gc);
    }

    gcs
}

// Parses a "0x..."-style hex literal (the "0x" prefix is skipped).
fn parse_hex(s: &str) -> Option<u64> {
    u64::from_str_radix(&s[2..], 16).ok()
}

fn parse_hex_fail(s: &str) -> u64 {
    parse_hex(s).unwrap_or_else(|| panic!("Unable to parse hex: {}", s))
}

// Inserts and asserts the key was not already present — guards the
// "one move per object per GC" invariant documented on `GC`.
fn insert_new<K, V>(m: &mut HashMap<K, V>, k: K, v: V)
where
    K: Eq + std::hash::Hash,
{
    let ret = m.insert(k, v);
    assert!(ret.is_none());
}

/// Interactive loop: reads an address per line and prints every movement
/// chain that passes through it, highlighting major GCs (bold) and the
/// searched address (blue).
fn repl(gcs: &[GC]) {
    let mut last_major_gc = 0;
    for (gc_idx, gc) in gcs.iter().enumerate().rev() {
        if gc.major {
            last_major_gc = gc_idx + 1;
            break;
        }
    }

    println!("Total GCs: {}", gcs.len());
    println!("Last major GC: {}", last_major_gc);
    println!("`N: addr` means by the beginning on Nth GC the object lived at addr");

    let mut rl = Editor::<()>::new();

    let bold = Style::new().bold();
    let blue = Color::Blue;

    loop {
        match rl.readline(">>> ") {
            Ok(line) if line.trim().is_empty() => {}
            Ok(line) => match parse_hex(&line) {
                None => {
                    println!("Unable to parse address: {}", line);
                }
                Some(addr) => {
                    rl.add_history_entry(line);
                    for moves in find_moves(gcs, addr) {
                        // Nth GC, 0-based
                        let mut gc_n = moves.first_move;
                        for move_ in moves.moves {
                            // When the object lives at the end of the run gc_n will be gcs.len(),
                            // handle that case
                            let highlight_gc = gc_n < gcs.len() && gcs[gc_n].major;
                            let highlight_addr = move_.0 == addr;

                            if highlight_gc {
                                print!("{}", bold.paint(format!("{}: ", gc_n + 1)));
                            } else {
                                print!("{}: ", gc_n + 1);
                            }

                            if highlight_addr {
                                println!("{}", blue.paint(format!("{:#?}", move_)));
                            } else {
                                println!("{:#?}", move_);
                            }

                            gc_n += 1;
                        }
                        println!();
                    }
                }
            },
            Err(ReadlineError::Interrupted | ReadlineError::Eof) => {
                break;
            }
            err @ Err(ReadlineError::Io(_) | ReadlineError::Utf8Error | ReadlineError::Errno(_)) => {
                println!("Error while reading line: {:?}", err);
                println!("Aborting.");
                break;
            }
        }
    }
}

/// Find all moves of an object.
fn find_moves(gcs: &[GC], addr: u64) -> Vec<Moves> {
    let addr = Addr(addr);

    let mut ret = vec![];

    // Searching for 'addr'. Two cases:
    //
    // - We see `addr -> y`:
    //   - Follow 'y' starting from next GC.
    //   - Reverse follow 'addr' starting from previous GC.
    //
    // - We see `y -> addr`:
    //   - Follow 'addr' starting from next GC.
    //   - Reverse follow 'y' starting from previous GC.
    //
    // First case happens when `addr` is allocated by a mutator rather than GC.

    // In the second case we will follow 'addr' in the next GC so we should skip the first case in
    // the next iteration:
    let mut skip_first_case = false;

    for (gc_n, gc) in gcs.iter().enumerate() {
        if !skip_first_case {
            // First case, 'x -> y', `next_addr` is 'y'
            if let Some(next_addr) = gc.moves_fwd.get(&addr) {
                let fwd_moves = follow_fwd(&gcs[gc_n + 1..], next_addr.addr);
                let mut bwd_moves = follow_bwd(&gcs[0..gc_n], addr);
                let first_move = gc_n - bwd_moves.len();
                // bwd_moves was collected newest-to-oldest; flip it and stitch
                // the full chain: history + addr + this move + future moves.
                bwd_moves.reverse();
                bwd_moves.push(addr);
                bwd_moves.push(next_addr.addr);
                bwd_moves.extend_from_slice(&fwd_moves);
                ret.push(Moves {
                    loc: addr,
                    first_move,
                    moves: bwd_moves,
                });
            }
        }

        skip_first_case = false;

        // Second case, 'y -> x', `prev_addr` is 'y'
        if let Some(prev_addr) = gc.moves_bwd.get(&addr) {
            let fwd_moves = follow_fwd(&gcs[gc_n + 1..], addr);
            let mut bwd_moves = follow_bwd(&gcs[0..gc_n], prev_addr.addr);
            let first_move = gc_n - bwd_moves.len();
            bwd_moves.reverse();
            bwd_moves.push(prev_addr.addr);
            bwd_moves.push(addr);
            bwd_moves.extend_from_slice(&fwd_moves);
            ret.push(Moves {
                loc: addr,
                first_move,
                moves: bwd_moves,
            });
            skip_first_case = true;
        }
    }

    ret
}

// Follows an object forward through successive GCs until it stops moving.
fn follow_fwd(gcs: &[GC], addr: Addr) -> Vec<Addr> {
    // println!("follow_fwd: gcs={:#?}, addr={:#?}", gcs, addr);
    let mut ret = vec![];
    for gc in gcs {
        match gc.moves_fwd.get(&addr) {
            None => {
                break;
            }
            Some(next_addr) => {
                ret.push(next_addr.addr);
            }
        }
    }
    ret
}

// Follows an object backward through earlier GCs (newest first in the result).
// NOTE(review): unlike gdb-following code elsewhere, neither follow_* updates
// `addr` inside the loop, so these only look one hop away repeatedly — confirm
// whether chasing the chain (addr = next_addr.addr) was intended.
fn follow_bwd(gcs: &[GC], addr: Addr) -> Vec<Addr> {
    // println!("follow_bwd: gcs={:#?}, addr={:#?}", gcs, addr);
    let mut ret = vec![];
    for gc in gcs.iter().rev() {
        match gc.moves_bwd.get(&addr) {
            None => {
                break;
            }
            Some(prev_addr) => {
                ret.push(prev_addr.addr);
            }
        }
    }
    ret
}

//
// Tests
//

#[test]
fn parse_test() {
    let input = "\
        >>> GC 1\n\
        >>> 0x123 -> 0x124 size: 1\n\
        >>> 0x122 -> 0x123 size: 2\n\
        >>> GC 2\n\
    ";

    let gcs = parse(input.as_bytes());
    assert_eq!(gcs.len(), 2);
    assert_eq!(
        gcs[0].moves_fwd.get(&Addr(0x123)),
        Some(&AddrSize {
            addr: Addr(0x124),
            size: 1
        })
    );
    assert_eq!(
        gcs[0].moves_fwd.get(&Addr(0x122)),
        Some(&AddrSize {
            addr: Addr(0x123),
            size: 2
        })
    );
    assert_eq!(
        gcs[0].moves_bwd.get(&Addr(0x124)),
        Some(&AddrSize {
            addr: Addr(0x123),
            size: 1
        })
    );
    assert_eq!(
        gcs[0].moves_bwd.get(&Addr(0x123)),
        Some(&AddrSize {
            addr: Addr(0x122),
            size: 2
        })
    );
}

#[test]
fn find_moves_test() {
    let input = "\
        >>> GC 1\n\
        >>> 0x123 -> 0x124 size: 1\n\
        >>> GC 2\n\
        >>> 0x124 -> 0x125 size: 2\n\
        >>> 0x100 -> 0x101 size: 3\n\
    ";

    let gcs = parse(input.as_bytes());

    //
    // Test fwd search
    //

    assert_eq!(
        find_moves(&gcs, 0x123),
        vec![Moves {
            loc: Addr(0x123),
            first_move: 0,
            moves: vec![Addr(0x123), Addr(0x124), Addr(0x125)],
        }]
    );

    assert_eq!(
        find_moves(&gcs, 0x100),
        vec![Moves {
            loc: Addr(0x100),
            first_move: 1,
            moves: vec![Addr(0x100), Addr(0x101)],
        }]
    );

    //
    // Test bwd search
    //

    assert_eq!(
        find_moves(&gcs, 0x101),
        vec![Moves {
            loc: Addr(0x101),
            first_move: 1,
            moves: vec![Addr(0x100), Addr(0x101)],
        }]
    );

    assert_eq!(
        find_moves(&gcs, 0x125),
        vec![Moves {
            loc: Addr(0x125),
            first_move: 0,
            moves: vec![Addr(0x123), Addr(0x124), Addr(0x125)],
        }]
    );

    assert_eq!(
        find_moves(&gcs, 0x124),
        vec![Moves {
            loc: Addr(0x124),
            first_move: 0,
            moves: vec![Addr(0x123), Addr(0x124), Addr(0x125)],
        }]
    );
}

#[test]
fn complicated_test() {
    // An interesting case that can legitimately happen in compacting GC: We move x to y, and z to
    // x, in the same GC. Make sure we handle this correctly.
let input = "\ x >>> GC 1\n\ λ:2> unload [] >>> 0x124 -> 0x125 size: 2\n\ >>> 0x123 -> 0x124 size: 2\n\ "; let gcs = parse(input.as_bytes()); assert_eq!( find_moves(&gcs, 0x124), vec![ Moves { loc: Addr(0x124), first_move: 0, moves: vec![Addr(0x124), Addr(0x125)], }, Moves { loc: Addr(0x124), first_move: 0, moves: vec![Addr(0x123), Addr(0x124)], } ] ); assert_eq!( find_moves(&gcs, 0x125), vec![Moves { loc: Addr(0x125), first_move: 0, moves: vec![Addr(0x124), Addr(0x125)], }] ); assert_eq!( find_moves(&gcs, 0x123), vec![Moves { loc: Addr(0x123), first_move: 0, moves: vec![Addr(0x123), Addr(0x124)], }] ); }
/*
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT license.
 */

use std::cmp::Ordering;

use bit_vec::BitVec;

/// Extension trait giving `BitVec` a `Vec`-like `resize` operation.
pub trait BitVecExtension {
    fn resize(&mut self, new_len: usize, value: bool);
}

impl BitVecExtension for BitVec {
    /// Truncate or grow (filling with `value`) so the length becomes exactly
    /// `new_len`; a no-op when the length already matches.
    fn resize(&mut self, new_len: usize, value: bool) {
        let current_len = self.len();
        match new_len.cmp(&current_len) {
            Ordering::Greater => self.grow(new_len - current_len, value),
            Ordering::Equal => {}
            Ordering::Less => self.truncate(new_len),
        }
    }
}

#[cfg(test)]
mod bit_vec_extension_test {
    use super::*;

    #[test]
    fn resize_test() {
        let mut bits = BitVec::new();

        // Growing from empty fills with the given value.
        bits.resize(10, false);
        assert_eq!(bits.len(), 10);
        assert!(bits.none());

        // Growing by one appends the fill value.
        bits.resize(11, true);
        assert_eq!(bits.len(), 11);
        assert!(bits[10]);

        // Shrinking drops the tail bits.
        bits.resize(5, false);
        assert_eq!(bits.len(), 5);
        assert!(bits.none());
    }
}
/// Returns `true` if `s` is one of the language's reserved keywords.
pub fn is_keyword(s: &str) -> bool {
    // A slice lookup keeps the keyword set in one place instead of a chain of
    // `==` comparisons; behavior is identical.
    ["def", "else", "for", "if", "in", "return", "view"].contains(&s)
}
extern crate aesti;
#[macro_use]
extern crate arrayref;
extern crate bytes;
extern crate clap;
extern crate cookie_factory;
extern crate data_encoding;
extern crate ed25519_dalek;
extern crate env_logger;
extern crate flate2;
#[macro_use]
extern crate futures;
extern crate itertools;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[macro_use]
extern crate nom;
extern crate num;
extern crate rand;
extern crate sha2;
extern crate tokio;
extern crate tokio_io;
extern crate tokio_timer;

#[cfg(test)]
#[macro_use]
extern crate pretty_assertions;

use clap::{App, Arg, ArgMatches, SubCommand};
use futures::{Future, Sink};

mod constants;
mod crypto;
mod data;
mod i2np;
mod transport;

/// Process entry point: initialize logging, run the CLI, and exit with the
/// code returned by `inner_main`.
fn main() {
    env_logger::init();
    let exit_code = inner_main();
    std::process::exit(exit_code);
}

/// Parse the command line and dispatch to the matching `cli_*` handler.
///
/// Returns the process exit code: the handler's result for a recognized
/// `cli` subcommand, or 1 when no subcommand was given.
fn inner_main() -> i32 {
    let matches = App::new("ire")
        .version("0.0.1")
        .author("Jack Grigg <str4d@i2pmail.org>")
        .about("The I2P Rust engine")
        .subcommand(
            SubCommand::with_name("cli")
                .subcommand(
                    SubCommand::with_name("gen")
                        .arg(
                            Arg::with_name("routerKeys")
                                .help("Path to write the router.keys.dat to"),
                        )
                        .arg(Arg::with_name("routerInfo").help("Path to write the router.info to"))
                        .arg(Arg::with_name("bind").help("Address:Port to bind to")),
                )
                .subcommand(
                    SubCommand::with_name("server")
                        .arg(
                            Arg::with_name("routerKeys")
                                .help("Path to the server's router.keys.dat"),
                        )
                        .arg(Arg::with_name("bind").help("Address:Port to bind to")),
                )
                .subcommand(
                    SubCommand::with_name("client")
                        .arg(
                            Arg::with_name("routerKeys")
                                .help("Path to the client's router.keys.dat"),
                        )
                        .arg(
                            Arg::with_name("peerInfo").help("Path to the peer's router.info file"),
                        ),
                ),
        )
        .get_matches();

    match matches.subcommand() {
        ("cli", Some(matches)) => match matches.subcommand() {
            ("gen", Some(matches)) => cli_gen(matches),
            ("server", Some(matches)) => cli_server(matches),
            ("client", Some(matches)) => cli_client(matches),
            // clap only yields the subcommands registered above.
            (&_, _) => panic!("Invalid matches for cli subcommand"),
        },
        _ => 1,
    }
}

/// `cli gen`: create fresh router secret keys, build and sign a matching
/// RouterInfo with a single NTCP address at `bind`, and write both to disk.
///
/// NOTE(review): the `unwrap`s panic when `bind`/`routerInfo`/`routerKeys`
/// are omitted — these args are not marked required in the clap definition.
fn cli_gen(args: &ArgMatches) -> i32 {
    let addr = args.value_of("bind").unwrap().parse().unwrap();
    let ra = data::RouterAddress::new(&transport::ntcp::NTCP_STYLE, addr);

    let pkf = data::RouterSecretKeys::new();
    let mut ri = data::RouterInfo::new(pkf.rid.clone());
    ri.set_addresses(vec![ra]);
    ri.sign(&pkf.signing_private_key);
    ri.to_file(args.value_of("routerInfo").unwrap());
    pkf.to_file(args.value_of("routerKeys").unwrap());
    0
}

/// `cli server`: load the router's secret keys and run an NTCP listener on
/// `bind`, blocking on the tokio runtime until the listener ends.
fn cli_server(args: &ArgMatches) -> i32 {
    let rsk = data::RouterSecretKeys::from_file(args.value_of("routerKeys").unwrap());
    let addr = args.value_of("bind").unwrap().parse().unwrap();

    // Accept all incoming sockets
    info!("Listening on {}", addr);
    let ntcp = transport::ntcp::Engine::new();
    let listener = ntcp.listen(rsk.rid, rsk.signing_private_key, &addr);
    tokio::run(listener.map_err(|e| error!("Listener error: {}", e)));
    0
}

/// `cli client`: connect over NTCP to the peer described by `peerInfo`,
/// then send a TimeSync frame followed by a dummy-data I2NP message.
fn cli_client(args: &ArgMatches) -> i32 {
    let rsk = data::RouterSecretKeys::from_file(args.value_of("routerKeys").unwrap());
    let peer_ri = data::RouterInfo::from_file(args.value_of("peerInfo").unwrap());

    info!("Connecting to {}", peer_ri.router_id.hash());
    let ntcp = transport::ntcp::Engine::new();
    let conn = ntcp.connect(rsk.rid, rsk.signing_private_key, peer_ri)
        .and_then(move |t| {
            info!("Connection established!");
            t.send(transport::ntcp::Frame::TimeSync(42))
        })
        .and_then(|t| {
            t.send(transport::ntcp::Frame::Standard(
                i2np::Message::dummy_data(),
            ))
        })
        .and_then(|_| {
            info!("Dummy data sent!");
            Ok(())
        })
        .map_err(|e| error!("Connection error: {}", e));
    tokio::run(conn);
    0
}
/// Rendering logic.
///
/// Renders every value in `xs` as one bar character, scaled against `max`.
pub fn render_vec<'a, II>(xs: II, max: f64) -> String
where
    II: IntoIterator<Item = &'a f64>,
{
    xs.into_iter().map(|x| float_bar(*x / max)).collect()
}

const BARS: [char; 8] = ['▁', '▂', '▃', '▄', '▅', '▆', '▇', '█'];

// f must be between 0 and 1.
fn float_bar(mut f: f64) -> char {
    // Values above 1 are clamped (cpu usage can exceed 1); near-zero values
    // render as a blank instead of the smallest bar.
    f = f.min(1.);
    if f < 0.03 {
        return ' ';
    }
    // Walk up the bar levels by repeatedly subtracting one segment's worth.
    let seg = 1. / BARS.len() as f64;
    let mut level = 0;
    while f > seg {
        f -= seg;
        level += 1;
    }
    BARS[level]
}
//! Parsers for applying parsers multiple times /// `separated_list!(I -> IResult<I,T>, I -> IResult<I,O>) => I -> IResult<I, Vec<O>>` /// separated_list(sep, X) returns Vec<X> will return Incomplete if there may be more elements #[macro_export] macro_rules! separated_list( ($i:expr, $sep:ident!( $($args:tt)* ), $submac:ident!( $($args2:tt)* )) => ( { use $crate::InputLength; //FIXME: use crate vec let mut res = ::std::vec::Vec::new(); let mut input = $i.clone(); // get the first element let input_ = input.clone(); match $submac!(input_, $($args2)*) { $crate::IResult::Error(_) => $crate::IResult::Done(input, ::std::vec::Vec::new()), $crate::IResult::Incomplete(i) => $crate::IResult::Incomplete(i), $crate::IResult::Done(i,o) => { if i.input_len() == input.input_len() { $crate::IResult::Error(error_position!($crate::ErrorKind::SeparatedList,input)) } else { res.push(o); input = i; let ret; loop { // get the separator first let input_ = input.clone(); match $sep!(input_, $($args)*) { $crate::IResult::Error(_) => { ret = $crate::IResult::Done(input, res); break; } $crate::IResult::Incomplete($crate::Needed::Unknown) => { ret = $crate::IResult::Incomplete($crate::Needed::Unknown); break; }, $crate::IResult::Incomplete($crate::Needed::Size(needed)) => { let (size,overflowed) = needed.overflowing_add(($i).input_len() - input.input_len()); ret = match overflowed { true => $crate::IResult::Incomplete($crate::Needed::Unknown), false => $crate::IResult::Incomplete($crate::Needed::Size(size)), }; break; }, $crate::IResult::Done(i2,_) => { if i2.input_len() == input.input_len() { ret = $crate::IResult::Done(input, res); break; } // get the element next match $submac!(i2, $($args2)*) { $crate::IResult::Error(_) => { ret = $crate::IResult::Done(input, res); break; }, $crate::IResult::Incomplete($crate::Needed::Unknown) => { ret = $crate::IResult::Incomplete($crate::Needed::Unknown); break; }, $crate::IResult::Incomplete($crate::Needed::Size(needed)) => { let (size,overflowed) = 
needed.overflowing_add(($i).input_len() - i2.input_len()); ret = match overflowed { true => $crate::IResult::Incomplete($crate::Needed::Unknown), false => $crate::IResult::Incomplete($crate::Needed::Size(size)), }; break; }, $crate::IResult::Done(i3,o3) => { if i3.input_len() == i2.input_len() { ret = $crate::IResult::Done(input, res); break; } res.push(o3); input = i3; } } } } } ret } }, } } ); ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => ( separated_list!($i, $submac!($($args)*), call!($g)); ); ($i:expr, $f:expr, $submac:ident!( $($args:tt)* )) => ( separated_list!($i, call!($f), $submac!($($args)*)); ); ($i:expr, $f:expr, $g:expr) => ( separated_list!($i, call!($f), call!($g)); ); ); /// `separated_nonempty_list!(I -> IResult<I,T>, I -> IResult<I,O>) => I -> IResult<I, Vec<O>>` /// separated_nonempty_list(sep, X) returns Vec<X> will return Incomplete if there may be more elements #[macro_export] macro_rules! separated_nonempty_list( ($i:expr, $sep:ident!( $($args:tt)* ), $submac:ident!( $($args2:tt)* )) => ( { use $crate::InputLength; let mut res = ::std::vec::Vec::new(); let mut input = $i.clone(); // get the first element let input_ = input.clone(); match $submac!(input_, $($args2)*) { $crate::IResult::Error(a) => $crate::IResult::Error(a), $crate::IResult::Incomplete(i) => $crate::IResult::Incomplete(i), $crate::IResult::Done(i,o) => { if i.input_len() == input.len() { $crate::IResult::Error(error_position!($crate::ErrorKind::SeparatedNonEmptyList,input)) } else { res.push(o); input = i; let ret; loop { // get the separator first let input_ = input.clone(); match $sep!(input_, $($args)*) { $crate::IResult::Error(_) => { ret = $crate::IResult::Done(input, res); break; } $crate::IResult::Incomplete($crate::Needed::Unknown) => { ret = $crate::IResult::Incomplete($crate::Needed::Unknown); break; }, $crate::IResult::Incomplete($crate::Needed::Size(needed)) => { let (size,overflowed) = needed.overflowing_add(($i).input_len() - input.input_len()); ret = 
match overflowed { true => $crate::IResult::Incomplete($crate::Needed::Unknown), false => $crate::IResult::Incomplete($crate::Needed::Size(size)), }; break; }, $crate::IResult::Done(i2,_) => { if i2.input_len() == input.input_len() { ret = $crate::IResult::Done(input, res); break; } // get the element next match $submac!(i2, $($args2)*) { $crate::IResult::Error(_) => { ret = $crate::IResult::Done(input, res); break; }, $crate::IResult::Incomplete($crate::Needed::Unknown) => { ret = $crate::IResult::Incomplete($crate::Needed::Unknown); break; }, $crate::IResult::Incomplete($crate::Needed::Size(needed)) => { let (size,overflowed) = needed.overflowing_add(($i).input_len() - i2.input_len()); ret = match overflowed { true => $crate::IResult::Incomplete($crate::Needed::Unknown), false => $crate::IResult::Incomplete($crate::Needed::Size(size)), }; break; }, $crate::IResult::Done(i3,o3) => { if i3.input_len() == i2.input_len() { ret = $crate::IResult::Done(input, res); break; } res.push(o3); input = i3; } } } } } ret } }, } } ); ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => ( separated_nonempty_list!($i, $submac!($($args)*), call!($g)); ); ($i:expr, $f:expr, $submac:ident!( $($args:tt)* )) => ( separated_nonempty_list!($i, call!($f), $submac!($($args)*)); ); ($i:expr, $f:expr, $g:expr) => ( separated_nonempty_list!($i, call!($f), call!($g)); ); ); /// `separated_list_complete!(I -> IResult<I,T>, I -> IResult<I,O>) => I -> IResult<I, Vec<O>>` /// This is equivalent to the `separated_list!` combinator, except that it will return `Error` /// when either the separator or element subparser returns `Incomplete`. #[macro_export] macro_rules! 
separated_list_complete {
  ($i:expr, $sep:ident!( $($args:tt)* ), $submac:ident!( $($args2:tt)* )) => ({
    // Wrapping both sub-parsers in `complete!` turns their `Incomplete` into `Error`.
    separated_list!($i, complete!($sep!($($args)*)), complete!($submac!($($args2)*)))
  });

  ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => (
    separated_list_complete!($i, $submac!($($args)*), call!($g));
  );
  ($i:expr, $f:expr, $submac:ident!( $($args:tt)* )) => (
    separated_list_complete!($i, call!($f), $submac!($($args)*));
  );
  ($i:expr, $f:expr, $g:expr) => (
    separated_list_complete!($i, call!($f), call!($g));
  );
}

/// `separated_nonempty_list_complete!(I -> IResult<I,T>, I -> IResult<I,O>) => I -> IResult<I, Vec<O>>`
/// This is equivalent to the `separated_nonempty_list!` combinator, except that it will return
/// `Error` when either the separator or element subparser returns `Incomplete`.
#[macro_export]
macro_rules! separated_nonempty_list_complete {
  ($i:expr, $sep:ident!( $($args:tt)* ), $submac:ident!( $($args2:tt)* )) => ({
    separated_nonempty_list!($i, complete!($sep!($($args)*)), complete!($submac!($($args2)*)))
  });

  ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => (
    separated_nonempty_list_complete!($i, $submac!($($args)*), call!($g));
  );
  ($i:expr, $f:expr, $submac:ident!( $($args:tt)* )) => (
    separated_nonempty_list_complete!($i, call!($f), $submac!($($args)*));
  );
  ($i:expr, $f:expr, $g:expr) => (
    separated_nonempty_list_complete!($i, call!($f), call!($g));
  );
}

/// `many0!(I -> IResult<I,O>) => I -> IResult<I, Vec<O>>`
/// Applies the parser 0 or more times and returns the list of results in a Vec
///
/// the embedded parser may return Incomplete
///
/// ```
/// # #[macro_use] extern crate nom;
/// # use nom::IResult::Done;
/// # fn main() {
///  named!(multi<&[u8], Vec<&[u8]> >, many0!( tag!( "abcd" ) ) );
///
///  let a = b"abcdabcdefgh";
///  let b = b"azerty";
///
///  let res = vec![&b"abcd"[..], &b"abcd"[..]];
///  assert_eq!(multi(&a[..]), Done(&b"efgh"[..], res));
///  assert_eq!(multi(&b[..]), Done(&b"azerty"[..], Vec::new()));
/// # }
/// ```
/// 0 or more
#[macro_export]
macro_rules! many0(
  ($i:expr, $submac:ident!( $($args:tt)* )) => (
    {
      use $crate::InputLength;

      let ret;
      let mut res   = ::std::vec::Vec::new();
      let mut input = $i.clone();

      loop {
        // Exhausted input ends the repetition successfully.
        if input.input_len() == 0 {
          ret = $crate::IResult::Done(input, res);
          break;
        }

        let input_ = input.clone();
        match $submac!(input_, $($args)*) {
          $crate::IResult::Error(_) => {
            // First failure ends the (possibly empty) list.
            ret = $crate::IResult::Done(input, res);
            break;
          },
          $crate::IResult::Incomplete($crate::Needed::Unknown) => {
            ret = $crate::IResult::Incomplete($crate::Needed::Unknown);
            break;
          },
          $crate::IResult::Incomplete($crate::Needed::Size(i)) => {
            // Rebase the needed count onto the original input `$i`.
            let (size,overflowed) = i.overflowing_add(($i).input_len() - input.input_len());
            ret = match overflowed {
              true  => $crate::IResult::Incomplete($crate::Needed::Unknown),
              false => $crate::IResult::Incomplete($crate::Needed::Size(size)),
            };
            break;
          },
          $crate::IResult::Done(i, o) => {
            // loop trip must always consume (otherwise infinite loops)
            if i == input {
              ret = $crate::IResult::Error(error_position!($crate::ErrorKind::Many0,input));
              break;
            }

            res.push(o);
            input = i;
          }
        }
      }

      ret
    }
  );
  ($i:expr, $f:expr) => (
    many0!($i, call!($f));
  );
);

/// `many1!(I -> IResult<I,O>) => I -> IResult<I, Vec<O>>`
/// Applies the parser 1 or more times and returns the list of results in a Vec
///
/// the embedded parser may return Incomplete
///
/// ```
/// # #[macro_use] extern crate nom;
/// # use nom::IResult::{Done, Error};
/// # #[cfg(feature = "verbose-errors")]
/// # use nom::Err::Position;
/// # use nom::ErrorKind;
/// # fn main() {
///  named!(multi<&[u8], Vec<&[u8]> >, many1!( tag!( "abcd" ) ) );
///
///  let a = b"abcdabcdefgh";
///  let b = b"azerty";
///
///  let res = vec![&b"abcd"[..], &b"abcd"[..]];
///  assert_eq!(multi(&a[..]), Done(&b"efgh"[..], res));
///  assert_eq!(multi(&b[..]), Error(error_position!(ErrorKind::Many1,&b[..])));
/// # }
/// ```
#[macro_export]
macro_rules! many1(
  ($i:expr, $submac:ident!( $($args:tt)* )) => (
    {
      use $crate::InputLength;

      let i_ = $i.clone();
      match $submac!(i_, $($args)*) {
        // The first element is mandatory.
        $crate::IResult::Error(_)      => $crate::IResult::Error(
          error_position!($crate::ErrorKind::Many1,$i)
        ),
        $crate::IResult::Incomplete(i) => $crate::IResult::Incomplete(i),
        $crate::IResult::Done(i1,o1)   => {
          if i1.input_len() == 0 {
            let mut res = ::std::vec::Vec::new();
            res.push(o1);
            $crate::IResult::Done(i1,res)
          } else {
            let mut res = ::std::vec::Vec::with_capacity(4);
            res.push(o1);
            let mut input  = i1;
            // Remember a pending Incomplete and report it after the loop.
            let mut incomplete: ::std::option::Option<$crate::Needed> =
              ::std::option::Option::None;
            loop {
              if input.input_len() == 0 {
                break;
              }
              let input_ = input.clone();
              match $submac!(input_, $($args)*) {
                $crate::IResult::Error(_) => {
                  break;
                },
                $crate::IResult::Incomplete($crate::Needed::Unknown) => {
                  incomplete = ::std::option::Option::Some($crate::Needed::Unknown);
                  break;
                },
                $crate::IResult::Incomplete($crate::Needed::Size(i)) => {
                  // Rebase the needed count onto the original input `$i`.
                  let (size,overflowed) = i.overflowing_add(($i).input_len() - input.input_len());
                  incomplete = ::std::option::Option::Some(
                    match overflowed {
                        true  => $crate::Needed::Unknown,
                        false => $crate::Needed::Size(size),
                    }
                  );
                  break;
                },
                $crate::IResult::Done(i, o) => {
                  // A parser that consumed nothing ends the repetition.
                  if i.input_len() == input.input_len() {
                    break;
                  }
                  res.push(o);
                  input = i;
                }
              }
            }

            match incomplete {
              ::std::option::Option::Some(i) => $crate::IResult::Incomplete(i),
              ::std::option::Option::None    => $crate::IResult::Done(input, res)
            }
          }
        }
      }
    }
  );
  ($i:expr, $f:expr) => (
    many1!($i, call!($f));
  );
);

/// `many_till!(I -> IResult<I,O>, I -> IResult<I,P>) => I -> IResult<I, (Vec<O>, P)>`
/// Applies the first parser until the second applies. Returns a tuple containing the list
/// of results from the first in a Vec and the result of the second.
///
/// The first embedded parser may return Incomplete
///
/// ```
/// # #[macro_use] extern crate nom;
/// # use nom::IResult::{Done, Error};
/// # #[cfg(feature = "verbose-errors")]
/// # use nom::Err::Position;
/// # use nom::ErrorKind;
/// # fn main() {
///  named!(multi<&[u8], (Vec<&[u8]>, &[u8]) >, many_till!( tag!( "abcd" ), tag!( "efgh" ) ) );
///
///  let a = b"abcdabcdefghabcd";
///  let b = b"efghabcd";
///  let c = b"azerty";
///
///  let res_a = (vec![&b"abcd"[..], &b"abcd"[..]], &b"efgh"[..]);
///  let res_b: (Vec<&[u8]>, &[u8]) = (Vec::new(), &b"efgh"[..]);
///  assert_eq!(multi(&a[..]), Done(&b"abcd"[..], res_a));
///  assert_eq!(multi(&b[..]), Done(&b"abcd"[..], res_b));
///  assert_eq!(multi(&c[..]), Error(error_node_position!(ErrorKind::ManyTill,&c[..],error_position(ErrorKind::Tag,&c[..]))));
/// # }
/// ```
#[macro_export]
macro_rules! many_till(
  ($i:expr, $submac1:ident!( $($args1:tt)* ), $submac2:ident!( $($args2:tt)* )) => (
    {
      use $crate::InputLength;

      let ret;
      let mut res   = ::std::vec::Vec::new();
      let mut input = $i.clone();

      loop {
        // Try the terminating parser first.
        match $submac2!(input, $($args2)*) {
          $crate::IResult::Done(i, o) => {
            ret = $crate::IResult::Done(i, (res, o));
            break;
          },
          _                           => {
            // Terminator didn't match (or was incomplete): parse one more element.
            match $submac1!(input, $($args1)*) {
              $crate::IResult::Error(err)                            => {
                ret = $crate::IResult::Error(error_node_position!($crate::ErrorKind::ManyTill,input, err));
                break;
              },
              $crate::IResult::Incomplete($crate::Needed::Unknown)   => {
                ret = $crate::IResult::Incomplete($crate::Needed::Unknown);
                break;
              },
              $crate::IResult::Incomplete($crate::Needed::Size(i))   => {
                // Rebase the needed count onto the original input `$i`.
                let (size,overflowed) = i.overflowing_add(($i).input_len() - input.input_len());
                ret = match overflowed {
                  true  => $crate::IResult::Incomplete($crate::Needed::Unknown),
                  false => $crate::IResult::Incomplete($crate::Needed::Size(size)),
                };
                break;
              },
              $crate::IResult::Done(i, o)                            => {
                // loop trip must always consume (otherwise infinite loops)
                if i == input {
                  ret = $crate::IResult::Error(error_position!($crate::ErrorKind::ManyTill,input));
                  break;
                }

                res.push(o);
                input = i;
              },
            }
          },
        }
      }

      ret
    }
  );
  ($i:expr, $f:expr, $g: expr) => (
    many_till!($i, call!($f), call!($g));
  );
);

/// `many_m_n!(usize, usize, I -> IResult<I,O>) => I -> IResult<I, Vec<O>>`
/// Applies the parser between m and n times (n included) and returns the list of
/// results in a Vec
///
/// the embedded parser may return Incomplete
///
/// ```
/// # #[macro_use] extern crate nom;
/// # use nom::IResult::{Done, Error};
/// # #[cfg(feature = "verbose-errors")]
/// # use nom::Err::Position;
/// # use nom::ErrorKind;
/// # fn main() {
///  named!(multi<&[u8], Vec<&[u8]> >, many_m_n!(2, 4, tag!( "abcd" ) ) );
///
///  let a = b"abcdefgh";
///  let b = b"abcdabcdefgh";
///  let c = b"abcdabcdabcdabcdabcdefgh";
///
///  assert_eq!(multi(&a[..]),Error(error_position!(ErrorKind::ManyMN,&a[..])));
///  let res = vec![&b"abcd"[..], &b"abcd"[..]];
///  assert_eq!(multi(&b[..]), Done(&b"efgh"[..], res));
///  let res2 = vec![&b"abcd"[..], &b"abcd"[..], &b"abcd"[..], &b"abcd"[..]];
///  assert_eq!(multi(&c[..]), Done(&b"abcdefgh"[..], res2));
/// # }
/// ```
#[macro_export]
macro_rules!
many_m_n( ($i:expr, $m:expr, $n: expr, $submac:ident!( $($args:tt)* )) => ( { use $crate::InputLength; let mut res = ::std::vec::Vec::with_capacity($m); let mut input = $i.clone(); let mut count: usize = 0; let mut err = false; let mut incomplete: ::std::option::Option<$crate::Needed> = ::std::option::Option::None; loop { if count == $n { break } let i_ = input.clone(); match $submac!(i_, $($args)*) { $crate::IResult::Done(i, o) => { // do not allow parsers that do not consume input (causes infinite loops) if i.input_len() == input.input_len() { break; } res.push(o); input = i; count += 1; } $crate::IResult::Error(_) => { err = true; break; }, $crate::IResult::Incomplete($crate::Needed::Unknown) => { incomplete = ::std::option::Option::Some($crate::Needed::Unknown); break; }, $crate::IResult::Incomplete($crate::Needed::Size(i)) => { let (size,overflowed) = i.overflowing_add($i.input_len() - input.input_len()); incomplete = ::std::option::Option::Some( match overflowed { true => $crate::Needed::Unknown, false => $crate::Needed::Size(size), } ); break; }, } if input.input_len() == 0 { break; } } if count < $m { if err { $crate::IResult::Error(error_position!($crate::ErrorKind::ManyMN,$i)) } else { match incomplete { ::std::option::Option::Some(i) => $crate::IResult::Incomplete(i), ::std::option::Option::None => $crate::IResult::Incomplete( $crate::Needed::Unknown ) } } } else { match incomplete { ::std::option::Option::Some(i) => $crate::IResult::Incomplete(i), ::std::option::Option::None => $crate::IResult::Done(input, res) } } } ); ($i:expr, $m:expr, $n: expr, $f:expr) => ( many_m_n!($i, $m, $n, call!($f)); ); ); /// `count!(I -> IResult<I,O>, nb) => I -> IResult<I, Vec<O>>` /// Applies the child parser a specified number of times /// /// ``` /// # #[macro_use] extern crate nom; /// # use nom::IResult::{Done,Error}; /// # #[cfg(feature = "verbose-errors")] /// # use nom::Err::Position; /// # use nom::ErrorKind; /// # fn main() { /// named!(counter< Vec<&[u8]> >, 
count!( tag!( "abcd" ), 2 ) ); /// /// let a = b"abcdabcdabcdef"; /// let b = b"abcdefgh"; /// let res = vec![&b"abcd"[..], &b"abcd"[..]]; /// /// assert_eq!(counter(&a[..]), Done(&b"abcdef"[..], res)); /// assert_eq!(counter(&b[..]), Error(error_position!(ErrorKind::Count, &b[..]))); /// # } /// ``` /// #[macro_export] macro_rules! count( ($i:expr, $submac:ident!( $($args:tt)* ), $count: expr) => ( { let ret: $crate::IResult<_,_>; let mut input = $i.clone(); let mut res = ::std::vec::Vec::new(); loop { if res.len() == $count { ret = $crate::IResult::Done(input, res); break; } let input_ = input.clone(); match $submac!(input_, $($args)*) { $crate::IResult::Done(i,o) => { res.push(o); input = i; }, $crate::IResult::Error(_) => { ret = $crate::IResult::Error(error_position!($crate::ErrorKind::Count,$i)); break; }, $crate::IResult::Incomplete($crate::Needed::Unknown) => { ret = $crate::IResult::Incomplete($crate::Needed::Unknown); break; } $crate::IResult::Incomplete($crate::Needed::Size(sz)) => { let (size,overflowed) = sz.overflowing_add( $crate::InputLength::input_len(&($i)) - $crate::InputLength::input_len(&input) ); ret = match overflowed { true => $crate::IResult::Incomplete($crate::Needed::Unknown), false => $crate::IResult::Incomplete($crate::Needed::Size(size)), }; break; } } } ret } ); ($i:expr, $f:expr, $count: expr) => ( count!($i, call!($f), $count); ); ); /// `count_fixed!(O, I -> IResult<I,O>, nb) => I -> IResult<I, [O; nb]>` /// Applies the child parser a fixed number of times and returns a fixed size array /// The type must be specified and it must be `Copy` /// /// ``` /// # #[macro_use] extern crate nom; /// # use nom::IResult::{Done,Error}; /// # #[cfg(feature = "verbose-errors")] /// # use nom::Err::Position; /// # use nom::ErrorKind; /// # fn main() { /// named!(counter< [&[u8]; 2] >, count_fixed!( &[u8], tag!( "abcd" ), 2 ) ); /// // can omit the type specifier if returning slices /// // named!(counter< [&[u8]; 2] >, count_fixed!( tag!( "abcd" 
), 2 ) ); /// /// let a = b"abcdabcdabcdef"; /// let b = b"abcdefgh"; /// let res = [&b"abcd"[..], &b"abcd"[..]]; /// /// assert_eq!(counter(&a[..]), Done(&b"abcdef"[..], res)); /// assert_eq!(counter(&b[..]), Error(error_position!(ErrorKind::Count, &b[..]))); /// # } /// ``` /// #[macro_export] macro_rules! count_fixed ( ($i:expr, $typ:ty, $submac:ident!( $($args:tt)* ), $count: expr) => ( { let ret; let mut input = $i.clone(); // `$typ` must be Copy, and thus having no destructor, this is panic safe let mut res: [$typ; $count] = unsafe{[::std::mem::uninitialized(); $count as usize]}; let mut cnt: usize = 0; loop { if cnt == $count { ret = $crate::IResult::Done(input, res); break; } match $submac!(input, $($args)*) { $crate::IResult::Done(i,o) => { res[cnt] = o; cnt += 1; input = i; }, $crate::IResult::Error(_) => { ret = $crate::IResult::Error(error_position!($crate::ErrorKind::Count,$i)); break; }, $crate::IResult::Incomplete($crate::Needed::Unknown) => { ret = $crate::IResult::Incomplete($crate::Needed::Unknown); break; } $crate::IResult::Incomplete($crate::Needed::Size(sz)) => { let (size,overflowed) = sz.overflowing_add( $crate::InputLength::input_len(&($i)) - $crate::InputLength::input_len(&input) ); ret = match overflowed { true => $crate::IResult::Incomplete($crate::Needed::Unknown), false => $crate::IResult::Incomplete($crate::Needed::Size(size)), }; break; } } } ret } ); ($i:expr, $typ: ty, $f:ident, $count: expr) => ( count_fixed!($i, $typ, call!($f), $count); ); ); /// `length_count!(I -> IResult<I, nb>, I -> IResult<I,O>) => I -> IResult<I, Vec<O>>` /// gets a number from the first parser, then applies the second parser that many times #[macro_export] macro_rules! 
length_count( ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => ( { match $submac!($i, $($args)*) { $crate::IResult::Error(e) => $crate::IResult::Error(e), $crate::IResult::Incomplete(i) => $crate::IResult::Incomplete(i), $crate::IResult::Done(i, o) => { match count!(i, $submac2!($($args2)*), o as usize) { $crate::IResult::Error(e) => $crate::IResult::Error(e), $crate::IResult::Incomplete($crate::Needed::Unknown) => $crate::IResult::Incomplete($crate::Needed::Unknown), $crate::IResult::Incomplete($crate::Needed::Size(n)) => { let (size,overflowed) = n.overflowing_add( $crate::InputLength::input_len(&($i)) - $crate::InputLength::input_len(&i) ); match overflowed { true => $crate::IResult::Incomplete($crate::Needed::Unknown), false => $crate::IResult::Incomplete($crate::Needed::Size(size)), } }, $crate::IResult::Done(i2, o2) => $crate::IResult::Done(i2, o2) } } } } ); ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => ( length_count!($i, $submac!($($args)*), call!($g)); ); ($i:expr, $f:expr, $submac:ident!( $($args:tt)* )) => ( length_count!($i, call!($f), $submac!($($args)*)); ); ($i:expr, $f:expr, $g:expr) => ( length_count!($i, call!($f), call!($g)); ); ); /// `length_data!(I -> IResult<I, nb>) => O` /// /// `length_data` gets a number from the first parser, than takes a subslice of the input /// of that size, and returns that subslice #[macro_export] macro_rules! 
length_data( ($i:expr, $submac:ident!( $($args:tt)* )) => ( match $submac!($i, $($args)*) { $crate::IResult::Error(e) => $crate::IResult::Error(e), $crate::IResult::Incomplete(i) => $crate::IResult::Incomplete(i), $crate::IResult::Done(i, o) => { match take!(i, o as usize) { $crate::IResult::Error(e) => $crate::IResult::Error(e), $crate::IResult::Incomplete($crate::Needed::Unknown) => $crate::IResult::Incomplete($crate::Needed::Unknown), $crate::IResult::Incomplete($crate::Needed::Size(n)) => { let (size,overflowed) = n.overflowing_add( $crate::InputLength::input_len(&($i)) - $crate::InputLength::input_len(&i) ); match overflowed { true => $crate::IResult::Incomplete($crate::Needed::Unknown), false => $crate::IResult::Incomplete($crate::Needed::Size(size)), } }, $crate::IResult::Done(i2, o2) => $crate::IResult::Done(i2, o2) } } } ); ($i:expr, $f:expr) => ( length_data!($i, call!($f)); ); ); /// `length_value!(I -> IResult<I, nb>, I -> IResult<I,O>) => I -> IResult<I, Vec<O>>` /// gets a number from the first parser, takes a subslice of the input of that size, /// then applies the second parser on that subslice. If the second parser returns /// `Incomplete`, `length_value` will return an error #[macro_export] macro_rules! 
length_value( ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => ( { match $submac!($i, $($args)*) { $crate::IResult::Error(e) => $crate::IResult::Error(e), $crate::IResult::Incomplete(i) => $crate::IResult::Incomplete(i), $crate::IResult::Done(i, o) => { match take!(i, o as usize) { $crate::IResult::Error(e) => $crate::IResult::Error(e), $crate::IResult::Incomplete($crate::Needed::Unknown) => $crate::IResult::Incomplete($crate::Needed::Unknown), $crate::IResult::Incomplete($crate::Needed::Size(n)) => { let (size,overflowed) = n.overflowing_add( $crate::InputLength::input_len(&($i)) - $crate::InputLength::input_len(&i) ); match overflowed { true => $crate::IResult::Incomplete($crate::Needed::Unknown), false => $crate::IResult::Incomplete($crate::Needed::Size(size)), } }, $crate::IResult::Done(i2, o2) => { match complete!(o2, $submac2!($($args2)*)) { $crate::IResult::Error(e) => $crate::IResult::Error(e), $crate::IResult::Incomplete(i) => $crate::IResult::Incomplete(i), $crate::IResult::Done(_, o3) => $crate::IResult::Done(i2, o3) } } } } } } ); ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => ( length_value!($i, $submac!($($args)*), call!($g)); ); ($i:expr, $f:expr, $submac:ident!( $($args:tt)* )) => ( length_value!($i, call!($f), $submac!($($args)*)); ); ($i:expr, $f:expr, $g:expr) => ( length_value!($i, call!($f), call!($g)); ); ); /// `fold_many0!(I -> IResult<I,O>, R, Fn(R, O) -> R) => I -> IResult<I, R>` /// Applies the parser 0 or more times and folds the list of return values /// /// the embedded parser may return Incomplete /// /// ``` /// # #[macro_use] extern crate nom; /// # use nom::IResult::Done; /// # fn main() { /// named!(multi<&[u8], Vec<&[u8]> >, /// fold_many0!( tag!( "abcd" ), Vec::new(), |mut acc: Vec<_>, item| { /// acc.push(item); /// acc /// })); /// /// let a = b"abcdabcdefgh"; /// let b = b"azerty"; /// /// let res = vec![&b"abcd"[..], &b"abcd"[..]]; /// assert_eq!(multi(&a[..]), Done(&b"efgh"[..], res)); 
/// assert_eq!(multi(&b[..]), Done(&b"azerty"[..], Vec::new()));
/// # }
/// ```
/// 0 or more
#[macro_export]
macro_rules! fold_many0(
  ($i:expr, $submac:ident!( $($args:tt)* ), $init:expr, $f:expr) => (
    {
      use $crate::InputLength;
      // `res` accumulates the fold; `ret` carries the final IResult out of the loop.
      let ret;
      let f = $f;
      let mut res   = $init;
      let mut input = $i.clone();

      loop {
        // An exhausted rest is a normal, successful end of iteration.
        if input.input_len() == 0 {
          ret = $crate::IResult::Done(input, res);
          break;
        }

        match $submac!(input, $($args)*) {
          // An error just stops the repetition: zero matches are allowed.
          $crate::IResult::Error(_) => {
            ret = $crate::IResult::Done(input, res);
            break;
          },
          $crate::IResult::Incomplete($crate::Needed::Unknown) => {
            ret = $crate::IResult::Incomplete($crate::Needed::Unknown);
            break;
          },
          $crate::IResult::Incomplete($crate::Needed::Size(i)) => {
            // Rebase the needed size onto the original input; overflow means the
            // exact amount can no longer be expressed.
            let (size,overflowed) = i.overflowing_add( ($i).input_len() - input.input_len() );
            ret = match overflowed {
              true  => $crate::IResult::Incomplete($crate::Needed::Unknown),
              false => $crate::IResult::Incomplete($crate::Needed::Size(size)),
            };
            break;
          },
          $crate::IResult::Done(i, o) => {
            // loop trip must always consume (otherwise infinite loops)
            if i == input {
              ret = $crate::IResult::Error(
                error_position!($crate::ErrorKind::Many0,input)
              );
              break;
            }

            res = f(res, o);
            input = i;
          }
        }
      }

      ret
    }
  );
  ($i:expr, $f:expr, $init:expr, $fold_f:expr) => (
    fold_many0!($i, call!($f), $init, $fold_f);
  );
);

/// `fold_many1!(I -> IResult<I,O>, R, Fn(R, O) -> R) => I -> IResult<I, R>`
/// Applies the parser 1 or more times and folds the list of return values
///
/// the embedded parser may return Incomplete
///
/// ```
/// # #[macro_use] extern crate nom;
/// # use nom::IResult::{Done, Error};
/// # #[cfg(feature = "verbose-errors")]
/// # use nom::Err::Position;
/// # use nom::ErrorKind;
/// # fn main() {
/// named!(multi<&[u8], Vec<&[u8]> >,
///   fold_many1!( tag!( "abcd" ), Vec::new(), |mut acc: Vec<_>, item| {
///     acc.push(item);
///     acc
///   }));
///
/// let a = b"abcdabcdefgh";
/// let b = b"azerty";
///
/// let res = vec![&b"abcd"[..], &b"abcd"[..]];
/// assert_eq!(multi(&a[..]), Done(&b"efgh"[..], res));
/// assert_eq!(multi(&b[..]), Error(error_position!(ErrorKind::Many1,&b[..])));
/// # }
/// ```
#[macro_export]
macro_rules! fold_many1(
  ($i:expr, $submac:ident!( $($args:tt)* ), $init:expr, $f:expr) => (
    {
      use $crate::InputLength;
      // The first application must succeed, otherwise Many1 is an error.
      match $submac!($i, $($args)*) {
        $crate::IResult::Error(_)      => $crate::IResult::Error(
          error_position!($crate::ErrorKind::Many1,$i)
        ),
        $crate::IResult::Incomplete(i) => $crate::IResult::Incomplete(i),
        $crate::IResult::Done(i1,o1)   => {
          let acc = $init;
          let f   = $f;
          if i1.input_len() == 0 {
            // Input exhausted after the first match: fold it and finish.
            let acc = f(acc, o1);
            $crate::IResult::Done(i1,acc)
          } else {
            let mut acc   = f(acc, o1);
            let mut input = i1;
            let mut incomplete: ::std::option::Option<$crate::Needed> = ::std::option::Option::None;
            loop {
              if input.input_len() == 0 {
                break;
              }
              match $submac!(input, $($args)*) {
                // Later errors simply end the repetition.
                $crate::IResult::Error(_) => {
                  break;
                },
                $crate::IResult::Incomplete($crate::Needed::Unknown) => {
                  incomplete = ::std::option::Option::Some($crate::Needed::Unknown);
                  break;
                },
                $crate::IResult::Incomplete($crate::Needed::Size(i)) => {
                  // Rebase the needed size onto the original input (see fold_many0).
                  let (size,overflowed) = i.overflowing_add( ($i).input_len() - input.input_len() );
                  incomplete = ::std::option::Option::Some(
                    match overflowed {
                      true  => $crate::Needed::Unknown,
                      false => $crate::Needed::Size(size),
                    }
                  );
                  break;
                },
                $crate::IResult::Done(i, o) => {
                  // A parser that consumes nothing would loop forever: stop.
                  if i.input_len() == input.input_len() {
                    break;
                  }
                  acc = f(acc, o);
                  input = i;
                }
              }
            }

            match incomplete {
              ::std::option::Option::Some(i) => $crate::IResult::Incomplete(i),
              ::std::option::Option::None    => $crate::IResult::Done(input, acc)
            }
          }
        }
      }
    }
  );
  ($i:expr, $f:expr, $init:expr, $fold_f:expr) => (
    fold_many1!($i, call!($f), $init, $fold_f);
  );
);

/// `fold_many_m_n!(usize, usize, I -> IResult<I,O>, R, Fn(R, O) -> R) => I -> IResult<I, R>`
/// Applies the parser between m and n times (n included) and folds the list of return value
///
/// the embedded parser may return Incomplete
///
/// ```
/// # #[macro_use] extern crate nom;
/// # use nom::IResult::{Done, Error};
/// # #[cfg(feature = "verbose-errors")]
/// # use nom::Err::Position;
/// # use nom::ErrorKind;
/// # fn main() {
/// named!(multi<&[u8], Vec<&[u8]> >,
///   fold_many_m_n!(2, 4, tag!( "abcd" ), Vec::new(), |mut acc: Vec<_>, item| {
///     acc.push(item);
///     acc
///   }));
///
/// let a = b"abcdefgh";
/// let b = b"abcdabcdefgh";
/// let c = b"abcdabcdabcdabcdabcdefgh";
///
/// assert_eq!(multi(&a[..]),Error(error_position!(ErrorKind::ManyMN,&a[..])));
/// let res = vec![&b"abcd"[..], &b"abcd"[..]];
/// assert_eq!(multi(&b[..]), Done(&b"efgh"[..], res));
/// let res2 = vec![&b"abcd"[..], &b"abcd"[..], &b"abcd"[..], &b"abcd"[..]];
/// assert_eq!(multi(&c[..]), Done(&b"abcdefgh"[..], res2));
/// # }
/// ```
#[macro_export]
macro_rules! fold_many_m_n(
  ($i:expr, $m:expr, $n: expr, $submac:ident!( $($args:tt)* ), $init:expr, $f:expr) => (
    {
      use $crate::InputLength;
      let mut acc   = $init;
      let f         = $f;
      let mut input = $i.clone();
      let mut count: usize = 0;
      let mut err = false;
      let mut incomplete: ::std::option::Option<$crate::Needed> = ::std::option::Option::None;
      loop {
        // Upper bound reached: stop folding.
        if count == $n { break }
        match $submac!(input, $($args)*) {
          $crate::IResult::Done(i, o) => {
            // do not allow parsers that do not consume input (causes infinite loops)
            if i.input_len() == input.input_len() {
              break;
            }
            acc = f(acc, o);
            input = i;
            count += 1;
          }
          $crate::IResult::Error(_) => {
            err = true;
            break;
          },
          $crate::IResult::Incomplete($crate::Needed::Unknown) => {
            incomplete = ::std::option::Option::Some($crate::Needed::Unknown);
            break;
          },
          $crate::IResult::Incomplete($crate::Needed::Size(i)) => {
            // Rebase the needed size onto the original input; overflow makes the
            // exact amount unrepresentable.
            let (size,overflowed) = i.overflowing_add( ($i).input_len() - input.input_len() );
            incomplete = ::std::option::Option::Some(
              match overflowed {
                true  => $crate::Needed::Unknown,
                false => $crate::Needed::Size(size),
              }
            );
            break;
          },
        }
        if input.input_len() == 0 {
          break;
        }
      }
      // Fewer than the required minimum of matches: report error or Incomplete.
      if count < $m {
        if err {
          $crate::IResult::Error(error_position!($crate::ErrorKind::ManyMN,$i))
        } else {
          match incomplete {
            ::std::option::Option::Some(i) => $crate::IResult::Incomplete(i),
            ::std::option::Option::None    => $crate::IResult::Incomplete($crate::Needed::Unknown)
          }
        }
      } else {
        match incomplete {
          ::std::option::Option::Some(i) => $crate::IResult::Incomplete(i),
          ::std::option::Option::None    => $crate::IResult::Done(input, acc)
        }
      }
    }
  );
  ($i:expr, $m:expr, $n: expr, $f:expr, $init:expr, $fold_f:expr) => (
    fold_many_m_n!($i, $m, $n, call!($f), $init, $fold_f);
  );
);

#[cfg(test)]
mod tests {
  use internal::{Needed,IResult};
  use internal::IResult::*;
  use util::ErrorKind;
  use nom::{alpha,be_u8,be_u16,le_u16,digit};
  use std::str::{self,FromStr};

  // reproduce the tag and take macros, because of module import order
  macro_rules! tag (
    ($i:expr, $inp: expr) => (
      {
        #[inline(always)]
        fn as_bytes<T: $crate::AsBytes>(b: &T) -> &[u8] {
          b.as_bytes()
        }

        let expected = $inp;
        let bytes    = as_bytes(&expected);

        tag_bytes!($i,bytes)
      }
    );
  );

  macro_rules! tag_bytes (
    ($i:expr, $bytes: expr) => (
      {
        use std::cmp::min;
        let len  = $i.len();
        let blen = $bytes.len();
        let m    = min(len, blen);
        let reduced = &$i[..m];
        let b       = &$bytes[..m];

        let res: $crate::IResult<_,_> = if reduced != b {
          $crate::IResult::Error(error_position!($crate::ErrorKind::Tag, $i))
        } else if m < blen {
          $crate::IResult::Incomplete($crate::Needed::Size(blen))
        } else {
          $crate::IResult::Done(&$i[blen..], reduced)
        };
        res
      }
    );
  );

  macro_rules! take(
    ($i:expr, $count:expr) => (
      {
        let cnt = $count as usize;
        let res:$crate::IResult<&[u8],&[u8]> = if $i.len() < cnt {
          $crate::IResult::Incomplete($crate::Needed::Size(cnt))
        } else {
          $crate::IResult::Done(&$i[cnt..],&$i[0..cnt])
        };
        res
      }
    )
  );

  #[test]
  #[cfg(feature = "std")]
  fn separated_list() {
    named!(multi<&[u8],Vec<&[u8]> >, separated_list!(tag!(","), tag!("abcd")));
    named!(multi_empty<&[u8],Vec<&[u8]> >, separated_list!(tag!(","), tag!("")));
    named!(multi_longsep<&[u8],Vec<&[u8]> >, separated_list!(tag!(".."), tag!("abcd")));

    let a = &b"abcdef"[..];
    let b = &b"abcd,abcdef"[..];
    let c = &b"azerty"[..];
    let d = &b",,abc"[..];
    let e = &b"abcd,abcd,ef"[..];
    let f = &b"abc"[..];
    let g = &b"abcd."[..];
    let h = &b"abcd,abc"[..];

    let res1 = vec![&b"abcd"[..]];
    assert_eq!(multi(a), Done(&b"ef"[..], res1));
    let res2 = vec![&b"abcd"[..], &b"abcd"[..]];
    assert_eq!(multi(b), Done(&b"ef"[..], res2));
    assert_eq!(multi(c), Done(&b"azerty"[..], Vec::new()));
    assert_eq!(multi_empty(d), Error(error_position!(ErrorKind::SeparatedList, d)));
    //let res3 = vec![&b""[..], &b""[..], &b""[..]];
    //assert_eq!(multi_empty(d), Done(&b"abc"[..], res3));
    let res4 = vec![&b"abcd"[..], &b"abcd"[..]];
    assert_eq!(multi(e), Done(&b",ef"[..], res4));
    assert_eq!(multi(f), Incomplete(Needed::Size(4)));
    assert_eq!(multi_longsep(g), Incomplete(Needed::Size(6)));
    assert_eq!(multi(h), Incomplete(Needed::Size(9)));
  }

  #[test]
  #[cfg(feature = "std")]
  fn separated_list_complete() {
    named!(multi<&[u8],Vec<&[u8]> >, separated_list_complete!(tag!(","), alpha));
    let a = &b"abcdef"[..];
    let b = &b"abcd,abcdef"[..];
    let c = &b"abcd,abcd,ef"[..];
    let d = &b"abc."[..];
    let e = &b"abcd,ef."[..];
    let f = &b"123"[..];

    assert_eq!(multi(a), Done(&b""[..], vec!(a)));
    assert_eq!(multi(b), Done(&b""[..], vec!(&b"abcd"[..], &b"abcdef"[..])));
    assert_eq!(multi(c), Done(&b""[..], vec!(&b"abcd"[..], &b"abcd"[..], &b"ef"[..])));
    assert_eq!(multi(d), Done(&b"."[..], vec!(&b"abc"[..])));
    assert_eq!(multi(e), Done(&b"."[..], vec!(&b"abcd"[..], &b"ef"[..])));
    assert_eq!(multi(f), Done(&b"123"[..], Vec::new()));
  }

  #[test]
  #[cfg(feature = "std")]
  fn separated_nonempty_list() {
    named!(multi<&[u8],Vec<&[u8]> >, separated_nonempty_list!(tag!(","), tag!("abcd")));
    named!(multi_longsep<&[u8],Vec<&[u8]> >, separated_nonempty_list!(tag!(".."), tag!("abcd")));

    let a = &b"abcdef"[..];
    let b = &b"abcd,abcdef"[..];
    let c = &b"azerty"[..];
    let d = &b"abcd,abcd,ef"[..];
    let f = &b"abc"[..];
    let g = &b"abcd."[..];
    let h = &b"abcd,abc"[..];

    let res1 = vec![&b"abcd"[..]];
    assert_eq!(multi(a), Done(&b"ef"[..], res1));
    let res2 = vec![&b"abcd"[..], &b"abcd"[..]];
    assert_eq!(multi(b), Done(&b"ef"[..], res2));
    assert_eq!(multi(c), Error(error_position!(ErrorKind::Tag,c)));
    let res3 = vec![&b"abcd"[..], &b"abcd"[..]];
    assert_eq!(multi(d), Done(&b",ef"[..], res3));
    assert_eq!(multi(f), Incomplete(Needed::Size(4)));
    assert_eq!(multi_longsep(g), Incomplete(Needed::Size(6)));
    assert_eq!(multi(h), Incomplete(Needed::Size(9)));
  }

  #[test]
  #[cfg(feature = "std")]
  fn separated_nonempty_list_complete() {
    named!(multi<&[u8],Vec<&[u8]> >, separated_nonempty_list_complete!(tag!(","), alpha));
    let a = &b"abcdef"[..];
    let b = &b"abcd,abcdef"[..];
    let c = &b"abcd,abcd,ef"[..];
    let d = &b"abc."[..];
    let e = &b"abcd,ef."[..];
    let f = &b"123"[..];

    assert_eq!(multi(a), Done(&b""[..], vec!(a)));
    assert_eq!(multi(b), Done(&b""[..], vec!(&b"abcd"[..], &b"abcdef"[..])));
    assert_eq!(multi(c), Done(&b""[..], vec!(&b"abcd"[..], &b"abcd"[..], &b"ef"[..])));
    assert_eq!(multi(d), Done(&b"."[..], vec!(&b"abc"[..])));
    assert_eq!(multi(e), Done(&b"."[..], vec!(&b"abcd"[..], &b"ef"[..])));
    assert_eq!(multi(f), Error(error_position!(ErrorKind::Alpha, &b"123"[..])));
  }

  #[test]
  #[cfg(feature = "std")]
  fn many0() {
    named!( tag_abcd, tag!("abcd") );
    named!( tag_empty, tag!("") );
    named!( multi<&[u8],Vec<&[u8]> >, many0!(tag_abcd) );
    named!( multi_empty<&[u8],Vec<&[u8]> >, many0!(tag_empty) );

    assert_eq!(multi(&b"abcdef"[..]), Done(&b"ef"[..], vec![&b"abcd"[..]]));
    assert_eq!(multi(&b"abcdabcdefgh"[..]), Done(&b"efgh"[..], vec![&b"abcd"[..], &b"abcd"[..]]));
    assert_eq!(multi(&b"azerty"[..]), Done(&b"azerty"[..], Vec::new()));
    assert_eq!(multi(&b"abcdab"[..]), Incomplete(Needed::Size(8)));
    assert_eq!(multi(&b"abcd"[..]), Done(&b""[..], vec![&b"abcd"[..]]));
    assert_eq!(multi(&b""[..]), Done(&b""[..], Vec::new()));
    assert_eq!(multi_empty(&b"abcdef"[..]), Error(error_position!(ErrorKind::Many0, &b"abcdef"[..])));
  }

  #[cfg(feature = "nightly")]
  use test::Bencher;

  #[cfg(feature = "nightly")]
  #[bench]
  fn many0_bench(b: &mut Bencher) {
    named!(multi<&[u8],Vec<&[u8]> >, many0!(tag!("abcd")));
    b.iter(|| {
      multi(&b"abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd"[..])
    });
  }

  #[test]
  #[cfg(feature = "std")]
  fn many1() {
    named!(multi<&[u8],Vec<&[u8]> >, many1!(tag!("abcd")));

    let a = &b"abcdef"[..];
    let b = &b"abcdabcdefgh"[..];
    let c = &b"azerty"[..];
    let d = &b"abcdab"[..];

    let res1 = vec![&b"abcd"[..]];
    assert_eq!(multi(a), Done(&b"ef"[..], res1));
    let res2 = vec![&b"abcd"[..], &b"abcd"[..]];
    assert_eq!(multi(b), Done(&b"efgh"[..], res2));
    assert_eq!(multi(c), Error(error_position!(ErrorKind::Many1,c)));
    assert_eq!(multi(d), Incomplete(Needed::Size(8)));
  }

  #[test]
  #[cfg(feature = "std")]
  fn many_till() {
    named!(multi<&[u8], (Vec<&[u8]>, &[u8]) >, many_till!( tag!( "abcd" ), tag!( "efgh" ) ) );

    let a = b"abcdabcdefghabcd";
    let b = b"efghabcd";
    let c = b"azerty";

    let res_a = (vec![&b"abcd"[..], &b"abcd"[..]], &b"efgh"[..]);
    let res_b: (Vec<&[u8]>, &[u8]) = (Vec::new(), &b"efgh"[..]);
    assert_eq!(multi(&a[..]), Done(&b"abcd"[..], res_a));
    assert_eq!(multi(&b[..]), Done(&b"abcd"[..], res_b));
    // FIX: `error_position` was written without the macro `!`, which does not
    // compile under cfg(test); restored the macro invocation.
    assert_eq!(multi(&c[..]), Error(error_node_position!(ErrorKind::ManyTill,&c[..], error_position!(ErrorKind::Tag,&c[..]))));
  }

  #[test]
  #[cfg(feature = "std")]
  fn infinite_many() {
    fn tst(input: &[u8]) -> IResult<&[u8], &[u8]> {
      println!("input: {:?}", input);
      Error(error_position!(ErrorKind::Custom(0),input))
    }

    // should not go into an infinite loop
    named!(multi0<&[u8],Vec<&[u8]> >, many0!(tst));
    let a = &b"abcdef"[..];
    assert_eq!(multi0(a), Done(a, Vec::new()));

    named!(multi1<&[u8],Vec<&[u8]> >, many1!(tst));
    let a = &b"abcdef"[..];
    assert_eq!(multi1(a), Error(error_position!(ErrorKind::Many1,a)));
  }

  #[test]
  #[cfg(feature = "std")]
  fn many_m_n() {
    named!(multi<&[u8],Vec<&[u8]> >, many_m_n!(2, 4, tag!("Abcd")));

    let a = &b"Abcdef"[..];
    let b = &b"AbcdAbcdefgh"[..];
    let c = &b"AbcdAbcdAbcdAbcdefgh"[..];
    let d = &b"AbcdAbcdAbcdAbcdAbcdefgh"[..];
    let e = &b"AbcdAb"[..];

    assert_eq!(multi(a), Error(error_position!(ErrorKind::ManyMN,a)));
    let res1 = vec![&b"Abcd"[..], &b"Abcd"[..]];
    assert_eq!(multi(b), Done(&b"efgh"[..], res1));
    let res2 = vec![&b"Abcd"[..], &b"Abcd"[..], &b"Abcd"[..], &b"Abcd"[..]];
    assert_eq!(multi(c), Done(&b"efgh"[..], res2));
    let res3 = vec![&b"Abcd"[..], &b"Abcd"[..], &b"Abcd"[..], &b"Abcd"[..]];
    assert_eq!(multi(d), Done(&b"Abcdefgh"[..], res3));
    assert_eq!(multi(e), Incomplete(Needed::Size(8)));
  }

  #[test]
  #[cfg(feature = "std")]
  fn count() {
    const TIMES: usize = 2;
    named!( tag_abc, tag!("abc") );
    named!( cnt_2<&[u8], Vec<&[u8]> >, count!(tag_abc, TIMES ) );

    assert_eq!(cnt_2(&b"abcabcabcdef"[..]), Done(&b"abcdef"[..], vec![&b"abc"[..], &b"abc"[..]]));
    assert_eq!(cnt_2(&b"ab"[..]), Incomplete(Needed::Size(3)));
    assert_eq!(cnt_2(&b"abcab"[..]), Incomplete(Needed::Size(6)));
    assert_eq!(cnt_2(&b"xxx"[..]), Error(error_position!(ErrorKind::Count, &b"xxx"[..])));
    assert_eq!(cnt_2(&b"xxxabcabcdef"[..]), Error(error_position!(ErrorKind::Count, &b"xxxabcabcdef"[..])));
    assert_eq!(cnt_2(&b"abcxxxabcdef"[..]), Error(error_position!(ErrorKind::Count, &b"abcxxxabcdef"[..])));
  }

  #[test]
  #[cfg(feature = "std")]
  fn count_zero() {
    const TIMES: usize = 0;
    named!( tag_abc, tag!("abc") );
    named!( counter_2<&[u8], Vec<&[u8]> >, count!(tag_abc, TIMES ) );

    let done = &b"abcabcabcdef"[..];
    let parsed_done = Vec::new();
    let rest = done;
    let incomplete_1 = &b"ab"[..];
    let parsed_incompl_1 = Vec::new();
    let incomplete_2 = &b"abcab"[..];
    let parsed_incompl_2 = Vec::new();
    let error = &b"xxx"[..];
    let error_remain = &b"xxx"[..];
    let parsed_err = Vec::new();
    let error_1 = &b"xxxabcabcdef"[..];
    let parsed_err_1 = Vec::new();
    let error_1_remain = &b"xxxabcabcdef"[..];
    let error_2 = &b"abcxxxabcdef"[..];
    let parsed_err_2 = Vec::new();
    let error_2_remain = &b"abcxxxabcdef"[..];

    assert_eq!(counter_2(done), Done(rest, parsed_done));
    assert_eq!(counter_2(incomplete_1), Done(incomplete_1, parsed_incompl_1));
    assert_eq!(counter_2(incomplete_2), Done(incomplete_2, parsed_incompl_2));
    assert_eq!(counter_2(error), Done(error_remain, parsed_err));
    assert_eq!(counter_2(error_1), Done(error_1_remain, parsed_err_1));
    assert_eq!(counter_2(error_2), Done(error_2_remain, parsed_err_2));
  }

  #[test]
  fn count_fixed() {
    const TIMES: usize = 2;
    named!( tag_abc, tag!("abc") );
    named!( cnt_2<&[u8], [&[u8]; TIMES] >, count_fixed!(&[u8], tag_abc, TIMES ) );

    assert_eq!(cnt_2(&b"abcabcabcdef"[..]), Done(&b"abcdef"[..], [&b"abc"[..], &b"abc"[..]]));
    assert_eq!(cnt_2(&b"ab"[..]), Incomplete(Needed::Size(3)));
    assert_eq!(cnt_2(&b"abcab"[..]), Incomplete(Needed::Size(6)));
    assert_eq!(cnt_2(&b"xxx"[..]), Error(error_position!(ErrorKind::Count, &b"xxx"[..])));
    assert_eq!(cnt_2(&b"xxxabcabcdef"[..]), Error(error_position!(ErrorKind::Count, &b"xxxabcabcdef"[..])));
    assert_eq!(cnt_2(&b"abcxxxabcdef"[..]), Error(error_position!(ErrorKind::Count, &b"abcxxxabcdef"[..])));
  }

  // Compile-only check that count_fixed! composes inside do_parse!.
  #[allow(dead_code)]
  pub fn compile_count_fixed(input: &[u8]) -> IResult<&[u8], ()> {
    do_parse!(input,
      tag!("abcd")                   >>
      count_fixed!( u16, le_u16, 4 ) >>
      eof!()                         >>
      ()
    )
  }

  #[allow(unused_variables)]
  #[test]
  fn count_fixed_no_type() {
    const TIMES: usize = 2;
    named!( tag_abc, tag!("abc") );
    named!( counter_2<&[u8], [&[u8]; TIMES], () >, count_fixed!(&[u8], tag_abc, TIMES ) );

    let done = &b"abcabcabcdef"[..];
    let parsed_main = [&b"abc"[..], &b"abc"[..]];
    let rest = &b"abcdef"[..];
    let incomplete_1 = &b"ab"[..];
    let incomplete_2 = &b"abcab"[..];
    let error = &b"xxx"[..];
    let error_1 = &b"xxxabcabcdef"[..];
    let error_1_remain = &b"xxxabcabcdef"[..];
    let error_2 = &b"abcxxxabcdef"[..];
    let error_2_remain = &b"abcxxxabcdef"[..];

    assert_eq!(counter_2(done), Done(rest, parsed_main));
    assert_eq!(counter_2(incomplete_1), Incomplete(Needed::Size(3)));
    assert_eq!(counter_2(incomplete_2), Incomplete(Needed::Size(6)));
    assert_eq!(counter_2(error), Error(error_position!(ErrorKind::Count, error)));
    assert_eq!(counter_2(error_1), Error(error_position!(ErrorKind::Count, error_1_remain)));
    assert_eq!(counter_2(error_2), Error(error_position!(ErrorKind::Count, error_2_remain)));
  }

  // Parses an ASCII decimal number, used as the length parser below.
  named!(pub number<u32>, map_res!(
    map_res!(
      digit,
      str::from_utf8
    ),
    FromStr::from_str
  ));

  #[test]
  #[cfg(feature = "std")]
  fn length_count() {
    named!(tag_abc, tag!(&b"abc"[..]) );
    named!( cnt<&[u8], Vec<&[u8]> >, length_count!(number, tag_abc) );

    assert_eq!(cnt(&b"2abcabcabcdef"[..]), Done(&b"abcdef"[..], vec![&b"abc"[..], &b"abc"[..]]));
    assert_eq!(cnt(&b"2ab"[..]), Incomplete(Needed::Size(4)));
    assert_eq!(cnt(&b"3abcab"[..]), Incomplete(Needed::Size(7)));
    assert_eq!(cnt(&b"xxx"[..]), Error(error_position!(ErrorKind::Digit, &b"xxx"[..])));
    assert_eq!(cnt(&b"2abcxxx"[..]), Error(error_position!(ErrorKind::Count, &b"abcxxx"[..])));
  }

  #[test]
  fn length_data() {
    named!( take<&[u8], &[u8]>, length_data!(number) );

    assert_eq!(take(&b"6abcabcabcdef"[..]), Done(&b"abcdef"[..], &b"abcabc"[..]));
    assert_eq!(take(&b"3ab"[..]), Incomplete(Needed::Size(4)));
    assert_eq!(take(&b"xxx"[..]), Error(error_position!(ErrorKind::Digit, &b"xxx"[..])));
    assert_eq!(take(&b"2abcxxx"[..]), Done(&b"cxxx"[..], &b"ab"[..]));
  }

  #[test]
  fn length_value_test() {
    named!(length_value_1<&[u8], u16 >, length_value!(be_u8, be_u16));
    named!(length_value_2<&[u8], (u8, u8) >, length_value!(be_u8, tuple!(be_u8, be_u8)));

    let i1 = [0, 5, 6];
    assert_eq!(length_value_1(&i1), IResult::Error(error_position!(ErrorKind::Complete, &b""[..])));
    assert_eq!(length_value_2(&i1), IResult::Error(error_position!(ErrorKind::Complete, &b""[..])));

    let i2 = [1, 5, 6, 3];
    assert_eq!(length_value_1(&i2), IResult::Error(error_position!(ErrorKind::Complete, &i2[1..2])));
    assert_eq!(length_value_2(&i2), IResult::Error(error_position!(ErrorKind::Complete, &i2[1..2])));

    let i3 = [2, 5, 6, 3, 4, 5, 7];
    assert_eq!(length_value_1(&i3), IResult::Done(&i3[3..], 1286));
    assert_eq!(length_value_2(&i3), IResult::Done(&i3[3..], (5, 6)));

    let i4 = [3, 5, 6, 3, 4, 5];
    assert_eq!(length_value_1(&i4), IResult::Done(&i4[4..], 1286));
    assert_eq!(length_value_2(&i4), IResult::Done(&i4[4..], (5, 6)));
  }

  #[test]
  #[cfg(feature = "std")]
  fn fold_many0() {
    fn fold_into_vec<T>(mut acc: Vec<T>, item: T) -> Vec<T> {
      acc.push(item);
      acc
    };
    named!( tag_abcd, tag!("abcd") );
    named!( tag_empty, tag!("") );
    named!( multi<&[u8],Vec<&[u8]> >, fold_many0!(tag_abcd, Vec::new(), fold_into_vec) );
    named!( multi_empty<&[u8],Vec<&[u8]> >, fold_many0!(tag_empty, Vec::new(), fold_into_vec) );

    assert_eq!(multi(&b"abcdef"[..]), Done(&b"ef"[..], vec![&b"abcd"[..]]));
    assert_eq!(multi(&b"abcdabcdefgh"[..]), Done(&b"efgh"[..], vec![&b"abcd"[..], &b"abcd"[..]]));
    assert_eq!(multi(&b"azerty"[..]), Done(&b"azerty"[..], Vec::new()));
    assert_eq!(multi(&b"abcdab"[..]), Incomplete(Needed::Size(8)));
    assert_eq!(multi(&b"abcd"[..]), Done(&b""[..], vec![&b"abcd"[..]]));
    assert_eq!(multi(&b""[..]), Done(&b""[..], Vec::new()));
    assert_eq!(multi_empty(&b"abcdef"[..]), Error(error_position!(ErrorKind::Many0, &b"abcdef"[..])));
  }

  #[test]
  #[cfg(feature = "std")]
  fn fold_many1() {
    fn fold_into_vec<T>(mut acc: Vec<T>, item: T) -> Vec<T> {
      acc.push(item);
      acc
    };
    named!(multi<&[u8],Vec<&[u8]> >, fold_many1!(tag!("abcd"), Vec::new(), fold_into_vec));

    let a = &b"abcdef"[..];
    let b = &b"abcdabcdefgh"[..];
    let c = &b"azerty"[..];
    let d = &b"abcdab"[..];

    let res1 = vec![&b"abcd"[..]];
    assert_eq!(multi(a), Done(&b"ef"[..], res1));
    let res2 = vec![&b"abcd"[..], &b"abcd"[..]];
    assert_eq!(multi(b), Done(&b"efgh"[..], res2));
    assert_eq!(multi(c), Error(error_position!(ErrorKind::Many1,c)));
    assert_eq!(multi(d), Incomplete(Needed::Size(8)));
  }

  #[test]
  #[cfg(feature = "std")]
  fn fold_many_m_n() {
    fn fold_into_vec<T>(mut acc: Vec<T>, item: T) -> Vec<T> {
      acc.push(item);
      acc
    };
    named!(multi<&[u8],Vec<&[u8]> >, fold_many_m_n!(2, 4, tag!("Abcd"), Vec::new(), fold_into_vec));

    let a = &b"Abcdef"[..];
    let b = &b"AbcdAbcdefgh"[..];
    let c = &b"AbcdAbcdAbcdAbcdefgh"[..];
    let d = &b"AbcdAbcdAbcdAbcdAbcdefgh"[..];
    let e = &b"AbcdAb"[..];

    assert_eq!(multi(a), Error(error_position!(ErrorKind::ManyMN,a)));
    let res1 = vec![&b"Abcd"[..], &b"Abcd"[..]];
    assert_eq!(multi(b), Done(&b"efgh"[..], res1));
    let res2 = vec![&b"Abcd"[..], &b"Abcd"[..], &b"Abcd"[..], &b"Abcd"[..]];
    assert_eq!(multi(c), Done(&b"efgh"[..], res2));
    let res3 = vec![&b"Abcd"[..], &b"Abcd"[..], &b"Abcd"[..], &b"Abcd"[..]];
    assert_eq!(multi(d), Done(&b"Abcdefgh"[..], res3));
    assert_eq!(multi(e), Incomplete(Needed::Size(8)));
  }
}
/// Walkthrough of the `String` API: creation, appending, `+` concatenation
/// and `format!`. Behaviour (stdout output) is identical to the original
/// exercise; only commentary and local spellings differ.
fn main() {
    // An empty, growable owned string.
    let s = String::new();

    // A `&'static str` literal baked into the binary.
    let data = "initial contents";
    println!("data: {}", data);

    // `String::from` and `str::to_string`/`to_owned` all yield an owned `String`.
    let data_two = "initial contents".to_owned();

    let s = data.to_string();
    println!("s: {}", s);

    // `to_string` also works directly on a literal; this shadows `s` again.
    let s = "initial contents".to_string();

    // Growing a string in place with `push_str`.
    let mut a = "foo".to_string();
    a.push_str("bar");
    println!("a: {}", a);

    // `push_str` takes a `&str`, so `w` is only borrowed — it stays usable
    // afterwards, which the two prints below demonstrate.
    let mut q = String::from("lo");
    let w = "l";
    q.push_str(w);
    println!("w is {}", w);
    println!("q: {}", q);

    // `String + &str`: the left operand is moved, the right is borrowed.
    // `&r` is a `&String`, accepted because the compiler coerces it to `&str`.
    let e = String::from("Hello, ");
    let r = String::from("world!");
    let t = e + &r; // `e` is unusable from here on; `r` is still alive.
    println!("r: {}", r);
    println!("t: {}", t);

    // `format!` borrows every argument, whereas the `+` chain consumes `y` —
    // so the `format!` line must come first.
    let y = String::from("tic");
    let u = String::from("tac");
    let i = String::from("toe");
    let z = format!("{}-{}-{}", y, u, i);
    let p = y + "-" + &u + "-" + &i;
    println!("p: {}", p);
    println!("z: {}", z);
}
/// Ownership/borrowing walkthrough: moves, clones, shared vs. mutable
/// references, and the stale-index problem that string slices solve.
fn main() {
    // s is not yet valid, hasn't been declared
    // hardcoded into final executable
    let s = "hello"; // s is valid from this point forward, comes into scope

    // do stuff with s

    // allocated on the heap (at runtime)
    let mut t = String::from("hello");

    // allocate more heap memory to hold the new total String contents
    t.push_str(", world!");

    println!("{}", t);

    let x = 5;
    // copies the value of x, since int has fixed size, values pushed on stack
    let y = x;

    // s1 is a fixed sized struct containing
    // ptr to heap memory containing string value
    // len in bytes
    // capacity in bytes
    let s1 = String::from("hello");

    // s2 makes a new copy of the struct, not the heap allocated string value
    // ownership is moved to s2 variable to prevent double free
    // s1 is no longer a valid variable
    // thus Rust does a shallow copy + move, never automatically performs deep copy
    let mut s2 = s1;

    // ERROR can't borrow moved value
    // let s3 = s1.clone()

    // intentionally deep copy heap contents pointed to by
    // s2 for pointing by s3
    // explicit clone() only required for heap data, stack copies are fixed size and cheap
    // specifically, explicit clone() only required for types that do not
    // implement the Copy trait. Implementing the Copy trait requires that the type
    // is only composed of scalar values which do not implement the Drop trait.
    // e.g. (i32, i32) is Copy, but (i32, String) is not.
    let mut s3 = s2.clone();

    // passing variables to a function works similarly as assignment in terms of move or copy.
    // function return values can be moved into a variable in the parent scope.
    // We can use references to avoid having to pass variables back from a function (borrows).

    // calc... thus borrows (owns a new read only pointer) to s2.
    let len = calculate_length(&s2);
    println!("The length of '{}' is {}.", s2, len);

    // Changing a referenced value requires to take a mutable reference/borrow
    change(&mut s3);
    println!("s3 is: '{}'", s3);

    // This works because the (very) last time the immutable references are used
    // occurs before the mutable reference is declared.
    let r1 = &s2;
    let r2 = &s2;
    println!("{} and {}", r1, r2);
    let r3 = &mut s2;
    println!("{}", r3);
    // would result in a compile time error stating that r3 is not allowed
    // since we're using immutable references after it would be declared.
    // println!("{} and {}", r1, r2);

    let mut s4 = String::from("hello world");
    let word = first_word_first_pass(&s4);
    s4.clear();
    // word still points to index 5 marking the end of the first word that
    // was in s4, but s4 is now empty, word has to be updated again, and is stale
    // data until then.

    // solution: string slices!
    // references to a portion of a String.
    // range indices must occur at valid UTF-8 character boundaries
    // can't slice into the middle of a multibyte character.
    let mut s4 = String::from("hello world");
    let hello = &s4[..5];
    let world = &s4[6..];

    let word = first_word(&s4);
    // ERROR: the line below borrows s4, thus it can't also be borrowed
    // mutable at this point to clear it
    // s4.clear();
    println!("the first word is: {}", word);
    // but as before, this is ok because the compiler can verify this clear() happens after the
    // last time s4 is used.
    s4.clear();
}
// s, t, no longer valid, goes out of scope
// s is popped off the stack
// t is deallocated automatically from the heap using drop() (RAII)

/// Returns the length in bytes of the borrowed string.
/// NOTE(review): `&str` would be the more general parameter type; kept as
/// `&String` here because this file demonstrates borrowing a `String`.
fn calculate_length(s: &String) -> usize {
    s.len()
}

/// Appends ", world!" through a mutable borrow — no ownership transfer.
fn change(s: &mut String) {
    s.push_str(", world!")
}

// This mostly works, but the problem
// is that this returns a usize value which must be stored separately from
// and thus maintained alongside the String the usize value is tracking.
/// First pass: returns the byte index of the end of the first word in `s`.
///
/// Improved to take `&str` instead of `&String` — more general and idiomatic;
/// existing call sites passing `&String` still compile via deref coercion.
/// The returned index is only meaningful while `s` is unchanged; see
/// `first_word` for the slice-based fix.
fn first_word_first_pass(s: &str) -> usize {
    let bytes = s.as_bytes();

    // Scan for the first ASCII space; its index marks the end of the word.
    for (i, &item) in bytes.iter().enumerate() {
        if item == b' ' {
            return i;
        }
    }

    // No space found: the whole string is a single word.
    s.len()
}

/// Returns the first word of `s` as a string slice.
///
/// The string slice is a ref back to s, thus
/// the slice state is maintained alongside s automagically.
/// Accepts &str as a param because that works with both &String and &str values.
fn first_word(s: &str) -> &str {
    let bytes = s.as_bytes();

    for (i, &item) in bytes.iter().enumerate() {
        if item == b' ' {
            return &s[..i];
        }
    }

    // No space: the first word is the entire string.
    &s[..]
}
use crate::test::spec::unified_runner::run_unified_tests;

// Runs the unified spec tests from the "versioned-api" directory on whichever
// async runtime feature (tokio or async-std) the crate was compiled with.
#[cfg_attr(feature = "tokio-runtime", tokio::test)]
#[cfg_attr(feature = "async-std-runtime", async_std::test)]
async fn run_unified() {
    run_unified_tests(&["versioned-api"]).await;
}
#[cfg(test)] use super::super::failpoints::Failpoints; use super::commit_log::CommitLog; use super::error::Error; use super::file_system::{FileKind, FileSystem, OpenMode, SeriesDir}; use super::index::Index; use std::collections::HashMap; use std::sync::{Arc, Mutex}; pub struct SeriesEnv { dir: Arc<SeriesDir>, commit_log: CommitLog, index: Index, #[cfg(test)] fp: Arc<Failpoints>, } impl SeriesEnv { fn create(dir: Arc<SeriesDir>, #[cfg(test)] fp: Arc<Failpoints>) -> Result<SeriesEnv, Error> { let log = CommitLog::open( dir.clone(), #[cfg(test)] fp.clone(), )?; let index_offset = log.current().index_offset; Ok(SeriesEnv { dir: dir.clone(), commit_log: log, index: Index::open( dir.clone().open(FileKind::Index, OpenMode::Write)?, index_offset, )?, #[cfg(test)] fp: fp, }) } pub fn dir(&self) -> Arc<SeriesDir> { self.dir.clone() } pub fn commit_log(&self) -> &CommitLog { &self.commit_log } #[cfg(test)] pub fn fp(&self) -> Arc<Failpoints> { self.fp.clone() } pub fn index(&self) -> &Index { &self.index } } pub struct Env { fs: FileSystem, series: Arc<Mutex<HashMap<String, Arc<SeriesEnv>>>>, #[cfg(test)] pub fp: Arc<Failpoints>, } impl Env { pub fn fs(&self) -> &FileSystem { &self.fs } pub fn series<S: AsRef<str>>(&self, name: S) -> Result<Arc<SeriesEnv>, Error> { let mut series = self.series.lock().unwrap(); match series.get(name.as_ref()) { Some(env) => Ok(env.clone()), _ => { let env = Arc::new(SeriesEnv::create( self.fs.series(name.as_ref())?, #[cfg(test)] self.fp.clone(), )?); series.insert(name.as_ref().to_owned(), env.clone()); Ok(env.clone()) } } } } pub fn create( fs: FileSystem, #[cfg(test)] fp: Arc<Failpoints>, ) -> Env { Env { fs: fs, series: Arc::new(Mutex::new(HashMap::new())), #[cfg(test)] fp, } } #[cfg(test)] pub mod test { use super::super::file_system; use super::*; use std::fs; use std::ops::Deref; use std::path::PathBuf; use std::time::{SystemTime, UNIX_EPOCH}; pub struct TempEnv { pub env: Env, path: PathBuf, } impl Drop for TempEnv { fn drop(&mut self) 
{ fs::remove_dir_all(&self.path).unwrap(); } } impl Deref for TempEnv { type Target = Env; fn deref(&self) -> &Self::Target { &self.env } } pub fn create_with_failpoints(fp: Arc<Failpoints>) -> Result<TempEnv, Error> { let path = PathBuf::from(format!( "temp-dir-{:?}", SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_nanos() )); Ok(TempEnv { env: super::create(file_system::open(&path)?, fp), path: path.clone(), }) } pub fn create() -> Result<TempEnv, Error> { create_with_failpoints(Arc::new(Failpoints::create())) } }
use std::{path::Path, str}; use failure::{ Fail, }; use crate::parse::{Parse, ParseError}; /// Represents all shell builtins. #[derive(Clone, Debug)] pub enum Builtin<'a> { Clear, Cd(&'a Path), Exit, } #[derive(Clone, Debug, Fail)] pub enum CdError { #[fail(display = "cd: no path provided")] NoPath, } // ===== impl Builtin ===== impl<'a> Parse<'a> for Builtin<'a> { type Error = CdError; fn parse_from(text: &'a str) -> Result<Self, ParseError<Self::Error>> { let mut args = super::ArgsIter { text }; match args.next().ok_or(ParseError::NoInput)? { "clear" => Ok(Builtin::Clear), "cd" => { let path = args.next().ok_or(CdError::NoPath)?; Ok(Builtin::Cd(Path::new(path))) }, "exit" => { Ok(Builtin::Exit) }, _ => Err(ParseError::Unrecognized), } } }
mod fixtures;

use assert_cmd::prelude::*;
use assert_fs::fixture::TempDir;
use fixtures::{port, server, tmpdir, Error, TestServer};
use regex::Regex;
use rstest::rstest;
use std::io::{BufRead, BufReader};
use std::process::{Command, Stdio};

/// Binding to an address we do not own must make the server exit with a
/// "Failed to bind" error, even when a valid interface is also given.
#[rstest]
#[case(&["-i", "12.123.234.12"])]
#[case(&["-i", "::", "-i", "12.123.234.12"])]
fn bind_fails(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
    Command::cargo_bin("miniserve")?
        .arg(tmpdir.path())
        .arg("-p")
        .arg(port.to_string())
        .args(args)
        .assert()
        .stderr(predicates::str::contains("Failed to bind server to"))
        .failure();

    Ok(())
}

/// Checks which protocol families the server listens on for a given set of
/// `-i` flags: no flag means both, `::` only IPv6, `0.0.0.0` only IPv4.
#[rstest]
#[case(server(&[] as &[&str]), true, true)]
#[case(server(&["-i", "::"]), false, true)]
#[case(server(&["-i", "0.0.0.0"]), true, false)]
#[case(server(&["-i", "::", "-i", "0.0.0.0"]), true, true)]
fn bind_ipv4_ipv6(
    #[case] server: TestServer,
    #[case] bind_ipv4: bool,
    #[case] bind_ipv6: bool,
) -> Result<(), Error> {
    // A plain GET succeeding is the signal that the family is bound.
    assert_eq!(
        reqwest::blocking::get(format!("http://127.0.0.1:{}", server.port()).as_str()).is_ok(),
        bind_ipv4
    );
    assert_eq!(
        reqwest::blocking::get(format!("http://[::1]:{}", server.port()).as_str()).is_ok(),
        bind_ipv6
    );

    Ok(())
}

/// Every URL the server prints at startup must actually be servable.
// NOTE(review): if an assertion or `?` fails before `child.kill()`, the
// spawned server is leaked for the rest of the test run — consider a
// drop-guard that kills the child; confirm whether this matters in CI.
#[rstest]
#[case(&[] as &[&str])]
#[case(&["-i", "::"])]
#[case(&["-i", "127.0.0.1"])]
#[case(&["-i", "0.0.0.0"])]
#[case(&["-i", "::", "-i", "0.0.0.0"])]
#[case(&["--random-route"])]
#[case(&["--route-prefix", "/prefix"])]
fn validate_printed_urls(tmpdir: TempDir, port: u16, #[case] args: &[&str]) -> Result<(), Error> {
    let mut child = Command::cargo_bin("miniserve")?
        .arg(tmpdir.path())
        .arg("-p")
        .arg(port.to_string())
        .args(args)
        .stdout(Stdio::piped())
        .spawn()?;

    // WARN assumes urls list is terminated by an empty line
    let url_lines = BufReader::new(child.stdout.take().unwrap())
        .lines()
        .map(|line| line.expect("Error reading stdout"))
        .take_while(|line| !line.is_empty()) /* non-empty lines */
        .collect::<Vec<_>>();
    let url_lines = url_lines.join("\n");

    // Extract every http:// URL from the banner (host may be IPv6-bracketed).
    let urls = Regex::new(r"http://[a-zA-Z0-9\.\[\]:/]+")
        .unwrap()
        .captures_iter(url_lines.as_str())
        .map(|caps| caps.get(0).unwrap().as_str())
        .collect::<Vec<_>>();
    assert!(!urls.is_empty());

    // Each printed URL must answer with a non-error status.
    for url in urls {
        reqwest::blocking::get(url)?.error_for_status()?;
    }

    child.kill()?;

    Ok(())
}
// Auto-generated (svd2rust-style) register API for the RTC tamper and
// alternate function configuration register (TAFCR). The code is mechanical:
// one reader/writer type alias per bit field, single-bit accessors on `R`/`W`,
// and the register spec impls at the bottom. Comments below only mark the
// sections; do not hand-edit field offsets — regenerate from the SVD instead.

#[doc = "Register `TAFCR` reader"]
pub type R = crate::R<TAFCR_SPEC>;
#[doc = "Register `TAFCR` writer"]
pub type W = crate::W<TAFCR_SPEC>;
#[doc = "Field `TAMP1E` reader - Tamper 1 detection enable"]
pub type TAMP1E_R = crate::BitReader;
#[doc = "Field `TAMP1E` writer - Tamper 1 detection enable"]
pub type TAMP1E_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TAMP1TRG` reader - Active level for tamper 1"]
pub type TAMP1TRG_R = crate::BitReader;
#[doc = "Field `TAMP1TRG` writer - Active level for tamper 1"]
pub type TAMP1TRG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TAMPIE` reader - Tamper interrupt enable"]
pub type TAMPIE_R = crate::BitReader;
#[doc = "Field `TAMPIE` writer - Tamper interrupt enable"]
pub type TAMPIE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TAMP1INSEL` reader - TAMPER1 mapping"]
pub type TAMP1INSEL_R = crate::BitReader;
#[doc = "Field `TAMP1INSEL` writer - TAMPER1 mapping"]
pub type TAMP1INSEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSINSEL` reader - TIMESTAMP mapping"]
pub type TSINSEL_R = crate::BitReader;
#[doc = "Field `TSINSEL` writer - TIMESTAMP mapping"]
pub type TSINSEL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ALARMOUTTYPE` reader - AFO_ALARM output type"]
pub type ALARMOUTTYPE_R = crate::BitReader;
#[doc = "Field `ALARMOUTTYPE` writer - AFO_ALARM output type"]
pub type ALARMOUTTYPE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;

// Read accessors: each extracts one bit at the field's offset.
impl R {
    #[doc = "Bit 0 - Tamper 1 detection enable"]
    #[inline(always)]
    pub fn tamp1e(&self) -> TAMP1E_R {
        TAMP1E_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Active level for tamper 1"]
    #[inline(always)]
    pub fn tamp1trg(&self) -> TAMP1TRG_R {
        TAMP1TRG_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Tamper interrupt enable"]
    #[inline(always)]
    pub fn tampie(&self) -> TAMPIE_R {
        TAMPIE_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 16 - TAMPER1 mapping"]
    #[inline(always)]
    pub fn tamp1insel(&self) -> TAMP1INSEL_R {
        TAMP1INSEL_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - TIMESTAMP mapping"]
    #[inline(always)]
    pub fn tsinsel(&self) -> TSINSEL_R {
        TSINSEL_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - AFO_ALARM output type"]
    #[inline(always)]
    pub fn alarmouttype(&self) -> ALARMOUTTYPE_R {
        ALARMOUTTYPE_R::new(((self.bits >> 18) & 1) != 0)
    }
}

// Write accessors: each returns a bit-writer positioned at the field's offset.
impl W {
    #[doc = "Bit 0 - Tamper 1 detection enable"]
    #[inline(always)]
    #[must_use]
    pub fn tamp1e(&mut self) -> TAMP1E_W<TAFCR_SPEC, 0> {
        TAMP1E_W::new(self)
    }
    #[doc = "Bit 1 - Active level for tamper 1"]
    #[inline(always)]
    #[must_use]
    pub fn tamp1trg(&mut self) -> TAMP1TRG_W<TAFCR_SPEC, 1> {
        TAMP1TRG_W::new(self)
    }
    #[doc = "Bit 2 - Tamper interrupt enable"]
    #[inline(always)]
    #[must_use]
    pub fn tampie(&mut self) -> TAMPIE_W<TAFCR_SPEC, 2> {
        TAMPIE_W::new(self)
    }
    #[doc = "Bit 16 - TAMPER1 mapping"]
    #[inline(always)]
    #[must_use]
    pub fn tamp1insel(&mut self) -> TAMP1INSEL_W<TAFCR_SPEC, 16> {
        TAMP1INSEL_W::new(self)
    }
    #[doc = "Bit 17 - TIMESTAMP mapping"]
    #[inline(always)]
    #[must_use]
    pub fn tsinsel(&mut self) -> TSINSEL_W<TAFCR_SPEC, 17> {
        TSINSEL_W::new(self)
    }
    #[doc = "Bit 18 - AFO_ALARM output type"]
    #[inline(always)]
    #[must_use]
    pub fn alarmouttype(&mut self) -> ALARMOUTTYPE_W<TAFCR_SPEC, 18> {
        ALARMOUTTYPE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}

// Register spec: ties the `R`/`W` types above to a 32-bit register that
// resets to 0.
#[doc = "tamper and alternate function configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tafcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`tafcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct TAFCR_SPEC;
impl crate::RegisterSpec for TAFCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`tafcr::R`](R) reader structure"]
impl crate::Readable for TAFCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`tafcr::W`](W) writer structure"]
impl crate::Writable for TAFCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets TAFCR to value 0"]
impl crate::Resettable for TAFCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use crate::error::{Error, Result}; use crate::objectify::ObjectifyErrorKind; use crate::sexpr::{Sexpr, TrackedSexpr}; use crate::source::SourceLocation; use crate::symbol::Symbol; use SourceLocation::NoSource; pub fn scan_out_defines(body: TrackedSexpr) -> Result<TrackedSexpr> { let uninit: TrackedSexpr = Sexpr::Uninitialized.into(); let mut variables = TrackedSexpr::nil(NoSource); let mut values = TrackedSexpr::nil(NoSource); body.scan(|expr| -> Result<()> { if is_definition(expr) { let vars = std::mem::replace(&mut variables, TrackedSexpr::nil(NoSource)); variables = TrackedSexpr::cons(definition_variable(expr)?.clone(), vars, NoSource); values = TrackedSexpr::cons(uninit.clone(), values.clone(), NoSource); } Ok(()) })?; if variables.is_null() { return Ok(body); } fn transform(body: TrackedSexpr) -> Result<TrackedSexpr> { match body.decons() { Ok((mut expr, rest)) => { let src = rest.src.start_at(&expr.src); if is_definition(&expr) { expr = make_assignment( definition_variable(&expr)?.clone(), definition_value(&expr)?.clone(), ) } transform(rest) .map(|transformed_rest| TrackedSexpr::cons(expr, transformed_rest, src)) } Err(body) => Ok(body), } } let new_body = make_let(variables, values, transform(body)?); Ok(TrackedSexpr::cons( new_body, TrackedSexpr::nil(NoSource), NoSource, )) } fn is_definition(expr: &TrackedSexpr) -> bool { expr.at(0).map(|sx| sx == "define").unwrap_or(false) } pub fn definition_variable(expr: &TrackedSexpr) -> Result<&TrackedSexpr> { expr.at(1) .and_then(|var| if var.is_symbol() { Ok(var) } else { var.car() }) .map_err(|_| Error::at_expr(ObjectifyErrorKind::ExpectedList, expr)) } pub fn definition_value(expr: &TrackedSexpr) -> Result<TrackedSexpr> { expr.at(1) .and_then(|var| { if var.is_symbol() { expr.at(2).map(|x| x.clone()) } else { Ok(make_function( expr.at(1).unwrap().cdr()?.clone(), expr.cdr().unwrap().cdr()?.clone(), expr.src.clone(), )) } }) .map_err(|_| Error::at_expr(ObjectifyErrorKind::ExpectedList, expr)) } fn 
make_assignment(variable: TrackedSexpr, value: TrackedSexpr) -> TrackedSexpr { use TrackedSexpr as S; S::cons( Sexpr::Symbol(Symbol::new("set!")).into(), S::cons( variable, S::cons(value, TrackedSexpr::nil(NoSource), NoSource), NoSource, ), NoSource, ) } pub fn make_function( variables: TrackedSexpr, body: TrackedSexpr, src: SourceLocation, ) -> TrackedSexpr { let part_src = body.src.start_at(&variables.src); TrackedSexpr::cons( Sexpr::Symbol(Symbol::new("lambda")).into(), TrackedSexpr::cons(variables, body, part_src), src, ) } fn make_let(variables: TrackedSexpr, values: TrackedSexpr, body: TrackedSexpr) -> TrackedSexpr { let var_and_body = TrackedSexpr::cons(variables, body, NoSource); let func = TrackedSexpr::cons( Sexpr::Symbol(Symbol::new("lambda")).into(), var_and_body, NoSource, ); let call = TrackedSexpr::cons(func, values, NoSource); call }
#[doc = "Register `SR` reader"] pub type R = crate::R<SR_SPEC>; #[doc = "Register `SR` writer"] pub type W = crate::W<SR_SPEC>; #[doc = "Field `BSY` reader - Busy"] pub type BSY_R = crate::BitReader<BSYR_A>; #[doc = "Busy\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum BSYR_A { #[doc = "0: No write/erase operation is in progress"] Inactive = 0, #[doc = "1: No write/erase operation is in progress"] Active = 1, } impl From<BSYR_A> for bool { #[inline(always)] fn from(variant: BSYR_A) -> Self { variant as u8 != 0 } } impl BSY_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> BSYR_A { match self.bits { false => BSYR_A::Inactive, true => BSYR_A::Active, } } #[doc = "No write/erase operation is in progress"] #[inline(always)] pub fn is_inactive(&self) -> bool { *self == BSYR_A::Inactive } #[doc = "No write/erase operation is in progress"] #[inline(always)] pub fn is_active(&self) -> bool { *self == BSYR_A::Active } } #[doc = "Field `PGERR` reader - Programming error"] pub type PGERR_R = crate::BitReader<PGERRR_A>; #[doc = "Programming error\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum PGERRR_A { #[doc = "0: No programming error occurred"] NoError = 0, #[doc = "1: A programming error occurred"] Error = 1, } impl From<PGERRR_A> for bool { #[inline(always)] fn from(variant: PGERRR_A) -> Self { variant as u8 != 0 } } impl PGERR_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> PGERRR_A { match self.bits { false => PGERRR_A::NoError, true => PGERRR_A::Error, } } #[doc = "No programming error occurred"] #[inline(always)] pub fn is_no_error(&self) -> bool { *self == PGERRR_A::NoError } #[doc = "A programming error occurred"] #[inline(always)] pub fn is_error(&self) -> bool { *self == PGERRR_A::Error } } #[doc = "Programming error\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum PGERRW_AW { #[doc = "1: Reset 
programming error"] Reset = 1, } impl From<PGERRW_AW> for bool { #[inline(always)] fn from(variant: PGERRW_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `PGERR` writer - Programming error"] pub type PGERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, PGERRW_AW>; impl<'a, REG, const O: u8> PGERR_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Reset programming error"] #[inline(always)] pub fn reset(self) -> &'a mut crate::W<REG> { self.variant(PGERRW_AW::Reset) } } #[doc = "Field `WRPRTERR` reader - Write protection error"] pub type WRPRTERR_R = crate::BitReader<WRPRTERRR_A>; #[doc = "Write protection error\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum WRPRTERRR_A { #[doc = "0: No write protection error occurred"] NoError = 0, #[doc = "1: A write protection error occurred"] Error = 1, } impl From<WRPRTERRR_A> for bool { #[inline(always)] fn from(variant: WRPRTERRR_A) -> Self { variant as u8 != 0 } } impl WRPRTERR_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> WRPRTERRR_A { match self.bits { false => WRPRTERRR_A::NoError, true => WRPRTERRR_A::Error, } } #[doc = "No write protection error occurred"] #[inline(always)] pub fn is_no_error(&self) -> bool { *self == WRPRTERRR_A::NoError } #[doc = "A write protection error occurred"] #[inline(always)] pub fn is_error(&self) -> bool { *self == WRPRTERRR_A::Error } } #[doc = "Write protection error\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum WRPRTERRW_AW { #[doc = "1: Reset write protection error"] Reset = 1, } impl From<WRPRTERRW_AW> for bool { #[inline(always)] fn from(variant: WRPRTERRW_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `WRPRTERR` writer - Write protection error"] pub type WRPRTERR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, WRPRTERRW_AW>; impl<'a, REG, const O: u8> WRPRTERR_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = 
"Reset write protection error"] #[inline(always)] pub fn reset(self) -> &'a mut crate::W<REG> { self.variant(WRPRTERRW_AW::Reset) } } #[doc = "Field `EOP` reader - End of operation"] pub type EOP_R = crate::BitReader<EOPR_A>; #[doc = "End of operation\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum EOPR_A { #[doc = "0: No EOP event occurred"] NoEvent = 0, #[doc = "1: An EOP event occurred"] Event = 1, } impl From<EOPR_A> for bool { #[inline(always)] fn from(variant: EOPR_A) -> Self { variant as u8 != 0 } } impl EOP_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> EOPR_A { match self.bits { false => EOPR_A::NoEvent, true => EOPR_A::Event, } } #[doc = "No EOP event occurred"] #[inline(always)] pub fn is_no_event(&self) -> bool { *self == EOPR_A::NoEvent } #[doc = "An EOP event occurred"] #[inline(always)] pub fn is_event(&self) -> bool { *self == EOPR_A::Event } } #[doc = "End of operation\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum EOPW_AW { #[doc = "1: Reset EOP event"] Reset = 1, } impl From<EOPW_AW> for bool { #[inline(always)] fn from(variant: EOPW_AW) -> Self { variant as u8 != 0 } } #[doc = "Field `EOP` writer - End of operation"] pub type EOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, EOPW_AW>; impl<'a, REG, const O: u8> EOP_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Reset EOP event"] #[inline(always)] pub fn reset(self) -> &'a mut crate::W<REG> { self.variant(EOPW_AW::Reset) } } impl R { #[doc = "Bit 0 - Busy"] #[inline(always)] pub fn bsy(&self) -> BSY_R { BSY_R::new((self.bits & 1) != 0) } #[doc = "Bit 2 - Programming error"] #[inline(always)] pub fn pgerr(&self) -> PGERR_R { PGERR_R::new(((self.bits >> 2) & 1) != 0) } #[doc = "Bit 4 - Write protection error"] #[inline(always)] pub fn wrprterr(&self) -> WRPRTERR_R { WRPRTERR_R::new(((self.bits >> 4) & 1) != 0) } #[doc = "Bit 5 - End of operation"] 
#[inline(always)] pub fn eop(&self) -> EOP_R { EOP_R::new(((self.bits >> 5) & 1) != 0) } } impl W { #[doc = "Bit 2 - Programming error"] #[inline(always)] #[must_use] pub fn pgerr(&mut self) -> PGERR_W<SR_SPEC, 2> { PGERR_W::new(self) } #[doc = "Bit 4 - Write protection error"] #[inline(always)] #[must_use] pub fn wrprterr(&mut self) -> WRPRTERR_W<SR_SPEC, 4> { WRPRTERR_W::new(self) } #[doc = "Bit 5 - End of operation"] #[inline(always)] #[must_use] pub fn eop(&mut self) -> EOP_W<SR_SPEC, 5> { EOP_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "Flash status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct SR_SPEC; impl crate::RegisterSpec for SR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`sr::R`](R) reader structure"] impl crate::Readable for SR_SPEC {} #[doc = "`write(|w| ..)` method takes [`sr::W`](W) writer structure"] impl crate::Writable for SR_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets SR to value 0"] impl crate::Resettable for SR_SPEC { const RESET_VALUE: Self::Ux = 0; }
//! Pokemon type /// Pokemon type #[derive(PartialEq, Eq, Clone, Copy, Hash, Debug)] #[derive(serde::Serialize, serde::Deserialize)] pub enum Type { Normal = 0x00, Fighting = 0x01, Flying = 0x02, Poison = 0x03, Ground = 0x04, Rock = 0x05, Bug = 0x06, Ghost = 0x07, Steel = 0x08, Mystery = 0x09, Fire = 0x0a, Water = 0x0b, Grass = 0x0c, Electric = 0x0d, Psychic = 0x0e, Ice = 0x0f, Dragon = 0x10, Dark = 0x11, } impl Type { /// Returns the `C` name of this type pub fn c_name(&self) -> &'static str { match self { Type::Normal => "NORMAL", Type::Fighting => "FIGHTING", Type::Flying => "FLYING", Type::Poison => "POISON", Type::Ground => "GROUND", Type::Rock => "ROCK", Type::Bug => "BUG", Type::Ghost => "GHOST", Type::Steel => "STEEL", Type::Mystery => "MYSTERY", Type::Fire => "FIRE", Type::Water => "WATER", Type::Grass => "GRASS", Type::Electric => "ELECTRIC", Type::Psychic => "PSYCHIC", Type::Ice => "ICE", Type::Dragon => "DRAGON", Type::Dark => "DARK", } } }
fn main() {
    proconio::input! {
        a: i32,
        b: i32,
        c: i32,
    }

    // Smallest multiple c*x (x >= 1) that reaches a; print it when it also
    // fits within b, otherwise report -1.
    let multiple = (1..).map(|x| c * x).find(|&m| m >= a).unwrap();
    let answer = if multiple <= b { multiple } else { -1 };
    println!("{}", answer);
}
// Copyright 2018 Dmitry Tantsur <divius.inside@gmail.com>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Common protocol bits.

#![allow(dead_code)] // various things are unused with --no-default-features
#![allow(missing_docs)]

use std::collections::HashMap;

use eui48::MacAddress;
use reqwest::{Method, Url};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde::de::{DeserializeOwned, Error as DeserError};
use serde_json;

use super::super::{Error, ErrorKind, Result};
use super::super::auth::AuthMethod;
use super::super::session::ServiceInfo;
use super::super::utils;
use super::ApiVersion;

/// A hyperlink as it appears in version discovery documents.
#[derive(Clone, Debug, Deserialize)]
pub struct Link {
    #[serde(deserialize_with = "deser_url")]
    pub href: Url,
    pub rel: String
}

/// An object reference: an ID plus its links.
#[derive(Clone, Debug, Deserialize)]
pub struct Ref {
    pub id: String,
    pub links: Vec<Link>
}

/// A minimal ID/name pair.
#[derive(Clone, Debug, Deserialize)]
pub struct IdAndName {
    pub id: String,
    pub name: String
}

/// A single key/value pair as serialized by some APIs.
#[derive(Clone, Debug, Deserialize)]
pub struct KeyValue {
    pub key: String,
    pub value: String,
}

/// One entry of a version discovery document.
#[derive(Clone, Debug, Deserialize)]
pub struct Version {
    pub id: String,
    pub links: Vec<Link>,
    pub status: String,
    // Some services send "" instead of omitting the field, hence
    // `empty_as_none`.
    #[serde(deserialize_with = "empty_as_none", default)]
    pub version: Option<ApiVersion>,
    #[serde(deserialize_with = "empty_as_none", default)]
    pub min_version: Option<ApiVersion>
}

/// A discovery document root: either a list of versions or a single version.
#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
pub enum Root {
    Versions { versions: Vec<Version> },
    Version { version: Version }
}

impl Version {
    /// Converts a discovered version entry into a `ServiceInfo`, taking the
    /// `self` link as the service root URL.
    ///
    /// Fails with `InvalidResponse` when no `self` link is present.
    pub fn into_service_info(self) -> Result<ServiceInfo> {
        let endpoint = match self.links.into_iter().find(|x| &x.rel == "self") {
            Some(link) => link.href,
            None => {
                return Err(Error::new(
                    ErrorKind::InvalidResponse,
                    "Invalid version - missing self link"));
            }
        };

        Ok(ServiceInfo {
            root_url: endpoint,
            current_version: self.version,
            minimum_version: self.min_version
        })
    }
}

/// Generic code to extract a `ServiceInfo` from a URL.
pub fn fetch_service_info(endpoint: Url, auth: &AuthMethod,
                          service_type: &str, major_version: &str)
        -> Result<ServiceInfo> {
    debug!("Fetching {} service info from {}", service_type, endpoint);

    // Workaround for old version of Nova returning HTTP endpoints even if
    // accessed via HTTPS
    let secure = endpoint.scheme() == "https";

    let result = auth.request(Method::Get, endpoint.clone())?.send();
    match result {
        Ok(mut resp) => {
            // A root may be a single version or a version list; for a list,
            // pick the entry whose id matches the requested major version.
            let mut info = match resp.json()? {
                Root::Version { version: ver } => ver.into_service_info(),
                Root::Versions { versions: vers } => {
                    match vers.into_iter().find(|x| &x.id == major_version) {
                        Some(ver) => ver.into_service_info(),
                        None => Err(Error::new_endpoint_not_found(service_type))
                    }
                }
            }?;

            // Older Nova returns insecure URLs even for secure protocol.
            if secure {
                let _ = info.root_url.set_scheme("https").unwrap();
            }

            debug!("Received {:?} for {} service from {}",
                   info, service_type, endpoint);
            Ok(info)
        },
        Err(ref e) if e.kind() == ErrorKind::ResourceNotFound => {
            // Discovery documents often live one path level up; retry on the
            // parent until we reach the URL root.
            if utils::url::is_root(&endpoint) {
                Err(Error::new_endpoint_not_found(service_type))
            } else {
                debug!("Got HTTP 404 from {}, trying parent endpoint", endpoint);
                fetch_service_info(utils::url::pop(endpoint, true),
                                   auth, service_type, major_version)
            }
        },
        Err(other) => Err(other)
    }
}

/// Deserialize value where empty string equals None.
pub fn empty_as_none<'de, D, T>(des: D) -> ::std::result::Result<Option<T>, D::Error> where D: Deserializer<'de>, T: DeserializeOwned { let value = serde_json::Value::deserialize(des)?; match &value { &serde_json::Value::String(ref s) if s == "" => return Ok(None), _ => () }; serde_json::from_value(value).map_err(DeserError::custom) } /// Deserialize value where empty string equals None. pub fn empty_as_default<'de, D, T>(des: D) -> ::std::result::Result<T, D::Error> where D: Deserializer<'de>, T: DeserializeOwned + Default { let value = serde_json::Value::deserialize(des)?; match &value { &serde_json::Value::String(ref s) if s == "" => return Ok(Default::default()), _ => () }; serde_json::from_value(value).map_err(DeserError::custom) } /// Deserialize a URL. pub fn deser_url<'de, D>(des: D) -> ::std::result::Result<Url, D::Error> where D: Deserializer<'de> { Url::parse(&String::deserialize(des)?).map_err(DeserError::custom) } /// Deserialize a URL. pub fn deser_optional_url<'de, D>(des: D) -> ::std::result::Result<Option<Url>, D::Error> where D: Deserializer<'de> { let value: Option<String> = Deserialize::deserialize(des)?; match value { Some(s) => Url::parse(&s).map_err(DeserError::custom).map(Some), None => Ok(None) } } /// Deserialize a key-value mapping. pub fn deser_key_value<'de, D>(des: D) -> ::std::result::Result<HashMap<String, String>, D::Error> where D: Deserializer<'de> { let value: Vec<KeyValue> = Deserialize::deserialize(des)?; Ok(value.into_iter().map(|kv| (kv.key, kv.value)).collect()) } /// Serialize a MAC address in its HEX format. pub fn ser_mac<S>(value: &MacAddress, serializer: S) -> ::std::result::Result<S::Ok, S::Error> where S: Serializer { value.to_hex_string().serialize(serializer) } /// Serialize a MAC address in its HEX format. pub fn ser_opt_mac<S>(value: &Option<MacAddress>, serializer: S) -> ::std::result::Result<S::Ok, S::Error> where S: Serializer { value.map(|m| m.to_hex_string()).serialize(serializer) }
use failure::Error;

/// `line` subcommand handler.
///
/// Currently a no-op that always succeeds.
// NOTE(review): presumably a placeholder for future functionality — confirm
// before relying on it doing any work.
pub fn line() -> Result<(), Error> {
    Ok(())
}
use super::{SourcePos, Span};
use nom::error::ErrorKind;
use nom::{Finish, IResult};
use std::fmt;

/// An error encountered when parsing sass.
///
/// This contains an error message (currently just a String, and often
/// not very descriptive) and informaion on where in the parsed data
/// the error occured.
#[derive(Debug, PartialEq, Eq)]
pub struct ParseError {
    pub msg: String,
    pub pos: SourcePos,
}

impl std::error::Error for ParseError {}

impl ParseError {
    /// Check a nom result for errors.
    ///
    /// This differs from a `From<nom::Err>` implementation in that an
    /// `Ok` result with remaining unparsed data is also considered an
    /// error.
    pub fn check<T>(res: IResult<Span, T>) -> Result<T, Self> {
        let (rest, value) = res.finish()?;
        if rest.fragment().is_empty() {
            Ok(value)
        } else {
            // Parsed successfully but did not consume all input.
            Err(ParseError::remaining(rest))
        }
    }

    /// Error for trailing input that the parser did not consume.
    fn remaining(span: Span) -> ParseError {
        ParseError {
            msg: "Expected end of file.".into(),
            pos: span.into(),
        }
    }

    /// Error wrapping a raw nom `ErrorKind` at the given position.
    fn err(kind: ErrorKind, span: Span) -> ParseError {
        ParseError {
            msg: format!("Parse error: {:?}", kind),
            pos: span.into(),
        }
    }
}

impl From<nom::error::Error<Span<'_>>> for ParseError {
    fn from(err: nom::error::Error<Span>) -> Self {
        ParseError::err(err.code, err.input)
    }
}

impl fmt::Display for ParseError {
    // Renders the message, the offending source line with a caret under the
    // error column, and then the chain of importing files. The `{0:lnw$}`
    // arguments pad with spaces to keep the gutter aligned with the
    // line-number width; `lpos` positions the caret.
    fn fmt(&self, out: &mut fmt::Formatter) -> fmt::Result {
        let line_no = self.pos.line_no.to_string();
        write!(
            out,
            "{msg}\
             \n{0:lnw$} ,\
             \n{ln} | {line}\
             \n{0:lnw$} |{0:>lpos$}^\
             \n{0:lnw$} '",
            "",
            line = self.pos.line,
            msg = self.msg,
            ln = line_no,
            lnw = line_no.len(),
            lpos = self.pos.line_pos,
        )?;
        // Walk the import chain: each position knows the file it came from
        // and, transitively, what imported that file.
        let mut nextpos = Some(&self.pos);
        while let Some(pos) = nextpos {
            write!(
                out,
                "\n{0:lnw$} {file} {row}:{col} {cause}",
                "",
                lnw = line_no.len(),
                file = pos.file.name(),
                row = pos.line_no,
                col = pos.line_pos,
                cause = if pos.file.imported_from().is_some() {
                    "import"
                } else {
                    "root stylesheet"
                },
            )?;
            nextpos = pos.file.imported_from();
        }
        Ok(())
    }
}
/// Returns the globally configured SSO URL, if any.
///
/// Reads the `SSO_URL` environment variable; yields `None` when it is unset
/// (or not valid Unicode).
pub fn global_sso() -> Option<String> {
    match std::env::var("SSO_URL") {
        Ok(url) => Some(url),
        Err(_) => None,
    }
}
#[cfg(test)] #[path = "../../../../tests/unit/solver/mutation/ruin/cluster_removal_test.rs"] mod cluster_removal_test; use super::*; use crate::algorithms::dbscan::{create_clusters, Cluster, NeighborhoodFn}; use crate::algorithms::geometry::Point; use crate::construction::heuristics::InsertionContext; use crate::models::common::Timestamp; use crate::models::problem::Job; use crate::models::Problem; use crate::solver::mutation::get_route_jobs; use crate::solver::RefinementContext; use crate::utils::{compare_floats, Environment, Random}; use rand::prelude::*; use std::sync::Arc; /// A ruin strategy which removes job clusters using [`DBSCAN`] algorithm. /// /// [`DBSCAN`]: ../../algorithms/dbscan/index.html /// pub struct ClusterRemoval { clusters: Vec<Vec<Job>>, limits: RuinLimits, } impl ClusterRemoval { /// Creates a new instance of `ClusterRemoval`. pub fn new(problem: Arc<Problem>, environment: Arc<Environment>, min_items: usize, limits: RuinLimits) -> Self { let min_items = min_items.max(3); let epsilon = estimate_epsilon(&problem, min_items); let mut clusters = create_job_clusters(&problem, environment.random.as_ref(), min_items, epsilon) .into_iter() .map(|cluster| cluster.into_iter().cloned().collect::<Vec<_>>()) .collect::<Vec<_>>(); clusters.shuffle(&mut environment.random.get_rng()); Self { clusters, limits } } /// Creates a new instance of `ClusterRemoval` with default parameters. 
pub fn new_with_defaults(problem: Arc<Problem>, environment: Arc<Environment>) -> Self { Self::new(problem, environment, 4, RuinLimits::default()) } } impl Ruin for ClusterRemoval { fn run(&self, _: &RefinementContext, mut insertion_ctx: InsertionContext) -> InsertionContext { let locked = insertion_ctx.solution.locked.clone(); let mut route_jobs = get_route_jobs(&insertion_ctx.solution); let max_removed_activities = self.limits.get_chunk_size(&insertion_ctx); let tracker = self.limits.get_tracker(); let mut indices = (0..self.clusters.len()).into_iter().collect::<Vec<usize>>(); indices.shuffle(&mut insertion_ctx.environment.random.get_rng()); indices.into_iter().take_while(|_| tracker.is_not_limit(max_removed_activities)).for_each(|idx| { let cluster = self.clusters.get(idx).unwrap(); let left = max_removed_activities - tracker.get_removed_activities(); cluster .iter() .filter(|job| !locked.contains(job)) .take_while(|_| tracker.is_not_limit(max_removed_activities)) .take(left) .for_each(|job| { if let Some(rc) = route_jobs.get_mut(job) { // NOTE actual insertion context modification via route mut if rc.route.tour.contains(job) { rc.route_mut().tour.remove(job); tracker.add_actor(rc.route.actor.clone()); tracker.add_job((*job).clone()); } } }); }); tracker.iterate_removed_jobs(|job| insertion_ctx.solution.required.push(job.clone())); insertion_ctx } } fn create_job_clusters<'a>( problem: &'a Problem, random: &(dyn Random + Send + Sync), min_items: usize, epsilon: f64, ) -> Vec<Cluster<'a, Job>> { // get main parameters with some randomization let profile = &problem.fleet.profiles[random.uniform_int(0, problem.fleet.profiles.len() as i32 - 1) as usize]; let neighbor_fn: NeighborhoodFn<'a, Job> = Box::new(move |job, eps| { Box::new(once(job).chain( problem.jobs.neighbors(profile, job, 0.).take_while(move |(_, cost)| *cost < eps).map(|(job, _)| job), )) }); create_clusters(problem.jobs.all_as_slice(), epsilon, min_items, &neighbor_fn) } /// Estimates DBSCAN epsilon 
parameter. fn estimate_epsilon(problem: &Problem, min_points: usize) -> f64 { // for each job get distance to its nth neighbor let mut costs = get_average_costs(problem, min_points); // sort all distances in ascending order and form the curve costs.sort_by(|&a, &b| compare_floats(a, b)); let curve = costs.into_iter().enumerate().map(|(idx, cost)| Point::new(idx as f64, cost)).collect::<Vec<_>>(); // get max curvature approximation and return it as a guess for optimal epsilon value get_max_curvature(curve.as_slice()) } /// Gets average costs across all profiles. fn get_average_costs(problem: &Problem, min_points: usize) -> Vec<f64> { let mut costs = problem.fleet.profiles.iter().fold(vec![0.; problem.jobs.size()], |mut acc, profile| { problem.jobs.all().enumerate().for_each(|(idx, job)| { acc[idx] += problem .jobs .neighbors(profile, &job, Timestamp::default()) .filter(|(_, cost)| *cost > 0.) .nth(min_points - 1) // TODO consider time window difference as extra cost? .map(|(_, cost)| *cost) .unwrap_or(0.); }); acc }); costs.iter_mut().for_each(|cost| *cost /= problem.fleet.profiles.len() as f64); costs } /// Gets max curvature approximation: for each point p on the curve, find the one with the maximum /// distance d to a line drawn from the first to the last point of the curves. fn get_max_curvature(values: &[Point]) -> f64 { if values.is_empty() { return 0.; } let first = values.first().unwrap(); let last = values.last().unwrap(); values .iter() .fold((0., f64::MIN), |acc, p| { let distance = p.distance_to_line(&first, &last); if distance > acc.1 { (p.y, distance) } else { acc } }) .0 }
use super::parse::IOrder::*;
use super::parse::*;
use nom::IResult;
use std::convert::TryInto;
use std::fs::File;
use std::io::Write;
use std::io::{BufRead, BufReader};
use std::str::from_utf8;

/// Reads `filename` and returns its lines with the trailing "\n" restored,
/// since the line parser expects newline-terminated input.
/// Panics on I/O errors (this is a CLI assembler front end).
pub fn readfile(filename: &str) -> Vec<String> {
    let mut lines = Vec::new();
    let file = File::open(filename).unwrap();
    let reader = BufReader::new(file);
    for line in reader.lines() {
        lines.push(line.unwrap() + "\n");
    }
    return lines;
}

/// Parses every line into an `IFile`, tracking `pos` as the current output
/// address (u16):
/// - `AST::Pos` jumps the address,
/// - `AST::Symbol` records a label at the current address,
/// - `AST::Order` stores an instruction and advances by its encoded length.
pub fn parse_file<'a>(lines: &'a Vec<String>) -> IResult<&[u8], IFile<'a>> {
    let mut ifile = IFile::default();
    let mut pos: u16 = 0;
    for i in 0..lines.len() {
        let (_, ast) = parse(lines[i].as_bytes())?;
        match ast {
            AST::Pos(p) => pos = p,
            AST::Symbol(s) => {
                ifile.symbols.insert(s, pos);
            }
            AST::Order(order) => {
                let len: u16 = order.len.try_into().unwrap();
                ifile.complies.insert(pos, order);
                pos = pos + len;
            }
        };
    }
    return Ok((b"", ifile));
}

/// Resolves symbolic operands: each instruction referring to a symbol gets its
/// `val_c` replaced by the address recorded for that symbol in `parse_file`.
/// Returns an error naming the first undefined symbol encountered.
pub fn check_symbol(mut ifile: IFile) -> Result<IFile, String> {
    for (_, val) in ifile.complies.iter_mut() {
        if val.symbol.is_some() {
            let s = val.symbol.unwrap();
            match ifile.symbols.get(s) {
                Some(pos) => {
                    *val = IComplie { val_c: pos.clone(), ..(val.clone()) }
                }
                None => return Err(format!("not found symbol {}", from_utf8(s).unwrap())),
            }
        }
    }
    return Ok(ifile);
}

/// Writes the assembled image to `filename` as text, one byte per line
/// rendered as 8 binary digits. Address gaps between instructions and the
/// tail of the image are padded with HALT opcodes up to 512 entries —
/// presumably the simulated machine's memory size (TODO confirm).
pub fn write_to_file(filename: &str, ifile: IFile) -> std::io::Result<()> {
    let mut pos: u16 = 0;
    let mut file = File::create(filename)?;
    for (p, val) in ifile.complies.iter() {
        // Pad forward to the instruction's address with HALT bytes.
        while pos < p.clone() {
            write!(file, "{:08b}\n", IOrder::HALT as u8)?;
            pos += 1;
        }
        match val.iorder {
            // 1-byte encodings: opcode only.
            HALT | RET | NOP | IRET => {
                write!(file, "{:08b}\n", val.iorder as u8)?;
                pos += 1
            }
            // 3-byte encodings: opcode + little-endian 16-bit target address.
            CALL | JMP | JLE | JL | JE | JNE | JGE | JG | JS | JNS | JA | JAE | JB | JBE => {
                write!(file, "{:08b}\n", val.iorder as u8)?;
                for byte in val.val_c.to_le_bytes().iter() {
                    write!(file, "{:08b}\n", byte)?;
                }
                pos += 3
            }
            // 2-byte encodings: opcode + packed register pair (two nibbles).
            RRMOVQ | ADDQ | SUBQ | MULQ | DIVQ | ANDQ | ORQ | XORQ | OUT => {
                write!(file, "{:08b}\n", val.iorder as u8)?;
                write!(file, "{:04b}{:04b}\n", val.r_a, val.r_b)?;
                pos += 2
            }
            // 4-byte encodings: opcode + register pair + 16-bit immediate.
            IRMOVQ | MRMOVQ | RMMOVQ => {
                write!(file, "{:08b}\n", val.iorder as u8)?;
                write!(file, "{:04b}{:04b}\n", val.r_a, val.r_b)?;
                for byte in val.val_c.to_le_bytes().iter() {
                    write!(file, "{:08b}\n", byte)?;
                }
                pos += 4
            }
            // Raw 16-bit data word, no opcode byte.
            CONST => {
                for byte in val.val_c.to_le_bytes().iter() {
                    write!(file, "{:08b}\n", byte)?;
                }
                pos += 2
            }
        }
    }
    // Fill the rest of the 512-entry image with HALT.
    while pos < 512 {
        write!(file, "{:08b}\n", IOrder::HALT as u8)?;
        pos += 1;
    }
    Ok(())
}
pub mod exec_nix_store; pub mod parsing; use super::tree::{Path, Tree, TreePathMap}; use std::path::PathBuf; use std::str::FromStr; /// This corresponds to a nix store path. /// /// ``` /// use nix_query_tree_viewer::nix_query_tree::NixQueryDrv; /// /// let nix_query_drv = /// NixQueryDrv::from("/nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10"); /// ``` #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct NixQueryDrv(PathBuf); impl<T: ?Sized + AsRef<std::ffi::OsStr>> From<&T> for NixQueryDrv { fn from(s: &T) -> NixQueryDrv { NixQueryDrv(PathBuf::from(s.as_ref().to_os_string())) } } impl std::ops::Deref for NixQueryDrv { type Target = std::path::Path; fn deref(&self) -> &std::path::Path { &self.0 } } impl NixQueryDrv { pub fn cmp_hash(&self, other: &Self) -> std::cmp::Ordering { self.0.cmp(&other.0) } pub fn cmp_drv_name(&self, other: &Self) -> std::cmp::Ordering { self.drv_name().cmp(&other.drv_name()) } /// Pull out the hash and derivation name from a `NixQueryDrv` /// /// ``` /// use nix_query_tree_viewer::nix_query_tree::NixQueryDrv; /// /// let nix_query_drv = /// NixQueryDrv::from("/nix/store/az4kl5slhbkmmy4vj98z3hzxxkan7zza-gnugrep-3.3"); /// assert_eq!( /// nix_query_drv.hash_and_drv_name(), /// String::from("az4kl5slhbkmmy4vj98z3hzxxkan7zza-gnugrep-3.3") /// ); /// ``` pub fn hash_and_drv_name(&self) -> String { let drv_str = self.0.to_string_lossy(); String::from((drv_str).trim_start_matches("/nix/store/")) } /// Pull out a truncated hash and derivation name from a `NixQueryDrv` /// /// ``` /// use nix_query_tree_viewer::nix_query_tree::NixQueryDrv; /// /// let nix_query_drv = /// NixQueryDrv::from("/nix/store/az4kl5slhbkmmy4vj98z3hzxxkan7zza-gnugrep-3.3"); /// assert_eq!( /// nix_query_drv.short_hash_and_drv_name(), /// String::from("az4kl5s..gnugrep-3.3") /// ); /// ``` pub fn short_hash_and_drv_name(&self) -> String { let drv_str = self.0.to_string_lossy(); let drv_str_no_store = String::from(drv_str.trim_start_matches("/nix/store/")); let 
option_drv_name = drv_str_no_store .find('-') .and_then(|i| drv_str_no_store.get(i + 1..)); let option_short_hash = drv_str_no_store.get(0..7); match (option_drv_name, option_short_hash) { (Some(drv_name), Some(short_hash)) => { format!("{}..{}", short_hash, drv_name) } _ => panic!("Ill-formed nix path"), } } /// Pull out a derivation name from a `NixQueryDrv`. /// /// ``` /// use nix_query_tree_viewer::nix_query_tree::NixQueryDrv; /// /// let nix_query_drv = /// NixQueryDrv::from("/nix/store/az4kl5slhbkmmy4vj98z3hzxxkan7zza-gnugrep-3.3"); /// assert_eq!(nix_query_drv.drv_name(), String::from("gnugrep-3.3")); /// ``` /// /// * Panics /// /// This panics if the derivation name doesn't have a `-` in it. All nix derivations have /// a `-` in them after the hash. pub fn drv_name(&self) -> String { let drv_str = self.0.to_string_lossy(); let option_dash_index = drv_str.find('-'); match option_dash_index { None => drv_str.into_owned(), Some(dash_index) => { let option_just_drv_name = drv_str.get(dash_index + 1..); match option_just_drv_name { None => { panic!("Nix paths will always have a dash in them.") } Some(drv_name) => drv_name.to_string(), } } } } } impl FromStr for NixQueryDrv { // This should really be never. type Err = (); fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(s.into()) } } impl std::fmt::Display for NixQueryDrv { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.0.to_string_lossy()) } } /// Whether or not there is a separate entry in this tree that recurses into the dependencies for /// this nix store entry. /// /// See `NixQueryEntry`. #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub enum Recurse { Yes, No, } /// `NixQueryDrv` coupled with a marker for a recursive entry. 
/// /// ``` /// use nix_query_tree_viewer::nix_query_tree::{NixQueryEntry, Recurse}; /// use std::str::FromStr; /// /// let nix_query_entry = /// NixQueryEntry::from_str("/nix/store/az4kl5slhbkmmy4vj98z3hzxxkan7zza-gnugrep-3.3 [...]"); /// let actual_nix_query_entry = /// NixQueryEntry::new("/nix/store/az4kl5slhbkmmy4vj98z3hzxxkan7zza-gnugrep-3.3", Recurse::Yes); /// assert_eq!(nix_query_entry, Ok(actual_nix_query_entry)); /// ``` /// #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct NixQueryEntry(pub NixQueryDrv, pub Recurse); impl FromStr for NixQueryEntry { type Err = nom::Err<(String, nom::error::ErrorKind)>; fn from_str(s: &str) -> Result<Self, Self::Err> { parsing::nix_query_entry_parser(s).map_err(|err| err.to_owned()) } } impl std::ops::Deref for NixQueryEntry { type Target = std::path::Path; fn deref(&self) -> &std::path::Path { &self.0 } } impl NixQueryEntry { pub fn new<T>(nix_query_drv: &T, recurse: Recurse) -> NixQueryEntry where T: ?Sized + AsRef<std::ffi::OsStr>, { NixQueryEntry(NixQueryDrv::from(nix_query_drv), recurse) } pub fn cmp_hash(&self, other: &Self) -> std::cmp::Ordering { self.0.cmp_hash(&other.0) } pub fn cmp_drv_name(&self, other: &Self) -> std::cmp::Ordering { self.0.cmp_drv_name(&other.0) } pub fn hash_and_drv_name(&self) -> String { self.0.hash_and_drv_name() } pub fn short_hash_and_drv_name(&self) -> String { self.0.short_hash_and_drv_name() } pub fn drv_name(&self) -> String { self.0.drv_name() } } /// A `Tree` representing the result from `nix store --query --tree`. /// /// ``` /// use indoc::indoc; /// use nix_query_tree_viewer::nix_query_tree::NixQueryTree; /// use std::str::FromStr; /// /// let raw_tree = indoc!( /// "/nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10 /// +---/nix/store/pnd2kl27sag76h23wa5kl95a76n3k9i3-glibc-2.27 /// | +---/nix/store/pnd2kl27sag76h23wa5kl95a76n3k9i3-glibc-2.27 [...] /// +---/nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10 [...] 
/// " /// ); /// let nix_query_tree = NixQueryTree::from_str(raw_tree); /// /// assert!(nix_query_tree.is_ok()); /// ``` #[derive(Clone, Debug, Eq, PartialEq)] pub struct NixQueryTree(pub Tree<NixQueryEntry>); impl NixQueryTree { pub fn path_map(&self) -> NixQueryPathMap { let tree: &Tree<NixQueryEntry> = &self.0; let tree_path_map = tree.path_map_map(&|nix_query_entry| nix_query_entry.0.clone()); NixQueryPathMap(tree_path_map) } pub fn lookup(&self, path: Path) -> Option<&NixQueryEntry> { self.0.lookup(path) } } impl FromStr for NixQueryTree { type Err = nom::Err<(String, nom::error::ErrorKind)>; fn from_str(s: &str) -> Result<Self, Self::Err> { parsing::nix_query_tree_parser(s).map_err(|err| err.to_owned()) } } /// A mapping of `NixQueryDrv` to `TreePath`. This gives an easy way to /// figure out where a `NixQueryDrv` is an a `NixQueryTree`. /// /// ``` /// use indoc::indoc; /// use nix_query_tree_viewer::nix_query_tree::{NixQueryDrv, NixQueryTree}; /// use nix_query_tree_viewer::tree::Path; /// use std::str::FromStr; /// /// let raw_tree = indoc!( /// "/nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10 /// +---/nix/store/pnd2kl27sag76h23wa5kl95a76n3k9i3-glibc-2.27 /// | +---/nix/store/pnd2kl27sag76h23wa5kl95a76n3k9i3-glibc-2.27 [...] /// +---/nix/store/qy93dp4a3rqyn2mz63fbxjg228hffwyw-hello-2.10 [...] /// +---/nix/store/9ny6szla9dg61jv8q22qbnqsz37465n0-multiple-outputs.sh /// +---/nix/store/pnd2kl27sag76h23wa5kl95a76n3k9i3-glibc-2.27 [...] 
/// +---/nix/store/5wvmvcc3b7sisirx1vsqbqdis0sd1x5d-cc-wrapper.sh /// +---/nix/store/5jzbjvnrz85n454inlyxcpgap9i6k6la-pcre-8.43 /// " /// ); /// let nix_query_tree = NixQueryTree::from_str(raw_tree).unwrap(); /// let map = nix_query_tree.path_map(); /// let pcre_drv = NixQueryDrv::from("/nix/store/5jzbjvnrz85n454inlyxcpgap9i6k6la-pcre-8.43"); /// let expected_path = Some(Path::from(vec![2, 0, 1])); /// /// assert_eq!(map.lookup_first(&pcre_drv), expected_path.as_ref()); /// ``` #[derive(Clone, Debug, Eq, PartialEq)] pub struct NixQueryPathMap(pub TreePathMap<NixQueryDrv>); impl NixQueryPathMap { pub fn lookup_first(&self, k: &NixQueryDrv) -> Option<&Path> { self.0.lookup_first(k) } }
use anyhow::{bail, Result}; use std::net::{SocketAddr, ToSocketAddrs}; use structopt::StructOpt; use tokio::{ io, net::{TcpListener, TcpStream}, }; use tokio_stream::wrappers::TcpListenerStream; use tracing::debug; mod broadcast_stream; mod connection; mod coordinator; use connection::tokio_connection; use coordinator::Coordinator; #[derive(Debug, StructOpt)] struct Opt { #[structopt(short = "l", long = "listen")] listen: bool, #[structopt(min_values = 1, max_values = 2)] args: Vec<String>, } async fn connect(addr: &SocketAddr, coordinator: &mut Coordinator) -> Result<()> { let stream = TcpStream::connect(addr).await?; let (read, write) = io::split(stream); tokio::spawn(coordinator.add_connection(tokio_connection::new_tokio_connection(read, write))); Ok(()) } async fn listen(addr: &SocketAddr, coordinator: &mut Coordinator) -> Result<()> { let listener = TcpListener::bind(addr).await?; tokio::spawn( coordinator.add_connection(tokio_connection::new_spawner_connection( TcpListenerStream::new(listener), )), ); Ok(()) } async fn start_console(coordinator: &mut Coordinator) -> Result<()> { let stdout = io::stdout(); let stdin = io::stdin(); tokio::spawn(coordinator.add_connection(tokio_connection::new_tokio_connection(stdin, stdout))); Ok(()) } fn parse_options() -> Result<(bool, SocketAddr)> { let opt = Opt::from_args(); tracing_subscriber::fmt::init(); let (address, port): (&str, &str) = if opt.listen { match opt.args.as_slice() { [port] => ("0.0.0.0", port), [address, port] => (address, port), _ => bail!("Invalid number of arguments"), } } else { match opt.args.as_slice() { [address, port] => (address, port), _ => bail!("Invalid number of arguments"), } }; let port: u16 = port.parse()?; let addr = (address, port).to_socket_addrs()?.next().unwrap(); debug!("address={:?} port={:?} addr={:?}", address, port, addr); Ok((opt.listen, addr)) } async fn run_main() -> Result<()> { let (listen_mode, addr) = parse_options()?; let mut coordinator = Coordinator::new(); if 
listen_mode { listen(&addr, &mut coordinator).await?; } else { connect(&addr, &mut coordinator).await?; } start_console(&mut coordinator).await?; coordinator.run().await; Ok(()) } #[tokio::main] async fn main() { run_main().await.expect("Error in main"); }
use core::ptr; use crate::RcCounter; /// The metadata stored inside the vector (in-line). #[repr(C)] pub struct RcMeta<TCounter: RcCounter> { /// The pointer to the vector. pub vec_ptr: *const u8, /// the capacity of the vector. pub capacity: usize, /// the reference counter. pub counter: TCounter, } impl<TCounter: RcCounter> RcMeta<TCounter> { pub fn initial(counter: TCounter) -> Self { Self { vec_ptr: ptr::null(), capacity: 0, counter, } } /// extracts the embedded vector. #[inline] pub unsafe fn extract_vec(&self, len: usize) -> Vec<u8> { Vec::from_raw_parts(self.vec_ptr as *mut u8, len, self.capacity) } }
//! Transport from Dummy Source to Arrow2 Destination. use crate::destinations::arrow2::{Arrow2Destination, Arrow2DestinationError, Arrow2TypeSystem}; use crate::sources::dummy::{DummySource, DummyTypeSystem}; use crate::typesystem::TypeConversion; use chrono::{DateTime, NaiveDate, NaiveDateTime, Utc}; use thiserror::Error; /// Convert Dummy data types to Arrow2 data types. pub struct DummyArrow2Transport; #[derive(Error, Debug)] pub enum DummyArrow2TransportError { #[error(transparent)] Destination(#[from] Arrow2DestinationError), #[error(transparent)] ConnectorX(#[from] crate::errors::ConnectorXError), } impl_transport!( name = DummyArrow2Transport, error = DummyArrow2TransportError, systems = DummyTypeSystem => Arrow2TypeSystem, route = DummySource => Arrow2Destination, mappings = { { F64[f64] => Float64[f64] | conversion auto} { I64[i64] => Int64[i64] | conversion auto} { Bool[bool] => Boolean[bool] | conversion auto} { String[String] => LargeUtf8[String] | conversion auto} { DateTime[DateTime<Utc>] => Date64[NaiveDateTime] | conversion option} } ); impl TypeConversion<DateTime<Utc>, NaiveDateTime> for DummyArrow2Transport { fn convert(val: DateTime<Utc>) -> NaiveDateTime { NaiveDateTime::from_timestamp(val.timestamp(), val.timestamp_subsec_nanos()) } } impl TypeConversion<NaiveDateTime, DateTime<Utc>> for DummyArrow2Transport { fn convert(val: NaiveDateTime) -> DateTime<Utc> { DateTime::from_utc(val, Utc) } } impl TypeConversion<NaiveDate, DateTime<Utc>> for DummyArrow2Transport { fn convert(val: NaiveDate) -> DateTime<Utc> { DateTime::from_utc(val.and_hms(0, 0, 0), Utc) } }
use std::io::BufReader; use std::io::prelude::*; use std::fs::File; use std::collections::HashSet; fn main() { let data = read_file(); let passes: HashSet<usize> = data.iter() .map(|string| seat_id(parse_boarding_pass(string.to_string()))) .collect::<HashSet<usize>>(); let min = passes.iter().min().unwrap(); let max = passes.iter().max().unwrap(); println!("Day 5 Phase 1. Max Boarding Pass = {}", max); for possible_seat_id in *min+1..*max-1 { if !passes.contains(&possible_seat_id) { println!("Day 5 Phase 2. Your Seat ID = {}", possible_seat_id); } } } // BP = [lower_row, upper_row, lower_column, upper_column] fn parse_boarding_pass(string: String) -> [usize; 4] { string.chars().fold([0, 127, 0, 7], |val, c| { match c { 'F' => [val[0], half(val[0], val[1]), val[2], val[3]], 'B' => [half(val[0], val[1]) + 1, val[1], val[2], val[3]], 'R' => [val[0], val[1], half(val[2], val[3]) + 1, val[3]], 'L' => [val[0], val[1], val[2], half(val[2], val[3])], _ => panic!("Whaaaat {}", c) } }) } fn half(lower: usize, upper: usize) -> usize { (upper + 1 - lower) / 2 + lower - 1 } fn seat_id(boarding_pass: [usize; 4]) -> usize { return boarding_pass[0] * 8 + boarding_pass[2]; } fn read_file() -> Vec<String> { let file = BufReader::new(File::open("day_5/src/day_5_input.txt").unwrap()); file.lines().map(|l| l.unwrap()).collect() }
use crate::lib::config::get_config_dfx_dir_path; use crate::lib::error::DfxResult; use anyhow::bail; use atty::Stream; use std::fs::File; use std::include_str; use std::path::PathBuf; pub fn get_telemetry_config_root() -> DfxResult<PathBuf> { let root = get_config_dfx_dir_path()?.join("telemetry"); if !root.exists() { if std::fs::create_dir_all(&root).is_err() { bail!( "Cannot create telemetry config directory at '{}'.", root.display(), ); } } else if !root.is_dir() { bail!( "Cannot find telemetry config directory at '{}'.", root.display(), ); } Ok(root) } pub fn witness_telemetry_consent() -> DfxResult<()> { if atty::is(Stream::Stderr) { let file = get_telemetry_config_root()?.join("witness.blank"); if !file.exists() { eprintln!("\n{}", include_str!("../../../assets/consent.txt")); if File::create(&file).is_err() { bail!( "Cannot create telemetry consent witness file at '{}'.", file.display(), ); } } else if !file.is_file() { bail!( "Cannot find telemetry consent witness file at '{}'.", file.display(), ); } } Ok(()) }
use std::collections::HashMap;
use std::collections::HashSet;
use std::fmt;
use crate::ticket::Ticket;

/* A build rule as written in a .rules file: the files it produces, the files
   it consumes, and the command that turns sources into targets. */
pub struct Rule
{
    pub targets : Vec<String>,
    pub sources : Vec<String>,
    pub command : Vec<String>,
}

/*  When a rule is first parsed, it goes into this struct, the targets, sources
    and command are simply parsed into vecs.  This is before the
    topological-sort step which puts the data into a list of Nodes and creates
    Nodes for sources that are not listed as targets of rules. */
impl Rule
{
    /* Targets and sources are sorted so later steps can rely on a canonical
       order (e.g. for ticket hashing and index lookups). */
    fn new(
        mut targets : Vec<String>,
        mut sources : Vec<String>,
        command : Vec<String>) -> Rule
    {
        targets.sort();
        sources.sort();
        Rule
        {
            targets: targets,
            sources: sources,
            command: command
        }
    }
}

/*  Once the rules are topologically sorted, the data in them gets put into
    this struct.  Instead of storing each source as a path, this stores indices
    indicating which other node has the source as a target.

    Node also carries an optional Ticket.  If the Node came from a rule, that's
    the hash of the rule itself (not file content). */
#[derive(Debug)]
pub struct Node
{
    pub targets: Vec<String>,
    pub source_indices: Vec<(usize, usize)>,
    pub command : Vec<String>,
    pub rule_ticket : Option<Ticket>,
}

impl fmt::Display for Rule
{
    /* Renders the rule in .rules syntax: targets, sources and command blocks
       separated by ":" lines. */
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
    {
        for t in self.targets.iter()
        {
            write!(f, "{}\n", t).unwrap();
        }
        write!(f, ":\n").unwrap();
        for t in self.sources.iter()
        {
            write!(f, "{}\n", t).unwrap();
        }
        write!(f, ":\n").unwrap();
        for t in self.command.iter()
        {
            write!(f, "{}\n", t).unwrap();
        }
        write!(f, ":\n")
    }
}

impl fmt::Display for Node
{
    /* Renders the node's targets; rule-backed nodes also show their ticket. */
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
    {
        match &self.rule_ticket
        {
            Some(ticket) =>
            {
                write!(f, "\n").unwrap();
                for t in self.targets.iter()
                {
                    write!(f, "{}\n", t).unwrap();
                }
                write!(f, "{}\n\n", ticket).unwrap();
            },
            None =>
            {
                for t in self.targets.iter()
                {
                    write!(f, "{}\n", t).unwrap();
                }
            }
        }
        write!(f, "")
    }
}

/* Half-open [start, end) span of one line's content within the file text. */
struct EndpointPair
{
    start : usize,
    end : usize,
}

/*  Iterates through the given str, returns an EndpointPair for each line's
    content (without the newline character itself).

    Endpoints are BYTE offsets, matching what String::split_off expects in
    split_along_endpoints.

    BUG FIX: the previous version counted characters (chars().enumerate()),
    which produced wrong offsets — and panics in split_off — for any content
    containing multi-byte UTF-8.  Its `current_i > 1` guard on the trailing
    chunk also silently dropped a one-character final line.  Behavior for
    ASCII input with a final newline is unchanged. */
fn get_line_endpoints(content : &str) -> Vec<EndpointPair>
{
    let mut endpoints = Vec::new();
    let mut last_i = 0usize;
    let mut end_i = 0usize;
    for (i, c) in content.char_indices()
    {
        match c
        {
            '\n' =>
            {
                endpoints.push(EndpointPair{
                    start:last_i,
                    end:i,
                });
                last_i = i + 1;
            },
            _ => {},
        }
        /* Exclusive byte offset just past the character seen last. */
        end_i = i + c.len_utf8();
    }

    /* Content not terminated by a newline still forms a final line. */
    if last_i < end_i
    {
        endpoints.push(EndpointPair{
            start:last_i,
            end:end_i,
        });
    }
    endpoints
}

/*  Takes a String and a vector of EndpointPairs.  Consumes both inputs and
    outputs a vector of Strings split off from the input String at the indices
    indicated by the endpoints.  `total` tracks how much of the original string
    has already been consumed, so endpoint offsets can be rebased. */
fn split_along_endpoints(
    mut content : String,
    mut endpoints : Vec<EndpointPair>) -> Vec<String>
{
    let mut result = Vec::new();
    let mut total = 0usize;

    for p in endpoints.drain(..)
    {
        let mut chunk = content.split_off(p.start - total);
        content = chunk.split_off(p.end - p.start);
        chunk.shrink_to_fit();
        total = p.end;
        result.push(chunk);
    }

    result
}

/* One error per way a .rules file can be malformed; each carries the
   filename and 1-based line number. */
#[derive(Debug)]
pub enum ParseError
{
    UnexpectedEmptyLine(String, usize),
    UnexpectedExtraColon(String, usize),
    UnexpectedContent(String, usize),
    UnexpectedEndOfFileMidTargets(String, usize),
    UnexpectedEndOfFileMidSources(String, usize),
    UnexpectedEndOfFileMidCommand(String, usize),
}

impl fmt::Display for ParseError
{
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result
    {
        match self
        {
            ParseError::UnexpectedEmptyLine(filename, line_number) =>
                write!(formatter, "Unexpected empty line {}:{}", filename, line_number),

            ParseError::UnexpectedExtraColon(filename, line_number) =>
                write!(formatter, "Unexpected extra ':' on line {}:{}", filename, line_number),

            ParseError::UnexpectedContent(filename, line_number) =>
                write!(formatter, "Unexpected content on line {}:{}", filename, line_number),

            ParseError::UnexpectedEndOfFileMidTargets(filename, line_number) =>
                write!(formatter, "Unexpected end of file mid-targets line {}:{}", filename, line_number),

            ParseError::UnexpectedEndOfFileMidSources(filename, line_number) =>
                write!(formatter, "Unexpected end of file mid-sources line {}:{}", filename, line_number),

            ParseError::UnexpectedEndOfFileMidCommand(filename, line_number) =>
                write!(formatter, "Unexpected end of file mid-command line {}:{}", filename, line_number),
        }
    }
}

/*  Takes a vector of string-pairs representing (filename, content).  Parses
    each file's contents as rules and returns one big vector full of Rule
    objects.  If the parsing of any one file presents an error, this function
    returns the ParseError object for the first error, and does not bother
    parsing the rest. */
pub fn parse_all(mut contents : Vec<(String, String)>) -> Result<Vec<Rule>, ParseError>
{
    let mut result : Vec<Rule> = vec![];
    for (filename, content) in contents.drain(..)
    {
        let single_parse_result = parse(filename, content)?;
        result.extend(single_parse_result);
    }
    Ok(result)
}

/*  Reads in a .rules file content as a String, and creates a vector of Rule objects. */
pub fn parse(filename : String, content : String) -> Result<Vec<Rule>, ParseError>
{
    /* Which section of a rule the current line belongs to; NewLine expects the
       blank separator between rules. */
    enum Mode
    {
        Targets,
        Sources,
        Command,
        NewLine
    }

    let mut rules = Vec::new();
    let mut targets = vec![];
    let mut sources = vec![];
    let mut command = vec![];
    let mut mode = Mode::Targets;
    let mut line_number = 1;
    let endpoints = get_line_endpoints(&content);
    /* State machine over the file's lines: ":" advances to the next section,
       a blank line (only legal between rules) starts a new rule. */
    for line in split_along_endpoints(content, endpoints).drain(..)
    {
        match mode
        {
            Mode::Targets =>
            {
                match line.as_ref()
                {
                    "" => return Err(ParseError::UnexpectedEmptyLine(filename, line_number)),
                    ":" => mode = Mode::Sources,
                    _ => targets.push(line),
                }
            },
            Mode::Sources =>
            {
                match line.as_ref()
                {
                    "" => return Err(ParseError::UnexpectedEmptyLine(filename, line_number)),
                    ":" => mode = Mode::Command,
                    _ => sources.push(line),
                }
            },
            Mode::Command =>
            {
                match line.as_ref()
                {
                    "" => return Err(ParseError::UnexpectedEmptyLine(filename, line_number)),
                    ":" =>
                    {
                        /* Rule complete: store it and reset the accumulators. */
                        mode = Mode::NewLine;
                        rules.push(Rule::new(targets, sources, command));
                        targets = vec![];
                        sources = vec![];
                        command = vec![];
                    }
                    _ => command.push(line),
                }
            },
            Mode::NewLine =>
            {
                match line.as_ref()
                {
                    "" => mode = Mode::Targets,
                    ":" => return Err(ParseError::UnexpectedExtraColon(filename, line_number)),
                    _ => return Err(ParseError::UnexpectedContent(filename, line_number)),
                }
            },
        }
        line_number += 1;
    }

    /* Only ending between rules (NewLine) is a valid end of file. */
    match mode
    {
        Mode::NewLine => return Ok(rules),
        Mode::Targets => return Err(ParseError::UnexpectedEndOfFileMidTargets(filename, line_number)),
        Mode::Sources => return Err(ParseError::UnexpectedEndOfFileMidSources(filename, line_number)),
        Mode::Command => return Err(ParseError::UnexpectedEndOfFileMidCommand(filename, line_number)),
    }
}

/* A rule's data while the topological sort is in flight, plus bookkeeping:
   `index`/`sub_index` locate the frame and its goal target, `visited` marks
   whether the frame's dependencies have already been expanded in the DFS. */
struct Frame
{
    targets: Vec<String>,
    sources: Vec<String>,
    command: Vec<String>,
    rule_ticket: Option<Ticket>,
    index: usize,
    sub_index: usize,
    visited: bool,
}

impl Frame
{
    /* Leaf frame for a source file no rule produces: no sources, no command,
       no ticket, and nothing left to expand (visited = true). */
    fn from_source_and_index(source : &str, index : usize) -> Frame
    {
        Frame
        {
            targets: vec![source.to_string()],
            sources: vec![],
            command: vec![],
            rule_ticket: None,
            index: index,
            sub_index: 0,
            visited: true,
        }
    }

    /* Frame for a real rule; the ticket hashes the rule's text content. */
    fn from_rule_and_index(rule : Rule, index : usize) -> Frame
    {
        let rule_ticket = Ticket::from_strings(
            &rule.targets,
            &rule.sources,
            &rule.command);

        Frame
        {
            targets: rule.targets,
            sources: rule.sources,
            command: rule.command,
            rule_ticket: Some(rule_ticket),
            index: index,
            sub_index: 0,
            visited: false,
        }
    }
}

/* Everything that can go wrong while ordering rules into a build graph. */
#[derive(Debug)]
pub enum TopologicalSortError
{
    TargetMissing(String),
    SelfDependentRule(String),
    CircularDependence(Vec<String>),
    TargetInMultipleRules(String),
}

impl fmt::Display for TopologicalSortError
{
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result
    {
        match self
        {
            TopologicalSortError::TargetMissing(target) =>
                write!(formatter, "Target missing from rules: {}", target),

            TopologicalSortError::SelfDependentRule(target) =>
                write!(formatter, "Self-dependent target: {}", target),

            TopologicalSortError::CircularDependence(cycle) =>
            {
                write!(formatter, "Circular dependence:\n")?;
                for t in cycle.iter()
                {
                    write!(formatter, "{}\n", t)?;
                }
                Ok(())
            },

            TopologicalSortError::TargetInMultipleRules(target) =>
                write!(formatter, "Target found in more than one rule: {}", target),
        }
    }
}

/*  Consume Rules, and in their place, make Frames.  In each Frame, leave
    'source_indices' handling for later.

    Returns:
        frame_buffer: A vector of optional frames corresponding to original rules
        to_buffer_index: A map that tells us the index in frame_buffer of the node
            that has the given string as a target, and also subindex, the index in
            that node's target list of the target in question

    Errors with TargetInMultipleRules if two rules claim the same target. */
fn rules_to_frame_buffer(mut rules : Vec<Rule>) -> Result<
    (Vec<Option<Frame>>, HashMap<String, (usize, usize)>),
    TopologicalSortError>
{
    let mut frame_buffer : Vec<Option<Frame>> = Vec::new();
    let mut to_buffer_index : HashMap<String, (usize, usize)> = HashMap::new();
    let mut current_buffer_index = 0usize;
    for mut rule in rules.drain(..)
    {
        rule.targets.sort();
        rule.sources.sort();
        for (sub_index, target) in rule.targets.iter().enumerate()
        {
            let t_string = target.to_string();
            match to_buffer_index.get(&t_string)
            {
                Some(_) => return Err(TopologicalSortError::TargetInMultipleRules(t_string)),
                None => to_buffer_index.insert(t_string, (current_buffer_index, sub_index)),
            };
        }
        frame_buffer.push(Some(Frame::from_rule_and_index(rule, current_buffer_index)));
        current_buffer_index += 1;
    }
    Ok((frame_buffer, to_buffer_index))
}

struct TopologicalSortMachine
{
    /* The "buffer" referred to by variable-names here is the buffer of frames (frame_buffer) */
    frame_buffer : Vec<Option<Frame>>,

    /* Sends the target name to a pair of indices:
        - index of the rule in which it's a target
        - index of the target in the rule's target list */
    to_buffer_index : HashMap<String, (usize, usize)>,

    /* Keeps track of the next index to insert into frame_buffer with */
    current_buffer_index : usize,

    /* Recall frame_buffer is a vector of options.  That's so that the frames can be
       taken from frame_buffer and added to frames_in_order */
    frames_in_order : Vec<Frame>,

    /* This maps index in frame_buffer to index in frames_in_order */
    index_bijection : HashMap<usize, usize>,
}

/*  Holds the state of the topological sort, so that we can either sort from one
    origin, or continue sorting until all rules have been visited.
*/
impl TopologicalSortMachine
{
    pub fn new(
        frame_buffer : Vec<Option<Frame>>,
        to_buffer_index : HashMap<String, (usize, usize)>
        ) -> TopologicalSortMachine
    {
        let frame_buffer_length = frame_buffer.len();
        TopologicalSortMachine
        {
            frame_buffer : frame_buffer,
            to_buffer_index : to_buffer_index,
            /* New frames (for rule-less sources) get appended after the rule frames. */
            current_buffer_index : frame_buffer_length,
            frames_in_order : vec![],
            index_bijection : HashMap::new(),
        }
    }

    /*  Originates a topological sort DFS from the frame indicated by the given index,
        noting the given sub_index as the location of the goal-target in that frame's
        target list.

        Frames are *taken* out of frame_buffer when first reached (leaving None), which
        both marks them visited across calls and lets them move onto the DFS stack.
        A frame is pushed back onto the stack with visited = true after its sources;
        when popped again it is appended to frames_in_order (post-order = dependency
        order). */
    pub fn sort_once(&mut self, index : usize, sub_index : usize) -> Result<(), TopologicalSortError>
    {
        let starting_frame = match self.frame_buffer[index].take()
        {
            Some(mut frame) =>
            {
                frame.sub_index = sub_index;
                frame
            },
            None =>
            {
                /* Assume we're in the middle of a build-all operation,
                   and we've already handle this rule. */
                return Ok(());
            },
        };

        let mut indices_in_stack = HashSet::new();
        indices_in_stack.insert(index);
        let mut stack = vec![starting_frame];

        /* Depth-first traversal using 'stack' */
        while let Some(frame) = stack.pop()
        {
            indices_in_stack.remove(&frame.index);
            if frame.visited
            {
                /* Second visit: all dependencies are already emitted. */
                self.index_bijection.insert(frame.index, self.frames_in_order.len());
                self.frames_in_order.push(frame);
            }
            else
            {
                /* Collect source frames first, then reverse them onto the stack
                   so they are expanded in original order. */
                let mut reverser = vec![];
                for source in frame.sources.iter()
                {
                    match self.to_buffer_index.get(source)
                    {
                        Some((buffer_index, sub_index)) =>
                        {
                            if let Some(mut frame) = self.frame_buffer[*buffer_index].take()
                            {
                                frame.sub_index = *sub_index;
                                reverser.push(frame);
                            }
                            else
                            {
                                /* Already taken: either this very frame (self-dependence)... */
                                if frame.index == *buffer_index
                                {
                                    return Err(TopologicalSortError::SelfDependentRule(
                                        frame.targets[*sub_index].clone()));
                                }

                                /* Look for a cycle by checking the stack for another
                                   instance of the node we're currently on */
                                if indices_in_stack.contains(buffer_index)
                                {
                                    let mut target_cycle = vec![];
                                    for f in stack.iter()
                                    {
                                        target_cycle.push(f.targets[f.sub_index].clone());
                                    }
                                    target_cycle.push(frame.targets[frame.sub_index].clone());
                                    return Err(TopologicalSortError::CircularDependence(target_cycle));
                                }
                            }
                        },
                        None =>
                        {
                            /* Source produced by no rule: synthesize a leaf frame,
                               emit it immediately, and register its index. */
                            self.index_bijection.insert(self.current_buffer_index, self.frames_in_order.len());
                            self.frames_in_order.push(Frame::from_source_and_index(source, self.current_buffer_index));
                            self.frame_buffer.push(None);
                            self.to_buffer_index.insert(source.to_string(), (self.current_buffer_index, 0));
                            self.current_buffer_index += 1;
                        },
                    }
                }

                /* Re-push this frame marked visited, below its dependencies. */
                stack.push(
                    Frame
                    {
                        targets: frame.targets,
                        sources: frame.sources,
                        command: frame.command,
                        rule_ticket: frame.rule_ticket,
                        index: frame.index,
                        sub_index: frame.sub_index,
                        visited: true
                    }
                );
                indices_in_stack.insert(frame.index);

                while let Some(f) = reverser.pop()
                {
                    indices_in_stack.insert(f.index);
                    stack.push(f);
                }
            }
        }

        Ok(())
    }

    /*  Remap the sources of all the nodes to indices in the new result vector. */
    pub fn get_result(mut self) -> Result<Vec<Node>, TopologicalSortError>
    {
        let mut result = vec![];
        for mut frame in self.frames_in_order.drain(..)
        {
            let mut source_indices = vec![];
            for source in frame.sources.drain(..)
            {
                let (buffer_index, sub_index) = self.to_buffer_index.get(&source).unwrap();
                source_indices.push((*self.index_bijection.get(buffer_index).unwrap(), *sub_index));
            }
            result.push(
                Node
                {
                    targets: frame.targets,
                    source_indices: source_indices,
                    command: frame.command,
                    rule_ticket: frame.rule_ticket,
                }
            );
        }
        Ok(result)
    }
}

/*  Takes a vector of Rules and goal_target, goal target is the target in whose
    rule the search originates.

    Rules contain enough information to establish a dependence tree.  This
    function searches that tree to create a sorted list of another type: Node.

    Leaves (sources which are not also listed as targets) become Nodes with a
    non-existent RuleInfo and an empty list of sources. */
pub fn topological_sort(
    rules : Vec<Rule>,
    goal_target : &str) -> Result<Vec<Node>, TopologicalSortError>
{
    /*  Convert Rules to Frames.  Frame has some extra elements that facilitate
        the topological sort. */
    match rules_to_frame_buffer(rules)
    {
        Err(error) =>
        {
            /* If two rules have the same target, we wind up here. */
            return Err(error);
        },
        Ok((frame_buffer, to_buffer_index)) =>
        {
            let (index, sub_index) = match to_buffer_index.get(goal_target)
            {
                Some((index, sub_index)) => (*index, *sub_index),
                None => return Err(TopologicalSortError::TargetMissing(goal_target.to_string())),
            };

            let mut machine = TopologicalSortMachine::new(frame_buffer, to_buffer_index);
            machine.sort_once(index, sub_index)?;
            return machine.get_result();
        }
    }
}

/*  For building all targets.  This function calls rules_to_frame_buffer to
    generate frames for the rules, then iterates through all the frames,
    sorting from each not-yet-visited frame in turn. */
pub fn topological_sort_all(
    rules : Vec<Rule>) -> Result<Vec<Node>, TopologicalSortError>
{
    /*  Convert Rules to Frames.  Frame has some extra elements that facilitate
        the topological sort. */
    match rules_to_frame_buffer(rules)
    {
        Err(error) =>
        {
            /* If two rules have the same target, we wind up here. */
            return Err(error);
        },
        Ok((frame_buffer, to_buffer_index)) =>
        {
            let frame_buffer_len = frame_buffer.len();
            let mut machine = TopologicalSortMachine::new(frame_buffer, to_buffer_index);
            for index in 0..frame_buffer_len
            {
                machine.sort_once(index, 0)?;
            }
            return machine.get_result();
        }
    }
}

#[cfg(test)]
mod tests
{
    use crate::rule::
    {
        Rule,
        rules_to_frame_buffer,
        topological_sort,
        topological_sort_all,
        TopologicalSortError,
        EndpointPair,
        split_along_endpoints,
        parse,
        parse_all,
        ParseError,
        get_line_endpoints,
    };

    /*  Use the Rule constructor with some vectors of strings, and check that
        the strings end up in the right place.
*/ #[test] fn rules_are_rules() { let rulefile = "abc".to_string(); let r = Rule { targets : vec![rulefile[0..1].to_string()], sources : vec![rulefile[1..2].to_string()], command : vec![rulefile[2..3].to_string()], }; assert_eq!(r.targets[0], "a"); assert_eq!(r.sources[0], "b"); assert_eq!(r.command[0], "c"); } /* Call rules_to_frame_buffer with an empty vector, make sure we get an empty frame_buffer and an empty map. */ #[test] fn rules_to_frame_buffer_empty_to_empty() { match rules_to_frame_buffer(vec![]) { Ok((frame_buffer, to_frame_buffer_index)) => { assert_eq!(frame_buffer.len(), 0); assert_eq!(to_frame_buffer_index.len(), 0); }, Err(_) => panic!("Error on empty vector"), } } /* Call rules_to_frame_buffer with a vector with just one rule in it, one rule with a A couple sources a couple targets and a command. */ #[test] fn rules_to_frame_buffer_one_to_one() { match rules_to_frame_buffer( vec![ Rule { targets: vec!["plant".to_string(), "tangerine".to_string()], sources: vec!["seed".to_string(), "soil".to_string()], command: vec!["water every day".to_string()], }, ] ) { Ok((frame_buffer, to_frame_buffer_index)) => { /* There should be one frame, and pairs in the map: plant -> (0, 0) tangerine -> (0, 1) */ assert_eq!(frame_buffer.len(), 1); assert_eq!(to_frame_buffer_index.len(), 2); assert_eq!(*to_frame_buffer_index.get("plant").unwrap(), (0usize, 0usize)); assert_eq!(*to_frame_buffer_index.get("tangerine").unwrap(), (0usize, 1usize)); /* to_frame_buffer_index maps a target to a pair of indices: the index of the node and the index of the target in the node. 
*/ let (node_index, target_index) = to_frame_buffer_index.get("plant").unwrap(); assert_eq!(*node_index, 0usize); /* Check that there's a node at that index with the right target */ match &frame_buffer[*node_index] { Some(frame) => assert_eq!(frame.targets[*target_index], "plant"), None => panic!("Expected some node with target 'plant' found None"), } /* to_frame_buffer_index maps a target to a pair of indices: the index of the node and the index of the target in the node. */ let (node_index, target_index) = to_frame_buffer_index.get("tangerine").unwrap(); assert_eq!(*node_index, 0usize); /* Check that there's a node at that index with the right target */ match &frame_buffer[*node_index] { Some(frame) => assert_eq!(frame.targets[*target_index], "tangerine"), None => panic!("Expected some node with target 'tangerine' found None"), } /* Get the frame (at index 0), and check that the sources and command are what was set above. */ match &frame_buffer[*node_index] { Some(frame) => { assert_eq!(frame.targets[*target_index], "tangerine"); assert_eq!(frame.sources[0], "seed"); assert_eq!(frame.sources[1], "soil"); match frame.command.first() { Some(command) => { assert_eq!(command, "water every day"); }, None => panic!("Expected some command found None"), } } None => panic!("Expected some node found None"), } assert_eq!(*to_frame_buffer_index.get("tangerine").unwrap(), (0usize, 1usize)); }, Err(_) => panic!("Error on legit rules"), } } #[test] fn rules_to_frame_buffer_two_to_two() { match rules_to_frame_buffer( vec![ Rule { targets: vec!["fruit".to_string()], sources: vec!["plant".to_string()], command: vec!["pick occasionally".to_string()], }, Rule { targets: vec!["plant".to_string()], sources: vec!["soil".to_string(), "seed".to_string()], command: vec!["water every day".to_string()], }, ] ) { Ok((frame_buffer, to_frame_buffer_index)) => { assert_eq!(frame_buffer.len(), 2); assert_eq!(to_frame_buffer_index.len(), 2); 
assert_eq!(*to_frame_buffer_index.get("fruit").unwrap(), (0usize, 0usize)); assert_eq!(*to_frame_buffer_index.get("plant").unwrap(), (1usize, 0usize)); }, Err(_) => panic!("Error on legit rules"), } } /* Create a list of rules where two rules list the same target. Try to call rules_to_frame_buffer, and check that an error-result is returned reporting the redundant target */ #[test] fn rules_to_frame_buffer_redundancy_error() { match rules_to_frame_buffer( vec![ Rule { targets: vec!["fruit".to_string()], sources: vec!["plant".to_string()], command: vec!["pick occasionally".to_string()], }, Rule { targets: vec!["plant".to_string(), "fruit".to_string()], sources: vec!["soil".to_string(), "seed".to_string()], command: vec!["water every day".to_string()], }, ] ) { Ok(_) => { panic!("Unexpected success on rules with redundant targets"); }, Err(error) => { match error { TopologicalSortError::TargetInMultipleRules(target) => assert_eq!(target, "fruit"), _ => panic!("Unexpected error type when multiple fruit expected") } } } } /* Topological sort the empty set of rules, but with a goal-target. That should error. */ #[test] fn topological_sort_empty_is_error() { match topological_sort(vec![], "prune") { Ok(_) => { panic!("Enexpected success on topological sort of empty"); }, Err(error) => { match error { TopologicalSortError::TargetMissing(target) => assert_eq!(target, "prune"), _ => panic!("Expected target missing prune, got another type of error") } }, } } /* Topological sort all of an empty set of rules, check that the result is empty. */ #[test] fn topological_sort_all_empty_is_empty() { match topological_sort_all(vec![]) { Ok(result) => { assert_eq!(result.len(), 0); }, Err(error) => { panic!("Expected success topological sorting empty vector of rules, got {}", error); }, } } /* Topological sort a list of one rule only. Check the result contains a frame with just that one rule's data. 
*/ #[test] fn topological_sort_one_rule() { match topological_sort( vec![ Rule { targets: vec!["plant".to_string()], sources: vec![], command: vec![], }, ], "plant") { Ok(nodes) => { assert_eq!(nodes.len(), 1); assert_eq!(nodes[0].targets[0], "plant"); } Err(error) => panic!("Expected success, got: {}", error), } } /* Topological sort a list of one rule only. Check the result contains a frame with just that one rule's data. */ #[test] fn topological_sort_all_one_rule() { match topological_sort_all( vec![ Rule { targets: vec!["plant".to_string()], sources: vec![], command: vec![], }, ]) { Ok(nodes) => { assert_eq!(nodes.len(), 1); assert_eq!(nodes[0].targets[0], "plant"); } Err(error) => panic!("Expected success, got: {}", error), } } /* Topological sort a list of two rules only, one depends on the other as a source, but the order in the given list is backwards. Check that the topological sort reverses the order. */ #[test] fn topological_sort_two_rules() { match topological_sort( vec![ Rule { targets: vec!["fruit".to_string()], sources: vec!["plant".to_string()], command: vec!["pick occasionally".to_string()], }, Rule { targets: vec!["plant".to_string()], sources: vec![], command: vec![], }, ], "fruit") { Ok(nodes) => { assert_eq!(nodes.len(), 2); assert_eq!(nodes[0].targets[0], "plant"); assert_eq!(nodes[1].targets[0], "fruit"); } Err(error) => panic!("Expected success, got: {}", error), } } /* Topological sort all of a list of two rules only, one depends on the other as a source, but the order in the given list is backwards. Check that the topological sort reverses the order. 
*/ #[test] fn topological_sort_all_two_rules() { match topological_sort_all( vec![ Rule { targets: vec!["fruit".to_string()], sources: vec!["plant".to_string()], command: vec!["pick occasionally".to_string()], }, Rule { targets: vec!["plant".to_string()], sources: vec![], command: vec![], }, ]) { Ok(nodes) => { assert_eq!(nodes.len(), 2); assert_eq!(nodes[0].targets[0], "plant"); assert_eq!(nodes[1].targets[0], "fruit"); } Err(error) => panic!("Expected success, got: {}", error), } } /* Topological sort a DAG that is not a tree. Four nodes math, physics, graphics, game physics and graphics both depend on math, and game depends on physics and graphics. */ #[test] fn topological_sort_four_rules_diamond_already_in_order() { match topological_sort( vec![ Rule { targets: vec!["math".to_string()], sources: vec![], command: vec![], }, Rule { targets: vec!["physics".to_string()], sources: vec!["math".to_string()], command: vec!["build physics".to_string()], }, Rule { targets: vec!["graphics".to_string()], sources: vec!["math".to_string()], command: vec!["build graphics".to_string()], }, Rule { targets: vec!["game".to_string()], sources: vec!["graphics".to_string(), "physics".to_string()], command: vec!["build game".to_string()], }, ], "game") { Ok(v) => { assert_eq!(v.len(), 4); assert_eq!(v[0].targets[0], "math"); assert_eq!(v[1].targets[0], "graphics"); assert_eq!(v[2].targets[0], "physics"); assert_eq!(v[3].targets[0], "game"); assert_eq!(v[0].source_indices.len(), 0); assert_eq!(v[1].source_indices.len(), 1); assert_eq!(v[1].source_indices[0], (0, 0)); assert_eq!(v[2].source_indices.len(), 1); assert_eq!(v[2].source_indices[0], (0, 0)); assert_eq!(v[3].source_indices.len(), 2); assert_eq!(v[3].source_indices[0], (1, 0)); assert_eq!(v[3].source_indices[1], (2, 0)); } Err(why) => panic!("Expected success, got: {}", why), } } /* Topological sort a DAG that is not a tree. 
Four nodes math, physics, graphics, game physics and graphics both depend on math, and game depends on physics and graphics. This is the same test as above, except the given vector is in the wrong order. The result should be the same as the above. Part of this is to test well-definedness of order. */ #[test] fn topological_sort_four_rules_diamond_scrambled() { match topological_sort( vec![ Rule { targets: vec!["graphics".to_string()], sources: vec!["math".to_string()], command: vec!["build graphics".to_string()], }, Rule { targets: vec!["physics".to_string()], sources: vec!["math".to_string()], command: vec!["build physics".to_string()], }, Rule { targets: vec!["math".to_string()], sources: vec![], command: vec![], }, Rule { targets: vec!["game".to_string()], sources: vec!["physics".to_string(), "graphics".to_string()], command: vec!["build game".to_string()], }, ], "game") { Ok(v) => { assert_eq!(v.len(), 4); assert_eq!(v[0].targets[0], "math"); assert_eq!(v[1].targets[0], "graphics"); assert_eq!(v[2].targets[0], "physics"); assert_eq!(v[3].targets[0], "game"); assert_eq!(v[0].source_indices.len(), 0); assert_eq!(v[1].source_indices.len(), 1); assert_eq!(v[1].source_indices[0], (0, 0)); assert_eq!(v[2].source_indices.len(), 1); assert_eq!(v[2].source_indices[0], (0, 0)); assert_eq!(v[3].source_indices.len(), 2); assert_eq!(v[3].source_indices[0], (1, 0)); assert_eq!(v[3].source_indices[1], (2, 0)); } Err(why) => panic!("Expected success, got: {}", why), } } /* Topological sort all rules in a DAG that is not a tree. Four nodes math, physics, graphics, game physics and graphics both depend on math, and game depends on physics and graphics. This is the same test as above, except the given vector is in the wrong order. The result should be the same as the above. Part of this is to test well-definedness of order. 
*/ #[test] fn topological_sort_all_four_rules_diamond_scrambled() { match topological_sort_all( vec![ Rule { targets: vec!["graphics".to_string()], sources: vec!["math".to_string()], command: vec!["build graphics".to_string()], }, Rule { targets: vec!["physics".to_string()], sources: vec!["math".to_string()], command: vec!["build physics".to_string()], }, Rule { targets: vec!["math".to_string()], sources: vec![], command: vec![], }, Rule { targets: vec!["game".to_string()], sources: vec!["physics".to_string(), "graphics".to_string()], command: vec!["build game".to_string()], }, ]) { Ok(v) => { assert_eq!(v.len(), 4); assert_eq!(v[0].targets[0], "math"); assert_eq!(v[1].targets[0], "graphics"); assert_eq!(v[2].targets[0], "physics"); assert_eq!(v[3].targets[0], "game"); assert_eq!(v[0].source_indices.len(), 0); assert_eq!(v[1].source_indices.len(), 1); assert_eq!(v[1].source_indices[0], (0, 0)); assert_eq!(v[2].source_indices.len(), 1); assert_eq!(v[2].source_indices[0], (0, 0)); assert_eq!(v[3].source_indices.len(), 2); assert_eq!(v[3].source_indices[0], (1, 0)); assert_eq!(v[3].source_indices[1], (2, 0)); } Err(why) => panic!("Expected success, got: {}", why), } } /* Topological sort a poetry example. This has two intermediate build results that share a source file. It's a bit like the diamond, except the shared source is not a rule, just a file in the file system, and there are other source-files, too. The topologial sort should not only put the nodes in order, but also create nodes for the source files not specifically represented as rules. 
*/ #[test] fn topological_sort_poem() { match topological_sort( vec![ Rule { targets: vec!["stanza1".to_string()], sources: vec!["chorus".to_string(), "verse1".to_string()], command: vec!["poemcat verse1 chorus".to_string()], }, Rule { targets: vec!["stanza2".to_string()], sources: vec!["chorus".to_string(), "verse2".to_string()], command: vec!["poemcat verse2 chorus".to_string()], }, Rule { targets: vec!["poem".to_string()], sources: vec!["stanza1".to_string(), "stanza2".to_string()], command: vec!["poemcat stanza1 stanza2".to_string()], }, ], "poem") { Ok(v) => { assert_eq!(v.len(), 6); assert_eq!(v[0].targets[0], "chorus"); assert_eq!(v[1].targets[0], "verse1"); assert_eq!(v[2].targets[0], "stanza1"); assert_eq!(v[3].targets[0], "verse2"); assert_eq!(v[4].targets[0], "stanza2"); assert_eq!(v[5].targets[0], "poem"); assert_eq!(v[0].source_indices.len(), 0); assert_eq!(v[1].source_indices.len(), 0); assert_eq!(v[3].source_indices.len(), 0); assert_eq!(v[2].source_indices, [(0, 0), (1, 0)]); assert_eq!(v[4].source_indices, [(0, 0), (3, 0)]); assert_eq!(v[5].source_indices, [(2, 0), (4, 0)]); } Err(why) => panic!("Expected success, got: {}", why), } } /* Topological sort a poetry example. This test is just like the one above but with the given list of rules in a different order. The result should be the same. 
*/ #[test] fn topological_sort_poem_scrambled() { match topological_sort( vec![ Rule { targets: vec!["poem".to_string()], sources: vec!["stanza1".to_string(), "stanza2".to_string()], command: vec!["poemcat stanza1 stanza2".to_string()], }, Rule { targets: vec!["stanza2".to_string()], sources: vec!["verse2".to_string(), "chorus".to_string()], command: vec!["poemcat verse2 chorus".to_string()], }, Rule { targets: vec!["stanza1".to_string()], sources: vec!["verse1".to_string(), "chorus".to_string()], command: vec!["poemcat verse1 chorus".to_string()], }, ], "poem") { Ok(v) => { assert_eq!(v.len(), 6); assert_eq!(v[0].targets[0], "chorus"); assert_eq!(v[1].targets[0], "verse1"); assert_eq!(v[2].targets[0], "stanza1"); assert_eq!(v[3].targets[0], "verse2"); assert_eq!(v[4].targets[0], "stanza2"); assert_eq!(v[5].targets[0], "poem"); assert_eq!(v[0].source_indices.len(), 0); assert_eq!(v[1].source_indices.len(), 0); assert_eq!(v[3].source_indices.len(), 0); assert_eq!(v[2].source_indices, [(0, 0), (1, 0)]); assert_eq!(v[4].source_indices, [(0, 0), (3, 0)]); assert_eq!(v[5].source_indices, [(2, 0), (4, 0)]); } Err(why) => panic!("Expected success, got: {}", why), } } /* Topological sort a poetry example. This test is just like the one above but with the given list of rules in a different order. The result should be the same. 
*/ #[test] fn topological_sort_all_poem_scrambled() { match topological_sort_all( vec![ Rule { targets: vec!["poem".to_string()], sources: vec!["stanza1".to_string(), "stanza2".to_string()], command: vec!["poemcat stanza1 stanza2".to_string()], }, Rule { targets: vec!["stanza2".to_string()], sources: vec!["verse2".to_string(), "chorus".to_string()], command: vec!["poemcat verse2 chorus".to_string()], }, Rule { targets: vec!["stanza1".to_string()], sources: vec!["verse1".to_string(), "chorus".to_string()], command: vec!["poemcat verse1 chorus".to_string()], }, ]) { Ok(v) => { assert_eq!(v.len(), 6); assert_eq!(v[0].targets[0], "chorus"); assert_eq!(v[1].targets[0], "verse1"); assert_eq!(v[2].targets[0], "stanza1"); assert_eq!(v[3].targets[0], "verse2"); assert_eq!(v[4].targets[0], "stanza2"); assert_eq!(v[5].targets[0], "poem"); assert_eq!(v[0].source_indices.len(), 0); assert_eq!(v[1].source_indices.len(), 0); assert_eq!(v[3].source_indices.len(), 0); assert_eq!(v[2].source_indices, [(0, 0), (1, 0)]); assert_eq!(v[4].source_indices, [(0, 0), (3, 0)]); assert_eq!(v[5].source_indices, [(2, 0), (4, 0)]); } Err(why) => panic!("Expected success, got: {}", why), } } /* Topological sort a poetry example. This test is just like the one above but with the given list of rules in a different order. The result should be the same. 
*/ #[test] fn topological_sort_all_disconnected_graph() { match topological_sort_all( vec![ Rule { targets: vec!["poem".to_string()], sources: vec!["stanza1".to_string(), "stanza2".to_string()], command: vec!["poemcat stanza1 stanza2".to_string()], }, Rule { targets: vec!["stanza2".to_string()], sources: vec!["verse2".to_string(), "chorus".to_string()], command: vec!["poemcat verse2 chorus".to_string()], }, Rule { targets: vec!["stanza1".to_string()], sources: vec!["verse1".to_string(), "chorus".to_string()], command: vec!["poemcat verse1 chorus".to_string()], }, ]) { Ok(v) => { assert_eq!(v.len(), 6); assert_eq!(v[0].targets[0], "chorus"); assert_eq!(v[1].targets[0], "verse1"); assert_eq!(v[2].targets[0], "stanza1"); assert_eq!(v[3].targets[0], "verse2"); assert_eq!(v[4].targets[0], "stanza2"); assert_eq!(v[5].targets[0], "poem"); assert_eq!(v[0].source_indices.len(), 0); assert_eq!(v[1].source_indices.len(), 0); assert_eq!(v[3].source_indices.len(), 0); assert_eq!(v[2].source_indices, [(0, 0), (1, 0)]); assert_eq!(v[4].source_indices, [(0, 0), (3, 0)]); assert_eq!(v[5].source_indices, [(2, 0), (4, 0)]); } Err(why) => panic!("Expected success, got: {}", why), } } /* Topological sort a dependence graph with a cycle in it. Check that the error returned points to the cycle. 
*/ #[test] fn topological_sort_circular() { match topological_sort( vec![ Rule { targets: vec!["Quine".to_string(), "SomethingElse".to_string()], sources: vec!["Hofstadter".to_string()], command: vec!["poemcat Hofstadter".to_string()], }, Rule { targets: vec!["AnotherThing".to_string(), "Hofstadter".to_string()], sources: vec!["Quine".to_string()], command: vec!["poemcat Quine".to_string()], }, ], "Quine") { Ok(_) => panic!("Unexpected success topologically sorting with a circular dependence"), Err(error) => { match error { TopologicalSortError::CircularDependence(cycle) => { assert_eq!(cycle[0], "Quine"); assert_eq!(cycle[1], "Hofstadter"); }, _ => panic!("Expected circular dependence, got another type of error") } }, } } /* Make a Rule that depends on /itself/ as a source. Try to topologial sort, expect the error to reflect the self-dependence */ #[test] fn topological_sort_self_reference() { match topological_sort( vec![ Rule { targets: vec!["Hofstadter".to_string()], sources: vec!["Hofstadter".to_string()], command: vec!["poemcat Hofstadter".to_string()], }, ], "Hofstadter") { Ok(_) => panic!("Unexpected success topologically sorting with a self-dependent rule"), Err(error) => { match error { TopologicalSortError::SelfDependentRule(target) => assert_eq!(target, "Hofstadter"), _ => panic!("Expected self-dependent rule, got another type of error") } }, } } /* Create a rule with a few sources that don't exist as targets of other rules. Perform a topological sort and check that the sources are created as nodes. 
*/ #[test] fn topological_sort_make_nodes_for_sources() { match topological_sort( vec![ Rule { targets: vec!["fruit".to_string()], sources: vec!["plant".to_string()], command: vec!["pick occasionally".to_string()], }, Rule { targets: vec!["plant".to_string()], sources: vec![ "seed".to_string(), "soil".to_string(), "sunlight".to_string(), "water".to_string(), ], command: vec!["take care of plant".to_string()], }, ], "fruit") { Ok(v) => { assert_eq!(v.len(), 6); assert_eq!(v[0].targets[0], "seed"); assert_eq!(v[1].targets[0], "soil"); assert_eq!(v[2].targets[0], "sunlight"); assert_eq!(v[3].targets[0], "water"); assert_eq!(v[4].targets[0], "plant"); assert_eq!(v[5].targets[0], "fruit"); } Err(why) => panic!("Expected success, got: {}", why), } } /* Check the function split_along_endpoints returns an empty list when given the empty string. */ #[test] fn split_along_endpoints_empty() { let v = split_along_endpoints("".to_string(), vec![]); assert_eq!(v.len(), 0); } /* Call split_along_endpoints with a string and endpoints that cut a prefix off the string, Check that the prefix is returned. */ #[test] fn split_along_endpoints_one() { let v = split_along_endpoints("apples".to_string(), vec![ EndpointPair { start: 0usize, end: 3usize, } ] ); assert_eq!(v.len(), 1); assert_eq!(v[0], "app"); } /* Call split_along_endpoints with two words and endpoints that cut the string into the two words, Check that two words are returned as separate strings. */ #[test] fn split_along_endpoints_two() { let v = split_along_endpoints("applesbananas".to_string(), vec![ EndpointPair { start: 0usize, end: 6usize, }, EndpointPair { start: 6usize, end: 13usize, }, ] ); assert_eq!(v.len(), 2); assert_eq!(v[0], "apples"); assert_eq!(v[1], "bananas"); } /* Call split_along_endpoints with two words with junk interspersed and endpoints that separate out the two words, Check that two words are returned as separate strings. 
*/ #[test] fn split_along_endpoints_two_padding() { let v = split_along_endpoints("123apples012bananas".to_string(), vec![ EndpointPair { start: 3usize, end: 9usize, }, EndpointPair { start: 12usize, end: 19usize, }, ] ); assert_eq!(v.len(), 2); assert_eq!(v[0], "apples"); assert_eq!(v[1], "bananas"); } /* Call get_line_endpoints on a string with no newlines. Check we get that string's endpoints in a vec */ #[test] fn get_line_endpoints_empty() { let v = get_line_endpoints("abcd"); assert_eq!(v.len(), 1); assert_eq!(v[0].start, 0); assert_eq!(v[0].end, 4); } /* Call get_line_endpoints on a string ending in a newline. Check that we get endpoints capturing the string upto but not including the newline. */ #[test] fn get_line_endpoints_one() { let v = get_line_endpoints("abcd\n"); assert_eq!(v.len(), 1); assert_eq!(v[0].start, 0); assert_eq!(v[0].end, 4); } /* Call get_line_endpoints on a string with newlines interspersed. Check that we get endpoints capturing the non-newline content. */ #[test] fn get_line_endpoints_two() { let v = get_line_endpoints("ab\ncd\n"); assert_eq!(v.len(), 2); assert_eq!(v[0].start, 0); assert_eq!(v[0].end, 2); assert_eq!(v[1].start, 3); assert_eq!(v[1].end, 5); } /* Call get_line_endpoints on a string with no newline at the end. Check that we get endpoints capturing the non-newline content. */ #[test] fn get_line_endpoints_no_newline_at_end() { let v = get_line_endpoints("ab\ncd"); assert_eq!(v.len(), 2); assert_eq!(v[0].start, 0); assert_eq!(v[0].end, 2); assert_eq!(v[1].start, 3); assert_eq!(v[1].end, 5); } /* Call get_line_endpoints on a string with newlines interspersed. Check that we get endpoints capturing the non-newline content. 
This time by extracting substrings using the endpoints */ #[test] fn get_line_endpoints_rule() { let s = "a\n:\nb\n:\nc\n:\n"; let v = get_line_endpoints(s); assert_eq!(v.len(), 6); assert_eq!(s[v[0].start..v[0].end], *"a"); assert_eq!(s[v[1].start..v[1].end], *":"); assert_eq!(s[v[2].start..v[2].end], *"b"); assert_eq!(s[v[3].start..v[3].end], *":"); assert_eq!(s[v[4].start..v[4].end], *"c"); assert_eq!(s[v[5].start..v[5].end], *":"); } /* Combine get_line_endpoints and split_along_endpoints to parse a properly formatted rule. */ #[test] fn split_along_endpoints_rule() { let text = "a\n:\nb\n:\nc\n:\n".to_string(); let endpoints = get_line_endpoints(&text); assert_eq!(endpoints.len(), 6); let v = split_along_endpoints(text, endpoints); assert_eq!(v.len(), 6); assert_eq!(v[0], "a"); assert_eq!(v[1], ":"); assert_eq!(v[2], "b"); assert_eq!(v[3], ":"); assert_eq!(v[4], "c"); assert_eq!(v[5], ":"); } /* Call parse on an empty string, check that the rule list is empty. */ #[test] fn parse_empty() { match parse("spool.rules".to_string(), "".to_string()) { Ok(_) => { panic!("Unexpected success when parsing empty string"); }, Err(error) => { match error { ParseError::UnexpectedEndOfFileMidTargets(filename, line_number) => { assert_eq!(filename, "spool.rules".to_string()); assert_eq!(line_number, 1); }, _=> panic!("Expected unexpected end of file mid-targets error"), } } }; } /* Call parse on a properly formatted rule, check that the targets, sources and command are what was in the text. */ #[test] fn parse_one() { match parse( "seven.rules".to_string(), "a\n:\nb\n:\nc\n:\n".to_string()) { Ok(v) => { assert_eq!(v.len(), 1); assert_eq!(v[0].targets, vec!["a".to_string()]); assert_eq!(v[0].sources, vec!["b".to_string()]); assert_eq!(v[0].command, vec!["c".to_string()]); }, Err(why) => panic!("Expected success, got: {}", why), }; } /* Call parse on twp properly formatted rules, check that the targets, sources and command are what was in the text. 
*/ #[test] fn parse_two() { match parse( "paper.rules".to_string(), "a\n:\nb\n:\nc\n:\n\nd\n:\ne\n:\nf\n:\n".to_string()) { Ok(v) => { assert_eq!(v.len(), 2); assert_eq!(v[0].targets, vec!["a".to_string()]); assert_eq!(v[0].sources, vec!["b".to_string()]); assert_eq!(v[0].command, vec!["c".to_string()]); assert_eq!(v[1].targets, vec!["d".to_string()]); assert_eq!(v[1].sources, vec!["e".to_string()]); assert_eq!(v[1].command, vec!["f".to_string()]); }, Err(why) => panic!("Expected success, got: {}", why), }; } #[test] fn parse_all_empty() { match parse_all( vec![]) { Ok(v) => { assert_eq!(v.len(), 0); }, Err(why) => panic!("Expected success, got: {}", why), }; } #[test] fn parse_all_one() { match parse_all(vec![("rulesfile1".to_string(), "a\n:\nb\n:\nc\n:\n".to_string())]) { Ok(v) => { assert_eq!(v.len(), 1); assert_eq!(v[0].targets, vec!["a".to_string()]); assert_eq!(v[0].sources, vec!["b".to_string()]); assert_eq!(v[0].command, vec!["c".to_string()]); }, Err(why) => panic!("Expected success, got: {}", why), }; } #[test] fn parse_all_two() { match parse_all( vec![ ("rulesfile1".to_string(), "a\n:\nb\n:\nc\n:\n".to_string()), ("rulesfile2".to_string(), "d\n:\ne\n:\nf\n:\n".to_string()) ]) { Ok(v) => { assert_eq!(v.len(), 2); assert_eq!(v[0].targets, vec!["a".to_string()]); assert_eq!(v[0].sources, vec!["b".to_string()]); assert_eq!(v[0].command, vec!["c".to_string()]); assert_eq!(v[1].targets, vec!["d".to_string()]); assert_eq!(v[1].sources, vec!["e".to_string()]); assert_eq!(v[1].command, vec!["f".to_string()]); }, Err(why) => panic!("Expected success, got: {}", why), }; } /* Call parse on improperly formatted rules, check the error. 
*/ #[test] fn parse_extra_newline_error1() { match parse( "banana.rules".to_string(), "\na\n:\nb\n:\nc\n:\n\nd\n:\ne\n:\nf\n:\n".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEmptyLine(filename, line_number) => { assert_eq!(filename, "banana.rules".to_string()); assert_eq!(line_number, 1); }, error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. */ #[test] fn parse_extra_newline_error2() { match parse( "fruit.rules".to_string(), "a\n:\nb\n\n:\nc\n:\n\nd\n:\ne\n:\nf\n:\n".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEmptyLine(filename, line_number) => { assert_eq!(filename, "fruit.rules".to_string()); assert_eq!(line_number, 4); } error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. */ #[test] fn parse_extra_newline_error3() { match parse( "well.rules".to_string(), "a\n:\nb\n:\nc\n:\n\n\nd\n:\ne\n:\nf\n:\n".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEmptyLine(filename, line_number) => { assert_eq!(filename, "well.rules".to_string()); assert_eq!(line_number, 8); } error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. */ #[test] fn parse_unexpected_eof_mid_targets1() { match parse( "glass.rules".to_string(), "a\n:\nb\n:\nc\n:\n\nd\n:\ne\n:\nf\n:\n\n".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEndOfFileMidTargets(filename, line_number) => { assert_eq!(filename, "glass.rules".to_string()); assert_eq!(line_number, 15); }, error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. 
*/ #[test] fn parse_unexpected_eof_mid_targets2() { match parse( "spider.rules".to_string(), "a\n:\nb\n:\nc\n:\n\nd\n:\ne\n:\nf\n:\n\nt".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEndOfFileMidTargets(filename, line_number) => { assert_eq!(filename, "spider.rules".to_string()); assert_eq!(line_number, 16); }, error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. */ #[test] fn parse_unexpected_eof_mid_targets3() { match parse( "movie.rules".to_string(), "a\n:\nb\n:\nc\n:\n\nd\n:\ne\n:\nf\n:\n\nt\n".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEndOfFileMidTargets(filename, line_number) => { assert_eq!(filename, "movie.rules".to_string()); assert_eq!(line_number, 16); }, error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. */ #[test] fn parse_unexpected_eof_mid_sources1() { match parse( "box.rules".to_string(), "a\n:\nb\n:\nc\n:\n\nd\n:\n".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEndOfFileMidSources(filename, line_number) => { assert_eq!(filename, "box.rules".to_string()); assert_eq!(line_number, 10); }, error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. */ #[test] fn parse_unexpected_eof_mid_sources2() { match parse( "house".to_string(), "a\n:\nb\n:\nc\n:\n\nd\n:\ns".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEndOfFileMidSources(filename, line_number) => { assert_eq!(filename, "house".to_string()); assert_eq!(line_number, 11); }, error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. 
*/ #[test] fn parse_unexpected_eof_mid_sources3() { match parse( "pi.rules".to_string(), "a\n:\nb\n:\nc\n:\n\nd\n:\ns\n".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEndOfFileMidSources(filename, line_number) => { assert_eq!(filename, "pi.rules".to_string()); assert_eq!(line_number, 11); }, error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. */ #[test] fn parse_unexpected_eof_mid_command1() { match parse( "green.rules".to_string(), "a\n:\nb\n:\nc\n:\n\nd\n:\ne\n:\n".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEndOfFileMidCommand(filename, line_number) => { assert_eq!(filename, "green.rules".to_string()); assert_eq!(line_number, 12); }, error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. */ #[test] fn parse_unexpected_eof_mid_command2() { match parse( "sunset.rules".to_string(), "a\n:\nb\n:\nc\n:\n\nd\n:\ne\n:\n".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEndOfFileMidCommand(filename, line_number) => { assert_eq!(filename, "sunset.rules".to_string()); assert_eq!(line_number, 12); }, error => panic!("Unexpected {}", error), } } }; } /* Call parse on improperly formatted rules, check the error. */ #[test] fn parse_unexpected_eof_mid_command3() { match parse( "tape.rules".to_string(), "a\n:\nb\n:\nc\n:\n\nd\n:\ne\n:\nf".to_string()) { Ok(_) => panic!("Unexpected success"), Err(error) => { match error { ParseError::UnexpectedEndOfFileMidCommand(filename, line_number) => { assert_eq!(filename, "tape.rules".to_string()); assert_eq!(line_number, 13); }, error => panic!("Unexpected {}", error), } } }; } }
//! Helpers shared by the Coq pretty-printing backend: identifier mangling,
//! `RcDoc` layout combinators, and operator/function-name translation.

use crate::name_resolution::{DictEntry, TopLevelContext};
use crate::rustspec::*;
use crate::typechecker::pure_carrier;
use core::iter::IntoIterator;
use heck::SnakeCase;
use lazy_static::lazy_static;
use pretty::RcDoc;
use regex::Regex;
use rustc_span::DUMMY_SP;
use std::collections::HashMap;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Mutex;

// Names of the Coq library modules backing hacspec's built-in collection and
// modular-arithmetic types.
pub(crate) const SEQ_MODULE: &'static str = "seq";
pub(crate) const ARRAY_MODULE: &'static str = "array";
pub(crate) const NAT_MODULE: &'static str = "nat_mod";

// Process-wide counter backing `fresh_codegen_id`.
static ID_COUNTER: AtomicUsize = AtomicUsize::new(0);

/// Returns a process-unique, monotonically increasing id used to suffix
/// local identifiers in the generated Coq output.
pub(crate) fn fresh_codegen_id() -> usize {
    ID_COUNTER.fetch_add(1, Ordering::SeqCst)
}

lazy_static! {
    // Memoizes the codegen id chosen for each `LocalIdent` id so that the
    // same local variable prints with the same suffix everywhere.
    static ref ID_MAP: Mutex<HashMap<usize, usize>> = Mutex::new(HashMap::new());
}

/// Pretty-prints an identifier. Local identifiers get a stable numeric
/// suffix (via `ID_MAP`) so distinct locals that share a source name do not
/// collide in the generated Coq.
pub(crate) fn translate_ident<'a>(x: Ident) -> RcDoc<'a, ()> {
    match x {
        Ident::Unresolved(s) => translate_ident_str(s.clone()),
        Ident::TopLevel(s) => translate_toplevel_ident(s),
        Ident::Local(LocalIdent { id, name: s, .. }) => {
            let mut id_map = ID_MAP.lock().unwrap();
            let codegen_id: usize = match id_map.get(&id) {
                Some(c_id) => *c_id,
                None => {
                    let c_id = fresh_codegen_id();
                    id_map.insert(id, c_id);
                    c_id
                }
            };
            translate_ident_str(format!("{}_{}", s, codegen_id))
        }
    }
}

/// Pretty-prints a top-level identifier. Type names get a `_t` suffix and
/// constants a `_v` suffix — except the built-in names listed below, which
/// map directly to their Coq counterparts.
pub(crate) fn translate_toplevel_ident<'a>(x: TopLevelIdent) -> RcDoc<'a, ()> {
    match x.kind {
        TopLevelIdentKind::Type => {
            match format!("{}", translate_ident_str(x.string).pretty(0)).as_str() {
                s @ "uint128"
                | s @ "uint64"
                | s @ "uint32"
                | s @ "uint16"
                | s @ "uint8"
                | s @ "int128"
                | s @ "int64"
                | s @ "int32"
                | s @ "int16"
                | s @ "int8"
                | s @ "byte_seq"
                | s @ "public_byte_seq"
                | s @ "option"
                | s @ "result" => RcDoc::as_string(s),
                s => RcDoc::as_string(format!("{}_t", s)),
            }
        }
        TopLevelIdentKind::Constant => translate_ident_str(format!("{}_v", x.string)),
        _ => translate_ident_str(x.string),
    }
}

/// Normalizes a raw identifier string for Coq output: rewrites secret-integer
/// prefixes (`U32`, `I64`, …) into `uint32`/`int64` form, snake_cases the
/// result, and renames `new` to `new_` to dodge the name clash in Coq.
pub(crate) fn translate_ident_str<'a>(ident_str: String) -> RcDoc<'a, ()> {
    let mut ident_str = ident_str.clone();
    let secret_int_regex = Regex::new(r"(?P<prefix>(U|I))(?P<digits>\d{1,3})").unwrap();
    ident_str = secret_int_regex
        .replace_all(&ident_str, r"${prefix}int${digits}")
        .to_string();
    // The previous rewrite turns `I64` into `Iint64`; collapse `iint` back
    // to `int` after snake-casing-insensitive matching.
    let secret_signed_int_fix = Regex::new(r"iint").unwrap();
    ident_str = secret_signed_int_fix
        .replace_all(&ident_str, "int")
        .to_string();
    let mut snake_case_ident = ident_str.to_snake_case();
    if snake_case_ident == "new" {
        snake_case_ident = "new_".to_string();
    }
    RcDoc::as_string(snake_case_ident)
}

/// Emits a Coq `Definition` + `Coercion` pair that injects `uint_size`
/// values into the modular type named by `pat`.
pub(crate) fn make_uint_size_coercion<'a>(pat: RcDoc<'a, ()>) -> RcDoc<'a, ()> {
    RcDoc::as_string("Definition")
        .append(RcDoc::space())
        .append(RcDoc::as_string("uint_size_in_"))
        .append(pat.clone())
        .append(RcDoc::as_string("(n : uint_size) : "))
        .append(pat.clone())
        .append(RcDoc::space())
        .append(RcDoc::as_string(":= int_in_nat_mod n."))
        .append(RcDoc::line())
        .append(RcDoc::as_string("Coercion "))
        .append(RcDoc::as_string("uint_size_in_"))
        .append(pat.clone())
        .append(RcDoc::as_string(" : uint_size >-> "))
        .append(pat.clone())
        .append(RcDoc::as_string("."))
}

/// Lays out a value tuple `(a, b, …)`; the empty tuple prints as Coq's
/// unit value `tt`.
pub(crate) fn make_tuple<'a, I: IntoIterator<Item = RcDoc<'a, ()>>>(args: I) -> RcDoc<'a, ()> {
    let iter = args.into_iter();
    // Only the upper size hint is consulted; an exact hint of zero selects
    // the unit-value layout.
    match &iter.size_hint().1 {
        Some(0) => RcDoc::as_string("tt"),
        _ => RcDoc::as_string("(")
            .append(
                RcDoc::line_()
                    .append(RcDoc::intersperse(
                        iter,
                        RcDoc::as_string(",").append(RcDoc::line()),
                    ))
                    .group()
                    .nest(2),
            )
            .append(RcDoc::line_())
            .append(RcDoc::as_string(")"))
            .group(),
    }
}

/// Lays out a Coq list literal `[a; b; …]`.
pub(crate) fn make_list<'a, I: IntoIterator<Item = RcDoc<'a, ()>>>(args: I) -> RcDoc<'a, ()> {
    RcDoc::as_string("[")
        .append(
            RcDoc::line_()
                .append(RcDoc::intersperse(
                    args.into_iter(),
                    RcDoc::as_string(";").append(RcDoc::line()),
                ))
                .group()
                .nest(2),
        )
        .append(RcDoc::line_())
        .append(RcDoc::as_string("]"))
        .group()
}

/// Lays out a *type-level* tuple `(A '× B '× …)` using Coq's product
/// notation.
pub(crate) fn make_typ_tuple<'a, I: IntoIterator<Item = RcDoc<'a, ()>>>(args: I) -> RcDoc<'a, ()> {
    RcDoc::as_string("(")
        .append(
            RcDoc::line_()
                .append(RcDoc::intersperse(
                    args.into_iter(),
                    RcDoc::space()
                        .append(RcDoc::as_string("'×"))
                        .append(RcDoc::line()),
                ))
                .group()
                .nest(2),
        )
        .append(RcDoc::line_())
        .append(RcDoc::as_string(")"))
        .group()
}

/// Wraps a document in parentheses with soft line breaks.
pub(crate) fn make_paren<'a>(e: RcDoc<'a, ()>) -> RcDoc<'a, ()> {
    RcDoc::as_string("(")
        .append(RcDoc::softline_().append(e).group().nest(2))
        .append(RcDoc::as_string(")"))
        .group()
}

/// Chooses the Coq spelling of a binary operator based on the operand type.
/// Named types are resolved through the type dictionary first (modular
/// integers get `%`-style operators; aliases/enums/arrays recurse on the
/// underlying type); Seq/Array operands map to module-qualified functions;
/// everything else falls through to the generic dotted operators.
pub(crate) fn translate_binop<'a, 'b>(
    op_prefix: RcDoc<'a, ()>,
    op: BinOpKind,
    op_typ: &'b Typ,
    top_ctx: &'a TopLevelContext,
) -> RcDoc<'a, ()> {
    match (op, &(op_typ.1).0) {
        (_, BaseTyp::Named(ident, _)) => {
            let ident = &ident.0;
            match top_ctx.typ_dict.get(ident) {
                Some((inner_ty, entry)) => match entry {
                    DictEntry::NaturalInteger => match op {
                        BinOpKind::Add => return RcDoc::as_string("+%"),
                        BinOpKind::Sub => return RcDoc::as_string("-%"),
                        BinOpKind::Mul => return RcDoc::as_string("*%"),
                        BinOpKind::Div => return RcDoc::as_string("/%"),
                        BinOpKind::Rem => return RcDoc::as_string("rem"),
                        // Rem,
                        // And,
                        // Or,
                        BinOpKind::BitXor => return RcDoc::as_string("xor"),
                        BinOpKind::BitOr => return RcDoc::as_string("or"),
                        BinOpKind::BitAnd => return RcDoc::as_string("and"),
                        // Shl,
                        // Shr,
                        BinOpKind::Eq => return RcDoc::as_string("=.?"),
                        BinOpKind::Lt => return RcDoc::as_string("<.?"),
                        BinOpKind::Le => return RcDoc::as_string("<=.?"),
                        BinOpKind::Ne => return RcDoc::as_string("!=.?"),
                        BinOpKind::Ge => return RcDoc::as_string(">=.?"),
                        BinOpKind::Gt => return RcDoc::as_string(">.?"),
                        _ => unimplemented!("{:?}", op),
                    },
                    DictEntry::Enum | DictEntry::Array | DictEntry::Alias => {
                        // Unwrap the alias/array/enum and retry on the
                        // underlying type.
                        return translate_binop(op_prefix, op, inner_ty, top_ctx)
                    }
                },
                _ => (), // should not happen
            }
        }
        _ => (),
    };
    match (op, &(op_typ.1).0) {
        (_, BaseTyp::Seq(inner_ty)) | (_, BaseTyp::Array(_, inner_ty)) => {
            // NOTE(review): this recursive result is computed but unused
            // (`_inner_ty_op`) — presumably kept for its side effects or as
            // a validity check; confirm before removing.
            let _inner_ty_op = translate_binop(
                op_prefix,
                op,
                &(
                    (Borrowing::Consumed, inner_ty.1.clone()),
                    inner_ty.as_ref().clone(),
                ),
                top_ctx,
            );
            let op_str = match op {
                BinOpKind::Sub => "minus",
                BinOpKind::Add => "add",
                BinOpKind::Mul => "mul",
                BinOpKind::Div => "div",
                BinOpKind::BitXor => "xor",
                BinOpKind::BitOr => "or",
                BinOpKind::BitAnd => "and",
                BinOpKind::Eq => "eq",
                BinOpKind::Ne => "neq",
                _ => panic!("operator: {:?}", op), // should not happen
            };
            // e.g. `seq_add` or `array_xor`, depending on the operand type.
            RcDoc::as_string(format!(
                "{}_{}",
                match &(op_typ.1).0 {
                    BaseTyp::Seq(_) => SEQ_MODULE,
                    BaseTyp::Array(_, _) => ARRAY_MODULE,
                    _ => panic!(), // should not happen
                },
                op_str
            ))
        }
        (BinOpKind::Sub, BaseTyp::Usize) | (BinOpKind::Sub, BaseTyp::Isize) => {
            op_prefix.append(RcDoc::as_string("-"))
        }
        (BinOpKind::Add, BaseTyp::Usize) | (BinOpKind::Add, BaseTyp::Isize) => {
            op_prefix.append(RcDoc::as_string("+"))
        }
        (BinOpKind::Mul, BaseTyp::Usize) | (BinOpKind::Mul, BaseTyp::Isize) => {
            op_prefix.append(RcDoc::as_string("*"))
        }
        (BinOpKind::Div, BaseTyp::Usize) | (BinOpKind::Div, BaseTyp::Isize) => {
            op_prefix.append(RcDoc::as_string("/"))
        }
        (BinOpKind::Rem, BaseTyp::Usize) => {
            RcDoc::as_string(".%")
        }
        (BinOpKind::Rem, BaseTyp::Isize) => {
            RcDoc::as_string("%%")
        }
        (BinOpKind::Shl, BaseTyp::Usize) => RcDoc::as_string("usize_shift_left"),
        (BinOpKind::Shr, BaseTyp::Usize) => RcDoc::as_string("usize_shift_right"),
        (BinOpKind::Rem, _) => RcDoc::as_string(".%"),
        (BinOpKind::Sub, _) => RcDoc::as_string(".-"),
        (BinOpKind::Add, _) => RcDoc::as_string(".+"),
        (BinOpKind::Mul, _) => RcDoc::as_string(".*"),
        (BinOpKind::Div, _) => RcDoc::as_string("./"),
        (BinOpKind::BitXor, _) => RcDoc::as_string(".^"),
        (BinOpKind::BitAnd, _) => RcDoc::as_string(".&"),
        (BinOpKind::BitOr, _) => RcDoc::as_string(".|"),
        (BinOpKind::Shl, _) => RcDoc::as_string("shift_left"),
        (BinOpKind::Shr, _) => RcDoc::as_string("shift_right"),
        (BinOpKind::Lt, _) => RcDoc::as_string("<.?"),
        (BinOpKind::Le, _) => RcDoc::as_string("<=.?"),
        (BinOpKind::Ge, _) => RcDoc::as_string(">=.?"),
        (BinOpKind::Gt, _) => RcDoc::as_string(">.?"),
        (BinOpKind::Ne, _) => RcDoc::as_string("!=.?"),
        (BinOpKind::Eq, _) => RcDoc::as_string("=.?"),
        (BinOpKind::And, _) => RcDoc::as_string("&&"),
        (BinOpKind::Or, _) => RcDoc::as_string("||"),
    }
}

/// Chooses the Coq spelling of a unary operator; boolean negation uses
/// Coq's `negb`, everything else keeps the generic spelling.
pub(crate) fn translate_unop<'a>(op: UnOpKind, _op_typ: Typ) -> RcDoc<'a, ()> {
    match (op, &(_op_typ.1).0) {
        (UnOpKind::Not, BaseTyp::Bool) => RcDoc::as_string("negb"),
        (UnOpKind::Not, _) => RcDoc::as_string("not"),
        (UnOpKind::Neg, _) => RcDoc::as_string("-"),
    }
}

/// Resolves whether a collection type prints as `seq` or `array`,
/// following aliases through the type dictionary.
pub(crate) fn array_or_seq<'a>(t: Typ, top_ctxt: &'a TopLevelContext) -> RcDoc<'a, ()> {
    match &(t.1).0 {
        BaseTyp::Seq(_) => RcDoc::as_string("seq"),
        BaseTyp::Named(id, None) => {
            let name = &id.0;
            match top_ctxt.typ_dict.get(name) {
                Some((new_t, dict_entry)) => match dict_entry {
                    DictEntry::Alias => array_or_seq(new_t.clone(), top_ctxt),
                    DictEntry::Enum => panic!("should not happen"),
                    DictEntry::Array => {
                        match &(new_t.1).0 {
                            BaseTyp::Array(_, _) => RcDoc::as_string("array"),
                            _ => panic!(), // should not happen
                        }
                    }
                    DictEntry::NaturalInteger => panic!("should not happen"),
                },
                None => panic!("should not happen"),
            }
        }
        BaseTyp::Named(_, Some(_)) => panic!("should not happen"),
        BaseTyp::Array(_, _) => RcDoc::as_string("array"),
        _ => panic!("should not happen"),
    }
}

/// For a return statement inside an early-return carrier (`Option`/`Result`),
/// wraps the returned expression in the pure injection (`Ok`/`Some`) and
/// adjusts the recorded type accordingly. Panics on any other statement.
pub(crate) fn add_ok_if_result(
    stmt: Statement,
    carrier: Fillable<CarrierTyp>,
) -> Spanned<Statement> {
    (
        match carrier {
            Some(ert) =>
            // If b has an early return, then we must prefix the returned
            // mutated variables by Ok or Some
            {
                match stmt {
                    Statement::ReturnExp(e, Some((x, t_base))) => {
                        // Rebuild the carrier with the concrete success type.
                        let carrier = match ert.clone() {
                            CarrierTyp::Option(_) => CarrierTyp::Option(t_base.clone()),
                            CarrierTyp::Result(_, b) => CarrierTyp::Result(t_base.clone(), b),
                        };
                        let (e, _) = pure_carrier(carrier.clone(), (e.clone(), DUMMY_SP.into()));
                        Statement::ReturnExp(e, Some((x, (carrier.clone().into(), t_base.1))))
                    }
                    _ => panic!("should not happen"),
                }
            }
            _ => stmt.clone(),
        },
        DUMMY_SP.into(),
    )
}

/// Classifies the receiver type of a method call for name generation.
#[derive(Debug)]
pub(crate) enum FuncPrefix {
    Regular,
    Array(ArraySize, BaseTyp),
    Seq(BaseTyp),
    NatMod(String, usize), // Modulo value, number of bits for the encoding,
}

/// Maps a receiver type to the Coq module prefix used when printing a
/// method call on it, plus a `FuncPrefix` describing the receiver shape.
/// Named types are resolved through the type dictionary.
pub(crate) fn translate_prefix_for_func_name<'a>(
    prefix: BaseTyp,
    top_ctx: &'a TopLevelContext,
) -> (RcDoc<'a, ()>, FuncPrefix) {
    match prefix {
        BaseTyp::Bool => panic!(), // should not happen
        BaseTyp::UInt8 => (RcDoc::as_string("pub_uint8"), FuncPrefix::Regular),
        BaseTyp::Int8 => (RcDoc::as_string("pub_int8"), FuncPrefix::Regular),
        BaseTyp::UInt16 => (RcDoc::as_string("pub_uint16"), FuncPrefix::Regular),
        BaseTyp::Int16 => (RcDoc::as_string("pub_int16"), FuncPrefix::Regular),
        BaseTyp::UInt32 => (RcDoc::as_string("pub_uint32"), FuncPrefix::Regular),
        BaseTyp::Int32 => (RcDoc::as_string("pub_int32"), FuncPrefix::Regular),
        BaseTyp::UInt64 => (RcDoc::as_string("pub_uint64"), FuncPrefix::Regular),
        BaseTyp::Int64 => (RcDoc::as_string("pub_int64"), FuncPrefix::Regular),
        BaseTyp::UInt128 => (RcDoc::as_string("pub_uint128"), FuncPrefix::Regular),
        BaseTyp::Int128 => (RcDoc::as_string("pub_int128"), FuncPrefix::Regular),
        BaseTyp::Usize => (RcDoc::as_string("uint_size"), FuncPrefix::Regular),
        BaseTyp::Isize => (RcDoc::as_string("int_size"), FuncPrefix::Regular),
        BaseTyp::Str => (RcDoc::as_string("string"), FuncPrefix::Regular),
        BaseTyp::Enum(_cases, _type_args) => {
            panic!("Should not happen")
        }
        BaseTyp::Seq(inner_ty) => (
            RcDoc::as_string(SEQ_MODULE),
            FuncPrefix::Seq(inner_ty.as_ref().0.clone()),
        ),
        BaseTyp::Array(size, inner_ty) => (
            RcDoc::as_string(ARRAY_MODULE),
            FuncPrefix::Array(size.0.clone(), inner_ty.as_ref().0.clone()),
        ),
        BaseTyp::Named(ident, _) => {
            // if the type is an array, we should print the Seq module instead
            let name = &ident.0;
            match top_ctx.typ_dict.get(name) {
                Some((alias_typ, DictEntry::Array))
                | Some((alias_typ, DictEntry::Alias))
                | Some((alias_typ, DictEntry::NaturalInteger)) => {
                    translate_prefix_for_func_name((alias_typ.1).0.clone(), top_ctx)
                }
                // TODO: doesn't work if the alias uses a definition from another library
                // Needs fixing in the frontend
                _ => (
                    translate_ident_str(name.string.clone()),
                    FuncPrefix::Regular,
                ),
            }
        }
        BaseTyp::Variable(_) => panic!(), // should not happen
        BaseTyp::Tuple(_) => panic!(), // should not happen
        BaseTyp::NaturalInteger(_, modulo, bits) => (
            RcDoc::as_string(NAT_MODULE),
            FuncPrefix::NatMod(modulo.0.clone(), bits.0.clone()),
        ),
        BaseTyp::Placeholder => panic!("Got unexpected type `Placeholder`: this should have been filled by during the typechecking phase."),
    }
}
use std::cmp::PartialEq; pub mod band; pub mod linear; /// ScaleKind represents supported scales. #[derive(Debug, PartialEq)] pub enum ScaleKind { Band, Linear, } /// Scale represents an axis scale that is used in views and chart. pub trait Scale<T> { /// Scale the provided domain value for a scale range. fn scale(&self, domain: &T) -> f32; /// Get the list of ticks that represent the scale on an axis. fn ticks(&self) -> Vec<T>; /// Get the scale kind. fn kind(&self) -> ScaleKind; /// Get the scale bandwidth. fn bandwidth(&self) -> f32; /// Check if scale range is reversed. fn is_range_reversed(&self) -> bool; /// Get the offset for each tick. fn tick_offset(&self) -> f32; }
/* * Datadog API V1 Collection * * Collection of all Datadog Public endpoints. * * The version of the OpenAPI document: 1.0 * Contact: support@datadoghq.com * Generated by: https://openapi-generator.tech */ /// FormulaAndFunctionMetricQueryDefinition : A formula and functions metrics query. #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct FormulaAndFunctionMetricQueryDefinition { #[serde(rename = "aggregator", skip_serializing_if = "Option::is_none")] pub aggregator: Option<crate::models::FormulaAndFunctionMetricAggregation>, #[serde(rename = "data_source")] pub data_source: crate::models::FormulaAndFunctionMetricDataSource, /// Name of the query for use in formulas. #[serde(rename = "name")] pub name: String, /// Metrics query definition. #[serde(rename = "query")] pub query: String, } impl FormulaAndFunctionMetricQueryDefinition { /// A formula and functions metrics query. pub fn new(data_source: crate::models::FormulaAndFunctionMetricDataSource, name: String, query: String) -> FormulaAndFunctionMetricQueryDefinition { FormulaAndFunctionMetricQueryDefinition { aggregator: None, data_source, name, query, } } }
mod external_serialization; mod fpfs; mod serialization; mod tg; mod tg_tools; mod types; mod utils; pub use fpfs::Fpfs; pub use tg::TgConnection;
// These lines define a function in Rust. The main function is special: it's // the beginning of every Rust program. The first line says "I'm declaring a // function named main which takes no arguments and returns nothing." If there // were arguments, they would go inside (( and )), and because we aren't // returning anything from this function, we can omit the return type entirely. // // You'll also note that the function is wrapped in curly braces ({ and }). // Rust requires these around all function bodies. It is also considered good // style to put the opening curly brace on the same line as the function // declaration, with one space in between. fn main() { // First, Rust is indented with four spaces, not tabs. // // Second, println!() is a Rust macro, which is how metaprogramming is done // in Rust. If it were a function instead, it would look like this: // println(). // // Third, Rust is an 'expression oriented' language, which means that most // things are expressions, rather than statements. The ; is used to // indicate that this expression is over, and the next one is ready to // begin. Most lines of Rust code end with a ;. println!("Hello, world!"); }
#![allow(non_snake_case)]

#[allow(unused_imports)]
use std::io::{self, Write};
#[allow(unused_imports)]
use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque};
#[allow(unused_imports)]
use std::cmp::{max, min, Ordering};

// Competitive-programming input macro: reads whitespace-separated tokens
// either from an explicit `source = <expr>` string or from stdin, and binds
// them according to the pattern list (see `read_value!` for the type forms).
macro_rules! input {
    (source = $s:expr, $($r:tt)*) => {
        let mut iter = $s.split_whitespace();
        let mut next = || { iter.next().unwrap() };
        input_inner!{next, $($r)*}
    };
    ($($r:tt)*) => {
        let stdin = std::io::stdin();
        let mut bytes = std::io::Read::bytes(std::io::BufReader::new(stdin.lock()));
        // Byte-wise tokenizer: skip leading whitespace, then collect one
        // whitespace-delimited token into a String.
        let mut next = move || -> String{
            bytes
                .by_ref()
                .map(|r|r.unwrap() as char)
                .skip_while(|c|c.is_whitespace())
                .take_while(|c|!c.is_whitespace())
                .collect()
        };
        input_inner!{next, $($r)*}
    };
}

// Recursively peels `name: type` pairs off the pattern list, binding each
// via `read_value!`.
macro_rules! input_inner {
    ($next:expr) => {};
    ($next:expr, ) => {};
    ($next:expr, $var:ident : $t:tt $($r:tt)*) => {
        let $var = read_value!($next, $t);
        input_inner!{$next $($r)*}
    };
}

// Converts one (or a structured group of) token(s) into a value. Supported
// forms: tuples, `[type; len]` vectors, `chars`, `char`, the 1-based index
// helpers `usize1`/`isize1`, and any `FromStr` type.
macro_rules! read_value {
    ($next:expr, ( $($t:tt),* )) => {
        ( $(read_value!($next, $t)),* )
    };
    ($next:expr, [ $t:tt ; $len:expr ]) => {
        (0..$len).map(|_| read_value!($next, $t)).collect::<Vec<_>>()
    };
    ($next:expr, chars) => {
        read_value!($next, String).chars().collect::<Vec<char>>()
    };
    ($next:expr, char) => {
        read_value!($next, String).chars().collect::<Vec<char>>()[0]
    };
    ($next:expr, usize1) => {
        read_value!($next, usize) - 1
    };
    ($next:expr, isize1) => {
        read_value!($next, isize) - 1
    };
    ($next:expr, $t:ty) => {
        $next().parse::<$t>().expect("Parse error")
    };
}

// Debug helper: prints each expression as `name = value, `.
macro_rules! debug {
    ($($a:expr),*) => {
        println!(concat!($(stringify!($a), " = {:?}, "),*), $($a),*);
    }
}

#[allow(dead_code)]
const MOD: usize = 1000000007;

// ASCII digit character -> numeric value.
#[allow(dead_code)]
fn to_num(c: char) -> i64 {
    c as i64 - 48
}

// Circle of N animals (sheep/wolves); each answers 'o' or 'x' about its
// neighbors. Try all four assignments of the first two animals and propagate
// around the circle; print any consistent assignment ('S'/'W') or -1.
fn main() {
    input!{
        N: usize,
        s: chars,
    }
    let mut flag = false;
    // true = the animal at position i answered 'x' (i.e. lied-or-truth flip).
    let animals: Vec<bool> = s.into_iter().map(|c| if c == 'o' {false} else {true}).collect();
    // The four possible seeds for positions 0 and 1 (false = sheep,
    // true = wolf); each is extended to length N in the loop below.
    let mut ans: Vec<Vec<bool>> = vec![vec![false, false], vec![false, true], vec![true, false], vec![true, true]];
    for res in ans.iter_mut() {
        for i in 1..N-1 {
            let prev = res[i-1];
            let now = res[i];
            // The next value is determined by (the one just before the
            // current position) XOR (the information obtained from the
            // current i).
            // If the current i is tentatively a sheep, XOR with 0 (no flip);
            // if it is a wolf, flip.
            res.push(prev ^ animals[i] ^ now);
        }
        // Wrap-around consistency: the propagation must also hold across the
        // seam between positions N-1 and 0/1.
        if res[N-2] ^ animals[N-1] ^ res[N-1] == res[0] && res[N-1] ^ animals[0] ^ res[0] == res[1] {
            flag = true;
            for &mut c in res.into_iter() {
                print!("{}", if c {"W"} else {"S"});
            }
            println!("");
            break;
        }
    }
    if !flag {
        println!("-1");
    }
}
//! Token definitions and a single-token parser for the combine-based lexer.

use std::marker::PhantomData;

use combine::primitives::{Parser, SourcePosition, Positioner};
use combine::primitives::{Info, ParseError, Consumed, Error};
use combine::primitives::Stream as StreamTrait;

use super::{Stream, State, Result};

/// All token categories produced by the lexer: keywords, punctuation,
/// literals, and the layout tokens (Newline/Indent/Dedent/Eof).
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum TokenType {
    Css,
    Html,
    Import,
    From,
    If,
    Elif,
    For,
    In,
    Of,
    Key,
    As,
    Else,
    Events,
    Link,
    Store,
    Let,
    New,
    And,
    Or,
    Not,
    Comma,  // ,
    Equals,  // =
    Eq,  // ==
    NotEq,  // !=
    Greater,  // >
    Less,  // <
    GreaterEq,  // >=
    LessEq,  // <=
    Colon,  // :
    Dot,  // .
    Dash,  // -
    Plus,  // +
    Multiply,  // *
    Divide,  // /
    Question,  // ?
    ArrowRight,  // ->
    OpenParen,  // (
    OpenBracket,  // [
    OpenBrace,  // {
    CloseParen,  // )
    CloseBracket,  // ]
    CloseBrace,  // }
    Ident,
    CssWord,
    Number,
    String,
    Newline,
    Indent,
    Dedent,
    Eof,
}

/// A lexed token: (category, source slice, position of the token).
#[derive(PartialEq, Eq, Debug, Clone)]
pub struct Token<'a>(pub TokenType, pub &'a str, pub SourcePosition);

impl<'a> Positioner for Token<'a> {
    type Position = SourcePosition;
    // Streams start at line 1, column 1.
    fn start() -> SourcePosition {
        SourcePosition {
            line: 1,
            column: 1,
        }
    }
    // Each token carries its own position; advancing simply adopts it.
    fn update(&self, pos: &mut SourcePosition) {
        *pos = self.2;
    }
}

/// Conversions from a token's raw text to owned strings.
pub trait ParseToken {
    fn into_string(self) -> String;
    fn unescape(self) -> String;
}

impl<'a> ParseToken for Token<'a> {
    fn into_string(self) -> String {
        return String::from(self.1);
    }
    /// Decodes a quoted string token (single or double quotes), processing
    /// backslash escapes. Panics if the token is not quote-delimited; the
    /// trailing assert checks that the closing quote ends the slice.
    fn unescape(self) -> String {
        let slice = self.1;
        let quote = slice.chars().next().unwrap();
        if quote != '"' && quote != '\'' {
            panic!("Only string tokens can be unescaped");
        }
        let mut result = String::new();
        let mut iter = slice[1..].chars();
        loop {
            let ch = if let Some(ch) = iter.next() {
                ch
            } else {
                break;
            };
            match ch {
                '\\' => {
                    if let Some(ch) = iter.next() {
                        match ch {
                            // \x.. hex escapes and escaped literal newlines
                            // are not supported yet.
                            'x' => unimplemented!(),
                            '\n' => unimplemented!(),
                            'r' => result.push('\r'),
                            'n' => result.push('\n'),
                            't' => result.push('\t'),
                            // Any other escaped char stands for itself.
                            _ => result.push(ch),
                        }
                    } else {
                        panic!("Slash at end of line");
                    }
                }
                '"'|'\'' => {
                    // The matching quote terminates the string; the other
                    // quote kind is literal.
                    if quote == ch {
                        break;
                    } else {
                        result.push(ch);
                    }
                }
                _ => {
                    result.push(ch);
                }
            }
        }
        assert!(iter.next().is_none());
        return result;
    }
}

/// A combine `Parser` that matches exactly one token of a given type.
pub struct TokenParser<I> {
    token: TokenType,
    ph: PhantomData<I>,
}

impl TokenType {
    // Human-readable description of the token type, used in "expected ..."
    // error messages.
    fn info(&self) -> Info<Token<'static>, Token<'static>> {
        match *self {
            TokenType::Css => Info::Borrowed("css NAME[(PARAMS..)]"),
            TokenType::Html => Info::Borrowed("html NAME[(PARAMS)]"),
            TokenType::OpenParen => Info::Borrowed("("),
            TokenType::OpenBracket => Info::Borrowed("["),
            TokenType::OpenBrace => Info::Borrowed("{"),
            TokenType::CloseParen => Info::Borrowed(")"),
            TokenType::CloseBracket => Info::Borrowed("]"),
            TokenType::CloseBrace => Info::Borrowed("}"),
            TokenType::Ident => Info::Borrowed("identifier"),
            TokenType::CssWord => Info::Borrowed("css word or identifier"),
            TokenType::Number => Info::Borrowed("number"),
            TokenType::String => Info::Borrowed("quoted string"),
            TokenType::Newline => Info::Borrowed("new line"),
            TokenType::Indent => Info::Borrowed("indentation"),
            TokenType::Dedent => Info::Borrowed("unindent"),
            TokenType::Comma => Info::Borrowed("comma"),
            TokenType::Equals => Info::Borrowed("equals (assignment)"),
            TokenType::Eq => Info::Borrowed("double equals"),
            TokenType::NotEq => Info::Borrowed("not equals"),
            TokenType::Greater => Info::Borrowed("greater"),
            TokenType::Less => Info::Borrowed("less"),
            TokenType::GreaterEq => Info::Borrowed("greater or equal"),
            TokenType::LessEq => Info::Borrowed("less or equal"),
            TokenType::Colon => Info::Borrowed("colon"),
            TokenType::Dot => Info::Borrowed("dot"),
            TokenType::Dash => Info::Borrowed("dash (i.e. minus)"),
            TokenType::Plus => Info::Borrowed("plus"),
            TokenType::Multiply => Info::Borrowed("multiply"),
            TokenType::Divide => Info::Borrowed("division"),
            TokenType::Question => Info::Borrowed("question mark"),
            TokenType::ArrowRight => Info::Borrowed("arrow right"),
            TokenType::Eof => Info::Borrowed("end of file"),
            TokenType::Import => Info::Borrowed("import"),
            TokenType::From => Info::Borrowed("from"),
            TokenType::If => Info::Borrowed("if"),
            TokenType::Elif => Info::Borrowed("elif"),
            TokenType::For => Info::Borrowed("for"),
            TokenType::In => Info::Borrowed("in"),
            TokenType::Of => Info::Borrowed("of"),
            TokenType::Key => Info::Borrowed("key"),
            TokenType::As => Info::Borrowed("as"),
            TokenType::Else => Info::Borrowed("else"),
            TokenType::Events => Info::Borrowed("events"),
            TokenType::Link => Info::Borrowed("link"),
            TokenType::Store => Info::Borrowed("store"),
            TokenType::Let => Info::Borrowed("let"),
            TokenType::New => Info::Borrowed("new"),
            TokenType::Not => Info::Borrowed("not"),
            TokenType::And => Info::Borrowed("and"),
            TokenType::Or => Info::Borrowed("or"),
        }
    }
}

impl<'a> Parser for TokenParser<Stream<'a>> {
    type Input = Stream<'a>;
    type Output = Token<'a>;
    // Consumes one token from the stream if its type matches; otherwise
    // fails without consuming input (Consumed::Empty), so alternatives can
    // still be tried.
    fn parse_lazy<'x>(&mut self, input: State<'x>) -> Result<'x, Token<'x>> {
        match input.input.clone().uncons() {
            Ok((c, s)) => {
                if c.0 == self.token {
                    input.update(c, s)
                } else {
                    Err(Consumed::Empty(ParseError::empty(input.position)))
                }
            }
            Err(err) => Err(Consumed::Empty(ParseError::new(input.position, err)))
        }
    }
    // Enriches parse errors with the expected-token description.
    fn add_error(&mut self, error: &mut ParseError<Stream<'a>>) {
        error.errors.push(Error::Expected(self.token.info()));
    }
}

/// Convenience constructor: a parser matching one token of type `tok`.
pub fn lift<'a>(tok: TokenType) -> TokenParser<Stream<'a>> {
    return TokenParser { token: tok, ph: PhantomData };
}
$NetBSD: patch-vendor_termios-0.3.1_src_lib.rs,v 1.1 2020/10/17 13:39:45 maya Exp $ NetBSD support, already upstream in termios 0.3.2 --- ../vendor/termios-0.3.1/src/lib.rs.orig 2018-03-26 21:22:45.000000000 +0000 +++ ../vendor/termios-0.3.1/src/lib.rs @@ -99,6 +99,10 @@ //! cfsetspeed(termios, termios::os::openbsd::B921600) //! } //! +//! #[cfg(target_os = "netbsd")] +//! fn set_fastest_speed(termios: &mut Termios) -> io::Result<()> { +//! cfsetspeed(termios, termios::os::netbsd::B921600) +//! } //! #[cfg(target_os = "dragonfly")] //! fn set_fastest_speed(termios: &mut Termios) -> io::Result<()> { //! cfsetspeed(termios, termios::os::dragonfly::B230400)
use crate::parser::parse_error::DateTimeParseError; use crate::validation::time::{End, Start}; use chrono::prelude::*; pub fn start(string: &str) -> Result<Start, DateTimeParseError> { Ok(Start::new(string_to_local_date(string)?)) } pub fn end(string: &str) -> Result<End, DateTimeParseError> { Ok(End::new(string_to_local_date(string)?)) } fn string_to_local_date(string: &str) -> Result<DateTime<Local>, DateTimeParseError> { let naive_time = string .parse::<NaiveDateTime>() .map_err(|_| DateTimeParseError::NotConvertible)?; Ok(Local.from_local_datetime(&naive_time).unwrap()) }
struct Solution; use std::cmp::max; /// https://leetcode.com/problems/best-time-to-buy-and-sell-stock/ impl Solution { /// 12 ms 2.9 MB pub fn max_profit(prices: Vec<i32>) -> i32 { if prices.len() <= 1 { return 0; } let mut max_profit = 0; let mut min_price = *prices.get(0).unwrap(); for &price in &prices[1..] { if price < min_price { min_price = price; } else { max_profit = max(max_profit, price - min_price); } } max_profit } } #[cfg(test)] mod test { use super::*; use rstest::rstest; #[rstest(prices, expected, case(&[7], 0), case(&[1, 7], 6), case(&[7,1,5,3,6,4], 5), case(&[7,6,4,3,1], 0))] fn max_price(prices: &[i32], expected: i32) { assert_eq!(Solution::max_profit(prices.to_vec()), expected); } }
pub mod s3; pub mod web_server;
#![allow(dead_code)]

mod instruction;
mod opcode;

use memory::Memory;
use util::*;

/// Architectural register file: five general-purpose 16-bit registers,
/// stack pointer, program counter, and the carry/overflow/zero/sign flags.
#[derive(RustcDecodable, RustcEncodable, Copy, Clone, Debug, Default)]
pub struct Registers {
    r1: u16,
    r2: u16,
    r3: u16,
    r4: u16,
    r5: u16,
    sp: u16,
    pc: u16,
    cf: bool,
    of: bool,
    zf: bool,
    sf: bool,
}

impl Registers {
    /// All registers and flags start zeroed/cleared.
    pub fn new() -> Self {
        Registers {
            r1: 0,
            r2: 0,
            r3: 0,
            r4: 0,
            r5: 0,
            sp: 0,
            pc: 0,
            cf: false,
            of: false,
            zf: false,
            sf: false,
        }
    }

    /// Reads register by 3-bit encoding: 0 is the hard-wired zero register,
    /// 1-5 are r1-r5, 6 is sp, 7 is pc.
    pub fn read_register(&self, reg: u8) -> u16 {
        match reg {
            0 => 0,
            1 => self.r1,
            2 => self.r2,
            3 => self.r3,
            4 => self.r4,
            5 => self.r5,
            6 => self.sp,
            7 => self.pc,
            _ => unreachable!(),
        }
    }

    /// Writes register by the same encoding; writes to register 0 are
    /// silently discarded.
    pub fn write_register(&mut self, reg: u8, val: u16) {
        match reg {
            0 => {},
            1 => self.r1 = val,
            2 => self.r2 = val,
            3 => self.r3 = val,
            4 => self.r4 = val,
            5 => self.r5 = val,
            6 => self.sp = val,
            7 => self.pc = val,
            _ => unreachable!(),
        };
    }
}

/// The CPU: register state plus the memory map it reads/writes through.
#[derive(RustcDecodable, RustcEncodable, Clone, Debug, Default)]
pub struct Cpu {
    pub registers: Registers,
    pub memmap: ::memmap::MemMap,
}

impl Cpu {
    pub fn new(memmap: ::memmap::MemMap) -> Self {
        Cpu {
            registers: Registers::new(),
            memmap: memmap,
        }
    }

    // Instruction dispatch is not implemented yet.
    pub fn exec_instruction(&mut self, ram: &mut ::memory::Ram) {

    }

    // Instruction Utilities

    // Stack grows downward: push decrements sp, pop increments it.
    fn push_u16(&mut self, val: u16) {
        self.registers.sp -= 2;
        self.memmap.write_u16(self.registers.sp, val);
    }

    fn push_u8(&mut self, val: u8) {
        self.registers.sp -= 1;
        self.memmap.write_u8(self.registers.sp, val);
    }

    fn pop_u16(&mut self) -> u16 {
        let val = self.memmap.read_u16(self.registers.sp);
        self.registers.sp += 2;
        val
    }

    fn pop_u8(&mut self) -> u8 {
        let val = self.memmap.read_u8(self.registers.sp);
        self.registers.sp += 1;
        val
    }

    // Second byte of the 2-byte instruction at pc (immediate / register
    // operand byte).
    fn read_instruction_lo(&self) -> u8 {
        self.memmap.read_u8(self.registers.pc + 1)
    }

    // Splits an operand byte into two 4-bit register selectors
    // (high nibble, low nibble).
    fn decode_registers(val: u8) -> (u8, u8) {
        (val >> 4, val & 0x0F)
    }

    fn read_instr_registers(&self) -> (u8, u8) {
        Cpu::decode_registers(self.read_instruction_lo())
    }

    // Instructions

    /// Add unsigned
    fn add(&mut self) {
        let (src, dst) = self.read_instr_registers();
        let a = self.registers.read_register(src);
        let b = self.registers.read_register(dst);
        let result = a.wrapping_add(b);
        // Carry: a wrapped sum is strictly smaller than both operands.
        self.registers.cf = result < a || result < b;
        self.registers.of = false;
        self.registers.zf = result == 0;
        self.registers.sf = false;
        self.registers.write_register(dst, result);
        self.registers.pc += 2;
    }

    /// Add signed
    fn adds(&mut self) {
        let (src, dst) = self.read_instr_registers();
        let a = self.registers.read_register(src) as i16;
        let b = self.registers.read_register(dst) as i16;
        // Widen to i32 to detect carry-out of the i16 range; the wrapped
        // value is what actually lands in the register.
        let result = a as i32 + b as i32;
        let wrapped_result = a.wrapping_add(b);
        self.registers.cf = result > ::std::i16::MAX as i32 || result < ::std::i16::MIN as i32;
        // Signed overflow: operands share a sign but the result's differs.
        self.registers.of = a.is_negative() == b.is_negative() &&
                            a.is_negative() != wrapped_result.is_negative();
        self.registers.zf = wrapped_result == 0;
        self.registers.sf = wrapped_result < 0;
        self.registers.write_register(dst, wrapped_result as u16);
        self.registers.pc += 2;
    }

    /// Subtract unsigned
    // Not implemented yet: only advances pc.
    fn sub(&mut self) {
        self.registers.pc += 2;
    }

    /// Subtract signed
    // Not implemented yet: only advances pc.
    fn subs(&mut self) {
        self.registers.pc += 2;
    }

    /// Load unsigned immediate high bits to r1
    // NOTE(review): despite the name, this keeps r1's high bits
    // (`hi_bits_no_shift`) and ORs the immediate into the LOW byte, while
    // `ldil` below keeps the low byte and shifts the immediate into the HIGH
    // byte. The names look swapped relative to the behavior — confirm
    // against `util`'s `hi_bits_no_shift`/`lo_bits` and the ISA spec before
    // relying on either.
    fn ldih(&mut self) {
        let val = self.read_instruction_lo();
        self.registers.r1 = hi_bits_no_shift(self.registers.r1) | (val as u16);
        self.registers.pc += 2;
    }

    /// Load unsigned immediate low bits to r1
    // See the NOTE on `ldih` above: this writes the immediate into r1's
    // high byte.
    fn ldil(&mut self) {
        let val = self.read_instruction_lo();
        self.registers.r1 = lo_bits(self.registers.r1) as u16 | ((val as u16) << 8);
        self.registers.pc += 2;
    }

    /// Unconditional jump to register address
    fn jmpr(&mut self) {
        let (_, reg) = self.read_instr_registers();
        self.registers.pc = self.registers.read_register(reg); // TODO: RAM only
    }

    /// Unconditional jump relative
    // The immediate is a signed 8-bit pc-relative offset.
    fn jmp(&mut self) {
        let val = self.read_instruction_lo() as i8;
        if val > 0 {
            self.registers.pc += val as u16;
        } else if val < 0 {
            self.registers.pc -= (-val) as u16;
        }
    }
}
use std::{fs, path::Path};

//

/// Reads the file at `path` and maps every line through `transformer`.
///
/// Fix: I/O errors are now propagated via `?` instead of panicking through
/// `unwrap()`, matching the declared `io::Result` return type.
fn read_input_lines<P, T>(path: P, transformer: fn(&str) -> T) -> std::io::Result<Vec<T>>
where
    P: AsRef<Path>,
{
    let contents = fs::read_to_string(path)?;
    Ok(contents.lines().map(transformer).collect())
}

//

/// A light's coordinates on the 1000x1000 grid.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
struct Position {
    x: usize,
    y: usize,
}

impl Position {
    fn new(x: usize, y: usize) -> Self {
        Self { x, y }
    }

    /// Parses an "x,y" pair. Panics on malformed input (acceptable for
    /// trusted puzzle input).
    fn from_str(s: &str) -> Self {
        let c: Vec<&str> = s.split(',').collect();
        Self {
            x: c[0].parse().unwrap(),
            y: c[1].parse().unwrap(),
        }
    }
}

#[test]
fn position_from_str() {
    assert_eq!(Position::from_str("0,0"), Position { x: 0, y: 0 });
    assert_eq!(Position::from_str("123,456"), Position { x: 123, y: 456 });
    assert_eq!(Position::from_str("999,999"), Position { x: 999, y: 999 });
}

//

/// Iterates row-by-row over every position in the inclusive rectangle
/// `from..=to`.
struct LightsIterator {
    from: Position,
    to: Position,
    x: usize,
    y: usize,
}

impl LightsIterator {
    fn new(from: Position, to: Position) -> Self {
        Self {
            from,
            to,
            x: from.x,
            y: from.y,
        }
    }
}

impl Iterator for LightsIterator {
    type Item = Position;

    fn next(&mut self) -> Option<Self::Item> {
        // Past the last row: rectangle exhausted.
        if self.y > self.to.y {
            return None;
        }
        let position = Position::new(self.x, self.y);
        // Advance left-to-right, wrapping to the start of the next row.
        self.x += 1;
        if self.x > self.to.x {
            self.x = self.from.x;
            self.y += 1;
        }
        Some(position)
    }
}

//

/// One parsed puzzle instruction with its target rectangle.
#[derive(Debug, Eq, PartialEq)]
enum Instruction {
    Toggle { from: Position, to: Position },
    TurnOn { from: Position, to: Position },
    TurnOff { from: Position, to: Position },
}

impl Instruction {
    /// Parses lines like "toggle 0,0 through 9,9" or
    /// "turn on 1,2 through 3,4". Panics on anything else.
    fn from_str(s: &str) -> Instruction {
        let c: Vec<&str> = s.split(' ').collect();
        if c[0] == "toggle" {
            return Instruction::Toggle {
                from: Position::from_str(c[1]),
                to: Position::from_str(c[3]),
            };
        }
        if c[1] == "on" {
            return Instruction::TurnOn {
                from: Position::from_str(c[2]),
                to: Position::from_str(c[4]),
            };
        }
        if c[1] == "off" {
            return Instruction::TurnOff {
                from: Position::from_str(c[2]),
                to: Position::from_str(c[4]),
            };
        }
        panic!("Unexpected input");
    }

    /// Return an iterator that loops over all the Positions of lights
    /// affected by this instruction.
    fn lights(&self) -> LightsIterator {
        match self {
            Instruction::Toggle { from, to } => LightsIterator::new(*from, *to),
            Instruction::TurnOff { from, to } => LightsIterator::new(*from, *to),
            Instruction::TurnOn { from, to } => LightsIterator::new(*from, *to),
        }
    }
}

//

/// Flat row-major 1000x1000 grid; each cell is a brightness counter
/// (part 1 only ever stores 0/1).
struct Grid {
    lights: Vec<u32>,
}

impl Grid {
    fn new() -> Self {
        Self {
            lights: vec![0; 1_000_000],
        }
    }

    /// Flips a light between 0 and 1.
    fn toggle(&mut self, position: Position) {
        let i = (position.y * 1000) + position.x;
        self.lights[i] ^= 1;
    }

    /// Turns a light on (part 1 semantics).
    fn set(&mut self, position: Position) {
        let i = (position.y * 1000) + position.x;
        self.lights[i] = 1;
    }

    /// Turns a light off (part 1 semantics).
    fn clear(&mut self, position: Position) {
        let i = (position.y * 1000) + position.x;
        self.lights[i] = 0;
    }

    /// Raises brightness by one (part 2 semantics).
    fn inc(&mut self, position: Position) {
        let i = (position.y * 1000) + position.x;
        self.lights[i] += 1;
    }

    /// Lowers brightness by one, saturating at zero (part 2 semantics).
    fn dec(&mut self, position: Position) {
        let i = (position.y * 1000) + position.x;
        if self.lights[i] != 0 {
            self.lights[i] -= 1;
        }
    }

    /// Number of lit (non-zero) lights.
    fn count(&self) -> usize {
        self.lights.iter().filter(|v| **v != 0).count()
    }

    /// Total brightness across the grid.
    fn total(&self) -> u32 {
        self.lights.iter().sum()
    }
}

//

/// Part 1: apply on/off/toggle literally, count lit lights.
pub fn part1() -> usize {
    let mut grid = Grid::new();
    for instruction in read_input_lines("input.txt", Instruction::from_str).unwrap() {
        match instruction {
            Instruction::Toggle { .. } => {
                instruction.lights().for_each(|p| grid.toggle(p));
            }
            Instruction::TurnOff { .. } => {
                instruction.lights().for_each(|p| grid.clear(p));
            }
            Instruction::TurnOn { .. } => {
                instruction.lights().for_each(|p| grid.set(p));
            }
        }
    }
    grid.count()
}

/// Part 2: instructions adjust brightness (+1 / -1 / +2), sum brightness.
pub fn part2() -> u32 {
    let mut grid = Grid::new();
    for instruction in read_input_lines("input.txt", Instruction::from_str).unwrap() {
        match instruction {
            Instruction::Toggle { .. } => {
                // Toggle raises brightness by 2 under part 2's rules.
                for position in instruction.lights() {
                    grid.inc(position);
                    grid.inc(position);
                }
            }
            Instruction::TurnOff { .. } => {
                instruction.lights().for_each(|p| grid.dec(p));
            }
            Instruction::TurnOn { .. } => {
                instruction.lights().for_each(|p| grid.inc(p));
            }
        }
    }
    grid.total()
}

//

#[cfg(test)]
mod tests {
    #[test]
    fn test_part1() {
        assert_eq!(super::part1(), 543903);
    }

    #[test]
    fn test_part2() {
        assert_eq!(super::part2(), 14687245);
    }
}
// svd2rust-style generated accessors for the RCC_APB4DIVR register.
// Layout (as encoded below): APB4DIV in bits 0:2, APB4DIVRDY in bit 31;
// reset value is 0x8000_0000 (divider field 0, ready flag set).
#[doc = "Reader of register RCC_APB4DIVR"]
pub type R = crate::R<u32, super::RCC_APB4DIVR>;
#[doc = "Writer for register RCC_APB4DIVR"]
pub type W = crate::W<u32, super::RCC_APB4DIVR>;
#[doc = "Register RCC_APB4DIVR `reset()`'s with value 0x8000_0000"]
impl crate::ResetValue for super::RCC_APB4DIVR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x8000_0000
    }
}
#[doc = "APB4DIV\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum APB4DIV_A {
    #[doc = "0: aclk (default after\r\n reset)"]
    B_0X0 = 0,
    #[doc = "1: aclk / 2"]
    B_0X1 = 1,
    #[doc = "2: aclk / 4"]
    B_0X2 = 2,
    #[doc = "3: aclk / 8"]
    B_0X3 = 3,
}
impl From<APB4DIV_A> for u8 {
    #[inline(always)]
    fn from(variant: APB4DIV_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `APB4DIV`"]
pub type APB4DIV_R = crate::R<u8, APB4DIV_A>;
impl APB4DIV_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, APB4DIV_A> {
        use crate::Variant::*;
        // Raw values 4..=7 fit the 3-bit field but have no enumerated
        // meaning, so they surface as the raw `Res` variant.
        match self.bits {
            0 => Val(APB4DIV_A::B_0X0),
            1 => Val(APB4DIV_A::B_0X1),
            2 => Val(APB4DIV_A::B_0X2),
            3 => Val(APB4DIV_A::B_0X3),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == APB4DIV_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == APB4DIV_A::B_0X1
    }
    #[doc = "Checks if the value of the field is `B_0X2`"]
    #[inline(always)]
    pub fn is_b_0x2(&self) -> bool {
        *self == APB4DIV_A::B_0X2
    }
    #[doc = "Checks if the value of the field is `B_0X3`"]
    #[inline(always)]
    pub fn is_b_0x3(&self) -> bool {
        *self == APB4DIV_A::B_0X3
    }
}
#[doc = "Write proxy for field `APB4DIV`"]
pub struct APB4DIV_W<'a> {
    w: &'a mut W,
}
impl<'a> APB4DIV_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: APB4DIV_A) -> &'a mut W {
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "aclk (default after reset)"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(APB4DIV_A::B_0X0)
    }
    #[doc = "aclk / 2"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(APB4DIV_A::B_0X1)
    }
    #[doc = "aclk / 4"]
    #[inline(always)]
    pub fn b_0x2(self) -> &'a mut W {
        self.variant(APB4DIV_A::B_0X2)
    }
    #[doc = "aclk / 8"]
    #[inline(always)]
    pub fn b_0x3(self) -> &'a mut W {
        self.variant(APB4DIV_A::B_0X3)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07);
        self.w
    }
}
#[doc = "APB4DIVRDY\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum APB4DIVRDY_A {
    #[doc = "0: The new division factor is not yet\r\n taken into account."]
    B_0X0 = 0,
    #[doc = "1: The new division factor is taken\r\n into account. (default after reset)"]
    B_0X1 = 1,
}
impl From<APB4DIVRDY_A> for bool {
    #[inline(always)]
    fn from(variant: APB4DIVRDY_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `APB4DIVRDY`"]
pub type APB4DIVRDY_R = crate::R<bool, APB4DIVRDY_A>;
impl APB4DIVRDY_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> APB4DIVRDY_A {
        match self.bits {
            false => APB4DIVRDY_A::B_0X0,
            true => APB4DIVRDY_A::B_0X1,
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == APB4DIVRDY_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == APB4DIVRDY_A::B_0X1
    }
}
impl R {
    #[doc = "Bits 0:2 - APB4DIV"]
    #[inline(always)]
    pub fn apb4div(&self) -> APB4DIV_R {
        APB4DIV_R::new((self.bits & 0x07) as u8)
    }
    #[doc = "Bit 31 - APB4DIVRDY"]
    #[inline(always)]
    pub fn apb4divrdy(&self) -> APB4DIVRDY_R {
        APB4DIVRDY_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bits 0:2 - APB4DIV"]
    #[inline(always)]
    pub fn apb4div(&mut self) -> APB4DIV_W {
        APB4DIV_W { w: self }
    }
}
fn main() { proconio::input! { a: i64, b: i64, } // if a == 0 { // println!("{}", f(b)); // }else{ // } println!("{}", f(b) ^ f(a-1)); // let mut ss = 0; // for i in a..=b{ // ss = ss ^ i; // } // println!("{}", ss); } fn f(a: i64) -> i64 { let cnt = (a + 1) / 2; let mut result = cnt % 2; if a % 2 == 0 { result ^ a } else { result } }
use core::convert::TryInto; use sha2::{Digest, Sha256}; use subspace_core_primitives::{crypto, Sha256Hash, SHA256_HASH_SIZE}; /// Derives a single object blob of a given size from given seed and index, which is intended to be /// used as pre-genesis object (blockchain seed data) pub fn from_seed<S: AsRef<[u8]>>(seed: S, index: u32, size: u32) -> Vec<u8> { let size = size as usize; let mut object = Vec::with_capacity(size); let mut acc: Sha256Hash = { let mut hasher = Sha256::new(); hasher.update(seed.as_ref()); hasher.update(index.to_le_bytes().as_ref()); hasher.finalize()[..] .try_into() .expect("Sha256 output is always 32 bytes; qed") }; for _ in 0..size / SHA256_HASH_SIZE { object.extend_from_slice(&acc); acc = crypto::sha256_hash(&acc); } let remainder = size % SHA256_HASH_SIZE; if remainder > 0 { object.extend_from_slice(&acc[..remainder]); } assert_eq!(object.len(), size); object }
// Appears to be a compiler macro-expansion snapshot (note the injected
// `#[prelude_import]` and `extern crate std as std`) exercising the
// `inherit` macro crate; the empty items are intentionally trivial.
#![feature(prelude_import)]
#![no_std]
#[prelude_import]
use std::prelude::v1::*;
#[macro_use]
extern crate std as std;
#[macro_use]
extern crate inherit;
struct AStruct {}
fn main() {}
fn a_func() {}
// Submodule declarations for the table implementations and their test
// modules; each `mod` pulls in the matching file from this directory.
mod builder_test;
mod compact_table;
mod extended_table_test;
mod index_test;
mod iter_table;
mod pool_table;
mod table_test;
// Copyright 2017 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.

// Crate root: key-value datastore abstraction plus a JSON-file-backed
// implementation and its query types.

extern crate base64;
#[macro_use]
extern crate futures;
extern crate parking_lot;
extern crate serde;
extern crate serde_json;
extern crate tempfile;

use futures::Stream;
use std::borrow::Cow;
use std::io::Error as IoError;

mod query;
mod json_file;

pub use self::json_file::JsonFileDatastore;
pub use self::query::{Query, Order, Filter, FilterTy, FilterOp};

/// Abstraction over any struct that can store `(key, value)` pairs.
pub trait Datastore<T> {
    /// Sets the value of a key, overwriting any previous value.
    fn put(&self, key: Cow<str>, value: T);

    /// Returns the value corresponding to this key, or `None` if absent.
    // TODO: use higher-kinded stuff once stable to provide a more generic "accessor" for the data
    fn get(&self, key: &str) -> Option<T>;

    /// Returns true if the datastore contains the given key.
    fn has(&self, key: &str) -> bool;

    /// Removes the given key from the datastore. Returns true if the key existed.
    fn delete(&self, key: &str) -> bool;

    /// Executes a query on the key-value store, yielding matching
    /// `(key, value)` pairs as a stream.
    ///
    /// This operation is expensive on some implementations and cheap on others. It is your
    /// responsibility to pick the right implementation for the right job.
    fn query<'a>(
        &'a self,
        query: Query<T>,
    ) -> Box<Stream<Item = (String, T), Error = IoError> + 'a>;
}
// Diesel schema: each `table!` invocation declares a table's primary key
// and column -> SQL-type mapping used by the query builder.
table! {
    comments (id) {
        id -> Int4,
        post_id -> Int4,
        text -> Text,
    }
}

// Join table linking memos and tags (note Int8 keys, unlike posts/tags).
table! {
    memo_tag_rels (id) {
        id -> Int8,
        tag_id -> Int8,
        memo_id -> Int8,
        created_at -> Nullable<Timestamp>,
        updated_at -> Nullable<Timestamp>,
    }
}

table! {
    memos (id) {
        id -> Int8,
        title -> Nullable<Varchar>,
        created_at -> Nullable<Timestamp>,
        updated_at -> Nullable<Timestamp>,
    }
}

// Migration bookkeeping table (managed by the micrate tool).
table! {
    micrate_db_version (id) {
        id -> Int4,
        version_id -> Int8,
        is_applied -> Bool,
        tstamp -> Nullable<Timestamp>,
    }
}

// Join table linking posts and tags.
table! {
    post_tag_rels (id) {
        id -> Int4,
        tag_id -> Int4,
        post_id -> Int4,
    }
}

table! {
    posts (id) {
        id -> Int4,
        title -> Varchar,
        body -> Text,
        published -> Bool,
    }
}

table! {
    tag_groups (id) {
        id -> Int8,
        name -> Nullable<Varchar>,
        created_at -> Nullable<Timestamp>,
        updated_at -> Nullable<Timestamp>,
    }
}

table! {
    tags (id) {
        id -> Int4,
        label -> Varchar,
    }
}

allow_tables_to_appear_in_same_query!(
    comments,
    memo_tag_rels,
    memos,
    micrate_db_version,
    post_tag_rels,
    posts,
    tag_groups,
    tags,
);
// AutoRust-generated serde models for the Azure App Service certificate API.
// NOTE(review): `password` is the only non-optional, non-defaulted field in
// both Properties structs — deserialization fails when it is absent; confirm
// the service always returns it.
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Certificate {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<certificate::Properties>,
}
pub mod certificate {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "friendlyName", default, skip_serializing_if = "Option::is_none")]
        pub friendly_name: Option<String>,
        #[serde(rename = "subjectName", default, skip_serializing_if = "Option::is_none")]
        pub subject_name: Option<String>,
        #[serde(rename = "hostNames", default, skip_serializing_if = "Vec::is_empty")]
        pub host_names: Vec<String>,
        #[serde(rename = "pfxBlob", default, skip_serializing_if = "Option::is_none")]
        pub pfx_blob: Option<String>,
        #[serde(rename = "siteName", default, skip_serializing_if = "Option::is_none")]
        pub site_name: Option<String>,
        #[serde(rename = "selfLink", default, skip_serializing_if = "Option::is_none")]
        pub self_link: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub issuer: Option<String>,
        #[serde(rename = "issueDate", default, skip_serializing_if = "Option::is_none")]
        pub issue_date: Option<String>,
        #[serde(rename = "expirationDate", default, skip_serializing_if = "Option::is_none")]
        pub expiration_date: Option<String>,
        pub password: String,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub thumbprint: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub valid: Option<bool>,
        #[serde(rename = "cerBlob", default, skip_serializing_if = "Option::is_none")]
        pub cer_blob: Option<String>,
        #[serde(rename = "publicKeyHash", default, skip_serializing_if = "Option::is_none")]
        pub public_key_hash: Option<String>,
        #[serde(rename = "hostingEnvironmentProfile", default, skip_serializing_if = "Option::is_none")]
        pub hosting_environment_profile: Option<HostingEnvironmentProfile>,
        #[serde(rename = "keyVaultId", default, skip_serializing_if = "Option::is_none")]
        pub key_vault_id: Option<String>,
        #[serde(rename = "keyVaultSecretName", default, skip_serializing_if = "Option::is_none")]
        pub key_vault_secret_name: Option<String>,
        #[serde(rename = "keyVaultSecretStatus", default, skip_serializing_if = "Option::is_none")]
        pub key_vault_secret_status: Option<properties::KeyVaultSecretStatus>,
        #[serde(rename = "serverFarmId", default, skip_serializing_if = "Option::is_none")]
        pub server_farm_id: Option<String>,
    }
    pub mod properties {
        use super::*;
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum KeyVaultSecretStatus {
            Initialized,
            WaitingOnCertificateOrder,
            Succeeded,
            CertificateOrderFailed,
            OperationNotPermittedOnKeyVault,
            AzureServiceUnauthorizedToAccessKeyVault,
            KeyVaultDoesNotExist,
            KeyVaultSecretDoesNotExist,
            UnknownError,
            ExternalPrivateKey,
            Unknown,
        }
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CertificateCollection {
    pub value: Vec<Certificate>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
// Same shape as `Certificate` but based on `ProxyOnlyResource` (PATCH body).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CertificatePatchResource {
    #[serde(flatten)]
    pub proxy_only_resource: ProxyOnlyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<certificate_patch_resource::Properties>,
}
pub mod certificate_patch_resource {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "friendlyName", default, skip_serializing_if = "Option::is_none")]
        pub friendly_name: Option<String>,
        #[serde(rename = "subjectName", default, skip_serializing_if = "Option::is_none")]
        pub subject_name: Option<String>,
        #[serde(rename = "hostNames", default, skip_serializing_if = "Vec::is_empty")]
        pub host_names: Vec<String>,
        #[serde(rename = "pfxBlob", default, skip_serializing_if = "Option::is_none")]
        pub pfx_blob: Option<String>,
        #[serde(rename = "siteName", default, skip_serializing_if = "Option::is_none")]
        pub site_name: Option<String>,
        #[serde(rename = "selfLink", default, skip_serializing_if = "Option::is_none")]
        pub self_link: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub issuer: Option<String>,
        #[serde(rename = "issueDate", default, skip_serializing_if = "Option::is_none")]
        pub issue_date: Option<String>,
        #[serde(rename = "expirationDate", default, skip_serializing_if = "Option::is_none")]
        pub expiration_date: Option<String>,
        pub password: String,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub thumbprint: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub valid: Option<bool>,
        #[serde(rename = "cerBlob", default, skip_serializing_if = "Option::is_none")]
        pub cer_blob: Option<String>,
        #[serde(rename = "publicKeyHash", default, skip_serializing_if = "Option::is_none")]
        pub public_key_hash: Option<String>,
        #[serde(rename = "hostingEnvironmentProfile", default, skip_serializing_if = "Option::is_none")]
        pub hosting_environment_profile: Option<HostingEnvironmentProfile>,
        #[serde(rename = "keyVaultId", default, skip_serializing_if = "Option::is_none")]
        pub key_vault_id: Option<String>,
        #[serde(rename = "keyVaultSecretName", default, skip_serializing_if = "Option::is_none")]
        pub key_vault_secret_name: Option<String>,
        #[serde(rename = "keyVaultSecretStatus", default, skip_serializing_if = "Option::is_none")]
        pub key_vault_secret_status: Option<properties::KeyVaultSecretStatus>,
        #[serde(rename = "serverFarmId", default, skip_serializing_if = "Option::is_none")]
        pub server_farm_id: Option<String>,
    }
    pub mod properties {
        use super::*;
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum KeyVaultSecretStatus {
            Initialized,
            WaitingOnCertificateOrder,
            Succeeded,
            CertificateOrderFailed,
            OperationNotPermittedOnKeyVault,
            AzureServiceUnauthorizedToAccessKeyVault,
            KeyVaultDoesNotExist,
            KeyVaultSecretDoesNotExist,
            UnknownError,
            ExternalPrivateKey,
            Unknown,
        }
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DefaultErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<default_error_response::Error>,
}
pub mod default_error_response {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Error {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub code: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub message: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub target: Option<String>,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub details: Vec<serde_json::Value>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub innererror: Option<String>,
    }
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct HostingEnvironmentProfile {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
// Base ARM resource; `location` is required on full resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<String>,
    pub location: String,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
// Like `Resource` but without the required `location` (used for PATCH).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyOnlyResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
// Interrupt descriptor table: a fixed-size byte-addressable region mapped
// just above main memory.
#![allow(dead_code)]

pub const INTERRUPT_TABLE_BASE: usize = ::memory::MEMORY_SIZE;
pub const INTERRUPT_TABLE_LENGTH: usize = 64;
pub const INTERRUPT_TABLE_SIZE: usize = 2 * INTERRUPT_TABLE_LENGTH;

/// The interrupt table's backing storage: a heap-allocated, fixed-size
/// byte buffer (64 two-byte entries).
#[derive(RustcEncodable, RustcDecodable, Clone, Debug, Default)]
pub struct Idt {
    data: Box<[u8]>,
}

impl Idt {
    /// Allocates a zero-filled table of `INTERRUPT_TABLE_SIZE` bytes.
    pub fn new() -> Self {
        let zeroed = vec![0u8; INTERRUPT_TABLE_SIZE];
        Idt {
            data: zeroed.into_boxed_slice(),
        }
    }
}

impl ::memory::Memory for Idt {
    /// Reads the byte at `addr` (table-relative; out-of-range panics).
    fn read_u8(&self, addr: u16) -> u8 {
        self.data[usize::from(addr)]
    }

    /// Writes `value` at `addr` (table-relative; out-of-range panics).
    fn write_u8(&mut self, addr: u16, value: u8) {
        self.data[usize::from(addr)] = value;
    }
}
// Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.

#![feature(allocator_api)]

use std::alloc::Layout;
use std::borrow::Cow;
use std::hash::Hash;
use std::hash::Hasher;
use std::io::Read;
use std::io::Write;

use anyhow::Result;
use ocamlrep::ptr::UnsafeOcamlPtr;
use ocamlrep::FromOcamlRep;
use ocamlrep::ToOcamlRep;
use parking_lot::Mutex;
use serde::de::DeserializeOwned;
use serde::Serialize;

/// A `datastore::Store` which writes its values to sharedmem (via the `shmffi`
/// crate) as bincode-serialized values. Can be configured to compress the
/// bincode blobs using `Compression`.
pub struct ShmStore<K, V> {
    /// An LRU cache of hashconsed values in front of the serialized shm heap.
    cache: Mutex<lru::LruCache<K, V>>,
    evictable: bool,
    compression: Compression,
    prefix: &'static str,
}

/// Whether entries inserted by a store may be evicted from shared memory.
#[derive(Debug, Copy, Clone)]
pub enum Evictability {
    NonEvictable,
    Evictable,
}

/// Compression applied to serialized blobs before they hit shared memory.
#[derive(Debug, Copy, Clone)]
pub enum Compression {
    None,
    Lz4 { compression_level: u32 },
    Zstd { compression_level: i32 },
}

impl Default for Compression {
    fn default() -> Self {
        Self::Lz4 {
            compression_level: 1,
        }
    }
}

impl<K, V> ShmStore<K, V>
where
    K: Key + Copy + Hash + Eq + Send + Sync + 'static,
    V: Clone + Serialize + DeserializeOwned + Send + Sync + 'static,
{
    pub fn new(prefix: &'static str, evictability: Evictability, compression: Compression) -> Self {
        Self {
            cache: Mutex::new(lru::LruCache::new(1000)),
            evictable: matches!(evictability, Evictability::Evictable),
            compression,
            prefix,
        }
    }

    /// Hashes `key` with the store's `prefix` mixed in first, so stores
    /// sharing one shm table cannot collide on equal keys.
    fn hash_key(&self, key: K) -> u64 {
        let mut hasher = hash::Hasher::default();
        self.prefix.hash(&mut hasher);
        key.hash_key(&mut hasher);
        hasher.finish()
    }
}

impl<K, V> datastore::Store<K, V> for ShmStore<K, V>
where
    K: Key + Copy + Hash + Eq + Send + Sync + 'static,
    V: Clone + Serialize + DeserializeOwned + Send + Sync + 'static,
{
    fn contains_key(&self, key: K) -> Result<bool> {
        // Cache hit short-circuits the shm lookup.
        if self.cache.lock().contains(&key) {
            return Ok(true);
        }
        Ok(shmffi::with(|segment| {
            segment.table.contains_key(&self.hash_key(key))
        }))
    }

    fn get(&self, key: K) -> Result<Option<V>> {
        if let Some(val) = self.cache.lock().get(&key) {
            return Ok(Some(val.clone()));
        }
        let hash = self.hash_key(key);
        let val_opt: Option<V> = shmffi::with(|segment| {
            segment
                .table
                .get(&hash)
                .map(|heap_value| match self.compression {
                    Compression::None => deserialize(heap_value.as_slice()),
                    Compression::Lz4 { .. } => {
                        lz4_decompress_and_deserialize(heap_value.as_slice())
                    }
                    Compression::Zstd { .. } => {
                        zstd_decompress_and_deserialize(heap_value.as_slice())
                    }
                })
                .transpose()
        })?;
        // Populate the cache on a successful shm read.
        if let Some(val) = &val_opt {
            self.cache.lock().put(key, val.clone());
        }
        Ok(val_opt)
    }

    fn insert(&self, key: K, val: V) -> Result<()> {
        // Serialize/compress first; the cache is only updated once that
        // cannot fail anymore.
        let blob = match self.compression {
            Compression::None => serialize(&val)?,
            Compression::Lz4 { compression_level } => {
                serialize_and_lz4_compress(&val, compression_level)?
            }
            Compression::Zstd { compression_level } => {
                serialize_and_zstd_compress(&val, compression_level)?
            }
        };
        self.cache.lock().put(key, val);
        let blob = ocaml_blob::SerializedValue::BStr(&blob);
        let _did_insert = shmffi::with(|segment| {
            segment.table.insert(
                self.hash_key(key),
                Some(Layout::from_size_align(blob.as_slice().len(), 1).unwrap()),
                self.evictable,
                |buffer| blob.to_heap_value_in(self.evictable, buffer),
            )
        });
        Ok(())
    }

    fn move_batch(&self, keys: &mut dyn Iterator<Item = (K, K)>) -> Result<()> {
        let mut cache = self.cache.lock();
        for (old_key, new_key) in keys {
            let old_hash = self.hash_key(old_key);
            let new_hash = self.hash_key(new_key);
            shmffi::with(|segment| {
                // Copy the raw blob out, then re-insert it under the new
                // hash, preserving the original header/evictability.
                let (header, data) = segment.table.inspect_and_remove(&old_hash, |value| {
                    let value = value.unwrap();
                    (value.header, <Box<[u8]>>::from(value.as_slice()))
                });
                cache.pop(&old_key);
                segment.table.insert(
                    new_hash,
                    Some(Layout::from_size_align(data.len(), 1).unwrap()),
                    header.is_evictable(),
                    |buffer| {
                        buffer.copy_from_slice(&data);
                        ocaml_blob::HeapValue {
                            header,
                            data: std::ptr::NonNull::new(buffer.as_mut_ptr()).unwrap(),
                        }
                    },
                );
                // We choose not to `cache.put(new_key, ...)` here.
            });
        }
        Ok(())
    }

    fn remove_batch(&self, keys: &mut dyn Iterator<Item = K>) -> Result<()> {
        let mut cache = self.cache.lock();
        for key in keys {
            cache.pop(&key);
            let hash = self.hash_key(key);
            // `inspect_and_remove` unwraps the entry, so absent keys are
            // skipped up front instead of panicking.
            let contains = shmffi::with(|segment| segment.table.contains_key(&hash));
            if !contains {
                continue;
            }
            let _size = shmffi::with(|segment| {
                segment
                    .table
                    .inspect_and_remove(&hash, |value| value.unwrap().as_slice().len())
            });
        }
        Ok(())
    }
}

/// Bincode-serializes `val` (with interned-symbol support).
fn serialize<T: Serialize>(val: &T) -> Result<Vec<u8>> {
    let mut serialized = Vec::new();
    bincode::serialize_into(&mut serialized, &intern::WithIntern(val))?;
    Ok(serialized)
}

/// Inverse of `serialize`.
fn deserialize<T: DeserializeOwned>(serialized: &[u8]) -> Result<T> {
    Ok(intern::WithIntern::strip(bincode::deserialize(serialized))?)
}

fn serialize_and_lz4_compress<T: Serialize>(val: &T, level: u32) -> Result<Vec<u8>> {
    let encoder = lz4::EncoderBuilder::new().level(level).build(vec![])?;
    let mut w = std::io::BufWriter::new(encoder);
    bincode::serialize_into(&mut w, &intern::WithIntern(val))?;
    w.flush()?;
    let encoder = w.into_inner().expect("into_inner returned Err after flush");
    let (compressed, result) = encoder.finish();
    result?;
    Ok(compressed)
}

fn lz4_decompress_and_deserialize<R: Read, T: DeserializeOwned>(r: R) -> Result<T> {
    let r = lz4::Decoder::new(r)?;
    let mut r = std::io::BufReader::new(r);
    Ok(intern::WithIntern::strip(bincode::deserialize_from(
        &mut r,
    ))?)
}

fn serialize_and_zstd_compress<T: Serialize>(val: &T, level: i32) -> Result<Vec<u8>> {
    let mut compressed = vec![];
    let w = zstd::Encoder::new(&mut compressed, level)?.auto_finish();
    let mut w = std::io::BufWriter::new(w);
    bincode::serialize_into(&mut w, &intern::WithIntern(val))?;
    // Dropping the writer flushes it and finishes the zstd frame.
    drop(w);
    Ok(compressed)
}

fn zstd_decompress_and_deserialize<R: Read, T: DeserializeOwned>(r: R) -> Result<T> {
    let r = zstd::Decoder::new(r)?;
    let mut r = std::io::BufReader::new(r);
    Ok(intern::WithIntern::strip(bincode::deserialize_from(
        &mut r,
    ))?)
}

impl<K, V> std::fmt::Debug for ShmStore<K, V> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("ShmStore").finish()
    }
}

/// There seems to be a problem with using the impl of `Hash` for interned
/// symbols: since they're only 32-bit IDs, hashes based on them tend to
/// collide, which our shmrs library cannot tolerate. Instead, we use this
/// custom hashing trait and hash the entire string representation of the
/// symbol. We might want to revisit this later and see whether there's a way to
/// hash these less expensively.
pub trait Key {
    fn hash_key<H: Hasher>(&self, state: &mut H);
}

impl Key for pos::TypeName {
    fn hash_key<H: Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}
impl Key for pos::ModuleName {
    fn hash_key<H: Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}
impl Key for pos::FunName {
    fn hash_key<H: Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}
impl Key for pos::ConstName {
    fn hash_key<H: Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}

// (type, member) pairs hash both components.
impl<T: AsRef<str>> Key for (pos::TypeName, T) {
    fn hash_key<H: Hasher>(&self, state: &mut H) {
        let type_name: &str = self.0.as_ref();
        type_name.hash(state);
        let member_name: &str = self.1.as_ref();
        member_name.hash(state);
    }
}

impl Key for pos::RelativePath {
    fn hash_key<H: Hasher>(&self, state: &mut H) {
        self.prefix().hash(state);
        self.suffix().as_bytes().hash(state);
    }
}

impl Key for hh24_types::ToplevelSymbolHash {
    fn hash_key<H: Hasher>(&self, state: &mut H) {
        self.hash(state);
    }
}

impl Key for hh24_types::ToplevelCanonSymbolHash {
    fn hash_key<H: Hasher>(&self, state: &mut H) {
        self.hash(state);
    }
}

/// A `datastore::Store` which writes its values to sharedmem (via the `shmffi`
/// crate) as OCaml-marshaled values. Can be configured to compress the
/// marshaled blobs using `Compression`.
pub struct OcamlShmStore<K, V> {
    /// An LRU cache of hashconsed values in front of the serialized shm heap.
    cache: Mutex<lru::LruCache<K, V>>,
    evictable: bool,
    compression: Compression,
    prefix: &'static str,
}

impl<K, V> OcamlShmStore<K, V>
where
    K: Key + Copy + Hash + Eq + Send + Sync + 'static,
    V: Clone + Send + Sync + 'static,
{
    pub fn new(prefix: &'static str, evictability: Evictability, compression: Compression) -> Self {
        Self {
            cache: Mutex::new(lru::LruCache::new(1000)),
            evictable: matches!(evictability, Evictability::Evictable),
            compression,
            prefix,
        }
    }

    /// # Safety
    ///
    /// Must be invoked on the main thread. Calls into the OCaml runtime and may
    /// trigger a GC, so no unrooted OCaml values may exist. The returned
    /// `UnsafeOcamlPtr` is unrooted and could be invalidated if the GC is
    /// triggered after this method returns.
    pub unsafe fn get_ocaml_value(&self, key: K) -> Option<UnsafeOcamlPtr> {
        shmffi::with(|segment| {
            segment.table.get(&self.hash_key(key)).map(|heap_value| {
                extern "C" {
                    fn caml_input_value_from_block(data: *const u8, size: usize) -> UnsafeOcamlPtr;
                }
                let bytes = self.decompress(heap_value.as_slice()).unwrap();
                caml_input_value_from_block(bytes.as_ptr(), bytes.len())
            })
        })
    }

    /// Undoes this store's configured compression; `None` borrows the input.
    fn decompress<'a>(&self, bytes: &'a [u8]) -> Result<Cow<'a, [u8]>> {
        Ok(match self.compression {
            Compression::None => Cow::Borrowed(bytes),
            Compression::Lz4 { .. } => Cow::Owned(lz4_decompress(bytes)?),
            Compression::Zstd { .. } => Cow::Owned(zstd_decompress(bytes)?),
        })
    }

    /// Same prefixed hashing scheme as `ShmStore::hash_key`.
    fn hash_key(&self, key: K) -> u64 {
        let mut hasher = hash::Hasher::default();
        self.prefix.hash(&mut hasher);
        key.hash_key(&mut hasher);
        hasher.finish()
    }
}

impl<K, V> datastore::Store<K, V> for OcamlShmStore<K, V>
where
    K: Key + Copy + Hash + Eq + Send + Sync + 'static,
    V: ToOcamlRep + FromOcamlRep + Clone + Send + Sync + 'static,
{
    fn contains_key(&self, key: K) -> Result<bool> {
        if self.cache.lock().contains(&key) {
            return Ok(true);
        }
        Ok(shmffi::with(|segment| {
            segment.table.contains_key(&self.hash_key(key))
        }))
    }

    fn get(&self, key: K) -> Result<Option<V>> {
        if let Some(val) = self.cache.lock().get(&key) {
            return Ok(Some(val.clone()));
        }
        let hash = self.hash_key(key);
        let val_opt: Option<V> = shmffi::with(|segment| {
            segment
                .table
                .get(&hash)
                .map(|heap_value| -> Result<_> {
                    let bytes = self.decompress(heap_value.as_slice()).unwrap();
                    let arena = ocamlrep::Arena::new();
                    let value = unsafe { ocamlrep_marshal::input_value(&bytes, &arena) };
                    Ok(V::from_ocamlrep(value)?)
                })
                .transpose()
        })?;
        if let Some(val) = &val_opt {
            self.cache.lock().put(key, val.clone());
        }
        Ok(val_opt)
    }

    fn insert(&self, key: K, val: V) -> Result<()> {
        // Marshal through an ocamlrep arena, then optionally compress.
        let arena = ocamlrep::Arena::new();
        let ocaml_val = arena.add_root(&val);
        let mut bytes = std::io::Cursor::new(Vec::with_capacity(4096));
        ocamlrep_marshal::output_value(
            &mut bytes,
            ocaml_val,
            ocamlrep_marshal::ExternFlags::empty(),
        )?;
        let bytes = bytes.into_inner();
        let bytes = match self.compression {
            Compression::None => bytes,
            Compression::Lz4 { compression_level } => lz4_compress(&bytes, compression_level)?,
            Compression::Zstd { compression_level } => zstd_compress(&bytes, compression_level)?,
        };
        self.cache.lock().put(key, val);
        let blob = ocaml_blob::SerializedValue::BStr(&bytes);
        let _did_insert = shmffi::with(|segment| {
            segment.table.insert(
                self.hash_key(key),
                Some(Layout::from_size_align(blob.as_slice().len(), 1).unwrap()),
                self.evictable,
                |buffer| blob.to_heap_value_in(self.evictable, buffer),
            )
        });
        Ok(())
    }

    fn move_batch(&self, keys: &mut dyn Iterator<Item = (K, K)>) -> Result<()> {
        let mut cache = self.cache.lock();
        for (old_key, new_key) in keys {
            let old_hash = self.hash_key(old_key);
            let new_hash = self.hash_key(new_key);
            shmffi::with(|segment| {
                let (header, data) = segment.table.inspect_and_remove(&old_hash, |value| {
                    let value = value.unwrap();
                    (value.header, <Box<[u8]>>::from(value.as_slice()))
                });
                cache.pop(&old_key);
                segment.table.insert(
                    new_hash,
                    Some(Layout::from_size_align(data.len(), 1).unwrap()),
                    header.is_evictable(),
                    |buffer| {
                        buffer.copy_from_slice(&data);
                        ocaml_blob::HeapValue {
                            header,
                            data: std::ptr::NonNull::new(buffer.as_mut_ptr()).unwrap(),
                        }
                    },
                );
                // We choose not to `cache.put(new_key, ...)` here.
            });
        }
        Ok(())
    }

    fn remove_batch(&self, keys: &mut dyn Iterator<Item = K>) -> Result<()> {
        let mut cache = self.cache.lock();
        for key in keys {
            let hash = self.hash_key(key);
            // NOTE(review): unlike `ShmStore::remove_batch`, there is no
            // `contains_key` guard here, so an absent key reaches
            // `value.unwrap()` inside `inspect_and_remove` — confirm callers
            // guarantee the keys exist.
            let _size = shmffi::with(|segment| {
                segment
                    .table
                    .inspect_and_remove(&hash, |value| value.unwrap().as_slice().len())
            });
            cache.pop(&key);
        }
        Ok(())
    }
}

fn lz4_compress(mut bytes: &[u8], level: u32) -> Result<Vec<u8>> {
    let mut encoder = lz4::EncoderBuilder::new().level(level).build(vec![])?;
    std::io::copy(&mut bytes, &mut encoder)?;
    let (compressed, result) = encoder.finish();
    result?;
    Ok(compressed)
}

fn lz4_decompress(compressed: &[u8]) -> Result<Vec<u8>> {
    let mut decompressed = vec![];
    let mut decoder = lz4::Decoder::new(compressed)?;
    std::io::copy(&mut decoder, &mut decompressed)?;
    Ok(decompressed)
}

fn zstd_compress(mut bytes: &[u8], level: i32) -> Result<Vec<u8>> {
    let mut compressed = vec![];
    zstd::stream::copy_encode(&mut bytes, &mut compressed, level)?;
    Ok(compressed)
}

fn zstd_decompress(mut compressed: &[u8]) -> Result<Vec<u8>> {
    let mut decompressed = vec![];
    zstd::stream::copy_decode(&mut compressed, &mut decompressed)?;
    Ok(decompressed)
}

impl<K, V> std::fmt::Debug for OcamlShmStore<K, V> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("OcamlShmStore").finish()
    }
}
//! This module defines the `Architecture` enum, and all the instructions declared for each //! architecture. //! //! Code for instruction matching is generated by a Python script during the build. #![allow(non_upper_case_globals)] use builder::{Builder, Value, ValueKind}; use emit::Emitter; use std::fmt::{self, Display, Formatter}; use std::mem; #[allow(unused_imports)] use byteorder::{NativeEndian, WriteBytesExt}; /// Informations about the current usage of the registers. pub type RegistersState = u128; /// Informations about the current usage of the stack. pub type StackState = u32; // //==========================================================================// // // OPERAND // // //==========================================================================// bitflags! { /// The definition of an operand. pub struct OpDef: u32 { /// Operand is null. const Null = 0b0_0000_0000; /// Operand is a register. const Register = 0b0_0000_0001; /// Operand is on the stack. const Stack = 0b0_0000_0010; /// Operand is in the memory. const Memory = 0b0_0000_0100; /// Operand is an immediate value. const Immediate = 0b0_0000_1000; /// Operand is a relative offset to next instruction. const Relative = 0b0_0001_0000; } } /// The architecture-specific location of an operand. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum OperandLocation { /// The value does not exist. Null, /// The value is stored on the register. Register(u16), /// The value is stored on the stack. Stack(u16), /// The value is stored somewhere in the memory. Memory(u64), /// The value is immediate (or constant). Immediate(u128), /// The value represents a relative address. Relative(i64) } /// A parameter passed to a `Proc`. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct Operand { /// Location (stack or register) of the parameter. pub location: OperandLocation, /// Size (in bytes) of the operand. pub size: u16 } impl Operand { /// Creates a new operand, given its size in bytes and location. 
#[inline] pub fn new(size: u16, location: OperandLocation) -> Self { Operand { size, location } } /// Creates a new null operand, with a null size and location. #[inline] pub fn null() -> Self { Operand { size: 0, location: OperandLocation::Null } } /// Returns the size of the data the operand represents. #[inline] pub fn size(&self) -> u16 { self.size } /// Returns a mutable reference to the raw data of the operand. /// /// # Panics /// The operand isn't a register. pub fn raw(&mut self) -> &mut u16 { match &mut self.location { &mut OperandLocation::Register(ref mut r) => r, _ => panic!("Operand::raw() can only be called on register operands.") } } /// Returns a "definition" of this operand. #[inline] pub fn definition(&self) -> OpDef { use self::OperandLocation::*; match self.location { Null => OpDef::Null, Register(_) => OpDef::Register, Stack(_) | Memory(_) => OpDef::Memory, Immediate(_) => OpDef::Immediate, Relative(_) => OpDef::Relative } } /// Returns the register represented by this operand, /// if it does represent a register. #[inline] pub fn get_register(&self) -> Option<u16> { if let OperandLocation::Register(reg) = self.location { Some(reg) } else { None } } } impl Display for Operand { fn fmt(&self, f: &mut Formatter) -> fmt::Result { use self::OperandLocation::*; match self.location { Null => Ok(()), Register(reg) => write!(f, "@{:03b}", reg), Stack(offset) => write!(f, "(stack -{:#X})", offset), Memory(addr) => write!(f, "[{:#X}]", addr), Immediate(imm) => write!(f, "{:#X}", imm), Relative(rel) => write!(f, "[{:#X}]", rel) } } } // //==========================================================================// // // ARCHITECTURE // // //==========================================================================// /// Defines the target architecture of a compilation. #[derive(Eq, PartialEq, Debug, Copy, Clone)] #[allow(non_camel_case_types)] pub enum Architecture { /// ARM architecture. ARM, /// x86 architecture. X86, /// x86-64 architecture. 
X86_64 } use self::Architecture::*; impl Architecture { /// Returns the current architecture, or `None` if the current architecture is not supported. #[inline] pub fn current() -> Option<Architecture> { match ::std::env::consts::ARCH { "arm" => Some(ARM), "x86" => Some(X86), "x86_64" => Some(X86_64), _ => None } } /// Encodes the specified instruction. pub fn encode_instr(&self, name: &str, op1: Operand, op2: Operand, op3: Operand, emitter: &mut Emitter) -> bool { match *self { X86 | X86_64 => x86::encode_instr(name, op1, op2, op3, emitter), _ => unimplemented!() } } /// Returns the default `RegistersState` for this architecture. pub fn default_state(&self) -> (RegistersState, StackState) { ((self.average_instruction_size() as usize * mem::size_of::<usize>()) as RegistersState, 0) } /// Returns a native operand that describes the given architecture-independant operand. /// /// Registers will be returned on priority, but stack offsets might be returned if no register /// is available. pub fn get_native_operand(&self, operand: &Value, rstate: &mut RegistersState, sstate: &mut StackState) -> Operand { let size = operand.size(); match operand.kind() { ValueKind::Immediate(imm) => Operand::new(size, OperandLocation::Immediate(imm as _)), ValueKind::Memory(mem) => Operand::new(size, OperandLocation::Memory(mem)), ValueKind::Argument(_) | ValueKind::Variable(_, _) => match match *self { X86 | X86_64 => x86::to_native(size, rstate), _ => unimplemented!() } { Some(operand) => operand, None => { *sstate += size as u32; Operand::new(size as _, OperandLocation::Stack(*sstate as _)) } }, _ => unimplemented!() } } /// Returns the average instruction size on this platform. pub fn average_instruction_size(&self) -> u8 { match *self { ARM => 4, X86 | X86_64 => 8 } } /// Returns the number of registers that can contain a value /// with the specified size in this architecture. 
pub fn registers_count(&self, size: u16) -> u16 { match (*self, size) { (ARM, n) if n <= 32 => 16, (X86, _) | (X86_64, _) => 12, _ => 0 } } /// Reduces common groups of instructions into specific instructions /// in the target architecture. pub fn reduce(&self, _builder: &mut Builder) { match *self { X86 | X86_64 | ARM => () } } /// Returns the default return operand for the target architecture. pub fn default_return_operand(&self, size: u16) -> Operand { match *self { X86 | X86_64 => Operand::new(size, OperandLocation::Register(0)), _ => unimplemented!() } } } impl Default for Architecture { fn default() -> Self { Self::current().expect("The current architecture is not supported.") } } impl Display for Architecture { fn fmt(&self, f: &mut Formatter) -> fmt::Result { f.write_str(match *self { ARM => "ARM", X86 => "x86", X86_64 => "x86-64" }) } } // //==========================================================================// // // ARCHITECTURE-SPECIFIC CODE // // //==========================================================================// mod x86 { use super::*; use super::OperandLocation as Location; include!(concat!(env!("OUT_DIR"), "/x86.rs")); bitflags! 
{ pub struct Reg: u128 { const AX = 0b0001; const CX = 0b0010; const DX = 0b0100; const BX = 0b1000; const SP = 0b0001_0000; const BP = 0b0010_0000; const SI = 0b0100_0000; const DI = 0b1000_0000; } } #[allow(if_not_else)] pub fn to_native(size: u16, rstate: &mut RegistersState) -> Option<Operand> { let mut reg = Reg::from_bits_truncate(*rstate); let flag = if !reg.contains(Reg::AX) { reg.set(Reg::AX, true); 0b0000 } else if !reg.contains(Reg::CX) { reg.set(Reg::CX, true); 0b0001 } else if !reg.contains(Reg::DX) { reg.set(Reg::DX, true); 0b0010 } else if !reg.contains(Reg::BX) { reg.set(Reg::BX, true); 0b0011 } else { return None }; *rstate = reg.bits(); Some(Operand::new(size, Location::Register(flag as _))) } fn encode_opcode(opcode: u64, emitter: &mut Emitter) { unsafe { let opcode: [u8; 8] = mem::transmute(opcode); let count = match (opcode[0], opcode[1], opcode[2]) { (0x0F, 0x01, _) | (0x0F, 0xAE, _) | (0x0F, 0x38, _) | (0x0F, 0x3A, _) => 3, (0x0F, _, _) => 2, (_, _, _) => 1 }; emitter.emit_bytes(&opcode[0..count]).expect("Could not emit opcode"); } } fn encode_operand(operand: Operand, emitter: &mut Emitter) { match operand.location { Location::Null => return, Location::Register(opc) => { emitter.emit_byte(opc as u8) }, Location::Stack(offset) => { if offset < 128 { emitter.emit_byte((256 - offset) as u8) } else { emitter.write_int::<NativeEndian>(-(offset as i64), operand.size as _).map_err(Into::into) } }, Location::Relative(rel) => { emitter.write_int::<NativeEndian>(rel as _, operand.size as _).map_err(Into::into) }, Location::Immediate(val) => { emitter.write_int::<NativeEndian>(val as _, operand.size as _).map_err(Into::into) }, Location::Memory(_addr) => unimplemented!() } .expect("Could not encode operand."); } pub fn encode_instr(mnemonic: &str, mut op1: Operand, mut op2: Operand, mut op3: Operand, emitter: &mut Emitter) -> bool { if mnemonic != "nop" { info!("{:#X}\t{} {} ({}), {} ({}), {} ({})", emitter.offset(), mnemonic, op1, op1.size, op2, 
op2.size, op3, op3.size); } // encode opcode let opcode = match get_opcode(mnemonic, &mut op1, &mut op2, &mut op3) { Some(opcode) => opcode, None => return false }; // special opcodes match opcode { 0xB0 | 0xB8 | 0x40 | 0x48 | 0x50 | 0x58 | 0x0FC8 => { encode_opcode(opcode + op1.get_register().unwrap() as u64, emitter); op1 = Operand::null(); }, opcode => encode_opcode(opcode, emitter) } // encode operands if let Some(r1) = op1.get_register() { if let Some(r2) = op2.get_register() { // registers can be encoded in a single byte let mut r = 0b1100_0000 + r1; r += r2 << 3; if r < u8::max_value() as u16 { emitter.emit_byte(r as u8).expect("Could not emit byte."); return true } } } encode_operand(op1, emitter); encode_operand(op2, emitter); encode_operand(op3, emitter); true } } mod arm { }
pub fn compute() -> stretch::result::Layout { stretch::node::Node::new( stretch::style::Style { size: stretch::geometry::Size { width: stretch::style::Dimension::Points(500f32), height: stretch::style::Dimension::Points(200f32), ..Default::default() }, ..Default::default() }, vec![ &stretch::node::Node::new( stretch::style::Style { flex_grow: 0.2f32, flex_shrink: 0f32, flex_basis: stretch::style::Dimension::Points(40f32), ..Default::default() }, vec![], ), &stretch::node::Node::new( stretch::style::Style { flex_grow: 0.2f32, flex_shrink: 0f32, ..Default::default() }, vec![], ), &stretch::node::Node::new( stretch::style::Style { flex_grow: 0.4f32, flex_shrink: 0f32, ..Default::default() }, vec![], ), ], ) .compute_layout(stretch::geometry::Size::undefined()) .unwrap() }
//! The procedural macro for vulkano's shader system. //! Manages the compile-time compilation of GLSL into SPIR-V and generation of assosciated rust code. //! //! # Basic usage //! //! ``` //! mod vs { //! vulkano_shaders::shader!{ //! ty: "vertex", //! src: " //! #version 450 //! //! layout(location = 0) in vec3 position; //! //! void main() { //! gl_Position = vec4(position, 1.0); //! }" //! } //! } //! # fn main() {} //! ``` //! //! # Details //! //! If you want to take a look at what the macro generates, your best options //! are to either read through the code that handles the generation (the //! [`reflect`][reflect] function in the `vulkano-shaders` crate) or use a tool //! such as [cargo-expand][cargo-expand] to view the expansion of the macro in your //! own code. It is unfortunately not possible to provide a `generated_example` //! module like some normal macro crates do since derive macros cannot be used from //! the crate they are declared in. On the other hand, if you are looking for a //! high-level overview, you can see the below section. //! //! # Generated code overview //! //! The macro generates the following items of interest: //! * The `Shader` struct. This contains a single field, `shader`, which is an //! `Arc<ShaderModule>`. //! * The `Shader::load` constructor. This method takes an `Arc<Device>`, calls //! [`ShaderModule::new`][ShaderModule::new] with the passed-in device and the //! shader data provided via the macro, and returns `Result<Shader, OomError>`. //! Before doing so, it loops through every capability instruction in the shader //! data, verifying that the passed-in `Device` has the appropriate features //! enabled. **This function currently panics if a feature required by the shader //! is not enabled on the device.** At some point in the future it will return //! an error instead. //! * The `Shader::module` method. This method simply returns a reference to the //! 
`Arc<ShaderModule>` contained within the `shader` field of the `Shader` //! struct. //! * Methods for each entry point of the shader module. These construct and //! return the various entry point structs that can be found in the //! [vulkano::pipeline::shader][pipeline::shader] module. //! * A Rust struct translated from each struct contained in the shader data. //! By default each structure has a `Clone` and a `Copy` implemenetations. This //! behavior could be customized through the `types_meta` macro option(see below //! for details). //! * The `Layout` newtype. This contains a [`ShaderStages`][ShaderStages] struct. //! An implementation of [`PipelineLayoutDesc`][PipelineLayoutDesc] is also //! generated for the newtype. //! * The `SpecializationConstants` struct. This contains a field for every //! specialization constant found in the shader data. Implementations of //! `Default` and [`SpecializationConstants`][SpecializationConstants] are also //! generated for the struct. //! //! All of these generated items will be accessed through the module specified //! by `mod_name: foo` If you wanted to store the `Shader` in a struct of your own, //! you could do something like this: //! //! ``` //! # fn main() {} //! # use std::sync::Arc; //! # use vulkano::OomError; //! # use vulkano::device::Device; //! # //! # mod vs { //! # vulkano_shaders::shader!{ //! # ty: "vertex", //! # src: " //! # #version 450 //! # //! # layout(location = 0) in vec3 position; //! # //! # void main() { //! # gl_Position = vec4(position, 1.0); //! # }" //! # } //! # } //! // various use statements //! // `vertex_shader` module with shader derive //! //! pub struct Shaders { //! pub vs: vs::Shader //! } //! //! impl Shaders { //! pub fn load(device: Arc<Device>) -> Result<Self, OomError> { //! Ok(Self { //! vs: vs::Shader::load(device)?, //! }) //! } //! } //! ``` //! //! # Options //! //! The options available are in the form of the following attributes: //! //! ## `ty: "..."` //! //! 
This defines what shader type the given GLSL source will be compiled into. //! The type can be any of the following: //! //! * `vertex` //! * `fragment` //! * `geometry` //! * `tess_ctrl` //! * `tess_eval` //! * `compute` //! //! For details on what these shader types mean, [see Vulkano's documentation][pipeline]. //! //! ## `src: "..."` //! //! Provides the raw GLSL source to be compiled in the form of a string. Cannot //! be used in conjunction with the `path` or `bytes` field. //! //! ## `path: "..."` //! //! Provides the path to the GLSL source to be compiled, relative to `Cargo.toml`. //! Cannot be used in conjunction with the `src` or `bytes` field. //! //! ## `bytes: "..."` //! //! Provides the path to precompiled SPIR-V bytecode, relative to `Cargo.toml`. //! Cannot be used in conjunction with the `src` or `path` field. //! This allows using shaders compiled through a separate build system. //! **Note**: If your shader contains multiple entrypoints with different //! descriptor sets, you may also need to enable `exact_entrypoint_interface`. //! //! ## `include: ["...", "...", ..., "..."]` //! //! Specifies the standard include directories to be searched through when using the //! `#include <...>` directive within a shader source. Include directories can be absolute //! or relative to `Cargo.toml`. //! If `path` was specified, relative paths can also be used (`#include "..."`), without the need //! to specify one or more standard include directories. Relative paths are relative to the //! directory, which contains the source file the `#include "..."` directive is declared in. //! //! ## `define: [("NAME", "VALUE"), ...]` //! //! Adds the given macro definitions to the pre-processor. This is equivalent to passing `-DNAME=VALUE` //! on the command line. //! //! ## `types_meta: { use a::b; #[derive(Clone, Default, PartialEq ...)] impl Eq }` //! //! Extends implementations of Rust structs that represent Shader structs. //! //! 
By default each generated struct has a `Clone` and a `Copy` implementations //! only. If the struct has unsized members none of derives or impls applied on //! this struct. //! //! The block may have as many `use`, `derive` or `impl` statements as needed //! and in any order. //! //! Each `use` declaration will be added to generated `ty` module. And each //! `derive`'s trait and `impl` statement will be applied to each generated //! struct inside `ty` module. //! //! For `Default` derive implementation fills a struct data with all zeroes. //! For `Display` and `Debug` derive implementation prints all fields except `_dummyX`. //! For `PartialEq` derive implementation all non-`_dummyX` are checking for equality. //! //! The macro performs trivial checking for duplicate declarations. To see the //! final output of generated code the user can also use `dump` macro //! option(see below). //! //! ## `exact_entrypoint_interface: true` //! //! By default, the macro assumes that all resources (Uniforms, Storage Buffers, //! Images, Samplers, etc) need to be bound into a descriptor set, even if they are //! not used in the shader code. However, shaders with multiple entrypoints may have //! conflicting descriptor sets for each entrypoint. Enabling this option will force //! the macro to only generate descriptor information for resources that are used //! in each entrypoint. //! //! The macro determines which resources are used by looking at each entrypoint's //! interface and bytecode. See [`src/descriptor_sets.rs`][descriptor_sets] //! for the exact logic. //! //! ## `dump: true` //! //! The crate fails to compile but prints the generated rust code to stdout. //! //! [reflect]: https://github.com/vulkano-rs/vulkano/blob/master/vulkano-shaders/src/lib.rs#L67 //! [cargo-expand]: https://github.com/dtolnay/cargo-expand //! [ShaderModule::new]: https://docs.rs/vulkano/*/vulkano/pipeline/shader/struct.ShaderModule.html#method.new //! 
[OomError]: https://docs.rs/vulkano/*/vulkano/enum.OomError.html //! [pipeline::shader]: https://docs.rs/vulkano/*/vulkano/pipeline/shader/index.html //! [descriptor]: https://docs.rs/vulkano/*/vulkano/descriptor/index.html //! [ShaderStages]: https://docs.rs/vulkano/*/vulkano/descriptor/descriptor/struct.ShaderStages.html //! [PipelineLayoutDesc]: https://docs.rs/vulkano/*/vulkano/descriptor/pipeline_layout/trait.PipelineLayoutDesc.html //! [SpecializationConstants]: https://docs.rs/vulkano/*/vulkano/pipeline/shader/trait.SpecializationConstants.html //! [pipeline]: https://docs.rs/vulkano/*/vulkano/pipeline/index.html //! [descriptor_sets]: https://github.com/vulkano-rs/vulkano/blob/master/vulkano-shaders/src/descriptor_sets.rs#L142 #![doc(html_logo_url = "https://raw.githubusercontent.com/vulkano-rs/vulkano/master/logo.png")] #![recursion_limit = "1024"] #[macro_use] extern crate quote; #[macro_use] extern crate syn; extern crate proc_macro; use std::fs; use std::fs::File; use std::io::{Read, Result as IoResult}; use std::path::Path; use std::{env, iter::empty}; use syn::parse::{Parse, ParseStream, Result}; use syn::{ Ident, ItemUse, LitBool, LitStr, Meta, MetaList, NestedMeta, Path as SynPath, TypeImplTrait, }; mod codegen; mod descriptor_sets; mod entry_point; mod enums; mod parse; mod spec_consts; mod spirv_search; mod structs; use crate::codegen::ShaderKind; use std::slice::from_raw_parts; enum SourceKind { Src(String), Path(String), Bytes(String), } struct TypesMeta { custom_derives: Vec<SynPath>, clone: bool, copy: bool, display: bool, debug: bool, default: bool, partial_eq: bool, uses: Vec<ItemUse>, impls: Vec<TypeImplTrait>, } impl Default for TypesMeta { #[inline] fn default() -> Self { Self { custom_derives: vec![], clone: true, copy: true, partial_eq: false, debug: false, display: false, default: false, uses: Vec::new(), impls: Vec::new(), } } } impl TypesMeta { #[inline] fn empty() -> Self { Self { custom_derives: Vec::new(), clone: false, copy: 
false, partial_eq: false, debug: false, display: false, default: false, uses: Vec::new(), impls: Vec::new(), } } } struct MacroInput { shader_kind: ShaderKind, source_kind: SourceKind, include_directories: Vec<String>, macro_defines: Vec<(String, String)>, types_meta: TypesMeta, exact_entrypoint_interface: bool, dump: bool, } impl Parse for MacroInput { fn parse(input: ParseStream) -> Result<Self> { let mut dump = None; let mut shader_kind = None; let mut source_kind = None; let mut include_directories = Vec::new(); let mut macro_defines = Vec::new(); let mut types_meta = None; let mut exact_entrypoint_interface = None; while !input.is_empty() { let name: Ident = input.parse()?; input.parse::<Token![:]>()?; match name.to_string().as_ref() { "ty" => { if shader_kind.is_some() { panic!("Only one `ty` can be defined") } let ty: LitStr = input.parse()?; let ty = match ty.value().as_ref() { "vertex" => ShaderKind::Vertex, "fragment" => ShaderKind::Fragment, "geometry" => ShaderKind::Geometry, "tess_ctrl" => ShaderKind::TessControl, "tess_eval" => ShaderKind::TessEvaluation, "compute" => ShaderKind::Compute, _ => panic!("Unexpected shader type, valid values: vertex, fragment, geometry, tess_ctrl, tess_eval, compute") }; shader_kind = Some(ty); } "src" => { if source_kind.is_some() { panic!("Only one of `src`, `path`, or `bytes` can be defined") } let src: LitStr = input.parse()?; source_kind = Some(SourceKind::Src(src.value())); } "path" => { if source_kind.is_some() { panic!("Only one of `src`, `path`, or `bytes` can be defined") } let path: LitStr = input.parse()?; source_kind = Some(SourceKind::Path(path.value())); } "bytes" => { if source_kind.is_some() { panic!("Only one of `src`, `path`, or `bytes` can be defined") } let path: LitStr = input.parse()?; source_kind = Some(SourceKind::Bytes(path.value())); } "define" => { let array_input; bracketed!(array_input in input); while !array_input.is_empty() { let tuple_input; parenthesized!(tuple_input in array_input); let 
name: LitStr = tuple_input.parse()?; tuple_input.parse::<Token![,]>()?; let value: LitStr = tuple_input.parse()?; macro_defines.push((name.value(), value.value())); if !array_input.is_empty() { array_input.parse::<Token![,]>()?; } } } "include" => { let in_brackets; bracketed!(in_brackets in input); while !in_brackets.is_empty() { let path: LitStr = in_brackets.parse()?; include_directories.push(path.value()); if !in_brackets.is_empty() { in_brackets.parse::<Token![,]>()?; } } } "types_meta" => { let in_braces; braced!(in_braces in input); let mut meta = TypesMeta::empty(); while !in_braces.is_empty() { if in_braces.peek(Token![#]) { in_braces.parse::<Token![#]>()?; let in_brackets; bracketed!(in_brackets in in_braces); let derive_list: MetaList = in_brackets.parse()?; for derive in derive_list.nested { match derive { NestedMeta::Meta(Meta::Path(path)) => { let custom_derive = if let Some(derive_ident) = path.get_ident() { match derive_ident.to_string().as_str() { "Clone" => { if meta.default { return Err(in_brackets .error("Duplicate Clone derive")); } meta.clone = true; false } "Copy" => { if meta.copy { return Err(in_brackets .error("Duplicate Copy derive")); } meta.copy = true; false } "PartialEq" => { if meta.partial_eq { return Err(in_brackets .error("Duplicate PartialEq derive")); } meta.partial_eq = true; false } "Debug" => { if meta.debug { return Err(in_brackets .error("Duplicate Debug derive")); } meta.debug = true; false } "Display" => { if meta.display { return Err(in_brackets .error("Duplicate Display derive")); } meta.display = true; false } "Default" => { if meta.default { return Err(in_brackets .error("Duplicate Default derive")); } meta.default = true; false } _ => true, } } else { true }; if custom_derive { if meta .custom_derives .iter() .any(|candidate| candidate.eq(&path)) { return Err( in_braces.error("Duplicate derive declaration") ); } meta.custom_derives.push(path); } } _ => return Err(in_brackets.error("Unsupported syntax")), } } 
continue; } if in_braces.peek(Token![impl]) { let impl_trait: TypeImplTrait = in_braces.parse()?; if meta.impls.iter().any(|candidate| candidate == &impl_trait) { return Err(in_braces.error("Duplicate \"impl\" declaration")); } meta.impls.push(impl_trait); continue; } if in_braces.peek(Token![use]) { let item_use: ItemUse = in_braces.parse()?; if meta.uses.iter().any(|candidate| candidate == &item_use) { return Err(in_braces.error("Duplicate \"use\" declaration")); } meta.uses.push(item_use); continue; } return Err(in_braces.error("Type meta must by \"use a::b::c\", \"#[derive(Type1, Type2, ..)]\" or \"impl Type\"")); } types_meta = Some(meta); } "exact_entrypoint_interface" => { if exact_entrypoint_interface.is_some() { panic!("Only one `dump` can be defined") } let lit: LitBool = input.parse()?; exact_entrypoint_interface = Some(lit.value); } "dump" => { if dump.is_some() { panic!("Only one `dump` can be defined") } let dump_lit: LitBool = input.parse()?; dump = Some(dump_lit.value); } name => panic!("Unknown field name: {}", name), } if !input.is_empty() { input.parse::<Token![,]>()?; } } let shader_kind = match shader_kind { Some(shader_kind) => shader_kind, None => panic!("Please provide a shader type e.g. `ty: \"vertex\"`"), }; let source_kind = match source_kind { Some(source_kind) => source_kind, None => panic!("Please provide a source e.g. 
`path: \"foo.glsl\"` or `src: \"glsl source code here ...\"`") }; let dump = dump.unwrap_or(false); Ok(Self { shader_kind, source_kind, include_directories, dump, macro_defines, types_meta: types_meta.unwrap_or_else(|| TypesMeta::default()), exact_entrypoint_interface: exact_entrypoint_interface.unwrap_or(false), }) } } pub(self) fn read_file_to_string(full_path: &Path) -> IoResult<String> { let mut buf = String::new(); File::open(full_path).and_then(|mut file| file.read_to_string(&mut buf))?; Ok(buf) } #[proc_macro] pub fn shader(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as MacroInput); let root = env::var("CARGO_MANIFEST_DIR").unwrap_or(".".into()); let root_path = Path::new(&root); if let SourceKind::Bytes(path) = input.source_kind { let full_path = root_path.join(&path); let bytes = if full_path.is_file() { fs::read(full_path).expect(&format!("Error reading source from {:?}", path)) } else { panic!( "File {:?} was not found ; note that the path must be relative to your Cargo.toml", path ); }; // The SPIR-V specification essentially guarantees that // a shader will always be an integer number of words assert_eq!(0, bytes.len() % 4); codegen::reflect( "Shader", unsafe { from_raw_parts(bytes.as_slice().as_ptr() as *const u32, bytes.len() / 4) }, input.types_meta, empty(), input.exact_entrypoint_interface, input.dump, ) .unwrap() .into() } else { let (path, full_path, source_code) = match input.source_kind { SourceKind::Src(source) => (None, None, source), SourceKind::Path(path) => { let full_path = root_path.join(&path); let source_code = read_file_to_string(&full_path) .expect(&format!("Error reading source from {:?}", path)); if full_path.is_file() { (Some(path.clone()), Some(full_path), source_code) } else { panic!("File {:?} was not found ; note that the path must be relative to your Cargo.toml", path); } } SourceKind::Bytes(_) => unreachable!(), }; let include_paths = input .include_directories .iter() 
.map(|include_directory| { let include_path = Path::new(include_directory); let mut full_include_path = root_path.to_owned(); full_include_path.push(include_path); full_include_path }) .collect::<Vec<_>>(); let (content, includes) = match codegen::compile( path, &root_path, &source_code, input.shader_kind, &include_paths, &input.macro_defines, ) { Ok(ok) => ok, Err(e) => panic!("{}", e.replace("(s): ", "(s):\n")), }; let input_paths = includes.iter().map(|s| s.as_ref()).chain( full_path .as_ref() .map(|p| p.as_path()) .map(codegen::path_to_str), ); codegen::reflect( "Shader", content.as_binary(), input.types_meta, input_paths, input.exact_entrypoint_interface, input.dump, ) .unwrap() .into() } }
mod globals; mod player; use gdnative::prelude::*; fn init(handle: InitHandle) { handle.add_class::<globals::Globals>(); handle.add_class::<player::Player>(); } godot_gdnative_init!(); godot_nativescript_init!(init); godot_gdnative_terminate!();
#[cfg(test)] mod tests; use regex::Regex; use crate::{ web::scraping::{Find, FindNext, Html, Text}, net::url::{Url, PathError}, util::bytes, }; pub use crate::web::scraping::Error; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Metadata { // On private files, the track id is an image. It's not worth doing OCR on that. pub id: Result<Option<Box<str>>, Error>, pub size: Result<usize, Error>, // This is a compressed file, so the bitrate is always 64 kbps, but we can use it // to discover the duration. pub preview: Result<Url, Error>, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Data { Expired, Available { download: Result<Url, Error>, metadata: Metadata, }, } pub fn scrap(doc: &Html, url: &Url) -> Data { log::trace!("scraping html: {:#?}", doc); let expired = doc .find_regex("div", "File.* does not exist.* on this server") .is_ok(); if expired { Data::Expired } else { Data::Available { download: scrap_download(doc, url), metadata: Metadata { id: scrap_id(doc), size: scrap_size(doc), preview: scrap_preview(url), } } } } fn scrap_size(doc: &Html) -> Result<usize, Error> { let text = doc .find_regex("font", "Size: ?")? .find_next("font")? .text_first()?; let size = text .parse::<bytes::Mb>() .or_else( |_| Err( Error::Format( format!("invalid size: '{}'", text).into() ) ) )? .into(); log::debug!("zippy file size: {}", size); Ok(size) } fn scrap_id(doc: &Html) -> Result<Option<Box<str>>, Error> { let private_file = doc .find(r#"img[src ^= "/fileName?key="]"#) .is_ok(); if private_file { log::debug!("zippy private file"); return Ok(None); } let id = doc .find_regex("font", "Name: ?")? .find_next("font")? .text_first()? .into(); log::debug!("zippy file id: {}", id); Ok(Some(id)) } fn scrap_download(doc: &Html, url: &Url) -> Result<Url, Error> { let script = doc .find_regex("script", r#"document\.getElementById\('dlbutton'\)\.href *= *"#)? 
.text_first()?; log::debug!("zippy download script:\n{}", script); let code_regex = Regex ::new( r#"(?x) document\.getElementById\('dlbutton'\)\.href \s* = \s* "(?P<path1>.*?)" \s* \+ \s* \((?P<expr>.*?)\) \s* \+ \s* "(?P<path2>.*?)" "# ) .expect("invalid regex"); let captures = code_regex .captures(script) .ok_or( Error::NotFound( "download url (dlbutton)".into() ) )?; let path1 = &captures["path1"]; let path2 = &captures["path2"]; let expr = &captures["expr"]; let expr_result = fasteval ::ez_eval(&expr, &mut fasteval::EmptyNamespace) .or_else( |error| Err( Error::Format( format!("invalid math expression ({}): {:#?}", expr, error).into() ) ) )?; log::debug!("zippy download script expr result: {}", expr_result); let build_url = || -> Result<Url, PathError> { let url = url .clone() .dissect() .clear_path() .push_path(path1)? .push_path(expr_result.to_string())? .push_path( path2.trim_start_matches('/') )? .assemble(); Ok(url) }; let url = build_url() .map_err( |error| Error::Format( format!("invalid url '{}': {}", url, error).into() ) )?; log::debug!("zippy download url: {}", url); Ok(url) } fn scrap_preview(url: &Url) -> Result<Url, Error> { let mut dissected_url = url.dissect(); let key: Box<str> = dissected_url .path() .nth(1) .ok_or( Error::Format( "missing key from zippy url".into() ) )? .into(); dissected_url .push_path("/downloadMusicHQ") .map_err( |error| Error::Format( format!("invalid url '{}': {}", url, error).into() ) )? .append_query("key", &key); let preview_url = dissected_url.assemble(); log::debug!("zippy preview url: {}", preview_url); Ok(preview_url) } // TODO: write tests
macro_rules! parser { ($name:ident : $fun:ident -> $out:ty) => ( #[allow(non_camel_case_types)] pub struct $name; impl NomParser for $name { type Output = $out; fn parse2(data: &[u8])-> crate::ParseResult<Self::Output> { $fun(data).map_err(FontError::from) } } impl FixedSize for $name { const SIZE: usize = std::mem::size_of::<$out>(); } ) } macro_rules! parse_field { ($start:expr, $input:expr, ?$ptr:ident $parser:ident, $field:expr) => ({ let (i, offset) = <$ptr as NomParser>::parse2($input)?; if offset != 0 { let data = &$start[offset as usize ..]; let value = <$parser as Parser>::parse(data)?; (i, value) } else { (i, Default::default()) } }); ($start:expr, $input:expr, @ $ptr:ident $parser:ident, $field:expr) => ({ let (i, offset) = <$ptr as NomParser>::parse2($input)?; require!(offset != 0); let data = slice!($start, offset as usize ..); let value = <$parser as Parser>::parse(data)?; (i, value) }); ($start:expr, $input:expr, $parser:ident, $field:expr) => ( <$parser as NomParser>::parse2($input)? ); } macro_rules! field_size { (@ $ptr:ident $(?)* $parser:ident) => (<$ptr as FixedSize>::SIZE); ($parser:ident) => (<$parser as FixedSize>::SIZE); } macro_rules! table { ($name:ident { $( $(#[$meta:meta])* $(?$ptr_opt:ident)* $(@$ptr:ident)* $parser:ident $field:tt, )* } ) => ( #[derive(Clone, Debug)] pub struct $name { $( $(#[$meta])* pub $field: <$parser as Parser>::Output, )* } impl NomParser for $name { type Output = $name; fn parse2(input: &[u8]) -> crate::ParseResult<$name> { let i = input; $( let (i, $field) = parse_field!(input, i, $(?$ptr_opt)* $(@$ptr)* $parser, $field); )* Ok((i, $name { $( $field, )* })) } } impl FixedSize for $name { const SIZE: usize = 0 $(+ field_size!($(@$ptr_opt)* $(@$ptr)* $parser) )*; } ); }
//! Schannel is a pure-Rust wrapper to provide SSL functionality under windows by using schannel, which
//! removes the requirement of openssl.

#[macro_use]
extern crate log;
extern crate winapi;
extern crate crypt32;
extern crate secur32;
extern crate rustc_serialize;
#[cfg(feature = "hyper")]
extern crate hyper;
#[cfg(feature = "hyper")]
pub mod hyperimpl;

use std::error::Error;
use std::fmt::{self, Display};
use std::io::prelude::*;
use std::io::Error as IoError;
use std::ptr;
use std::ffi::OsStr;
use std::os::windows::ffi::OsStrExt;
use std::slice;
use std::sync::Arc;
use winapi::*;
use secur32::*;
use crypt32::*;
use rustc_serialize::hex::{FromHex};

// TODO: General error handling and checks (if initialized for credential, stream_sizes, ...)
// TODO: renegotiation, disconnect?
// TODO: Manual certificate validation

// RAII newtypes around raw schannel/crypt32 handles; each has a Drop impl
// at the bottom of this file that releases the underlying OS resource.
#[derive(Debug)]
struct SchannelCertStore(*mut c_void);
#[derive(Debug)]
struct SchannelCertCtxt(*const winapi::wincrypt::CERT_CONTEXT);
#[derive(Debug)]
struct SchannelCredHandle(CredHandle);
#[derive(Debug)]
struct SchannelCtxtHandle(CtxtHandle);

#[derive(Debug)]
pub enum SslInfo {
    /// Configuration related to SSL clients
    Client(SslInfoClient),
    /// Configuration related to SSL servers
    Server(SslInfoServer)
}

/// SSL client wrapper configuration
#[derive(Debug)]
pub struct SslInfoClient {
    /// Whether to validate the peer certificate
    pub disable_peer_verification: bool,
    /// Whether to check for certificate revocation
    pub disable_revocation_check: bool,
    /// The allowed SSL versions
    pub ssl_method: SslMethod
}

impl SslInfoClient {
    /// Get defaults for client configuration
    /// (verification and revocation checks enabled, any TLS 1.x version).
    pub fn new() -> SslInfoClient {
        return SslInfoClient {
            disable_peer_verification: false,
            disable_revocation_check: false,
            ssl_method: SslMethod::Tlsv1_X
        }
    }
}

/// SSL wrapper configuration, which only applies to SSL peers/servers
#[derive(Debug)]
pub struct SslInfoServer {
    /// The allowed SSL versions
    pub ssl_method: SslMethod,
    // Keep the store and certificate context alive for the lifetime of the
    // config; dropped (and the OS handles freed) when the last Arc goes away.
    cert_store: Arc<SchannelCertStore>,
    cert_ctxt: Arc<SchannelCertCtxt>
}

// NOTE(review): Send/Sync are asserted manually because the struct holds raw
// pointers. Soundness rests on the OS cert handles being usable across
// threads — TODO confirm against the CertFindCertificateInStore docs.
unsafe impl Send for SslInfoServer {}
unsafe impl Sync for SslInfoServer {}

/// Which windows certificate store registry hive to open.
#[derive(Debug)]
pub enum SslCertStore {
    /// HKEY_LOCAL_MACHINE
    LocalMachine,
    /// HKEY_CURRENT_USER
    CurrentUser,
    /// HKEY_USERS
    User
}

/// SSL certificate conditions
/// used to load a certificate matching a condition from the windows certificate store
pub enum SslCertCondition {
    /// Check if the sha1 thumbprint hash of a certificate matches a given string
    SHA1HashIdentical(String),
    /// Check if the subject name contains a given string
    SubjectContains(String)
}

/// internal, type of the value passed to the API to prevent the data from going out of scope
enum SslCertConditionValue {
    None,
    U8Vector(Vec<u8>),
    U16Vector(Vec<u16>)
}

/// SSL wrapper for generic streams
#[derive(Debug, Clone)]
pub struct SslStream<S> {
    stream: S,
    info: Arc<SslInfo>,
    /// A pointer to a null-terminated string that uniquely identifies the target server (e.g. www.google.de)
    target_name: Option<String>,
    ctxt: Arc<SchannelCtxtHandle>,
    cred_handle: Arc<SchannelCredHandle>,
    // Filled in by do_handshake via QueryContextAttributesW; cbHeader == 0
    // is used as the "not initialized" sentinel in Write::write.
    stream_sizes: SecPkgContext_StreamSizes,
    // Decrypted-but-not-yet-delivered plaintext.
    read_buf: Vec<u8>,
    // Received-but-not-yet-decrypted ciphertext (SECBUFFER_EXTRA leftovers).
    read_buf_raw: Vec<u8>
}

/// Possible errors which can occur when doing SSL operations (e.g. schannel failure)
#[derive(Debug)]
pub enum SslError {
    CertCommonNameInvalid,
    CertAuthorityInvalid,
    CertExpired,
    CertRevoced,
    CertInvalid,
    ProtocolError,
    VersionCipherMismatch,
    HandshakeFailedNoStreamSizes,
    CertificationStoreOpenFailed,
    CertNotFound,
    IoError(std::io::Error),
    /// Raw SECURITY_STATUS code that didn't map to a known case.
    UnknownError(i32)
}

#[derive(Debug)]
#[allow(non_camel_case_types)]
pub enum SslMethod {
    /// Allow any SSL version
    All,
    /// Allow every SSL version, which is atleast TLSv1 (default)
    Tlsv1_X,
    Tlsv1,
    Tlsv1_1,
    Tlsv1_2,
    Sslv2,
    Sslv3,
    Sslv23,
    Dtlsv1,
    Dtlsv1_2
}

impl SslMethod {
    /// Map the enum onto the SP_PROT_* bitflags expected by SCHANNEL_CRED.
    fn get_winapi_flags(&self) -> DWORD {
        match *self {
            SslMethod::All => SP_PROT_ALL,
            SslMethod::Tlsv1_X => SP_PROT_TLS1 | SP_PROT_TLS1_0 | SP_PROT_TLS1_1 | SP_PROT_TLS1_2,
            SslMethod::Tlsv1 => SP_PROT_TLS1_0,
            SslMethod::Tlsv1_1 => SP_PROT_TLS1_1,
            SslMethod::Tlsv1_2 => SP_PROT_TLS1_2,
            SslMethod::Sslv2 => SP_PROT_SSL2,
            SslMethod::Sslv3 => SP_PROT_SSL3,
            SslMethod::Sslv23 => SP_PROT_SSL2 | SP_PROT_SSL3,
            SslMethod::Dtlsv1 => SP_PROT_DTLS,
            SslMethod::Dtlsv1_2 => SP_PROT_DTLS1_X // todo: check this
        }
    }
}

impl Display for SslError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            SslError::UnknownError(err_code) => write!(f, "An unknown error with code({}) occurred", err_code),
            _ => write!(f, "{:?}", self)
        }
    }
}

impl Error for SslError {
    fn description(&self) -> &str {
        match *self {
            SslError::CertCommonNameInvalid => "server certificate error",
            SslError::CertAuthorityInvalid => "certificate chain error [invalid ca]",
            SslError::CertExpired => "certificate expired",
            SslError::CertRevoced => "certificate revoced",
            SslError::CertInvalid => "certificate invalid",
            SslError::ProtocolError => "internal error",
            SslError::VersionCipherMismatch => "insufficient security",
            SslError::HandshakeFailedNoStreamSizes => "received no stream information",
            SslError::CertificationStoreOpenFailed => "could not open the certificate store [permissions?]",
            SslError::CertNotFound => "certificate not found",
            SslError::IoError(_) => "i/o error occured",
            // This should never happen, since it should be handled separately (as in SslError::fmt)
            SslError::UnknownError(_) => "???"
        }
    }

    fn cause(&self) -> Option<&Error> {
        None
    }
}

// Translate a raw SECURITY_STATUS / cert-chain error code into SslError.
// Unrecognized codes fall through to UnknownError with the raw code.
macro_rules! map_security_error {
    ($x:expr) => (match $x {
        SEC_E_WRONG_PRINCIPAL|CERT_E_CN_NO_MATCH => SslError::CertCommonNameInvalid,
        SEC_E_UNTRUSTED_ROOT|CERT_E_UNTRUSTEDROOT => SslError::CertAuthorityInvalid,
        SEC_E_CERT_EXPIRED|CERT_E_EXPIRED => SslError::CertExpired,
        CRYPT_E_REVOKED => SslError::CertRevoced,
        SEC_E_CERT_UNKNOWN|CERT_E_ROLE => SslError::CertInvalid,
        // SSL Errors which we map to a protocol error
        SEC_E_ILLEGAL_MESSAGE | SEC_E_DECRYPT_FAILURE | SEC_E_MESSAGE_ALTERED | SEC_E_INTERNAL_ERROR => SslError::ProtocolError,
        // Errors which are releated to an invalid version or unsupported cipher
        SEC_E_UNSUPPORTED_FUNCTION| SEC_E_ALGORITHM_MISMATCH => SslError::VersionCipherMismatch,
        _ => SslError::UnknownError($x)
    })
}

// Extract a value from an enum, ignoring default values and mapping to 0 pointer (since that case cannot occur when this is used)
macro_rules! match_ptr_ignore {
    ($st:expr, $($pat:pat => $result:expr),*) => (match $st {
        $($pat => $result),*,
        // This case cannot happen since when this macro is used, only cases in $matches are possible -> silence the compiler
        _ => 0 as *mut _
    })
}

impl SslInfoServer {
    /// Create a new SslInfo containing the certificate loaded according to the params
    ///
    /// Opens the "My" (personal) certificate store of the given hive read-only,
    /// then finds the first certificate matching `cond`. Both handles are kept
    /// alive in the returned struct and released on drop.
    pub fn new(store: SslCertStore, cond: SslCertCondition) -> Result<SslInfoServer, SslError> {
        let store_location: DWORD = match store {
            SslCertStore::CurrentUser => CERT_SYSTEM_STORE_CURRENT_USER,
            SslCertStore::User => CERT_SYSTEM_STORE_USERS,
            SslCertStore::LocalMachine => CERT_SYSTEM_STORE_LOCAL_MACHINE,
        } | CERT_STORE_READONLY_FLAG;
        // Store name must be a NUL-terminated UTF-16 string for the W API.
        let mut store_name = OsStr::new("My").encode_wide().chain(Some(0)).collect::<Vec<_>>();
        let handle = unsafe {
            CertOpenStore(
                CERT_STORE_PROV_SYSTEM as *mut i8,
                0,
                0,
                store_location,
                store_name.as_mut_ptr() as *mut c_void
            )
        };
        if handle == ptr::null_mut() {
            return Err(SslError::CertificationStoreOpenFailed)
        }

        // find_param_data keeps the search payload (hash bytes / UTF-16 name)
        // alive until after CertFindCertificateInStore returns; the raw
        // pointers below point into it.
        let mut find_param;
        let mut find_param_data: SslCertConditionValue;
        let find_param_ptr;
        let find_type = match cond {
            SslCertCondition::SHA1HashIdentical(hash) => {
                // NOTE(review): .unwrap() panics on a non-hex thumbprint
                // string — user input reaches this; consider mapping to an
                // SslError instead.
                find_param_data = SslCertConditionValue::U8Vector(hash.from_hex().unwrap());
                let mut sha1_len: u32 = 0;
                let sha1_hash = match_ptr_ignore!(find_param_data,
                    SslCertConditionValue::U8Vector(ref mut hash) => {
                        sha1_len = hash.len() as u32;
                        hash.as_mut_ptr()
                    }
                );
                find_param = CRYPT_HASH_BLOB {
                    cbData: sha1_len,
                    pbData: sha1_hash
                };
                find_param_ptr = &mut find_param as *mut _ as *mut c_void;
                CERT_FIND_SHA1_HASH
            },
            SslCertCondition::SubjectContains(name) => {
                find_param_data = SslCertConditionValue::U16Vector(OsStr::new(&name).encode_wide().chain(Some(0)).collect::<Vec<_>>());
                let unicode_name = match_ptr_ignore!(find_param_data,
                    SslCertConditionValue::U16Vector(ref mut name) => name.as_mut_ptr()
                );
                find_param_ptr = unicode_name as *mut c_void;
                CERT_FIND_SUBJECT_STR
            }
        };
        let ctxt = unsafe {
            CertFindCertificateInStore(
                handle,
                X509_ASN_ENCODING | PKCS_7_ASN_ENCODING,
                0,
                find_type,
                find_param_ptr,
                ptr::null_mut()
            )
        };
        if ctxt == ptr::null_mut() {
            // NOTE(review): the store `handle` is not closed on this early
            // return — it was opened above but no SchannelCertStore wrapper
            // owns it yet. Looks like a handle leak; verify.
            return Err(SslError::CertNotFound)
        }
        return Ok(SslInfoServer {
            cert_store: Arc::new(SchannelCertStore(handle)),
            cert_ctxt: Arc::new(SchannelCertCtxt(ctxt)),
            ssl_method: SslMethod::Tlsv1_X
        })
    }
}

/// ARC mut macro (unsafe) to fetch stored handles
/// (casts away both the Arc sharing and the const-ness so the SSPI calls,
/// which take *mut handles, can be fed; relies on single-threaded use).
macro_rules! get_mut_handle(
    ($self_:ident, $field:ident) => {
        &(*$self_.$field).0 as *const SecHandle as *mut SecHandle
    };
);

impl<S: Read + Write> SslStream<S> {
    /// Instantiate a new SSL-stream
    ///
    /// Only stores state; no handshake happens until `init` is called.
    pub fn new(stream: S, ssl_info: &Arc<SslInfo>) -> Result<SslStream<S>, SslError> {
        let ssl_stream = SslStream {
            stream: stream,
            info: ssl_info.clone(),
            target_name: None,
            // Zeroed until the handshake queries the real sizes.
            stream_sizes: SecPkgContext_StreamSizes {
                cbHeader: 0,
                cbTrailer: 0,
                cbMaximumMessage: 0,
                cBuffers: 0,
                cbBlockSize: 0
            },
            read_buf: Vec::new(),
            read_buf_raw: Vec::new(),
            ctxt: Arc::new(SchannelCtxtHandle(CtxtHandle { dwLower: 0, dwUpper: 0 })),
            cred_handle: Arc::new(SchannelCredHandle(CredHandle { dwLower: 0, dwUpper: 0 }))
        };
        return Ok(ssl_stream)
    }

    /// Set the server name used for SNI / certificate validation.
    /// Must be called before `init` to take effect.
    pub fn set_host(&mut self, host: &str) {
        self.target_name = Some(host.to_owned());
    }

    /// Borrow the wrapped transport stream.
    pub fn get_ref(&self) -> &S {
        &self.stream
    }

    /// Mutably borrow the wrapped transport stream.
    pub fn get_mut(&mut self) -> &mut S {
        &mut self.stream
    }

    /// Acquire the schannel credentials handle (client or server flavour)
    /// into self.cred_handle. Returns Some(error) on failure, None on success.
    fn get_credentials_handle(&mut self) -> Option<SslError> {
        let ssl_info = &*self.info;
        let mut cert_amount: DWORD = 0;
        let ssl_method;
        let mut flags = 0;
        let mut certs;
        let cert_ctxts = match ssl_info {
            &SslInfo::Client(ref info) => {
                flags = SCH_CRED_NO_DEFAULT_CREDS;
                // Either schannel validates the peer cert automatically or
                // the caller opted out entirely.
                if info.disable_peer_verification {
                    flags |= SCH_CRED_MANUAL_CRED_VALIDATION;
                } else {
                    flags |= SCH_CRED_AUTO_CRED_VALIDATION;
                }
                if info.disable_revocation_check {
                    flags |= SCH_CRED_IGNORE_NO_REVOCATION_CHECK | SCH_CRED_IGNORE_REVOCATION_OFFLINE;
                } else {
                    flags |= SCH_CRED_REVOCATION_CHECK_CHAIN;
                }
                ssl_method = info.ssl_method.get_winapi_flags();
                ptr::null_mut()
            }
            &SslInfo::Server(ref info) => {
                // Server presents exactly one certificate.
                cert_amount = 1;
                certs = [info.cert_ctxt.0];
                ssl_method = info.ssl_method.get_winapi_flags();
                certs.as_mut_ptr() as *mut *const CERT_CONTEXT
            }
        };
        let mut creds = SCHANNEL_CRED {
            dwVersion: SCHANNEL_CRED_VERSION,
            grbitEnabledProtocols: ssl_method,
            dwFlags: flags,
            dwCredFormat: 0,
            aphMappers: ptr::null_mut(),
            paCred: cert_ctxts,
            cMappers: 0,
            palgSupportedAlgs: ptr::null_mut(),
            cSupportedAlgs: 0,
            dwSessionLifespan: 0,
            cCreds: cert_amount,
            dwMaximumCipherStrength: 0,
            hRootStore: ptr::null_mut(),
            dwMinimumCipherStrength: 0
        };
        let cred_use = match ssl_info {
            &SslInfo::Client(_) => SECPKG_CRED_OUTBOUND,
            &SslInfo::Server(_) => SECPKG_CRED_INBOUND
        };
        let cred_handle = get_mut_handle!(self, cred_handle);
        // Package name ("Microsoft Unified Security Protocol Provider") as
        // NUL-terminated UTF-16 for the W API.
        let mut sec_package = OsStr::new(UNISP_NAME).encode_wide().chain(Some(0)).collect::<Vec<_>>();
        let status = unsafe {
            secur32::AcquireCredentialsHandleW(
                ptr::null_mut(),
                sec_package.as_mut_ptr(),
                cred_use,
                ptr::null_mut(),
                &mut creds as *mut _ as *mut c_void,
                None,
                ptr::null_mut(),
                cred_handle as *mut CredHandle,
                ptr::null_mut()
            )
        };
        if status != SEC_E_OK {
            return Some(map_security_error!(status))
        }
        return None
    }

    /// Context-requirement flags shared by client and server handshakes.
    fn get_ssl_flags(&self) -> u32 {
        return ISC_REQ_SEQUENCE_DETECT | ISC_REQ_REPLAY_DETECT | ISC_REQ_CONFIDENTIALITY | ISC_REQ_ALLOCATE_MEMORY | //ISC_REQ_MANUAL_CRED_VALIDATION |
            ISC_REQ_STREAM;
    }

    /// Drive the SSPI handshake loop (Initialize/AcceptSecurityContext)
    /// until the context is established, then query the stream sizes.
    /// Returns Some(error) on failure, None on success.
    fn do_handshake(&mut self) -> Option<SslError> {
        let ssl_info = &*self.info;
        let mut read_buffer = Vec::new();
        let flags = self.get_ssl_flags();
        let mut status = SEC_I_CONTINUE_NEEDED;
        let mut initial: bool = true;
        // A server must read the client hello first; a client speaks first.
        let mut do_read: bool = match ssl_info {
            &SslInfo::Client(_) => false,
            &SslInfo::Server(_) => true
        };
        while status == SEC_I_CONTINUE_NEEDED || status == SEC_E_INCOMPLETE_MESSAGE || status == SEC_I_INCOMPLETE_CREDENTIALS {
            if do_read && (status == SEC_E_INCOMPLETE_MESSAGE || read_buffer.len() == 0) {
                let mut buf = [0; 8192];
                // NOTE(review): read errors are silently treated as EOF here.
                let read_bytes = match self.stream.read(&mut buf) {
                    Ok(x) => x,
                    Err(_) => 0
                };
                // Nothing read, nothing about the state changes
                if read_bytes == 0 {
                    debug!("Read nothing");
                    break;
                }
                read_buffer.extend(buf[..read_bytes].iter().cloned());
                debug!("Reading {} bytes -> {}", read_bytes, read_buffer.len());
            }
            // Setup input buffers, buffer 0 is used for data received from the server, leftover data will be placed in buffer 1 (with buffer type SECBUFFER_EXTRA)
            let mut in_buffers = [
                SecBuffer {
                    pvBuffer: &mut read_buffer[..] as *mut _ as *mut c_void,
                    cbBuffer: read_buffer.len() as u32,
                    BufferType: SECBUFFER_TOKEN
                },
                SecBuffer {
                    pvBuffer: ptr::null_mut(),
                    cbBuffer: 0,
                    BufferType: SECBUFFER_EMPTY
                }
            ];
            let mut in_buffer_desc = SecBufferDesc {
                cBuffers: 2,
                pBuffers: &mut in_buffers[0] as *mut SecBuffer,
                ulVersion: SECBUFFER_VERSION
            };
            // Setup output buffers (schannel allocates pvBuffer for us because
            // of ISC_REQ_ALLOCATE_MEMORY; freed below via FreeContextBuffer).
            let mut out_buffers = [
                SecBuffer { pvBuffer: ptr::null_mut(), BufferType: SECBUFFER_TOKEN, cbBuffer: 0}
            ];
            let mut out_buffer_desc = SecBufferDesc {
                cBuffers: 1,
                pBuffers: &mut out_buffers[0] as *mut SecBuffer,
                ulVersion: SECBUFFER_VERSION
            };
            let mut out_flags: DWORD = 0;
            let ctxt = get_mut_handle!(self, ctxt);
            let cred_handle = get_mut_handle!(self, cred_handle);
            // First round passes NULL context; later rounds pass the partial one.
            let stored_ctx = match initial {
                true => ptr::null_mut(),
                false => ctxt as *mut _
            };
            match ssl_info {
                &SslInfo::Client(_) => {
                    status = unsafe {
                        do_read = true;
                        let mut target_name = ptr::null_mut();
                        let mut in_buffer_desc_ptr = ptr::null_mut();
                        if initial && self.target_name != None {
                            // NOTE(review): this looks like a dangling pointer —
                            // the collect()ed Vec is a temporary dropped at the
                            // end of this statement, so target_name points to
                            // freed memory when InitializeSecurityContextW runs.
                            // The Vec should be bound to a local that outlives
                            // the call. Verify.
                            target_name = OsStr::new(&self.target_name.as_mut().unwrap()).encode_wide().chain(Some(0).into_iter()).collect::<Vec<_>>().as_mut_ptr();
                        } else {
                            in_buffer_desc_ptr = &mut in_buffer_desc;
                        }
                        secur32::InitializeSecurityContextW(
                            cred_handle as *mut CredHandle,
                            stored_ctx,
                            target_name,
                            flags,
                            0,
                            0,
                            in_buffer_desc_ptr,
                            0,
                            ctxt,
                            &mut out_buffer_desc,
                            &mut out_flags as *mut u32,
                            ptr::null_mut()
                        )
                    };
                },
                &SslInfo::Server(_) => {
                    status = unsafe {
                        secur32::AcceptSecurityContext(
                            cred_handle as *mut CredHandle,
                            stored_ctx,
                            &mut in_buffer_desc,
                            flags,
                            0,
                            ctxt,
                            &mut out_buffer_desc,
                            &mut out_flags as *mut u32,
                            ptr::null_mut()
                        )
                    }
                }
            }
            if (status != SEC_E_OK && status != SEC_E_INVALID_TOKEN && status != SEC_I_CONTINUE_NEEDED) || ((out_flags & ISC_RET_EXTENDED_ERROR) != 0) {
                if !initial {
                    continue;
                } else {
                    return Some(map_security_error!(status))
                }
            } else {
                initial = false;
                // We have some data to send to the server
                if out_buffers[0].cbBuffer != 0 && out_buffers[0].pvBuffer != ptr::null_mut() {
                    debug!("--WRITING {}", out_buffers[0].cbBuffer);
                    // NOTE(review): Write::write may write fewer bytes than
                    // requested; the returned count is discarded here
                    // (write_all semantics intended?). Also unwrap() panics
                    // on transport errors mid-handshake.
                    self.stream.write(unsafe { slice::from_raw_parts(out_buffers[0].pvBuffer as *mut u8, out_buffers[0].cbBuffer as usize) }).unwrap();
                    unsafe { FreeContextBuffer(out_buffers[0].pvBuffer); }
                }
            }
            if status == SEC_E_INCOMPLETE_MESSAGE {
                debug!("Incomplete; Continue");
                continue;
            }
            if status == SEC_E_OK {
                // Handshake finished: cache header/trailer/message sizes for
                // the Read/Write impls.
                let status = unsafe { QueryContextAttributesW(ctxt, SECPKG_ATTR_STREAM_SIZES, &mut self.stream_sizes as *mut _ as *mut c_void) };
                if status != SEC_E_OK {
                    return Some(SslError::HandshakeFailedNoStreamSizes);
                }
                debug!("-[HANDSHAKE] done {}", in_buffers[1].BufferType == SECBUFFER_EXTRA);
                return None
            }
            // There is extra data to be handled TODO: Handle extra data on success and this might be bugged
            if in_buffers[1].BufferType == SECBUFFER_EXTRA {
                debug!("extra data todo this is bugged");
                // Keep only the unconsumed tail of read_buffer for the next round.
                let pos = read_buffer.len() - in_buffers[1].cbBuffer as usize;
                let end_pos = pos + in_buffers[1].cbBuffer as usize;
                read_buffer = read_buffer[pos..end_pos].to_vec();
            } else {
                read_buffer.clear();
            }
        }
        return Some(map_security_error!(status))
    }

    /// Initialize the connection for the usage of SSL, including performing a handshake
    pub fn init(&mut self) -> Option<SslError> {
        match self.get_credentials_handle() {
            Some(x) => return Some(x),
            None => {}
        };
        match self.do_handshake() {
            Some(x) => return Some(x),
            None => {}
        };
        return None
    }
}

impl<S: Read + Write> Read for SslStream<S> {
    // Reads ciphertext from the transport, decrypts via DecryptMessage, and
    // hands back up to dst.len() plaintext bytes. Surplus plaintext is kept
    // in read_buf; surplus ciphertext (SECBUFFER_EXTRA) in read_buf_raw.
    fn read(&mut self, dst: &mut [u8]) -> std::io::Result<usize> {
        // If we have some data in the buffer already just return it.
        if self.read_buf.len() > 0 {
            let mut dst_vec: Vec<u8> = Vec::new();
            let iterator_len;
            let available_len;
            {
                available_len = self.read_buf.len();
                let iterator = self.read_buf.iter().take(dst.len());
                iterator_len = iterator.len();
                dst_vec.extend(iterator);
            }
            // Make sure we do not read the same data multiple times
            if iterator_len < available_len {
                self.read_buf = self.read_buf[iterator_len..].to_vec();
            } else {
                self.read_buf.clear();
            }
            for (d, s) in dst.iter_mut().zip(dst_vec.iter()) {
                *d = *s;
            }
            return Ok(dst_vec.len());
        }
        let mut dst_vec: Vec<u8> = Vec::new();
        let mut data_left = dst.len();
        // DecryptMessage needs 4 spare buffers for header/data/trailer/extra.
        let mut buffers = [
            SecBuffer { BufferType: SECBUFFER_EMPTY, cbBuffer: 0, pvBuffer: ptr::null_mut() },
            SecBuffer { BufferType: SECBUFFER_EMPTY, cbBuffer: 0, pvBuffer: ptr::null_mut() },
            SecBuffer { BufferType: SECBUFFER_EMPTY, cbBuffer: 0, pvBuffer: ptr::null_mut() },
            SecBuffer { BufferType: SECBUFFER_EMPTY, cbBuffer: 0, pvBuffer: ptr::null_mut() },
            SecBuffer { BufferType: SECBUFFER_EMPTY, cbBuffer: 0, pvBuffer: ptr::null_mut() }
        ];
        let mut message = SecBufferDesc { ulVersion: SECBUFFER_VERSION, cBuffers: 5, pBuffers: &mut buffers[0] as *mut SecBuffer};
        //TODO: maybe handle that as separate reads/more efficiently?
        let mut status;
        let ctxt = get_mut_handle!(self, ctxt);
        let mut buf = vec![0 as u8; 0];
        loop {
            if data_left == 0 {
                break;
            }
            // If we have some raw data stored to decrypt, fetch it
            if self.read_buf_raw.len() > 0 {
                buf.extend(&self.read_buf_raw[..]); //is a .clone() necessary here?
                debug!("[EXTRA] read {}", self.read_buf_raw.len());
                self.read_buf_raw.clear();
            } else {
                let mut i_read_buf = vec![0 as u8; 8192];
                // NOTE(review): unwrap() on a transport read inside Read::read —
                // a transport error panics instead of propagating as Err.
                let bytes = self.stream.read(&mut i_read_buf).unwrap(); //Error Handling TODO
                if bytes > 0 {
                    buf.extend(&i_read_buf[..bytes]);
                }
                // NOTE(review): `bytes + buf.len() == 0` only breaks when BOTH
                // the read returned 0 and no pending ciphertext exists; with a
                // non-empty buf and a 0-byte read the loop can spin on
                // SEC_E_INCOMPLETE_MESSAGE. Verify intent.
                if bytes + buf.len() == 0 {
                    //TODO: store unused buf data on break (read_buf_raw)
                    break;
                }
            }
            // (Re)point buffer 0 at the accumulated ciphertext; DecryptMessage
            // decrypts in place and retypes the buffers on return.
            buffers[0].pvBuffer = buf.as_mut_ptr() as *mut c_void;
            buffers[0].cbBuffer = buf.len() as u32;
            buffers[0].BufferType = SECBUFFER_DATA;
            buffers[1].BufferType = SECBUFFER_EMPTY;
            buffers[2].BufferType = SECBUFFER_EMPTY;
            buffers[3].BufferType = SECBUFFER_EMPTY;
            buffers[4].BufferType = SECBUFFER_EMPTY;
            unsafe {
                status = DecryptMessage(ctxt as *mut SecHandle, &mut message as *mut SecBufferDesc, 0, ptr::null_mut());
                debug!("decrypt status: {} -> {}", buf.len(), status);
                // Store extra data (not decrypted yet = raw), if available
                if status == SEC_E_INCOMPLETE_MESSAGE {
                    // Not a whole TLS record yet — read more and retry.
                    continue;
                } else if status != SEC_E_OK {
                    return Err(IoError::new(std::io::ErrorKind::Other, format!("DecryptMessage failed with status {}", status)));
                }
                buf.clear();
                let mut expecting_more = false;
                match buffers.iter().find(|&buf| buf.BufferType == SECBUFFER_EXTRA) {
                    Some(extra_buf) => {
                        let extra_buf = std::slice::from_raw_parts(extra_buf.pvBuffer as *mut u8, extra_buf.cbBuffer as usize);
                        debug!("[EXTRA] store {}", extra_buf.len());
                        self.read_buf_raw.extend(extra_buf);
                        expecting_more = true;
                    },
                    None => ()
                }
                // Store decrypted data
                match buffers.iter().find(|&buf| buf.BufferType == SECBUFFER_DATA) {
                    Some(data_buffer) => {
                        debug!("data length: {}", data_buffer.cbBuffer);
                        let data_buffer = std::slice::from_raw_parts(data_buffer.pvBuffer as *mut u8, data_buffer.cbBuffer as usize);
                        let iterator = data_buffer.iter().take(data_left);
                        let iterator_len = iterator.len();
                        dst_vec.extend(iterator);
                        data_left -= iterator_len;
                        // store additional decrypted data
                        if data_buffer.len() > iterator_len {
                            self.read_buf.extend(data_buffer.iter().skip(iterator_len));
                            debug!("read_buf: {} bytes", self.read_buf.len());
                        }
                        //println!("\n\nContent ({}) \n\n{}", iterator_len, std::str::from_utf8(&dst[..dst.len()-data_left]).unwrap());
                        buf.clear();
                    },
                    None => {
                        debug!("No data buffer, incomplete: {}", status == SEC_E_INCOMPLETE_MESSAGE)
                    }
                };
                if !expecting_more {
                    break;
                }
            }
        }
        if dst_vec.len() == 0 {
            return Ok(0)
        }
        debug!("conv_len: {}/{} ({})", dst_vec.len(), dst.len(), data_left);
        // Copy vector into output slice
        for (d, s) in dst.iter_mut().zip(dst_vec.iter()) {
            *d = *s;
        }
        //println!("req {}", std::str::from_utf8(dst).unwrap());
        Ok(dst_vec.len())
    }
}

impl<S: Read + Write> Write for SslStream<S> {
    // Encrypts buf via EncryptMessage (header + payload + trailer laid out in
    // one contiguous buffer) and writes the ciphertext to the transport.
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        let mut buffers = [
            SecBuffer { BufferType: SECBUFFER_EMPTY, cbBuffer: 0, pvBuffer: ptr::null_mut() },
            SecBuffer { BufferType: SECBUFFER_EMPTY, cbBuffer: 0, pvBuffer: ptr::null_mut() },
            SecBuffer { BufferType: SECBUFFER_EMPTY, cbBuffer: 0, pvBuffer: ptr::null_mut() },
            SecBuffer { BufferType: SECBUFFER_EMPTY, cbBuffer: 0, pvBuffer: ptr::null_mut() },
            SecBuffer { BufferType: SECBUFFER_EMPTY, cbBuffer: 0, pvBuffer: ptr::null_mut() }
        ];
        let mut message = SecBufferDesc { ulVersion: SECBUFFER_VERSION, cBuffers: 5, pBuffers: &mut buffers[0] as *mut SecBuffer };
        // cbHeader == 0 doubles as the "handshake never ran" sentinel.
        if self.stream_sizes.cbHeader == 0 {
            return Err(IoError::new(std::io::ErrorKind::Other, "SSLStream doesn't seem initialized. Maybe you forgot to call .init?"));
        }
        // Layout: [header | payload | trailer-and-padding], all in `buffer`.
        // NOTE(review): the trailing extend reserves cbMaximumMessage - buf.len()
        // extra bytes and underflows (panic) when buf.len() > cbMaximumMessage —
        // see the TODO below about respecting the message size limit.
        let mut buffer = vec![0 as u8; self.stream_sizes.cbHeader as usize];
        buffer.extend(buf.iter().cloned());
        buffer.extend(vec![0 as u8; self.stream_sizes.cbTrailer as usize + self.stream_sizes.cbMaximumMessage as usize - buf.len()]);
        let mut ptr: *mut u8 = buffer.as_mut_ptr();
        buffers[0].pvBuffer = ptr as *mut c_void;
        buffers[0].cbBuffer = self.stream_sizes.cbHeader;
        buffers[0].BufferType = SECBUFFER_STREAM_HEADER;
        ptr = (ptr as usize + self.stream_sizes.cbHeader as usize) as *mut u8;
        buffers[1].pvBuffer = ptr as *mut c_void;
        buffers[1].cbBuffer = buf.len() as u32;
        buffers[1].BufferType = SECBUFFER_DATA;
        ptr = (ptr as usize + buf.len()) as *mut u8;
        buffers[2].pvBuffer = ptr as *mut c_void;
        buffers[2].cbBuffer = self.stream_sizes.cbTrailer;
        buffers[2].BufferType = SECBUFFER_STREAM_TRAILER;
        buffers[3].BufferType = SECBUFFER_EMPTY;
        buffers[4].BufferType = SECBUFFER_EMPTY;
        let ctxt = get_mut_handle!(self, ctxt);
        //TODO: Respect stream_sizes.cbMaximumMessage (encryption length limit)
        unsafe {
            let status = EncryptMessage(ctxt as *mut SecHandle, 0, &mut message as *mut SecBufferDesc, 0);
            if status == SEC_E_OK {
                let len = buffers[0].cbBuffer as usize + buffers[1].cbBuffer as usize + buffers[2].cbBuffer as usize;
                debug!("Encrypted {}. Sending. {}", len, buffers[3].BufferType == SECBUFFER_EMPTY);
                // NOTE(review): short writes are ignored (result count dropped)
                // and an EncryptMessage failure is silently swallowed — the
                // function still returns Ok(buf.len()) below. Verify.
                self.stream.write(&buffer[..len]).unwrap();
            }
        }
        Ok(buf.len())
    }

    #[inline]
    fn flush(&mut self) -> std::io::Result<()> {
        self.stream.flush()
    }
}

// NOTE(review): these Drop impls assert!() on the OS free/close result —
// a failing release aborts via panic-in-drop. Consider logging instead.
impl Drop for SchannelCredHandle {
    fn drop(&mut self) {
        unsafe {
            assert!(FreeCredentialsHandle(&mut self.0 as *mut CredHandle) == SEC_E_OK);
        }
    }
}

impl Drop for SchannelCtxtHandle {
    fn drop(&mut self) {
        unsafe {
            assert!(DeleteSecurityContext(&mut self.0 as *mut CtxtHandle) == SEC_E_OK);
        }
    }
}

impl Drop for SchannelCertStore {
    fn drop(&mut self) {
        unsafe {
            assert!(CertCloseStore(self.0, 0) == 1);
        }
    }
}

impl Drop for SchannelCertCtxt {
    fn drop(&mut self) {
        unsafe {
            CertFreeCertificateContext(self.0);
        }
    }
}