text
stringlengths
8
4.13M
use crate::thick_2_ofn::class_translation as class_translation; use crate::owl::typing as owl; use serde_json::{Value}; pub fn translate_subclass_of_axiom(sub: &str, sup: &str) -> Value { let subclass: owl::OWL = serde_json::from_str(sub).unwrap(); let superclass: owl::OWL = serde_json::from_str(sup).unwrap(); let lhs : Value = class_translation::translate(&subclass); let rhs: Value = class_translation::translate(&superclass); let operator = Value::String(String::from("SubClassOf")); let v = vec![operator, lhs, rhs]; Value::Array(v) } pub fn translate_equivalent_class(sub: &str, sup: &str) -> Value { let subject: owl::OWL = serde_json::from_str(sub).unwrap(); let object: owl::OWL = serde_json::from_str(sup).unwrap(); let lhs : Value = class_translation::translate(&subject); let mut rhs: Value = class_translation::translate(&object); match object { owl::OWL::RDFList(_) => { let operator = Value::String(String::from("EquivalentClasses")); let mut equivalent = vec![operator]; let arguments = rhs.as_array_mut().unwrap(); //equivalent.push(lhs); //LHS is a (generated) blank node equivalent.append(arguments); Value::Array(equivalent.to_vec()) }, _ => { let operator = Value::String(String::from("EquivalentClasses")); let v = vec![operator, lhs, rhs]; Value::Array(v) }, } } pub fn translate_disjoint_classes(ops: &str) -> Value { let operands : owl::OWL = serde_json::from_str(ops).unwrap(); let mut arguments: Value = class_translation::translate(&operands); let operator = Value::String(String::from("DisjointClasses")); let mut disjoint = vec![operator]; let arguments = arguments.as_array_mut().unwrap(); disjoint.append(arguments); Value::Array(disjoint.to_vec()) } pub fn translate_disjoint_with(lhs: &str, rhs: &str) -> Value { let l: owl::OWL = serde_json::from_str(lhs).unwrap(); let r: owl::OWL = serde_json::from_str(rhs).unwrap(); let lhs : Value = class_translation::translate(&l); let rhs: Value = class_translation::translate(&r); let operator = 
Value::String(String::from("DisjointClasses")); let v = vec![operator, lhs, rhs]; Value::Array(v) } pub fn translate_disjoint_union(u: &str, ops: &str) -> Value { let union: owl::OWL = serde_json::from_str(u).unwrap(); let operands: owl::OWL = serde_json::from_str(ops).unwrap(); let lhs : Value = class_translation::translate(&union); let mut rhs: Value = class_translation::translate(&operands); let operator = Value::String(String::from("DisjointUnionOf")); let mut union = vec![operator]; union.push(lhs); let arguments = rhs.as_array_mut().unwrap(); union.append(arguments); Value::Array(union.to_vec()) } pub fn translate_thin_triple(v : &Value) -> Value { let s = v["subject"].as_str().unwrap(); let p = v["predicate"].as_str().unwrap(); let o = v["object"].as_str().unwrap(); let subject = Value::String(String::from(s)); let predicate = Value::String(String::from(p)); let object = Value::String(String::from(o)); let operator = Value::String(String::from("ThinTriple")); let v = vec![operator, subject, predicate, object]; Value::Array(v) }
fn main() {
    println!("Hello, world!");
}

/// Interpret a tiny instruction string against a single accumulator:
/// `i` increments, `d` decrements, `s` squares, `o` records the current
/// value; every other character is ignored.  Returns the recorded values.
fn parse(code: &str) -> Vec<i32> {
    let mut value = 0;
    code.chars()
        .filter_map(|instruction| {
            match instruction {
                'i' => value += 1,
                'd' => value -= 1,
                's' => value *= value,
                'o' => return Some(value),
                _ => {}
            }
            None
        })
        .collect()
}

#[test]
fn sample_tests() {
    assert_eq!(parse("iiisdoso"), vec![8, 64]);
    assert_eq!(parse("iiisdosodddddiso"), vec![8, 64, 3600]);
}
use std::fmt::Display; use std::io::prelude::*; use std::process::{Command, Stdio}; use anyhow::Context; use which::which; pub fn is_available() -> bool { !which("fzf").is_err() } pub fn select<I, D>(items: I) -> anyhow::Result<Option<String>> where I: IntoIterator<Item = D>, D: Display, { let mut child = Command::new("fzf") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn() .context("Could not spawn fzf")?; let stdin = child.stdin.as_mut().expect("stdin not available"); for item in items { writeln!(stdin, "{}", item)?; } child.wait().context("fzf run failure")?; let mut line = String::new(); child .stdout .expect("stdout not available") .read_to_string(&mut line) .context("could not read output from fzf")?; let line = line.trim(); if line.is_empty() { Ok(None) } else { Ok(Some(line.to_string())) } }
//! Cache eviction-policy implementations.

// Least-recently-used eviction policy.
pub mod lru;
// First-in-first-out eviction policy.
pub mod fifo;
use std::cell::RefCell; use std::rc::Rc; use super::windows::*; use super::*; pub struct Gui { queue: Rc<MessageQueue<super::Request>>, container: Rc<RefCell<Container>>, events: EventProcessor, commands: CommandProcessor, state: Rc<State>, } impl Gui { pub fn new(config: Config) -> Self { let queue = Rc::new(MessageQueue::new()); let state = Rc::new(State::new(Rc::clone(&queue), Rc::new(RefCell::new(config)))); let container = Rc::new(RefCell::new(Container::new(Rc::clone(&state)))); let events = EventProcessor::new(Rc::clone(&state), Rc::clone(&queue), Rc::clone(&container)); let commands = CommandProcessor::new(Rc::clone(&state), Rc::clone(&queue)); Self { queue, container, events, commands, state, } } pub fn run(&mut self) { loop { match { let s = self.container.borrow_mut().step(); s.clone() } { windows::ReadType::Line(line) => { if let Err(err) = self.commands.dispatch(&line) { debug!("command error: {:?}", err); match err { // TODO output-ize this ui::Error::InvalidArgument(s) | ui::Error::InvalidBuffer(s) => { trace!("{:?}", ui::Output::new().add(s).build()) } ui::Error::ClientError(err) => { let output = ui::Output::new() .fg(ui::Color::Red) .add("error: ") .add("irc client error: ") .fg(ui::Color::Cyan) .add(format!("{:?}", err)) .build(); trace!("{:?}", output); } ui::Error::AlreadyConnected => { let output = ui::Output::new() .fg(ui::Color::Red) .add("error: ") .add("already connected") .build(); trace!("{:?}", output); } ui::Error::NotConnected => { let output = ui::Output::new() .fg(ui::Color::Red) .add("error: ") .add("not connected") .build(); trace!("{:?}", output); } ui::Error::ForceExit => break, _ => error!("unknown error: {:?}", err), } }; { self.container.borrow_mut().add_and_clear(); } } // TODO merge this stuff windows::ReadType::FKey(key) if key == pancurses::Input::KeyF10 => break, windows::ReadType::FKey(key) => trace!("fkey: {:?}", key), windows::ReadType::None => if !self.read_buffers() { debug!("resetting the state"); // flush the 
queue before clearing self.events.process(); // wipe out the state self.state.reset(); self.state.buffers().activate(0); }, } } } fn read_buffers(&mut self) -> bool { if !self.read_errors() { return false; } self.events.process(); true } fn read_errors(&mut self) -> bool { if let Some(errors) = self.state.read_errors() { if let Some(err) = errors.try_recv() { let output = ui::Output::new() .fg(ui::Color::Red) .add("error: ") .add("irc c;ient error ") .fg(ui::Color::Cyan) .add(format!("{:?}", err)) .build(); trace!("{:?}", output); return false; } }; true } }
// Generates a set of points on the map use std::collections::*; use std::cmp::*; use fourthrail::*; fn is_on_map(coord: &Coord) -> bool { let (r, c) = *coord; r >= 0 && r < MAP_HEIGHT && c >= 0 && c < MAP_WIDTH } pub fn circle(centre: &Coord, rad: i32) -> Coords { let (r0, c0) = *centre; let mut result: Coords = BTreeSet::new(); for r in max((r0 - rad), 0)..min((r0 + 1 + rad), MAP_HEIGHT) { for c in max((c0 - rad), 0)..min((c0 + 1 + rad), MAP_WIDTH) { let dr = ((r - r0) as f64).abs() - 0.4; let dc = ((c - c0) as f64).abs() - 0.4; if (dr * dr) + (dc * dc) <= (rad * rad) as f64 { result.insert((r, c)); } } } result } pub fn line(start: &Coord, end: &Coord) -> Coords { let (mut r, mut c) = *start; let (r1, c1) = *end; let mut result: Coords = BTreeSet::new(); let dr = r1 - r; let dc = c1 - c; let mut e = 0.0; result.insert((r, c)); if dr.abs() > dc.abs() { let es = (dc as f64 / dr as f64).abs(); while r != r1 { e += es; if e > 0.5 { c += if dc > 0 {1} else {-1}; e -= 1.0; } r += if dr > 0 {1} else {-1}; result.insert((r, c)); } } else { let es = (dr as f64 / dc as f64).abs(); while c != c1 { e += es; if e > 0.5 { r += if dr > 0 {1} else {-1}; e -= 1.0; } c += if dc > 0 {1} else {-1}; result.insert((r, c)); } } result }
// Sprite data pub const PLAYER_SPRITE_PATH: &str = "assets/Bullethellplayer.png"; pub const PLAYER_SPRITE_ID: usize = 0; pub const BOSS_SPRITE_PATH: &str = "assets/BullethellBoss.png"; pub const BOSS_SPRITE_ID: usize = 1; pub const BULLET_SPRITE_PATH: &str = "assets/bullet.png"; pub const BULLET_SPRITE_ID: usize = 2; pub const EXPLOSION_SPRITE_PATH: &str = "assets/explo.png"; pub const EXPLOSION_SPRITE_ID: usize = 3; // tweak data pub const BULLET_SIZE: i32 = 16;
use std::fs;
use std::collections::VecDeque;
use permute::permutations_of;

/// Decode the parameter-mode flags of an intcode instruction.
///
/// `modes_num` is the instruction with its two low opcode digits stripped
/// (`program[i] / 100`); decimal digit `i` equal to 1 selects "immediate"
/// mode for parameter `i`, otherwise "position" mode.
fn get_modes(modes_num: usize) -> Vec<bool> {
    // FIX: the original computed `(n / 10^i) % 10^(i+1)`, which keeps MORE
    // than one digit — e.g. modes 110 decoded parameter 1 as position mode.
    // The single digit at decimal position i is `(n / 10^i) % 10`.
    (0..3).map(|i| (modes_num / 10_usize.pow(i)) % 10 == 1).collect()
}

/// Run the intcode `program` from instruction pointer `i`, consuming `input`.
///
/// Returns the suspended machine state `(program, ip, remaining input)`
/// together with `Some(value)` when opcode 4 produced an output (the machine
/// can be resumed from the returned state) or `None` when opcode 99 halted.
fn execute(
    mut program: Vec<isize>,
    mut i: usize,
    mut input: VecDeque<isize>,
) -> ((Vec<isize>, usize, VecDeque<isize>), Option<isize>) {
    loop {
        let modes: Vec<bool> = get_modes((program[i] / 100) as usize);
        match program[i] % 100 {
            // add
            1 => {
                let op1 = if modes[0] { program[i + 1] } else { program[program[i + 1] as usize] };
                let op2 = if modes[1] { program[i + 2] } else { program[program[i + 2] as usize] };
                let position = program[i + 3] as usize;
                program[position] = op1 + op2;
                i += 4;
            }
            // multiply
            2 => {
                let op1 = if modes[0] { program[i + 1] } else { program[program[i + 1] as usize] };
                let op2 = if modes[1] { program[i + 2] } else { program[program[i + 2] as usize] };
                let position = program[i + 3] as usize;
                program[position] = op1 * op2;
                i += 4;
            }
            // read one value from input
            3 => {
                let position = program[i + 1] as usize;
                program[position] = input.pop_front().unwrap();
                i += 2;
            }
            // write output: suspend the machine and hand the value back
            4 => {
                let op1 = if modes[0] { program[i + 1] } else { program[program[i + 1] as usize] };
                i += 2;
                return ((program, i, input), Some(op1));
            }
            // jump-if-true
            5 => {
                let op1 = if modes[0] { program[i + 1] } else { program[program[i + 1] as usize] };
                let op2 = if modes[1] { program[i + 2] } else { program[program[i + 2] as usize] };
                i = if op1 != 0 { op2 as usize } else { i + 3 }
            }
            // jump-if-false
            6 => {
                let op1 = if modes[0] { program[i + 1] } else { program[program[i + 1] as usize] };
                let op2 = if modes[1] { program[i + 2] } else { program[program[i + 2] as usize] };
                i = if op1 == 0 { op2 as usize } else { i + 3 }
            }
            // less-than
            7 => {
                let op1 = if modes[0] { program[i + 1] } else { program[program[i + 1] as usize] };
                let op2 = if modes[1] { program[i + 2] } else { program[program[i + 2] as usize] };
                let position = program[i + 3] as usize;
                program[position] = if op1 < op2 { 1 } else { 0 };
                i += 4;
            }
            // equals
            8 => {
                let op1 = if modes[0] { program[i + 1] } else { program[program[i + 1] as usize] };
                let op2 = if modes[1] { program[i + 2] } else { program[program[i + 2] as usize] };
                let position = program[i + 3] as usize;
                program[position] = if op1 == op2 { 1 } else { 0 };
                i += 4;
            }
            // halt
            99 => {
                return ((program, i, input), None);
            }
            _ => {
                panic!()
            }
        }
    }
}

/// Part 1: best thruster signal over serial amplifier chains with phase
/// settings 0–4 (each amplifier runs once, feeding the next).
fn part1(program: Vec<isize>) -> isize {
    let phases: Vec<isize> = vec![0, 1, 2, 3, 4];
    permutations_of(&phases)
        .map(|setting| {
            let mut stage_var = 0;
            for i in setting {
                let input = VecDeque::from(vec![*i, stage_var]);
                let result = execute(program.clone(), 0, input);
                stage_var = result.1.unwrap()
            }
            stage_var
        })
        .max()
        .unwrap_or(0)
}

/// Part 2: feedback loop with phases 5–9; each amplifier keeps its machine
/// state between outputs and the loop runs until an amplifier halts.
fn part2(program: Vec<isize>) -> isize {
    let phases: Vec<isize> = vec![5, 6, 7, 8, 9];
    permutations_of(&phases)
        .map(|setting| {
            // One suspended machine per amplifier, seeded with its phase.
            let mut states: Vec<(Vec<isize>, usize, VecDeque<isize>)> = setting
                .map(|phase| (program.clone(), 0 as usize, VecDeque::from(vec![*phase])))
                .collect();
            let (mut i, mut output) = (0, 0);
            loop {
                states[i % 5].2.push_back(output);
                let result = execute(states[i % 5].0.clone(), states[i % 5].1, states[i % 5].2.clone());
                if result.1 == None {
                    // This amplifier halted; the last output is the answer.
                    return output;
                }
                states[i % 5] = result.0.clone();
                output = result.1.unwrap();
                i += 1;
            }
        })
        .max()
        .unwrap_or(0)
}

fn main() {
    let contents = fs::read_to_string("input.txt").expect("Could not read file");
    let program: Vec<isize> = contents.split(",").map(|i| i.parse::<isize>().unwrap()).collect();
    println!("{}", part1(program.clone()));
    println!("{}", part2(program.clone()));
}
// 2019-04-05 // Les fermetures (closures) sont capable de capturer leur environnement et // d'accéder à des variables du scope où elles sont définies fn main() { let x = 4; // Définition de la fermeture. // dans z == x, x n'est pas une variable, la fermeture a le droit de l'utiliser let egal_a_x = |z| z == x; let y = 4; assert!(egal_a_x(y)); }
use glam::{vec3, Mat4};
use macroquad::*;

// Simple 2D camera: a zoom factor, an optional horizontal flip, and a 2D
// isometry (translation + rotation) applied to the world.
#[derive(Clone, Copy)]
pub struct CedCam2D {
    pub zoom: f32,
    pub flip_x: bool,
    pub iso: na::Isometry2<f32>,
}

impl Default for CedCam2D {
    fn default() -> CedCam2D {
        CedCam2D {
            zoom: 10.0,
            flip_x: false,
            iso: na::Isometry2::identity(),
        }
    }
}

impl CedCam2D {
    // Camera with the given zoom; every other field takes its default.
    pub fn with_zoom(zoom: f32) -> Self {
        CedCam2D {
            zoom,
            ..Default::default()
        }
    }

    /// Returns the screen space position for a 2D camera world space position
    // NOTE(review): this applies the INVERSE of `matrix()`, while
    // `screen_to_world` below applies `matrix()` itself (bound to a variable
    // named `inv_mat`).  The two look swapped relative to their names —
    // confirm against call sites before relying on either direction.
    pub fn world_to_screen(&self, point: na::Vector2<f32>) -> na::Vector2<f32> {
        let mat = self.matrix().inverse();
        let transform = mat.transform_point3(vec3(point.x, point.y, 0.0));
        na::Vector2::new(transform.x(), transform.y())
    }

    // Returns the world space position for a 2D camera screen space position
    pub fn screen_to_world(&self, point: na::Vector2<f32>) -> na::Vector2<f32> {
        let inv_mat = self.matrix();
        let transform = inv_mat.transform_point3(vec3(point.x, point.y, 0.0));
        na::Vector2::new(transform.x(), transform.y())
    }

    // Zoom scale compensating for the screen aspect ratio; Y is negated
    // (screen Y grows downward).
    fn scale_matrix(&self) -> glam::Mat4 {
        let Self { zoom, .. } = *self;
        let (w, h) = (screen_width(), screen_height());
        Mat4::from_scale(vec3(1.0, -(w / h), 1.0) / zoom)
    }
}

impl Camera for CedCam2D {
    // Compose: scale * translation * rotation * optional X flip.
    fn matrix(&self) -> glam::Mat4 {
        self.scale_matrix()
            * Mat4::from_translation(vec3(
                self.iso.translation.vector.x,
                self.iso.translation.vector.y,
                0.0,
            ))
            * Mat4::from_axis_angle(vec3(0.0, 0.0, 1.0), self.iso.rotation.angle())
            * Mat4::from_scale(vec3(if self.flip_x { -1.0 } else { 1.0 }, 1.0, 1.0))
    }

    // Pure 2D rendering: no depth buffer, no offscreen render pass.
    fn depth_enabled(&self) -> bool {
        false
    }

    fn render_pass(&self) -> Option<miniquad::RenderPass> {
        None
    }
}
use super::eval::*; use big_s::S; use itertools::Itertools; use std::collections::HashMap; pub fn make_global_env() -> HashMap<String, Value> { let mut env = HashMap::new(); env.insert( S("print"), Value::Callable(|values| { for value in values.iter() { println!("{}", value); } Ok(last_or_nil(values)) }), ); env.insert( S("exit"), Value::Callable(|values| { let code = values.into_iter().last().unwrap_or(Value::Number(0)); std::process::exit(code.into_num() as i32) }), ); env.insert( S("begin"), Value::Callable(|values| Ok(last_or_nil(values))), ); env.insert( S("+"), Value::Callable(|values| { Ok(Value::Number( values.iter().map(|n| n.clone().into_num()).sum(), )) }), ); env.insert( S("*"), Value::Callable(|values| { Ok(Value::Number( values.iter().map(|n| n.clone().into_num()).product(), )) }), ); env.insert( S("-"), Value::Callable(|values| { Ok(Value::Number( if let Some((first, rest)) = values.split_first() { let first = first.clone().into_num(); if rest.is_empty() { -first } else { rest.iter().fold(first, |n, m| n - m.clone().into_num()) } } else { 0 }, )) }), ); env.insert( S("/"), Value::Callable(|values| { if let Some((first, rest)) = values.split_first() { let first = first.clone().into_num(); Ok(Value::Number(if rest.is_empty() { 1 / first } else { rest.iter().fold(first, |n, m| n / m.clone().into_num()) })) } else { Err(EvalError(S("Wrong number of arguments: /, 0"))) } }), ); env.insert( S("="), Value::Callable(|values| { let first = values.first().unwrap().clone().into_num(); Ok(if values.iter().any(|x| x.clone().into_num() != first) { Value::Nil } else { Value::Number(1) }) }), ); env.insert(S("eq"), env["="].clone()); env.insert( S("!"), Value::Callable(|values| match values.len() { 1 => Ok(if values.first().unwrap().is_truthy() { Value::Nil } else { Value::Number(1) }), n if n > 1 => Err(EvalError(S("too many arguments given to NOT"))), _ => Err(EvalError(S("too few arguments givien to NOT"))), }), ); env.insert(S("not"), env["!"].clone()); 
env.insert( S("<"), Value::Callable(|values| { let vs = Values::from(values); Ok(if let Some(v) = vs.to_tuples() { if v.iter() .filter(|(a, b)| !(a.clone().into_num() < b.clone().into_num())) .collect::<Vec<_>>() .is_empty() { Value::Number(1) } else { Value::Nil } } else { Value::Nil }) }), ); env.insert( S(">"), Value::Callable(|values| { let vs = Values::from(values); Ok(if let Some(v) = vs.to_tuples() { if v.iter() .filter(|(a, b)| !(a.clone().into_num() > b.clone().into_num())) .collect::<Vec<_>>() .is_empty() { Value::Number(1) } else { Value::Nil } } else { Value::Nil }) }), ); env.insert( S("<="), Value::Callable(|values| { let vs = Values::from(values); Ok(if let Some(v) = vs.to_tuples() { if v.iter() .filter(|(a, b)| !(a.clone().into_num() <= b.clone().into_num())) .collect::<Vec<_>>() .is_empty() { Value::Number(1) } else { Value::Nil } } else { Value::Nil }) }), ); env.insert( S(">="), Value::Callable(|values| { let vs = Values::from(values); Ok(if let Some(v) = vs.to_tuples() { if v.iter() .filter(|(a, b)| !(a.clone().into_num() >= b.clone().into_num())) .collect::<Vec<_>>() .is_empty() { Value::Number(1) } else { Value::Nil } } else { Value::Nil }) }), ); env.insert( S("list"), Value::Callable(|values| { if values.len() > 1 { let (first, rest) = values.split_first().unwrap(); let mut ret = Cons::new(first.clone(), Value::Nil); for value in rest { ret.append(value.clone()); } Ok(Value::Cons(ret)) } else { Ok(Value::Cons(Cons::new( values.first().unwrap().clone(), Value::Nil, ))) } }), ); env.insert( S("cons"), Value::Callable(|values| { if let Some((a, b)) = values.iter().next_tuple() { Ok(Value::Cons(Cons::new(a.clone(), b.clone()))) } else { Ok(Value::Nil) } }), ); env.insert( S("car"), Value::Callable(|values| match values.first() { Some(Value::Cons(cons)) => Ok(cons.clone().car()), _ => Err(EvalError(S("Wrong argument type: car require cons"))), }), ); env.insert( S("cdr"), Value::Callable(|values| match values.first() { Some(Value::Cons(cons)) => 
Ok(cons.clone().cdr()), _ => Err(EvalError(S("Wrong argument type: car require cons"))), }), ); env.insert(S("T"), Value::Number(1)); env.insert(S("t"), Value::Number(1)); env } fn last_or_nil(values: Vec<Value>) -> Value { values.last().cloned().unwrap_or(Value::Nil) } struct Values(Vec<Value>); impl Values { pub fn from(v: Vec<Value>) -> Self { Self(v) } pub fn to_tuples(&self) -> Option<Vec<(Value, Value)>> { if self.0.len() < 2 { None } else { let mut ret = Vec::new(); for (i, v) in self.0.iter().enumerate() { match self.0.iter().nth(i + 1) { Some(n) => ret.push((v.clone(), n.clone())), None => break, } } Some(ret) } } }
use structopt::StructOpt; use structopt::clap::{App, Shell}; use crate::errors::AppResultU; #[derive(Debug, StructOpt)] pub struct Opt { /// Shell shell: Shell, /// Output to directory: String, } pub fn generate(opt: Opt, mut app: App) -> AppResultU { app.gen_completions(env!("CARGO_PKG_NAME"), opt.shell, &opt.directory); Ok(()) }
use std::cmp::Ordering;

use crate::Ray;
use crate::*;

/// Reflective material with an adjustable surface roughness (`fuzz`).
pub struct Metal {
    albedo: Vector3,
    fuzz: f32,
}

impl Metal {
    /// `albedo` is the attenuation colour; `fuzz` scales the random
    /// perturbation applied to reflected rays.
    pub fn new(albedo: &Vector3, fuzz: f32) -> Metal {
        Metal {
            albedo: *albedo,
            fuzz,
        }
    }
}

impl Material for Metal {
    /// Mirror-reflect the incoming ray about the hit normal, jitter it by
    /// `fuzz`, and scatter only when the result points away from the
    /// surface.  A NaN dot product (no ordering) also produces no scatter,
    /// exactly as the nested-match original did.
    fn scatter(&self, ray_in: &Ray, hit_record: &HitRecord) -> Option<(Vector3, Ray)> {
        let jitter = self.fuzz * &Random::gen::<Vector3>();
        let mirrored = ray_in.direction().reflect(&hit_record.normal);
        let direction = &(&mirrored + &jitter).normalized();

        match direction.dot(&hit_record.normal).partial_cmp(&0.0f32) {
            Some(Ordering::Greater) => Some((
                self.albedo,
                Ray::new(&hit_record.position, direction, ray_in.time()),
            )),
            _ => None,
        }
    }
}
$NetBSD: patch-src_tools_cargo_tests_testsuite_build.rs,v 1.11 2023/01/23 18:49:04 he Exp $ Don't attempt incremental operations on sparc64, ref. https://sources.debian.org/patches/cargo/0.29.0-1/2007_sparc64_disable_incremental_build.patch/ --- src/tools/cargo/tests/testsuite/build.rs.orig 2019-08-13 06:27:40.000000000 +0000 +++ src/tools/cargo/tests/testsuite/build.rs @@ -47,6 +47,7 @@ fn cargo_fail_with_no_stderr() { /// Checks that the `CARGO_INCREMENTAL` environment variable results in /// `rustc` getting `-C incremental` passed to it. +#[cfg(not(target_arch = "sparc64"))] #[cargo_test] fn cargo_compile_incremental() { let p = project() @@ -69,6 +70,7 @@ fn cargo_compile_incremental() { .run(); } +#[cfg(not(target_arch = "sparc64"))] #[cargo_test] fn incremental_profile() { let p = project() @@ -111,6 +113,7 @@ fn incremental_profile() { .run(); } +#[cfg(not(target_arch = "sparc64"))] #[cargo_test] fn incremental_config() { let p = project()
// NOTE(review): svd2rust-style generated register accessors for the DBGMCU
// APB3FZR ("freeze") register — prefer regenerating from the SVD over
// hand-editing.  Each DBG_*_STOP bit stops the named peripheral's
// counter/clocking while the core is halted in debug.
#[doc = "Register `APB3FZR` reader"]
pub type R = crate::R<APB3FZR_SPEC>;
#[doc = "Register `APB3FZR` writer"]
pub type W = crate::W<APB3FZR_SPEC>;
#[doc = "Field `DBG_I3C2_STOP` reader - I3C2 SCL stall counter stop in debug"]
pub type DBG_I3C2_STOP_R = crate::BitReader;
#[doc = "Field `DBG_I3C2_STOP` writer - I3C2 SCL stall counter stop in debug"]
pub type DBG_I3C2_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_LPTIM1_STOP` reader - LPTIM1 stop in debug"]
pub type DBG_LPTIM1_STOP_R = crate::BitReader;
#[doc = "Field `DBG_LPTIM1_STOP` writer - LPTIM1 stop in debug"]
pub type DBG_LPTIM1_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_RTC_STOP` reader - RTC stop in debug"]
pub type DBG_RTC_STOP_R = crate::BitReader;
#[doc = "Field `DBG_RTC_STOP` writer - RTC stop in debug"]
pub type DBG_RTC_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    #[doc = "Bit 12 - I3C2 SCL stall counter stop in debug"]
    #[inline(always)]
    pub fn dbg_i3c2_stop(&self) -> DBG_I3C2_STOP_R {
        DBG_I3C2_STOP_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 17 - LPTIM1 stop in debug"]
    #[inline(always)]
    pub fn dbg_lptim1_stop(&self) -> DBG_LPTIM1_STOP_R {
        DBG_LPTIM1_STOP_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 30 - RTC stop in debug"]
    #[inline(always)]
    pub fn dbg_rtc_stop(&self) -> DBG_RTC_STOP_R {
        DBG_RTC_STOP_R::new(((self.bits >> 30) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 12 - I3C2 SCL stall counter stop in debug"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i3c2_stop(&mut self) -> DBG_I3C2_STOP_W<APB3FZR_SPEC, 12> {
        DBG_I3C2_STOP_W::new(self)
    }
    #[doc = "Bit 17 - LPTIM1 stop in debug"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_lptim1_stop(&mut self) -> DBG_LPTIM1_STOP_W<APB3FZR_SPEC, 17> {
        DBG_LPTIM1_STOP_W::new(self)
    }
    #[doc = "Bit 30 - RTC stop in debug"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_rtc_stop(&mut self) -> DBG_RTC_STOP_W<APB3FZR_SPEC, 30> {
        DBG_RTC_STOP_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DBGMCU APB3 peripheral freeze register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`apb3fzr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`apb3fzr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct APB3FZR_SPEC;
impl crate::RegisterSpec for APB3FZR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`apb3fzr::R`](R) reader structure"]
impl crate::Readable for APB3FZR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`apb3fzr::W`](W) writer structure"]
impl crate::Writable for APB3FZR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets APB3FZR to value 0"]
impl crate::Resettable for APB3FZR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use anyhow::Context; use chrono::{DateTime, Utc}; use drogue_cloud_console_common::UserInfo; use http::{Response, Uri}; use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; use std::{sync::RwLock, time::Duration}; use url::Url; use yew::{format::Text, prelude::*, services::fetch::*, utils::window}; /// Backend information #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] pub struct BackendInformation { pub url: Url, #[serde(default)] pub login_note: Option<String>, } impl BackendInformation { pub fn url<S: AsRef<str>>(&self, path: S) -> Url { let mut result = self.url.clone(); result.set_path(path.as_ref()); result } pub fn uri<S: AsRef<str>>(&self, path: S) -> Uri { self.url(path).to_string().parse().unwrap() } pub fn url_str<S: AsRef<str>>(&self, path: S) -> String { self.url(path).into() } pub fn request<S, IN, OUT: 'static>( &self, method: http::Method, path: S, payload: IN, headers: Vec<(&str, &str)>, callback: Callback<Response<OUT>>, ) -> Result<FetchTask, anyhow::Error> where S: AsRef<str>, IN: Into<Text>, OUT: From<Text>, { self.request_with(method, path, payload, headers, Default::default(), callback) } pub fn request_with<S, IN, OUT: 'static>( &self, method: http::Method, path: S, payload: IN, headers: Vec<(&str, &str)>, options: RequestOptions, callback: Callback<Response<OUT>>, ) -> Result<FetchTask, anyhow::Error> where S: AsRef<str>, IN: Into<Text>, OUT: From<Text>, { let request = http::request::Builder::new() .method(method) .uri(self.uri(path)); let token = match Backend::access_token() { Some(token) => token, None => { if !options.disable_reauth { Backend::reauthenticate().ok(); return Err(anyhow::anyhow!("Performing re-auth")); } return Err(anyhow::anyhow!("Missing token")); } }; let mut request = request.header("Authorization", format!("Bearer {}", token)); for (k, v) in headers { request = request.header(k, v); } let request = request.body(payload).context("Failed to create request")?; let task = 
FetchService::fetch_with_options( request, FetchOptions { cache: Some(Cache::NoCache), credentials: Some(Credentials::Include), redirect: Some(Redirect::Follow), mode: Some(Mode::Cors), ..Default::default() }, callback.reform(move |response: Response<_>| { log::info!("Backend response code: {}", response.status().as_u16()); match response.status().as_u16() { 401 | 403 | 408 if !options.disable_reauth => { // 408 is "sent" by yew if the request fails, which it does when CORS is in play Backend::reauthenticate().ok(); } _ => {} }; response }), ) .map_err(|err| anyhow::anyhow!("Failed to fetch: {:?}", err))?; Ok(task) } } #[derive(Clone, Debug, Default)] pub struct RequestOptions { pub disable_reauth: bool, } #[derive(Clone, Debug, PartialEq, Eq)] pub struct Backend { pub info: BackendInformation, token: Option<Token>, } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct Token { pub access_token: String, pub expires: Option<DateTime<Utc>>, pub id_token: String, pub refresh_token: Option<String>, pub userinfo: Option<UserInfo>, } impl Token { pub fn is_expired(&self) -> bool { self.valid_for() .map_or(false, |timeout| timeout.as_secs() < 30) } pub fn valid_for(&self) -> Option<Duration> { self.expires .map(|expires| expires.signed_duration_since(Utc::now())) .and_then(|expires| expires.to_std().ok()) } pub fn if_valid(&self) -> Option<&Self> { if self.is_expired() { None } else { Some(self) } } } static CONSOLE_BACKEND: Lazy<RwLock<Option<Backend>>> = Lazy::new(|| RwLock::new(None)); impl Backend { /// Return the backend endpoint, or [`Option::None`]. 
pub fn get() -> Option<Backend> { CONSOLE_BACKEND.read().unwrap().clone() } pub fn url<S: AsRef<str>>(path: S) -> Option<Url> { Self::get().map(|backend| backend.info.url(path)) } #[allow(dead_code)] pub fn uri<S: AsRef<str>>(path: S) -> Option<Uri> { Self::get().map(|backend| backend.info.uri(path)) } pub fn url_str<S: AsRef<str>>(path: S) -> Option<String> { Self::get().map(|backend| backend.info.url_str(path)) } /// Get the access token, if it is not expired yet pub fn access_token() -> Option<String> { Self::get() .and_then(|b| b.token) .as_ref() .and_then(|t| t.if_valid()) .map(|token| token.access_token.clone()) } /// Get full token information pub fn token() -> Option<Token> { Self::get().and_then(|b| b.token) } pub(crate) fn set(info: Option<BackendInformation>) { *CONSOLE_BACKEND.write().unwrap() = info.map(|info| Backend { info, token: None }); } fn update<F>(f: F) where F: FnOnce(&mut Backend), { let mut backend = CONSOLE_BACKEND.write().unwrap(); if let Some(ref mut backend) = *backend { f(backend); } } pub(crate) fn update_token(token: Option<Token>) { Self::update(|backend| backend.token = token); } pub fn current_url(&self) -> String { self.info.url.to_string() } pub fn request<S, IN, OUT: 'static>( method: http::Method, path: S, payload: IN, callback: Callback<Response<OUT>>, ) -> Result<FetchTask, anyhow::Error> where S: AsRef<str>, IN: Into<Text>, OUT: From<Text>, { Self::request_with(method, path, payload, Default::default(), callback) } pub fn request_with<S, IN, OUT: 'static>( method: http::Method, path: S, payload: IN, options: RequestOptions, callback: Callback<Response<OUT>>, ) -> Result<FetchTask, anyhow::Error> where S: AsRef<str>, IN: Into<Text>, OUT: From<Text>, { Self::get() .ok_or_else(|| anyhow::anyhow!("Missing backend"))? 
.info .request_with(method, path, payload, vec![], options, callback) } pub fn reauthenticate() -> Result<(), anyhow::Error> { Self::navigate_to( "/api/console/v1alpha1/ui/login", "Trigger re-authenticate flow", ) } pub fn logout() -> Result<(), anyhow::Error> { Self::navigate_to("/api/console/v1alpha1/ui/logout", "Trigger logout flow") } fn navigate_to<S: AsRef<str>>(path: S, op: &str) -> Result<(), anyhow::Error> { let target = Backend::url_str(path).context("Backend information missing"); log::debug!("{}: {:?}", op, target); window().location().set_href(&target?).unwrap(); Ok(()) } } #[cfg(test)] mod test { use super::*; use chrono::DateTime; fn setup() { /* env_logger::builder() .filter_level(log::LevelFilter::Debug) .init(); */ } #[test] fn test_date_parser() { setup(); let str = "2020-11-30T11:33:37.437915952Z"; let date = DateTime::parse_from_rfc3339(str); assert!(date.is_ok()); } #[test] fn test_valid_for() { setup(); let date = Utc::now() + chrono::Duration::seconds(120); let token = Token { access_token: String::new(), id_token: String::new(), refresh_token: None, expires: Some(date), userinfo: None, }; assert!(!token.is_expired()); assert!(token.valid_for().is_some()); } }
use core::marker::PhantomData;

use necsim_core::{
    cogs::{Backup, CoalescenceRngSample, EmigrationExit, Habitat, RngCore},
    landscape::{IndexedLocation, Location},
    lineage::{GlobalLineageReference, MigratingLineage},
    simulation::partial::emigration_exit::PartialSimulation,
};
use necsim_core_bond::{NonNegativeF64, PositiveF64};

use crate::{
    cogs::lineage_store::independent::IndependentLineageStore, decomposition::Decomposition,
};

pub mod choice;
use choice::EmigrationChoice;

/// An [`EmigrationExit`] for the independent algorithm: a lineage whose
/// dispersal target falls in a different subdomain (per the
/// [`Decomposition`]) is buffered here — at most one at a time — until it is
/// collected via [`IndependentEmigrationExit::take`].
#[allow(clippy::module_name_repetitions)]
#[derive(Debug)]
pub struct IndependentEmigrationExit<H: Habitat, C: Decomposition<H>, E: EmigrationChoice<H>> {
    // Maps locations to subdomain ranks.
    decomposition: C,
    // Policy deciding whether a cross-subdomain dispersal should emigrate.
    choice: E,
    // At most one buffered emigrant: (target subdomain rank, lineage).
    emigrant: Option<(u32, MigratingLineage)>,
    _marker: PhantomData<H>,
}

#[contract_trait]
impl<H: Habitat, C: Decomposition<H>, E: EmigrationChoice<H>> Backup
    for IndependentEmigrationExit<H, C, E>
{
    // SAFETY-adjacent: delegates to the components' `backup_unchecked`;
    // the caller must uphold whatever contract `Backup` imposes.
    unsafe fn backup_unchecked(&self) -> Self {
        Self {
            decomposition: self.decomposition.backup_unchecked(),
            choice: self.choice.backup_unchecked(),
            emigrant: self
                .emigrant
                .as_ref()
                .map(|(partition, migrating_lineage)| {
                    (*partition, migrating_lineage.backup_unchecked())
                }),
            _marker: PhantomData::<H>,
        }
    }
}

#[contract_trait]
impl<H: Habitat, C: Decomposition<H>, E: EmigrationChoice<H>, G: RngCore>
    EmigrationExit<H, G, GlobalLineageReference, IndependentLineageStore<H>>
    for IndependentEmigrationExit<H, C, E>
{
    #[must_use]
    #[inline]
    #[debug_requires(self.emigrant.is_none(), "can only hold one emigrant")]
    #[debug_ensures(ret.is_some() == (
        (
            old(self.decomposition.map_location_to_subdomain_rank(
                &dispersal_target, &simulation.habitat
            )) == self.decomposition.get_subdomain_rank()
        ) || !old(self.choice.should_lineage_emigrate(
            &dispersal_origin,
            event_time,
            &simulation.habitat,
        ))
    ), "lineage only emigrates to other subdomains")]
    // Returns `Some(event tuple)` when the lineage stays local, or `None`
    // after buffering it as an emigrant to another subdomain.
    fn optionally_emigrate(
        &mut self,
        lineage_reference: GlobalLineageReference,
        dispersal_origin: IndexedLocation,
        dispersal_target: Location,
        prior_time: NonNegativeF64,
        event_time: PositiveF64,
        simulation: &mut PartialSimulation<
            H,
            G,
            GlobalLineageReference,
            IndependentLineageStore<H>,
        >,
        rng: &mut G,
    ) -> Option<(
        GlobalLineageReference,
        IndexedLocation,
        Location,
        NonNegativeF64,
        PositiveF64,
    )> {
        let target_subdomain = self
            .decomposition
            .map_location_to_subdomain_rank(&dispersal_target, &simulation.habitat);

        // Keep the lineage local when the dispersal stays in this subdomain,
        // or when the emigration choice declines. Note the short-circuit:
        // `should_lineage_emigrate` is only consulted for cross-subdomain
        // dispersal — do not reorder these operands.
        if (target_subdomain == self.decomposition.get_subdomain_rank())
            || !self.choice.should_lineage_emigrate(
                &dispersal_origin,
                event_time,
                &simulation.habitat,
            )
        {
            return Some((
                lineage_reference,
                dispersal_origin,
                dispersal_target,
                prior_time,
                event_time,
            ));
        }

        // Buffer the emigrant (the precondition guarantees the slot is free)
        // and return `None` so the local simulation drops the lineage.
        self.emigrant = Some((
            target_subdomain,
            MigratingLineage {
                global_reference: lineage_reference,
                dispersal_origin,
                dispersal_target,
                prior_time,
                event_time,
                // Pre-sample the coalescence RNG so the receiving subdomain
                // does not need this partition's RNG state.
                coalescence_rng_sample: CoalescenceRngSample::new(rng),
            },
        ));

        None
    }
}

impl<H: Habitat, C: Decomposition<H>, E: EmigrationChoice<H>> IndependentEmigrationExit<H, C, E> {
    /// Create an empty exit from a decomposition and an emigration policy.
    #[must_use]
    pub fn new(decomposition: C, choice: E) -> Self {
        Self {
            decomposition,
            choice,
            emigrant: None,
            _marker: PhantomData::<H>,
        }
    }

    /// Number of buffered emigrants (0 or 1).
    pub fn len(&self) -> usize {
        self.emigrant.is_some() as usize
    }

    /// `true` iff no emigrant is buffered.
    pub fn is_empty(&self) -> bool {
        self.emigrant.is_none()
    }

    /// Remove and return the buffered emigrant, if any.
    pub fn take(&mut self) -> Option<(u32, MigratingLineage)> {
        self.emigrant.take()
    }
}
// Name resolution

// NOTE(review): `core`, `ast`, `matchers`, `scopes`, `typeinf`, `util` and
// `cargo` are crate-local modules (2015-edition paths), not external crates.
use {core, ast, matchers, scopes, typeinf};
use core::SearchType::{self, ExactMatch, StartsWith};
use core::{Match, Src, Session, Coordinate, SessionExt, Ty, Point};
use core::MatchType::{Module, Function, Struct, Enum, FnArg, Trait, StructField,
                      Impl, TraitImpl, MatchArm, Builtin};
use core::Namespace;
use util::{self, closure_valid_arg_scope, symbol_matches, txt_matches, find_ident_end, get_rust_src_path};
use matchers::find_doc;
use cargo;
use std::path::{Path, PathBuf};
use std::{self, vec, iter};
use matchers::PendingImports;

lazy_static! {
    // Resolved once on first use; panics if the rust source path cannot be found.
    pub static ref RUST_SRC_PATH: PathBuf = get_rust_src_path().unwrap();
}

/// Find fields of the struct behind `structmatch` whose names match
/// `searchstr` (per `search_type`), returning one `StructField` match each.
fn search_struct_fields(searchstr: &str, structmatch: &Match,
                        search_type: SearchType, session: &Session) -> vec::IntoIter<Match> {
    let src = session.load_file(&structmatch.filepath);
    let struct_start = scopes::expect_stmt_start(src.as_src(), structmatch.point);
    // Slice covering the struct body only.
    let structsrc = scopes::end_of_next_scope(&src[struct_start..]);

    let fields = ast::parse_struct_fields(structsrc.to_owned(),
                                          core::Scope::from_match(structmatch));

    let mut out = Vec::new();

    for (field, field_point, ty) in fields.into_iter() {
        if symbol_matches(search_type, searchstr, &field) {
            // Use the field's type as context when known, else the field name.
            let contextstr = if let Some(t) = ty {
                t.to_string()
            } else {
                field.clone()
            };
            out.push(Match {
                matchstr: field,
                filepath: structmatch.filepath.clone(),
                point: field_point + struct_start,
                coords: None,
                local: structmatch.local,
                mtype: StructField,
                contextstr: contextstr,
                generic_args: Vec::new(),
                generic_types: Vec::new(),
                docs: find_doc(structsrc, field_point),
            });
        }
    }
    out.into_iter()
}

/// Search all `impl` blocks (plain and generic) for `match_request`'s type
/// for methods matching `fieldsearchstr`. Also follows `Deref` impls and has
/// a special case mapping `Iterator::into_iter` to libstd's traits.rs.
pub fn search_for_impl_methods(match_request: &Match,
                               fieldsearchstr: &str, point: Point,
                               fpath: &Path, local: bool,
                               search_type: SearchType,
                               session: &Session) -> vec::IntoIter<Match> {

    let implsearchstr: &str = &match_request.matchstr;

    debug!("searching for impl methods |{:?}| |{}| {:?}", match_request, fieldsearchstr, fpath.display());

    let mut out = Vec::new();

    for m in search_for_impls(point, implsearchstr, fpath, local, true, session, &PendingImports::empty()) {
        debug!("found impl!! |{:?}| looking for methods", m);

        // Methods reachable through a `Deref` target also apply.
        if m.matchstr == "Deref" {
            out.extend(search_for_deref_matches(&m, match_request, fieldsearchstr, fpath, session));
        }

        let src = session.load_file(&m.filepath);

        // find the opening brace and skip to it.
        src[m.point..].find('{').map(|n| {
            let point = m.point + n + 1;
            for m in search_scope_for_methods(point, src.as_src(), fieldsearchstr, &m.filepath, search_type) {
                out.push(m);
            }
        });

        for gen_m in search_for_generic_impls(m.point, &m.matchstr, match_request, &m.filepath, session) {
            debug!("found generic impl!! {:?}", gen_m);
            let src = session.load_file(&gen_m.filepath);
            // find the opening brace and skip to it.
            src[gen_m.point..].find('{').map(|n| {
                let point = gen_m.point + n + 1;
                for gen_method in search_generic_impl_scope_for_methods(point, src.as_src(), fieldsearchstr, &gen_m, search_type) {
                    out.push(gen_method);
                }
            });
        }

        // Special case: `into_iter` on `Iterator` lives in traits.rs in the
        // rust source tree, so redirect the generic-impl search there.
        if m.matchstr == "Iterator" && fieldsearchstr == "into_iter" {
            let mut m_copy = m.clone();
            if let Ok(mut m_filestring) = m_copy.filepath.into_os_string().into_string() {
                m_filestring = m_filestring.replace("iterator.rs", "traits.rs");
                m_copy.filepath = PathBuf::from(&m_filestring);
                for m in search_for_generic_impls(m_copy.point, &m_copy.matchstr, match_request, &m_copy.filepath, session) {
                    debug!("found generic impl!! {:?}", m);
                    let src = session.load_file(&m.filepath);
                    // find the opening brace and skip to it.
                    src[m.point..].find('{').map(|n| {
                        let point = m.point + n + 1;
                        for m in search_generic_impl_scope_for_methods(point, src.as_src(), fieldsearchstr, &m, search_type) {
                            out.push(m);
                        }
                    });
                }
            }
        }
    };
    out.into_iter()
}

/// Scan the scope starting at `point` for `fn` items whose first parameter
/// is `self` (i.e. methods) and whose name matches `searchstr`.
fn search_scope_for_methods(point: Point, src: Src, searchstr: &str, filepath: &Path,
                            search_type: SearchType) -> vec::IntoIter<Match> {
    debug!("searching scope for methods {} |{}| {:?}", point, searchstr, filepath.display());

    let scopesrc = src.from(point);
    let mut out = Vec::new();
    for (blobstart,blobend) in scopesrc.iter_stmts() {
        let blob = &scopesrc[blobstart..blobend];
        // Only look at the signature: everything before the body/terminator.
        blob.find(|c| {c == '{' || c == ';'}).map(|n| {
            let signature = blob[..n].trim_right();

            if txt_matches(search_type, &format!("fn {}", searchstr), signature)
                && typeinf::first_param_is_self(blob) {
                debug!("found a method starting |{}| |{}|", searchstr, blob);
                // TODO: parse this properly
                let start = blob.find(&format!("fn {}", searchstr)).unwrap() + 3;
                let end = find_ident_end(blob, start);
                let l = &blob[start..end];
                // TODO: make a better context string for functions
                let m = Match {
                           matchstr: l.to_owned(),
                           filepath: filepath.to_path_buf(),
                           point: point + blobstart + start,
                           coords: None,
                           local: true,
                           mtype: Function,
                           contextstr: signature.to_owned(),
                           generic_args: Vec::new(),
                           generic_types: Vec::new(),
                           docs: find_doc(&scopesrc, blobstart + start),
                };
                out.push(m);
            }
        });
    }
    out.into_iter()
}

/// Like `search_scope_for_methods`, but for methods found inside a generic
/// impl: matches additionally inherit the impl's generic args/types.
fn search_generic_impl_scope_for_methods(point: Point, src: Src, searchstr: &str,
                                         contextm: &Match, search_type: SearchType) -> vec::IntoIter<Match> {
    debug!("searching generic impl scope for methods {} |{}| {:?}", point, searchstr, contextm.filepath.display());

    let scopesrc = src.from(point);
    let mut out = Vec::new();
    for (blobstart,blobend) in scopesrc.iter_stmts() {
        let blob = &scopesrc[blobstart..blobend];
        // Only look at the signature: everything before the body/terminator.
        blob.find(|c| {c == '{' || c == ';'}).map(|n| {
            let signature = blob[..n].trim_right();

            if txt_matches(search_type, &format!("fn {}", searchstr), signature)
                && typeinf::first_param_is_self(blob)
            {
                debug!("found a method starting |{}| |{}|", searchstr, blob);
                // TODO: parse this properly
                let start = blob.find(&format!("fn {}", searchstr)).unwrap() + 3;
                let end = find_ident_end(blob, start);
                let l = &blob[start..end];
                // TODO: make a better context string for functions
                let m = Match {
                           matchstr: l.to_owned(),
                           filepath: contextm.filepath.clone(),
                           point: point + blobstart + start,
                           coords: None,
                           local: true,
                           mtype: Function,
                           contextstr: signature.to_owned(),
                           generic_args: contextm.generic_args.clone(),  // Attach impl generic args
                           generic_types: contextm.generic_types.clone(), // Attach impl generic types
                           docs: find_doc(&scopesrc, blobstart + start),
                };
                out.push(m);
            }
        });
    }
    out.into_iter()
}

/// Look for static trait functions. This fn doesn't search for _method_ declarations
/// or implementations as `search_scope_for_methods` already handles that.
fn search_scope_for_static_trait_fns(point: Point, src: Src, searchstr: &str, filepath: &Path,
                                     search_type: SearchType) -> vec::IntoIter<Match> {
    debug!("searching scope for trait fn declarations {} |{}| {:?}", point, searchstr, filepath.display());

    let scopesrc = src.from(point);
    let mut out = Vec::new();
    for (blobstart,blobend) in scopesrc.iter_stmts() {
        let blob = &scopesrc[blobstart..blobend];
        // Only look at the signature: everything before the body/terminator.
        blob.find(|c| c == '{' || c == ';').map(|n| {
            let signature = blob[..n].trim_right();

            if txt_matches(search_type, &format!("fn {}", searchstr), signature)
                // filtering out methods here prevents duplicate results with
                // `search_scope_for_methods`
                && !typeinf::first_param_is_self(blob)
            {
                debug!("found a method starting |{}| |{}|", searchstr, blob);
                // TODO: parse this properly
                let start = blob.find(&format!("fn {}", searchstr)).unwrap() + 3;
                let end = find_ident_end(blob, start);
                let l = &blob[start..end];
                // TODO: make a better context string for functions
                let m = Match {
                           matchstr: l.to_owned(),
                           filepath: filepath.to_path_buf(),
                           point: point + blobstart + start,
                           coords: None,
                           local: true,
                           mtype: Function,
                           contextstr: signature.to_owned(),
                           generic_args: Vec::new(),
                           generic_types: Vec::new(),
                           docs: find_doc(&scopesrc, blobstart + start),
                };
                out.push(m);
            }
        });
    }
    out.into_iter()
}

/// Find `impl` blocks for the type named `searchstr` in the scope around
/// `pos`. With `include_traits`, also resolves and returns the implemented
/// trait itself (with a special case extracting the `Deref` target type).
pub fn search_for_impls(pos: Point, searchstr: &str, filepath: &Path, local: bool,
                        include_traits: bool, session: &Session,
                        pending_imports: &PendingImports) -> vec::IntoIter<Match> {
    debug!("search_for_impls {}, {}, {:?}", pos, searchstr, filepath.display());
    let s = session.load_file(filepath);
    let scope_start = scopes::scope_start(s.as_src(), pos);
    let src = s.from(scope_start);

    let mut out = Vec::new();
    for (start, end) in src.iter_stmts() {
        let blob = &src[start..end];

        if blob.starts_with("impl") {
            blob.find('{').map(|n| {
                // The impl header plus its opening brace.
                let ref decl = blob[..n+1];
                if decl.contains('!') {
                    // Guard against macros
                    debug!("impl was probably a macro: {} {}", filepath.display(), start);
                    return;
                }
                // Close the header so it parses as a complete (empty) impl.
                let mut decl = decl.to_owned();
                decl.push_str("}");
                if txt_matches(ExactMatch, searchstr, &decl) {
                    debug!("impl decl {}", decl);
                    let implres = ast::parse_impl(decl);
                    let is_trait_impl = implres.trait_path.is_some();
                    let mtype = if is_trait_impl { TraitImpl } else { Impl };

                    implres.name_path.map(|name_path| {
                        name_path.segments.last().map(|name| {
                            if symbol_matches(ExactMatch, searchstr, &name.name) {
                                let m = Match {
                                           matchstr: name.name.clone(),
                                           filepath: filepath.to_path_buf(),
                                           // + 5 to find the type name within the impl (pub impl)
                                           point: scope_start + start + 5,
                                           coords: None,
                                           // items in trait impls have no "pub" but are
                                           // still accessible from other modules
                                           local: local || is_trait_impl,
                                           mtype: mtype,
                                           contextstr: "".into(),
                                           generic_args: Vec::new(),
                                           generic_types: Vec::new(),
                                           docs: String::new(),
                                };
                                out.push(m);
                            }
                        });
                    });

                    // find trait
                    if include_traits && is_trait_impl {
                        let trait_path = implres.trait_path.unwrap();
                        let mut m = resolve_path(&trait_path,
                                                 filepath, scope_start + start, ExactMatch, Namespace::Type,
                                                 session, pending_imports).nth(0);
                        debug!("found trait |{:?}| {:?}", trait_path, m);

                        if let Some(ref mut m) = m {
                            // Store a `Deref` impl's target type (the `type
                            // Target = X;` right-hand side) in generic_args.
                            if m.matchstr == "Deref" {
                                let impl_block = &blob[n..];
                                if let Some(pos) = impl_block.find('=') {
                                    let deref_type_start = n + pos + 1;
                                    if let Some(pos) = blob[deref_type_start..].find(';') {
                                        let deref_type_end = deref_type_start + pos;
                                        let deref_type = blob[deref_type_start..deref_type_end].trim();
                                        debug!("Deref to {} found", deref_type);
                                        m.generic_args = vec![deref_type.to_owned()];
                                    };
                                };
                            }
                        }
                        m.map(|m| out.push(m));
                    }
                }
            });
        }
    }
    out.into_iter()
}

/// Find blanket/generic trait impls (`impl<T: Trait> ... for T`) relevant to
/// `searchstr`, pairing each with the concrete context type `contextm`.
pub fn search_for_generic_impls(pos: Point, searchstr: &str, contextm: &Match, filepath: &Path,
                                session: &Session) -> vec::IntoIter<Match> {
    debug!("search_for_generic_impls {}, {}, {:?}", pos, searchstr, filepath.display());
    let s = session.load_file(filepath);
    let scope_start = scopes::scope_start(s.as_src(), pos);
    let src = s.from(scope_start);

    let mut out = Vec::new();
    for (start, end) in src.iter_stmts() {
        let blob = &src[start..end];

        if blob.starts_with("impl") {
            blob.find('{').map(|n| {
                // The impl header plus its opening brace.
                let ref decl = blob[..n+1];
                if decl.contains('!') {
                    // Guard against macros
                    debug!("impl was probably a macro: {} {}", filepath.display(), start);
                    return;
                }
                // Close the header so it parses as a complete (empty) impl.
                let mut decl = decl.to_owned();
                decl.push_str("}");
                let generics = ast::parse_generics(decl.clone());
                let implres = ast::parse_impl(decl.clone());
                if let (Some(name_path), Some(trait_path)) = (implres.name_path, implres.trait_path) {
                    if let (Some(name), Some(trait_name)) = (name_path.segments.last(), trait_path.segments.last()) {
                        // Match impls of the shape `impl<T: searchstr> Trait for T`.
                        for gen_arg in generics.generic_args {
                            if symbol_matches(ExactMatch, &gen_arg.name, &name.name)
                                && gen_arg.bounds.len() == 1
                                && gen_arg.bounds[0] == searchstr {
                                debug!("generic impl decl {}", decl);
                                let trait_pos = blob.find(&trait_name.name).unwrap();
                                let self_path = core::Path::from_vec(false, vec![&contextm.matchstr]);
                                let self_pathsearch = core::PathSearch {
                                    path: self_path,
                                    filepath: contextm.filepath.clone(),
                                    point: contextm.point
                                };
                                let m = Match {
                                           matchstr: trait_name.name.clone(),
                                           filepath: filepath.to_path_buf(),
                                           point: scope_start + start + trait_pos,
                                           coords: None,
                                           local: true,
                                           mtype: TraitImpl,
                                           contextstr: "".into(),
                                           generic_args: vec![gen_arg.name],
                                           generic_types: vec![self_pathsearch],
                                           docs: String::new(),
                                };
                                debug!("Found a trait! {:?}", m);
                                out.push(m);
                            }
                        }
                    }
                }
            });
        }
    }
    out.into_iter()
}

// scope headers include fn decls, if let, while let etc..
/// Search the *header* of the scope opening at `scopestart` (fn parameters,
/// `if let`/`while let`/`for` patterns, match-arm patterns, closure args)
/// for bindings that match `searchstr`.
fn search_scope_headers(point: Point, scopestart: Point, msrc: Src, searchstr: &str,
                        filepath: &Path, search_type: SearchType) -> vec::IntoIter<Match> {
    debug!("search_scope_headers for |{}| pt: {}", searchstr, scopestart);

    if let Some(stmtstart) = scopes::find_stmt_start(msrc, scopestart) {
        // Text between the start of the statement and the opening brace.
        let preblock = &msrc[stmtstart..scopestart];
        debug!("search_scope_headers preblock is |{}|", preblock);

        if preblock_is_fn(preblock) {
            return search_fn_args(stmtstart, scopestart, &msrc, searchstr, filepath, search_type, true);

        // 'if let' can be an expression, so might not be at the start of the stmt
        } else if let Some(n) = preblock.find("if let") {
            let ifletstart = stmtstart + n;
            // Append "}" so the snippet parses as a complete block.
            let src = msrc[ifletstart..scopestart+1].to_owned() + "}";
            if txt_matches(search_type, searchstr, &src) {
                let mut out = matchers::match_if_let(&src, 0, src.len(), searchstr, filepath, search_type, true);
                for m in &mut out {
                    m.point += ifletstart;
                }
                return out.into_iter();
            }
        } else if preblock.starts_with("while let") {
            // Append "}" so the snippet parses as a complete block.
            let src = msrc[stmtstart..scopestart+1].to_owned() + "}";
            if txt_matches(search_type, searchstr, &src) {
                let mut out = matchers::match_while_let(&src, 0, src.len(), searchstr, filepath, search_type, true);
                for m in &mut out {
                    m.point += stmtstart;
                }
                return out.into_iter();
            }
        } else if preblock.starts_with("for ") {
            // Append "}" so the snippet parses as a complete block.
            let src = msrc[stmtstart..scopestart+1].to_owned() + "}";
            if txt_matches(search_type, searchstr, &msrc[..scopestart]) {
                let mut out = matchers::match_for(&src, 0, src.len(), searchstr, filepath, search_type, true);
                for m in &mut out {
                    m.point += stmtstart;
                }
                return out.into_iter();
            }
        } else if let Some(n) = preblock.rfind("match ") {
            // TODO: this code is crufty. refactor me!
            let matchstart = stmtstart + n;
            let matchstmt = typeinf::get_first_stmt(msrc.from(matchstart));
            // The definition could be in the match LHS arms. Try to find this
            let masked_matchstmt = mask_matchstmt(&matchstmt, scopestart + 1 - matchstart);
            debug!("found match stmt, masked is len {} |{}|",
                   masked_matchstmt.len(), masked_matchstmt);

            // Locate the match arm LHS by finding the => just before point and then backtracking
            // be sure to be on the right side of the ... => ... arm
            let arm = match masked_matchstmt[..point-matchstart].rfind("=>") {
                None => // we are in the first arm enum
                    return Vec::new().into_iter(),
                Some(arm) => {
                    // be sure not to be in the next arm enum
                    if let Some(next_arm) = masked_matchstmt[arm+2..].find("=>") {
                        let enum_start = scopes::get_start_of_pattern(&masked_matchstmt, arm+next_arm+1);
                        if point > matchstart+enum_start { return Vec::new().into_iter(); }
                    }
                    arm
                }
            };

            debug!("PHIL matched arm rhs is |{}|", &masked_matchstmt[arm..]);

            let lhs_start = scopes::get_start_of_pattern(&msrc, matchstart + arm);
            let lhs = &msrc[lhs_start..matchstart + arm];

            // Now create a pretend match expression with just the one match arm in it
            let faux_prefix_size = scopestart - matchstart + 1;
            let fauxmatchstmt = format!("{}{{{} => () }};", &msrc[matchstart..scopestart], lhs);

            debug!("PHIL arm lhs is |{}|", lhs);
            debug!("PHIL arm fauxmatchstmt is |{}|, {}", fauxmatchstmt, faux_prefix_size);

            let mut out = Vec::new();
            for (start,end) in ast::parse_pat_idents(fauxmatchstmt) {
                // Translate positions in the faux stmt back into `msrc`.
                let (start,end) = (lhs_start + start - faux_prefix_size,
                                   lhs_start + end - faux_prefix_size);
                let s = &msrc[start..end];

                if symbol_matches(search_type, searchstr, s) {
                    out.push(Match {
                                    matchstr: s.to_owned(),
                                    filepath: filepath.to_path_buf(),
                                    point: start,
                                    coords: None,
                                    local: true,
                                    mtype: MatchArm,
                                    contextstr: lhs.trim().to_owned(),
                                    generic_args: Vec::new(),
                                    generic_types: Vec::new(),
                                    docs: String::new(),
                    });
                    if let SearchType::ExactMatch = search_type {
                        break;
                    }
                }
            }
            return out.into_iter();
        } else if let Some(vec) = search_closure_args(searchstr, preblock, stmtstart, filepath, search_type) {
            return vec.into_iter();
        }
    }

    Vec::new().into_iter()
}

/// Checks if a scope preblock is a function declaration.
///
/// TODO: Handle `extern` functions
fn preblock_is_fn(preblock: &str) -> bool {
    // Perform simple checks
    if preblock.starts_with("fn") || preblock.starts_with("pub fn") || preblock.starts_with("const fn") {
        return true;
    }

    // Remove visibility declarations, such as restricted visibility
    let trimmed = if preblock.starts_with("pub") {
        util::trim_visibility(preblock)
    } else {
        preblock
    };

    trimmed.starts_with("fn") || trimmed.starts_with("const fn")
}

#[test]
fn is_fn() {
    assert!(preblock_is_fn("pub fn bar()"));
    assert!(preblock_is_fn("fn foo()"));
    assert!(preblock_is_fn("const fn baz()"));
    assert!(preblock_is_fn("pub(crate) fn bar()"));
    assert!(preblock_is_fn("pub(in foo::bar) fn bar()"));
}

/// Mask the bodies of nested sub-scopes after `innerscope_start` so string
/// searches don't hit identifiers inside inner blocks.
fn mask_matchstmt(matchstmt_src: &str, innerscope_start: Point) -> String {
    let s = scopes::mask_sub_scopes(&matchstmt_src[innerscope_start..]);
    matchstmt_src[..innerscope_start].to_owned() + &s
}

#[test]
fn does_it() {
    let src = " match foo { Some(a) => { something } }";
    let res = mask_matchstmt(src, src.find('{').unwrap()+1);
    debug!("PHIL res is |{}|",res);
}

/// Parse the fn signature `msrc[fnstart..open_brace_pos]` and return its
/// arguments that match `searchstr` as `FnArg` matches.
fn search_fn_args(fnstart: Point, open_brace_pos: Point, msrc: &str,
                  searchstr: &str, filepath: &Path,
                  search_type: SearchType, local: bool) -> vec::IntoIter<Match> {
    let mut out = Vec::new();
    let mut fndecl = String::new();
    // wrap in 'impl blah {}' so that methods get parsed correctly too
    fndecl.push_str("impl blah {");
    let impl_header_len = fndecl.len();
    fndecl.push_str(&msrc[fnstart..(open_brace_pos+1)]);
    fndecl.push_str("}}");
    debug!("search_fn_args: found start of fn!! {} |{}| {}", fnstart, fndecl, searchstr);
    if txt_matches(search_type, searchstr, &fndecl) {
        let coords = ast::parse_fn_args(fndecl.clone());

        for (start,end) in coords {
            let s = &fndecl[start..end];
            debug!("search_fn_args: arg str is |{}|", s);

            if symbol_matches(search_type, searchstr, s) {
                let m = Match {
                            matchstr: s.to_owned(),
                            filepath: filepath.to_path_buf(),
                            // Account for the fake "impl blah {" prefix.
                            point: fnstart + start - impl_header_len,
                            coords: None,
                            local: local,
                            mtype: FnArg,
                            contextstr: s.to_owned(),
                            generic_args: Vec::new(),
                            generic_types: Vec::new(),
                            docs: String::new(),
                };
                debug!("search_fn_args matched: {:?}", m);
                out.push(m);
            }
        }
    }
    out.into_iter()
}

#[test]
fn test_do_file_search() {
    let cache = core::FileCache::default();
    let session = Session::new(&cache);
    let mut matches = do_file_search("std", &Path::new("."), &session);
    assert!(matches.len() > 1);
    assert!(matches.any(|ma| ma.filepath.ends_with("src/libstd/lib.rs")));
}

/// Search the rust source path and `currentdir` for crate/module files whose
/// names match `searchstr` (lib<name>/lib.rs, <name>/mod.rs, <name>.rs, ...).
pub fn do_file_search(
    searchstr: &str,
    currentdir: &Path,
    session: &Session
) -> vec::IntoIter<Match> {
    debug!("do_file_search with search string \"{}\"", searchstr);
    let mut out = Vec::new();

    let srcpath = RUST_SRC_PATH.as_ref();
    debug!("do_file_search srcpath: {:?}", srcpath);
    let v = &[srcpath, currentdir][..];
    debug!("do_file_search v: {:?}", v);
    for srcpath in v {
        if let Ok(iter) = std::fs::read_dir(srcpath) {
            for fpath_buf in iter.filter_map(|res| res.ok().map(|entry| entry.path())) {
                // skip filenames that can't be decoded
                let fname = match fpath_buf.file_name().and_then(|n| n.to_str()) {
                    Some(fname) => fname,
                    None => continue,
                };
                // `lib<searchstr>...` directories: report the crate module.
                if fname.starts_with(&format!("lib{}", searchstr)) {
                    let filepath = fpath_buf.join("lib.rs");
                    if filepath.exists() || session.contains_file(&filepath) {
                        let m = Match {
                            // strip the "lib" prefix from the module name
                            matchstr: fname[3..].to_owned(),
                            filepath: filepath.to_path_buf(),
                            point: 0,
                            coords: Some(Coordinate { line: 1, column: 1 }),
                            local: false,
                            mtype: Module,
                            contextstr: fname[3..].to_owned(),
                            generic_args: Vec::new(),
                            generic_types: Vec::new(),
                            docs: String::new(),
                        };
                        out.push(m);
                    }
                }

                // `<searchstr>...` directories/files: try several layouts.
                if fname.starts_with(searchstr) {
                    for name in &[&format!("{}.rs", fname)[..], "mod.rs", "lib.rs"] {
                        let filepath = fpath_buf.join(name);
                        if filepath.exists() || session.contains_file(&filepath) {
                            let m = Match {
                                matchstr: fname.to_owned(),
                                filepath: filepath.to_path_buf(),
                                point: 0,
                                coords: Some(Coordinate { line: 1, column: 1 }),
                                local: false,
                                mtype: Module,
                                contextstr: filepath.to_str().unwrap().to_owned(),
                                generic_args: Vec::new(),
                                generic_types: Vec::new(),
                                docs: String::new(),
                            };
                            out.push(m);
                        }
                    }

                    // try just <name>.rs
                    if fname.ends_with(".rs")
                        && (fpath_buf.exists() || session.contains_file(&fpath_buf)) {
                        let m = Match {
                            // module name without the ".rs" extension
                            matchstr: fname[..(fname.len()-3)].to_owned(),
                            filepath: fpath_buf.clone(),
                            point: 0,
                            coords: Some(Coordinate { line: 1, column: 1 }),
                            local: false,
                            mtype: Module,
                            contextstr: fpath_buf.to_str().unwrap().to_owned(),
                            generic_args: Vec::new(),
                            generic_types: Vec::new(),
                            docs: String::new(),
                        };
                        out.push(m);
                    }
                }
            }
        }
    }
    out.into_iter()
}

/// Resolve `pathseg` starting from the crate root(s) of the crate that
/// contains `modfpath` (skipping `modfpath` itself).
pub fn search_crate_root(pathseg: &core::PathSegment, modfpath: &Path,
                         searchtype: SearchType, namespace: Namespace,
                         session: &Session,
                         pending_imports: &PendingImports) -> vec::IntoIter<Match> {
    debug!("search_crate_root |{:?}| {:?}", pathseg, modfpath.display());

    let crateroots = find_possible_crate_root_modules(modfpath.parent().unwrap(), session);
    let mut out = Vec::new();
    for crateroot in crateroots {
        if *modfpath == *crateroot {
            continue;
        }
        debug!("going to search for {:?} in crateroot {:?}", pathseg, crateroot.display());
        for m in resolve_name(pathseg, &crateroot, 0, searchtype, namespace, session, pending_imports) {
            out.push(m);
            if let ExactMatch = searchtype {
                break;
            }
        }
        // only the first candidate crate root is searched
        break
    }
    out.into_iter()
}

/// Walk up from `currentdir` looking for a `lib.rs`/`main.rs` crate root.
pub fn find_possible_crate_root_modules(currentdir: &Path, session: &Session) -> Vec<PathBuf> {
    let mut res = Vec::new();

    for root in &["lib.rs", "main.rs"] {
        let filepath = currentdir.join(root);
        if filepath.exists() || session.contains_file(&filepath) {
            res.push(filepath);
            return res;   // for now stop at the first match
        }
    }

    // recurse up the directory structure
    if let Some(parentdir) = currentdir.parent() {
        if parentdir != currentdir {
            // PD: this was using the vec.push_all() api, but that is now unstable
            res.extend(find_possible_crate_root_modules(parentdir, session).iter().cloned());
            return res;   // for now stop at the first match
        }
    }
    res
}

/// Search the scope that *opens at* `startpoint` (e.g. a `mod foo { ... }`
/// definition): skips past the first `{` and delegates to `search_scope`.
pub fn search_next_scope(mut startpoint: Point, pathseg: &core::PathSegment,
                         filepath:&Path, search_type: SearchType, local: bool,
                         namespace: Namespace, session: &Session,
                         pending_imports: &PendingImports) -> vec::IntoIter<Match> {
    let filesrc = session.load_file(filepath);
    if startpoint != 0 {
        // is a scope inside the file. Point should point to the definition
        // (e.g. mod blah {...}), so the actual scope is past the first open brace.
        let src = &filesrc[startpoint..];
        //debug!("search_next_scope src1 |{}|",src);
        // find the opening brace and skip to it.
        src.find('{').map(|n| {
            startpoint += n + 1;
        });
    }

    search_scope(startpoint, startpoint, filesrc.as_src(), pathseg, filepath, search_type, local, namespace, session, pending_imports)
}

/// Locate the file implementing crate `name`: first via cargo, then by
/// convention under the rust source path (lib<name>/lib.rs, <name>/lib.rs).
pub fn get_crate_file(name: &str, from_path: &Path, session: &Session) -> Option<PathBuf> {
    debug!("get_crate_file {}, {:?}", name, from_path);

    if let Some(p) = cargo::get_crate_file(name, from_path) {
        debug!("get_crate_file - found the crate file! {:?}", p);
        return Some(p);
    }

    let srcpath = &*RUST_SRC_PATH;
    {
        // try lib<name>/lib.rs, like in the rust source dir
        let cratelibname = format!("lib{}", name);
        let filepath = srcpath.join(cratelibname).join("lib.rs");
        if filepath.exists() || session.contains_file(&filepath) {
            return Some(filepath);
        }
    }
    {
        // try <name>/lib.rs
        let filepath = srcpath.join(name).join("lib.rs");
        if filepath.exists() || session.contains_file(&filepath) {
            return Some(filepath);
        }
    }
    None
}

/// Locate the file implementing submodule `name` below `parentdir`
/// (<name>.rs or <name>/mod.rs).
pub fn get_module_file(name: &str, parentdir: &Path, session: &Session) -> Option<PathBuf> {
    {
        // try just <name>.rs
        let filepath = parentdir.join(format!("{}.rs", name));
        if filepath.exists() || session.contains_file(&filepath) {
            return Some(filepath);
        }
    }
    {
        // try <name>/mod.rs
        let filepath = parentdir.join(name).join("mod.rs");
        if filepath.exists() || session.contains_file(&filepath) {
            return Some(filepath);
        }
    }
    None
}

/// Search a scope (from `start`) for items matching `pathseg.name`.
/// `let` bindings before `point` take precedence; `use` items are processed
/// last so same-named types defined in the scope shadow them.
pub fn search_scope(start: Point, point: Point, src: Src,
                    pathseg: &core::PathSegment,
                    filepath:&Path, search_type: SearchType, local: bool,
                    namespace: Namespace, session: &Session,
                    pending_imports: &PendingImports) -> vec::IntoIter<Match> {
    let searchstr = &pathseg.name;
    let mut out = Vec::new();

    debug!("searching scope {:?} start: {} point: {} '{}' {:?} {:?} local: {}, session: {:?}",
           namespace, start, point, searchstr, filepath.display(), search_type, local, session);

    let scopesrc = src.from(start);
    let mut skip_next_block = false;
    let mut delayed_single_imports = Vec::new();
    let mut delayed_glob_imports = Vec::new();
    let mut codeit = scopesrc.iter_stmts();
    let mut v = Vec::new();

    // collect up to point so we can search backwards for let bindings
    //  (these take precidence over local fn declarations etc..
    for (blobstart, blobend) in &mut codeit {
        // sometimes we need to skip blocks of code if the preceding attribute disables it
        //  (e.g. #[cfg(test)])
        if skip_next_block {
            skip_next_block = false;
            continue;
        }

        let blob = &scopesrc[blobstart..blobend];

        // for now skip stuff that's meant for testing.
        // Often the test
        // module hierarchy is incompatible with the non-test
        // hierarchy and we get into recursive loops
        if blob.starts_with("#[cfg(test)") {
            skip_next_block = true;
            continue;
        }

        v.push((blobstart,blobend));
        if blobstart > point {
            break;
        }
    }

    // search backwards from point for let bindings
    for &(blobstart, blobend) in v.iter().rev() {
        // only bindings that begin strictly before `point` are in scope
        if (start+blobend) >= point {
            continue;
        }

        for m in matchers::match_let(&src, start+blobstart,
                                     start+blobend,
                                     searchstr,
                                     filepath, search_type, local).into_iter() {
            out.push(m);
            if let ExactMatch = search_type {
                return out.into_iter();
            }
        }
    }

    // since we didn't find a `let` binding, now search from top of scope for items etc..
    let mut codeit = v.into_iter().chain(codeit);
    for (blobstart, blobend) in &mut codeit {
        // sometimes we need to skip blocks of code if the preceeding attribute disables it
        //  (e.g. #[cfg(test)])
        if skip_next_block {
            skip_next_block = false;
            continue;
        }

        let blob = &scopesrc[blobstart..blobend];

        // for now skip stuff that's meant for testing. Often the test
        // module hierarchy is incompatible with the non-test
        // hierarchy and we get into recursive loops
        if blob.starts_with("#[cfg(test)") {
            skip_next_block = true;
            continue;
        }

        let is_an_import = blob.starts_with("use") || blob.starts_with("pub use");

        if is_an_import {
            // A `use` item can import a value
            // with the same name as a "type" (type/module/etc.) in the same scope.
            // However, that type might appear after the `use`,
            // so we need to process the type first and the `use` later (if necessary).
            // If we didn't delay imports,
            // we'd try to resolve such a `use` item by recursing onto itself.

            // Optimisation: if the search string is not in the blob and it is not
            // a glob import, this cannot match so fail fast!
            let is_glob_import = blob.contains("::*");
            if !is_glob_import {
                if !blob.contains(searchstr.trim_right_matches('!')) {
                    continue;
                }
            }

            if is_glob_import {
                delayed_glob_imports.push((blobstart, blobend));
            } else {
                delayed_single_imports.push((blobstart, blobend));
            }

            continue;
        }

        if searchstr == "core" && blob.starts_with("#![no_std]") {
            debug!("Looking for core and found #![no_std], which implicitly imports it");
            get_crate_file("core", filepath, session).map(|cratepath| {
                let context = cratepath.to_str().unwrap().to_owned();
                out.push(Match { matchstr: "core".into(),
                                  filepath: cratepath,
                                  point: 0,
                                  coords: Some(Coordinate { line: 1, column: 1 }),
                                  local: false,
                                  mtype: Module,
                                  contextstr: context,
                                  generic_args: Vec::new(),
                                  generic_types: Vec::new(),
                                  docs: String::new(),
                });
            });
        }

        // Optimisation: if the search string is not in the blob,
        // this cannot match so fail fast!
        if !blob.contains(searchstr.trim_right_matches('!')) {
            continue;
        }

        // There's a good chance of a match. Run the matchers
        out.extend(run_matchers_on_blob(src, start+blobstart, start+blobend, searchstr,
                                        filepath, search_type, local, namespace, session, pending_imports));
        if let ExactMatch = search_type {
            if !out.is_empty() {
                return out.into_iter();
            }
        }
    }

    let delayed_import_len = delayed_single_imports.len() + delayed_glob_imports.len();

    if delayed_import_len > 0 {
        trace!("Searching {} delayed imports for `{}`", delayed_import_len, searchstr);
    }

    // Finally, process the imports that we skipped before.
    // Process single imports first, because they shadow glob imports.
    for (blobstart, blobend) in delayed_single_imports.into_iter().chain(delayed_glob_imports) {
        // There's a good chance of a match. Run the matchers
        for m in run_matchers_on_blob(src, start+blobstart, start+blobend, searchstr,
                                      filepath, search_type, local, namespace, session,
                                      pending_imports).into_iter() {
            out.push(m);
            if let ExactMatch = search_type {
                return out.into_iter();
            }
        }
    }

    // Closure arguments of the enclosing closure (if any) are also in scope.
    if let Some(vec) = search_closure_args(searchstr, &scopesrc[0..], start, filepath, search_type) {
        for mat in vec {
            out.push(mat)
        }

        if let ExactMatch = search_type {
            return out.into_iter();
        }
    }

    debug!("search_scope found matches {:?} {:?}", search_type, out);
    out.into_iter()
}

/// If `scope_src` begins with a closure's `|...|` argument list, return the
/// arguments within it that match `searchstr` as `FnArg` matches.
/// Returns `None` when there is no valid closure arg scope or no match.
fn search_closure_args(searchstr: &str, scope_src: &str, scope_src_pos: Point,
                       filepath: &Path, search_type: SearchType) -> Option<Vec<Match>> {
    if searchstr.is_empty() {
        return None;
    }

    trace!("Closure definition match is looking for `{}` in {} characters", searchstr, scope_src.len());

    if let Some((left_pipe, _, pipe_scope)) = closure_valid_arg_scope(scope_src) {
        debug!("search_closure_args found valid closure arg scope: {}", pipe_scope);

        if txt_matches(search_type, searchstr, pipe_scope) {
            // Add a fake body for parsing
            let closure_def = String::from(pipe_scope) + "{}";

            let coords = ast::parse_fn_args(closure_def.clone());

            let mut out: Vec<Match> = Vec::new();

            for (start,end) in coords {
                let s = &closure_def[start..end];

                if symbol_matches(search_type, searchstr, s) {
                    let m = Match {
                        matchstr: s.to_owned(),
                        filepath: filepath.to_path_buf(),
                        point: scope_src_pos + left_pipe + start,
                        coords: None,
                        local: true,
                        mtype: FnArg,
                        contextstr: pipe_scope.to_owned(),
                        generic_args: Vec::new(),
                        generic_types: Vec::new(),
                        docs: String::new(),
                    };
                    debug!("search_closure_args matched: {:?}", m);
                    out.push(m);
                }
            }

            return Some(out)
        }
    }

    None
}

/// Dispatch the per-namespace matchers over one statement blob.
fn run_matchers_on_blob(src: Src, start: Point, end: Point, searchstr: &str,
                        filepath: &Path, search_type: SearchType, local: bool,
                        namespace: Namespace, session: &Session,
                        pending_imports: &PendingImports) -> Vec<Match> {
    let mut out = Vec::new();
    match namespace {
        Namespace::Type =>
            for m in matchers::match_types(src,
start, end, searchstr, filepath, search_type, local, session, pending_imports) { out.push(m); if let ExactMatch = search_type { return out; } }, Namespace::Value => for m in matchers::match_values(src, start, end, searchstr, filepath, search_type, local) { out.push(m); if let ExactMatch = search_type { return out; } }, Namespace::Both => { for m in matchers::match_types(src, start, end, searchstr, filepath, search_type, local, session, pending_imports) { out.push(m); if let ExactMatch = search_type { return out; } } for m in matchers::match_values(src, start, end, searchstr, filepath, search_type, local) { out.push(m); if let ExactMatch = search_type { return out; } } } } out } fn search_local_scopes(pathseg: &core::PathSegment, filepath: &Path, msrc: Src, point: Point, search_type: SearchType, namespace: Namespace, session: &Session, pending_imports: &PendingImports) -> vec::IntoIter<Match> { debug!("search_local_scopes {:?} {:?} {} {:?} {:?}", pathseg, filepath.display(), point, search_type, namespace); if point == 0 { // search the whole file search_scope(0, 0, msrc, pathseg, filepath, search_type, true, namespace, session, pending_imports) } else { let mut out = Vec::new(); let mut start = point; // search each parent scope in turn while start > 0 { start = scopes::scope_start(msrc, start); for m in search_scope(start, point, msrc, pathseg, filepath, search_type, true, namespace, session, pending_imports) { out.push(m); if let ExactMatch = search_type { return out.into_iter(); } } if start == 0 { break; } start -= 1; let searchstr = &pathseg.name; // scope headers = fn decls, if let, match, etc.. 
for m in search_scope_headers(point, start, msrc, searchstr, filepath, search_type) { out.push(m); if let ExactMatch = search_type { return out.into_iter(); } } } out.into_iter() } } pub fn search_prelude_file(pathseg: &core::PathSegment, search_type: SearchType, namespace: Namespace, session: &Session, pending_imports: &PendingImports) -> vec::IntoIter<Match> { debug!("search_prelude file {:?} {:?} {:?}", pathseg, search_type, namespace); let mut out : Vec<Match> = Vec::new(); // find the prelude file from the search path and scan it let srcpath = &*RUST_SRC_PATH; let filepath = srcpath.join("libstd").join("prelude").join("v1.rs"); if filepath.exists() || session.contains_file(&filepath) { let msrc = session.load_file_and_mask_comments(&filepath); let is_local = true; for m in search_scope(0, 0, msrc.as_src(), pathseg, &filepath, search_type, is_local, namespace, session, pending_imports) { out.push(m); } } out.into_iter() } pub fn resolve_path_with_str(path: &core::Path, filepath: &Path, pos: Point, search_type: SearchType, namespace: Namespace, session: &Session) -> vec::IntoIter<Match> { debug!("resolve_path_with_str {:?}", path); let mut out = Vec::new(); // HACK if path.segments.len() == 1 && path.segments[0].name == "str" { debug!("{:?} == {:?}", path.segments[0], "str"); if let Some(module) = resolve_path(&core::Path::from_vec(true, vec!["std","str"]), filepath, pos, search_type, namespace, session, &PendingImports::empty()).nth(0) { out.push(Match { matchstr: "str".into(), filepath: module.filepath, point: 0, coords: Some(Coordinate { line: 1, column: 1 }), local: false, mtype: Builtin, contextstr: "str".into(), generic_args: vec![], generic_types: vec![], docs: String::new(), }); } } else { for m in resolve_path(path, filepath, pos, search_type, namespace, session, &PendingImports::empty()) { out.push(m); if let ExactMatch = search_type { break; } } } out.into_iter() } #[derive(PartialEq,Debug)] pub struct Search { path: Vec<String>, filepath: String, 
pos: Point } /// Attempt to resolve a name which occurs in a given file. pub fn resolve_name(pathseg: &core::PathSegment, filepath: &Path, pos: Point, search_type: SearchType, namespace: Namespace, session: &Session, pending_imports: &PendingImports) -> vec::IntoIter<Match> { let mut out = Vec::new(); let searchstr = &pathseg.name; debug!("resolve_name {} {:?} {} {:?} {:?}", searchstr, filepath.display(), pos, search_type, namespace); let msrc = session.load_file(filepath); let is_exact_match = match search_type { ExactMatch => true, StartsWith => false }; if is_exact_match && &searchstr[..] == "Self" { if let Some(Ty::Match(m)) = typeinf::get_type_of_self(pos, filepath, true, msrc.as_src(), session) { out.push(m.clone()); } } if (is_exact_match && &searchstr[..] == "std") || (!is_exact_match && "std".starts_with(searchstr)) { get_crate_file("std", filepath, session).map(|cratepath| { let context = cratepath.to_str().unwrap().to_owned(); out.push(Match { matchstr: "std".into(), filepath: cratepath, point: 0, coords: Some(Coordinate { line: 1, column: 1 }), local: false, mtype: Module, contextstr: context, generic_args: Vec::new(), generic_types: Vec::new(), docs: String::new(), }); }); if let ExactMatch = search_type { if !out.is_empty() { return out.into_iter(); } } } for m in search_local_scopes(pathseg, filepath, msrc.as_src(), pos, search_type, namespace, session, pending_imports) { out.push(m); if let ExactMatch = search_type { return out.into_iter(); } } for m in search_crate_root(pathseg, filepath, search_type, namespace, session, pending_imports) { out.push(m); if let ExactMatch = search_type { return out.into_iter(); } } for m in search_prelude_file(pathseg, search_type, namespace, session, pending_imports) { out.push(m); if let ExactMatch = search_type { return out.into_iter(); } } // filesearch. Used to complete e.g. 
extern crate blah or mod foo if let StartsWith = search_type { for m in do_file_search(searchstr, filepath.parent().unwrap(), session) { out.push(m); } } out.into_iter() } // Get the scope corresponding to super:: pub fn get_super_scope(filepath: &Path, pos: Point, session: &Session, pending_imports: &PendingImports) -> Option<core::Scope> { let msrc = session.load_file_and_mask_comments(filepath); let mut path = scopes::get_local_module_path(msrc.as_src(), pos); debug!("get_super_scope: path: {:?} filepath: {:?} {} {:?}", path, filepath, pos, session); if path.is_empty() { let moduledir = if filepath.ends_with("mod.rs") || filepath.ends_with("lib.rs") { // Need to go up to directory above // TODO(PD): fix: will crash if mod.rs is in the root fs directory filepath.parent().unwrap().parent().unwrap() } else { // module is in current directory filepath.parent().unwrap() }; for filename in &[ "mod.rs", "lib.rs" ] { let f_path = moduledir.join(&filename); if f_path.exists() || session.contains_file(&f_path) { return Some(core::Scope{ filepath: f_path, point: 0 }) } } None } else if path.len() == 1 { Some(core::Scope{ filepath: filepath.to_path_buf(), point: 0 }) } else { path.pop(); let path = core::Path::from_svec(false, path); debug!("get_super_scope looking for local scope {:?}", path); resolve_path(&path, filepath, 0, SearchType::ExactMatch, Namespace::Type, session, pending_imports).nth(0) .and_then(|m| msrc[m.point..].find('{') .map(|p| core::Scope{ filepath: filepath.to_path_buf(), point:m.point + p + 1 })) } } pub fn resolve_path(path: &core::Path, filepath: &Path, pos: Point, search_type: SearchType, namespace: Namespace, session: &Session, pending_imports: &PendingImports) -> vec::IntoIter<Match> { debug!("resolve_path {:?} {:?} {} {:?}", path, filepath.display(), pos, search_type); let len = path.segments.len(); if len == 1 { let pathseg = &path.segments[0]; resolve_name(pathseg, filepath, pos, search_type, namespace, session, pending_imports) } else if len 
!= 0 { if path.segments[0].name == "self" { // just remove self let mut newpath: core::Path = path.clone(); newpath.segments.remove(0); return resolve_path(&newpath, filepath, pos, search_type, namespace, session, pending_imports); } if path.segments[0].name == "super" { if let Some(scope) = get_super_scope(filepath, pos, session, pending_imports) { debug!("PHIL super scope is {:?}", scope); let mut newpath: core::Path = path.clone(); newpath.segments.remove(0); return resolve_path(&newpath, &scope.filepath, scope.point, search_type, namespace, session, pending_imports); } else { // can't find super scope. Return no matches debug!("can't resolve path {:?}, returning no matches", path); return Vec::new().into_iter(); } } let mut out = Vec::new(); let mut parent_path: core::Path = path.clone(); parent_path.segments.remove(len-1); let context = resolve_path(&parent_path, filepath, pos, ExactMatch, Namespace::Type, session, pending_imports).nth(0); context.map(|m| { match m.mtype { Module => { let mut searchstr: &str = &path.segments[len-1].name; if let Some(i) = searchstr.rfind(',') { searchstr = searchstr[i+1..].trim(); } if searchstr.starts_with('{') { searchstr = &searchstr[1..]; } let pathseg = core::PathSegment{name: searchstr.to_owned(), types: Vec::new()}; debug!("searching a module '{}' for {} (whole path: {:?})", m.matchstr, pathseg.name, path); for m in search_next_scope(m.point, &pathseg, &m.filepath, search_type, false, namespace, session, pending_imports) { out.push(m); } } Enum => { let pathseg = &path.segments[len-1]; debug!("searching an enum '{}' (whole path: {:?}) searchtype: {:?}", m.matchstr, path, search_type); let filesrc = session.load_file(&m.filepath); let scopestart = scopes::find_stmt_start(filesrc.as_src(), m.point).unwrap(); let scopesrc = filesrc.from(scopestart); scopesrc.iter_stmts().nth(0).map(|(blobstart,blobend)| { for m in matchers::match_enum_variants(&filesrc, scopestart+blobstart, scopestart+blobend, &pathseg.name, &m.filepath, 
search_type, true) { debug!("Found enum variant: {}", m.matchstr); out.push(m); } }); // TODO remove code duplication with the struct branch below. The two implementations are identical. for m_impl in search_for_impls(m.point, &m.matchstr, &m.filepath, m.local, true, session, pending_imports) { debug!("found impl!! {:?}", m_impl); let pathseg = &path.segments[len-1]; let src = session.load_file(&m_impl.filepath); // find the opening brace and skip to it. src[m_impl.point..].find('{').map(|n| { let point = m_impl.point + n + 1; for m_impl in search_scope(point, point, src.as_src(), pathseg, &m_impl.filepath, search_type, m_impl.local, namespace, session, pending_imports) { out.push(m_impl); } }); for m_gen in search_for_generic_impls(m_impl.point, &m_impl.matchstr, &m, &m_impl.filepath, session) { debug!("found generic impl!! {:?}", m_gen); let pathseg = &path.segments[len-1]; let src = session.load_file(&m_gen.filepath); // find the opening brace and skip to it. src[m_gen.point..].find('{').map(|n| { let point = m_gen.point + n + 1; for m_gen in search_scope(point, point, src.as_src(), pathseg, &m_gen.filepath, search_type, m_gen.local, namespace, session, pending_imports) { out.push(m_gen); } }); } }; } Struct => { debug!("found a struct. Now need to look for impl"); for m_impl in search_for_impls(m.point, &m.matchstr, &m.filepath, m.local, true, session, pending_imports) { debug!("found impl!! {:?}", m_impl); let pathseg = &path.segments[len-1]; let src = session.load_file(&m_impl.filepath); // find the opening brace and skip to it. src[m_impl.point..].find('{').map(|n| { let point = m_impl.point + n + 1; for m_impl in search_scope(point, point, src.as_src(), pathseg, &m_impl.filepath, search_type, m_impl.local, namespace, session, pending_imports) { out.push(m_impl); } }); for m_gen in search_for_generic_impls(m_impl.point, &m_impl.matchstr, &m, &m_impl.filepath, session) { debug!("found generic impl!! 
{:?}", m_gen); let pathseg = &path.segments[len-1]; let src = session.load_file(&m_gen.filepath); // find the opening brace and skip to it. src[m_gen.point..].find('{').map(|n| { let point = m_gen.point + n + 1; for m_gen in search_scope(point, point, src.as_src(), pathseg, &m_gen.filepath, search_type, m_gen.local, namespace, session, pending_imports) { out.push(m_gen); } }); } }; } _ => () } }); debug!("resolve_path returning {:?}", out); out.into_iter() } else { // TODO: Should this better be an assertion ? Why do we have a core::Path // with empty segments in the first place ? Vec::new().into_iter() } } pub fn resolve_method(point: Point, msrc: Src, searchstr: &str, filepath: &Path, search_type: SearchType, session: &Session, pending_imports: &PendingImports) -> Vec<Match> { let scopestart = scopes::scope_start(msrc, point); debug!("resolve_method for |{}| pt: {} ({:?}); scopestart: {} ({:?})", searchstr, point, msrc.src.point_to_coords(point), scopestart, msrc.src.point_to_coords(scopestart)); if let Some(stmtstart) = scopes::find_stmt_start(msrc, (scopestart - 1)) { let preblock = &msrc[stmtstart..scopestart]; debug!("search_scope_headers preblock is |{}|", preblock); if preblock.starts_with("impl") { if let Some(n) = preblock.find(" for ") { let start = scopes::get_start_of_search_expr(preblock, n); let expr = &preblock[start..n]; debug!("found impl of trait : expr is |{}|", expr); let path = core::Path::from_vec(false, expr.split("::").collect::<Vec<_>>()); let m = resolve_path(&path, filepath, stmtstart + n - 1, SearchType::ExactMatch, Namespace::Both, session, pending_imports) .filter(|m| m.mtype == Trait) .nth(0); if let Some(m) = m { debug!("found trait : match is |{:?}|", m); let mut out = Vec::new(); let src = session.load_file(&m.filepath); src[m.point..].find('{').map(|n| { let point = m.point + n + 1; for m in search_scope_for_static_trait_fns(point, src.as_src(), searchstr, &m.filepath, search_type) { out.push(m); } for m in 
search_scope_for_methods(point, src.as_src(), searchstr, &m.filepath, search_type) { out.push(m); } }); trace!( "Found {} methods matching `{}` for trait `{}`", out.len(), searchstr, m.matchstr); return out; } } } } Vec::new() } pub fn do_external_search(path: &[&str], filepath: &Path, pos: Point, search_type: SearchType, namespace: Namespace, session: &Session) -> vec::IntoIter<Match> { debug!("do_external_search path {:?} {:?}", path, filepath.display()); let mut out = Vec::new(); if path.len() == 1 { let searchstr = path[0]; // hack for now let pathseg = core::PathSegment{name: searchstr.to_owned(), types: Vec::new()}; for m in search_next_scope(pos, &pathseg, filepath, search_type, false, namespace, session, &PendingImports::empty()) { out.push(m); } get_module_file(searchstr, filepath.parent().unwrap(), session).map(|path| { let context = path.to_str().unwrap().to_owned(); out.push(Match { matchstr: searchstr.to_owned(), filepath: path, point: 0, coords: Some(Coordinate { line: 1, column: 1 }), local: false, mtype: Module, contextstr: context, generic_args: Vec::new(), generic_types: Vec::new(), docs: String::new(), }); }); } else { let parent_path = &path[..(path.len()-1)]; let context = do_external_search(parent_path, filepath, pos, ExactMatch, Namespace::Type, session).nth(0); context.map(|m| { let pending_imports = &PendingImports::empty(); match m.mtype { Module => { debug!("found an external module {}", m.matchstr); // deal with started with "{", so that "foo::{bar" will be same as "foo::bar" let searchstr = match path[path.len()-1].chars().next() { Some('{') => &path[path.len()-1][1..], _ => path[path.len()-1] }; let pathseg = core::PathSegment{name: searchstr.to_owned(), types: Vec::new()}; for m in search_next_scope(m.point, &pathseg, &m.filepath, search_type, false, namespace, session, pending_imports) { out.push(m); } } Struct => { debug!("found a pub struct. 
Now need to look for impl"); for m in search_for_impls(m.point, &m.matchstr, &m.filepath, m.local, false, session, pending_imports) { debug!("found impl2!! {}", m.matchstr); // deal with started with "{", so that "foo::{bar" will be same as "foo::bar" let searchstr = match path[path.len()-1].chars().next() { Some('{') => &path[path.len()-1][1..], _ => path[path.len()-1] }; let pathseg = core::PathSegment{name: searchstr.to_owned(), types: Vec::new()}; debug!("about to search impl scope..."); for m in search_next_scope(m.point, &pathseg, &m.filepath, search_type, m.local, namespace, session, pending_imports) { out.push(m); } }; } _ => () } }); } out.into_iter() } pub fn search_for_field_or_method(context: Match, searchstr: &str, search_type: SearchType, session: &Session) -> vec::IntoIter<Match> { let m = context; let mut out = Vec::new(); match m.mtype { Struct => { debug!("got a struct, looking for fields and impl methods!! {}", m.matchstr); for m in search_struct_fields(searchstr, &m, search_type, session) { out.push(m); } for m in search_for_impl_methods(&m, searchstr, m.point, &m.filepath, m.local, search_type, session) { out.push(m); } }, Builtin => { for m in search_for_impl_methods(&m, searchstr, m.point, &m.filepath, m.local, search_type, session) { out.push(m); } }, Enum => { debug!("got an enum, looking for impl methods {}", m.matchstr); for m in search_for_impl_methods(&m, searchstr, m.point, &m.filepath, m.local, search_type, session) { out.push(m); } }, Trait => { debug!("got a trait, looking for methods {}", m.matchstr); let src = session.load_file(&m.filepath); src[m.point..].find('{').map(|n| { let point = m.point + n + 1; for m in search_scope_for_methods(point, src.as_src(), searchstr, &m.filepath, search_type) { out.push(m); } }); } _ => { debug!("WARN!! 
context wasn't a Struct, Enum, Builtin or Trait {:?}",m);} }; out.into_iter() } fn search_for_deref_matches(impl_match: &Match, type_match: &Match, fieldsearchstr: &str, fpath: &Path, session: &Session) -> vec::IntoIter<Match> { debug!("Found a Deref Implementation for {}, Searching for Methods on the Deref Type", type_match.matchstr); let mut out = Vec::new(); if let Some(type_arg) = impl_match.generic_args.first() { // If Deref to a generic type if let Some(inner_type_path) = generic_arg_to_path(&type_arg, type_match) { let type_match = resolve_path_with_str(&inner_type_path.path, &inner_type_path.filepath, 0, SearchType::ExactMatch, Namespace::Type, session).nth(0); let subpath = get_subpathsearch(&inner_type_path); if let Some(mut m) = type_match { if let Some(path) = subpath { m.generic_types.push(path); } let methods = search_for_field_or_method(m, fieldsearchstr, SearchType::StartsWith, session); out.extend(methods); }; } // If Deref to an ordinary type else { let deref_type_path = core::Path { global: false, segments: vec![core::PathSegment { name: impl_match.generic_args.first().unwrap().clone(), types: Vec::new() }] }; let type_match = resolve_path_with_str(&deref_type_path, fpath, 0, SearchType::ExactMatch, Namespace::Type, session).nth(0); if let Some(m) = type_match { let methods = search_for_field_or_method(m, fieldsearchstr, SearchType::StartsWith, session); out.extend(methods); } } } out.into_iter() } fn generic_arg_to_path(type_str: &str, m: &Match) -> Option<core::PathSearch> { debug!("Attempting to find type match for {} in {:?}", type_str, m); if let Some(match_pos) = m.generic_args.iter().position(|x| *x == type_str) { if let Some(gen_type) = m.generic_types.get(match_pos) { return Some(gen_type.clone()); } } None } fn get_subpathsearch(pathsearch: &core::PathSearch) -> Option<core::PathSearch> { pathsearch.path.segments.get(0) .and_then(|seg| {seg.types.get(0) .and_then(|first_type| { Some(core::PathSearch { path: first_type.clone(), filepath: 
pathsearch.filepath.clone(), point: pathsearch.point }) }) }) }
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ //! Hermes Interface //! //! This library exports selected Hermes interfaces to Rust #[allow(dead_code)] pub mod parser; #[allow(dead_code)] pub mod utf;
/// Entry point: prints a greeting to stdout.
fn main() {
    println!("Hello, world!");
}

// In Rust, packages of code are referred to as crates.
//
// Cargo commands:
// cargo build           --> builds the executable, which can then be run from
//                           ./target/debug/hello_cargo
// cargo run             --> builds the executable and then runs it for you.
// cargo check           --> checks that the code compiles correctly, but does
//                           not build the executable.
// cargo build --release --> when your project is finally ready for release,
//                           use this to compile with optimizations; it creates
//                           an executable in target/release.
//! JA3 Hash //! //! A small TLS fingerprinting library written in Rust. //! //! This crate enables a consumer to fingerprint the ClientHello portion of a TLS handshake. //! It can hash TLS handshakes over IPv4 and IPv6. It heavily depends on the [tls-parser //! project](https://github.com/rusticata/tls-parser) from Rusticata. //! //! It supports generating fingerprints from packet capture files as well as live-captures //! on a network interface, both using libpcap. //! //! See the original [JA3 project](https://github.com/salesforce/ja3) for more information. //! //! Example of fingerprinting a packet capture file: //! //! ```rust,no_run //! use ja3::Ja3; //! //! let mut ja3 = Ja3::new("test.pcap") //! .process_pcap() //! .unwrap(); //! //! // Now we have a Vec of Ja3Hash objects //! for hash in ja3 { //! println!("{}", hash); //! } //! ``` //! //! Example of fingerprinting a live capture: //! //! ```rust,ignore //! use ja3::Ja3; //! //! let mut ja3 = Ja3::new("eth0") //! .process_live() //! .unwrap(); //! while let Some(hash) = ja3.next() { //! println!("{}", hash); //! } //! //! ``` use std::fs::File; use std::ffi::{OsStr, OsString}; use std::fmt; use std::net::IpAddr; use lazy_static::*; use log::{info, debug}; use md5::{self, Digest}; #[cfg(feature = "live-capture")] use pcap::{Active, Capture}; use pcap_parser::{LegacyPcapReader, PcapBlockOwned, PcapError}; use pcap_parser::traits::PcapReaderIterator; use pnet::packet::ethernet::EtherType; use pnet::packet::ip::IpNextHeaderProtocol; use pnet::packet::ip::IpNextHeaderProtocols; use pnet::packet::*; use tls_parser::parse_tls_plaintext; use tls_parser::tls::{TlsMessage, TlsMessageHandshake, TlsRecordType}; use tls_parser::tls_extensions::{parse_tls_extensions, TlsExtension, TlsExtensionType}; mod errors; use errors::*; use failure::Error; lazy_static! 
{ static ref IPTYPE: IpNextHeaderProtocol = IpNextHeaderProtocol::new(6); static ref GREASE: Vec<u16> = vec![ 0x0a0a, 0x1a1a, 0x2a2a, 0x3a3a, 0x4a4a, 0x5a5a, 0x6a6a, 0x7a7a, 0x8a8a, 0x9a9a, 0xaaaa, 0xbaba, 0xcaca, 0xdada, 0xeaea, 0xfafa ]; } /// A JA3 hash builder. This provides options about how to extract a JA3 hash from a TLS handshake. #[derive(Debug)] pub struct Ja3 { i: Ja3Inner, } // TODO: add support for RAW captures #[derive(Debug)] struct Ja3Inner { path: OsString, tls_port: u16, } /// The output of a JA3 hash object. This consists of the JA3 string and MD5 hash. #[derive(Debug, Eq)] pub struct Ja3Hash { /// The string consisting of the SSLVersion,Cipher,SSLExtension,EllipticCurve,EllipticCurvePointFormat /// See the original [JA3 specification](https://github.com/salesforce/ja3#how-it-works) for more info. pub ja3_str: String, /// The MD5 hash of `ja3_str`. pub hash: Digest, /// The destination IP address of the TLS handshake. pub source: IpAddr, /// The source IP address of the TLS handshake. pub destination: IpAddr, } /// Iterator of JA3 hashes captured during a live capture. #[cfg(feature = "live-capture")] pub struct Ja3Live { cap: Capture<Active>, ja3_inner: Ja3, } #[cfg(feature = "live-capture")] impl Iterator for Ja3Live { type Item = Ja3Hash; fn next(&mut self) -> Option<Self::Item> { while let Ok(packet) = self.cap.next() { match self.ja3_inner.process_packet_common(&packet) { Ok(s) => return Some(s), Err(_) => continue, }; } None } } impl Ja3 { /// Creates a new Ja3 object. /// /// It will extract JA3 hashes from the packet capture located at `pcap_path` or /// the network interface named `pcap_path`, depending on whether the consumer calls /// `process_pcap` or `process_live`. pub fn new<S: AsRef<OsStr>>(pcap_path: S) -> Self { let mut path = OsString::new(); path.push(pcap_path); let i = Ja3Inner { path: path, tls_port: 443, }; Ja3 { i: i } } /// Change the hasher behavior to scan for TLS handshakes occuring on *any* TCP port. 
By /// default we only fingerprint handshakes on TCP 443. pub fn any_port<'a>(&'a mut self) -> &'a mut Self { self.i.tls_port = 0; self } /// Scans the provided packet capture for TLS handshakes and returns JA3 hashes for any found. pub fn process_pcap(&self) -> Result<Vec<Ja3Hash>, Error> { let mut results: Vec<Ja3Hash> = Vec::new(); let file = File::open(&self.i.path)?; let mut reader = LegacyPcapReader::new(65536, file).expect("LegacyPcapReader"); loop { match reader.next() { Ok((offset, block)) => { match block { PcapBlockOwned::LegacyHeader(_hdr) => { // save hdr.network (linktype) }, PcapBlockOwned::Legacy(block) => { let ja3_hash = match self.process_packet_common(&block.data) { Ok(s) => s, Err(_) => { reader.consume(offset); continue; }, }; debug!("Adding JA3: {:?}", ja3_hash); results.push(ja3_hash); }, PcapBlockOwned::NG(_) => unreachable!(), } reader.consume(offset); }, Err(PcapError::Eof) => break, Err(PcapError::Incomplete) => { reader.refill().unwrap(); }, Err(e) => return Err(e.into()), } } Ok(results) } /// Opens a live packet capture and scans packets for TLS handshakes and returns an iterator of /// JA3 hashes found. 
#[cfg(feature = "live-capture")]
pub fn process_live(self) -> Result<Ja3Live, Error> {
    // `path` is an interface name here (e.g. "eth0"), not a file path.
    // NOTE(review): `to_str().unwrap()` panics on a non-UTF-8 interface
    // name; consider returning an error instead — verify against callers.
    let cap = Capture::from_device(self.i.path.to_str().unwrap())?.open()?;
    info!("cap: {:?}", self.i.path);
    // The capture handle is opened here, but packets are only read when the
    // returned `Ja3Live` iterator is driven.
    Ok(Ja3Live {
        cap: cap,
        ja3_inner: self,
    })
}

/// Parses one raw Ethernet frame and, if it carries a TLS ClientHello,
/// returns its JA3 hash.
///
/// Returns `Ja3Error::ParseError` for malformed or unsupported framing and
/// `Ja3Error::NotHandshake` when the packet is valid but not a TLS handshake.
fn process_packet_common(&self, packet: &[u8]) -> Result<Ja3Hash, Error> {
    let saddr;
    let daddr;
    let ether = ethernet::EthernetPacket::new(&packet).ok_or(Ja3Error::ParseError)?;
    info!("\nether packet: {:?} len: {}", ether, ether.packet_size());
    // Determine where the TCP header starts, for IPv4 vs IPv6 framing.
    let tcp_start = match ether.get_ethertype() {
        // 0x0800 = IPv4
        EtherType(0x0800) => {
            let ip = ipv4::Ipv4Packet::new(&packet[ether.packet_size()..])
                .ok_or(Ja3Error::ParseError)?;
            info!("\nipv4 packet: {:?}", ip);
            // Only TCP (protocol 6, see IPTYPE) can carry a TLS handshake.
            if ip.get_next_level_protocol() != *IPTYPE {
                return Err(Ja3Error::ParseError)?;
            }
            // The IPv4 header-length field counts 32-bit words.
            let iphl = ip.get_header_length() as usize * 4;
            saddr = IpAddr::V4(ip.get_source());
            daddr = IpAddr::V4(ip.get_destination());
            iphl + ether.packet_size()
        }
        // 0x86dd = IPv6
        EtherType(0x86dd) => {
            let ip = ipv6::Ipv6Packet::new(&packet[ether.packet_size()..])
                .ok_or(Ja3Error::ParseError)?;
            info!("\nipv6 packet: {:?}", ip);
            saddr = IpAddr::V6(ip.get_source());
            daddr = IpAddr::V6(ip.get_destination());
            // NOTE(review): IPv6 extension headers are not walked — a packet
            // with any extension header before TCP is rejected here.
            if ip.get_next_header() != IpNextHeaderProtocols::Tcp {
                return Err(Ja3Error::NotHandshake)?;
            }
            // Fixed IPv6 header size (no extension headers handled above).
            let iphl = 40;
            iphl + ether.packet_size()
        }
        _ => return Err(Ja3Error::ParseError)?,
    };
    let tcp = tcp::TcpPacket::new(&packet[tcp_start..]).ok_or(Ja3Error::ParseError)?;
    info!("tcp: {:?}", tcp);
    if self.i.tls_port != 0 {
        // BUG(review): this compares against the literal 443 instead of
        // `self.i.tls_port`, so a custom port configured in `Ja3Inner` is
        // silently ignored — only `any_port()` (tls_port == 0) or the
        // default 443 behave as documented. Should be
        // `tcp.get_destination() != self.i.tls_port`.
        if tcp.get_destination() != 443 {
            return Err(Ja3Error::NotHandshake)?;
        }
    }
    info!("pack size: {}", tcp.packet_size());
    // NOTE(review): confirm `tcp.packet_size()` accounts for TCP options;
    // if it only covers the fixed header, `handshake_start` is wrong for
    // packets with options.
    let handshake_start = tcp_start + tcp.packet_size();
    info!("handshake_start: {}", handshake_start);
    let handshake = &packet[handshake_start..];
    // Empty TCP payload cannot be a handshake. (`<= 0` on a usize length is
    // non-idiomatic; `is_empty()` would read better.)
    if handshake.len() <= 0 { return
Err(Ja3Error::NotHandshake)?; } if handshake[0] != 0x16 { return Err(Ja3Error::NotHandshake)?; } info!("handshake: {:x?}", handshake); info!("sending handshake {:?}", handshake); let ja3_string = self.ja3_string_client_hello(&handshake).unwrap(); if ja3_string == "" { return Err(Ja3Error::NotHandshake)?; } let hash = md5::compute(&ja3_string.as_bytes()); let ja3_res = Ja3Hash { ja3_str: ja3_string, hash: hash, source: saddr, destination: daddr, }; Ok(ja3_res) } fn process_extensions(&self, extensions: &[u8]) -> Option<String> { let mut ja3_exts = String::new(); let mut supported_groups = String::new(); let mut ec_points = String::new(); let (_, exts) = parse_tls_extensions(extensions).unwrap(); for extension in exts { let ext_val = u16::from(TlsExtensionType::from(&extension)); if GREASE.contains(&ext_val) { continue; } info!("Ext: {:?}", ext_val); ja3_exts.push_str(&format!("{}-", ext_val)); match extension { TlsExtension::EllipticCurves(curves) => { for curve in curves { if !GREASE.contains(&curve.0) { info!("curve: {}", curve.0); supported_groups.push_str(&format!("{}-", curve.0)); } } } TlsExtension::EcPointFormats(points) => { info!("Points: {:x?}", points); for point in points { ec_points.push_str(&format!("{}-", point)); } } _ => {} } } ja3_exts.pop(); supported_groups.pop(); ec_points.pop(); info!("Supported groups: {}", supported_groups); info!("EC Points: {}", ec_points); let ret = format!("{},{},{}", ja3_exts, supported_groups, ec_points); Some(ret) } fn ja3_string_client_hello(&self, packet: &[u8]) -> Option<String> { info!("PACKET: {:?}", packet); let mut ja3_string = String::new(); let res = parse_tls_plaintext(packet); match res { Ok((rem, record)) => { info!("Rem: {:?}, record: {:?}", rem, record); info!("record type: {:?}", record.hdr.record_type); if record.hdr.record_type != TlsRecordType::Handshake { return None; } for rec in record.msg { if let TlsMessage::Handshake(handshake) = rec { if let TlsMessageHandshake::ClientHello(contents) = 
handshake { info!("handshake contents: {:?}", contents); info!("handshake tls version: {:?}", u16::from(contents.version)); ja3_string.push_str(&format!("{},", u16::from(contents.version))); for cipher in contents.ciphers { info!("handshake cipher: {}", u16::from(cipher)); if !GREASE.contains(&cipher) { ja3_string.push_str(&format!("{}-", u16::from(cipher))); } } ja3_string.pop(); ja3_string.push(','); if let Some(extensions) = contents.ext { let ext = self.process_extensions(extensions).unwrap(); ja3_string.push_str(&ext); } } } } } _ => { info!("ERROR"); return None; } } info!("ja3_string: {}", ja3_string); Some(ja3_string) } } impl fmt::Display for Ja3Hash { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "[{} --> {}] {} {:x}", self.source, self.destination, self.ja3_str, self.hash ) } } impl PartialEq for Ja3Hash { fn eq(&self, other: &Self) -> bool { self.hash == other.hash } } #[cfg(test)] mod tests { use super::*; use env_logger; use nix::unistd::{fork, ForkResult}; use pretty_assertions::assert_eq; use rusty_fork::rusty_fork_id; use rusty_fork::rusty_fork_test; use rusty_fork::rusty_fork_test_name; use std::net::{IpAddr, Ipv4Addr}; use std::process::Command; // NOTE: Any test for the live-capture feature requires elevated privileges. #[cfg(feature = "live-capture")] rusty_fork_test! { #[test] #[ignore] fn test_ja3_client_hello_chrome_grease_single_packet_live() { let expected_str = "771,4865-4866-4867-49195-49199-49196-49200-52393-52392-49171-49172-156-157-47-53-10,0-23-65281-10-11-35-16-5-13-18-51-45-43-27-21,29-23-24,0"; let expected_hash = "66918128f1b9b03303d77c6f2eefd128"; let expected_daddr = IpAddr::V6("2607:f8b0:4004:814::2002".parse().unwrap()); match fork() { Ok(ForkResult::Parent { child: _, .. 
}) => { let mut ja3 = Ja3::new("lo") .process_live().unwrap(); if let Some(x) = ja3.next() { assert_eq!(x.ja3_str, expected_str); assert_eq!(format!("{:x}", x.hash), expected_hash); assert_eq!(expected_daddr, x.destination); std::process::exit(0); } }, Ok(ForkResult::Child) => { let _out = Command::new("tcpreplay") .arg("-i") .arg("lo") .arg("chrome-grease-single.pcap") .output() .expect("failed to execute process"); }, Err(_) => println!("Fork failed"), } } } #[test] fn test_ja3_client_hello_chrome_grease_single_packet() { let expected_str = "771,4865-4866-4867-49195-49199-49196-49200-52393-52392-49171-49172-156-157-47-53-10,0-23-65281-10-11-35-16-5-13-18-51-45-43-27-21,29-23-24,0"; let expected_hash = "66918128f1b9b03303d77c6f2eefd128"; let expected_daddr = IpAddr::V6("2607:f8b0:4004:814::2002".parse().unwrap()); let mut ja3 = Ja3::new("tests/chrome-grease-single.pcap") .process_pcap() .unwrap(); let ja3_hash = ja3.pop().unwrap(); assert_eq!(ja3_hash.ja3_str, expected_str); assert_eq!(format!("{:x}", ja3_hash.hash), expected_hash); assert_eq!(expected_daddr, ja3_hash.destination); } #[test] fn test_ja3_client_hello_firefox_single_packet() { let expected_str = "771,49195-49199-52393-52392-49196-49200-49162-49161-49171-49172-51-57-47-53-10,0-23-65281-10-11-35-16-5-13-28,29-23-24-25,0"; let expected_hash = "839bbe3ed07fed922ded5aaf714d6842"; let expected_daddr = IpAddr::V4("34.209.18.179".parse().unwrap()); let mut ja3 = Ja3::new("tests/test.pcap").process_pcap().unwrap(); let ja3_hash = ja3.pop().unwrap(); assert_eq!(ja3_hash.ja3_str, expected_str); assert_eq!(format!("{:x}", ja3_hash.hash), expected_hash); assert_eq!(expected_daddr, ja3_hash.destination); } #[test] fn test_ja3_curl_full_stream() { let expected_str = "771,4866-4867-4865-49196-49200-159-52393-52392-52394-49195-49199-158-49188-49192-107-49187-49191-103-49162-49172-57-49161-49171-51-157-156-61-60-53-47-255,0-11-10-13172-16-22-23-13-43-45-51-21,29-23-30-25-24,0-1-2"; let expected_hash = 
"456523fc94726331a4d5a2e1d40b2cd7"; let expected_daddr = IpAddr::V4("93.184.216.34".parse().unwrap()); let mut ja3s = Ja3::new("tests/curl.pcap").process_pcap().unwrap(); let ja3 = ja3s.pop().unwrap(); assert_eq!(ja3.ja3_str, expected_str); assert_eq!(format!("{:x}", ja3.hash), expected_hash); assert_eq!(expected_daddr, ja3.destination); } #[test] fn test_ja3_curl_full_stream_ipv6() { let expected_str = "771,4866-4867-4865-49196-49200-159-52393-52392-52394-49195-49199-158-49188-49192-107-49187-49191-103-49162-49172-57-49161-49171-51-157-156-61-60-53-47-255,0-11-10-13172-16-22-23-13-43-45-51-21,29-23-30-25-24,0-1-2"; let expected_hash = "456523fc94726331a4d5a2e1d40b2cd7"; let expected_daddr = IpAddr::V6("2606:2800:220:1:248:1893:25c8:1946".parse().unwrap()); let mut ja3s = Ja3::new("tests/curl-ipv6.pcap").process_pcap().unwrap(); let ja3 = ja3s.pop().unwrap(); assert_eq!(ja3.ja3_str, expected_str); assert_eq!(format!("{:x}", ja3.hash), expected_hash); assert_eq!(expected_daddr, ja3.destination); } #[test] fn test_ja3_client_hello_ncat_full_stream_non_tls_port() { let expected_str = "771,4866-4867-4865-49196-49200-163-159-52393-52392-52394-49327-49325-49315-49311-49245-49249-49239-49235-49188-49192-107-106-49267-49271-196-195-49162-49172-57-56-136-135-157-49313-49309-49233-61-192-53-132-49195-49199-162-158-49326-49324-49314-49310-49244-49248-49238-49234-49187-49191-103-64-49266-49270-190-189-49161-49171-51-50-154-153-69-68-156-49312-49308-49232-60-186-47-150-65-255,0-11-10-35-22-23-13-43-45-51-21,29-23-30-25-24,0-1-2"; let expected_hash = "10a6b69a81bac09072a536ce9d35dd43"; let mut ja3 = Ja3::new("tests/ncat-port-4450.pcap") .any_port() .process_pcap() .unwrap(); let ja3_hash = ja3.pop().unwrap(); assert_eq!(ja3_hash.ja3_str, expected_str); assert_eq!(format!("{:x}", ja3_hash.hash), expected_hash); assert_eq!( IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), ja3_hash.destination ); } }
#![allow(dead_code, unused_imports)]

use super::super::{TypeID, BuiltinType};

/// Compile-time description of a builtin function: its surface name, the
/// mangled runtime symbol it maps to, and its argument/return types.
#[derive(Clone)]
pub struct BuiltinFunction {
    // Name the builtin is referred to by in source code.
    pub name: &'static str,
    // Fully qualified runtime symbol, e.g. "::qas::builtin::sinf".
    pub real: String,
    // Argument types (borrowed from statically allocated TypeIDs).
    pub args: Vec <&'static TypeID>,
    // Return type.
    pub ret: &'static TypeID
}

impl BuiltinFunction {
    /// Registers every known builtin. Currently a stub: the example
    /// registration below is kept as a template for future entries.
    pub fn add_all() {
        // Self::add("__sinf", "sinf", vec![&TypeID {
        //     idx: BuiltinType::SignedInt as usize,
        //     ptr: 0,
        //     mutable: true
        // }], &TypeID {
        //     idx: BuiltinType::SignedInt as usize,
        //     ptr: 0,
        //     mutable: true
        // })
    }

    /// Registers a single builtin under `name`, mapping it to the runtime
    /// symbol `::qas::builtin::<real>`.
    pub fn add(name: &'static str, real: &'static str, args: Vec <&'static TypeID>, ret: &'static TypeID) {
        Self::fns().push(Self {
            name,
            real: format!("::qas::builtin::{}", real),
            args,
            ret
        })
    }

    /// Returns the global mutable registry of builtins.
    ///
    /// NOTE(review): handing out `&'static mut` to a `static mut` is unsound
    /// if two callers hold the reference at once (aliased mutable borrows),
    /// and newer compilers reject `&mut` to a `static mut` outright. This is
    /// only tolerable while the compiler is strictly single-threaded and
    /// callers never retain the reference — consider migrating to
    /// `Mutex<Vec<_>>`/`OnceLock` when the interface can change.
    pub fn fns() -> &'static mut Vec <BuiltinFunction> {
        static mut FNS: Vec <BuiltinFunction> = Vec::new();
        unsafe { &mut FNS }
    }
}
pub mod inventory; pub mod reforge; pub mod stats; pub mod talisman;
/// A ray with an origin and a direction.
// NOTE(review): the intersection math below treats `direction` as a unit
// vector — confirm all constructors normalize it.
#[derive(Default, Debug)]
pub struct Ray {
    origin: glam::Vec3A,
    direction: glam::Vec3A,
}

/// Intersects `ray` (as an infinite line) with a sphere of `sphere_radius`
/// centered at the world origin.
///
/// Returns `None` when the line misses the sphere, otherwise
/// `(chord_length, entry_point, exit_point)`. Note that intersections behind
/// the ray origin are still reported, since the test is on the whole line.
pub fn ray_sphere_intersection(ray: &Ray, sphere_radius: f32) -> Option<(f32, glam::Vec3A, glam::Vec3A)> {
    // Decompose the origin into components parallel/orthogonal to the ray
    // direction; the orthogonal part is the closest approach to the center.
    let parallel = ray.direction*ray.origin.dot(ray.direction);
    let orthogonal = ray.origin-parallel;
    let orthogonal_dist = orthogonal.length();
    if orthogonal_dist > sphere_radius {
        return None;
    }
    // Half-chord length from Pythagoras inside the sphere.
    let parallel_dist = (1.0-(orthogonal_dist/sphere_radius).powi(2)).sqrt()*sphere_radius;
    // Closest point of the line to the sphere center (chord midpoint).
    let origin = ray.origin-parallel;
    return Some((parallel_dist*2.0, origin-parallel_dist*ray.direction, origin+parallel_dist*ray.direction));
}

/// A camera holding world/projection matrices plus derived inverses and a
/// bounding volume describing the visible region.
#[derive(Default, Debug)]
pub struct Camera {
    near: f32,
    far: f32,
    world_matrix: glam::Mat4,
    inverse_world_matrix: glam::Mat4,
    projection_matrix: glam::Mat4,
    view_matrix: glam::Mat4,
    inverse_view_matrix: glam::Mat4,
    bounding_volume: crate::bounding_volume::BoundingVolume,
}

impl Camera {
    /// Recomputes the derived matrices after `world_matrix` or
    /// `projection_matrix` changed. Here `view_matrix` is projection *
    /// inverse-world (i.e. world -> clip), and `inverse_view_matrix` maps
    /// clip space back to world space.
    fn update(&mut self) {
        self.inverse_world_matrix = self.world_matrix.inverse();
        self.view_matrix = self.projection_matrix*self.inverse_world_matrix;
        self.inverse_view_matrix = self.view_matrix.inverse();
    }

    /// Configures a right-handed orthographic projection centered on the
    /// camera axis, and a box bounding volume matching the frustum.
    pub fn set_orthographic(&mut self, near: f32, far: f32, width: f32, height: f32) {
        self.near = near;
        self.far = far;
        self.projection_matrix = glam::Mat4::orthographic_rh(
            -0.5*width, 0.5*width,
            -0.5*height, 0.5*height,
            self.near, self.far
        );
        self.bounding_volume = crate::bounding_volume::BoundingVolume::Box(crate::bounding_volume::BoundingBox {
            min: glam::Vec3A::new(-0.5*width, -0.5*height, near),
            max: glam::Vec3A::new(0.5*width, 0.5*height, far),
        });
        self.update();
    }

    /// Configures a right-handed perspective projection and a pyramid
    /// bounding volume matching the frustum.
    // NOTE(review): `fov_y_radians.tan()*far` gives the far-plane *half*
    // height for a half-angle fov, or double it for a full-angle fov —
    // confirm which convention BoundingPyramid expects.
    pub fn set_perspective(&mut self, near: f32, far: f32, fov_y_radians: f32, aspect_ratio: f32) {
        self.near = near;
        self.far = far;
        self.projection_matrix = glam::Mat4::perspective_rh(
            fov_y_radians, aspect_ratio,
            self.near, self.far
        );
        let height_at_far = fov_y_radians.tan()*far;
        self.bounding_volume = crate::bounding_volume::BoundingVolume::Pyramid(crate::bounding_volume::BoundingPyramid {
            near,
            far,
            width_at_far: height_at_far*aspect_ratio,
            height_at_far,
        });
        self.update();
    }

    /// Sets the camera's world transform and refreshes derived matrices.
    pub fn set_world_matrix(&mut self, world_matrix: glam::Mat4) {
        self.world_matrix = world_matrix;
        self.update();
    }

    pub fn get_world_matrix(&self) -> glam::Mat4{
        self.world_matrix
    }

    pub fn get_projection_matrix(&self) -> glam::Mat4{
        self.projection_matrix
    }

    pub fn get_view_matrix(&self) -> glam::Mat4{
        self.view_matrix
    }

    pub fn get_inverse_view_matrix(&self) -> glam::Mat4{
        self.inverse_view_matrix
    }

    /// Builds a world-space picking ray through normalized device
    /// coordinates (`x`, `y`) on the near side of the frustum.
    pub fn get_view_ray(&self, x: f32, y: f32) -> Ray {
        let ndc_pos = glam::Vec3A::new(x, y, 0.0);
        // Reconstruct the clip-space w for this depth from the projection
        // matrix, then undo the perspective divide by multiplying through.
        let mut clip_pos = ndc_pos.extend(self.projection_matrix.w_axis().z()/(ndc_pos.z()-self.projection_matrix.z_axis().z()/self.projection_matrix.z_axis().w()));
        *clip_pos.x_mut() *= clip_pos.w();
        *clip_pos.y_mut() *= clip_pos.w();
        *clip_pos.z_mut() *= clip_pos.w();
        // Camera position = translation column of the world matrix.
        let origin = self.world_matrix.w_axis().truncate();
        // Unproject the clip-space point back to world space.
        let hit = (self.inverse_view_matrix*clip_pos).truncate();
        let direction = (hit-origin).normalize();
        Ray {
            origin,
            direction
        }
    }
}
use log::*;
use tokio::task;
use crate::cli;
use crate::bus::ModuleMsgEnum;
use crate::conf;
use crate::cla::cla_manager::ClaManager;
use crate::cla::HandleId;
use crate::processor;
use crate::routing;
use crate::agent;
use crate::user;
use strum_macros::*;
use msgbus::{MsgBus, MsgBusHandle};
use msgbus::Message::*;
use std::path::PathBuf;
use std::sync::Arc;

// Handle every subsystem uses to talk on the shared message bus.
pub type BusHandle = MsgBusHandle<SystemModules, ModuleMsgEnum>;

/// Options parsed from the command line.
pub struct CmdLineOpts {
    pub config_file: String,
}

/// Control messages addressed to the `System` module itself.
#[derive(EnumIter, Debug, PartialEq, Eq, Hash, Clone)]
pub enum SystemMessage {
    ShutdownRequested,
}

/// Every addressable participant on the message bus.
#[derive(EnumIter, Debug, PartialEq, Eq, Hash, Clone)]
pub enum SystemModules {
    Processing,     // Actually reads the Bundle and decides what to do with it
    ClaManager,     // Manages the various CLA creation/deletion
    Cla(HandleId),  // Each CLA
    CLI,            // User interface
    Logging,        // Catches and distributes all logging
    Storage,        // Bundles being written to disk
    AppAgent,       // Registering clients, send/receive bundles
    AgentClient(agent::AgentId), // Represents the actual connected application client of the Agent
    UserMgr,        // All things to do with add/remove/verify users
    Routing,        // Updates and lookups to the forwarding table
    Configuration,  // Reads, stores, updates the config. Lets other modules know
    Bus,            // The messaging backbone
    System,         // System to control the system
}

/// Boots the whole node: creates the message bus, constructs every subsystem,
/// spawns each one onto the tokio runtime, then sits in the system control
/// loop until a shutdown is requested, finally joining all tasks.
// NOTE(review): `core_threads` is the tokio 0.2-era attribute argument; it
// was renamed `worker_threads` in tokio 1.x.
#[tokio::main (core_threads=2)]
//#[tokio::main(core_threads = 2)]
pub async fn start(conf_file: String) {
    //conf.store_file(&conf_file).unwrap();
    //println!("{}", toml::to_string_pretty(&conf).unwrap());
    let (mut bus, bus_handle) = MsgBus::<SystemModules, ModuleMsgEnum>::new();
    // Register ourselves so we receive SystemMessage traffic below.
    let mut rx = bus_handle.clone().register(SystemModules::System).await.unwrap();
    // let (mut msg_bus_old, bus_tx, bus_rx) = bus::Bus::new();
    // let han_bus = msg_bus_old.start(bus_rx);
    let mut conf_mgr = conf::ConfManager::new(PathBuf::from(conf_file), bus_handle.clone()).await;
    // Storage here
    let proc_mgr = Arc::new(processor::Processor::new(bus_handle.clone()).await);
    let mut cla_mgr = ClaManager::new(bus_handle.clone()).await;
    let cli_mgr = cli::CliManager::new(bus_handle.clone()).await;
    let router = Arc::new(routing::router::Router::new(bus_handle.clone()).await);
    let agent = agent::Agent::new(bus_handle.clone()).await;
    let user_mgr = user::UserMgr::new(bus_handle.clone()).await;

    // Spawn each subsystem's event loop as its own task; the handles are
    // joined after the control loop exits.
    let han_conf = task::spawn(async move { conf_mgr.start().await; });
    let han_rout = task::spawn(async move { router.clone().start().await; });
    let han_proc = task::spawn(async move { proc_mgr.clone().start().await });
    let _han_clim = task::spawn(async move { cli_mgr.start().await; });
    let han_clam = task::spawn(async move { cla_mgr.start().await; });
    let han_agent = task::spawn(async move { agent.start().await; });
    let han_user = task::spawn(async move { user_mgr.start().await; });

    // let mut processor = Processor::new();
    // task::spawn_blocking(|| {cli::start()});
    // processor.start().await;
    // cli::start_shell();
    // info!("Waiting for threads");
    // // tokio::join!(han_clam, han_conf, han_proc);
    // // tokio::join!(han_bus, han_conf, han_proc, han_clam);
    // info!("All threads shut down.");
    // tokio::join!(han_clim);

    trace!("About to enter system control loop");
    // Control loop: react to bus traffic addressed to the System module.
    // A ShutdownRequested message tears the bus down, which in turn makes
    // `recv()` yield `Shutdown` and breaks the loop.
    while let Some(msg) = rx.recv().await {
        match msg {
            Shutdown => { break; }
            Message(ModuleMsgEnum::MsgSystem(SystemMessage::ShutdownRequested)) => {
                debug!("Received shutdown request");
                // bus_handle.broadcast(ModuleMsgEnum::ShutdownNow).await;
                bus.shutdown().await.unwrap();
            },
            _ => {},
        }
    }
    info!("Waiting on threads to exit");
    // Join results are intentionally ignored; we are exiting regardless.
    #[allow(unused_must_use)] {
        tokio::join!(han_conf, han_proc, han_clam, han_rout, han_agent, han_user);
    }
    info!("System Halted");
}

// *****************************************************************************************
//                                  Messaging helpers
// *****************************************************************************************

/// Asks the running system (via the bus) to begin an orderly shutdown.
pub async fn halt(bh: &mut BusHandle) {
    bh.send(SystemModules::System, ModuleMsgEnum::MsgSystem(SystemMessage::ShutdownRequested)).await.unwrap();
}
mod buildtest; mod integration; mod utils;
mod parser; struct Machine { stack: [u8; 4048], stack_pointer: usize, program: [u16; 4048], program_counter: usize, } enum Instruction { Next, Halt, Jump(u8), } impl Machine { fn new() -> Machine { Machine{ stack: [0; 4048], stack_pointer: 0, program: [0; 4048], program_counter: 0, } } fn current(& self) -> u8 { self.stack[self.stack_pointer - 1] } fn previous(& self) -> u8 { self.stack[self.stack_pointer - 2] } fn u8const(& mut self, num : u8) -> Instruction { println!("const"); self.stack[self.stack_pointer] = num; self.stack_pointer += 1; Instruction::Next } fn add(& mut self) -> Instruction { println!("add"); self.stack[self.stack_pointer - 2] = self.current().wrapping_add(self.previous()); self.stack_pointer -= 1; Instruction::Next } fn sub(& mut self) -> Instruction { println!("sub"); self.stack[self.stack_pointer - 2] = self.previous().wrapping_sub(self.current()); self.stack_pointer -= 1; Instruction::Next } fn jump_if_zero(& mut self, jump_address: u8) -> Instruction { println!("jumpif {:?}", self.current()); if self.current() == 0 { Instruction::Jump(jump_address) } else { Instruction::Next } } fn jump(& mut self, jump_address: u8) -> Instruction { println!("jump"); Instruction::Jump(jump_address) } fn decode(& mut self, opcode: [u8; 2]) -> Instruction { match opcode { [0x0, 0x0] => Instruction::Halt, [0x1, x] => self.u8const(x), [0x2, _] => self.add(), [0x3, _] => self.sub(), [0x4, x] => self.jump(x), [0x5, x] => self.jump_if_zero(x), // Need op codes for copy and drop // Maybe swap or rotate? 
// Then we might want an assembler _ => Instruction::Halt } } fn run_once(& mut self) -> Instruction { self.decode(self.program[self.program_counter].to_ne_bytes()) } fn run(& mut self) { loop { match self.run_once() { Instruction::Next => { self.program_counter += 1; continue }, Instruction::Halt => break, Instruction::Jump(n) => { self.program_counter = n as usize; continue } } } } } // 2 fn main() { parser::parse_file("/Users/jimmyhmiller/Documents/Code/PlayGround/rust/editor/src/main.rs".to_string()); // let mut machine = Machine::new(); // machine.program[0] = 0x0A01; // machine.program[1] = 0x0505; // machine.program[2] = 0x0101; // machine.program[3] = 0x0003; // machine.program[4] = 0x0104; // machine.run(); // let result = machine.current(); // println!("{:?}", result); }
/********************************************** > File Name : lib.rs > Author : lunar > Email : lunar_ubuntu@qq.com > Created Time : Tue 15 Feb 2022 10:56:26 PM CST > Location : Shanghai > Copyright@ https://github.com/xiaoqixian **********************************************/ mod tree;
use std::fmt::Formatter; use std::fs::File; use std::io::{self, BufRead}; use std::num::ParseIntError; use std::path::Path; use std::{env, error, fmt, num, result}; use regex::{self, Regex}; #[derive(fmt::Debug)] struct Error { message: String, } impl Error { fn new(message: String) -> Error { Error { message } } } type Result<T> = result::Result<T, Error>; impl fmt::Display for Error { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "{}", self.message) } } impl error::Error for Error {} impl From<io::Error> for Error { fn from(e: io::Error) -> Self { Error::new(format!("io error:{}", e)) } } impl From<num::ParseIntError> for Error { fn from(e: ParseIntError) -> Self { Error::new(format!("parse int error:{}", e)) } } impl From<regex::Error> for Error { fn from(e: regex::Error) -> Self { Error::new(format!("regex error: {}", e)) } } fn read_lines<P: AsRef<Path>>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>> { let file = File::open(filename)?; Ok(io::BufReader::new(file).lines()) } struct Parser { re: Regex, } impl Parser { fn new() -> Result<Parser> { let re = Regex::new(r"^(?P<from>\d+)-(?P<to>\d+)\s(?P<letter>\w):\s(?P<password>.+)$")?; Ok(Parser { re }) } fn parse(&self, line: &str) -> Result<Record> { if let Some(caps) = self.re.captures(line) { let from = caps["from"].parse::<u64>()?; let to = caps["to"].parse::<u64>()?; let letter = caps["letter"].chars().collect::<Vec<char>>()[0]; let password = caps["password"].to_string(); Ok(Record { policy: Policy { from, to, letter }, password, }) } else { Err(Error::new("Invalid record".to_string())) } } } struct Policy { from: u64, to: u64, letter: char, } struct Record { policy: Policy, password: String, } impl Record { fn validate_old(&self) -> bool { let chars = self.password.chars(); let count = chars.filter(|c| *c == self.policy.letter).count() as u64; count >= self.policy.from && count <= self.policy.to } fn validate_new(&self) -> bool { let char_vec: Vec<char> = 
self.password.chars().collect(); (char_vec.len() as u64 >= self.policy.from && char_vec[(self.policy.from - 1) as usize] == self.policy.letter) != (char_vec.len() as u64 >= self.policy.to && char_vec[(self.policy.to - 1) as usize] == self.policy.letter) } } fn lines_to_records(lines: impl Iterator<Item=io::Result<String>>) -> Result<Vec<Record>> { let parser = Parser::new()?; Ok(lines .filter(|res| res.is_ok()) .map(|res| res.unwrap()) // OK to unwrap here .map(|line| parser.parse(&line)) .filter(|res| res.is_ok()) .map(|res| res.unwrap()) // OK to unwrap here .collect()) } fn count_old(recs: &[Record]) -> u64 { recs.iter().filter(|rec| rec.validate_old()).count() as u64 } fn count_new(recs: &[Record]) -> u64 { recs.iter().filter(|rec| rec.validate_new()).count() as u64 } fn main() -> Result<()> { let args = env::args().collect::<Vec<String>>(); if args.len() > 1 { let lines = read_lines(&args[1])?; let recs = lines_to_records(lines)?; println!( "The number of valid records by the old method is {}", count_old(&recs) ); println!( "The number of valid records by the new method is {}", count_new(&recs) ); Ok(()) } else { Err(Error::new("filename argument required".to_string())) } } #[cfg(test)] mod tests { use super::*; #[test] fn parses_a_valid_db_record() -> result::Result<(), Error> { let record = Parser::new()?.parse("3-11 z: zzzzzdzzzzlzz")?; assert_eq!(3, record.policy.from); assert_eq!(11, record.policy.to); assert_eq!('z', record.policy.letter); assert_eq!("zzzzzdzzzzlzz", record.password); Ok(()) } #[test] fn validates_a_valid_password() -> result::Result<(), Error> { let record = Parser::new()?.parse("1-3 a: abc")?; assert!(record.validate()); Ok(()) } #[test] fn does_not_validate_an_invalid_password() -> result::Result<(), Error> { let record = Parser::new()?.parse("1-3 a: aaaa")?; assert!(!record.validate()); Ok(()) } }
use std::collections::HashSet;
use std::io::{self};

/// Identifier of a city: an index into the adjacency list.
#[derive(Debug, Clone, Copy)]
struct CityId(usize);

/// Edge weight between two cities.
#[derive(Debug, Clone, Copy)]
struct Distance(f64);

/// Returns the unvisited vertex with the smallest tentative distance and that
/// distance. Yields `(usize::MAX, f64::MAX)` when every vertex is visited.
fn get_vertex_with_min_distance(
    min_distances: &[f64],
    visited: &HashSet<usize>,
) -> (usize, f64) {
    let mut current_min_distance = f64::MAX;
    let mut vertex = usize::MAX;
    for (vertex_idx, distance) in min_distances.iter().enumerate() {
        if visited.contains(&vertex_idx) {
            continue;
        }
        // `<=` deliberately keeps the last index on ties (matches the
        // original selection behavior).
        if *distance <= current_min_distance {
            vertex = vertex_idx;
            current_min_distance = *distance;
        }
    }
    (vertex, current_min_distance)
}

/// Classic Dijkstra over an adjacency list. Returns the shortest distance
/// from `start` to every vertex (`f64::MAX` for unreachable vertices).
fn dijkstra_algorithm(start: usize, edges: &[Vec<(CityId, Distance)>]) -> Vec<f64> {
    let number_of_vertices = edges.len();
    let mut min_distances = vec![f64::MAX; number_of_vertices];
    min_distances[start] = 0.0;
    let mut visited: HashSet<usize> = HashSet::new();
    while visited.len() != number_of_vertices {
        let (vertex, current_min_distance) = get_vertex_with_min_distance(&min_distances, &visited);
        if current_min_distance == f64::MAX {
            // Remaining vertices are unreachable.
            break;
        }
        visited.insert(vertex);
        // Relax every edge leaving the settled vertex.
        for edge in edges[vertex].iter() {
            let (destination, distance_to_destination) = edge;
            if visited.contains(&destination.0) {
                continue;
            }
            let new_path_distance = current_min_distance + distance_to_destination.0;
            if min_distances[destination.0] > new_path_distance {
                min_distances[destination.0] = new_path_distance;
            }
        }
    }
    min_distances
}

fn main() -> io::Result<()> {
    let files_results = vec![
        ("test.txt", 605_f64, 982_f64),
        ("input.txt", 117_f64, 909_f64),
    ];
    for (f, result_1, result_2) in files_results.into_iter() {
        println!("File: {}", f);
        let file_content: Vec<String> = std::fs::read_to_string(f)?
            .lines()
            .map(|x| x.to_string())
            .collect();
        // Build the city list and symmetric adjacency list from lines of the
        // form "A to B = 42".
        let mut cities: Vec<&str> = Vec::new();
        let mut distances: Vec<Vec<(CityId, Distance)>> = Vec::new();
        for line in file_content.iter() {
            let cities_value: Vec<&str> = line.split(" = ").collect();
            let city_names: Vec<&str> = cities_value[0].split(" to ").collect();
            let value = cities_value[1].parse::<f64>().unwrap();
            let city_1 = city_names[0];
            let city_2 = city_names[1];
            let id_1 = if let Some(i) = (0..cities.len()).find(|&i| cities[i] == city_1) {
                i
            } else {
                cities.push(city_1);
                distances.push(Vec::new());
                cities.len() - 1
            };
            let id_2 = if let Some(i) = (0..cities.len()).find(|&i| cities[i] == city_2) {
                i
            } else {
                cities.push(city_2);
                distances.push(Vec::new());
                cities.len() - 1
            };
            distances[id_1].push((CityId(id_2), Distance(value)));
            distances[id_2].push((CityId(id_1), Distance(value)));
        }
        println!("{:?}", cities);
        for row in distances.iter() {
            println!("{:?}", row);
        }
        // Part 1: greedy nearest-neighbor walk starting at city 0.
        let mut total_distance = 0_f64;
        let mut visited: HashSet<usize> = HashSet::new();
        let mut current = 0;
        // BUG FIX: this was `while !visited.len() != cities.len()`. On a
        // usize, `!` is bitwise NOT, so the condition was effectively always
        // true and the loop only terminated through the `break` below.
        while visited.len() != cities.len() {
            visited.insert(current);
            let current_dijkstra = dijkstra_algorithm(current, &distances);
            let mut dijkstra_with_indices: Vec<(f64, usize)> = current_dijkstra
                .iter()
                .enumerate()
                .filter(|(i, _)| !visited.contains(i))
                .map(|(x, y)| (*y, x))
                .collect();
            dijkstra_with_indices.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
            let (val, min_id) = if let Some((val, min_id)) = dijkstra_with_indices.first() {
                (val, min_id)
            } else {
                break;
            };
            println!("Dijkstra: {:?}", current_dijkstra);
            println!("MinId {}, Val {}", min_id, val);
            current = *min_id;
            total_distance += current_dijkstra[current];
        }
        println!("Total distance {}", total_distance);
        assert_eq!(total_distance, result_1);
        // Part 2: invert the weights so that the *shortest* inverted path
        // picks the *longest* direct edges.
        let mut inverted_distances = vec![Vec::new(); distances.len()];
        for (i, edges) in distances.iter().enumerate() {
            for edge in edges.iter() {
                let (destination, distance_to_destination) = edge;
                inverted_distances[i]
                    .push((*destination, Distance(1.0 / distance_to_destination.0)));
            }
        }
        let mut max_total_distance = 0_f64;
        // Try every starting city and keep the longest greedy tour.
        for i in 0..distances.len() {
            let mut current = i;
            let mut total_distance = 0_f64;
            let mut visited: HashSet<usize> = HashSet::new();
            // BUG FIX: same `!visited.len()` bitwise-NOT condition as above.
            while visited.len() != cities.len() {
                visited.insert(current);
                let current_dijkstra = dijkstra_algorithm(current, &inverted_distances);
                let mut dijkstra_with_indices: Vec<(f64, usize)> = current_dijkstra
                    .iter()
                    .enumerate()
                    .filter(|(i, _)| !visited.contains(i))
                    .map(|(x, y)| (*y, x))
                    .collect();
                dijkstra_with_indices.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
                let (val, max_id) = if let Some((val, max_id)) = dijkstra_with_indices.first() {
                    (val, max_id)
                } else {
                    break;
                };
                println!("Dijkstra: {:?}", current_dijkstra);
                println!("maxId {}, Val {}", max_id, val);
                current = *max_id;
                // Undo the inversion to accumulate the real distance.
                total_distance += 1.0 / current_dijkstra[current];
            }
            if total_distance > max_total_distance {
                max_total_distance = total_distance;
            }
            println!("Total distance {} for {}", total_distance, i);
        }
        println!("Max total distance {}", max_total_distance);
        assert_eq!(max_total_distance, result_2);
    }
    Ok(())
}
cfg_if::cfg_if! {
    // This integration test only runs on macOS: it exercises the macOS
    // editor installer manifest/loader path.
    if #[cfg(target_os = "macos")] {
        use uvm_core::unity;

        // NOTE(review): this downloads a real installer, so it needs network
        // access and may take a long time; consider marking it #[ignore].
        #[test]
        fn downloads_editor_installer_for_version() {
            let component = unity::Component::Editor;
            // Unity version 2018.2.6f1.
            let version = unity::Version::f(2018, 2, 6, 1);
            let manifest = unity::Manifest::load(&version).expect("a unity manifest");
            let loader = uvm_install_core::Loader::new(component, &manifest);
            let installer_path = loader.download().expect("path to installer");
            assert!(installer_path.exists());
        }
    }
}
// NOTE(review): this follows the svd2rust generated-code layout (read-only
// 32-bit register split into two 16-bit fields); prefer regenerating from
// the SVD over hand-editing.
#[doc = "Register `OTG_DAINT` reader"]
pub type R = crate::R<OTG_DAINT_SPEC>;
#[doc = "Field `IEPINT` reader - IEPINT"]
pub type IEPINT_R = crate::FieldReader<u16>;
#[doc = "Field `OEPINT` reader - OEPINT"]
pub type OEPINT_R = crate::FieldReader<u16>;
impl R {
    #[doc = "Bits 0:15 - IEPINT"]
    #[inline(always)]
    pub fn iepint(&self) -> IEPINT_R {
        // Low half-word: per-IN-endpoint interrupt bits.
        IEPINT_R::new((self.bits & 0xffff) as u16)
    }
    #[doc = "Bits 16:31 - OEPINT"]
    #[inline(always)]
    pub fn oepint(&self) -> OEPINT_R {
        // High half-word: per-OUT-endpoint interrupt bits.
        OEPINT_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
}
#[doc = "When a significant event occurs on an endpoint, a OTG_DAINT register interrupts the application using the device OUT endpoints interrupt bit or device IN endpoints interrupt bit of the OTG_GINTSTS register (OEPINT or IEPINT in OTG_GINTSTS, respectively). There is one interrupt bit per endpoint, up to a maximum of 16 bits for OUT endpoints and 16 bits for IN endpoints. For a bidirectional endpoint, the corresponding IN and OUT interrupt bits are used. Bits in this register are set and cleared when the application sets and clears bits in the corresponding device endpoint-x interrupt register (OTG_DIEPINTx/OTG_DOEPINTx).\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`otg_daint::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct OTG_DAINT_SPEC;
impl crate::RegisterSpec for OTG_DAINT_SPEC {
    // Raw register width is 32 bits.
    type Ux = u32;
}
#[doc = "`read()` method returns [`otg_daint::R`](R) reader structure"]
impl crate::Readable for OTG_DAINT_SPEC {}
#[doc = "`reset()` method sets OTG_DAINT to value 0"]
impl crate::Resettable for OTG_DAINT_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
//! EFOY fuel cells power our ATLAS system in the winter.
//!
//! The EFOYs provide their own status information via their own type of heartbeats (contained in
//! the full ATLAS heartbeat messages). In order to construct the history of the EFOY systems, we
//! need to process the full stream of heartbeats for a season.

use atlas::{Error, Result};
use regex::Regex;
use std::slice::Iter;
use std::str::FromStr;

lazy_static! {
    // Extended-mode regex for one heartbeat line:
    // "<state>,cartridge <name> consumed <litres>l,<voltage>,<current>".
    static ref HEARTBEAT_REGEX: Regex = Regex::new(r"(?x)^
        (?P<state>.*),
        cartridge\s(?P<cartridge>.*)\sconsumed\s(?P<consumed>\d+\.\d+)l,
        (?P<voltage>.*),
        (?P<current>.*)
        $").unwrap();
}

/// Instantaneous status report from one of our EFOY fuel cell systems.
#[derive(Clone, Debug, Default, PartialEq, PartialOrd, Serialize)]
pub struct Heartbeat {
    /// The state of the efoy system at time of heartbeat.
    pub state: State,
    /// The active cartridge.
    ///
    /// The ATLAS EFOYs have four cartridges, named "1.1", "1.2", "2.1", and "2.2".
    pub cartridge: String,
    /// The fuel consumed so far by the active cartridge.
    pub consumed: f32,
    /// The voltage level of the efoy.
    pub voltage: f32,
    /// The current level of the efoy.
    pub current: f32,
}

/// The operating state/mode of an EFOY fuel cell system.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Serialize)]
pub enum State {
    /// The efoy is in auto mode, and is off.
    AutoOff,
    /// The efoy is in auto mode, and is on.
    AutoOn,
    /// The efoy is in an error state.
    Error,
    /// The efoy is heating itself to avoid freezing.
    FreezeProtection,
}

/// Stateful representation of an EFOY system.
///
/// Used to calculate the fuel status of an EFOY through time from a series of `Heartbeat`s.
#[derive(Clone, Debug)]
pub struct Efoy {
    cartridges: Vec<Cartridge>,
}

/// An efoy cartridge.
#[derive(Clone, Debug)]
pub struct Cartridge {
    // Cartridge name, e.g. "1.1".
    name: String,
    // Total fuel capacity in litres.
    capacity: f32,
    // Litres consumed so far.
    consumed: f32,
    // True once the EFOY has moved past this cartridge.
    emptied: bool,
}

/// An iterator over an EFOY's cartridges.
#[derive(Debug)]
pub struct Cartridges<'a> {
    iter: Iter<'a, Cartridge>,
}

impl FromStr for Heartbeat {
    type Err = Error;
    // Parses one heartbeat line via HEARTBEAT_REGEX; any line that does not
    // match yields Error::EfoyHeartbeatFormat.
    fn from_str(s: &str) -> Result<Heartbeat> {
        if let Some(ref captures) = HEARTBEAT_REGEX.captures(s) {
            Ok(Heartbeat {
                state: parse_name_from_captures!(captures, "state"),
                cartridge: captures.name("cartridge").unwrap().as_str().to_string(),
                consumed: parse_name_from_captures!(captures, "consumed"),
                voltage: parse_name_from_captures!(captures, "voltage"),
                current: parse_name_from_captures!(captures, "current"),
            })
        } else {
            Err(Error::EfoyHeartbeatFormat(s.to_string()))
        }
    }
}

impl Heartbeat {
    /// Returns true if this efoy is on.
    pub fn is_on(&self) -> bool {
        match self.state {
            State::AutoOn => true,
            _ => false,
        }
    }
}

impl Default for State {
    // A freshly defaulted heartbeat reports "auto off".
    fn default() -> State {
        State::AutoOff
    }
}

impl FromStr for State {
    type Err = Error;
    // Maps the exact status strings the EFOY emits onto the State enum.
    fn from_str(s: &str) -> Result<State> {
        match s {
            "auto off" => Ok(State::AutoOff),
            "auto on" => Ok(State::AutoOn),
            "error" => Ok(State::Error),
            "freeze protection" => Ok(State::FreezeProtection),
            _ => Err(Error::UnknownEfoyState(s.to_string())),
        }
    }
}

impl From<State> for String {
    // Inverse of `FromStr for State`: render the canonical status string.
    fn from(efoy_state: State) -> String {
        match efoy_state {
            State::AutoOff => "auto off".to_string(),
            State::AutoOn => "auto on".to_string(),
            State::Error => "error".to_string(),
            State::FreezeProtection => "freeze protection".to_string(),
        }
    }
}

impl Efoy {
    /// Creates a new efoy with no fuel cartridges.
    ///
    /// # Examples
    ///
    /// ```
    /// # use glacio::atlas::Efoy;
    /// let efoy = Efoy::new();
    /// ```
    pub fn new() -> Efoy {
        Default::default()
    }

    /// Adds a cartridge to this EFOY.
    ///
    /// Order matters. Because of the way heartbeats work, we don't get an explicit "this cartridge
    /// is empty" message, the EFOY just moves on to the next cartridge. Therefore, once we've
    /// moved on to a "later" cartridge, all cartridges "before" it are emptied.
    ///
    /// Returns an error if a cartridge already exists with that name.
    ///
    /// # Examples
    ///
    /// ```
    /// # use glacio::atlas::Efoy;
    /// let mut efoy = Efoy::new();
    /// efoy.add_cartridge("1.1", 8.0).unwrap();
    /// ```
    pub fn add_cartridge(&mut self, name: &str, capacity: f32) -> Result<()> {
        if self.cartridges.iter().any(
            |cartridge| cartridge.name == name,
        )
        {
            return Err(Error::DuplicateEfoyCartridge(name.to_string()));
        }
        self.cartridges.push(Cartridge::new(name, capacity));
        Ok(())
    }

    /// Returns the fuel level for the named cartridge.
    ///
    /// Returns none if there is no cartridge with the provided name.
    ///
    /// # Examples
    ///
    /// ```
    /// # use glacio::atlas::Efoy;
    /// let mut efoy = Efoy::new();
    /// efoy.add_cartridge("1.1", 8.0);
    /// assert_eq!(Some(8.0), efoy.fuel("1.1"));
    /// assert_eq!(None, efoy.fuel("not a cartridge"));
    /// ```
    pub fn fuel(&self, name: &str) -> Option<f32> {
        self.cartridge(name).map(|cartridge| cartridge.fuel())
    }

    /// Returns the fuel in a cartridge as a percentage of its capacity.
    ///
    /// # Examples
    ///
    /// ```
    /// # use glacio::atlas::Efoy;
    /// let mut efoy = Efoy::new();
    /// efoy.add_cartridge("1.1", 8.0);
    /// assert_eq!(Some(100.0), efoy.fuel_percentage("1.1"));
    /// ```
    pub fn fuel_percentage(&self, name: &str) -> Option<f32> {
        self.cartridge(name).map(
            |cartridge| cartridge.fuel_percentage(),
        )
    }

    /// Returns the total fuel remaining in this EFOY.
    ///
    /// # Examples
    ///
    /// ```
    /// # use glacio::atlas::Efoy;
    /// let mut efoy = Efoy::new();
    /// efoy.add_cartridge("1.1", 8.0);
    /// assert_eq!(8.0, efoy.total_fuel());
    /// efoy.add_cartridge("1.2", 8.0);
    /// assert_eq!(16.0, efoy.total_fuel());
    /// ```
    pub fn total_fuel(&self) -> f32 {
        self.cartridges.iter().map(|c| c.fuel()).sum()
    }

    /// Returns the total fuel in this EFOY as a percentage of full capacity.
    ///
    /// # Examples
    ///
    /// ```
    /// # use glacio::atlas::Efoy;
    /// let mut efoy = Efoy::new();
    /// efoy.add_cartridge("1.1", 8.0);
    /// assert_eq!(100.0, efoy.total_fuel_percentage());
    /// ```
    pub fn total_fuel_percentage(&self) -> f32 {
        // Sum fuel and capacity in one pass, then take the ratio.
        let (fuel, capacity) = self.cartridges.iter().fold((0., 0.), |(fuel, capacity), cartridge| {
            (fuel + cartridge.fuel(), capacity + cartridge.capacity)
        });
        100. * fuel / capacity
    }

    /// Process an efoy heartbeat.
    ///
    /// The named cartridge is set to the starting fuel level minus the consumed fuel. All
    /// "earlier" cartridges are set to zero. Order is defined by the order the cartridges were
    /// added to the efoy.
    ///
    /// If a "later" cartridge has already been processed, returns an error.
    ///
    /// ```
    /// # use glacio::atlas::efoy::{Efoy, Heartbeat};
    /// let heartbeat = Heartbeat {
    ///     cartridge: "1.1".to_string(),
    ///     consumed: 4.2,
    ///     ..Default::default()
    /// };
    /// let mut efoy = Efoy::new();
    /// efoy.add_cartridge("1.1", 8.0);
    /// efoy.process(&heartbeat).unwrap();
    /// assert_eq!(8.0 - 4.2, efoy.fuel("1.1").unwrap());
    /// ```
    pub fn process(&mut self, heartbeat: &Heartbeat) -> Result<()> {
        if let Some(cartridge) = self.cartridge(&heartbeat.cartridge) {
            if cartridge.emptied {
                // FIXME
                // NOTE(review): heartbeats for an already-emptied cartridge
                // are silently accepted; decide whether this should be the
                // commented-out error below instead.
                return Ok(());
                // return Err(Error::EmptyCartridge(cartridge.name.clone()));
            }
        } else if heartbeat.cartridge.contains("->") {
            return Ok(()); // Transition heartbeat, just ignore it
        } else {
            return Err(Error::CartridgeName(heartbeat.cartridge.to_string()));
        }
        // Every cartridge before the active one is now empty.
        for cartridge in self.cartridges.iter_mut() {
            if cartridge.name == heartbeat.cartridge {
                cartridge.consumed = heartbeat.consumed;
                return Ok(());
            } else {
                cartridge.empty();
            }
        }
        // The loop always returns at the named cartridge, whose existence was
        // verified above.
        unreachable!()
    }

    /// Returns an iterator over this efoy's cartridges.
/// /// # Examples /// /// ``` /// # use glacio::atlas::Efoy; /// let mut efoy = Efoy::new(); /// efoy.add_cartridge("1.1", 8.0); /// assert_eq!(1, efoy.iter().count()); pub fn iter(&self) -> Cartridges { Cartridges { iter: self.cartridges.iter() } } fn cartridge(&self, name: &str) -> Option<&Cartridge> { self.cartridges.iter().find( |&cartridge| cartridge.name == name, ) } } impl Default for Efoy { fn default() -> Efoy { Efoy { cartridges: Vec::new() } } } impl Cartridge { fn new(name: &str, capacity: f32) -> Cartridge { Cartridge { name: name.to_string(), capacity: capacity, consumed: 0., emptied: false, } } /// Returns the name of this cartridge. pub fn name(&self) -> &str { &self.name } fn fuel(&self) -> f32 { self.capacity - self.consumed } /// Returns the fuel percentage of this cartridge. pub fn fuel_percentage(&self) -> f32 { 100. * self.fuel() / self.capacity } fn empty(&mut self) { self.consumed = self.capacity; self.emptied = true; } } impl<'a> Iterator for Cartridges<'a> { type Item = &'a Cartridge; fn next(&mut self) -> Option<Self::Item> { self.iter.next() } } #[cfg(test)] mod tests { use super::*; #[test] fn efoy_add_cartridge() { let mut efoy = Efoy::new(); assert!(efoy.add_cartridge("1.1", 8.0).is_ok()); assert!(efoy.add_cartridge("1.2", 8.0).is_ok()); assert!(efoy.add_cartridge("1.1", 8.0).is_err()); } #[test] fn efoy_fuel() { let mut efoy = Efoy::new(); efoy.add_cartridge("1.1", 8.0).unwrap(); assert_eq!(Some(8.0), efoy.fuel("1.1")); assert_eq!(None, efoy.fuel("1.2")); assert_eq!(Some(100.0), efoy.fuel_percentage("1.1")); assert_eq!(8.0, efoy.total_fuel()); assert_eq!(100.0, efoy.total_fuel_percentage()); assert!(efoy.add_cartridge("1.2", 8.0).is_ok()); assert_eq!(Some(8.0), efoy.fuel("1.2")); assert_eq!(16.0, efoy.total_fuel()); assert_eq!(100.0, efoy.total_fuel_percentage()); } #[test] fn efoy_process() { let mut efoy = Efoy::new(); efoy.add_cartridge("1.1", 8.0).unwrap(); efoy.add_cartridge("1.2", 8.0).unwrap(); efoy.add_cartridge("2.1", 
8.0).unwrap(); efoy.add_cartridge("2.2", 8.0).unwrap(); let mut heartbeat = Heartbeat { cartridge: "1.1".to_string(), consumed: 4.2, ..Default::default() }; efoy.process(&heartbeat).unwrap(); assert_eq!(8.0 - 4.2, efoy.fuel("1.1").unwrap()); assert_eq!(32.0 - 4.2, efoy.total_fuel()); assert_eq!( 100. * (8.0 - 4.2) / 8.0, efoy.fuel_percentage("1.1").unwrap() ); assert_eq!(100. * ((32.0 - 4.2) / 32.0), efoy.total_fuel_percentage()); heartbeat.cartridge = "3.1".to_string(); assert!(efoy.process(&heartbeat).is_err()); assert_eq!(8.0 - 4.2, efoy.fuel("1.1").unwrap()); heartbeat.cartridge = "1.2".to_string(); efoy.process(&heartbeat).unwrap(); assert_eq!(0.0, efoy.fuel("1.1").unwrap()); assert_eq!(8.0 - 4.2, efoy.fuel("1.2").unwrap()); heartbeat.cartridge = "1.1".to_string(); // FIXME //assert!(efoy.process(&heartbeat).is_err()); } }
/// Prints a string chosen by an `if` expression.
///
/// The original did not compile: one arm of the `if` yielded `&str` and the
/// other an integer, but both arms of an `if` expression must have the same
/// type. Both arms now produce a `String`.
fn main() {
    // `if` is an expression; its branches must unify to a single type.
    let a = if true { "abc".to_string() } else { 12.to_string() };
    println!("{}", a);
}
#![deny(warnings)]
extern crate warp;

/// The required-cookie filter extracts the named cookie's value and rejects
/// requests that do not carry a cookie with exactly that name.
#[test]
fn cookie() {
    let filter = warp::cookie("foo");

    // Cookie present on its own.
    let request = warp::test::request().header("cookie", "foo=bar");
    assert_eq!(request.filter(&filter).unwrap(), "bar");

    // Cookie present among others.
    let request = warp::test::request().header("cookie", "abc=def; foo=baz");
    assert_eq!(request.filter(&filter).unwrap(), "baz");

    // Named cookie absent: the filter must not match.
    let request = warp::test::request().header("cookie", "abc=def");
    assert!(!request.matches(&filter));

    // A name that merely starts with "foo" is not "foo".
    let request = warp::test::request().header("cookie", "foobar=quux");
    assert!(!request.matches(&filter));
}

/// The optional-cookie filter always matches, yielding `Some(value)` when the
/// cookie is present and `None` otherwise.
#[test]
fn optional() {
    let filter = warp::cookie::optional("foo");

    let request = warp::test::request().header("cookie", "foo=bar");
    assert_eq!(request.filter(&filter).unwrap().unwrap(), "bar");

    let request = warp::test::request().header("cookie", "abc=def; foo=baz");
    assert_eq!(request.filter(&filter).unwrap().unwrap(), "baz");

    // Missing cookie still matches (optional), just without a value.
    let request = warp::test::request().header("cookie", "abc=def");
    assert!(request.matches(&filter));

    let request = warp::test::request().header("cookie", "foobar=quux");
    assert!(request.matches(&filter));
}
// Public sub-modules; judging by their names each implements one expression
// transformation (approximation, evaluation, substitution, expansion,
// parenthesization, collection, suffix handling) — confirm in each module.
pub mod approx;
pub mod eval;
pub mod subst;
pub mod expand;
pub mod paren;
pub mod collect;
pub mod use_suffix;
// Internal helpers shared by the modules above; not part of the public API.
mod tools;

// Flat re-exports kept for reference; enable if callers should reach these
// items without naming the sub-module.
// pub use approx::approx;
// pub use eval::Eval;
// pub use subst::Subst;
// pub use expand::Expand;
use super::{EthereumRPC, Either, RPCStep, RPCTransaction, RPCBlock, RPCLog, RPCReceipt, RPCTopicFilter, RPCLogFilter, RPCTraceConfig, RPCBreakpointConfig, RPCSourceMapConfig}; use super::filter::*; use super::serialize::*; use super::solidity::*; use error::Error; use miner::MinerState; use rlp::{self, UntrustedRlp}; use bigint::{M256, U256, H256, H2048, Address, Gas}; use hexutil::{read_hex, to_hex}; use block::{Block, TotalHeader, Account, Log, Receipt, FromKey, Transaction, UnsignedTransaction, TransactionAction, GlobalSignaturePatch, RlpHash}; use blockchain::chain::HeaderHash; use sputnikvm::{ValidTransaction, UntrustedTransaction, VM, VMStatus, MachineStatus, HeaderParams, SeqTransactionVM, Patch, Memory, AccountChange, AccountCommitment}; use sputnikvm_stateful::MemoryStateful; use std::str::FromStr; use std::collections::HashMap; use std::rc::Rc; use sha3::{Keccak256, Digest}; use jsonrpc_macros::Trailing; pub fn from_block_number<T: Into<Option<String>>>(state: &MinerState, value: T) -> Result<usize, Error> { let value: Option<String> = value.into(); if value == Some("latest".to_string()) || value == Some("pending".to_string()) || value == None { Ok(state.block_height()) } else if value == Some("earliest".to_string()) { Ok(0) } else { let v: u64 = U256::from(read_hex(&value.unwrap())?.as_slice()).into(); let v = v as usize; if v > state.block_height() { Err(Error::NotFound) } else { Ok(v) } } } pub fn to_rpc_log(receipt: &Receipt, index: usize, transaction: &Transaction, block: &Block) -> RPCLog { use sha3::{Keccak256, Digest}; let transaction_hash = H256::from(Keccak256::digest(&rlp::encode(transaction).to_vec()).as_slice()); let transaction_index = { let mut i = 0; let mut found = false; for transaction in &block.transactions { let other_hash = H256::from(Keccak256::digest(&rlp::encode(transaction).to_vec()).as_slice()); if transaction_hash == other_hash { found = true; break; } i += 1; } assert!(found); i }; RPCLog { removed: false, log_index: 
Hex(index), transaction_index: Hex(transaction_index), transaction_hash: Hex(transaction_hash), block_hash: Hex(block.header.header_hash()), block_number: Hex(block.header.number), data: Bytes(receipt.logs[index].data.clone()), topics: receipt.logs[index].topics.iter().map(|t| Hex(*t)).collect(), } } pub fn to_rpc_receipt(state: &MinerState, receipt: Receipt, transaction: &Transaction, block: &Block) -> Result<RPCReceipt, Error> { use sha3::{Keccak256, Digest}; let transaction_hash = H256::from(Keccak256::digest(&rlp::encode(transaction).to_vec()).as_slice()); let transaction_index = { let mut i = 0; let mut found = false; for transaction in &block.transactions { let other_hash = H256::from(Keccak256::digest(&rlp::encode(transaction).to_vec()).as_slice()); if transaction_hash == other_hash { found = true; break; } i += 1; } assert!(found); i }; let cumulative_gas_used = { let mut sum = Gas::zero(); for i in 0..(transaction_index + 1) { let other_hash = H256::from(Keccak256::digest(&rlp::encode(&block.transactions[i]).to_vec()).as_slice()); sum = sum + state.get_receipt_by_transaction_hash(other_hash)?.used_gas; } sum }; let contract_address = { if transaction.action == TransactionAction::Create { Some(transaction.address().unwrap()) } else { None } }; Ok(RPCReceipt { transaction_hash: Hex(transaction_hash), transaction_index: Hex(transaction_index), block_hash: Hex(block.header.header_hash()), block_number: Hex(block.header.number), cumulative_gas_used: Hex(cumulative_gas_used), gas_used: Hex(receipt.used_gas), contract_address: contract_address.map(|v| Hex(v)), logs: { let mut ret = Vec::new(); for i in 0..receipt.logs.len() { ret.push(to_rpc_log(&receipt, i, transaction, block)); } ret }, root: Hex(receipt.state_root), status: if state.receipt_status(transaction.rlp_hash()) { 1 } else { 0 }, }) } pub fn to_rpc_transaction(transaction: Transaction, block: Option<&Block>) -> RPCTransaction { use sha3::{Keccak256, Digest}; let hash = 
H256::from(Keccak256::digest(&rlp::encode(&transaction).to_vec()).as_slice()); RPCTransaction { from: Some(Hex(transaction.caller().unwrap())), to: match transaction.action { TransactionAction::Call(address) => Some(Hex(address)), TransactionAction::Create => None, }, gas: Some(Hex(transaction.gas_limit)), gas_price: Some(Hex(transaction.gas_price)), value: Some(Hex(transaction.value)), data: Some(Bytes(transaction.input)), nonce: Some(Hex(transaction.nonce)), hash: Some(Hex(hash)), block_hash: block.map(|b| Hex(b.header.header_hash())), block_number: block.map(|b| Hex(b.header.number)), transaction_index: { if block.is_some() { let block = block.unwrap(); let mut i = 0; let mut found = false; for transaction in &block.transactions { let other_hash = H256::from(Keccak256::digest(&rlp::encode(transaction).to_vec()).as_slice()); if hash == other_hash { found = true; break; } i += 1; } if found { Some(Hex(i)) } else { None } } else { None } }, } } pub fn to_rpc_block(block: Block, total_header: TotalHeader, full_transactions: bool) -> RPCBlock { use sha3::{Keccak256, Digest}; let logs_bloom: H2048 = block.header.logs_bloom.clone().into(); RPCBlock { number: Hex(block.header.number), hash: Hex(block.header.header_hash()), parent_hash: Hex(block.header.parent_hash), nonce: Hex(block.header.nonce), sha3_uncles: Hex(block.header.ommers_hash), logs_bloom: Hex(logs_bloom), transactions_root: Hex(block.header.transactions_root), state_root: Hex(block.header.state_root), receipts_root: Hex(block.header.receipts_root), miner: Hex(block.header.beneficiary), difficulty: Hex(block.header.difficulty), total_difficulty: Hex(total_header.total_difficulty()), // TODO: change this to the correct one after the Typhoon is over... 
extra_data: Bytes(rlp::encode(&block.header.extra_data).to_vec()), size: Hex(rlp::encode(&block.header).to_vec().len()), gas_limit: Hex(block.header.gas_limit), gas_used: Hex(block.header.gas_used), timestamp: Hex(block.header.timestamp), transactions: if full_transactions { Either::Right(block.transactions.iter().map(|t| to_rpc_transaction(t.clone(), Some(&block))).collect()) } else { Either::Left(block.transactions.iter().map(|t| { let encoded = rlp::encode(t).to_vec(); Hex(H256::from(Keccak256::digest(&encoded).as_slice())) }).collect()) }, uncles: block.ommers.iter().map(|u| Hex(u.header_hash())).collect(), } } pub fn to_signed_transaction(state: &MinerState, transaction: RPCTransaction, stateful: &MemoryStateful) -> Result<Transaction, Error> { let address = match transaction.from { Some(val) => val.0, None => Address::default(), }; let secret_key = { let mut secret_key = None; for key in state.accounts() { if Address::from_secret_key(&key)? == address { secret_key = Some(key); } } match secret_key { Some(val) => val, None => return Err(Error::NotFound), } }; let block = state.get_block_by_number(state.block_height()); let trie = stateful.state_of(block.header.state_root); let account: Option<Account> = trie.get(&address); let unsigned = UnsignedTransaction { nonce: match transaction.nonce { Some(val) => val.0, None => { account.as_ref().map(|account| account.nonce).unwrap_or(U256::zero()) } }, gas_price: match transaction.gas_price { Some(val) => val.0, None => Gas::zero(), }, gas_limit: match transaction.gas { Some(val) => val.0, None => Gas::from(90000u64), }, action: match transaction.to { Some(val) => TransactionAction::Call(val.0), None => TransactionAction::Create, }, value: match transaction.value { Some(val) => val.0, None => U256::zero(), }, input: match transaction.data { Some(val) => val.0, None => Vec::new(), }, }; let transaction = unsigned.sign::<GlobalSignaturePatch>(&secret_key); Ok(transaction) } pub fn to_valid_transaction<P: Patch>(state: 
&MinerState, transaction: RPCTransaction, stateful: &MemoryStateful) -> Result<ValidTransaction, Error> { let address = match transaction.from { Some(val) => val.0, None => Address::default(), }; let block = state.get_block_by_number(state.block_height()); let trie = stateful.state_of(block.header.state_root); let account: Option<Account> = trie.get(&address); let commitment = match account { Some(account) => { let code = match stateful.code(account.code_hash) { Some(code) => Rc::new(code), None => return Err(Error::NotFound), }; AccountCommitment::Full { address, nonce: account.nonce, balance: account.balance, code: code, } }, None => { AccountCommitment::Nonexist(address) }, }; let untrusted = UntrustedTransaction { caller: commitment, gas_price: match transaction.gas_price { Some(val) => val.0, None => Gas::zero(), }, gas_limit: match transaction.gas { Some(val) => val.0, None => Gas::from(90000u64), }, action: match transaction.to { Some(val) => TransactionAction::Call(val.0), None => TransactionAction::Create, }, value: match transaction.value { Some(val) => val.0, None => U256::zero(), }, input: Rc::new(match transaction.data { Some(val) => val.0, None => Vec::new(), }), }; let valid = untrusted.to_valid::<P>()?; Ok(valid) } pub fn from_topic_filter(filter: Option<RPCTopicFilter>) -> Result<TopicFilter, Error> { Ok(match filter { None => TopicFilter::All, Some(RPCTopicFilter::Single(s)) => TopicFilter::Or(vec![ s.0 ]), Some(RPCTopicFilter::Or(ss)) => { TopicFilter::Or(ss.into_iter().map(|v| v.0).collect()) }, }) } pub fn from_log_filter(state: &MinerState, filter: RPCLogFilter) -> Result<LogFilter, Error> { Ok(LogFilter { from_block: from_block_number(state, filter.from_block)?, to_block: from_block_number(state, filter.to_block)?, address: match filter.address { Some(val) => Some(val.0), None => None, }, topics: match filter.topics { Some(topics) => { let mut ret = Vec::new(); for i in 0..4 { if topics.len() > i { 
ret.push(from_topic_filter(topics[i].clone())?); } else { ret.push(TopicFilter::All); } } ret }, None => vec![TopicFilter::All, TopicFilter::All, TopicFilter::All, TopicFilter::All], }, }) } pub fn replay_transaction<P: Patch>( stateful: &MemoryStateful<'static>, transaction: Transaction, block: &Block, last_hashes: &[H256], config: &RPCTraceConfig ) -> Result<(Vec<RPCStep>, SeqTransactionVM<P>), Error> { let valid = stateful.to_valid::<P>(transaction)?; let mut vm = SeqTransactionVM::<P>::new(valid, HeaderParams::from(&block.header)); let mut steps = Vec::new(); let mut last_gas = Gas::zero(); loop { match vm.status() { VMStatus::ExitedOk | VMStatus::ExitedErr(_) => break, VMStatus::ExitedNotSupported(_) => panic!(), VMStatus::Running => { stateful.step(&mut vm, block.header.number, &last_hashes); let gas = vm.used_gas(); let gas_cost = gas - last_gas; last_gas = gas; if let Some(machine) = vm.current_machine() { let depth = machine.state().depth; let error = match machine.status() { MachineStatus::ExitedErr(err) => format!("{:?}", err), _ => "".to_string(), }; let pc = machine.pc().position(); let opcode_pc = machine.pc().opcode_position(); let op = machine.pc().code()[pc]; let code_hash = H256::from(Keccak256::digest(machine.pc().code()).as_slice()); let address = machine.state().context.address; let memory = if config.disable_memory { None } else { let mut ret = Vec::new(); for i in 0..machine.state().memory.len() { ret.push(machine.state().memory.read_raw(U256::from(i))); } Some(vec![Bytes(ret)]) }; let stack = if config.disable_stack { None } else { let mut ret = Vec::new(); for i in 0..machine.state().stack.len() { ret.push(Hex(machine.state().stack.peek(i).unwrap())); } Some(ret) }; let storage = if config.disable_storage { None } else { let mut for_storage = None; let context_address = machine.state().context.address; for account in machine.state().account_state.accounts() { match account { &AccountChange::Full { address, ref changing_storage, .. 
} => { if address == context_address { for_storage = Some(changing_storage.clone()); } }, &AccountChange::Create { address, ref storage, .. } => { if address == context_address { for_storage = Some(storage.clone()); } }, _ => (), } } let storage = for_storage; let mut ret = HashMap::new(); if let Some(storage) = storage { let storage: HashMap<U256, M256> = storage.clone().into(); for (key, value) in storage { ret.insert(Hex(key), Hex(value)); } } Some(ret) }; if let &Some(RPCBreakpointConfig { ref source_map, ref breakpoints }) = &config.breakpoints { if let Some(&RPCSourceMapConfig { ref source_map, ref source_list }) = source_map.get(&Hex(code_hash)) { let source_map = parse_source_map(source_map, source_list)?; let source_map = &source_map[opcode_pc]; let breakpoints = parse_source(breakpoints)?; if let Some((breakpoint_index, breakpoint)) = source_map.source.find_intersection(&breakpoints) { steps.push(RPCStep { depth, error, gas: Hex(gas), gas_cost: Hex(gas_cost), breakpoint_index: Some(breakpoint_index), breakpoint: Some(format!( "{}:{}:{}", breakpoint.offset, breakpoint.length, breakpoint.file_name)), code_hash: Hex(code_hash), address: Hex(address), memory, op, pc, opcode_pc, stack, storage }); } } } else { steps.push(RPCStep { depth, error, gas: Hex(gas), gas_cost: Hex(gas_cost), breakpoint_index: None, breakpoint: None, code_hash: Hex(code_hash), address: Hex(address), memory, op, pc, opcode_pc, stack, storage }); } } }, } } Ok((steps, vm)) }
#[doc = r"Register block"] #[repr(C)] pub struct RegisterBlock { #[doc = "0x00 - control register"] pub cr: CR, #[doc = "0x04 - device configuration register"] pub dcr: DCR, #[doc = "0x08 - status register"] pub sr: SR, #[doc = "0x0c - flag clear register"] pub fcr: FCR, #[doc = "0x10 - data length register"] pub dlr: DLR, #[doc = "0x14 - communication configuration register"] pub ccr: CCR, #[doc = "0x18 - address register"] pub ar: AR, #[doc = "0x1c - ABR"] pub abr: ABR, #[doc = "0x20 - data register"] pub dr: DR, #[doc = "0x24 - polling status mask register"] pub psmkr: PSMKR, #[doc = "0x28 - polling status match register"] pub psmar: PSMAR, #[doc = "0x2c - polling interval register"] pub pir: PIR, #[doc = "0x30 - low-power timeout register"] pub lptr: LPTR, } #[doc = "CR (rw) register accessor: control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr`] module"] pub type CR = crate::Reg<cr::CR_SPEC>; #[doc = "control register"] pub mod cr; #[doc = "DCR (rw) register accessor: device configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dcr`] module"] pub type DCR = crate::Reg<dcr::DCR_SPEC>; #[doc = "device configuration register"] pub mod dcr; #[doc = "SR (r) register accessor: status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`sr`] module"] pub type SR = crate::Reg<sr::SR_SPEC>; #[doc = "status register"] pub mod sr; #[doc = "FCR (rw) register accessor: flag clear register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fcr`] module"] pub type FCR = crate::Reg<fcr::FCR_SPEC>; #[doc = "flag clear register"] pub mod fcr; #[doc = "DLR (rw) register accessor: data length register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dlr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dlr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dlr`] module"] pub type DLR = crate::Reg<dlr::DLR_SPEC>; #[doc = "data length register"] pub mod dlr; #[doc = "CCR (rw) register accessor: communication configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccr::R`]. 
You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ccr`] module"] pub type CCR = crate::Reg<ccr::CCR_SPEC>; #[doc = "communication configuration register"] pub mod ccr; #[doc = "AR (rw) register accessor: address register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ar`] module"] pub type AR = crate::Reg<ar::AR_SPEC>; #[doc = "address register"] pub mod ar; #[doc = "ABR (rw) register accessor: ABR\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`abr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`abr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`abr`] module"] pub type ABR = crate::Reg<abr::ABR_SPEC>; #[doc = "ABR"] pub mod abr; #[doc = "DR (rw) register accessor: data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dr`] module"] pub type DR = crate::Reg<dr::DR_SPEC>; #[doc = "data register"] pub mod dr; #[doc = "PSMKR (rw) register accessor: polling status mask register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`psmkr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`psmkr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`psmkr`] module"] pub type PSMKR = crate::Reg<psmkr::PSMKR_SPEC>; #[doc = "polling status mask register"] pub mod psmkr; #[doc = "PSMAR (rw) register accessor: polling status match register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`psmar::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`psmar::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`psmar`] module"] pub type PSMAR = crate::Reg<psmar::PSMAR_SPEC>; #[doc = "polling status match register"] pub mod psmar; #[doc = "PIR (rw) register accessor: polling interval register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pir::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pir::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`pir`] module"] pub type PIR = crate::Reg<pir::PIR_SPEC>; #[doc = "polling interval register"] pub mod pir; #[doc = "LPTR (rw) register accessor: low-power timeout register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`lptr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`lptr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`lptr`] module"] pub type LPTR = crate::Reg<lptr::LPTR_SPEC>; #[doc = "low-power timeout register"] pub mod lptr;
use engine2d::objects::Rect;

use crate::CHAR_SIZE;

/// Maps each supported character to its source rectangle in the font atlas.
///
/// Every coordinate below fits a 16-column grid of 16-pixel cells laid out in
/// ASCII order starting at ' ' (0x20): x = ((code - 0x20) % 16) * 16,
/// y = ((code - 0x20) / 16) * 16.
/// NOTE(review): CHAR_SIZE is presumably 16.0 to match that spacing — confirm
/// against its definition.
pub fn info() -> [(char, Rect); 71] {
    [
        // Space and '!' (grid row 0).
        (' ', Rect::new(0.0, 0.0, CHAR_SIZE, CHAR_SIZE)),
        ('!', Rect::new(16.0, 0.0, CHAR_SIZE, CHAR_SIZE)),
        // Lowercase letters (grid rows 4-5).
        ('a', Rect::new(16.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('b', Rect::new(32.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('c', Rect::new(48.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('d', Rect::new(64.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('e', Rect::new(80.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('f', Rect::new(96.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('g', Rect::new(112.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('h', Rect::new(128.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('i', Rect::new(144.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('j', Rect::new(160.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('k', Rect::new(176.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('l', Rect::new(192.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('m', Rect::new(208.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('n', Rect::new(224.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('o', Rect::new(240.0, 64.0, CHAR_SIZE, CHAR_SIZE)),
        ('p', Rect::new(0.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        ('q', Rect::new(16.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        ('r', Rect::new(32.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        ('s', Rect::new(48.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        ('t', Rect::new(64.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        ('u', Rect::new(80.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        ('v', Rect::new(96.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        ('w', Rect::new(112.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        ('x', Rect::new(128.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        ('y', Rect::new(144.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        ('z', Rect::new(160.0, 80.0, CHAR_SIZE, CHAR_SIZE)),
        // Uppercase letters (grid rows 2-3).
        ('A', Rect::new(16.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('B', Rect::new(32.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('C', Rect::new(48.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('D', Rect::new(64.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('E', Rect::new(80.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('F', Rect::new(96.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('G', Rect::new(112.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('H', Rect::new(128.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('I', Rect::new(144.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('J', Rect::new(160.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('K', Rect::new(176.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('L', Rect::new(192.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('M', Rect::new(208.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('N', Rect::new(224.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('O', Rect::new(240.0, 32.0, CHAR_SIZE, CHAR_SIZE)),
        ('P', Rect::new(0.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        ('Q', Rect::new(16.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        ('R', Rect::new(32.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        ('S', Rect::new(48.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        ('T', Rect::new(64.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        ('U', Rect::new(80.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        ('V', Rect::new(96.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        ('W', Rect::new(112.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        ('X', Rect::new(128.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        ('Y', Rect::new(144.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        ('Z', Rect::new(160.0, 48.0, CHAR_SIZE, CHAR_SIZE)),
        // Colon, digits, and remaining punctuation (grid rows 0-1).
        (':', Rect::new(160.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('0', Rect::new(0.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('1', Rect::new(16.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('2', Rect::new(32.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('3', Rect::new(48.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('4', Rect::new(64.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('5', Rect::new(80.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('6', Rect::new(96.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('7', Rect::new(112.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('8', Rect::new(128.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('9', Rect::new(144.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('-', Rect::new(208.0, 0.0, CHAR_SIZE, CHAR_SIZE)),
        ('.', Rect::new(224.0, 0.0, CHAR_SIZE, CHAR_SIZE)),
        (',', Rect::new(192.0, 0.0, CHAR_SIZE, CHAR_SIZE)),
        ('\'', Rect::new(112.0, 0.0, CHAR_SIZE, CHAR_SIZE)),
        ('?', Rect::new(240.0, 16.0, CHAR_SIZE, CHAR_SIZE)),
        ('*', Rect::new(160.0, 0.0, CHAR_SIZE, CHAR_SIZE)),
    ]
}
use skeptic::*;

/// Build script: scans every markdown file under `md/` and emits a skeptic
/// doc-test for each testable code block found in them.
fn main() {
    generate_doc_tests(&markdown_files_of_directory("md/"));
}
/*===============================================================================================*/ // Copyright 2016 Kyle Finlay // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /*===============================================================================================*/ /*===============================================================================================*/ //! The logging module. //! //! It provides basic logging functionality. /*===============================================================================================*/ extern crate ansi_term; extern crate log; use self::ansi_term::Colour::{Blue, Purple, Yellow, Red}; use std::boxed::Box; use std::fs::File; use std::io::BufWriter; use std::io::prelude::Write; /*===============================================================================================*/ /*------LOGGER STRUCT----------------------------------------------------------------------------*/ /*===============================================================================================*/ /// The logger struct /// /// It provides basic logging capabilities, and is designed for use within ion. 
pub struct Logger {

    // Private

    // Buffered writer over the log file created by `init`.
    log_file: BufWriter <File>,
    // When true, each record is also echoed to the console.
    log_to_console: bool
}

/*===============================================================================================*/
/*------LOGGER PUBLIC STATIC METHODS-------------------------------------------------------------*/
/*===============================================================================================*/

impl Logger {

    /// Initializes the logger
    ///
    /// This is required before any logging functions are performed.
    /// Any logging done beforehand will be ignored.
    ///
    /// # Arguments
    /// * `log_file_path` - The path to where the log will be saved
    /// * `log_to_console` - Whether the log output should also be printed to the console
    ///
    /// # Return value
    /// A result, returning an error on failure.
    ///
    /// # Examples
    /// ```
    /// # use ion_core::util::Logger;
    /// #
    /// Logger::init ("LogFile.log", false);
    /// ```
    pub fn init (log_file_path: &str, log_to_console: bool) -> Result<(), log::SetLoggerError> {

        log::set_logger (|max_log_level| {

            // Debug builds include `debug!` output; release builds start at `info!`.
            if cfg! (debug_assertions) {
                max_log_level.set (log::LogLevelFilter::Debug);
            } else {
                max_log_level.set (log::LogLevelFilter::Info);
            }

            // NOTE(review): `File::create` is unwrapped, so an unwritable path
            // panics inside `init` instead of surfacing through the `Result`.
            Box::new (Logger {log_file: BufWriter::new (File::create (&log_file_path).unwrap ()),
                              log_to_console: log_to_console})
        })
    }

/*-----------------------------------------------------------------------------------------------*/

    /// Releases the logger and all of its resources.
    pub fn release () {
        drop (log::shutdown_logger ().unwrap ());
    }
}

/*===============================================================================================*/
/*------LOGGER PRIVATE METHODS-------------------------------------------------------------------*/
/*===============================================================================================*/

impl log::Log for Logger {

    // A record is enabled when its level is at or below the build's ceiling:
    // Debug for debug builds, Info for release builds (mirrors `init`).
    fn enabled (&self, metadata: &log::LogMetadata) -> bool {

        if cfg! (debug_assertions) {
            return metadata.level () <= log::LogLevel::Debug;
        }

        metadata.level () <= log::LogLevel::Info
    }

/*-----------------------------------------------------------------------------------------------*/

    // Formats a record as "LEVEL (module : line) - message"; the console copy
    // gets a colourized level, the file copy is plain text.
    fn log (&self, record: &log::LogRecord) {

        let output = match record.level () {

            log::LogLevel::Debug => format! ("{} ({} : {}) - {}\n", Purple.paint (record.level ().to_string ()), record.location ().module_path (), record.location ().line (), record.args ()),
            log::LogLevel::Info  => format! ("{} ({} : {}) - {}\n", Blue.paint   (record.level ().to_string ()), record.location ().module_path (), record.location ().line (), record.args ()),
            log::LogLevel::Warn  => format! ("{} ({} : {}) - {}\n", Yellow.paint (record.level ().to_string ()), record.location ().module_path (), record.location ().line (), record.args ()),
            log::LogLevel::Error => format! ("{} ({} : {}) - {}\n", Red.paint    (record.level ().to_string ()), record.location ().module_path (), record.location ().line (), record.args ()),
            // Any other level (e.g. Trace) produces an empty console string.
            _ => String::new ()
        };

        if self.log_to_console {
            // NOTE(review): `output` already ends in '\n', so `println!` leaves
            // a blank line after each record — presumably intentional spacing.
            println! ("{}", output);
        }

        // NOTE(review): `log` only provides `&self`, hence the write goes
        // through `get_ref ()` straight to the `File` — the `BufWriter`'s
        // buffering is bypassed entirely. The file record is written
        // unconditionally, regardless of the `match` above.
        self.log_file.get_ref ().write (format! ("{} ({} : {}) - {}\n\n", record.level (), record.location ().module_path (), record.location ().line (), record.args ()).as_bytes ()).unwrap ();
    }
}
use std::marker::PhantomData; use mpi::topology::Rank; use mpi::traits::*; use super::prelude::*; pub struct AllGather<T: Equivalence, P: ProtocolPart>(PhantomData<(T, P)>); unsafe impl<T: Equivalence, P: ProtocolPart> ProtocolPart for AllGather<T, P> { unsafe fn build_part() -> Self { Self(PhantomData) } } impl<T: Equivalence, P: ProtocolPart, C: Communicator> Session<AllGather<T, P>, C> { pub fn all_gather(self, input: &T, output: &mut [T]) -> Session<P, C> { unsafe { self.comm.all_gather_into(input, output); self.advance_next() } } } pub struct Gather<Source: RankSelect, T: Equivalence, P: ProtocolPart>(PhantomData<(Source, T, P)>); unsafe impl<Source: RankSelect, T: Equivalence, P: ProtocolPart> ProtocolPart for Gather<Source, T, P> { unsafe fn build_part() -> Self { Self(PhantomData) } } impl<Source: RankSelect, T: Equivalence, P: ProtocolPart, C: Communicator> Session<Gather<Source, T, P>, C> { pub fn split(self) -> Split<Gatherer<T, P>, Gatheree<T, P>, C> { let rank = Source::get_rank(self.state()); if self.comm.rank() == rank { Split::Left(unsafe { self.advance(Gatherer(rank, PhantomData)) }) } else { Split::Right(unsafe { self.advance(Gatheree(rank, PhantomData)) }) } } } pub struct Gatherer<T: Equivalence, P: ProtocolPart>(Rank, PhantomData<(T, P)>); impl<T: Equivalence, P: ProtocolPart, C: Communicator> Session<Gatherer<T, P>, C> { pub fn gather(self, send: &T, receive: &mut [T]) -> Session<P, C> { unsafe { self.comm .process_at_rank(self.protocol().0) .gather_into_root(send, receive); self.advance_next() } } } pub struct Gatheree<T: Equivalence, P: ProtocolPart>(Rank, PhantomData<(T, P)>); impl<T: Equivalence, P: ProtocolPart, C: Communicator> Session<Gatheree<T, P>, C> { pub fn gather(self, send: &T) -> Session<P, C> { unsafe { self.comm .process_at_rank(self.protocol().0) .gather_into(send); self.advance_next() } } } pub struct Publish<Source: RankSelect, K: super::key::Key, P: ProtocolPart>( PhantomData<(Source, K, P)>, ); unsafe impl<Source: 
RankSelect, K: super::key::Key, P: ProtocolPart> ProtocolPart for Publish<Source, K, P> { unsafe fn build_part() -> Self { Self(PhantomData) } } impl<Source: RankSelect, K: super::key::Key, P: ProtocolPart, C: Communicator> Session<Publish<Source, K, P>, C> { pub fn split(self) -> Split<Publisher<K, P>, Publishee<K, P>, C> { let rank = Source::get_rank(self.state()); if self.comm.rank() == rank { Split::Left(unsafe { self.advance(Publisher(rank, PhantomData)) }) } else { Split::Right(unsafe { self.advance(Publishee(rank, PhantomData)) }) } } } pub struct Publisher<K: super::key::Key, P: ProtocolPart>(Rank, PhantomData<(K, P)>); impl<K: super::key::Key + 'static, P: ProtocolPart, C: Communicator> Session<Publisher<K, P>, C> where K::Value: Equivalence, { pub fn publish(mut self, mut value: K::Value) -> Session<P, C> { unsafe { self.comm .process_at_rank(self.protocol().0) .broadcast_into(&mut value); self.state_mut().insert::<K>(Box::new(value)); self.advance_next() } } } pub struct Publishee<K: super::key::Key, P: ProtocolPart>(Rank, PhantomData<(K, P)>); impl<K: super::key::Key + 'static, P: ProtocolPart, C: Communicator> Session<Publishee<K, P>, C> where K::Value: Equivalence, { pub fn receive(mut self) -> Session<P, C> { unsafe { let mut value: K::Value = std::mem::uninitialized(); self.comm .process_at_rank(self.protocol().0) .broadcast_into(&mut value); self.state_mut().insert::<K>(Box::new(value)); self.advance_next() } } }
use serde::{Deserialize, Serialize};
use std::{fs::File, io::BufReader, path::Path};

/// File name used when no explicit config path is supplied.
pub const DEFAULT_CONFIG_FILE_NAME: &str = "config.json";

/// Simulation tuning parameters, loaded from a JSON file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub blob_size: f64,
    pub repel_force: f64,
    pub repel_distance: f64,
    pub friction_force: f64,
    pub max_acceleration: f64,
    pub min_acceleration: f64,
}

/// Prints `field_expr => new_value` whenever the two sides differ.
/// `stringify!` captures the caller's tokens, so the printed name is the
/// field expression as written at the call site.
macro_rules! print_config_diff {
    ($current:expr, $incoming:expr) => {
        if $current != $incoming {
            println!("{} => {}", stringify!($current), $incoming);
        }
    };
}

impl Config {
    /// Reads and deserializes a `Config` from the JSON file at `path`.
    pub fn load_file<P: AsRef<Path>>(path: P) -> anyhow::Result<Self> {
        let reader = BufReader::new(File::open(path)?);
        Ok(serde_json::from_reader(reader)?)
    }

    /// Reads the config from [`DEFAULT_CONFIG_FILE_NAME`] in the current
    /// working directory.
    pub fn load_default_config_file() -> anyhow::Result<Self> {
        Self::load_file(DEFAULT_CONFIG_FILE_NAME)
    }

    /// Prints every field whose value differs between `self` and `new`.
    pub fn print_config_diff(&self, new: &Config) {
        print_config_diff!(self.blob_size, new.blob_size);
        print_config_diff!(self.repel_force, new.repel_force);
        print_config_diff!(self.repel_distance, new.repel_distance);
        print_config_diff!(self.friction_force, new.friction_force);
        print_config_diff!(self.max_acceleration, new.max_acceleration);
        print_config_diff!(self.min_acceleration, new.min_acceleration);
    }
}
// Implement the CloudWatch Client
#![forbid(unsafe_code)]
#![deny(missing_docs)]

use anyhow::Result;
use aws_sdk_cloudwatch::client::Client as CloudWatchClient;
use aws_sdk_cloudwatch::operation::get_metric_statistics::GetMetricStatisticsOutput;
use aws_sdk_cloudwatch::primitives::DateTime;
use aws_sdk_cloudwatch::types::{
    Dimension,
    DimensionFilter,
    Metric,
    StandardUnit,
    Statistic,
};
use aws_smithy_types_convert::date_time::DateTimeExt;
use chrono::prelude::DateTime as ChronoDt;
use chrono::prelude::Utc;
use chrono::Duration;
use crate::common::{
    Bucket,
    ClientConfig,
};
use tracing::debug;

/// A `CloudWatch` `Client`
pub struct Client {
    /// The AWS SDK `CloudWatchClient`.
    pub client: CloudWatchClient,

    /// Bucket name that was selected, if any.
    pub bucket_name: Option<String>,
}

impl Client {
    /// Return a new `Client` with the given `ClientConfig`.
    pub async fn new(config: ClientConfig) -> Self {
        let bucket_name = config.bucket_name;
        let region = config.region;

        debug!("new: Creating CloudWatchClient in region '{}'", region.name());

        // AWS credentials/endpoint come from the environment; only the region
        // is taken from our own config.
        let config = aws_config::from_env()
            .region(region.clone())
            .load()
            .await;

        let client = CloudWatchClient::new(&config);

        Self {
            client: client,
            bucket_name: bucket_name,
        }
    }

    /// Returns a `Vec` of `GetMetricStatisticsOutput` for the given `Bucket`.
    ///
    /// This returns a `Vec` because there is one `GetMetricStatisticsOutput`
    /// for each S3 bucket storage type that `CloudWatch` has statistics for.
    pub async fn get_metric_statistics(
        &self,
        bucket: &Bucket,
    ) -> Result<Vec<GetMetricStatisticsOutput>> {
        debug!("get_metric_statistics: Processing {:?}", bucket);

        // These are used repeatedly while looping, just prepare them once.
        let now: ChronoDt<Utc> = Utc::now();
        let one_day = Duration::days(1);
        // One day in seconds fits easily in i32, so the cast is safe here.
        let period = one_day.num_seconds() as i32;
        // Query window: the last two days, one datapoint per day.
        let start_time = DateTime::from_chrono_utc(now - (one_day * 2));

        let storage_types = match &bucket.storage_types {
            Some(st) => st.clone(),
            None => Vec::new(),
        };

        let mut outputs = Vec::new();

        // One GetMetricStatistics call per storage type the bucket uses.
        for storage_type in storage_types {
            let dimensions = vec![
                Dimension::builder()
                    .name("BucketName")
                    .value(bucket.name.clone())
                    .build(),
                Dimension::builder()
                    .name("StorageType")
                    .value(storage_type.clone())
                    .build(),
            ];

            let input = self.client.get_metric_statistics()
                .end_time(DateTime::from_chrono_utc(now))
                .metric_name("BucketSizeBytes")
                .namespace("AWS/S3")
                .period(period)
                .set_dimensions(Some(dimensions))
                .start_time(start_time)
                .statistics(Statistic::Average)
                .unit(StandardUnit::Bytes);

            debug!("{:?}", input);

            let output = input
                .send()
                .await?;

            outputs.push(output);
        }

        Ok(outputs)
    }

    /// Get list of buckets with `BucketSizeBytes` metrics.
    ///
    /// An individual metric resembles the following:
    /// ```rust
    /// Metric {
    ///     metric_name: Some("BucketSizeBytes"),
    ///     namespace: Some("AWS/S3")
    ///     dimensions: Some([
    ///         Dimension {
    ///             name: "StorageType",
    ///             value: "StandardStorage"
    ///         },
    ///         Dimension {
    ///             name: "BucketName",
    ///             value: "some-bucket-name"
    ///         }
    ///     ]),
    /// }
    /// ```
    pub async fn list_metrics(&self) -> Result<Vec<Metric>> {
        debug!("list_metrics: Listing...");

        let mut metrics = Vec::new();
        let mut next_token = None;

        // If we selected a bucket to list, filter for it here.
        let dimensions = match self.bucket_name.as_ref() {
            Some(bucket_name) => {
                let filter = DimensionFilter::builder()
                    .name("BucketName")
                    .value(bucket_name.clone())
                    .build();

                Some(vec![filter])
            },
            None => None,
        };

        // We loop until we've processed everything.
        loop {
            // Input for CloudWatch API
            let output = self.client.list_metrics()
                .namespace("AWS/S3")
                .metric_name("BucketSizeBytes")
                .set_dimensions(dimensions.clone())
                .set_next_token(next_token)
                .send()
                .await?;

            debug!("list_metrics: API returned: {:#?}", output);

            // If we get any metrics, append them to our vec
            if let Some(m) = output.metrics() {
                metrics.append(&mut m.to_vec());
            }

            // If there was a next token, use it, otherwise the loop is done.
            match output.next_token() {
                Some(t) => next_token = Some(t.to_string()),
                None => break,
            };
        }

        debug!("list_metrics: Metrics collection: {:#?}", metrics);

        Ok(metrics)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use aws_sdk_cloudwatch::config::Config as CloudWatchConfig;
    use aws_sdk_cloudwatch::config::Credentials;
    use aws_sdk_cloudwatch::types::{
        Datapoint,
        Dimension,
        Metric,
    };
    use aws_smithy_client::erase::DynConnector;
    use aws_smithy_client::test_connection::TestConnection;
    use aws_smithy_http::body::SdkBody;
    use pretty_assertions::assert_eq;
    use std::fs;
    use std::path::Path;

    // Create a mock CloudWatch client, returning the data from the specified
    // data_file.
    //
    // The TestConnection replays the canned XML response from `test-data/`
    // instead of hitting the real API; credentials are dummies.
    fn mock_client(
        data_file: Option<&str>,
    ) -> Client {
        let data = match data_file {
            None => "".to_string(),
            Some(d) => {
                let path = Path::new("test-data").join(d);
                fs::read_to_string(path).unwrap()
            },
        };

        let events = vec![
            (
                http::Request::builder()
                    .body(SdkBody::from("request body"))
                    .unwrap(),
                http::Response::builder()
                    .status(200)
                    .body(SdkBody::from(data))
                    .unwrap(),
            ),
        ];

        let conn = TestConnection::new(events);
        let conn = DynConnector::new(conn);

        let creds = Credentials::from_keys(
            "ATESTCLIENT",
            "atestsecretkey",
            Some("atestsessiontoken".to_string()),
        );

        let conf = CloudWatchConfig::builder()
            .credentials_provider(creds)
            .http_connector(conn)
            .region(aws_sdk_cloudwatch::config::Region::new("eu-west-1"))
            .build();

        let client = CloudWatchClient::from_conf(conf);

        Client {
            client: client,
            bucket_name: None,
        }
    }

    #[tokio::test]
    async fn test_get_metric_statistics() {
        let client = mock_client(
            Some("cloudwatch-get-metric-statistics.xml"),
        );

        let storage_types = vec![
            "StandardStorage".into(),
        ];

        let bucket = Bucket {
            name: "test-bucket".into(),
            region: None,
            storage_types: Some(storage_types),
        };

        let ret = client.get_metric_statistics(&bucket)
            .await
            .unwrap();

        // Expected values mirror the canned XML response fixture.
        let timestamp = ChronoDt::parse_from_rfc3339("2020-03-01T20:59:00Z")
            .unwrap();

        let datapoints = vec![
            Datapoint::builder()
                .average(123456789.0)
                .timestamp(DateTime::from_chrono_fixed(timestamp))
                .unit(StandardUnit::Bytes)
                .build(),
        ];

        let expected = vec![
            GetMetricStatisticsOutput::builder()
                .set_datapoints(Some(datapoints))
                .set_label(Some("BucketSizeBytes".into()))
                .build(),
        ];

        assert_eq!(ret, expected);
    }

    #[tokio::test]
    async fn test_list_metrics() {
        let client = mock_client(
            Some("cloudwatch-list-metrics.xml"),
        );

        let ret = client.list_metrics().await.unwrap();

        let expected = vec![
            Metric::builder()
                .metric_name("BucketSizeBytes")
                .namespace("AWS/S3")
                .set_dimensions(Some(vec![
                    Dimension::builder()
                        .name("BucketName")
                        .value("a-bucket-name")
                        .build(),
                    Dimension::builder()
                        .name("StorageType")
                        .value("StandardStorage")
                        .build(),
                ]))
                .build(),
            Metric::builder()
                .metric_name("BucketSizeBytes")
                .namespace("AWS/S3")
                .set_dimensions(Some(vec![
                    Dimension::builder()
                        .name("BucketName")
                        .value("a-bucket-name")
                        .build(),
                    Dimension::builder()
                        .name("StorageType")
                        .value("StandardIAStorage")
                        .build(),
                ]))
                .build(),
            Metric::builder()
                .metric_name("BucketSizeBytes")
                .namespace("AWS/S3")
                .set_dimensions(Some(vec![
                    Dimension::builder()
                        .name("BucketName")
                        .value("another-bucket-name")
                        .build(),
                    Dimension::builder()
                        .name("StorageType")
                        .value("StandardStorage")
                        .build(),
                ]))
                .build(),
        ];

        assert_eq!(ret, expected);
    }
}
use rahashmap::HashMap;
use std::hash::{BuildHasher, Hash};
use std::sync::{atomic, Arc, Mutex};

/// One buffer of a double-buffered multimap: the actual key → values data,
/// plus the reader-epoch registry and user metadata shared between buffers.
pub(crate) struct Inner<K, V, M, S>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    /// The key → values multimap for this buffer.
    pub(crate) data: HashMap<K, Vec<V>, S>,
    /// Per-reader epoch counters — presumably bumped by readers so writers can
    /// tell when a swapped-out buffer is safe to mutate; TODO confirm against
    /// the read/write handle modules.
    pub(crate) epochs: Arc<Mutex<Vec<Arc<atomic::AtomicUsize>>>>,
    /// Caller-supplied metadata carried alongside the map.
    pub(crate) meta: M,
    // Set once via `mark_ready`; starts out false for a freshly built buffer.
    ready: bool,
}

impl<K, V, M, S> Clone for Inner<K, V, M, S>
where
    K: Eq + Hash + Clone,
    S: BuildHasher + Clone,
    M: Clone,
{
    // NOTE(review): this deliberately does NOT clone the entries — it only
    // reproduces the capacity, hasher, and shared epoch list. The assert
    // enforces that it is only ever called on an empty buffer (the caller is
    // expected to repopulate the clone by other means).
    fn clone(&self) -> Self {
        assert!(self.data.is_empty());
        Inner {
            data: HashMap::with_capacity_and_hasher(
                self.data.capacity(),
                self.data.hasher().clone(),
            ),
            epochs: Arc::clone(&self.epochs),
            meta: self.meta.clone(),
            ready: self.ready,
        }
    }
}

impl<K, V, M, S> Inner<K, V, M, S>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    /// Creates an empty, not-yet-ready buffer with the given metadata and
    /// hasher.
    pub fn with_hasher(m: M, hash_builder: S) -> Self {
        Inner {
            data: HashMap::with_hasher(hash_builder),
            epochs: Default::default(),
            meta: m,
            ready: false,
        }
    }

    /// Like [`Inner::with_hasher`], but pre-allocates space for `capacity`
    /// keys.
    pub fn with_capacity_and_hasher(m: M, capacity: usize, hash_builder: S) -> Self {
        Inner {
            data: HashMap::with_capacity_and_hasher(capacity, hash_builder),
            epochs: Default::default(),
            meta: m,
            ready: false,
        }
    }

    /// Marks this buffer as ready for readers; the flag is never cleared.
    pub fn mark_ready(&mut self) {
        self.ready = true;
    }

    /// Returns whether `mark_ready` has been called.
    pub fn is_ready(&self) -> bool {
        self.ready
    }
}
/* * Datadog API V1 Collection * * Collection of all Datadog Public endpoints. * * The version of the OpenAPI document: 1.0 * Contact: support@datadoghq.com * Generated by: https://openapi-generator.tech */ /// SyntheticsBrowserErrorType : Error type returned by a browser test. /// Error type returned by a browser test. #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] pub enum SyntheticsBrowserErrorType { #[serde(rename = "network")] NETWORK, #[serde(rename = "js")] JS, } impl ToString for SyntheticsBrowserErrorType { fn to_string(&self) -> String { match self { Self::NETWORK => String::from("network"), Self::JS => String::from("js"), } } }
use ::anyhow::Result; use ::log::error; use ::std::fs::{remove_file, File}; use ::std::io::prelude::*; use ::std::path::{Path, PathBuf}; use ::pueue::settings::Settings; /// Return the paths to temporary stdout and stderr files for a task pub fn get_log_paths(task_id: usize, settings: &Settings) -> (PathBuf, PathBuf) { let pueue_dir = Path::new(&settings.daemon.pueue_directory).join("temp"); let out_path = pueue_dir.join(format!("{}_stdout.log", task_id)); let err_path = pueue_dir.join(format!("{}_stderr.log", task_id)); (out_path, err_path) } /// Create and return the file handle for temporary stdout and stderr files for a task pub fn create_log_file_handles(task_id: usize, settings: &Settings) -> Result<(File, File)> { let (out_path, err_path) = get_log_paths(task_id, settings); let stdout = File::create(out_path)?; let stderr = File::create(err_path)?; Ok((stdout, stderr)) } /// Return the file handle for temporary stdout and stderr files for a task pub fn get_log_file_handles(task_id: usize, settings: &Settings) -> Result<(File, File)> { let (out_path, err_path) = get_log_paths(task_id, settings); let stdout = File::open(out_path)?; let stderr = File::open(err_path)?; Ok((stdout, stderr)) } /// Return the content of temporary stdout and stderr files for a task pub fn read_log_files(task_id: usize, settings: &Settings) -> Result<(String, String)> { let (mut stdout_handle, mut stderr_handle) = get_log_file_handles(task_id, settings)?; let mut stdout_buffer = Vec::new(); let mut stderr_buffer = Vec::new(); stdout_handle.read_to_end(&mut stdout_buffer)?; stderr_handle.read_to_end(&mut stderr_buffer)?; let stdout = String::from_utf8_lossy(&stdout_buffer); let stderr = String::from_utf8_lossy(&stderr_buffer); Ok((stdout.to_string(), stderr.to_string())) } /// Remove temporary stdout and stderr files for a task pub fn clean_log_handles(task_id: usize, settings: &Settings) { let (out_path, err_path) = get_log_paths(task_id, settings); if let Err(err) = 
remove_file(out_path) { error!( "Failed to remove stdout file for task {} with error {:?}", task_id, err ); }; if let Err(err) = remove_file(err_path) { error!( "Failed to remove stderr file for task {} with error {:?}", task_id, err ); }; }
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

/// Standard error payload returned by the Maps resource-provider API.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<serde_json::Value>,
}

/// Common ARM resource envelope (id / name / type).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the rename to `type_`.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}

/// An Azure Maps account resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MapsAccount {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<MapsAccountProperties>,
}

/// Request body for creating a Maps account; location and sku are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MapsAccountCreateParameters {
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    pub sku: Sku,
}

/// Request body for updating a Maps account; all fields optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MapsAccountUpdateParameters {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<Sku>,
}

/// List-response wrapper for Maps accounts.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MapsAccounts {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<MapsAccount>,
}

/// SKU (pricing tier) of a Maps account.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Sku {
    pub name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tier: Option<String>,
}

/// Request body for moving Maps accounts to another resource group.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MapsAccountsMoveRequest {
    #[serde(rename = "targetResourceGroup")]
    pub target_resource_group: String,
    #[serde(rename = "resourceIds")]
    pub resource_ids: Vec<String>,
}

/// Selects which access key to regenerate.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MapsKeySpecification {
    #[serde(rename = "keyType")]
    pub key_type: maps_key_specification::KeyType,
}

pub mod maps_key_specification {
    use super::*;
    /// Which of the two account keys is being referred to.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum KeyType {
        #[serde(rename = "primary")]
        Primary,
        #[serde(rename = "secondary")]
        Secondary,
    }
}

/// The pair of shared access keys for a Maps account.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MapsAccountKeys {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "primaryKey", default, skip_serializing_if = "Option::is_none")]
    pub primary_key: Option<String>,
    #[serde(rename = "secondaryKey", default, skip_serializing_if = "Option::is_none")]
    pub secondary_key: Option<String>,
}

/// List-response wrapper for the provider's available operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MapsOperations {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<serde_json::Value>,
}

/// Additional Maps account properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MapsAccountProperties {
    #[serde(rename = "x-ms-client-id", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_client_id: Option<String>,
}
use std::collections::BTreeMap;
use std::rc::Rc;
use wlral::compositor::Compositor;
use wlral::geometry::{Displacement, Rectangle};
use wlral::input::event_filter::EventFilter;
use wlral::input::events::*;
use wlral::output::Output;
use wlral::output_management_protocol::OutputManagementProtocol;
use wlral::output_manager::OutputManager;
use wlral::window::{Window, WindowEdge};
use wlral::window_management_policy::*;
use wlral::window_manager::WindowManager;
use xkbcommon::xkb;

// An in-progress pointer gesture. For resizes, the window's extents at drag
// start are stored so motion deltas apply to the original geometry instead of
// compounding on every event.
enum Gesture {
  Move(MoveRequest),
  Resize(ResizeRequest, Rectangle),
}

// A minimal floating window manager example built on wlral: centers new
// windows, supports pointer move/resize gestures, maximize/fullscreen with
// geometry restore, and a handful of Ctrl-key shortcuts.
struct FloatingWindowManager {
  output_manager: Rc<OutputManager>,
  window_manager: Rc<WindowManager>,
  output_management_protocol: Rc<OutputManagementProtocol>,

  // Currently-active pointer gesture, if any.
  gesture: Option<Gesture>,
  // Pre-maximize/fullscreen extents, keyed by the window's wlr_surface
  // pointer value so they can be restored afterwards.
  restore_size: BTreeMap<usize, Rectangle>,
}

impl FloatingWindowManager {
  // The output the window is considered to be on: the first output whose
  // extents overlap the window, falling back to the first output overall.
  fn output_for_window(&self, window: &Window) -> Option<Rc<Output>> {
    self
      .output_manager
      .outputs()
      .iter()
      .find(|output| output.extents().overlaps(&window.extents()))
      .cloned()
      .or_else(|| self.output_manager.outputs().first().cloned())
  }
}

impl WindowManagementPolicy for FloatingWindowManager {
  fn handle_window_ready(&mut self, window: Rc<Window>) {
    let output = self.output_for_window(&window);

    if window.can_receive_focus() {
      // Center the new window
      if let Some(output) = output {
        window.move_to(
          output.top_left() + ((output.size() - window.extents().size()) / 2.0).as_displacement(),
        );
      }

      // Focus the new window
      self.window_manager.focus_window(window.clone());
    }
  }
  fn handle_request_activate(&mut self, request: ActivateRequest) {
    self.window_manager.focus_window(request.window);
  }
  fn handle_request_close(&mut self, request: CloseRequest) {
    request.window.ask_client_to_close();
  }

  fn handle_request_move(&mut self, request: MoveRequest) {
    if !self.window_manager.window_has_focus(&request.window) {
      // Deny move requests from unfocused clients
      return;
    }

    // Dragging a maximized/fullscreen window first returns it to floating.
    if request.window.maximized() {
      request.window.set_maximized(false);
    }
    if request.window.fullscreen() {
      request.window.set_fullscreen(false);
    }

    self.gesture = Some(Gesture::Move(request))
  }
  fn handle_request_resize(&mut self, request: ResizeRequest) {
    if !self.window_manager.window_has_focus(&request.window) {
      // Deny resize requests from unfocused clients
      return;
    }

    if !request.window.resizing() {
      request.window.set_resizing(true);
    }

    // Remember the geometry at drag start; motion deltas are applied to it.
    let original_extents = request.window.extents();
    self.gesture = Some(Gesture::Resize(request, original_extents))
  }
  fn handle_request_maximize(&mut self, request: MaximizeRequest) {
    let output = self.output_for_window(&request.window);

    if let Some(output) = output {
      if request.maximize {
        // Save the current extents so un-maximizing can restore them.
        self.restore_size.insert(
          request.window.wlr_surface() as usize,
          request.window.extents(),
        );
        request.window.set_maximized(true);
        request.window.set_extents(&Rectangle {
          top_left: output.top_left(),
          size: output.size(),
        });
      } else {
        request.window.set_maximized(false);
        if let Some(extents) = self
          .restore_size
          .get(&(request.window.wlr_surface() as usize))
        {
          request.window.set_extents(extents);
        }
      }
    }
  }
  fn handle_request_fullscreen(&mut self, request: FullscreenRequest) {
    // Prefer the output the client asked for, else the window's own output.
    let output = request
      .output
      .clone()
      .or_else(|| self.output_for_window(&request.window));

    if let Some(output) = output {
      if request.fullscreen {
        self.restore_size.insert(
          request.window.wlr_surface() as usize,
          request.window.extents(),
        );
        request.window.set_fullscreen(true);
        request.window.set_extents(&Rectangle {
          top_left: output.top_left(),
          size: output.size(),
        });
      } else {
        request.window.set_fullscreen(false);
        if let Some(extents) = self
          .restore_size
          .get(&(request.window.wlr_surface() as usize))
        {
          request.window.set_extents(extents);
        }
      }
    }
  }
}

impl EventFilter for FloatingWindowManager {
  fn handle_pointer_motion_event(&mut self, event: &MotionEvent) -> bool {
    match &self.gesture {
      Some(Gesture::Move(gesture)) => {
        // Keep the grab point under the cursor while dragging.
        gesture
          .window
          .move_to((event.position() - gesture.drag_point.as_displacement()).into());
        true
      }
      Some(Gesture::Resize(gesture, original_extents)) => {
        let displacement = Displacement::from(event.position() - gesture.cursor_position);
        let mut extents = original_extents.clone();
        // Grow/shrink only the edges the gesture grabbed; opposite edges are
        // mutually exclusive per axis.
        if gesture.edges.contains(WindowEdge::TOP) {
          extents.top_left.y += displacement.dy;
          extents.size.height -= displacement.dy;
        } else if gesture.edges.contains(WindowEdge::BOTTOM) {
          extents.size.height += displacement.dy;
        }
        if gesture.edges.contains(WindowEdge::LEFT) {
          extents.top_left.x += displacement.dx;
          extents.size.width -= displacement.dx;
        } else if gesture.edges.contains(WindowEdge::RIGHT) {
          extents.size.width += displacement.dx;
        }
        gesture.window.set_extents(&extents);
        true
      }
      _ => false,
    }
  }

  fn handle_pointer_button_event(&mut self, event: &ButtonEvent) -> bool {
    match (&self.gesture, event.state()) {
      // Any button release ends the active gesture.
      (Some(gesture), ButtonState::Released) => {
        if let Gesture::Resize(request, _) = gesture {
          if request.window.resizing() {
            request.window.set_resizing(false);
          }
        }
        self.gesture = None;
        true
      }
      _ => false,
    }
  }

  fn handle_keyboard_event(&mut self, event: &KeyboardEvent) -> bool {
    let keysym = event.get_one_sym();
    if event.state() != KeyState::Pressed {
      return false;
    }
    // Ctrl+Escape: ask the focused window to close.
    if keysym == xkb::KEY_Escape
      && event
        .xkb_state()
        .mod_name_is_active(xkb::MOD_NAME_CTRL, xkb::STATE_MODS_DEPRESSED)
    {
      if let Some(window) = self.window_manager.focused_window() {
        window.ask_client_to_close();
      }
      true
    // Ctrl+A: accept a pending output-configuration test.
    } else if keysym == xkb::KEY_a
      && event
        .xkb_state()
        .mod_name_is_active(xkb::MOD_NAME_CTRL, xkb::STATE_MODS_DEPRESSED)
      && self.output_management_protocol.has_pending_test()
    {
      self
        .output_management_protocol
        .apply_pending_test()
        .expect("Could not apply pending test");
      true
    // Ctrl+C: reject a pending output-configuration test.
    } else if keysym == xkb::KEY_c
      && event
        .xkb_state()
        .mod_name_is_active(xkb::MOD_NAME_CTRL, xkb::STATE_MODS_DEPRESSED)
      && self.output_management_protocol.has_pending_test()
    {
      self
        .output_management_protocol
        .cancel_pending_test()
        .expect("Could not cancel pending test");
      true
    // Ctrl+Alt+D: dump window debug info to stdout.
    } else if keysym == xkb::KEY_d
      && event
        .xkb_state()
        .mod_name_is_active(xkb::MOD_NAME_CTRL, xkb::STATE_MODS_DEPRESSED)
      && event
        .xkb_state()
        .mod_name_is_active(xkb::MOD_NAME_ALT, xkb::STATE_MODS_DEPRESSED)
    {
      println!("Windows:");
      for window in self.window_manager.windows() {
        println!("  {}:", window.title().unwrap_or("[no title]".to_string()));
        println!(
          "    app_id: {}",
          window.app_id().unwrap_or("[no app_id]".to_string())
        );
        println!(
          "    outputs: {}",
          window
            .outputs()
            .iter()
            .map(|o| o.name())
            .collect::<Vec<_>>()
            .join(", ")
        );
      }
      true
    } else {
      false
    }
  }
}

fn main() {
  env_logger::init();

  let compositor = Compositor::init();
  compositor.config_manager().update_config(|config| {
    config.background_color = [0.3, 0.3, 0.3];
  });
  // 30s timeout for clients to confirm output-configuration tests.
  let output_management_protocol = compositor
    .enable_output_management_protocol(30_000)
    .unwrap();
  let window_manager = FloatingWindowManager {
    output_manager: compositor.output_manager(),
    output_management_protocol,
    window_manager: compositor.window_manager(),
    gesture: None,
    restore_size: BTreeMap::new(),
  };
  compositor
    .run(window_manager)
    .expect("Could not run compositor");
}
//! Formats a DOM structure to a Write //! //! ### Example //! ``` //! use sxd_document::Package; //! use sxd_document::writer::format_document; //! //! let package = Package::new(); //! let doc = package.as_document(); //! //! let hello = doc.create_element("hello"); //! hello.set_attribute_value("planet", "Earth"); //! doc.root().append_child(hello); //! //! let mut output = Vec::new(); //! format_document(&doc, &mut output).expect("unable to output XML"); //! ``` //! //! ### Potential options to support //! //! - Space before `/>` //! - Single vs double quotes //! - Fixed ordering of attributes use std::borrow::ToOwned; use std::io::{self,Write}; use std::slice; use self::Content::*; use super::QName; use super::dom; use super::dom::{ChildOfElement,ChildOfRoot}; use super::lazy_hash_map::LazyHashMap; trait WriteStr: Write { fn write_str(&mut self, s: &str) -> io::Result<()> { self.write_all(s.as_bytes()) } } impl<W: ?Sized> WriteStr for W where W: Write {} // TODO: Duplicating the String seems inefficient... 
/// One lexical scope of namespace-prefix mappings, corresponding to a
/// single element while it is being serialized.
struct PrefixScope<'d> {
    ns_to_prefix: LazyHashMap<&'d str, String>,
    prefix_to_ns: LazyHashMap<String, &'d str>,
    // Prefixes introduced *by this scope*; these must be emitted as
    // `xmlns:…` attributes on the current element.
    defined_prefixes: Vec<(String, &'d str)>,
    default_namespace_uri: Option<&'d str>,
}

impl<'d> PrefixScope<'d> {
    fn new() -> PrefixScope<'d> {
        PrefixScope {
            ns_to_prefix: LazyHashMap::new(),
            prefix_to_ns: LazyHashMap::new(),
            defined_prefixes: Vec::new(),
            default_namespace_uri: None,
        }
    }

    fn has_prefix(&self, prefix: &str) -> bool {
        self.prefix_to_ns.contains_key(prefix)
    }

    fn has_namespace_uri(&self, namespace_uri: &str) -> bool {
        self.ns_to_prefix.contains_key(namespace_uri)
    }

    /// True when `prefix` is mapped to exactly `namespace_uri` here.
    fn prefix_is(&self, prefix: &str, namespace_uri: &str) -> bool {
        match self.prefix_to_ns.get(prefix) {
            Some(ns) => *ns == namespace_uri,
            _ => false,
        }
    }

    fn namespace_uri_for(&self, prefix: &str) -> Option<&'d str> {
        self.prefix_to_ns.get(prefix).map(|&ns| ns)
    }

    fn prefix_for(&self, namespace_uri: &str) -> Option<&str> {
        self.ns_to_prefix.get(namespace_uri).map(|p| &p[..])
    }

    /// Record the prefix <-> namespace association in both directions.
    fn add_mapping(&mut self, prefix: &str, namespace_uri: &'d str) {
        let prefix = prefix.to_owned();

        self.prefix_to_ns.insert(prefix.clone(), namespace_uri);
        self.ns_to_prefix.insert(namespace_uri, prefix);
    }

    /// Mark a prefix as introduced by this scope so it is serialized.
    fn define_prefix(&mut self, prefix: String, namespace_uri: &'d str) {
        self.defined_prefixes.push((prefix, namespace_uri));
    }
}

/// How a qualified name should be rendered with respect to namespaces.
enum NamespaceType<'a> {
    Default,
    Prefix(&'a str),
    Unknown,
}

/// Stack of prefix scopes mirroring the element nesting during output.
struct PrefixMapping<'d> {
    scopes: Vec<PrefixScope<'d>>,
    // Counter feeding the `autonsN` generated-prefix names.
    generated_prefix_count: usize,
}

impl<'d> PrefixMapping<'d> {
    fn new() -> PrefixMapping<'d> {
        PrefixMapping {
            scopes: vec![PrefixScope::new()],
            generated_prefix_count: 0,
        }
    }

    fn push_scope(&mut self) {
        self.scopes.push(PrefixScope::new());
    }

    fn pop_scope(&mut self) {
        self.scopes.pop();
    }

    /// The default namespace in effect, innermost scope winning.
    fn active_default_namespace_uri(&self) -> Option<&'d str> {
        self.scopes.iter().rev().filter_map(|s| s.default_namespace_uri).next()
    }

    /// The namespace `prefix` currently resolves to, innermost scope winning.
    fn active_namespace_uri_for_prefix(&self, prefix: &str) -> Option<&'d str> {
        self.scopes.iter().rev().filter_map(|s| s.namespace_uri_for(prefix)).next()
    }

    fn default_namespace_uri_in_current_scope(&self) -> Option<&'d str> {
        self.scopes.last().unwrap().default_namespace_uri
    }

    /// Prefixes the current element must declare via `xmlns:…` attributes.
    fn prefixes_in_current_scope(&self) -> slice::Iter<(String, &'d str)> {
        self.scopes.last().unwrap().defined_prefixes.iter()
    }

    /// Prepare the current scope for `element`: honor preferred prefixes
    /// first (element's, then each attribute's), then auto-generate
    /// prefixes for any namespaces still left unmapped.
    fn populate_scope(&mut self, element: &dom::Element<'d>, attributes: &[dom::Attribute<'d>]) {
        self.scopes.last_mut().unwrap().default_namespace_uri = element.default_namespace_uri();

        if let Some(prefix) = element.preferred_prefix() {
            let name = element.name();
            if let Some(uri) = name.namespace_uri {
                self.set_prefix(prefix, uri);
            }
        }

        for attribute in attributes.iter() {
            if let Some(prefix) = attribute.preferred_prefix() {
                let name = attribute.name();
                if let Some(uri) = name.namespace_uri {
                    self.set_prefix(prefix, uri);
                }
            }
        }

        let name = element.name();
        if let Some(uri) = name.namespace_uri {
            self.generate_prefix(uri);
        }

        for attribute in attributes.iter() {
            let name = attribute.name();
            if let Some(uri) = name.namespace_uri {
                self.generate_prefix(uri);
            }
        }
    }

    /// Try to claim `prefix` for `namespace_uri` in the current scope.
    fn set_prefix(&mut self, prefix: &str, namespace_uri: &'d str) {
        // split_at_mut lets us read parent scopes while mutating the last one.
        let idx_of_last = self.scopes.len().saturating_sub(1);
        let (parents, current_scope) = self.scopes.split_at_mut(idx_of_last);
        let current_scope = &mut current_scope[0];

        // If we're already using this prefix, we can't redefine it.
        if current_scope.has_prefix(prefix) {
            return;
        }

        // We are definitely going to use this prefix, claim it
        current_scope.add_mapping(prefix, namespace_uri);

        for parent_scope in parents.iter().rev() {
            if parent_scope.prefix_is(prefix, namespace_uri) {
                // A parent defines it as the URI we want.
                // Prevent redefining it
                return;
            }
        }

        // Defined by us, must be added to the element
        current_scope.define_prefix(prefix.to_owned(), namespace_uri);
    }

    /// Ensure *some* prefix exists for `namespace_uri`, generating an
    /// `autonsN` one if neither this scope nor a parent provides one.
    fn generate_prefix(&mut self, namespace_uri: &'d str) {
        if Some(namespace_uri) == self.active_default_namespace_uri() {
            // We already map this namespace to the default
            return;
        }

        let idx_of_last = self.scopes.len().saturating_sub(1);
        let (parents, current_scope) = self.scopes.split_at_mut(idx_of_last);
        let current_scope = &mut current_scope[0];

        if current_scope.has_namespace_uri(namespace_uri) {
            // We already map this namespace to *some* prefix
            return;
        }

        // Check if the parent already defined a prefix for this ns
        for parent_scope in parents.iter().rev() {
            if let Some(prefix) = parent_scope.prefix_for(namespace_uri) {
                // A parent happens to have a prefix for this URI.
                // Prevent redefining it
                current_scope.add_mapping(prefix, namespace_uri);
                return;
            }
        }

        // Keep generating `autonsN` names until one is free in this scope.
        loop {
            let prefix = format!("autons{}", self.generated_prefix_count);
            self.generated_prefix_count += 1;

            if !current_scope.has_prefix(&prefix) {
                current_scope.add_mapping(&prefix, namespace_uri);
                current_scope.define_prefix(prefix, namespace_uri);
                break;
            }
        }
    }

    /// Decide how a name in `namespace_uri` should be rendered.
    /// `ignore_default` is set for attributes, which never belong to the
    /// default namespace.
    fn namespace_type<'a>(&'a self, preferred_prefix: Option<&'a str>, namespace_uri: &str, ignore_default: bool) -> NamespaceType<'a> {
        if !ignore_default && Some(namespace_uri) == self.active_default_namespace_uri() {
            return NamespaceType::Default;
        }

        if let Some(prefix) = preferred_prefix {
            if Some(namespace_uri) == self.active_namespace_uri_for_prefix(prefix) {
                return NamespaceType::Prefix(prefix);
            }
        }

        for scope in self.scopes.iter().rev() {
            if let Some(prefix) = scope.prefix_for(namespace_uri) {
                return NamespaceType::Prefix(prefix);
            }
        }

        NamespaceType::Unknown
    }
}

/// Work items for the iterative (non-recursive) formatting loop.
enum Content<'d> {
    Element(dom::Element<'d>),
    // Emitted after an element's children to close its tag and scope.
    ElementEnd(dom::Element<'d>),
    Text(dom::Text<'d>),
    Comment(dom::Comment<'d>),
    ProcessingInstruction(dom::ProcessingInstruction<'d>),
}

/// Write a qualified name, prefixing it according to the active mapping.
///
/// Panics when the namespace has no prefix available (`Unknown`).
fn format_qname<'d, W: ?Sized>(q: QName<'d>,
                               mapping: &mut PrefixMapping<'d>,
                               preferred_prefix: Option<&str>,
                               ignore_default: bool,
                               writer: &mut W)
                               -> io::Result<()>
    where W: Write
{
    // Can something without a namespace be prefixed? No, because
    // defining a prefix requires a non-empty URI
    if let Some(namespace_uri) = q.namespace_uri {
        match mapping.namespace_type(preferred_prefix, namespace_uri, ignore_default) {
            NamespaceType::Default => {
                // No need to do anything
            },
            NamespaceType::Prefix(prefix) => {
                try!(writer.write_str(prefix));
                try!(writer.write_str(":"));
            },
            NamespaceType::Unknown => {
                panic!("No namespace prefix available for {}", namespace_uri);
            },
        }
    }
    writer.write_str(q.local_part)
}

/// Write an attribute value, escaping the five XML-special characters.
fn format_attribute_value<W: ?Sized>(value: &str, writer: &mut W) -> io::Result<()>
    where W: Write
{
    for item in value.split_keeping_delimiter(|c| c == '<' || c == '>' || c == '&' || c == '\'' || c == '"') {
        match item {
            SplitType::Match(t)        => try!(writer.write_str(t)),
            SplitType::Delimiter("<")  => try!(writer.write_str("&lt;")),
            SplitType::Delimiter(">")  => try!(writer.write_str("&gt;")),
            SplitType::Delimiter("&")  => try!(writer.write_str("&amp;")),
            SplitType::Delimiter("'")  => try!(writer.write_str("&apos;")),
            SplitType::Delimiter("\"") => try!(writer.write_str("&quot;")),
            SplitType::Delimiter(..)   => unreachable!(),
        }
    }
    Ok(())
}

/// Write an element's start tag (attributes + namespace declarations) and
/// queue its children; `<…/>` is used when there are no children.
fn format_element<'d, W: ?Sized>(element: dom::Element<'d>,
                                 todo: &mut Vec<Content<'d>>,
                                 mapping: &mut PrefixMapping<'d>,
                                 writer: &mut W)
                                 -> io::Result<()>
    where W: Write
{
    let attrs = element.attributes();

    mapping.populate_scope(&element, &attrs);

    try!(writer.write_str("<"));
    try!(format_qname(element.name(), mapping, element.preferred_prefix(), false, writer));

    for attr in &attrs {
        try!(writer.write_str(" "));
        // ignore_default = true: attributes never use the default namespace.
        try!(format_qname(attr.name(), mapping, attr.preferred_prefix(), true, writer));
        try!(write!(writer, "='"));
        try!(format_attribute_value(attr.value(), writer));
        try!(write!(writer, "'"));
    }

    if let Some(ns_uri) = mapping.default_namespace_uri_in_current_scope() {
        try!(writer.write_str(" xmlns='"));
        try!(writer.write_str(ns_uri));
        try!(writer.write_str("'"));
    }

    for &(ref prefix, ref ns_uri) in mapping.prefixes_in_current_scope() {
        try!(writer.write_str(" xmlns:"));
        try!(writer.write_str(prefix));
        try!(write!(writer, "='{}'", ns_uri));
    }

    let mut children = element.children();
    if children.is_empty() {
        try!(writer.write_str("/>"));
        mapping.pop_scope();
        Ok(())
    } else {
        try!(writer.write_str(">"));

        // Push the end-tag marker first, then children reversed, so the
        // LIFO `todo` stack pops them in document order.
        todo.push(ElementEnd(element));
        children.reverse();
        let x = children.into_iter().map(|c| match c {
            ChildOfElement::Element(element) => Element(element),
            ChildOfElement::Text(t) => Text(t),
            ChildOfElement::Comment(c) => Comment(c),
            ChildOfElement::ProcessingInstruction(p) => ProcessingInstruction(p),
        });
        todo.extend(x);
        Ok(())
    }
}

/// Write an element's closing tag.
fn format_element_end<'d, W: ?Sized>(element: dom::Element<'d>,
                                     mapping: &mut PrefixMapping<'d>,
                                     writer: &mut W)
                                     -> io::Result<()>
    where W: Write
{
    try!(writer.write_str("</"));
    try!(format_qname(element.name(), mapping, element.preferred_prefix(), false, writer));
    writer.write_str(">")
}

use super::str_ext::{SplitKeepingDelimiterExt,SplitType};

/// Write character data, escaping `<`, `>` and `&` (quotes are legal in text).
fn format_text<W: ?Sized>(text: dom::Text, writer: &mut W) -> io::Result<()>
    where W: Write
{
    for item in text.text().split_keeping_delimiter(|c| c == '<' || c == '>' || c == '&') {
        match item {
            SplitType::Match(t)       => try!(writer.write_str(t)),
            SplitType::Delimiter("<") => try!(writer.write_str("&lt;")),
            SplitType::Delimiter(">") => try!(writer.write_str("&gt;")),
            SplitType::Delimiter("&") => try!(writer.write_str("&amp;")),
            SplitType::Delimiter(..)  => unreachable!(),
        }
    }
    Ok(())
}

fn format_comment<W: ?Sized>(comment: dom::Comment, writer: &mut W) -> io::Result<()>
    where W: Write
{
    write!(writer, "<!--{}-->", comment.text())
}

fn format_processing_instruction<W: ?Sized>(pi: dom::ProcessingInstruction, writer: &mut W)
                                            -> io::Result<()>
    where W: Write
{
    match pi.value() {
        None    => write!(writer, "<?{}?>", pi.target()),
        Some(v) => write!(writer, "<?{} {}?>", pi.target(), v),
    }
}

/// Dispatch one work item; element start/end bracket a prefix scope.
fn format_one<'d, W: ?Sized>(content: Content<'d>,
                             todo: &mut Vec<Content<'d>>,
                             mapping: &mut PrefixMapping<'d>,
                             writer: &mut W)
                             -> io::Result<()>
    where W: Write
{
    match content {
        Element(e) => {
            mapping.push_scope();
            format_element(e, todo, mapping, writer)
        },
        ElementEnd(e) => {
            let r = format_element_end(e, mapping, writer);
            mapping.pop_scope();
            r
        },
        Text(t) => format_text(t, writer),
        Comment(c) => format_comment(c, writer),
        ProcessingInstruction(p) => format_processing_instruction(p, writer),
    }
}

/// Serialize one top-level element tree iteratively via a work stack
/// (avoids recursion on deep documents).
fn format_body<W: ?Sized>(element: dom::Element, writer: &mut W) -> io::Result<()>
    where W: Write
{
    let mut todo = vec![Element(element)];
    let mut mapping = PrefixMapping::new();

    while !todo.is_empty() {
        try!(format_one(todo.pop().unwrap(), &mut todo, &mut mapping, writer));
    }

    Ok(())
}

/// Formats a document into a Write
pub fn format_document<'d, W: ?Sized>(doc: &'d dom::Document<'d>, writer: &mut W) -> io::Result<()>
    where W: Write
{
    try!(writer.write_str("<?xml version='1.0'?>"));

    for child in doc.root().children().into_iter() {
        try!(match child {
            ChildOfRoot::Element(e) => format_body(e, writer),
            ChildOfRoot::Comment(c) => format_comment(c, writer),
            ChildOfRoot::ProcessingInstruction(p) => format_processing_instruction(p, writer),
        })
    }

    Ok(())
}

#[cfg(test)]
mod test {
    use super::super::Package;
    use super::super::dom;
    use super::format_document;

    // Serialize `doc` to a String for assertion convenience.
    fn format_xml<'d>(doc: &'d dom::Document<'d>) -> String {
        let mut w = Vec::new();
        format_document(doc, &mut w).expect("Not formatted");
        String::from_utf8(w).expect("Not a string")
    }

    #[test]
    fn top_element() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element("hello");
        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello/>");
    }

    #[test]
    fn element_with_namespace() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element(("namespace", "local-part"));
        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><autons0:local-part xmlns:autons0='namespace'/>");
    }

    #[test]
    fn element_with_default_namespace() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element(("namespace", "local-part"));
        e.set_default_namespace_uri(Some("namespace"));
        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><local-part xmlns='namespace'/>");
    }

    #[test]
    fn element_with_preferred_namespace_prefix() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element(("namespace", "local-part"));
        e.set_preferred_prefix(Some("prefix"));
        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><prefix:local-part xmlns:prefix='namespace'/>");
    }

    #[test]
    fn element_with_attributes() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element("hello");
        e.set_attribute_value("a", "b");
        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello a='b'/>");
    }

    #[test]
    fn attribute_with_namespace() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element("hello");
        e.set_attribute_value(("namespace", "a"), "b");
        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello autons0:a='b' xmlns:autons0='namespace'/>");
    }

    #[test]
    fn attribute_with_preferred_namespace_prefix() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element("hello");
        let a = e.set_attribute_value(("namespace", "a"), "b");
        a.set_preferred_prefix(Some("p"));
        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello p:a='b' xmlns:p='namespace'/>");
    }

    #[test]
    fn attribute_with_default_namespace_prefix() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element(("namespace", "hello"));
        e.set_preferred_prefix(Some("p"));
        e.set_default_namespace_uri(Some("namespace"));
        e.set_attribute_value(("namespace", "a"), "b");
        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello p:a='b' xmlns='namespace' xmlns:p='namespace'/>");
    }

    #[test]
    fn attributes_with_conflicting_preferred_namespace_prefixes() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element("hello");

        let a = e.set_attribute_value(("namespace1", "a1"), "b1");
        a.set_preferred_prefix(Some("p"));

        let a = e.set_attribute_value(("namespace2", "a2"), "b2");
        a.set_preferred_prefix(Some("p"));

        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello p:a1='b1' autons0:a2='b2' xmlns:p='namespace1' xmlns:autons0='namespace2'/>");
    }

    #[test]
    fn attributes_with_different_preferred_namespace_prefixes_for_same_namespace() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element("hello");

        let a = e.set_attribute_value(("namespace", "a1"), "b1");
        a.set_preferred_prefix(Some("p1"));

        let a = e.set_attribute_value(("namespace", "a2"), "b2");
        a.set_preferred_prefix(Some("p2"));

        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello p1:a1='b1' p2:a2='b2' xmlns:p1='namespace' xmlns:p2='namespace'/>");
    }

    #[test]
    fn attribute_values_with_less_than_greater_than_ampersand_apostrophe_or_quote_are_escaped() {
        let p = Package::new();
        let d = p.as_document();
        let e = d.create_element("hello");
        e.set_attribute_value("name", r#"'1 < 2' & "4 > 3""#);
        d.root().append_child(e);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello name='&apos;1 &lt; 2&apos; &amp; &quot;4 &gt; 3&quot;'/>");
    }

    #[test]
    fn nested_element() {
        let p = Package::new();
        let d = p.as_document();
        let hello = d.create_element("hello");
        let world = d.create_element("world");
        hello.append_child(world);
        d.root().append_child(hello);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello><world/></hello>");
    }

    #[test]
    fn nested_element_with_namespaces() {
        let p = Package::new();
        let d = p.as_document();
        let hello = d.create_element(("outer", "hello"));
        let world = d.create_element(("inner", "world"));
        hello.append_child(world);
        d.root().append_child(hello);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><autons0:hello xmlns:autons0='outer'><autons1:world xmlns:autons1='inner'/></autons0:hello>");
    }

    #[test]
    fn nested_empty_element_with_namespaces() {
        let p = Package::new();
        let d = p.as_document();
        let hello = d.create_element(("outer", "hello"));
        hello.set_default_namespace_uri(Some("outer"));
        hello.set_preferred_prefix(Some("o"));
        let world = d.create_element("world");
        world.set_default_namespace_uri(Some("inner"));
        let empty = d.create_element("empty");
        world.append_child(empty);
        hello.append_child(world);
        d.root().append_child(hello);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello xmlns='outer' xmlns:o='outer'><world xmlns='inner'><empty/></world></hello>");
    }

    #[test]
    fn nested_element_with_namespaces_with_reused_namespaces() {
        let p = Package::new();
        let d = p.as_document();
        let hello = d.create_element(("ns", "hello"));
        let world = d.create_element(("ns", "world"));
        hello.append_child(world);
        d.root().append_child(hello);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><autons0:hello xmlns:autons0='ns'><autons0:world/></autons0:hello>");
    }

    #[test]
    fn nested_element_with_with_conflicting_preferred_namespace_prefixes() {
        let p = Package::new();
        let d = p.as_document();
        let hello = d.create_element(("outer", "hello"));
        let world = d.create_element(("inner", "world"));
        hello.set_preferred_prefix(Some("p"));
        world.set_preferred_prefix(Some("p"));
        hello.append_child(world);
        d.root().append_child(hello);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><p:hello xmlns:p='outer'><p:world xmlns:p='inner'/></p:hello>");
    }

    #[test]
    fn nested_text() {
        let p = Package::new();
        let d = p.as_document();
        let hello = d.create_element("hello");
        let text = d.create_text("A fine day to you!");
        hello.append_child(text);
        d.root().append_child(hello);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello>A fine day to you!</hello>");
    }

    #[test]
    fn text_escapes_less_than_greater_than_and_ampersand() {
        let p = Package::new();
        let d = p.as_document();
        let hello = d.create_element("escaped");
        let text = d.create_text("1 < 3 & 4 > 2");
        hello.append_child(text);
        d.root().append_child(hello);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><escaped>1 &lt; 3 &amp; 4 &gt; 2</escaped>");
    }

    #[test]
    fn nested_comment() {
        let p = Package::new();
        let d = p.as_document();
        let hello = d.create_element("hello");
        let comment = d.create_comment(" Fill this in ");
        hello.append_child(comment);
        d.root().append_child(hello);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello><!-- Fill this in --></hello>");
    }

    #[test]
    fn nested_processing_instruction_without_value() {
        let p = Package::new();
        let d = p.as_document();
        let hello = d.create_element("hello");
        let pi = d.create_processing_instruction("display", None);
        hello.append_child(pi);
        d.root().append_child(hello);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello><?display?></hello>");
    }

    #[test]
    fn nested_processing_instruction_with_value() {
        let p = Package::new();
        let d = p.as_document();
        let hello = d.create_element("hello");
        let pi = d.create_processing_instruction("display", Some("screen"));
        hello.append_child(pi);
        d.root().append_child(hello);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><hello><?display screen?></hello>");
    }

    #[test]
    fn top_level_comment() {
        let p = Package::new();
        let d = p.as_document();
        let comment = d.create_comment(" Fill this in ");
        d.root().append_child(comment);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><!-- Fill this in -->");
    }

    #[test]
    fn top_level_processing_instruction() {
        let p = Package::new();
        let d = p.as_document();
        let pi = d.create_processing_instruction("display", None);
        d.root().append_child(pi);

        let xml = format_xml(&d);
        assert_eq!(xml, "<?xml version='1.0'?><?display?>");
    }
}
// Crate module layout.
pub mod apis;
pub mod discord;
pub mod helpers;
// Implementation lives in `parse_swc_ast`; its public items are
// re-exported so callers can use this module's root directly.
mod parse_swc_ast;

pub use parse_swc_ast::*;
//! An ownership-semantics based handle to OpenGL. This prevents us from //! accidentally modifying OpenGL state from multiple threads. //! //! GLW stands for "OpenGL wrapper". #![feature(globs)] #![feature(phase)] #![feature(unsafe_destructor)] extern crate gl; extern crate libc; #[phase(plugin, link)] extern crate log; extern crate nalgebra; pub mod camera; pub mod color; mod cstr_cache; pub mod gl_buffer; pub mod gl_context; pub mod light; pub mod shader; pub mod texture; pub mod vertex;
use crate::generated::{
    spdk_app_fini, spdk_app_opts, spdk_app_opts_init, spdk_app_start, spdk_app_stop,
};

use libc::c_char;
use libc::c_void;
use std::ffi::CString;
use std::ptr;

/// Errors surfaced by the SPDK application wrapper.
#[derive(Debug, Error)]
pub enum AppError {
    #[error(display = "Spdk failed to start: {}", _0)]
    StartupError(i32),
}

/// Owned wrapper around the raw `spdk_app_opts` struct. C-string fields
/// are allocated with `CString::into_raw` and reclaimed in `Drop`.
#[derive(Default)]
pub struct AppOpts(spdk_app_opts);

impl AppOpts {
    /// Create options pre-populated with SPDK defaults.
    pub fn new() -> Self {
        let mut opts: spdk_app_opts = Default::default();
        unsafe {
            // SAFETY: `opts` is a valid, live struct for the duration of the call.
            spdk_app_opts_init(&mut opts as *mut spdk_app_opts);
        }
        AppOpts(opts)
    }

    /// Set the application name. The allocation is leaked into the C
    /// struct and reclaimed in `Drop`.
    pub fn name(&mut self, name: &str) {
        self.0.name = CString::new(name)
            .expect("Couldn't create a string")
            .into_raw()
    }

    /// Set the config file path (same ownership scheme as `name`).
    pub fn config_file(&mut self, config_file: &str) {
        self.0.config_file = CString::new(config_file)
            .expect("Couldn't create a string")
            .into_raw()
    }

    // TODO: probably need this to properly deallocate pollers :()
    // pub fn shutdown_cb() {
    //     //spdk_app_shutdown_cb
    // }

    /// Start the SPDK application, invoking `f` once the framework is up.
    /// Consumes `self`; `spdk_app_fini` is always called before returning.
    ///
    /// NOTE(review): `f` is handed to C as a raw pointer to a stack value.
    /// This assumes `spdk_app_start` blocks until the app stops, so the
    /// closure outlives every callback invocation — confirm against the
    /// SPDK app framework docs.
    pub fn start<F>(mut self, f: F) -> Result<(), AppError>
    where
        F: Fn() -> (),
    {
        let user_data = &f as *const _ as *mut c_void;

        // C trampoline: recover the closure from the user-data pointer and call it.
        extern "C" fn start_wrapper<F>(closure: *mut c_void, _: *mut c_void)
        where
            F: Fn() -> (),
        {
            let opt_closure = closure as *mut F;
            unsafe { (*opt_closure)() }
        }

        let ret = unsafe {
            let self_ref = &mut self;
            let opts_ref = &mut self_ref.0;
            spdk_app_start(
                opts_ref as *mut spdk_app_opts,
                Some(start_wrapper::<F>),
                user_data,
                ptr::null_mut(),
            )
        };

        // Tear down the SPDK environment regardless of the start result.
        unsafe {
            spdk_app_fini();
        }

        if ret == 0 {
            Ok(())
        } else {
            Err(AppError::StartupError(ret))
        }
    }
}

/// Signal the running SPDK application to stop (0 = success, -1 = failure).
pub fn app_stop(success: bool) {
    unsafe {
        spdk_app_stop(if success { 0 } else { -1 });
    };
}

impl Drop for AppOpts {
    fn drop(&mut self) {
        // Reclaim the CStrings leaked into the C struct by `name`/`config_file`.
        drop_if_not_null(self.0.name as *mut c_char);
        drop_if_not_null(self.0.config_file as *mut c_char);
    }
}

/// Rebuild and drop a `CString` previously leaked with `into_raw`.
fn drop_if_not_null(string: *mut c_char) {
    if !string.is_null() {
        unsafe { CString::from_raw(string as *mut c_char) };
    }
}
/**
 text.txt contains hex-encoded ciphertexts. One of them has been encrypted with ECB. Detect it.
 Remember that the problem with ECB is that it is stateless and deterministic; the same 16 byte plaintext block will always produce the same 16 byte ciphertext.
*/
extern crate hex;

use std::fs;
use std::collections::HashMap;
use std::str;

fn main() {
    let hex_encoded_data = fs::read_to_string("text.txt")
        .expect("Unable to read file");

    // One hex-encoded ciphertext per line.
    let ciphertexts = hex_encoded_data
        .split("\n")
        .collect::<Vec<&str>>();

    let found = detect_aes(ciphertexts);
    println!("Found: {:?}", found);
}

/// A candidate ciphertext plus a tally of how often each chunk of its
/// hex encoding occurs.
struct TalliedText<'a> {
    tally: HashMap<&'a str, i32>,
    text: String
}

impl <'a> TalliedText<'a> {
    fn new_from_string(text: &str) -> TalliedText<'a> {
        TalliedText {
            tally: HashMap::new(),
            text: String::from(text)
        }
    }
}

/// Return the ciphertext most likely to be ECB-encrypted: the one whose
/// hex encoding contains the most repeated chunks.
///
/// NOTE(review): the input is chunked as 16 *hex characters* (= 8
/// ciphertext bytes), not 16 ciphertext bytes (= 32 hex characters).
/// Repeated 16-byte ECB blocks still yield repeated 16-char hex chunks,
/// so detection works in practice, but confirm whether 32 was intended.
fn detect_aes(ciphertexts: Vec<&str>) -> String {
    let mut needles = ciphertexts
        .iter()
        .map(|text| {
            text
                .as_bytes()
                .chunks(16)
                .map(str::from_utf8)
                .fold(TalliedText::new_from_string(*text), |mut acc, chunk| {
                    // Count occurrences of each chunk.
                    let chunk_entry = acc.tally
                        .entry(chunk.unwrap())
                        .or_insert(0);
                    *chunk_entry += 1;
                    acc
                })
        })
        .collect::<Vec<_>>();

    // Score = total count of chunks that appear more than once.
    fn add_scores_above_1(acc: i32, entry: &i32) -> i32 {
        if *entry > 1 {
            return acc + *entry;
        }
        acc
    }

    // Sort descending by repetition score; the winner is first.
    needles
        .sort_by(|a, b| b.tally.values().fold(0, add_scores_above_1).cmp(&a.tally.values().fold(0, add_scores_above_1)));

    String::from(needles[0].text.clone())
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;

    #[test]
    fn test_xor() {
        let hex_encoded_data = fs::read_to_string("text.txt")
            .expect("Unable to read file");

        let ciphertexts = hex_encoded_data
            .split("\n")
            .collect::<Vec<&str>>();

        let result = detect_aes(ciphertexts);
        let actual = "d880619740a8a19b7840a8a31c810a3d08649af70dc06f4fd5d2d69c744cd283e2dd052f6b641dbf9d11b0348542bb5708649af70dc06f4fd5d2d69c744cd2839475c9dfdbc1d46597949d9c7e82bf5a08649af70dc06f4fd5d2d69c744cd28397a93eab8d6aecd566489154789a6b0308649af70dc06f4fd5d2d69c744cd283d403180c98c8f6db1f2a3f9c4040deb0ab51b29933f2c123c58386b06fba186a";
        assert_eq!(result, actual);
    }
}
//! Common functionality and types. use std::path::PathBuf; use anyhow::{anyhow, Context, Result}; use async_std::path::PathBuf as AsyncPathBuf; use async_std::task::spawn_blocking; use console::Emoji; use indicatif::{ProgressBar, ProgressStyle}; pub static BUILDING: Emoji<'_, '_> = Emoji("📦", ""); pub static SUCCESS: Emoji<'_, '_> = Emoji("✅", ""); pub static ERROR: Emoji<'_, '_> = Emoji("❌", ""); pub static SERVER: Emoji<'_, '_> = Emoji("📡", ""); /// Ensure the given value for `--public-url` is formatted correctly. pub fn parse_public_url(val: &str) -> String { let prefix = if !val.starts_with('/') { "/" } else { "" }; let suffix = if !val.ends_with('/') { "/" } else { "" }; format!("{}{}{}", prefix, val, suffix) } /// A utility function to recursively copy a directory. pub async fn copy_dir_recursive(from_dir: PathBuf, to_dir: PathBuf) -> Result<()> { if !AsyncPathBuf::from(&from_dir).exists().await { return Err(anyhow!("directory can not be copied as it does not exist {:?}", &from_dir)); } spawn_blocking(move || { let opts = fs_extra::dir::CopyOptions { overwrite: true, content_only: true, ..Default::default() }; let _ = fs_extra::dir::copy(from_dir, to_dir, &opts).context("error copying directory")?; Ok(()) }) .await } /// A utility function to recursively delete a directory. Use this instead of /// fs::remove_dir_all(...) because of Windows compatibility issues, per advice of /// https://blog.qwaz.io/chat/issues-of-rusts-remove-dir-all-implementation-on-windows pub async fn remove_dir_all(from_dir: PathBuf) -> Result<()> { if !AsyncPathBuf::from(&from_dir).exists().await { return Ok(()); } spawn_blocking(move || { ::remove_dir_all::remove_dir_all(from_dir.as_path()).context("error removing directory")?; Ok(()) }) .await } /// Build system spinner. pub fn spinner() -> ProgressBar { let style = ProgressStyle::default_spinner().template("{spinner} {prefix} trunk | {wide_msg}"); ProgressBar::new_spinner().with_style(style) }
//! Playground exercising basic Rust syntax: mutability, vectors, arrays,
//! mutable slices, shadowing, and simple functions.

fn main() {
    println!("Hello, world!");

    // Mutable binding: can be repointed at another string literal.
    let mut name = "m0riiii";
    println!("hello, {}", name);
    name = "change";
    println!("hello, {}", name);

    let vec = vec![1,2,3];
    println!("all vec is {:?}", vec);
    // `{:?}` only borrows, so the vector can be printed again.
    println!("[reprint]all vec is {:?}", vec);

    println!("1 + 1 = {}", add(1,1));
    println!("13 + 23 = {}", add(13,23));
    println!("1 - 1 = {}", sub(1,1));
    println!("2 * 4 = {}", times(2,4));

    // Unit-returning call; no need to bind the `()` result.
    f();

    let mut a = [0,1,2,3,4,5];
    a[2] = 100;
    println!("{:?}", a);
    println!("a.len() = {}", a.len());

    // Mutable slice covering elements 1..4 of the array.
    let middle = &mut a[1..4];
    println!("{:?}", middle);
    println!("middle.len() = {}", middle.len());

    println!("{:?}", [fib(1),fib(2),fib(3),fib(4),fib(5),fib(6)]);

    // Fixed-size array initialized to zeros, then mutated in place.
    let mut b: [i32;3] = [0;3];
    b[0] = 1;
    println!("{:?}", b);
}

/// Returns the sum of `a` and `b`.
fn add(a: i32, b: i32) -> i32 {
    a + b
}

/// Returns `a` minus `b`.
fn sub(a: i32, b: i32) -> i32 {
    a - b
}

/// Returns the product of `a` and `b`.
fn times(a: i32, b: i32) -> i32 {
    a * b
}

/// Does nothing; demonstrates a unit-returning function.
fn f(){}

/// Naive recursive Fibonacci (fib(0) = 0, fib(1) = 1).
/// Exponential time — fine for the tiny inputs used here.
fn fib(n: i32) -> i32 {
    if n < 2 {
        n
    } else {
        fib(n - 1) + fib(n - 2)
    }
}
use super::*;

/// Panel listing nicknames, occupying the rightmost fifth of the parent
/// window's width.
pub struct Nicklist {
    parent: Rc<Window>,
    window: Window,
    ctx: Rc<Context>,
}

impl Nicklist {
    /// Create the nicklist as a subwindow of `parent`, anchored at x =
    /// 4/5 of the parent's width.
    // NOTE(review): assumes curses-style `subwin(nlines, ncols, y, x)`
    // argument order — confirm against the Window API.
    pub fn new(parent: Rc<Window>, ctx: Rc<Context>) -> Self {
        let bounds = parent.get_max_yx();
        let window = parent
            .subwin(0, 0, 0, bounds.1 - bounds.1 / 5)
            .expect("create nicklist");

        Self {
            parent,
            window: window.into(),
            ctx,
        }
    }

    /// Always reports hidden; visibility is not implemented yet.
    pub fn is_visible(&self) -> bool {
        false
    }

    /// Toggling visibility is not implemented yet (intentional no-op).
    pub fn toggle(&self) {}
}

impl_recv!(Nicklist);
//! Internet Layer packets pub mod ipv4; pub mod ipv6; pub mod icmp; pub mod icmpv6;
use crate::pool::TxPool;
use crate::service::ChainReorgArgs;
use ckb_logger::debug_target;
use ckb_snapshot::Snapshot;
use ckb_store::ChainStore;
use ckb_types::{
    core::{cell::get_related_dep_out_points, BlockView, TransactionView},
    packed::{Byte32, OutPoint, ProposalShortId},
    prelude::*,
};
use ckb_util::LinkedHashSet;
use ckb_verification::cache::CacheEntry;
use futures::future::Future;
use std::collections::HashMap;
use std::collections::{HashSet, VecDeque};
use std::sync::Arc;
use tokio::prelude::{Async, Poll};
use tokio::sync::lock::Lock;

/// Future that waits for the tx-pool lock and then applies a chain reorg
/// (detached/attached blocks) to the pool exactly once.
pub struct ChainReorgProcess {
    pub tx_pool: Lock<TxPool>,
    pub txs_verify_cache: HashMap<Byte32, CacheEntry>,
    // `Some` until the reorg is applied; taken on the first successful poll.
    pub args: Option<ChainReorgArgs>,
}

impl ChainReorgProcess {
    pub fn new(
        tx_pool: Lock<TxPool>,
        txs_verify_cache: HashMap<Byte32, CacheEntry>,
        detached_blocks: VecDeque<BlockView>,
        attached_blocks: VecDeque<BlockView>,
        detached_proposal_id: HashSet<ProposalShortId>,
        snapshot: Arc<Snapshot>,
    ) -> ChainReorgProcess {
        ChainReorgProcess {
            tx_pool,
            txs_verify_cache,
            args: Some((
                detached_blocks,
                attached_blocks,
                detached_proposal_id,
                snapshot,
            )),
        }
    }
}

impl Future for ChainReorgProcess {
    type Item = HashMap<Byte32, CacheEntry>;
    type Error = ();

    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        match self.tx_pool.poll_lock() {
            Async::Ready(mut guard) => {
                // Taking `args` out makes a second completed poll a bug.
                let (detached_blocks, attached_blocks, detached_proposal_id, snapshot) =
                    self.args.take().expect("cannot poll twice");
                let ret = update_tx_pool_for_reorg(
                    &mut guard,
                    &self.txs_verify_cache,
                    detached_blocks,
                    attached_blocks,
                    detached_proposal_id,
                    snapshot,
                );
                Ok(Async::Ready(ret))
            }
            Async::NotReady => Ok(Async::NotReady),
        }
    }
}

/// Apply a chain reorg to the tx-pool against the new `snapshot`:
/// remove newly committed txs, re-add detached-only txs, and promote
/// pending/gap/conflict entries whose proposals are now in effect.
/// Returns verify-cache entries that should be persisted by the caller.
pub fn update_tx_pool_for_reorg(
    tx_pool: &mut TxPool,
    txs_verify_cache: &HashMap<Byte32, CacheEntry>,
    detached_blocks: VecDeque<BlockView>,
    attached_blocks: VecDeque<BlockView>,
    detached_proposal_id: HashSet<ProposalShortId>,
    snapshot: Arc<Snapshot>,
) -> HashMap<Byte32, CacheEntry> {
    tx_pool.snapshot = Arc::clone(&snapshot);

    let mut detached = LinkedHashSet::default();
    let mut attached = LinkedHashSet::default();

    // `skip(1)` skips each block's cellbase transaction.
    for blk in detached_blocks {
        detached.extend(blk.transactions().iter().skip(1).cloned())
    }

    for blk in attached_blocks {
        attached.extend(blk.transactions().iter().skip(1).cloned());
        tx_pool.fee_estimator.process_block(
            blk.header().number(),
            blk.transactions().iter().skip(1).map(|tx| tx.hash()),
        );
    }

    // Transactions detached but not re-attached must go back into the pool.
    let retain: Vec<TransactionView> = detached.difference(&attached).cloned().collect();

    let txs_iter = attached.iter().map(|tx| {
        let get_cell_data = |out_point: &OutPoint| {
            snapshot
                .get_cell_data(&out_point.tx_hash(), out_point.index().unpack())
                .map(|result| result.0)
        };
        let related_out_points =
            get_related_dep_out_points(tx, get_cell_data).expect("Get dep out points failed");
        (tx, related_out_points)
    });
    // NOTE: `remove_expired` will try to re-put the given expired/detached proposals into
    // pending-pool if they can be found within txpool. As for a transaction
    // which is both expired and committed at the one time(commit at its end of commit-window),
    // we should treat it as a committed and not re-put into pending-pool. So we should ensure
    // that involves `remove_committed_txs_from_proposed` before `remove_expired`.
    tx_pool.remove_committed_txs_from_proposed(txs_iter);
    tx_pool.remove_expired(detached_proposal_id.iter());

    let to_update_cache = retain
        .into_iter()
        .filter_map(|tx| tx_pool.readd_dettached_tx(&snapshot, txs_verify_cache, tx))
        .collect();

    for tx in &attached {
        tx_pool.try_proposed_orphan_by_ancestor(tx);
    }

    let mut entries = Vec::new();
    let mut gaps = Vec::new();

    // pending ---> gap ----> proposed
    // try move gap to proposed
    let mut removed: Vec<ProposalShortId> = Vec::with_capacity(tx_pool.gap.size());
    for key in tx_pool.gap.keys_sorted_by_fee_and_relation() {
        if snapshot.proposals().contains_proposed(&key.id) {
            let entry = tx_pool.gap.get(&key.id).expect("exists");
            entries.push((
                Some(CacheEntry::new(entry.cycles, entry.fee)),
                entry.size,
                entry.transaction.to_owned(),
            ));
            removed.push(key.id.clone());
        }
    }
    // Removal is deferred to avoid mutating while iterating.
    removed.into_iter().for_each(|id| {
        tx_pool.gap.remove_entry_and_descendants(&id);
    });

    // try move pending to proposed
    let mut removed: Vec<ProposalShortId> = Vec::with_capacity(tx_pool.pending.size());
    for key in tx_pool.pending.keys_sorted_by_fee_and_relation() {
        let entry = tx_pool.pending.get(&key.id).expect("exists");
        if snapshot.proposals().contains_proposed(&key.id) {
            entries.push((
                Some(CacheEntry::new(entry.cycles, entry.fee)),
                entry.size,
                entry.transaction.to_owned(),
            ));
            removed.push(key.id.clone());
        } else if snapshot.proposals().contains_gap(&key.id) {
            gaps.push((
                Some(CacheEntry::new(entry.cycles, entry.fee)),
                entry.size,
                entry.transaction.to_owned(),
            ));
            removed.push(key.id.clone());
        }
    }
    removed.into_iter().for_each(|id| {
        tx_pool.pending.remove_entry(&id);
    });

    // try move conflict to proposed
    for entry in tx_pool.conflict.entries() {
        if snapshot.proposals().contains_proposed(entry.key()) {
            let entry = entry.remove();
            entries.push((entry.cache_entry, entry.size, entry.transaction));
        } else if snapshot.proposals().contains_gap(entry.key()) {
            let entry = entry.remove();
            gaps.push((entry.cache_entry, entry.size, entry.transaction));
        }
    }

    // Promote collected entries; failures are logged, not fatal.
    for (cycles, size, tx) in entries {
        let tx_hash = tx.hash();
        if let Err(e) = tx_pool.proposed_tx_and_descendants(cycles, size, tx) {
            debug_target!(
                crate::LOG_TARGET_TX_POOL,
                "Failed to add proposed tx {}, reason: {}",
                tx_hash,
                e
            );
        }
    }

    for (cycles, size, tx) in gaps {
        debug_target!(
            crate::LOG_TARGET_TX_POOL,
            "tx proposed, add to gap {}",
            tx.hash()
        );
        let tx_hash = tx.hash();
        if let Err(e) = tx_pool.gap_tx(cycles, size, tx) {
            debug_target!(
                crate::LOG_TARGET_TX_POOL,
                "Failed to add tx to gap {}, reason: {}",
                tx_hash,
                e
            );
        }
    }

    to_update_cache
}
// This file is part of Substrate.

// Copyright (C) 2019-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 2.0.0-rc6
//!
//! Benchmarked weight functions for `pallet_elections_phragmen`.
//! Each function returns a base weight (picoseconds of ref time in this
//! Substrate version) plus per-input terms and database read/write costs
//! taken from `T::DbWeight`. Do not edit the constants by hand — regenerate
//! with the benchmark CLI instead.

#![allow(unused_parens)]
#![allow(unused_imports)]

use frame_support::{traits::Get, weights::Weight};
use sp_std::marker::PhantomData;

// Zero-sized carrier for the runtime type parameter `T`; only `T::DbWeight`
// is ever read from it.
pub struct WeightInfo<T>(PhantomData<T>);

impl<T: frame_system::Trait> pallet_elections_phragmen::WeightInfo for WeightInfo<T> {
    // `v` = number of votes in the ballot.
    fn vote(v: u32) -> Weight {
        (91_489_000 as Weight)
            .saturating_add((199_000 as Weight).saturating_mul(v as Weight))
            .saturating_add(T::DbWeight::get().reads(5 as Weight))
            .saturating_add(T::DbWeight::get().writes(2 as Weight))
    }
    // `v` = number of votes in the updated ballot.
    fn vote_update(v: u32) -> Weight {
        (56_511_000 as Weight)
            .saturating_add((245_000 as Weight).saturating_mul(v as Weight))
            .saturating_add(T::DbWeight::get().reads(5 as Weight))
            .saturating_add(T::DbWeight::get().writes(2 as Weight))
    }
    fn remove_voter() -> Weight {
        (76_714_000 as Weight)
            .saturating_add(T::DbWeight::get().reads(2 as Weight))
            .saturating_add(T::DbWeight::get().writes(2 as Weight))
    }
    // `c` = candidate count, `v` = vote count of the reported voter.
    fn report_defunct_voter_correct(c: u32, v: u32) -> Weight {
        (0 as Weight)
            .saturating_add((1_743_000 as Weight).saturating_mul(c as Weight))
            .saturating_add((31_750_000 as Weight).saturating_mul(v as Weight))
            .saturating_add(T::DbWeight::get().reads(7 as Weight))
            .saturating_add(T::DbWeight::get().writes(3 as Weight))
    }
    // Same shape as the `correct` case, with the benchmark constants for an
    // incorrect report.
    fn report_defunct_voter_incorrect(c: u32, v: u32) -> Weight {
        (0 as Weight)
            .saturating_add((1_733_000 as Weight).saturating_mul(c as Weight))
            .saturating_add((31_861_000 as Weight).saturating_mul(v as Weight))
            .saturating_add(T::DbWeight::get().reads(6 as Weight))
            .saturating_add(T::DbWeight::get().writes(2 as Weight))
    }
    // `c` = number of existing candidates.
    fn submit_candidacy(c: u32) -> Weight {
        (74_714_000 as Weight)
            .saturating_add((315_000 as Weight).saturating_mul(c as Weight))
            .saturating_add(T::DbWeight::get().reads(3 as Weight))
            .saturating_add(T::DbWeight::get().writes(1 as Weight))
    }
    fn renounce_candidacy_candidate(c: u32) -> Weight {
        (50_408_000 as Weight)
            .saturating_add((159_000 as Weight).saturating_mul(c as Weight))
            .saturating_add(T::DbWeight::get().reads(1 as Weight))
            .saturating_add(T::DbWeight::get().writes(1 as Weight))
    }
    fn renounce_candidacy_members() -> Weight {
        (79_626_000 as Weight)
            .saturating_add(T::DbWeight::get().reads(3 as Weight))
            .saturating_add(T::DbWeight::get().writes(4 as Weight))
    }
    fn renounce_candidacy_runners_up() -> Weight {
        (49_715_000 as Weight)
            .saturating_add(T::DbWeight::get().reads(1 as Weight))
            .saturating_add(T::DbWeight::get().writes(1 as Weight))
    }
    fn remove_member_with_replacement() -> Weight {
        (76_572_000 as Weight)
            .saturating_add(T::DbWeight::get().reads(4 as Weight))
            .saturating_add(T::DbWeight::get().writes(5 as Weight))
    }
    fn remove_member_wrong_refund() -> Weight {
        (8_777_000 as Weight).saturating_add(T::DbWeight::get().reads(1 as Weight))
    }
}
use std::clone::Clone; use std::fs; use std::fs::File; use std::io::prelude::*; use std::path::Path; use chrono::Local; use crate::entities::bucket::Bucket; use crate::entities::object::Object; use crate::entities::user::User; #[derive(Clone)] pub struct Storage { base_path: String, buckets: Vec<Bucket>, objects: Vec<Object>, users: Vec<User>, } impl Storage { pub fn new(base_path: &str) -> Self { Self { base_path: base_path.to_string(), buckets: vec![], objects: vec![], users: vec![], } } pub fn new_user( &mut self, id: &str, display_name: &str, access_key: &str, secret_access_key: &str, ) { let user = User { id: id.to_string(), display_name: display_name.to_string(), access_key: access_key.to_string(), secret_access_key: secret_access_key.to_string(), }; self.users.push(user); } pub fn find_user(&self, id: &str) -> Option<&User> { self.users.iter().find(|&u| u.access_key == id) } pub fn create_bucket(&mut self, owner_id: &str, name: &str) { let path = Path::new(&self.base_path).join(name); if !path.exists() { fs::create_dir(path).unwrap(); } self.buckets.push(Bucket { name: name.to_string(), owner_id: owner_id.to_string(), object_count: 0, size: 0, creation_date: Local::now(), }); } pub fn list_buckets(&self, owner_id: &str) -> Vec<&Bucket> { let user = self.users.iter().find(|&u| u.id == owner_id).unwrap(); self.buckets .iter() .filter(|&b| b.owner_id == user.id) .collect() } pub fn list_objects(&self, bucket: &str) -> Vec<&Object> { self.objects .iter() .filter(|&o| o.bucket == bucket) .collect() } pub fn put_object(&mut self, user: &User, bucket: &str, object: &str, body: &[u8]) { let path = Path::new(&self.base_path).join(bucket).join(object); let mut file = File::with_options() .create(true) .truncate(true) .write(true) .open(path) .unwrap(); file.write_all(body).unwrap(); file.sync_all().unwrap(); self.objects.push(Object { key: object.to_string(), bucket: bucket.to_string(), owner_id: user.id.to_string(), size: body.len() as i64, last_modified: 
Local::now(), }) } pub fn get_object(&self, bucket: &str, object: &str, buf: &mut Vec<u8>) -> &Object { let path = Path::new(&self.base_path).join(bucket).join(object); let mut file = File::open(path).unwrap(); file.read_to_end(buf).unwrap(); self.objects .iter() .find(|&o| o.bucket == bucket && o.key == object) .unwrap() } pub fn delete_bucket(&mut self, bucket: &str) { let path = Path::new(&self.base_path).join(bucket); fs::remove_dir_all(path).unwrap(); self.buckets.retain(|b| b.name == bucket); } pub fn delete_object(&mut self, bucket: &str, object: &str) { let path = Path::new(&self.base_path).join(bucket).join(object); fs::remove_file(path).unwrap(); self.objects .retain(|o| o.bucket == bucket && o.key == object); } }
mod yahtzee;
mod scorecard;
mod test;
mod engine;
mod ai_keep_first_roll;
mod simulation;

use crate::yahtzee::{new_game, roll};
use crate::scorecard::{new_scorecard};
use std::time::Instant;
use crate::engine::score_roll;
use crate::ai_keep_first_roll::ai_play;
use std::env;
use crate::simulation::{new_simulation, run_sim, print_sim_results};

/// Entry point: runs a batch of Yahtzee games with the "keep first roll"
/// strategy and prints the aggregated simulation results.
fn main() {
    // Number of games played in one simulation batch.
    const GAMES: usize = 1000;

    let mut simulation = new_simulation(GAMES, ai_play);
    run_sim(&mut simulation);
    print_sim_results(&mut simulation);
}
#![no_std]
#![feature(test)]
#![feature(cfg_target_feature)]

extern crate test;

// FFI declarations for the C hash implementations this crate links against.
// The SIMD variants are only declared when the corresponding target feature
// is enabled at compile time.
extern "C" {
    fn SipHashC(key: *const u64, bytes: *const u8, size: u64) -> u64;
    fn SipHash13C(key: *const u64, bytes: *const u8, size: u64) -> u64;
    fn HighwayHash64_Portable(key: *const u64, bytes: *const u8, size: u64) -> u64;
    #[cfg(target_feature = "sse4.1")]
    fn HighwayHash64_SSE41(key: *const u64, bytes: *const u8, size: u64) -> u64;
    #[cfg(target_feature = "avx2")]
    fn HighwayHash64_AVX2(key: *const u64, bytes: *const u8, size: u64) -> u64;
    fn HighwayHash64(key: *const u64, bytes: *const u8, size: u64) -> u64;
}

/// An implementation of SipHash 2-4.
///
/// Returns a 64-bit hash of the given data bytes.
pub fn siphash(key: &[u64; 2], bytes: &[u8]) -> u64 {
    // SAFETY: `key` and `bytes` are valid borrowed slices for the pointers and
    // length passed; assumes the C symbol reads exactly 2 u64 key words and
    // `size` data bytes — TODO confirm against the C signature.
    unsafe { SipHashC(key.as_ptr(), bytes.as_ptr(), bytes.len() as u64) }
}

/// An implementation of SipHash 1-3.
///
/// Returns a 64-bit hash of the given data bytes.
pub fn siphash13(key: &[u64; 2], bytes: &[u8]) -> u64 {
    // SAFETY: same contract as `siphash` above.
    unsafe { SipHash13C(key.as_ptr(), bytes.as_ptr(), bytes.len() as u64) }
}

/// HighwayHash is a strong pseudorandom function with security claims. It is
/// intended as a safer general-purpose hash, about 4x faster than SipHash and
/// 10x faster than BLAKE2.
///
/// Returns a 64-bit hash of the given data bytes.
///
/// This is a portable implementation not relying on specific instruction sets.
pub fn highwayhash64_portable(key: &[u64; 4], bytes: &[u8]) -> u64 {
    // SAFETY: pointers/length come from valid borrowed slices; assumes the C
    // symbol reads exactly 4 u64 key words — TODO confirm.
    unsafe { HighwayHash64_Portable(key.as_ptr(), bytes.as_ptr(), bytes.len() as u64) }
}

/// HighwayHash is a strong pseudorandom function with security claims. It is
/// intended as a safer general-purpose hash, about 4x faster than SipHash and
/// 10x faster than BLAKE2.
///
/// Returns a 64-bit hash of the given data bytes.
///
/// This implementation relies on SSE4.1 instructions.
#[cfg(target_feature = "sse4.1")]
pub fn highwayhash64_sse41(key: &[u64; 4], bytes: &[u8]) -> u64 {
    // SAFETY: pointers/length come from valid borrowed slices; only compiled
    // when SSE4.1 is available, matching the declared symbol's requirement.
    unsafe { HighwayHash64_SSE41(key.as_ptr(), bytes.as_ptr(), bytes.len() as u64) }
}

/// HighwayHash is a strong pseudorandom function with security claims. It is
/// intended as a safer general-purpose hash, about 4x faster than SipHash and
/// 10x faster than BLAKE2.
///
/// Returns a 64-bit hash of the given data bytes.
///
/// This implementation relies on AVX2 instruction and is the fastest available
/// implementation.
#[cfg(target_feature = "avx2")]
pub fn highwayhash64_avx2(key: &[u64; 4], bytes: &[u8]) -> u64 {
    // SAFETY: pointers/length come from valid borrowed slices; only compiled
    // when AVX2 is available.
    unsafe { HighwayHash64_AVX2(key.as_ptr(), bytes.as_ptr(), bytes.len() as u64) }
}

/// HighwayHash is a strong pseudorandom function with security claims. It is
/// intended as a safer general-purpose hash, about 4x faster than SipHash and
/// 10x faster than BLAKE2.
///
/// Returns a 64-bit hash of the given data bytes.
///
/// This uses the fastest available implementation.
pub fn highwayhash64(key: &[u64; 4], bytes: &[u8]) -> u64 {
    // SAFETY: pointers/length come from valid borrowed slices; the C side
    // dispatches to its fastest implementation.
    unsafe { HighwayHash64(key.as_ptr(), bytes.as_ptr(), bytes.len() as u64) }
}

#[derive(Clone, Debug)]
/// std-Wrapper for `highwayhash`.
pub struct HighwayHasher {
    key: [u64; 4],  // hash key passed to every highwayhash64 call
    hash: u64,      // running combined hash, returned by `finish`
}

impl HighwayHasher {
    /// Creates a hasher with the all-zero key.
    pub fn new() -> HighwayHasher {
        HighwayHasher {
            key: [0, 0, 0, 0],
            hash: 0,
        }
    }

    /// Creates a hasher with an explicit 256-bit key.
    pub fn new_with_key(key: [u64; 4]) -> HighwayHasher {
        HighwayHasher {
            key: key,
            hash: 0,
        }
    }
}

impl core::default::Default for HighwayHasher {
    fn default() -> Self {
        HighwayHasher::new()
    }
}

impl core::hash::Hasher for HighwayHasher {
    fn finish(&self) -> u64 {
        self.hash
    }

    fn write(&mut self, msg: &[u8]) {
        let lhs = self.hash;
        let rhs = highwayhash64(&self.key, msg);
        // Ideally we would feed the previous state back through the hash
        // function itself, but that is not possible without allocation with
        // the current interface. So we use a way to combine hashes proposed in
        // https://stackoverflow.com/questions/5889238/why-is-xor-the-default-way-to-combine-hashes
        self.hash ^= rhs.wrapping_add(0x9e3779b97f4a7c16)
            .wrapping_add(lhs << 6)
            .wrapping_add(lhs >> 2);
    }
}

// Known-answer tests for the raw hash wrappers. The expected values were
// captured from an earlier run, not from an independent reference — see the
// TODOs below.
#[cfg(test)]
mod test_highway {
    use super::*;

    #[test]
    fn test_highwayhash64() {
        let key = [1, 2, 3, 4];
        let bytes = [12, 23, 234, 123, 123, 2, 4];
        assert_eq!(highwayhash64(&key, &bytes), 6732608382896043210); // TODO: Verify this is the correct value.
    }

    #[test]
    fn test_highwayhash64_portable() {
        let key = [1, 2, 3, 4];
        let bytes = [12, 23, 234, 123, 123, 2, 4];
        assert_eq!(highwayhash64_portable(&key, &bytes), 6732608382896043210); // TODO: Verify this is the correct value.
    }

    #[test]
    #[cfg(target_feature = "sse4.1")]
    fn test_highwayhash64_sse41() {
        let key = [1, 2, 3, 4];
        let bytes = [12, 23, 234, 123, 123, 2, 4];
        assert_eq!(highwayhash64_sse41(&key, &bytes), 6732608382896043210); // TODO: Verify this is the correct value.
    }

    #[test]
    #[cfg(target_feature = "avx2")]
    fn test_highwayhash64_avx2() {
        let key = [1, 2, 3, 4];
        let bytes = [12, 23, 234, 123, 123, 2, 4];
        assert_eq!(highwayhash64_avx2(&key, &bytes), 6732608382896043210); // TODO: Verify this is the correct value.
    }

    #[test]
    fn test_siphash() {
        let key = [1, 2];
        let bytes = [12, 23, 234, 123, 123, 2, 4];
        assert_eq!(siphash(&key, &bytes), 16073328535944263387); // TODO: Verify this is the correct value.
    }

    #[test]
    fn test_siphash13() {
        let key = [1, 2];
        let bytes = [12, 23, 234, 123, 123, 2, 4];
        assert_eq!(siphash13(&key, &bytes), 16934527632061698845); // TODO: Verify this is the correct value.
    }
}

// Property tests and benchmarks for the `core::hash::Hasher` wrapper,
// mirroring the std SipHasher test suite.
#[cfg(test)]
mod test_std {
    use core::hash::{Hash, Hasher};
    use test::{Bencher, black_box};

    use super::HighwayHasher;

    // Hash a value with the default (all-zero) key.
    fn hash<T: Hash>(x: &T) -> u64 {
        let mut st = HighwayHasher::new();
        x.hash(&mut st);
        st.finish()
    }

    // Hash a value with the first two key words set.
    fn hash_with_keys<T: Hash>(k1: u64, k2: u64, x: &T) -> u64 {
        let mut st = HighwayHasher::new_with_key([k1, k2, 0, 0]);
        x.hash(&mut st);
        st.finish()
    }

    // Hash raw bytes directly via Hasher::write (bypasses Hash impls).
    fn hash_bytes(x: &[u8]) -> u64 {
        let mut s = HighwayHasher::default();
        Hasher::write(&mut s, x);
        s.finish()
    }

    // usize is 32-bit on arm here: usize hashes like u32, not u64.
    #[test]
    #[cfg(target_arch = "arm")]
    fn test_hash_usize() {
        let val = 0xdeadbeef_deadbeef_u64;
        assert!(hash(&(val as u64)) != hash(&(val as usize)));
        assert_eq!(hash(&(val as u32)), hash(&(val as usize)));
    }

    // usize is 64-bit on x86_64: usize hashes like u64, not u32.
    #[test]
    #[cfg(target_arch = "x86_64")]
    fn test_hash_usize() {
        let val = 0xdeadbeef_deadbeef_u64;
        assert_eq!(hash(&(val as u64)), hash(&(val as usize)));
        assert!(hash(&(val as u32)) != hash(&(val as usize)));
    }

    // usize is 32-bit on x86: usize hashes like u32, not u64.
    #[test]
    #[cfg(target_arch = "x86")]
    fn test_hash_usize() {
        let val = 0xdeadbeef_deadbeef_u64;
        assert!(hash(&(val as u64)) != hash(&(val as usize)));
        assert_eq!(hash(&(val as u32)), hash(&(val as usize)));
    }

    #[test]
    fn test_hash_idempotent() {
        let val64 = 0xdeadbeef_deadbeef_u64;
        assert_eq!(hash(&val64), hash(&val64));
        let val32 = 0xdeadbeef_u32;
        assert_eq!(hash(&val32), hash(&val32));
    }

    // Every byte of a u64 must influence the hash.
    #[test]
    fn test_hash_no_bytes_dropped_64() {
        let val = 0xdeadbeef_deadbeef_u64;

        assert!(hash(&val) != hash(&zero_byte(val, 0)));
        assert!(hash(&val) != hash(&zero_byte(val, 1)));
        assert!(hash(&val) != hash(&zero_byte(val, 2)));
        assert!(hash(&val) != hash(&zero_byte(val, 3)));
        assert!(hash(&val) != hash(&zero_byte(val, 4)));
        assert!(hash(&val) != hash(&zero_byte(val, 5)));
        assert!(hash(&val) != hash(&zero_byte(val, 6)));
        assert!(hash(&val) != hash(&zero_byte(val, 7)));

        // Clears byte `byte` of `val`.
        fn zero_byte(val: u64, byte: usize) -> u64 {
            assert!(byte < 8);
            val & !(0xff << (byte * 8))
        }
    }

    // Every byte of a u32 must influence the hash.
    #[test]
    fn test_hash_no_bytes_dropped_32() {
        let val = 0xdeadbeef_u32;

        assert!(hash(&val) != hash(&zero_byte(val, 0)));
        assert!(hash(&val) != hash(&zero_byte(val, 1)));
        assert!(hash(&val) != hash(&zero_byte(val, 2)));
        assert!(hash(&val) != hash(&zero_byte(val, 3)));

        // Clears byte `byte` of `val`.
        fn zero_byte(val: u32, byte: usize) -> u32 {
            assert!(byte < 4);
            val & !(0xff << (byte * 8))
        }
    }

    // Field boundaries must be part of the hash: concatenations that produce
    // the same byte stream must still hash differently.
    #[test]
    fn test_hash_no_concat_alias() {
        let s = ("aa", "bb");
        let t = ("aabb", "");
        let u = ("a", "abb");

        assert!(s != t && t != u);
        assert!(hash(&s) != hash(&t) && hash(&s) != hash(&u));

        let u = [1, 0, 0, 0];
        let v = (&u[..1], &u[1..3], &u[3..]);
        let w = (&u[..], &u[4..4], &u[4..4]);

        assert!(v != w);
        assert!(hash(&v) != hash(&w));
    }

    #[bench]
    fn bench_str_under_8_bytes(b: &mut Bencher) {
        let s = "foo";
        b.iter(|| {
            assert_eq!(hash(&s), 7718513122671324293);
        })
    }

    #[bench]
    fn bench_str_of_8_bytes(b: &mut Bencher) {
        let s = "foobar78";
        b.iter(|| {
            assert_eq!(hash(&s), 951460047375270518);
        })
    }

    #[bench]
    fn bench_str_over_8_bytes(b: &mut Bencher) {
        let s = "foobarbaz0";
        b.iter(|| {
            assert_eq!(hash(&s), 18427267240069601439);
        })
    }

    #[bench]
    fn bench_long_str(b: &mut Bencher) {
        let s = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor \
                 incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud \
                 exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute \
                 irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla \
                 pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui \
                 officia deserunt mollit anim id est laborum.";
        b.iter(|| {
            assert_eq!(hash(&s), 10953479867975918892);
        })
    }

    #[bench]
    fn bench_u32(b: &mut Bencher) {
        let u = 162629500u32;
        let u = black_box(u);
        b.iter(|| {
            hash(&u)
        });
        b.bytes = 8;
    }

    #[bench]
    fn bench_u32_keyed(b: &mut Bencher) {
        let u = 162629500u32;
        let u = black_box(u);
        let k1 = black_box(0x1);
        let k2 = black_box(0x2);
        b.iter(|| {
            hash_with_keys(k1, k2, &u)
        });
        b.bytes = 8;
    }

    #[bench]
    fn bench_u64(b: &mut Bencher) {
        let u = 16262950014981195938u64;
        let u = black_box(u);
        b.iter(|| {
            hash(&u)
        });
        b.bytes = 8;
    }

    #[bench]
    fn bench_bytes_4(b: &mut Bencher) {
        let data = black_box([b' '; 4]);
        b.iter(|| {
            hash_bytes(&data)
        });
        b.bytes = 4;
    }

    #[bench]
    fn bench_bytes_7(b: &mut Bencher) {
        let data = black_box([b' '; 7]);
        b.iter(|| {
            hash_bytes(&data)
        });
        b.bytes = 7;
    }

    #[bench]
    fn bench_bytes_8(b: &mut Bencher) {
        let data = black_box([b' '; 8]);
        b.iter(|| {
            hash_bytes(&data)
        });
        b.bytes = 8;
    }

    #[bench]
    fn bench_bytes_a_16(b: &mut Bencher) {
        let data = black_box([b' '; 16]);
        b.iter(|| {
            hash_bytes(&data)
        });
        b.bytes = 16;
    }

    #[bench]
    fn bench_bytes_b_32(b: &mut Bencher) {
        let data = black_box([b' '; 32]);
        b.iter(|| {
            hash_bytes(&data)
        });
        b.bytes = 32;
    }

    #[bench]
    fn bench_bytes_c_128(b: &mut Bencher) {
        let data = black_box([b' '; 128]);
        b.iter(|| {
            hash_bytes(&data)
        });
        b.bytes = 128;
    }
}
use std::collections::HashMap;
use std::ffi::OsString;
use std::fmt::Write as _;
use std::str;
use std::sync::mpsc;
use std::thread;

use log::debug;
use log::info;
use serde::Deserialize;
use serde::Serialize;
use structopt::StructOpt;

use crate::github_events::{github_events as _github_events, Action, RawEvent, Type};

pub mod github_events;

/// Runtime configuration: repositories to inspect and an optional
/// GitHub personal access token.
#[derive(Debug, Deserialize, PartialEq, Eq, Serialize)]
pub struct Config {
    pub repos: Vec<String>,
    pub token: Option<String>,
}

// Command-line interface definition; parsed by structopt and converted
// into `Config` by `config_from_args`.
#[derive(StructOpt, Debug)]
#[structopt(
    about = "Display simple counters for GitHub Pull Requests",
    author = "Nicolas Kosinski <nicokosi@yahoo.com>",
    name = "Pullpito 🐙",
    version = "0.1.0"
)]
struct Options {
    #[structopt(
        long = "repository",
        help = "the name of a GitHub repository, i.e. 'python/peps'",
        required = true,
        takes_value = true,
        multiple = true,
        short = "r"
    )]
    repositories: Vec<String>,
    #[structopt(
        help = "an optional GitHub personal access token (required for private GitHub repositories)",
        long = "token",
        short = "t"
    )]
    token: Option<String>,
}

// Parses raw process arguments (argv[0] included) into a `Config`.
fn config_from_args(args: Vec<OsString>) -> Config {
    let options = Options::from_iter(args);
    Config {
        repos: options.repositories,
        token: options.token,
    }
}

/// Calls GitHub REST API in order to log pull requests' statistics in the standard output.
///
/// Spawns one thread per repository and collects the per-author event maps
/// over an mpsc channel, printing each repository's stats as it arrives
/// (arrival order, not argument order).
///
/// # Panics
///
/// Panics if the GitHub API request fails or if response cannot be deserialized.
pub fn log_github_events(os: Vec<OsString>) {
    env_logger::init();
    let config = config_from_args(os);
    info!(
        "Computing stats for GitHub repos '{:?}' (with token: {})",
        config.repos,
        config.token.is_some()
    );

    let (sender, receiver) = mpsc::channel();
    let number_of_repos = config.repos.len();
    for repo in config.repos {
        debug!("Query stats for GitHub repo {:?}", repo);
        let sender = mpsc::Sender::clone(&sender);
        let token = config.token.clone();
        thread::spawn(move || {
            sender
                .send(RepoEvents {
                    repo: repo.clone(),
                    events_per_author: events_per_author(_github_events(&repo, &token).unwrap()),
                })
                .unwrap();
        });
    }
    for _ in 0..number_of_repos {
        let repo_events = receiver.recv().unwrap();
        debug!("Print stats for GitHub repo {:?}", repo_events.repo);
        println!(
            "{}",
            print_events_per_author(&repo_events.repo, &repo_events.events_per_author)
        );
    }
}

// Message passed from worker threads back to the printer loop.
struct RepoEvents {
    repo: String,
    events_per_author: HashMap<String, Vec<RawEvent>>,
}

// Keeps only pull-request-related events and groups them by author login.
fn events_per_author(events: Vec<RawEvent>) -> HashMap<String, Vec<RawEvent>> {
    events
        .into_iter()
        .filter(|e| {
            e.event_type == Type::PullRequestEvent
                || e.event_type == Type::PullRequestReviewCommentEvent
                || e.event_type == Type::IssueCommentEvent
        })
        .fold(HashMap::new(), |mut acc, event: RawEvent| {
            (*acc
                .entry(event.actor.login.clone())
                .or_insert_with(Vec::new))
            .push(event);
            acc
        })
}

// Renders the opened/commented/closed per-author counters as one report string.
fn print_events_per_author(
    repo: &str,
    events_per_author: &HashMap<String, Vec<RawEvent>>,
) -> String {
    let mut out: String = format!("pull requests for {repo:?} ->\n");
    out.push_str(" opened per author:\n");
    print_pull_request_events_per_author(events_per_author, &Action::opened, &mut out);
    out.push_str(" commented per author:\n");
    print_pull_request_events_per_author(events_per_author, &Action::created, &mut out);
    out.push_str(" closed per author:\n");
    print_pull_request_events_per_author(events_per_author, &Action::closed, &mut out);
    out
}

// Appends one "author: count" line per author that has at least one
// PullRequestEvent matching `payload_action`. Iteration order follows the
// HashMap, so line order is unspecified.
fn print_pull_request_events_per_author(
    events_per_author: &HashMap<String, Vec<RawEvent>>,
    payload_action: &Action,
    out: &mut String,
) {
    for (author, events) in events_per_author.iter() {
        let matching_pull_requests = events
            .iter()
            .filter(|e| {
                e.event_type == Type::PullRequestEvent && &e.payload.action == payload_action
            })
            .count();
        if matching_pull_requests > 0 {
            let _ = writeln!(out, " {author}: {matching_pull_requests}");
        }
    }
}

#[cfg(test)]
mod tests {
    use std::collections::HashMap;

    use chrono::{TimeZone, Utc};

    use crate::config_from_args;
    use crate::events_per_author;
    use crate::print_events_per_author;
    use crate::Config;
    use crate::OsString;

    use super::github_events::*;

    #[test]
    fn parse_args_with_a_long_repo_param() {
        assert_eq!(
            config_from_args(vec![
                OsString::from("pullpito"),
                OsString::from("--repository"),
                OsString::from("fakeRepo"),
            ]),
            Config {
                repos: vec!["fakeRepo".to_string()],
                token: None,
            },
        );
    }

    #[test]
    fn parse_args_with_a_long_repo_param_and_a_long_token_param() {
        assert_eq!(
            config_from_args(vec![
                OsString::from("pullpito"),
                OsString::from("--repository"),
                OsString::from("fakeRepo"),
                OsString::from("--token"),
                OsString::from("fakeToken"),
            ]),
            Config {
                repos: vec!["fakeRepo".to_string()],
                token: Some("fakeToken".to_string()),
            }
        );
    }

    #[test]
    fn parse_args_with_two_long_repo_params_and_a_long_token_param() {
        assert_eq!(
            config_from_args(vec![
                OsString::from("pullpito"),
                OsString::from("--repository"),
                OsString::from("fakeRepo1"),
                OsString::from("--repository"),
                OsString::from("fakeRepo2"),
                OsString::from("--token"),
                OsString::from("fakeToken"),
            ]),
            Config {
                repos: vec!["fakeRepo1".to_string(), "fakeRepo2".to_string()],
                token: Some("fakeToken".to_string()),
            }
        );
    }

    #[test]
    fn parse_args_with_two_short_repo_params_and_a_short_token_param() {
        assert_eq!(
            config_from_args(vec![
                OsString::from("pullpito"),
                OsString::from("-r"),
                OsString::from("fakeRepo1"),
                OsString::from("-r"),
                OsString::from("fakeRepo2"),
                OsString::from("-t"),
                OsString::from("fakeToken"),
            ]),
            Config {
                repos: vec!["fakeRepo1".to_string(), "fakeRepo2".to_string()],
                token: Some("fakeToken".to_string()),
            }
        );
    }

    #[test]
    fn printable_with_opened_pull_request() {
        let mut events: HashMap<String, Vec<RawEvent>> = HashMap::new();
        events.insert(
            "alice".to_string(),
            vec![RawEvent {
                actor: Actor {
                    login: "alice".to_string(),
                },
                payload: Payload {
                    action: Action::opened,
                },
                event_type: Type::PullRequestEvent,
                created_at: Utc.with_ymd_and_hms(2016, 12, 1, 16, 26, 43).unwrap(),
            }],
        );
        let printable = print_events_per_author("my-org/my-repo", &events);
        assert!(printable.contains("pull requests for \"my-org/my-repo\" ->"));
        assert!(printable.contains("opened per author:\n alice: 1\n"));
        assert!(printable.contains("opened per author:\n alice: 1\n"));
        assert!(printable.contains("commented per author:\n closed per author:\n"));
    }

    #[test]
    fn compute_events_per_author() {
        let events_per_author = events_per_author(vec![RawEvent {
            actor: Actor {
                login: "alice".to_string(),
            },
            payload: Payload {
                action: Action::opened,
            },
            event_type: Type::PullRequestEvent,
            created_at: Utc.with_ymd_and_hms(2016, 12, 1, 16, 26, 43).unwrap(),
        }]);
        // NOTE(review): `.get("alice").iter().len()` only checks the Option is
        // Some (Option::iter yields at most one item); it does not assert the
        // Vec holds exactly one event.
        assert_eq!(events_per_author.get("alice").iter().len(), 1);
    }
}
//! Physical Frame structures and functionality

use bootloader::boot_info::{MemoryRegion, MemoryRegionKind, MemoryRegions};
use spin::{Mutex, Once};
use x86_64::structures::paging::mapper::MapToError;
use x86_64::{PhysAddr, VirtAddr};

/// A global frame allocator initialized from [init](PhysFrameAllocator::init)
pub static FRAME_ALLOCATOR: Once<PhysFrameAllocatorWrapper> = Once::new();
pub static BYTES_AVAILABLE_RAM: Once<u64> = Once::new();

// Virtual base address at which the bitmap frames are mapped (see
// `map_bit_frames`).
const BITMAP_START: usize = 0xFFFF_FF00_0000_0000;

/// A structure that holds the usable memory region from BIOS and a corresponding
/// bitmap to track allocated frames with associated functions
pub struct PhysFrameAllocator {
    /// The region of memory marked as usable for OS and User use
    pub usable_memory_region: MemoryRegion,
    /// The region of memory reserved for bit map manipulation for
    /// preserving which frames have been allocated or deallocated
    pub bit_map_region: MemoryRegion,
}

impl PhysFrameAllocator {
    /// This function initializes the physical frame allocator for the system
    ///
    /// We take the usable memory region from the bootloader and divide it into two new regions,
    /// the first being a bit map region that will be able to indicate wether a frame is free or used
    /// in the new usable region. And a new usable region minus the consumed bit map region frames.
    /// This function will not assign a new global frame allocator again once initialized
    pub fn init(memory_regions: &'static MemoryRegions) {
        let mut usable_memory_region = MemoryRegion::empty();
        let mut bit_map_region = MemoryRegion::empty();
        usable_memory_region.kind = MemoryRegionKind::Usable;
        let mut num_bit_map_frames: u64 = 0;
        // NOTE(review): the assignments below run once per usable region, so
        // only the LAST usable region reported by the bootloader is kept;
        // earlier usable regions are silently discarded — confirm intended.
        for memory_region in memory_regions.iter() {
            if memory_region.kind == MemoryRegionKind::Usable {
                // Calculate the number of frames required for the bitmap + 1
                // (>> 24 means one 4 KiB bitmap frame per 16 MiB of region).
                num_bit_map_frames = ((memory_region.end - memory_region.start) >> 24) + 1;

                // Assign the bitmap region (carved off the front of the usable region)
                bit_map_region.start = memory_region.start;
                bit_map_region.end = memory_region.start + (num_bit_map_frames << 12) - 1;

                // Assign the new usable memory region
                usable_memory_region.start = memory_region.start + (num_bit_map_frames << 12);
                usable_memory_region.end = memory_region.end;
            }
        }

        // Identity Map the physical bit map frames to pages
        map_bit_frames(bit_map_region, num_bit_map_frames).unwrap();

        // Init global frame allocator
        FRAME_ALLOCATOR.call_once(|| {
            PhysFrameAllocatorWrapper::new(Mutex::new(PhysFrameAllocator {
                usable_memory_region,
                bit_map_region,
            }))
        });

        BYTES_AVAILABLE_RAM.call_once(|| usable_memory_region.end - usable_memory_region.start);
    }

    /// Get the physical frame from the bitmap index
    fn frame_from_bit_index(&self, index: u64) -> PhysFrame {
        // index << 12: one 4 KiB frame per bitmap index.
        let frame_addr = PhysAddr::new(self.usable_memory_region.start + (index << 12));
        PhysFrame::<Size4KiB>::containing_address(frame_addr)
    }

    /// Allocate a specific frame from an addr alligned to the nearest 4KiB frame, will return none if
    /// frame is in use
    pub fn allocate_frame_nth(&self, start: PhysAddr) -> Option<PhysFrame> {
        let start_frame_addr = start.align_down(Size4KiB::SIZE);
        let usable_addr = start_frame_addr.as_u64() - self.usable_memory_region.start;
        // Get the index of the frame in the bitmap
        let index = usable_addr >> 12;
        let u64_byte = index >> 6;
        let bit_index = index % 64;
        // NOTE(review): this builds the pointer from `bit_map_region.start`,
        // a PHYSICAL address, whereas `allocate_frame`/`deallocate_frame`
        // access the bitmap through the BITMAP_START virtual mapping — this
        // looks inconsistent and is likely a bug unless the region is also
        // identity-mapped; confirm.
        let u64_byte_ptr = (self.bit_map_region.start + (u64_byte << 6)) as *mut u64;
        unsafe {
            // NOTE(review): `*ptr >> bit_index == 1` is true only when the bit
            // is SET and every higher bit is clear — under the convention used
            // by `allocate_frame` (set bit = allocated) that means "already
            // allocated", the opposite of what "allocate if free" needs.
            // A single-bit test would be `(*ptr >> bit_index) & 1`.
            if *u64_byte_ptr >> bit_index == 1 {
                // NOTE(review): `!(0 << bit_index)` is `!0` (all ones), so this
                // `&=` is a no-op; marking the frame allocated would be
                // `*u64_byte_ptr |= 1 << bit_index`.
                *u64_byte_ptr &= !(0 << bit_index);
                Some(PhysFrame::from_start_address(start_frame_addr).unwrap())
            } else {
                None
            }
        }
    }
}

// TODO: CONSIDER TURNING BITMAP HANDLING ROUTINES INTO A STRUCT
use x86_64::structures::paging::{
    FrameAllocator, FrameDeallocator, Mapper, Page, PageSize, PageTableFlags, PhysFrame, Size4KiB,
};

use crate::KERNEL_PAGE_TABLE;

unsafe impl FrameAllocator<Size4KiB> for PhysFrameAllocator {
    /// Allocate the next available frame in the usable memory region.
    /// We navigate the bitmap for a empty bit and return the Physical Frame
    /// if there, else no frames are available and return None
    fn allocate_frame(&mut self) -> Option<PhysFrame> {
        let mut bm_ptr = BITMAP_START as *mut u64;
        while bm_ptr
            < (BITMAP_START as u64 + (self.bit_map_region.end - self.bit_map_region.start))
                as *mut u64
        {
            let mut quadword = unsafe { *bm_ptr } as u64;
            let qw_clone = quadword;
            let mut index = 0;
            // Skip fully-allocated quadwords.
            if quadword != 0xFFFFFFFFFFFFFFFF {
                while index < 64 {
                    if quadword & 1 == 0 {
                        // Usable PhysFrame: set the bit to mark it allocated.
                        unsafe { *bm_ptr = qw_clone | (0x01 << index) };
                        // Return the frame containing the physical address of the bit in the bitmap
                        // (byte offset of the quadword + bit index; consistent
                        // with the `index >> 6` / `<< 6` mapping used by
                        // `deallocate_frame`, since only every 8th quadword of
                        // the bitmap region is visited — see `add(8)` below).
                        return Some(
                            self.frame_from_bit_index(bm_ptr as u64 - BITMAP_START as u64 + index),
                        );
                    }
                    index += 1;
                    quadword >>= 1;
                }
            }
            // Point to next quadword in the map
            // NOTE(review): pointer `add` counts in u64 ELEMENTS, so `add(8)`
            // advances 64 bytes, using only 1 in 8 quadwords of the bitmap.
            // This matches the `<< 6` byte-offset math elsewhere, but wastes
            // 7/8 of the bitmap storage — confirm this stride is intentional.
            unsafe { bm_ptr = bm_ptr.add(8) };
        }
        None
    }
}

impl FrameDeallocator<Size4KiB> for PhysFrameAllocator {
    /// Deallocate a frame in no longer in use
    ///
    /// This is done by clearing the bit in the bit_map to indicate that this
    /// frame is no longer in use
    ///
    /// # Safety
    /// The user must validate that the frame is no longer in use before
    /// deallocation
    unsafe fn deallocate_frame(&mut self, frame: PhysFrame<Size4KiB>) {
        // Get addr of frame along usable_mem_region
        let usable_addr = frame.start_address().as_u64() - self.usable_memory_region.start;
        // Get the index of the frame in the bitmap
        let index = usable_addr >> 12;
        let u64_byte = index >> 6;
        let bit_index = index % 64;
        // `<< 6` byte offset matches the sparse quadword stride used in
        // `allocate_frame` (see NOTE there).
        let u64_byte_ptr = (BITMAP_START as u64 + (u64_byte << 6)) as *mut u64;
        *u64_byte_ptr &= !(1 << bit_index);
    }
}

/// This function is called during the initialization of the frame allocator to identity map the bit map frames
fn map_bit_frames(
    bit_map_region: MemoryRegion,
    num_bit_map_frames: u64,
) -> Result<(), MapToError<Size4KiB>> {
    // Create pages in the bit map region
    let page_range = {
        let bit_map_start = VirtAddr::new(BITMAP_START as u64);
        let bit_map_end =
            VirtAddr::new(BITMAP_START as u64 + (bit_map_region.end - bit_map_region.start));
        let bit_map_start_page = Page::<Size4KiB>::containing_address(bit_map_start);
        let bit_map_end_page = Page::<Size4KiB>::containing_address(bit_map_end);
        Page::range_inclusive(bit_map_start_page, bit_map_end_page)
    };

    let bm_range = bit_map_region.start..bit_map_region.end;
    let frame_addresses = bm_range.step_by(4096);
    let mut frames =
        frame_addresses.map(|addr| PhysFrame::<Size4KiB>::containing_address(PhysAddr::new(addr)));

    // No real allocator is available this early; map_to must not need to
    // allocate intermediate page-table frames (see EmptyFrameAllocator).
    let mut empty_allocator = EmptyFrameAllocator;

    assert_eq!(page_range.count() as u64, num_bit_map_frames);
    for page in page_range {
        let frame = frames.next().ok_or(MapToError::FrameAllocationFailed)?;
        let flags = PageTableFlags::PRESENT | PageTableFlags::WRITABLE;
        unsafe {
            KERNEL_PAGE_TABLE
                .wait()
                .unwrap()
                .lock()
                .map_to(page, frame, flags, &mut empty_allocator)
                .unwrap()
                .flush()
        };
        // Sanity check: the mapping round-trips.
        assert_eq!(
            KERNEL_PAGE_TABLE
                .wait()
                .unwrap()
                .lock()
                .translate_page(page)
                .unwrap(),
            frame
        );
    }
    Ok(())
}

/// Wrapper struct for implementing FrameAllocator traits around the mutex type
pub struct PhysFrameAllocatorWrapper {
    pub inner: Mutex<PhysFrameAllocator>,
}

impl PhysFrameAllocatorWrapper {
    /// Return a new [PhysFrameAllocatorWrapper] object with a mutex wrapped in a PhysFrameAllocator
    pub fn new(inner: Mutex<PhysFrameAllocator>) -> Self {
        Self { inner }
    }
}

/// Wrapper implementation for implementing the FrameAllocator trait
unsafe impl FrameAllocator<Size4KiB> for &PhysFrameAllocatorWrapper {
    /// Obtains mutex lock and calls inner [`Allocate Frame`](PhysFrameAllocator::allocate_frame)
    fn allocate_frame(&mut self) -> Option<PhysFrame<Size4KiB>> {
        self.inner.lock().allocate_frame()
    }
}

/// Wrapper implementation for implementing the FrameDeallocator trait
impl FrameDeallocator<Size4KiB> for &PhysFrameAllocatorWrapper {
    /// Obtains mutex lock and calls inner [`Deallocate Frame`](PhysFrameAllocator::deallocate_frame)
    unsafe fn deallocate_frame(&mut self, frame: PhysFrame<Size4KiB>) {
        self.inner.lock().deallocate_frame(frame)
    }
}

/// Used once for allocating the bitmap frames because the map_to function requires an allocator
/// encase more frame allocations are required for more table entries which is impossible
/// at the early stage of execution
#[doc(hidden)]
pub struct EmptyFrameAllocator;

#[doc(hidden)]
unsafe impl FrameAllocator<Size4KiB> for EmptyFrameAllocator {
    /// Returns None, does not affect any memory state
    fn allocate_frame(&mut self) -> Option<PhysFrame> {
        None
    }
}
use super::pages::*;
use crate::data::MealPlans;
use oikos_api::components::schemas::{RecipeList, ShoppingList};
use serde::{Deserialize, Serialize};
use yew::prelude::*;
use yew_router::{components::RouterAnchor, prelude::*};
use yew_state::{Area, SharedHandle, SharedStateComponent, Storable};

/// Client-side routes of the application; each variant maps a URL pattern
/// to a page component (see `RootComponent::switch`).
#[derive(Clone, Debug, Switch)]
pub enum AppRoute {
    #[to = "/recipes/{}"]
    Recipe(String),
    #[to = "/recipes"]
    RecipeList,
    #[to = "/new_recipe"]
    NewRecipe,
    #[to = "/shopping"]
    ShoppingList,
    #[to = "/search"]
    Search,
    #[to = "/auth"]
    Auth,
    #[to = "/token?code={}"]
    Token(String),
    // "/!" is the catch-all/root route, so Planning is the default page.
    #[to = "/!"]
    Planning,
}

pub type AppRouter = Router<AppRoute>;
pub type AppAnchor = RouterAnchor<AppRoute>;

/// Shared application data; `Storable` below persists it in browser
/// local storage via yew_state.
#[derive(Default, Clone, PartialEq, Serialize, Deserialize)]
pub struct DataState {
    pub recipes: Option<RecipeList>,
    pub meal_plans: Option<MealPlans>,
    pub shopping_list: Option<ShoppingList>,
}

impl Storable for DataState {
    // Persist state in the browser's local-storage area.
    fn area() -> Area {
        Area::Local
    }
}

pub type DataHandle = SharedHandle<DataState>;

/// Top-level component: owns the shared-state handle and hosts the router.
pub struct RootComponent {
    _handle: DataHandle,
}

impl RootComponent {
    /// Renders the page component corresponding to the matched route.
    fn switch(switch: AppRoute) -> Html {
        match switch {
            AppRoute::Recipe(id) => {
                html! { <RecipePage recipe_id=id /> }
            }
            AppRoute::RecipeList => {
                html! { <RecipeListPage /> }
            }
            AppRoute::NewRecipe => {
                html! { <NewRecipePage /> }
            }
            AppRoute::ShoppingList => {
                html! { <ShoppingListPage /> }
            }
            AppRoute::Planning => {
                html! { <PlanningPage /> }
            }
            AppRoute::Auth => {
                html! { <AuthPage /> }
            }
            AppRoute::Token(code) => {
                html! { <TokenPage code=code/> }
            }
            AppRoute::Search => {
                html! { <SearchPage /> }
            }
        }
    }
}

impl Component for RootComponent {
    type Message = ();
    type Properties = DataHandle;

    fn create(handle: Self::Properties, _link: ComponentLink<Self>) -> Self {
        Self { _handle: handle }
    }

    // The wasm logger is initialized once, on the first render only.
    fn rendered(&mut self, first_render: bool) {
        if first_render {
            wasm_logger::init(wasm_logger::Config::default());
        }
    }

    fn update(&mut self, _msg: Self::Message) -> ShouldRender {
        true
    }

    fn change(&mut self, _props: Self::Properties) -> ShouldRender {
        false
    }

    fn view(&self) -> Html {
        html! {
            <AppRouter render=AppRouter::render(Self::switch) />
        }
    }
}

// Wrap the root in the shared-state machinery so it receives a DataHandle.
pub type Root = SharedStateComponent<RootComponent>;
use std::convert::TryFrom; #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, sqlx::Type)] #[sqlx(rename_all = "camelCase")] pub enum ItemType { Cast, Location, Storyline, } impl ItemType { #[inline] pub fn as_str(&self) -> &'static str { match self { Self::Cast => "cast", Self::Location => "location", Self::Storyline => "storyline", } } } impl<'a> TryFrom<&'a str> for ItemType { type Error = &'a str; #[inline] fn try_from(item_type: &'a str) -> Result<Self, Self::Error> { Ok(match item_type { "cast" => Self::Cast, "location" => Self::Location, "storyline" => Self::Storyline, _ => return Err(item_type), }) } }
use std::cmp::Ordering;
use std::io;
use std::collections::{BTreeMap, BTreeSet, BinaryHeap};

use crate::base::Part;

// Ordered list of positions stepped through (first element is the first step).
type Path = Vec<Position>;
// The whole grid; BTreeMap keyed by Position gives "reading order" iteration.
type Cavern = BTreeMap<Position, Tile>;
// Living units, keyed by their current position.
type Units = BTreeMap<Position, Unit>;

/// Part 1: outcome = number of full rounds * sum of surviving hitpoints.
pub fn part1(r: &mut dyn io::Read) -> Result<String, String> {
    solve(r, Part::One)
}

/// Part 2: same outcome, for the smallest elf attack power at which no elf dies.
pub fn part2(r: &mut dyn io::Read) -> Result<String, String> {
    solve(r, Part::Two)
}

fn solve(r: &mut dyn io::Read, part: Part) -> Result<String, String> {
    let mut input = String::new();
    r.read_to_string(&mut input).map_err(|e| e.to_string())?;
    let (cavern, units) = parse_input(&input);
    match part {
        Part::One => {
            let (full_rounds, _cavern_after_combat, units_after_combat) = combat(&cavern, &units);
            let hitpoints_sum = units_after_combat
                .values()
                .map(|unit| unit.hitpoints as usize)
                .sum::<usize>();
            let outcome = full_rounds * hitpoints_sum;
            Ok(outcome.to_string())
        }
        Part::Two => {
            // Try elf attack powers 3, 4, 5, ... until every elf survives.
            let (full_rounds, units_after_combat) = (3..)
                .filter_map(|power| {
                    let (full_rounds, all_elves_alive, _cavern_after_combat, units_after_combat) =
                        combat_until_elf_dies(power, &cavern, &units);
                    if all_elves_alive {
                        Some((full_rounds, units_after_combat))
                    } else {
                        None
                    }
                })
                .next()
                .unwrap();
            let hitpoints_sum = units_after_combat
                .values()
                .map(|unit| unit.hitpoints as usize)
                .sum::<usize>();
            let outcome = full_rounds * hitpoints_sum;
            Ok(outcome.to_string())
        }
    }
}

/// Grid coordinate; Ord (below) sorts row-first, i.e. puzzle "reading order".
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
struct Position {
    row: isize,
    col: isize,
}

impl Ord for Position {
    fn cmp(&self, other: &Position) -> Ordering {
        match self.row.cmp(&other.row) {
            Ordering::Equal => self.col.cmp(&other.col),
            ordering => ordering,
        }
    }
}

impl PartialOrd for Position {
    fn partial_cmp(&self, other: &Position) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
enum Tile {
    Wall,
    Open,
    Unit(Unit),
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
struct Unit {
    unit_type: UnitType,
    hitpoints: i64,
    attack_power: i64,
}

impl Unit {
    /// New unit with the puzzle's starting stats: 200 HP, attack power 3.
    fn new(unit_type: UnitType) -> Self {
        Unit {
            unit_type,
            hitpoints: 200,
            attack_power: 3,
        }
    }
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
enum UnitType {
    Goblin,
    Elf,
}

/// Parses the ASCII map: '#' wall, '.' open, 'G' goblin, 'E' elf.
/// Units are recorded both in the cavern (as tiles) and in the units map.
fn parse_input(input: &str) -> (Cavern, Units) {
    let mut cavern = Cavern::new();
    let mut units = Units::new();
    for (row, line) in input.lines().enumerate() {
        for (col, c) in line.chars().enumerate() {
            let position = Position {
                row: row as isize,
                col: col as isize,
            };
            let opt_unit = match c {
                'G' => Some(Unit::new(UnitType::Goblin)),
                'E' => Some(Unit::new(UnitType::Elf)),
                _ => None,
            };
            let tile = match c {
                '#' => Tile::Wall,
                '.' => Tile::Open,
                'G' | 'E' => Tile::Unit(opt_unit.unwrap()),
                _ => unreachable!(),
            };
            cavern.insert(position, tile);
            if let Some(unit) = opt_unit {
                units.insert(position, unit);
            }
        }
    }
    (cavern, units)
}

/// Debug helper: prints the map with per-row unit hitpoint summaries.
#[allow(dead_code)]
fn print_cavern(cavern: &Cavern, units: &Units) {
    let mut last_row = 0;
    for (&position, &tile) in cavern.iter() {
        if position.row > last_row {
            print!("   ");
            for (unit_position, unit) in units.iter() {
                if unit_position.row != last_row {
                    continue;
                }
                let c = match unit.unit_type {
                    UnitType::Goblin => 'G',
                    UnitType::Elf => 'E',
                };
                print!("{}({}), ", c, unit.hitpoints);
            }
            println!();
        }
        last_row = position.row;
        let c = match tile {
            Tile::Wall => '#',
            Tile::Open => '.',
            Tile::Unit(unit) => match unit.unit_type {
                UnitType::Goblin => 'G',
                UnitType::Elf => 'E',
            },
        };
        print!("{}", c);
    }
    println!();
}

/// The four orthogonal neighbours of a position (no bounds filtering).
fn adjacent_positions(position: Position) -> BTreeSet<Position> {
    [(-1, 0), (0, -1), (1, 0), (0, 1)]
        .into_iter()
        .map(|(drow, dcol)| Position {
            row: position.row + drow,
            col: position.col + dcol,
        })
        .collect()
}

/// Neighbours that exist on the map and are currently open.
fn in_range(position: Position, cavern: &Cavern) -> BTreeSet<Position> {
    adjacent_positions(position)
        .into_iter()
        .filter(|adjacent| {
            cavern.contains_key(&adjacent) && *cavern.get(&adjacent).unwrap() == Tile::Open
        })
        .collect()
}

/// Positions of all living units of the given type.
fn find_target_positions(target_unit_type: UnitType, units: &Units) -> BTreeSet<Position> {
    units
        .iter()
        .filter_map(|(&unit_position, unit)| {
            if unit.unit_type == target_unit_type {
                Some(unit_position)
            } else {
                None
            }
        })
        .collect()
}

/// Runs combat with elves boosted to `elf_attack_power`, stopping early as
/// soon as any elf dies (or combat ends). Returns (full rounds, whether all
/// elves survived, final cavern, final units).
fn combat_until_elf_dies(
    elf_attack_power: i64,
    cavern: &Cavern,
    units: &Units,
) -> (usize, bool, Cavern, Units) {
    let mut current_cavern = cavern.clone();
    let mut current_units = units.clone();
    // Boost every elf to the requested attack power.
    for (_unit_position, unit) in current_units.iter_mut() {
        if unit.unit_type != UnitType::Elf {
            continue;
        }
        unit.attack_power = elf_attack_power;
    }
    let count_elves = current_units
        .values()
        .filter(|unit| unit.unit_type == UnitType::Elf)
        .count();
    let mut full_rounds = 0;
    let mut current_count_elves = count_elves;
    let mut combat_ended = false;
    while !combat_ended && current_count_elves == count_elves {
        let (cavern_after_round, units_after_round, combat_ended_during_round) =
            round(&current_cavern, &current_units);
        current_cavern = cavern_after_round;
        current_units = units_after_round;
        combat_ended = combat_ended_during_round;
        current_count_elves = current_units
            .values()
            .filter(|unit| unit.unit_type == UnitType::Elf)
            .count();
        // A round only counts as "full" if combat did not end during it.
        if !combat_ended {
            full_rounds += 1;
        }
    }
    (
        full_rounds,
        current_count_elves == count_elves,
        current_cavern,
        current_units,
    )
}

/// Runs combat to completion. Returns (full rounds, final cavern, final units).
fn combat(cavern: &Cavern, units: &Units) -> (usize, Cavern, Units) {
    let mut current_cavern = cavern.clone();
    let mut current_units = units.clone();
    let mut full_rounds = 0;
    let mut combat_ended = false;
    while !combat_ended {
        let (cavern_after_round, units_after_round, combat_ended_during_round) =
            round(&current_cavern, &current_units);
        current_cavern = cavern_after_round;
        current_units = units_after_round;
        combat_ended = combat_ended_during_round;
        if !combat_ended {
            full_rounds += 1;
        }
    }
    (full_rounds, current_cavern, current_units)
}

/// One round: every unit takes a turn in reading order (of their positions at
/// the start of the round). Returns true as the last element if combat ended.
fn round(cavern: &Cavern, units: &Units) -> (Cavern, Units, bool) {
    let mut current_cavern = cavern.clone();
    let mut current_units = units.clone();
    for &acting_position in units.keys() {
        if !current_units.contains_key(&acting_position) {
            // The unit that would have acted has died.
            continue;
        }
        let (cavern_after_turn, units_after_turn, combat_ended) =
            turn(acting_position, &current_cavern, &current_units);
        if combat_ended {
            return (cavern_after_turn, units_after_turn, true);
        }
        current_cavern = cavern_after_turn;
        current_units = units_after_turn;
    }
    (current_cavern, current_units, false)
}

/// One unit's turn: attack if already in range, otherwise move toward the
/// nearest reachable target square and attack if now in range.
fn turn(acting_position: Position, cavern: &Cavern, units: &Units) -> (Cavern, Units, bool) {
    let acting_unit = *units.get(&acting_position).unwrap();
    let target_unit_type = match acting_unit.unit_type {
        UnitType::Goblin => UnitType::Elf,
        UnitType::Elf => UnitType::Goblin,
    };
    let target_positions = find_target_positions(target_unit_type, units);
    if target_positions.is_empty() {
        // There are no targets at all, so combat ends without anything being changed.
        return (cavern.clone(), units.clone(), true);
    }
    // There are still targets left.
    if let Some((cavern_after_attacking, units_after_attacking)) =
        perform_attack_if_possible(acting_position, &target_positions, cavern, units)
    {
        // The acting unit could perform an attack, so end the turn and return the results of
        // attacking.
        return (cavern_after_attacking, units_after_attacking, false);
    }
    // The acting unit is not currently in range of attacking anyone, and will therefore try to
    // move.
    let in_range_positions = target_positions
        .iter()
        .flat_map(|&target_position| in_range(target_position, cavern))
        .collect::<BTreeSet<Position>>();
    if in_range_positions.is_empty() {
        // There are no open squares adjacent to any of the targets, so the acting unit cannot
        // move, ending its turn without anything being changed.
        return (cavern.clone(), units.clone(), false);
    }
    // The acting tries to move (might not be able to move due to being locked in).
    let (position_after_moving, cavern_after_moving, units_after_moving) =
        perform_move(acting_position, &in_range_positions, cavern, units);
    let mut new_cavern = cavern_after_moving.clone();
    let mut new_units = units_after_moving.clone();
    // After moving, the acting unit might be able to attack a target.
    if let Some((cavern_after_attacking, units_after_attacking)) = perform_attack_if_possible(
        position_after_moving,
        &target_positions,
        &cavern_after_moving,
        &units_after_moving,
    ) {
        new_cavern = cavern_after_attacking;
        new_units = units_after_attacking;
    }
    // The unit has moved and possibly attacked, ending its turn.
    (new_cavern, new_units, false)
}

/// Attacks if any target is orthogonally adjacent; returns None otherwise.
fn perform_attack_if_possible(
    acting_position: Position,
    target_positions: &BTreeSet<Position>,
    cavern: &Cavern,
    units: &Units,
) -> Option<(Cavern, Units)> {
    let adjacent_positions_to_acting = adjacent_positions(acting_position);
    // Set intersection: targets standing next to the acting unit.
    let attackable_positions = target_positions & &adjacent_positions_to_acting;
    if attackable_positions.is_empty() {
        None
    } else {
        let acting_unit = *units.get(&acting_position).unwrap();
        Some(perform_attack(
            acting_unit,
            &attackable_positions,
            cavern,
            units,
        ))
    }
}

/// Attacks the adjacent target with the fewest hitpoints (ties broken by
/// reading order, since min_by_key keeps the first minimum of the ordered set).
fn perform_attack(
    acting_unit: Unit,
    attackable_positions: &BTreeSet<Position>,
    cavern: &Cavern,
    units: &Units,
) -> (Cavern, Units) {
    let mut new_cavern = cavern.clone();
    let mut new_units = units.clone();
    let target_position = *attackable_positions
        .iter()
        .min_by_key(|attackable_position| units.get(attackable_position).unwrap().hitpoints)
        .unwrap();
    let mut attacked_unit = new_units.remove(&target_position).unwrap();
    attacked_unit.hitpoints -= acting_unit.attack_power;
    if attacked_unit.hitpoints > 0 {
        new_units.insert(target_position, attacked_unit);
    } else {
        // The attacked unit died, so remove it both from the cavern and from the units.
        new_cavern.insert(target_position, Tile::Open);
    }
    (new_cavern, new_units)
}

/// Moves one step along the shortest path to the nearest reachable in-range
/// square (ties broken by the square's reading order). Stays put if no square
/// is reachable.
fn perform_move(
    start_position: Position,
    in_range_positions: &BTreeSet<Position>,
    cavern: &Cavern,
    units: &Units,
) -> (Position, Cavern, Units) {
    let mut new_position = start_position;
    let mut new_cavern = cavern.clone();
    let mut new_units = units.clone();
    let chosen = in_range_positions
        .iter()
        .filter_map(|&in_range_position| {
            shortest_path(start_position, in_range_position, cavern)
                .map(|path| (in_range_position, path))
        })
        .min_by(
            |(position1, path1), (position2, path2)| match path1.len().cmp(&path2.len()) {
                Ordering::Equal => position1.cmp(position2),
                ordering => ordering,
            },
        );
    if let Some((_chosen_position, chosen_path)) = chosen {
        let first_step = chosen_path[0];
        // Relocate both the tile and the unit entry to the first step.
        let removed_tile = new_cavern.insert(start_position, Tile::Open).unwrap();
        let removed_unit = new_units.remove(&start_position).unwrap();
        new_position = first_step;
        new_cavern.insert(first_step, removed_tile);
        new_units.insert(first_step, removed_unit);
    }
    (new_position, new_cavern, new_units)
}

/// Search-queue entry for `shortest_path`: a candidate position plus the path
/// taken to reach it.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
struct SPEntry {
    position: Position,
    path: Path,
}

impl Ord for SPEntry {
    // Max-heap ordering inverted so BinaryHeap pops the SHORTEST path first;
    // among equal lengths, the lexicographically smallest (reading-order) path.
    fn cmp(&self, other: &SPEntry) -> Ordering {
        let mut ordering = other.path.len().cmp(&self.path.len());
        if ordering != Ordering::Equal {
            return ordering;
        }
        for (o, s) in other.path.iter().zip(self.path.iter()) {
            ordering = o.cmp(s);
            if ordering != Ordering::Equal {
                break;
            }
        }
        ordering
    }
}

impl PartialOrd for SPEntry {
    fn partial_cmp(&self, other: &SPEntry) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

/// Best-first search over open tiles from `from` to `to`; returns the chosen
/// path (excluding `from`) or None if `to` is unreachable.
fn shortest_path(from: Position, to: Position, cavern: &Cavern) -> Option<Path> {
    let mut queue = BinaryHeap::new();
    let mut visited: BTreeMap<Position, Path> = BTreeMap::new();
    queue.extend(in_range_entries(from, &Vec::new(), cavern));
    while let Some(current) = queue.pop() {
        if current.position == to {
            return Some(current.path);
        }
        if visited.contains_key(&current.position) {
            continue;
        }
        visited.insert(current.position, current.path.clone());
        queue.extend(in_range_entries(current.position, &current.path, cavern))
    }
    visited.remove(&to)
}

/// Queue entries for all open neighbours of `position`, each extending
/// `base_path` by one step.
fn in_range_entries(position: Position, base_path: &Path, cavern: &Cavern) -> Vec<SPEntry> {
    in_range(position, cavern)
        .iter()
        .map(|&in_range| {
            let position = in_range;
            let mut path = base_path.clone();
            path.push(position);
            SPEntry { position, path }
        })
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::test;

    mod part1 {
        use super::*;

        test!(example1, file "testdata/day15/ex1", "27730", part1);
        test!(example2, file "testdata/day15/ex2", "36334", part1);
        test!(example3, file "testdata/day15/ex3", "39514", part1);
        test!(example4, file "testdata/day15/ex4", "27755", part1);
        test!(example5, file "testdata/day15/ex5", "28944", part1);
        test!(example6, file "testdata/day15/ex6", "18740", part1);
        test!(actual, file "../../../inputs/2018/15", "201638", part1);
    }

    mod part2 {
        use super::*;

        test!(example1, file "testdata/day15/ex1", "4988", part2);
        test!(example3, file "testdata/day15/ex3", "31284", part2);
        test!(example4, file "testdata/day15/ex4", "3478", part2);
        test!(example5, file "testdata/day15/ex5", "6474", part2);
        test!(example6, file "testdata/day15/ex6", "1140", part2);
        test!(actual, file "../../../inputs/2018/15", "95764", part2);
    }
}
use emission::*;
use rendy::{
    command::Families,
    factory::{Config, Factory},
    graph::{
        present::PresentNode,
        render::{RenderGroupBuilder, SimpleGraphicsPipeline},
        Graph, GraphBuilder, NodeDesc,
    },
    hal,
    wsi::winit::{
        Event, EventsLoop, KeyboardInput, VirtualKeyCode, Window, WindowBuilder, WindowEvent,
    },
};

// #[cfg(feature = "dx12")]
// type Backend = rendy::dx12::Backend;
// #[cfg(feature = "metal")]
// type Backend = rendy::metal::Backend;
// #[cfg(feature = "vulkan")]
type Backend = rendy::vulkan::Backend;

/// Scene data shared with the render/compute nodes: camera matrices and the
/// particle emitters.
struct Scene {
    proj: Mat4,
    view: Mat4,
    camera: Vec4,
    emitters: Vec<Emitter>,
}

impl QueryProjView for Scene {
    fn query_proj_view(&self) -> (&Mat4, &Mat4, &Vec4) {
        (&self.proj, &self.view, &self.camera)
    }
}

impl QueryEmitters for Scene {
    fn query_emitters(&self) -> &Vec<Emitter> {
        &self.emitters
    }
}

/// Main loop: builds the scene and render graph, then runs frames until the
/// window is closed or Escape is pressed, rebuilding the graph on resize.
fn run(
    event_loop: &mut EventsLoop,
    factory: &mut Factory<Backend>,
    families: &mut Families<Backend>,
    window: &Window,
) -> Result<(), failure::Error> {
    let started = std::time::Instant::now();
    let mut last_window_size = window.get_inner_size();
    let mut need_rebuild = false;
    let mut frames = 0u64..;
    let mut elapsed = started.elapsed();
    let size = window
        .get_inner_size()
        .unwrap()
        .to_physical(window.get_hidpi_factor());
    let aspect = size.width / size.height;

    // Two emitters: e2 reuses e1's settings via struct update syntax.
    let e1 = Emitter {
        transform: Mat4::identity(),
        gen: 1,
        spawn_rate: 50.0,
        lifetime: 2.0,
        max_particles: 32,
        spawn_offset_min: [-0.2, 0.0, 0.0, 0.0].into(),
        spawn_offset_max: [0.2, 0.0, 0.0, 0.0].into(),
        accel: [0.0, 0.5, 0.0, 0.0].into(),
        scale: [1.0, 1.0, 1.0, 0.0].into(),
        color: [1.0, 0.2, 0.3, 0.8].into(),
        ..Default::default()
    };
    let e2 = Emitter {
        transform: Mat4::new_translation(&Vec3::new(0.0, 0.0, 0.0)),
        spawn_offset_min: [-0.5, 0.0, 4.0, 0.0].into(),
        spawn_offset_max: [0.5, 0.0, 10.0, 0.0].into(),
        accel: [0.25, 0.0, 0.0, 0.0].into(),
        color: [0.2, 0.1, 0.2, 0.5].into(),
        ..e1
    };

    let mut proj =
        nalgebra::Perspective3::new(aspect as f32, std::f32::consts::PI / 4.0, 0.1, 200.0)
            .to_homogeneous();
    // Flip y for Vulkan NDC.
    proj[(1, 1)] *= -1.0;
    let camera = Vec4::new(1.0, 1.0, -6.0, 0.0);
    let view = nalgebra::Isometry3::look_at_rh(
        &nalgebra::Point3::new(camera.x, camera.y, camera.z),
        &nalgebra::Point3::new(0.0, 0.0, 0.0),
        &Vec3::y(),
    )
    .to_homogeneous();

    let mut scene = Scene {
        proj,
        view,
        camera,
        // proj: Mat4::perspective_rh(std::f32::consts::PI / 4.0, aspect as f32, 0.1, 200.0),
        // view: Mat4::look_at_rh(
        //     Vec3::new(2.0, 2.0, -6.0),
        //     Vec3::zero(),
        //     Vec3::new(0.0, 1.0, 0.0),
        // ),
        emitters: vec![e1, e2],
    };

    let mut graph = build_graph(factory, families, window.clone(), &mut scene);

    for _ in &mut frames {
        factory.maintain(families);
        let mut close_requested = false;
        // Close on window-close or Escape.
        event_loop.poll_events(|e| match e {
            Event::WindowEvent {
                event: WindowEvent::CloseRequested,
                ..
            }
            | Event::WindowEvent {
                event:
                    WindowEvent::KeyboardInput {
                        input:
                            KeyboardInput {
                                virtual_keycode: Some(VirtualKeyCode::Escape),
                                ..
                            },
                        ..
                    },
                ..
            } => close_requested = true,
            _ => (),
        });
        if close_requested {
            break;
        }

        // Rebuild the graph only once the window size has settled (size
        // changed on a previous frame but is unchanged this frame).
        let new_window_size = window.get_inner_size();
        if last_window_size != new_window_size {
            need_rebuild = true;
        }
        if need_rebuild && last_window_size == new_window_size {
            need_rebuild = false;
            let started = std::time::Instant::now();
            graph.dispose(factory, &scene);
            println!("Graph disposed in: {:?}", started.elapsed());
            graph = build_graph(factory, families, window.clone(), &mut scene);
        }
        last_window_size = new_window_size;

        graph.run(factory, families, &scene);
        elapsed = started.elapsed();
        // if elapsed >= std::time::Duration::new(5, 0) {
        //     break;
        // }
    }

    let elapsed_ns = elapsed.as_secs() * 1_000_000_000 + elapsed.subsec_nanos() as u64;
    log::info!(
        "Elapsed: {:?}. Frames: {}. FPS: {}",
        elapsed,
        frames.start,
        frames.start * 1_000_000_000 / elapsed_ns
    );

    graph.dispose(factory, &scene);
    Ok(())
}

//#[cfg(any(feature = "dx12", feature = "metal", feature = "vulkan"))]
fn main() {
    env_logger::Builder::from_default_env()
        .filter_module("emission", log::LevelFilter::Trace)
        .init();

    let config: Config = Default::default();

    let (mut factory, mut families): (Factory<Backend>, _) =
        rendy::factory::init(config).unwrap();

    let mut event_loop = EventsLoop::new();

    let window = WindowBuilder::new()
        .with_title("emission example")
        .build(&event_loop)
        .unwrap();

    //event_loop.poll_events(|_| ());

    run(&mut event_loop, &mut factory, &mut families, &window).unwrap();
    log::debug!("Done");
    // Dropped explicitly so the teardown order is visible in the logs.
    log::debug!("Drop families");
    drop(families);
    log::debug!("Drop factory");
    drop(factory);
}

/// Builds the render graph: a compute node (particle simulation) feeding a
/// render pass over shared particle/indirect buffers, presented to the window.
//#[cfg(any(feature = "dx12", feature = "metal", feature = "vulkan"))]
fn build_graph<B: hal::Backend>(
    factory: &mut Factory<B>,
    families: &mut Families<B>,
    window: &Window,
    scene: &mut Scene,
) -> Graph<B, Scene> {
    let surface = factory.create_surface(window);

    let mut graph_builder = GraphBuilder::<B, Scene>::new();

    // Shared buffers: particle state and indirect draw commands.
    let particles = graph_builder
        .create_buffer(MAX_PARTICLES as u64 * std::mem::size_of::<Particle>() as u64);
    let indirect = graph_builder.create_buffer(
        MAX_PARTICLES as u64 * std::mem::size_of::<rendy::command::DrawCommand>() as u64,
    );

    let size = window
        .get_inner_size()
        .unwrap()
        .to_physical(window.get_hidpi_factor());
    let window_kind = hal::image::Kind::D2(size.width as u32, size.height as u32, 1, 1);

    let color = graph_builder.create_image(
        window_kind,
        1,
        factory.get_surface_format(&surface),
        Some(hal::command::ClearValue::Color([0.9, 0.9, 0.9, 1.0].into())),
    );

    // let depth = graph_builder.create_image(
    //     window_kind,
    //     1,
    //     hal::format::Format::D16Unorm,
    //     Some(hal::command::ClearValue::DepthStencil(
    //         hal::command::ClearDepthStencil(1.0, 0),
    //     )),
    // );

    let compute = graph_builder.add_node(
        ComputeNodeDesc
            .builder()
            .with_buffer(particles)
            .with_buffer(indirect),
    );

    let pass = graph_builder.add_node(
        RenderNode::builder()
            .with_buffer(particles)
            .with_buffer(indirect)
            .with_dependency(compute)
            .into_subpass()
            .with_color(color)
            //.with_depth_stencil(depth)
            .into_pass(),
    );

    graph_builder.add_node(PresentNode::builder(&factory, surface, color).with_dependency(pass));

    let started = std::time::Instant::now();
    let graph = graph_builder.build(factory, families, scene).unwrap();
    println!("Graph built in: {:?}", started.elapsed());
    graph
}
use bincode::serialize; use bincode::Infinite; use crypto_hash::{Algorithm, digest}; use serde_bytes; use std::fmt::Write; use std::time::{SystemTime, UNIX_EPOCH}; #[derive(Serialize)] pub struct Block { index: u32, timestamp: u64, nonce: u64, #[serde(with = "serde_bytes")] pub parent: [u8; 32], #[serde(with = "serde_bytes")] payload: Vec<u8>, } impl Block { pub fn new(payload: Vec<u8>) -> Block { Block { index: 0, timestamp: Block::get_current_time(), nonce: 0, parent: [0; 32], payload: payload, } } fn get_current_time() -> u64 { let current_time = SystemTime::now().duration_since(UNIX_EPOCH).expect( "Time went backwards", ); current_time.as_secs() * 1000 + current_time.subsec_nanos() as u64 / 1_000_000 } pub fn hash(&self) -> [u8; 32] { let encoded = serialize(&self, Infinite).unwrap(); let d = digest(Algorithm::SHA256, &encoded); let mut arr = [0u8; 32]; for i in 0..32 { arr[i] = d[i]; } arr } pub fn proof_of_work(&mut self) { self.nonce = 0; while &self.hash()[..2] != [0; 2] { self.nonce += 1; } println!("Found block at nonce: {}", self.nonce); } pub fn print(&self) { let mut s = String::new(); for byte in self.hash().to_vec() { write!(&mut s, "{:02X} ", byte).expect("Unable to write"); } println!("Hash: {}", s); } }
#[doc = "Reader of register INTR_MASKED"] pub type R = crate::R<u32, super::INTR_MASKED>; #[doc = "Reader of field `SAMPLE`"] pub type SAMPLE_R = crate::R<bool, bool>; #[doc = "Reader of field `INIT`"] pub type INIT_R = crate::R<bool, bool>; #[doc = "Reader of field `ADC_RES`"] pub type ADC_RES_R = crate::R<bool, bool>; impl R { #[doc = "Bit 1 - Logical and of corresponding request and mask bits."] #[inline(always)] pub fn sample(&self) -> SAMPLE_R { SAMPLE_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Logical and of corresponding request and mask bits."] #[inline(always)] pub fn init(&self) -> INIT_R { INIT_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 8 - Logical and of corresponding request and mask bits."] #[inline(always)] pub fn adc_res(&self) -> ADC_RES_R { ADC_RES_R::new(((self.bits >> 8) & 0x01) != 0) } }
mod permission;
mod repository;

pub use permission::*;
pub use repository::*;

use common::event::Event;
use common::model::{AggregateRoot, StringId};
use common::result::Result;

/// Identifier type for roles: a validated string id.
pub type RoleId = StringId;

/// An authorization role: a named set of per-module permissions.
#[derive(Debug, Clone)]
pub struct Role {
    base: AggregateRoot<RoleId, Event>,
    name: String,
    permissions: Vec<Permission>,
}

impl Role {
    /// Creates a role with the given id (`code`) and display `name`,
    /// starting with an empty permission set.
    pub fn new<S: Into<String>>(code: RoleId, name: S) -> Result<Self> {
        let name = name.into();

        Ok(Role {
            base: AggregateRoot::new(code),
            name,
            permissions: Vec::new(),
        })
    }

    /// The aggregate root holding this role's id and recorded events.
    pub fn base(&self) -> &AggregateRoot<RoleId, Event> {
        &self.base
    }

    /// Whether this role's id equals `role_id`.
    pub fn is(&self, role_id: &str) -> bool {
        self.base().id().value() == role_id
    }

    /// The role's display name.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// Whether some permission of this role covers `permissions` for `module`
    /// (the matching semantics are delegated to `Permission::contains`).
    pub fn has_permissions(&self, module: &str, permissions: &str) -> bool {
        // Idiomatic `any` replaces the original manual for-loop.
        self.permissions
            .iter()
            .any(|p| p.module() == module && p.contains(permissions))
    }

    /// Grants one additional permission to this role.
    pub fn add_permissions(&mut self, permission: Permission) {
        self.permissions.push(permission);
    }
}

#[cfg(test)]
mod tests {
    use common::result::Result;

    use super::*;

    #[test]
    fn create_role() -> Result<()> {
        let r = Role::new(RoleId::new("admin").unwrap(), "Administrator")?;
        // (The original repeated this assertion twice; the duplicate is removed.)
        assert_eq!(r.base(), &AggregateRoot::new(RoleId::new("admin").unwrap()));
        assert_eq!(r.name(), "Administrator");
        Ok(())
    }

    #[test]
    fn permissions() -> Result<()> {
        let pmod1 = Permission::new("mod1", "CRUD")?;
        let pmod2 = Permission::new("mod2", "CRD")?;
        let pmod3 = Permission::new("mod3", "R")?;

        let mut r = Role::new(RoleId::new("user").unwrap(), "User")?;
        r.add_permissions(pmod1);
        r.add_permissions(pmod2);
        r.add_permissions(pmod3);

        assert!(r.has_permissions("mod1", "cD"));
        assert!(r.has_permissions("mod1", "crud"));
        assert!(r.has_permissions("mod2", "Cd"));
        assert!(!r.has_permissions("mod2", "CdU"));
        assert!(!r.has_permissions("mod3", "C"));
        assert!(r.has_permissions("mod3", "r"));

        let pmod4 = Permission::new("mod3", "c")?;
        r.add_permissions(pmod4);
        assert!(r.has_permissions("mod3", "C"));

        Ok(())
    }
}
// This file ensures the examples from the README compile.
// Be sure not to `use` anything outside of the examples, since the examples are in charge of
// specifying anything that needs to be imported.

// Minimal error type so the examples can use `?` on driver calls.
struct Err {}

impl From<mongodb::error::Error> for Err {
    fn from(_error: mongodb::error::Error) -> Self {
        Err {}
    }
}

#[allow(dead_code)]
type Result<T> = std::result::Result<T, Err>;

// Each example below is compiled only for the async API (neither sync feature
// enabled); the sync example at the bottom is gated the other way around.
#[cfg(all(not(feature = "sync"), not(feature = "tokio-sync")))]
async fn _connecting() -> Result<()> {
    use mongodb::{options::ClientOptions, Client};

    // Parse a connection string into an options struct.
    let mut client_options = ClientOptions::parse("mongodb://localhost:27017").await?;

    // Manually set an option.
    client_options.app_name = Some("My App".to_string());

    // Get a handle to the deployment.
    let client = Client::with_options(client_options)?;

    // List the names of the databases in that deployment.
    for db_name in client.list_database_names(None, None).await? {
        println!("{}", db_name);
    }

    Ok(())
}

#[cfg(all(not(feature = "sync"), not(feature = "tokio-sync")))]
async fn _getting_handle_to_database(client: mongodb::Client) -> Result<()> {
    // Get a handle to a database.
    let db = client.database("mydb");

    // List the names of the collections in that database.
    for collection_name in db.list_collection_names(None).await? {
        println!("{}", collection_name);
    }

    Ok(())
}

#[cfg(all(not(feature = "sync"), not(feature = "tokio-sync")))]
async fn _inserting_documents_into_a_collection(db: mongodb::Database) -> Result<()> {
    use mongodb::bson::{doc, Document};

    // Get a handle to a collection in the database.
    let collection = db.collection::<Document>("books");

    let docs = vec![
        doc! { "title": "1984", "author": "George Orwell" },
        doc! { "title": "Animal Farm", "author": "George Orwell" },
        doc! { "title": "The Great Gatsby", "author": "F. Scott Fitzgerald" },
    ];

    // Insert some documents into the "mydb.books" collection.
    collection.insert_many(docs, None).await?;

    Ok(())
}

use serde::{Deserialize, Serialize};

// Example domain type used by the typed-collection examples below.
#[derive(Debug, Serialize, Deserialize)]
struct Book {
    title: String,
    author: String,
}

#[cfg(all(not(feature = "sync"), not(feature = "tokio-sync")))]
async fn _inserting_documents_into_a_typed_collection(db: mongodb::Database) -> Result<()> {
    // Get a handle to a collection of `Book`.
    let typed_collection = db.collection::<Book>("books");

    let books = vec![
        Book {
            title: "The Grapes of Wrath".to_string(),
            author: "John Steinbeck".to_string(),
        },
        Book {
            title: "To Kill a Mockingbird".to_string(),
            author: "Harper Lee".to_string(),
        },
    ];

    // Insert the books into "mydb.books" collection, no manual conversion to BSON necessary.
    typed_collection.insert_many(books, None).await?;

    Ok(())
}

#[cfg(all(not(feature = "sync"), not(feature = "tokio-sync")))]
async fn _finding_documents_into_a_collection(
    typed_collection: mongodb::Collection<Book>,
) -> Result<()> {
    // This trait is required to use `try_next()` on the cursor
    use futures::stream::TryStreamExt;
    use mongodb::{bson::doc, options::FindOptions};

    // Query the books in the collection with a filter and an option.
    let filter = doc! { "author": "George Orwell" };
    let find_options = FindOptions::builder().sort(doc! { "title": 1 }).build();
    let mut cursor = typed_collection.find(filter, find_options).await?;

    // Iterate over the results of the cursor.
    while let Some(book) = cursor.try_next().await? {
        println!("title: {}", book.title);
    }

    Ok(())
}

#[cfg(any(feature = "sync", feature = "tokio-sync"))]
async fn _using_the_sync_api() -> Result<()> {
    use mongodb::{bson::doc, sync::Client};

    let client = Client::with_uri_str("mongodb://localhost:27017")?;
    let database = client.database("mydb");
    let collection = database.collection::<Book>("books");

    let docs = vec![
        Book {
            title: "1984".to_string(),
            author: "George Orwell".to_string(),
        },
        Book {
            title: "Animal Farm".to_string(),
            author: "George Orwell".to_string(),
        },
        Book {
            title: "The Great Gatsby".to_string(),
            author: "F. Scott Fitzgerald".to_string(),
        },
    ];

    // Insert some books into the "mydb.books" collection.
    collection.insert_many(docs, None)?;

    let cursor = collection.find(doc! { "author": "George Orwell" }, None)?;
    for result in cursor {
        println!("title: {}", result?.title);
    }

    Ok(())
}

#[cfg(all(not(feature = "sync"), not(feature = "tokio-sync")))]
async fn _windows_dns_note() -> Result<()> {
    use mongodb::{
        options::{ClientOptions, ResolverConfig},
        Client,
    };

    // Use an explicit resolver config (e.g. on Windows, where the default
    // system DNS resolution may not support SRV lookups).
    let options = ClientOptions::parse_with_resolver_config(
        "mongodb+srv://my.host.com",
        ResolverConfig::cloudflare(),
    )
    .await?;
    let client = Client::with_options(options)?;

    drop(client);

    Ok(())
}
use {Point, SignedNum};

/// An implementation of [Bresenham's circle algorithm].
///
/// This uses four quadrants, so calling `next()` will return a point for the first quadrant,
/// then the second, third, fourth and then back to first.
///
/// Example:
///
/// ```
/// extern crate line_drawing;
/// use line_drawing::BresenhamCircle;
///
/// fn main() {
///     for (x, y) in BresenhamCircle::new(0, 0, 1) {
///         print!("({}, {}), ", x, y);
///     }
/// }
/// ```
///
/// ```text
/// (1, 0), (0, 1), (-1, 0), (0, -1),
/// ```
///
/// [Bresenham's circle algorithm]: http://members.chello.at/~easyfilter/bresenham.html
pub struct BresenhamCircle<T> {
    // Current offset from the center; `x` starts at `-radius` and steps toward zero.
    x: T,
    y: T,
    center_x: T,
    center_y: T,
    radius: T,
    // Accumulated error term deciding when to step `x` and `y`.
    error: T,
    // Quadrant (1..=4) that the next emitted point is mirrored into.
    quadrant: u8,
}

impl<T: SignedNum> BresenhamCircle<T> {
    /// Creates an iterator over the circle centered at `(center_x, center_y)`
    /// with the given `radius`.
    #[inline]
    pub fn new(center_x: T, center_y: T, radius: T) -> Self {
        Self {
            center_x,
            center_y,
            radius,
            x: -radius,
            // Initial error value as in the linked reference implementation.
            y: T::zero(),
            error: T::cast(2) - T::cast(2) * radius,
            quadrant: 1,
        }
    }
}

impl<T: SignedNum> Iterator for BresenhamCircle<T> {
    type Item = Point<T>;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Iteration is finished once `x` reaches zero.
        if self.x < T::zero() {
            // Mirror the current (x, y) offset into the quadrant being emitted.
            let point = match self.quadrant {
                1 => (self.center_x - self.x, self.center_y + self.y),
                2 => (self.center_x - self.y, self.center_y - self.x),
                3 => (self.center_x + self.x, self.center_y - self.y),
                4 => (self.center_x + self.y, self.center_y + self.x),
                _ => unreachable!(),
            };

            // Update the variables after each set of quadrants
            if self.quadrant == 4 {
                // `radius` is reused as a snapshot of the error term here,
                // mirroring the `r = err` step of the reference algorithm;
                // the order of the two conditionals below is significant
                // (the second reads `error` possibly updated by the first).
                self.radius = self.error;

                if self.radius <= self.y {
                    self.y += T::one();
                    self.error += self.y * T::cast(2) + T::one();
                }

                if self.radius > self.x || self.error > self.y {
                    self.x += T::one();
                    self.error += self.x * T::cast(2) + T::one();
                }
            }

            // Advance 1 -> 2 -> 3 -> 4 -> 1.
            self.quadrant = self.quadrant % 4 + 1;

            Some(point)
        } else {
            None
        }
    }
}
extern crate actix_web;
extern crate serde_json;
extern crate uuid;

extern crate ikrelln;

mod helpers;

use std::collections::HashMap;
use std::{thread, time};

use actix_web::*;

use ikrelln::api::span::IngestResponse;
use ikrelln::engine::test_result::TestResult;
use ikrelln::opentracing::span::Kind;
use ikrelln::opentracing::Span;

// End-to-end check: a span posted with an empty tag map must be accepted by
// the ingest endpoint (1 event) but must NOT yield any test result.
#[test]
fn should_not_have_test_result_from_span_without_tags() {
    helpers::setup_logger();
    let mut srv = helpers::setup_server();

    // A fresh UUID doubles as trace id, span id and span name, so the
    // test-result query below is unique to this test run.
    let trace_id = uuid::Uuid::new_v4().to_string();
    let req = srv
        .client(http::Method::POST, "/api/v1/spans")
        .json(vec![Span {
            trace_id: trace_id.to_string(),
            id: trace_id.clone(),
            parent_id: None,
            name: Some(trace_id.clone()),
            kind: Some(Kind::CLIENT),
            duration: Some(25),
            timestamp: Some(50),
            debug: false,
            shared: false,
            local_endpoint: None,
            remote_endpoint: None,
            annotations: vec![],
            // Deliberately empty: this is the condition under test.
            tags: HashMap::new(),
            binary_annotations: vec![],
        }])
        .unwrap();
    let response = srv.execute(req.send()).unwrap();
    // The span itself must ingest successfully.
    assert!(response.status().is_success());
    let data: Result<IngestResponse, _> =
        serde_json::from_slice(&*srv.execute(response.body()).unwrap());
    assert!(data.is_ok());
    assert_eq!(data.unwrap().nb_events, 1);

    // Give the server time to persist any (asynchronously) derived results.
    thread::sleep(time::Duration::from_millis(
        helpers::DELAY_RESULT_SAVED_MILLISECONDS,
    ));

    let req_tr = srv
        .client(
            http::Method::GET,
            &format!("/api/v1/testresults?traceId={}", &trace_id),
        )
        .finish()
        .unwrap();
    let response_tr = srv.execute(req_tr.send()).unwrap();
    assert!(response_tr.status().is_success());
    let data_tr: Result<Vec<TestResult>, _> =
        serde_json::from_slice(&*srv.execute(response_tr.body()).unwrap());
    assert!(data_tr.is_ok());
    // No tags were provided, so no test result should have been created.
    assert_eq!(data_tr.unwrap().len(), 0);

    thread::sleep(time::Duration::from_millis(helpers::DELAY_FINISH));
}
use std::fmt::{self, Debug, Formatter};
use std::fs::{self, File, OpenOptions};
use std::io::{self, BufReader, BufWriter, Read, Seek, SeekFrom, Write};
use std::mem::size_of;
use std::path::Path;
use std::process::Command;
use std::str::{self, FromStr};
use std::sync::mpsc::{channel, TryRecvError};
use std::thread::{self, JoinHandle};
use std::time::{Duration, Instant};

use bellperson::groth16;
use byteorder::{BigEndian, ReadBytesExt};
use clap::{App, AppSettings, Arg, ArgGroup, SubCommand};
use filecoin_proofs::constants::*;
use filecoin_proofs::parameters::{
    setup_params, window_post_public_params, winning_post_public_params,
};
use filecoin_proofs::types::{
    PaddedBytesAmount, PoRepConfig, PoRepProofPartitions, PoStConfig, PoStType, SectorSize,
};
use filecoin_proofs::with_shape;
use groupy::{CurveAffine, EncodedPoint};
use log::{error, info, warn};
use paired::bls12_381::{Bls12, G1Affine, G1Uncompressed, G2Affine, G2Uncompressed};
use phase2::small::{read_small_params_from_large_file, MPCSmall, Streamer};
use phase2::MPCParameters;
use rand::rngs::OsRng;
use rand::{RngCore, SeedableRng};
use rand_chacha::ChaChaRng;
use simplelog::{self, CombinedLogger, LevelFilter, TermLogger, TerminalMode, WriteLogger};
use storage_proofs::compound_proof::{self, CompoundProof};
use storage_proofs::hasher::Sha256Hasher;
use storage_proofs::merkle::MerkleTreeTrait;
use storage_proofs::parameter_cache::{
    self, metadata_id, parameter_id, verifying_key_id, CacheableParameters,
};
use storage_proofs::porep::stacked::{
    PublicParams as PoRepPublicParams, StackedCircuit, StackedCompound, StackedDrg,
};
use storage_proofs::post::fallback::{
    FallbackPoSt, FallbackPoStCircuit, FallbackPoStCompound, PublicParams as PoStPublicParams,
};

// Number of group elements processed per batch when streaming contributions
// and format conversions (bounds memory usage).
const CHUNK_SIZE: usize = 10_000;

// Non-raw sizes.
// Byte sizes of uncompressed (non-raw) serialized group elements, of a
// per-contribution public key, and of a serialized vector-length prefix.
const G1_SIZE: u64 = size_of::<G1Uncompressed>() as u64; // 96
const G2_SIZE: u64 = size_of::<G2Uncompressed>() as u64; // 192
const PUBKEY_SIZE: u64 = 3 * G1_SIZE + G2_SIZE + 64; // 544
const VEC_LEN_SIZE: u64 = size_of::<u32>() as u64; // 4

// Returns the short (7-char) git HEAD commit, lowercased; it is embedded in
// params filenames to tie them to the code revision that produced them.
fn get_head_commit() -> String {
    let output = Command::new("git")
        .args(&["rev-parse", "--short=7", "HEAD"])
        .output()
        .expect("failed to execute child process: `git rev-parse --short=7 HEAD`");

    str::from_utf8(&output.stdout)
        .expect("`git` child process outputed invalid Utf8 bytes")
        .trim()
        .to_lowercase()
}

// The circuit family whose params are being generated / contributed to.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Proof {
    Sdr,
    Winning,
    Window,
}

impl Proof {
    // Used for printing during logging.
    fn pretty_print(&self) -> &str {
        match self {
            Proof::Sdr => "SDR",
            Proof::Winning => "Winning",
            Proof::Window => "Window",
        }
    }

    // Used for constructing param filenames.
    fn lowercase(&self) -> &str {
        match self {
            Proof::Sdr => "sdr",
            Proof::Winning => "winning",
            Proof::Window => "window",
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum Hasher {
    Poseidon,
}

impl Hasher {
    // Used for printing during logging. Implementing Debug and Display is less clear than having
    // methods `.pretty_print()` and `.lowercase()` which differentiate between printing for logging
    // v.s. printing for filenames.
    fn pretty_print(&self) -> &str {
        match self {
            Hasher::Poseidon => "Poseidon",
        }
    }

    // Used for constructing param filenames.
    fn lowercase(&self) -> &str {
        match self {
            Hasher::Poseidon => "poseidon",
        }
    }
}

// Supported sector sizes; each maps onto a `SECTOR_SIZE_*` constant from
// `filecoin_proofs::constants`.
#[derive(Clone, Copy, Debug, PartialEq)]
#[allow(clippy::enum_variant_names)]
enum Sector {
    SectorSize2KiB,
    SectorSize4KiB,
    SectorSize16KiB,
    SectorSize32KiB,
    SectorSize8MiB,
    SectorSize16MiB,
    SectorSize512MiB,
    SectorSize1GiB,
    SectorSize16GiB,
    SectorSize32GiB,
    SectorSize64GiB,
}

impl Sector {
    // Sector size in bytes.
    fn as_u64(self) -> u64 {
        match self {
            Sector::SectorSize2KiB => SECTOR_SIZE_2_KIB,
            Sector::SectorSize4KiB => SECTOR_SIZE_4_KIB,
            Sector::SectorSize16KiB => SECTOR_SIZE_16_KIB,
            Sector::SectorSize32KiB => SECTOR_SIZE_32_KIB,
            Sector::SectorSize8MiB => SECTOR_SIZE_8_MIB,
            Sector::SectorSize16MiB => SECTOR_SIZE_16_MIB,
            Sector::SectorSize512MiB => SECTOR_SIZE_512_MIB,
            Sector::SectorSize1GiB => SECTOR_SIZE_1_GIB,
            Sector::SectorSize16GiB => SECTOR_SIZE_16_GIB,
            Sector::SectorSize32GiB => SECTOR_SIZE_32_GIB,
            Sector::SectorSize64GiB => SECTOR_SIZE_64_GIB,
        }
    }

    // Used for constructing param filenames.
    fn lowercase(&self) -> &str {
        match self {
            Sector::SectorSize2KiB => "2kib",
            Sector::SectorSize4KiB => "4kib",
            Sector::SectorSize16KiB => "16kib",
            Sector::SectorSize32KiB => "32kib",
            Sector::SectorSize8MiB => "8mib",
            Sector::SectorSize16MiB => "16mib",
            Sector::SectorSize512MiB => "512mib",
            Sector::SectorSize1GiB => "1gib",
            Sector::SectorSize16GiB => "16gib",
            Sector::SectorSize32GiB => "32gib",
            Sector::SectorSize64GiB => "64gib",
        }
    }

    // Used for printing during logging.
    fn pretty_print(&self) -> &str {
        match self {
            Sector::SectorSize2KiB => "2KiB",
            Sector::SectorSize4KiB => "4KiB",
            Sector::SectorSize16KiB => "16KiB",
            Sector::SectorSize32KiB => "32KiB",
            Sector::SectorSize8MiB => "8MiB",
            Sector::SectorSize16MiB => "16MiB",
            Sector::SectorSize512MiB => "512MiB",
            Sector::SectorSize1GiB => "1GiB",
            Sector::SectorSize16GiB => "16GiB",
            Sector::SectorSize32GiB => "32GiB",
            Sector::SectorSize64GiB => "64GiB",
        }
    }
}

// Whether a params file holds the full ("large") MPC parameters or the
// reduced ("small") subset used for streaming contributions/verification.
#[derive(Clone, Copy, Debug, PartialEq)]
enum ParamSize {
    Large,
    Small,
}

impl ParamSize {
    // Used for printing during logging.
    fn pretty_print(&self) -> &str {
        match self {
            ParamSize::Large => "Large",
            ParamSize::Small => "Small",
        }
    }

    // Used for constructing param filenames.
    fn lowercase(&self) -> &str {
        match self {
            ParamSize::Large => "large",
            ParamSize::Small => "small",
        }
    }

    fn is_small(self) -> bool {
        self == ParamSize::Small
    }

    fn is_large(self) -> bool {
        self == ParamSize::Large
    }
}

// Builds a params filename of the form:
// `<proof>_<hasher>_<sector>_<head>_<number>_<size>[_raw]`.
fn params_filename(
    proof: Proof,
    hasher: Hasher,
    sector_size: Sector,
    head: &str,
    param_number: usize,
    param_size: ParamSize,
    raw: bool,
) -> String {
    format!(
        "{proof}_{hasher}_{sector}_{head}_{number}_{size}{maybe_fmt}",
        proof = proof.lowercase(),
        hasher = hasher.lowercase(),
        sector = sector_size.lowercase(),
        head = head,
        number = param_number,
        size = param_size.lowercase(),
        maybe_fmt = if raw { "_raw" } else { "" },
    )
}

// Parses a phase2 parameters filename into the tuple:
// (proof, hasher, sector-size, head, param-number, param-size, is-raw).
fn parse_params_filename(path: &str) -> (Proof, Hasher, Sector, String, usize, ParamSize, bool) {
    // Remove directories from the path.
    let filename = path
        .rsplitn(2, '/')
        .next()
        .expect("parse_params_filename rsplitn failed");

    // Fields are '_'-separated, mirroring `params_filename` above.
    let split: Vec<&str> = filename.split('_').collect();

    let proof = match split[0] {
        "sdr" => Proof::Sdr,
        "winning" => Proof::Winning,
        "window" => Proof::Window,
        other => panic!("invalid proof name in params filename: {}", other),
    };

    let hasher = match split[1] {
        "poseidon" => Hasher::Poseidon,
        other => panic!("invalid hasher name in params filename: {}", other),
    };

    let sector_size = match split[2] {
        "2kib" => Sector::SectorSize2KiB,
        "4kib" => Sector::SectorSize4KiB,
        "16kib" => Sector::SectorSize16KiB,
        "32kib" => Sector::SectorSize32KiB,
        "8mib" => Sector::SectorSize8MiB,
        "16mib" => Sector::SectorSize16MiB,
        "512mib" => Sector::SectorSize512MiB,
        "1gib" => Sector::SectorSize1GiB,
        "16gib" => Sector::SectorSize16GiB,
        "32gib" => Sector::SectorSize32GiB,
        "64gib" => Sector::SectorSize64GiB,
        other => panic!("invalid sector-size in params filename: {}", other),
    };

    let head = split[3].to_string();

    let param_number = usize::from_str(split[4])
        .unwrap_or_else(|_| panic!("invalid param number in params filename: {}", split[4]));

    let param_size = match split[5] {
        "large" => ParamSize::Large,
        "small" => ParamSize::Small,
        other => panic!("invalid param-size in params filename: {}", other),
    };

    // Optional trailing `raw` marker indicates raw serialization format.
    let raw_fmt = split.get(6) == Some(&"raw");

    if param_size.is_large() && raw_fmt {
        unimplemented!("large-raw params are not currently supported: {}", path);
    }

    (
        proof,
        hasher,
        sector_size,
        head,
        param_number,
        param_size,
        raw_fmt,
    )
}

// Builds the blank SDR PoRep (vanilla) public params used to synthesize the
// circuit for the given sector size.
fn blank_sdr_poseidon_params<Tree: MerkleTreeTrait>(sector_size: u64) -> PoRepPublicParams<Tree> {
    let n_partitions = *POREP_PARTITIONS
        .read()
        .expect("porep partition read error")
        .get(&sector_size)
        .expect("porep partition get error");

    let porep_config = PoRepConfig {
        sector_size: SectorSize(sector_size),
        partitions: PoRepProofPartitions(n_partitions),
        // Zeroed porep_id: these are blank params for circuit synthesis only.
        porep_id: [0; 32],
    };

    let setup_params = compound_proof::SetupParams {
        vanilla_params: setup_params(
            PaddedBytesAmount::from(porep_config),
            usize::from(PoRepProofPartitions::from(porep_config)),
            porep_config.porep_id,
        )
        .expect("failed to setup params"),
        partitions: Some(usize::from(PoRepProofPartitions::from(porep_config))),
        priority: false,
    };

    let public_params = <StackedCompound<Tree, Sha256Hasher> as CompoundProof<
        StackedDrg<Tree, Sha256Hasher>,
        _,
    >>::setup(&setup_params)
    .expect("public param setup failed");

    public_params.vanilla_params
}

// Blank Winning PoSt public params for the given sector size.
fn blank_winning_post_poseidon_params<Tree: 'static + MerkleTreeTrait>(
    sector_size: u64,
) -> PoStPublicParams {
    let post_config = PoStConfig {
        sector_size: SectorSize(sector_size),
        challenge_count: WINNING_POST_CHALLENGE_COUNT,
        sector_count: WINNING_POST_SECTOR_COUNT,
        typ: PoStType::Winning,
        priority: false,
    };

    winning_post_public_params::<Tree>(&post_config).expect("winning post public params failed")
}

// Blank Window PoSt public params for the given sector size.
fn blank_window_post_poseidon_params<Tree: 'static + MerkleTreeTrait>(
    sector_size: u64,
) -> PoStPublicParams {
    let post_config = PoStConfig {
        sector_size: SectorSize(sector_size),
        challenge_count: WINDOW_POST_CHALLENGE_COUNT,
        sector_count:
        *WINDOW_POST_SECTOR_COUNT
            .read()
            .expect("post config sector count read failure")
            .get(&sector_size)
            .expect("post config sector count get failure"),
        typ: PoStType::Window,
        priority: false,
    };

    window_post_public_params::<Tree>(&post_config).expect("window post public params failed")
}

/// Creates the first phase2 parameters for a circuit and writes them to a file.
fn create_initial_params<Tree: 'static + MerkleTreeTrait>(
    proof: Proof,
    hasher: Hasher,
    sector_size: Sector,
) {
    let head = get_head_commit();

    info!(
        "creating initial params for circuit: {}-{}-{}-{}",
        proof.pretty_print(),
        hasher.pretty_print(),
        sector_size.pretty_print(),
        head,
    );

    let start_total = Instant::now();
    // Timings (seconds) for circuit synthesis and for initial param creation.
    let dt_create_circuit: u64;
    let dt_create_params: u64;

    // For each (proof, hasher) pair: synthesize the blank circuit, then
    // generate the initial MPC parameters from it.
    let params = match (proof, hasher) {
        (Proof::Sdr, Hasher::Poseidon) => {
            let start = Instant::now();
            let public_params = blank_sdr_poseidon_params(sector_size.as_u64());
            let circuit = <StackedCompound<Tree, Sha256Hasher> as CompoundProof<
                StackedDrg<Tree, Sha256Hasher>,
                _,
            >>::blank_circuit(&public_params);
            dt_create_circuit = start.elapsed().as_secs();
            let start = Instant::now();
            let params = MPCParameters::new(circuit).expect("mpc params new failure");
            dt_create_params = start.elapsed().as_secs();
            params
        }
        (Proof::Winning, Hasher::Poseidon) => {
            let start = Instant::now();
            let public_params = blank_winning_post_poseidon_params::<Tree>(sector_size.as_u64());
            let circuit = <FallbackPoStCompound<Tree> as CompoundProof<
                FallbackPoSt<Tree>,
                FallbackPoStCircuit<Tree>,
            >>::blank_circuit(&public_params);
            dt_create_circuit = start.elapsed().as_secs();
            let start = Instant::now();
            let params = MPCParameters::new(circuit).expect("mpc params new failure");
            dt_create_params = start.elapsed().as_secs();
            params
        }
        (Proof::Window, Hasher::Poseidon) => {
            let start = Instant::now();
            let public_params = blank_window_post_poseidon_params::<Tree>(sector_size.as_u64());
            let circuit = <FallbackPoStCompound<Tree> as CompoundProof<
                FallbackPoSt<Tree>,
                FallbackPoStCircuit<Tree>,
            >>::blank_circuit(&public_params);
            dt_create_circuit = start.elapsed().as_secs();
            let start = Instant::now();
            let params = MPCParameters::new(circuit).expect("mpc params new failure");
            dt_create_params = start.elapsed().as_secs();
            params
        }
    };

    info!(
        "successfully created initial params for circuit, dt_create_circuit={}s, dt_create_params={}s",
        dt_create_circuit, dt_create_params
    );

    // Initial params (param number 0) are always written in large format.
    let large_path = params_filename(
        proof,
        hasher,
        sector_size,
        &head,
        0,
        ParamSize::Large,
        false,
    );

    {
        info!("writing large initial params to file: {}", large_path);
        let file = File::create(&large_path).expect("param file create failure");
        let mut writer = BufWriter::with_capacity(1024 * 1024, file);
        params.write(&mut writer).expect("param file write failure");
        info!("finished writing large params to file");
    }

    // TODO: add conversion from large to small params to phase2 crate, then write initial params as
    // small-raw.
    /*
    let small_path = params_filename(proof, hasher, sector_size, &head, 0, ParamSize::Small, true);
    {
        info!("writing small initial params to file: {}", small_path);
        let file = File::create(&small_path).unwrap();
        let mut writer = BufWriter::with_capacity(1024 * 1024, file);
        params.write_small(&mut writer).unwrap();
        info!("finished writing small params to file");
    }
    */

    info!(
        "successfully created and wrote initial params for circuit: {}-{}-{}-{}, dt_total={}s",
        proof.pretty_print(),
        hasher.pretty_print(),
        sector_size.pretty_print(),
        head,
        start_total.elapsed().as_secs()
    );
}

// Hex-encodes a contribution digest for logging and `.contrib` files.
fn hex_string(contrib: &[u8]) -> String {
    hex::encode(contrib)
}

// Produces a 32-byte RNG seed by XORing OS entropy with a Blake2b digest of
// keyboard input typed by the participant.
fn get_mixed_entropy() -> [u8; 32] {
    use dialoguer::theme::ColorfulTheme;
    use dialoguer::Password;

    let mut os_entropy = [0u8; 32];
    OsRng.fill_bytes(&mut os_entropy);

    // Read the keyboard mashing without echoing it to the terminal.
    let user_input = Password::with_theme(&ColorfulTheme::default())
        .with_prompt("Please randomly press your keyboard (press Return/Enter when finished)")
        .interact()
        .expect("entropy read failure");

    let mut blake2b = blake2b_simd::Params::default();
    blake2b.hash_length(32);
    let digest = blake2b.hash(user_input.as_bytes());
    let user_entropy = digest.as_bytes();

    // XOR the two sources so neither alone determines the final seed.
    let mut seed = [0u8; 32];
    for i in 0..32 {
        seed[i] = os_entropy[i] ^ user_entropy[i];
    }
    seed
}

/// Contributes entropy to the current phase2 parameters for a circuit, then writes the new params
/// to a small-raw file.
fn contribute_to_params(path_before: &str, seed: Option<[u8; 32]>) {
    let (proof, hasher, sector_size, head, prev_param_number, param_size, read_raw) =
        parse_params_filename(path_before);

    // This contribution gets the next sequence number in the chain.
    let param_number = prev_param_number + 1;

    info!(
        "contributing to params for circuit: {}-{}-{}-{}-{} {}->{}",
        proof.pretty_print(),
        hasher.pretty_print(),
        sector_size.pretty_print(),
        head,
        param_size.pretty_print(),
        prev_param_number,
        param_number
    );

    // Get OS entropy prior to deserializing the previous params.
    let seed = if let Some(seed) = seed {
        // An explicit seed bypasses entropy mixing; warn since it is logged.
        warn!("using `seed` argument as entropy: {}", hex_string(&seed));
        seed
    } else {
        info!("using mixed entropy");
        get_mixed_entropy()
    };
    let mut rng = ChaChaRng::from_seed(seed);

    // Write small-raw contributions.
    let path_after = params_filename(
        proof,
        hasher,
        sector_size,
        &head,
        param_number,
        ParamSize::Small,
        true,
    );

    let start_total = Instant::now();

    info!("making contribution");
    let start_contrib = Instant::now();

    // Stream the previous params (from large or small format) so memory use
    // stays bounded regardless of the params file size.
    let mut streamer = if param_size.is_large() {
        Streamer::new_from_large_file(path_before, read_raw, true).unwrap_or_else(|e| {
            panic!(
                "failed to make streamer from large `{}`: {}",
                path_before, e
            );
        })
    } else {
        Streamer::new(path_before, read_raw, true).unwrap_or_else(|e| {
            panic!(
                "failed to make streamer from small `{}`: {}",
                path_before, e
            );
        })
    };

    let file_after = File::create(&path_after).unwrap_or_else(|e| {
        panic!(
            "failed to create 'after' params file `{}`: {}",
            path_after, e
        );
    });

    info!("streaming 'after' params to file: {}", path_after);

    let contrib = streamer
        .contribute(&mut rng, file_after, CHUNK_SIZE)
        .unwrap_or_else(|e| panic!("failed to make streaming contribution: {}", e));

    let contrib_str = hex_string(&contrib);
    info!(
        "successfully made contribution: {}, dt_contribute={}s",
        contrib_str,
        start_contrib.elapsed().as_secs()
    );

    // Persist the contribution hash alongside the params for later
    // verification by `verify_contribution`.
    let contrib_path = format!("{}.contrib", path_after);
    info!("writing contribution hash to file: {}", contrib_path);
    fs::write(&contrib_path, contrib_str).unwrap_or_else(|e| {
        panic!(
            "failed to write contribution to file `{}`: {}",
            contrib_path, e
        );
    });

    info!(
        "successfully made contribution, dt_total={}s",
        start_total.elapsed().as_secs()
    );
}

// Converts a small params file between raw and non-raw serialization formats
// (direction is inferred from the input file's format).
fn convert_small(path_before: &str) {
    let (proof, hasher, sector_size, head, param_number, param_size, read_raw) =
        parse_params_filename(path_before);

    // TODO: change this if we update the large MPC params (and G2Affine) to support the raw serialization format.
    assert!(
        param_size.is_small(),
        "converting large params to raw format is not currently supported"
    );

    // Conversion direction is the opposite of the input's current format.
    let write_raw = !read_raw;

    info!(
        "converting params {to_from} raw format for circuit: {proof}-{hasher}-{sector_size}-{head}-{num} {param_size}",
        to_from = if write_raw { "to" } else { "from" },
        proof = proof.pretty_print(),
        hasher = hasher.pretty_print(),
        sector_size = sector_size.pretty_print(),
        head = head,
        num = param_number,
        param_size = param_size.pretty_print(),
    );

    // Default to small params for first participant.
    let path_after = params_filename(
        proof,
        hasher,
        sector_size,
        &head,
        param_number,
        ParamSize::Small,
        write_raw,
    );

    let start_total = Instant::now();

    info!("converting");
    info!(
        "making streamer from small {} params: {}",
        if read_raw { "raw" } else { "non-raw" },
        path_before
    );

    let mut streamer = if param_size.is_large() {
        // Unreachable in practice: guarded by the assert above.
        panic!("cannot convert large param format");
    } else {
        Streamer::new(path_before, read_raw, write_raw).unwrap_or_else(|e| {
            panic!(
                "failed to make streamer from small `{}`: {}",
                path_before, e
            );
        })
    };

    info!(
        "streamer is writing {} formatted params to file: {}",
        if write_raw { "raw" } else { "non-raw" },
        path_after
    );

    let file_after = File::create(&path_after).unwrap_or_else(|e| {
        panic!(
            "failed to create 'after' params file `{}`: {}",
            path_after, e
        );
    });

    streamer
        .process(file_after, CHUNK_SIZE)
        .expect("failed to convert");

    info!(
        "successfully converted, dt_total={}s",
        start_total.elapsed().as_secs()
    );
}

/// If `raw_subgroup_checks` is true, then `verify_contribution` ensures that the G1 points of the 'after' contribution
/// are in the correct subgroup. This is expensive, so the 'before' contribution is not checked. This assumes that all
/// 'after' contributions will be separately verified, and ensures that the subgroup check will happen once (but no
/// more). This means the very first 'before' params will not have the subgroup check. However, the verifier will have
/// constructed these deterministically such that they are known to be in the subgroup.
fn verify_contribution(
    path_before: &str,
    path_after: &str,
    participant_contrib: [u8; 64],
    raw_subgroup_checks: bool,
) {
    // Each reader thread sends either its deserialized params or an error.
    #[allow(clippy::large_enum_variant)]
    enum Message {
        Done(MPCSmall),
        Error(io::Error),
    }

    let start_total = Instant::now();

    info!(
        "verifying contribution:\n before: {}\n after: {}\n contrib: {}",
        path_before,
        path_after,
        hex_string(&participant_contrib)
    );

    // Read the 'before' and 'after' params concurrently on two threads.
    let (before_tx, before_rx) = channel::<Message>();
    let (after_tx, after_rx) = channel::<Message>();

    let path_before = path_before.to_string();
    let path_after = path_after.to_string();

    let before_thread: JoinHandle<()> = thread::spawn(move || {
        let start_read = Instant::now();
        // File format is inferred from the filename conventions.
        let is_large = path_before.contains("large");
        let is_raw = path_before.ends_with("raw");
        if !is_raw {
            warn!("using non-raw 'before' params");
        }
        if is_raw {
            warn!("skipping subgroup checks when deserializing small-raw 'before' params");
        }

        let read_res: io::Result<MPCSmall> = if is_large {
            info!(
                "reading large 'before' params as `MPCSmall`: {}",
                path_before
            );
            read_small_params_from_large_file(&path_before)
        } else {
            info!(
                "reading small 'before' params as `MPCSmall`: {}",
                path_before
            );
            File::open(&path_before).and_then(|file| {
                let mut reader = BufReader::with_capacity(1024 * 1024, file);
                // 'before' params never get the subgroup check (see fn docs).
                MPCSmall::read(&mut reader, is_raw, false)
            })
        };

        match read_res {
            Ok(params) => {
                let dt_read = start_read.elapsed().as_secs();
                info!("successfully read 'before' params, dt_read={}s", dt_read);
                before_tx.send(Message::Done(params)).expect("send failure");
            }
            Err(e) => {
                error!("failed to read 'before' params: {}", e);
                before_tx.send(Message::Error(e)).expect("send failure");
            }
        };
    });

    let after_thread: JoinHandle<()> = thread::spawn(move || {
        let start_read = Instant::now();
        let is_large = path_after.contains("large");
        let is_raw = path_after.ends_with("raw");
        if !is_raw {
            warn!("using non-raw 'after' params");
        }
        if is_raw && !raw_subgroup_checks {
            warn!("skipping subgroup checks when deserializing small-raw 'after' params")
        }

        let read_res: io::Result<MPCSmall> = if is_large {
            info!("reading large 'after' params as `MPCSmall`: {}", path_after);
            read_small_params_from_large_file(&path_after)
        } else {
            info!("reading small 'after' params as `MPCSmall`: {}", path_after);
            File::open(&path_after).and_then(|file| {
                let mut reader = BufReader::with_capacity(1024 * 1024, file);
                MPCSmall::read(&mut reader, is_raw, raw_subgroup_checks)
            })
        };

        match read_res {
            Ok(params) => {
                let dt_read = start_read.elapsed().as_secs();
                info!("successfully read 'after' params, dt_read={}s", dt_read);
                after_tx.send(Message::Done(params)).expect("send failure");
            }
            Err(e) => {
                error!("failed to read 'after' params: {}", e);
                after_tx.send(Message::Error(e)).expect("send failure");
            }
        };
    });

    let mut before_params: Option<MPCSmall> = None;
    let mut after_params: Option<MPCSmall> = None;

    // Poll both channels until each reader thread has delivered its params;
    // a reader error aborts verification immediately via panic.
    loop {
        if before_params.is_none() {
            match before_rx.try_recv() {
                Ok(Message::Done(params)) => {
                    before_params = Some(params);
                    info!("received 'before' params from thread");
                }
                Ok(Message::Error(e)) => panic!("'before' thread panic-ed: {}", e),
                Err(TryRecvError::Disconnected) => panic!("'before' thread disconnected"),
                Err(TryRecvError::Empty) => {}
            };
        }

        if after_params.is_none() {
            match after_rx.try_recv() {
                Ok(Message::Done(params)) => {
                    after_params = Some(params);
                    info!("received 'after' params from thread");
                }
                Ok(Message::Error(e)) => panic!("'after' thread panic-ed: {}", e),
                Err(TryRecvError::Disconnected) => panic!("'after' thread disconnected"),
                Err(TryRecvError::Empty) => {}
            };
        }

        if before_params.is_some() && after_params.is_some() {
            break;
        }

        thread::sleep(Duration::from_secs(3));
    }

    before_thread.join().expect("thread join failure");
    after_thread.join().expect("thread join failure");

    info!("verifying contribution");
    let start_verification = Instant::now();

    // Recompute the contribution digest from the two param sets and compare
    // it against the digest the participant published.
    let calculated_contrib = phase2::small::verify_contribution_small(
        &before_params.expect("before params failure"),
        &after_params.expect("after params failure"),
    )
    .expect("failed to calculate expected contribution");

    assert_eq!(
        &participant_contrib[..],
        &calculated_contrib[..],
        "provided contribution hash does not match expected contribution hash \
        \n\tprovided: {}\n\texpected: {}",
        hex_string(&participant_contrib),
        hex_string(&calculated_contrib)
    );

    info!(
        "successfully verified contribution, dt_verify={}s, dt_total={}s",
        start_verification.elapsed().as_secs(),
        start_total.elapsed().as_secs()
    );
}

// Non-raw only.
// Reads one uncompressed G1 point; the point at infinity is rejected as
// invalid data.
pub fn read_g1<R: Read>(mut reader: R) -> io::Result<G1Affine> {
    let mut affine_bytes = G1Uncompressed::empty();
    reader.read_exact(affine_bytes.as_mut())?;
    let affine = affine_bytes
        .into_affine()
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;

    if affine.is_zero() {
        Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "deserialized G1Affine is point at infinity",
        ))
    } else {
        Ok(affine)
    }
}

// Non-raw only.
// Reads one uncompressed G2 point; the point at infinity is rejected as
// invalid data.
pub fn read_g2<R: Read>(mut reader: R) -> io::Result<G2Affine> {
    let mut affine_bytes = G2Uncompressed::empty();
    reader.read_exact(affine_bytes.as_mut())?;
    let affine = affine_bytes
        .into_affine()
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;

    if affine.is_zero() {
        Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "deserialized G2Affine is point at infinity",
        ))
    } else {
        Ok(affine)
    }
}

// Seeks to an absolute file offset, treating a short seek as an error.
fn seek(file: &mut File, offset: u64) -> io::Result<()> {
    let pos = file.seek(SeekFrom::Start(offset))?;
    if pos != offset {
        Err(io::Error::new(
            io::ErrorKind::UnexpectedEof,
            format!("seek stopped early, reached: {}, expected: {}", pos, offset),
        ))
    } else {
        Ok(())
    }
}

// Snapshot of the interesting offsets and values in a params file; produced
// by `parse_small`/`parse_large` for inspection via the `parse` subcommand.
struct FileInfo {
    delta_g1_offset: u64,
    delta_g1: G1Affine,
    delta_g2: G2Affine,
    h_len_offset: u64,
    h_len: u64,
    h_first: G1Affine,
    h_last: G1Affine,
    l_len: u64,
    l_first: G1Affine,
    l_last: G1Affine,
    cs_hash: [u8; 64],
    contributions_len_offset: u64,
    contributions_len: u64,
}

impl Debug for FileInfo {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("FileInfo")
            .field("delta_g1_offset", &self.delta_g1_offset)
            .field("delta_g1", &self.delta_g1)
            .field("delta_g2", &self.delta_g2)
            .field("h_len_offset", &self.h_len_offset)
            .field("h_len", &self.h_len)
            .field("h_first", &self.h_first)
            .field("h_last", &self.h_last)
            .field("l_len", &self.l_len)
            .field("l_first", &self.l_first)
            .field("l_last", &self.l_last)
            // The constraint-system hash is rendered hex-encoded.
            .field("cs_hash", &hex_string(&self.cs_hash))
            .field("contributions_len_offset", &self.contributions_len_offset)
            .field("contributions_len", &self.contributions_len)
            .finish()
    }
}

impl FileInfo {
    // Parses offsets/values from a small (non-raw) params file, which begins
    // directly with delta_g1 and delta_g2 followed by the h and l vectors.
    fn parse_small(path: &str) -> Self {
        let mut file = File::open(path).expect("failed to open file");

        let delta_g1 = read_g1(&mut file).expect("failed to read delta_g1");
        let delta_g2 = read_g2(&mut file).expect("failed to read delta_g2");

        // Each vector is a u32 length prefix followed by its G1 elements;
        // only the first and last elements are read, the rest are seeked over.
        let h_len_offset = G1_SIZE + G2_SIZE;
        let h_len = file.read_u32::<BigEndian>().expect("failed to read h_len") as u64;
        let h_first = read_g1(&mut file).expect("failed to read first h element");
        let h_last_offset = h_len_offset + VEC_LEN_SIZE + (h_len - 1) * G1_SIZE;
        seek(&mut file, h_last_offset).expect("failed to seek to last h element");
        let h_last = read_g1(&mut file).expect("failed to read last h element");

        let l_len_offset = h_last_offset + G1_SIZE;
        let l_len = file.read_u32::<BigEndian>().expect("failed to read l_len") as u64;
        let l_first = read_g1(&mut file).expect("failed to read first l element");
        let l_last_offset = l_len_offset + VEC_LEN_SIZE + (l_len - 1) * G1_SIZE;
        seek(&mut file, l_last_offset).expect("failed to seek to last l element");
        let l_last = read_g1(&mut file).expect("failed to read last l element");

        let mut cs_hash = [0u8; 64];
        let cs_hash_offset = l_last_offset + G1_SIZE;
        seek(&mut file, cs_hash_offset).expect("failed to seek to cs_hash");
        file.read_exact(&mut cs_hash)
            .expect("failed to read cs_hash");

        let contributions_len_offset = cs_hash_offset + 64;
        let contributions_len = file
            .read_u32::<BigEndian>()
            .expect("failed to read contributions_len") as u64;

        FileInfo {
            delta_g1_offset: 0,
            delta_g1,
            delta_g2,
            h_len_offset,
            h_len,
            h_first,
            h_last,
            l_len,
            l_first,
            l_last,
            cs_hash,
            contributions_len_offset,
            contributions_len,
        }
    }

    // Parses offsets/values from a large params file; unlike the small
    // format, it also contains the ic, a, b_g1 and b_g2 vectors, which are
    // seeked over to reach the shared fields.
    fn parse_large(path: &str) -> Self {
        let mut file = File::open(path).expect("failed to open file");

        let delta_g1_offset = 2 * G1_SIZE + 2 * G2_SIZE;
        seek(&mut file, delta_g1_offset).expect("failed to seek to delta_g1");
        let delta_g1 = read_g1(&mut file).expect("failed to read delta_g1");
        let delta_g2 = read_g2(&mut file).expect("failed to read delta_g2");

        let ic_len_offset = delta_g1_offset + G1_SIZE + G2_SIZE;
        let ic_len = file.read_u32::<BigEndian>().expect("failed to read ic_len") as u64;

        let h_len_offset = ic_len_offset + VEC_LEN_SIZE + ic_len * G1_SIZE;
        seek(&mut file, h_len_offset).expect("failed to seek to h_len");
        let h_len = file.read_u32::<BigEndian>().expect("failed to read h_len") as u64;
        let h_first = read_g1(&mut file).expect("failed to read first h element");
        let h_last_offset = h_len_offset + VEC_LEN_SIZE + (h_len - 1) * G1_SIZE;
        seek(&mut file, h_last_offset).expect("failed to seek to last h element");
        let h_last = read_g1(&mut file).expect("failed to read last h element");

        let l_len_offset = h_last_offset + G1_SIZE;
        let l_len = file.read_u32::<BigEndian>().expect("failed to read l_len") as u64;
        let l_first = read_g1(&mut file).expect("failed to read first l element");
        let l_last_offset = l_len_offset + VEC_LEN_SIZE + (l_len - 1) * G1_SIZE;
        seek(&mut file, l_last_offset).expect("failed to seek to last l element");
        let l_last = read_g1(&mut file).expect("failed to read last l element");

        let a_len_offset = l_last_offset + G1_SIZE;
        seek(&mut file, a_len_offset).expect("failed to seek to a_len");
        let a_len = file.read_u32::<BigEndian>().expect("failed to read a_len") as u64;

        let b_g1_len_offset = a_len_offset + VEC_LEN_SIZE + a_len * G1_SIZE;
        seek(&mut file, b_g1_len_offset).expect("failed to seek to b_g1_len");
        let b_g1_len = file
            .read_u32::<BigEndian>()
            .expect("failed to read b_g1_len") as u64;

        let b_g2_len_offset = b_g1_len_offset + VEC_LEN_SIZE + b_g1_len * G1_SIZE;
        seek(&mut file, b_g2_len_offset).expect("failed to seek to b_g2_len");
        let b_g2_len = file
            .read_u32::<BigEndian>()
            .expect("failed to read b_g2_len") as u64;

        let mut cs_hash = [0u8; 64];
        let cs_hash_offset = b_g2_len_offset + VEC_LEN_SIZE + b_g2_len * G2_SIZE;
        seek(&mut file, cs_hash_offset).expect("failed to seek to cs_hash");
        file.read_exact(&mut cs_hash)
            .expect("failed to read cs_hash");

        let contributions_len_offset = cs_hash_offset + 64;
        let contributions_len = file
            .read_u32::<BigEndian>()
            .expect("failed to read contributions_len") as u64;

        FileInfo {
            delta_g1_offset,
            delta_g1,
            delta_g2,
            h_len_offset,
            h_len,
            h_first,
            h_last,
            l_len,
            l_first,
            l_last,
            cs_hash,
            contributions_len_offset,
            contributions_len,
        }
    }
}

// Writes info logs to stdout, error logs to stderr, and all logs to the file `log_filename` in
// `rust-fil-proofs`'s top-level directory.
fn setup_logger(log_filename: &str) {
    let log_file = File::create(&log_filename)
        .unwrap_or_else(|e| panic!("failed to create log file `{}`: {}", log_filename, e));

    let term_logger = TermLogger::new(
        LevelFilter::Info,
        simplelog::Config::default(),
        TerminalMode::Mixed,
    );
    let file_logger = WriteLogger::new(LevelFilter::Info, simplelog::Config::default(), log_file);
    CombinedLogger::init(vec![term_logger, file_logger]).unwrap_or_else(|e| {
        panic!("failed to create `CombinedLogger`: {}", e);
    });
}

// Computes the parameter-cache identifier for a circuit, as used by the
// parameter cache to name published params.
fn parameter_identifier<Tree: 'static + MerkleTreeTrait>(sector_size: u64, proof: Proof) -> String {
    match proof {
        Proof::Sdr => {
            let public_params = blank_sdr_poseidon_params::<Tree>(sector_size);

            <StackedCompound<Tree, Sha256Hasher> as CacheableParameters<
                StackedCircuit<Tree, Sha256Hasher>,
                _,
            >>::cache_identifier(&public_params)
        }
        Proof::Winning => {
            let public_params = blank_winning_post_poseidon_params::<Tree>(sector_size);
            <FallbackPoStCompound<Tree> as CacheableParameters<
                FallbackPoStCircuit<Tree>,
                _,
>>::cache_identifier(&public_params) } Proof::Window => { let public_params = blank_window_post_poseidon_params::<Tree>(sector_size); <FallbackPoStCompound<Tree> as CacheableParameters< FallbackPoStCircuit<Tree>, _, >>::cache_identifier(&public_params) } } } #[allow(clippy::cognitive_complexity)] fn main() { let new_command = SubCommand::with_name("new") .about("Create initial phase2 parameters for circuit") .arg( Arg::with_name("sdr") .long("sdr") .help("Generate SDR PoRep parameters"), ) .arg( Arg::with_name("winning") .long("winning") .help("Generate Winning PoSt parameters"), ) .arg( Arg::with_name("window") .long("window") .help("Generate Window PoSt parameters"), ) .group( ArgGroup::with_name("proof") .args(&["sdr", "winning", "window"]) .required(true) .multiple(false), ) .arg( Arg::with_name("2kib") .long("2kib") .help("Create circuit with 2KiB sector-size"), ) .arg( Arg::with_name("4kib") .long("4kib") .help("Create circuit with 4KiB sector-size"), ) .arg( Arg::with_name("16kib") .long("16kib") .help("Create circuit with 16KiB sector-size"), ) .arg( Arg::with_name("32kib") .long("32kib") .help("Create circuit with 32KiB sector-size"), ) .arg( Arg::with_name("8mib") .long("8mib") .help("Create circuit with 8MiB sector-size"), ) .arg( Arg::with_name("16mib") .long("16mib") .help("Create circuit with 16MiB sector-size"), ) .arg( Arg::with_name("512mib") .long("512mib") .help("Create circuit with 512MiB sector-size"), ) .arg( Arg::with_name("1gib") .long("1gib") .help("Create circuit with 1GiB sector-size"), ) .arg( Arg::with_name("16gib") .long("16gib") .help("Create circuit with 16GiB sector-size"), ) .arg( Arg::with_name("32gib") .long("32gib") .help("Create circuit with 32GiB sector-size"), ) .arg( Arg::with_name("64gib") .long("64gib") .help("Create circuit with 64GiB sector-size"), ) .group( ArgGroup::with_name("sector-size") .args(&[ "2kib", "4kib", "16kib", "32kib", "8mib", "16mib", "512mib", "1gib","16gib", "32gib", "64gib", ]) .required(true) 
.multiple(false), ); let contribute_command = SubCommand::with_name("contribute") .about("Contribute to parameters") .arg( Arg::with_name("path-before") .required(true) .help("The path to the previous participant's params file"), ) .arg( Arg::with_name("seed") .long("seed") .takes_value(true) .help("Sets the contribution entropy (32 hex bytes)"), ); let verify_command = SubCommand::with_name("verify") .about("Verifies that a contribution transitions one set of params to another") .arg( Arg::with_name("path-after") .required(true) .help("The path to the params file containing the contribution to be verified"), ) .arg( Arg::with_name("skip-raw-subgroup-checks") .long("skip-raw-subgroup-checks") .short("s") .help("Skip the slow subgroup check when deserializing each raw G1 point"), ); let small_command = SubCommand::with_name("small") .about("Copies a large params file into the small file format") .arg( Arg::with_name("large-path") .required(true) .help("The path to the large params file"), ); let convert_command = SubCommand::with_name("convert") .about("Converts a small params file to and from raw format") .arg( Arg::with_name("path-before") .required(true) .help("The path to the small params file to convert."), ); let merge_command = SubCommand::with_name("merge") .about("Merges small-nonraw and large params into a new large file") .arg( Arg::with_name("path-small") .required(true) .help("Path to the small params file."), ) .arg( Arg::with_name("path-large") .required(true) .help("Path to the large params file."), ); let split_keys_command = SubCommand::with_name("split-keys") .about("Splits the keys from the trusted setup into parameter files") .arg( Arg::with_name("input-path") .required(true) .help("The path to the file that contains all the data."), ); let parse_command = SubCommand::with_name("parse") .about("Parses file info from large or small-nonraw params") .arg( Arg::with_name("path") .required(true) .help("Path to params file."), ); let verify_g1_command 
= SubCommand::with_name("verify-g1") .about("Verifies that all points in small-raw params are valid G1") .arg( Arg::with_name("path") .required(true) .help("Path to small-raw params file."), ); let matches = App::new("phase2") .version("1.0") .setting(AppSettings::ArgRequiredElseHelp) .setting(AppSettings::SubcommandRequired) .subcommand(new_command) .subcommand(contribute_command) .subcommand(verify_command) .subcommand(small_command) .subcommand(convert_command) .subcommand(merge_command) .subcommand(split_keys_command) .subcommand(parse_command) .subcommand(verify_g1_command) .get_matches(); if let (subcommand, Some(matches)) = matches.subcommand() { match subcommand { "new" => { let proof = if matches.is_present("sdr") { Proof::Sdr } else if matches.is_present("winning") { Proof::Winning } else { Proof::Window }; // Default to using Poseidon for the hasher. let hasher = Hasher::Poseidon; let sector_size = if matches.is_present("2kib") { Sector::SectorSize2KiB } else if matches.is_present("4kib") { Sector::SectorSize4KiB } else if matches.is_present("16kib") { Sector::SectorSize16KiB } else if matches.is_present("32kib") { Sector::SectorSize32KiB } else if matches.is_present("8mib") { Sector::SectorSize8MiB } else if matches.is_present("16mib") { Sector::SectorSize16MiB } else if matches.is_present("512mib") { Sector::SectorSize512MiB } else if matches.is_present("1gib") { Sector::SectorSize1GiB } else if matches.is_present("16gib") { Sector::SectorSize16GiB } else if matches.is_present("32gib") { Sector::SectorSize32GiB } else { Sector::SectorSize64GiB }; let head = get_head_commit(); let mut log_filename = params_filename( proof, hasher, sector_size, &head, 0, ParamSize::Large, false, ); log_filename.push_str(".log"); setup_logger(&log_filename); with_shape!( sector_size.as_u64(), create_initial_params, proof, hasher, sector_size ); } "contribute" => { let path_before = matches .value_of("path-before") .expect("path-before match failure"); let seed: 
Option<[u8; 32]> = matches.value_of("seed").map(|hex_str| { assert_eq!( hex_str.chars().count(), 64, "`seed` argument must be exactly 64 characters long, found {} characters", hex_str.chars().count() ); let mut seed = [0u8; 32]; let seed_vec = hex::decode(hex_str).unwrap_or_else(|_| { panic!("`seed` argument is not a valid hex string: {}", hex_str); }); seed.copy_from_slice(&seed_vec[..]); seed }); let (proof, hasher, sector_size, head, param_num_before, _param_size, _read_raw) = parse_params_filename(path_before); let param_num = param_num_before + 1; // Default to small contributions. let mut log_filename = params_filename( proof, hasher, sector_size, &head, param_num, ParamSize::Small, true, ); log_filename.push_str(".log"); setup_logger(&log_filename); contribute_to_params(path_before, seed); } "verify" => { let path_after = matches .value_of("path-after") .expect("path-after match failure"); let raw_subgroup_checks = !matches.is_present("skip-raw-subgroup-checks"); assert!( Path::new(&path_after).exists(), "'after' params path does not exist: `{}`", path_after ); let (proof, hasher, sector_size, head, param_num_after, _, _) = parse_params_filename(path_after); let log_filename = format!("{}_verify.log", path_after); setup_logger(&log_filename); // Default to using small-raw before params, fallback to non-raw params if raw do // not exist. 
let path_before = { let small_raw = params_filename( proof, hasher, sector_size, &head, param_num_after - 1, ParamSize::Small, true, ); let small_nonraw = small_raw.trim_end_matches("_raw").to_string(); let large = small_nonraw.replace("small", "large"); if Path::new(&small_raw).exists() { info!("found small-raw 'before' params: {}", small_raw); small_raw } else if Path::new(&small_nonraw).exists() { info!("found small-nonraw 'before' params: {}", small_nonraw); small_nonraw } else if Path::new(&large).exists() { info!("found large 'before' params: {}", large); large } else { let err_msg = format!( "no 'before' params found, attempted: {}, {}, {}", small_raw, small_nonraw, large ); error!("{}", err_msg); panic!("{}", err_msg); } }; let mut contrib_path = format!("{}.contrib", path_after); // It is possible that the .contrib file was generated using a param-size or // serialization format that differs from those in `path_after`, in which case we // need to search for the .contrib file. if !Path::new(&contrib_path).exists() { warn!("contrib file not found: {}", contrib_path); let mut found_contrib_file = false; for _ in 0..2 { contrib_path = if contrib_path.ends_with("large.contrib") { contrib_path.replace("large", "small") } else if contrib_path.ends_with("small.contrib") { contrib_path.replace("small", "small_raw") } else { contrib_path.replace("small_raw", "large") }; info!("trying contrib file: {}", contrib_path); if Path::new(&contrib_path).exists() { found_contrib_file = true; break; } warn!("contrib file not found"); } if !found_contrib_file { error!("no contrib file found"); panic!("no contrib file found"); } } info!("using contrib file: {}", contrib_path); let contrib = { let mut bytes = [0u8; 64]; let hex_str = fs::read_to_string(&contrib_path).unwrap_or_else(|e| { panic!("failed to read contribution file `{}`: {}", contrib_path, e); }); let bytes_vec = hex::decode(&hex_str).unwrap_or_else(|_| { panic!( "contribution found in file `{}` is not a valid hex 
string: {}", contrib_path, hex_str ); }); let n_bytes = bytes_vec.len(); assert_eq!( n_bytes, 64, "contribution file's `{}` hex string must represent 64 bytes, \ found {} bytes", contrib_path, n_bytes ); bytes.copy_from_slice(&bytes_vec[..]); bytes }; verify_contribution(&path_before, &path_after, contrib, raw_subgroup_checks); } "small" => { let large_path = matches .value_of("large-path") .expect("large-path match failure"); let (proof, hasher, sector_size, head, param_num, param_size, read_raw) = parse_params_filename(large_path); assert!(param_size.is_large(), "param file is not in large format"); assert!(!read_raw, "param file is in raw format"); let small_path = params_filename( proof, hasher, sector_size, &head, param_num, ParamSize::Small, false, ); println!("reading small params from large file: {}", large_path); let small_params = read_small_params_from_large_file(&large_path).unwrap_or_else(|e| { panic!("failed to read large params `{}`: {}", large_path, e) }); let start_read = Instant::now(); let small_file = File::create(&small_path).unwrap_or_else(|e| { panic!("failed to create small params file `{}`: {}", small_path, e); }); println!( "successfully read small params from large, dt_read={}s", start_read.elapsed().as_secs() ); let mut writer = BufWriter::with_capacity(1024 * 1024, small_file); println!("writing small params to file: {}", small_path); small_params.write(&mut writer).unwrap_or_else(|e| { panic!( "failed to write small params to file `{}`: {}", small_path, e ); }); println!("successfully wrote small params"); } "convert" => { let path_before = matches .value_of("path-before") .expect("path-before match failure"); let log_filename = format!("{}_convert.log", path_before); setup_logger(&log_filename); convert_small(path_before) } "merge" => { let path_small = matches .value_of("path-small") .expect("path-small match failure"); let path_large_old = matches .value_of("path-large") .expect("path-large match failure"); assert!( 
Path::new(path_small).exists(), "small file does not exist: {}", path_small ); assert!( Path::new(path_large_old).exists(), "large file does not exist: {}", path_large_old ); let ( proof_small, hasher_small, sector_size_small, head_small, param_num_small, param_size_small, is_raw_small, ) = parse_params_filename(path_small); let ( proof_large, hasher_large, sector_size_large, head_large, param_num_large, param_size_large, _, ) = parse_params_filename(path_large_old); assert!( param_size_small.is_small(), "small params file is not small" ); assert!( param_size_large.is_large(), "large params file is not large" ); assert_eq!( proof_small, proof_large, "small and large params do not have the same proof name" ); assert_eq!( hasher_small, hasher_large, "small and large params do not have the same hasher name" ); assert_eq!( sector_size_small, sector_size_large, "small and large params do not have the same sector-size name" ); assert_eq!( head_small, head_large, "small and large params do not have the same head commit" ); assert!( param_num_small > param_num_large, "small params must contain more contributions than the large" ); assert!(!is_raw_small, "small params must be non-raw"); let FileInfo { h_len: h_len_small, l_len: l_len_small, cs_hash: cs_hash_small, contributions_len_offset: contributions_len_offset_small, contributions_len: contributions_len_small, .. } = FileInfo::parse_small(&path_small); println!("parsed small file"); let FileInfo { delta_g1_offset: delta_g1_offset_large, h_len_offset: h_len_offset_large, h_len: h_len_large, l_len: l_len_large, cs_hash: cs_hash_large, contributions_len_offset: contributions_len_offset_large, contributions_len: contributions_len_large, .. 
} = FileInfo::parse_large(&path_large_old); println!("parsed large file"); assert_eq!( h_len_small, h_len_large, "parsed files have different h_len: small: {}, large: {}", h_len_small, h_len_large ); let h_len = h_len_small; assert_eq!( l_len_small, l_len_large, "parsed files have different l_len: small: {}, large: {}", l_len_small, l_len_large, ); let l_len = l_len_small; assert_eq!( &cs_hash_small[..], &cs_hash_large[..], "parsed files have different cs_hash: small: {:?}, large: {:?}", &cs_hash_small[..], &cs_hash_large[..], ); assert!( contributions_len_small > contributions_len_large, "small file does not contain additional contributions, small: {}, large: {}", contributions_len_small, contributions_len_large ); println!("files are consistent"); println!("copying large file"); let path_large_new = path_small.replace("small", "large"); let large_len_old = fs::copy(&path_large_old, &path_large_new).expect("failed to copy large file"); let append_len = (contributions_len_small - contributions_len_large) * PUBKEY_SIZE; let large_len_new = large_len_old + append_len; let mut file_large_new = OpenOptions::new() .write(true) .open(&path_large_new) .expect("failed to open new large file"); file_large_new .set_len(large_len_new) .expect("failed to set new large file length"); println!("merging small file into copy"); let mut file_small = File::open(path_small).expect("failed to open small file"); // Copy delta_g1/g2 let mut delta_bytes = (&mut file_small).take(G1_SIZE + G2_SIZE); seek(&mut file_large_new, delta_g1_offset_large) .expect("failed to seek to delta_g1 in new file"); io::copy(&mut delta_bytes, &mut file_large_new) .expect("failed to merge delta_g1/g2"); println!("merged delta_g1/g2"); // Copy h_len, h, l_len, l let mut h_l_bytes = (&mut file_small) .take(VEC_LEN_SIZE + h_len * G1_SIZE + VEC_LEN_SIZE + l_len * G1_SIZE); seek(&mut file_large_new, h_len_offset_large) .expect("failed to seek to h in new file"); io::copy(&mut h_l_bytes, &mut file_large_new) 
.expect("failed to merge h, h_len, and l"); println!("merged h_len, h, l_len, and l"); // Copy contributions_len and contributions seek(&mut file_small, contributions_len_offset_small) .expect("failed to seek to contributions_len in small file"); seek(&mut file_large_new, contributions_len_offset_large) .expect("failed to seek to contributions_len in new file"); io::copy(&mut file_small, &mut file_large_new) .expect("failed to merge contributions"); println!("merged contributions"); println!("successfully merged"); } "split-keys" => { let input_path = matches .value_of("input-path") .expect("failed to read input-path argument"); println!("reading params: {}", input_path); // Get the identifier for the output files based in the input file's name let (proof, _hasher, sector_size_enum, _head, param_num, param_size, _read_raw) = parse_params_filename(input_path); assert!(param_size.is_large(), "params must be large"); let sector_size = sector_size_enum.as_u64(); let identifier = with_shape!(sector_size, parameter_identifier, sector_size, proof); let mut input_file = File::open(input_path) .unwrap_or_else(|_| panic!("failed to open {}", input_path)); // Extract the vk data into its own file. { let vk_data = groth16::VerifyingKey::<Bls12>::read(&input_file) .expect("failed to deserialize vk from input file"); let vk_path = verifying_key_id(&identifier); println!("writing verifying key to file: {}", vk_path); let mut vk_file = File::create(&vk_path) .unwrap_or_else(|_| panic!("failed to create {}", vk_path)); vk_data.write(&mut vk_file).unwrap_or_else(|_| { panic!("failed to write verification keys to file {}", vk_path) }); let vk_file_size = vk_file .seek(SeekFrom::Current(0)) .unwrap_or_else(|_| panic!("failed to seek in {}", vk_path)); println!("size of the verifying key is {} bytes", vk_file_size); } // The params file is the trusted setup phase2 result without the contributions // at the end of the file. 
{ let params_path = parameter_id(&identifier); println!("writing parameters to file: {}", params_path); let mut params_file = File::create(&params_path) .unwrap_or_else(|_| panic!("failed to create {}", params_path)); // input_file_size - cs_hash - contributions_length - // (num_contributions * public_key_size) let params_file_size = input_file .metadata() .unwrap_or_else(|_| panic!("failed to get filesize of {}", input_path)) .len() - 64 - 4 - (param_num as u64 * 544); println!("size of the parameters file is {} bytes", params_file_size); // Make sure the cursor is at the beginning of the file (it was moved // during the extraction of the vk data) input_file .seek(SeekFrom::Start(0)) .expect("cannot seek to beginning of the input file"); io::copy( &mut Read::by_ref(&mut input_file).take(params_file_size), &mut params_file, ) .unwrap_or_else(|_| { panic!( "Failed to copy params from {} to {}", input_path, params_path ) }); } // Writing the contributions to disk is not needed for the final parameters, // they won't be published, they are only there for verification purpose. { let contribs_path = format!("v{}-{}.contribs", parameter_cache::VERSION, &identifier); println!("writing contributions to file: {}", contribs_path); let mut contribs_file = File::create(&contribs_path) .unwrap_or_else(|_| panic!("failed to create {}", contribs_path)); // The input file is already sought to the right offset, due to writing the // params file let contribs_file_size = io::copy(&mut input_file, &mut contribs_file) .unwrap_or_else(|_| { panic!( "Failed to copy contributions from {} to {}", input_path, contribs_path ) }); println!( "size of the contributions file is {} bytes", contribs_file_size ); } // The metadata is needed for the publication of the parameters. 
{ let meta_path = metadata_id(&identifier); println!("writing metadata to file: {}", meta_path); let mut meta_file = File::create(&meta_path) .unwrap_or_else(|_| panic!("failed to create {}", meta_path)); write!(&mut meta_file, r#"{{"sector_size":{}}}"#, sector_size).unwrap_or_else( |_| panic!("failed to write meta information to {}", meta_path), ); } // The info file contains the filename the parameter was created of. { let info_path = format!("v{}-{}.info", parameter_cache::VERSION, &identifier); println!("writing info to file: {}", info_path); let mut info_file = File::create(&info_path) .unwrap_or_else(|_| panic!("failed to create {}", info_path)); writeln!(&mut info_file, "{}", input_path) .unwrap_or_else(|_| panic!("failed to write info data to {}", info_path)); } } "parse" => { let path = matches.value_of("path").expect("path match failure"); let (_, _, _, _, _, size, raw) = parse_params_filename(&path); if raw { unimplemented!("`parse` command does not currently support raw params"); } let file_info = if size.is_large() { FileInfo::parse_large(&path) } else { FileInfo::parse_small(&path) }; println!("{:#?}", file_info); } "verify-g1" => { let path = matches.value_of("path").expect("path match failure"); let (_, _, _, _, _, _, raw) = parse_params_filename(&path); assert!( raw, "found non-raw params, `verify-g1` is relevant only when verifying small-raw \ params" ); let file = File::open(&path).expect("failed to open params file"); let mut reader = BufReader::with_capacity(1024 * 1024, file); println!("starting deserialization"); let start = Instant::now(); let _params = MPCSmall::read(&mut reader, raw, true).expect("mpc small read failure"); println!( "succesfully verified h and l G1 points, dt={}s", start.elapsed().as_secs() ); } _ => unreachable!(), } } }
use std::io::{stderr, Write};
use std::mem;

use peg_parser::{Parser, ParserData};

use crate::laze_parser::matcher::extract_ast;

use super::{
    dec::{self, ClassMemberList, Dec, DecData, DecList, Dec_},
    exp::{ASTExp, ASTExpData, ASTExpList, ASTExp_},
    field::{Field, FieldData, FieldList, Field_},
    ifelse::{IfElse, IfElseList},
    op::{Oper, OperList},
    stm::{Stm, StmData, StmList, Stm_},
    suffix::ASTExpSuffixList,
    ty::{Type, TypeData, TypeList, Type_},
    var::{Var, VarData, Var_},
};

/// A parsed program: the list of top-level declarations.
pub type AST = dec::DecList;

/// Untyped AST payload produced by the PEG parser.
///
/// The parser is generic over its node type, so every syntactic category the
/// grammar can yield — plus the list forms of each — is wrapped in one variant
/// here. The `get_*_data` accessors below unwrap a node back into its concrete
/// category; on a variant mismatch they report to stderr and substitute a
/// neutral placeholder (a `*Data::None` node or an empty list) instead of
/// panicking, so parsing can continue.
#[derive(Clone, Debug, PartialEq)]
pub enum ASTNode {
    Dec(Dec),
    Stm(Stm),
    Exp(ASTExp),
    Type(Type),
    Field(Field),
    String(String),
    Var(Var),
    IfElse(IfElse),
    Op(Oper),
    DecList(DecList),
    StmList(StmList),
    ExpList(ASTExpList),
    FieldList(FieldList),
    TypeList(TypeList),
    StringList(Vec<String>),
    IfElseList(IfElseList),
    ExpSuffixList(ASTExpSuffixList),
    OperList(OperList),
    ClassMemberList(ClassMemberList),
    None,
}

impl ASTNode {
    /// Unwraps a `Var` node; on mismatch reports to stderr and returns a
    /// placeholder `VarData::None` at `pos`.
    /// NOTE(review): the error text says "declaration" for a variable —
    /// looks like a copy-paste; runtime message left unchanged here.
    pub fn get_var_data(self, pos: (usize, usize), name: &str, rule: &str) -> Var {
        if let ASTNode::Var(var) = self {
            var
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a declaration.");
            Box::new(Var_ {
                pos,
                data: VarData::None,
            })
        }
    }

    /// Unwraps a `Dec` node. A one-element `DecList` is also accepted and its
    /// single element is moved out (via `mem::swap`, since `Vec` indexing
    /// cannot move). Anything else reports to stderr and yields
    /// `DecData::None`.
    pub fn get_dec_data(self, pos: (usize, usize), name: &str, rule: &str) -> Dec {
        match self {
            ASTNode::Dec(dec) => dec,
            ASTNode::DecList(mut declist) => {
                if declist.len() == 1 {
                    // Swap a placeholder into the list so the real element can
                    // be moved out without cloning.
                    let mut temp_dec = Box::new(Dec_ {
                        pos,
                        data: DecData::None,
                    });
                    mem::swap(&mut declist[0], &mut temp_dec);
                    temp_dec
                } else {
                    let _ = writeln!(stderr(), "{name} in {rule} is not a declaration.");
                    Box::new(Dec_ {
                        pos,
                        data: DecData::None,
                    })
                }
            }
            _ => {
                let _ = writeln!(stderr(), "{name} in {rule} is not a declaration.");
                Box::new(Dec_ {
                    pos,
                    data: DecData::None,
                })
            }
        }
    }

    /// Unwraps a `DecList`; on mismatch reports to stderr and returns an
    /// empty list.
    pub fn get_declist_data(self, _pos: (usize, usize), name: &str, rule: &str) -> DecList {
        if let ASTNode::DecList(declist) = self {
            declist
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a declaration list.");
            vec![]
        }
    }

    /// Unwraps a `Stm` node. Mirrors `get_dec_data`: a one-element `StmList`
    /// is unwrapped to its single statement; anything else yields a
    /// `StmData::None` placeholder after reporting to stderr.
    pub fn get_stm_data(self, pos: (usize, usize), name: &str, rule: &str) -> Stm {
        match self {
            ASTNode::Stm(stm) => stm,
            ASTNode::StmList(mut stmlist) => {
                if stmlist.len() == 1 {
                    // Move the single statement out by swapping in a placeholder.
                    let mut temp_stm = Box::new(Stm_ {
                        pos,
                        data: StmData::None,
                    });
                    mem::swap(&mut stmlist[0], &mut temp_stm);
                    temp_stm
                } else {
                    let _ = writeln!(stderr(), "{name} in {rule} is not a statement.");
                    Box::new(Stm_ {
                        pos,
                        data: StmData::None,
                    })
                }
            }
            _ => {
                let _ = writeln!(stderr(), "{name} in {rule} is not a statement.");
                Box::new(Stm_ {
                    pos,
                    data: StmData::None,
                })
            }
        }
    }

    /// Unwraps a `StmList`; on mismatch reports to stderr and returns an
    /// empty list.
    pub fn get_stmlist_data(self, _pos: (usize, usize), name: &str, rule: &str) -> StmList {
        if let ASTNode::StmList(stmlist) = self {
            stmlist
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a statement list.");
            vec![]
        }
    }

    /// Unwraps an `ASTExp` node. An `ExpList` is accepted too: an empty list
    /// yields the `ASTExpData::None` placeholder, and a list of one OR MORE
    /// elements yields its first element (the multi-element case is
    /// deliberately tolerated — note the silenced diagnostic below).
    pub fn get_exp_data(self, pos: (usize, usize), name: &str, rule: &str) -> ASTExp {
        match self {
            ASTNode::Exp(exp) => exp,
            ASTNode::ExpList(mut explist) => {
                let mut temp_exp = Box::new(ASTExp_ {
                    pos,
                    data: ASTExpData::None,
                });
                if explist.len() == 1 {
                    mem::swap(&mut explist[0], &mut temp_exp);
                } else if explist.len() == 0 {
                    // Empty list: fall through and return the placeholder.
                } else {
                    // Multi-element list: keep only the first expression.
                    // let _ = writeln!(stderr(), "{name} in {rule} is not an expression.");
                    mem::swap(&mut explist[0], &mut temp_exp);
                }
                temp_exp
            }
            _ => {
                let _ = writeln!(stderr(), "{name} in {rule} is not an expression.");
                return Box::new(ASTExp_ {
                    pos,
                    data: ASTExpData::None,
                });
            }
        }
    }

    /// Unwraps an `ASTExpList`; on mismatch reports to stderr and returns an
    /// empty list.
    pub fn get_explist_data(self, _pos: (usize, usize), name: &str, rule: &str) -> ASTExpList {
        if let ASTNode::ExpList(explist) = self {
            explist
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not an expression list.");
            vec![]
        }
    }

    /// Unwraps a `ClassMemberList`; on mismatch reports to stderr and returns
    /// an empty list. (The grammar error in the runtime message — "an class" —
    /// is pre-existing and left unchanged.)
    pub fn get_classmembers_data(
        self,
        _pos: (usize, usize),
        name: &str,
        rule: &str,
    ) -> ClassMemberList {
        if let ASTNode::ClassMemberList(members) = self {
            members
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not an class members list.");
            vec![]
        }
    }

    /// Unwraps a `Type` node; on mismatch reports to stderr and returns a
    /// `TypeData::None` placeholder at `pos`.
    pub fn get_ty_data(self, pos: (usize, usize), name: &str, rule: &str) -> Type {
        if let ASTNode::Type(ty) = self {
            ty
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a type.");
            Box::new(Type_ {
                pos,
                data: TypeData::None,
            })
        }
    }

    /// Unwraps a `TypeList`; on mismatch reports to stderr and returns an
    /// empty list.
    pub fn get_tylist_data(self, _pos: (usize, usize), name: &str, rule: &str) -> TypeList {
        if let ASTNode::TypeList(tylist) = self {
            tylist
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a type.");
            vec![]
        }
    }

    /// Unwraps a `Field` node; on mismatch reports to stderr and returns a
    /// `FieldData::None` placeholder at `pos`.
    pub fn get_field_data(self, pos: (usize, usize), name: &str, rule: &str) -> Field {
        if let ASTNode::Field(field) = self {
            field
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a field.");
            Box::new(Field_ {
                pos,
                data: FieldData::None,
            })
        }
    }

    /// Unwraps an `Oper` node; on mismatch reports to stderr and returns
    /// `Oper::None`.
    /// NOTE(review): the error text says "field" for an operator — looks like
    /// a copy-paste; runtime message left unchanged here.
    pub fn get_oper_data(self, _pos: (usize, usize), name: &str, rule: &str) -> Oper {
        if let ASTNode::Op(oper) = self {
            oper
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a field.");
            Oper::None
        }
    }

    /// Unwraps a `FieldList`; on mismatch reports to stderr and returns an
    /// empty list.
    pub fn get_fieldlist_data(self, _pos: (usize, usize), name: &str, rule: &str) -> FieldList {
        if let ASTNode::FieldList(fieldlist) = self {
            fieldlist
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a fieldlist.");
            vec![]
        }
    }

    /// Unwraps a `String` node; on mismatch reports to stderr and returns an
    /// empty string.
    pub fn get_string_data(self, _pos: (usize, usize), name: &str, rule: &str) -> String {
        if let ASTNode::String(str) = self {
            str
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a string.");
            "".to_string()
        }
    }

    /// Unwraps a `StringList`. A single `String` node is promoted to a
    /// one-element list; anything else reports to stderr and yields an empty
    /// list.
    pub fn get_stringlist_data(self, _pos: (usize, usize), name: &str, rule: &str) -> Vec<String> {
        match self {
            ASTNode::StringList(strlist) => strlist,
            ASTNode::String(str) => vec![str],
            _ => {
                let _ = writeln!(stderr(), "{name} in {rule} is not a string list.");
                vec![]
            }
        }
    }

    /// Unwraps an `ExpSuffixList`; on mismatch reports to stderr and returns
    /// an empty list. (Runtime message says "string list" — pre-existing
    /// copy-paste, left unchanged.)
    pub fn get_suffixlist_data(
        self,
        _pos: (usize, usize),
        name: &str,
        rule: &str,
    ) -> ASTExpSuffixList {
        if let ASTNode::ExpSuffixList(suffixlist) = self {
            suffixlist
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a string list.");
            vec![]
        }
    }

    /// Unwraps an `IfElseList`; on mismatch reports to stderr and returns an
    /// empty list. (Runtime message says "string list" — pre-existing
    /// copy-paste, left unchanged.)
    pub fn get_ifelselist_data(self, _pos: (usize, usize), name: &str, rule: &str) -> IfElseList {
        if let ASTNode::IfElseList(ifelselist) = self {
            ifelselist
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a string list.");
            vec![]
        }
    }

    /// Unwraps an `OperList`; on mismatch reports to stderr and returns an
    /// empty list. (Runtime message says "string list" — pre-existing
    /// copy-paste, left unchanged.)
    pub fn get_operlist_data(self, _pos: (usize, usize), name: &str, rule: &str) -> OperList {
        if let ASTNode::OperList(oplist) = self {
            oplist
        } else {
            let _ = writeln!(stderr(), "{name} in {rule} is not a string list.");
            vec![]
        }
    }
}

/// Glue that lets the generic PEG parser build `ASTNode`s: literal strings
/// become `String` nodes, "no value" is the `None` variant, and named rules
/// are converted by `extract_ast`.
impl ParserData for ASTNode {
    fn string(_: (usize, usize), str: String) -> Self {
        Self::String(str)
    }
    fn null() -> Self {
        Self::None
    }
    fn data(pos: (usize, usize), name: &str, parser: &mut Parser<Self>) -> Self {
        // Delegates rule-specific AST construction to the matcher module.
        extract_ast(pos, name, parser)
    }
    fn is_null(&self) -> bool {
        if let Self::None = self {
            true
        } else {
            false
        }
    }
}
/// Placeholder for part 1 of the puzzle — not yet implemented.
///
/// Always returns the sad-face sentinel `":("` regardless of the input.
/// The parameter is underscored to silence the unused-variable warning
/// while keeping the call signature stable for the puzzle runner.
pub fn solve_puzzle_part_1(_input: &str) -> String {
    ":(".to_string()
}

/// Placeholder for part 2 of the puzzle — not yet implemented.
///
/// Always returns the sad-face sentinel `":("` regardless of the input.
pub fn solve_puzzle_part_2(_input: &str) -> String {
    ":(".to_string()
}
use std::fs::File;
use std::io::Error;
use std::io::prelude::*;

/// Entry point: prints both spreadsheet-checksum answers for `input.txt`,
/// or the I/O error if the file cannot be read.
fn main() {
    match get_checksum1_from_file("input.txt") {
        Ok(checksum) => println!("Part 1 answer: {}", checksum),
        Err(e) => println!("{}", e),
    }
    match get_checksum2_from_file("input.txt") {
        Ok(checksum) => println!("Part 2 answer: {}", checksum),
        Err(e) => println!("{}", e),
    }
}

/// Reads the entire contents of `filename` into a `String`.
fn read_input(filename: &str) -> Result<String, Error> {
    let mut input = String::new();
    File::open(filename)?.read_to_string(&mut input)?;
    Ok(input)
}

/// Parses a line's whitespace-separated tokens as `i32`s.
/// Unparsable tokens count as 0, matching the original behavior.
fn parse_numbers(line: &str) -> Vec<i32> {
    line.split_whitespace()
        .map(|s| s.parse().unwrap_or(0))
        .collect()
}

/// Part-1 checksum for one line: the difference between its largest and
/// smallest number. An empty line yields 0.
fn get_line_checksum1(line: &str) -> i32 {
    let numbers = parse_numbers(line);
    match (numbers.iter().max(), numbers.iter().min()) {
        (Some(high), Some(low)) => high - low,
        // No numbers on the line.
        _ => 0,
    }
}

/// Part-1 checksum of the whole input: sum of per-line checksums.
fn get_checksum1(input: String) -> i32 {
    input.lines().map(get_line_checksum1).sum()
}

/// Reads `filename` and computes the part-1 checksum.
fn get_checksum1_from_file(filename: &str) -> Result<i32, Error> {
    Ok(get_checksum1(read_input(filename)?))
}

/// Part-2 checksum for one line: for the first ordered pair `(a, b)` with
/// `a` evenly divisible by `b` (scanning in the same i/j order as before),
/// return `a / b`; 0 if no such pair exists.
fn get_line_checksum2(line: &str) -> i32 {
    let numbers = parse_numbers(line);
    for (i, &ni) in numbers.iter().enumerate() {
        for (j, &nj) in numbers.iter().enumerate() {
            if i != j && is_divisible(ni, nj) {
                return ni / nj;
            }
        }
    }
    0
}

/// True when `x` divides evenly by `y`; false when `y` is 0.
///
/// Fixed: the previous implementation compared `x as f32 / y as f32` against
/// the truncated integer quotient. `f32` has only 24 bits of mantissa, so for
/// operands above ~2^24 the float round-trip can misreport divisibility.
/// Integer remainder is exact for all `i32` inputs.
fn is_divisible(x: i32, y: i32) -> bool {
    y != 0 && x % y == 0
}

/// Part-2 checksum of the whole input: sum of per-line checksums.
fn get_checksum2(input: String) -> i32 {
    input.lines().map(get_line_checksum2).sum()
}

/// Reads `filename` and computes the part-2 checksum.
fn get_checksum2_from_file(filename: &str) -> Result<i32, Error> {
    Ok(get_checksum2(read_input(filename)?))
}
#![allow(unused_variables)]
use crate::k8sserver;
use seahorse::{Command, Context, Flag, FlagType};

/// Builds the `server` CLI subcommand, with an optional `--addr`/`-a` flag
/// for the listen address.
pub fn server_command() -> Command {
    Command::new("server")
        .action(server_action)
        .flag(Flag::new("addr", FlagType::String).alias("a"))
}

/// Starts the server on the address given by `--addr`, defaulting to
/// `0.0.0.0:8080` when the flag is absent or invalid.
fn server_action(c: &Context) {
    // `unwrap_or_else` replaces the previous `.ok().or(Some(..)).unwrap()`
    // chain: same result, no panic path, and the default string is only
    // allocated when actually needed.
    let addr = c
        .string_flag("addr")
        .unwrap_or_else(|_| "0.0.0.0:8080".to_owned());
    // Result intentionally discarded, as before.
    let _ = k8sserver::start_server(addr);
}
use crate::{consts::VIEWPORT_ROTATE_SNAP_INTERVAL, frontend::layer_panel::*};
use glam::{DAffine2, DVec2};
use graphene::{
    layers::{Layer, LayerData as DocumentLayerData},
    LayerId,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// Editor-side per-layer view state (selection, panel expansion, and the
/// layer's translation/rotation/scale as manipulated in the viewport).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Copy)]
pub struct LayerData {
    /// Whether the layer is currently selected.
    pub selected: bool,
    /// Whether the layer's entry is expanded in the layer panel.
    pub expanded: bool,
    /// Layer translation in document space.
    pub translation: DVec2,
    /// Layer rotation in radians.
    pub rotation: f64,
    /// When true, `snapped_angle` rounds the rotation to the snap interval.
    pub snap_rotate: bool,
    /// Uniform scale factor.
    pub scale: f64,
}

impl LayerData {
    /// Fresh, unselected layer state with identity transform components.
    pub fn new(expanded: bool) -> LayerData {
        LayerData {
            selected: false,
            expanded,
            translation: DVec2::ZERO,
            rotation: 0.,
            snap_rotate: false,
            scale: 1.,
        }
    }

    /// The rotation to actually apply: either the raw angle, or — when
    /// snapping is on — the angle rounded to the nearest multiple of
    /// `VIEWPORT_ROTATE_SNAP_INTERVAL` (converted to radians).
    pub fn snapped_angle(&self) -> f64 {
        let increment_radians: f64 = VIEWPORT_ROTATE_SNAP_INTERVAL.to_radians();
        if self.snap_rotate {
            (self.rotation / increment_radians).round() * increment_radians
        } else {
            self.rotation
        }
    }

    /// Compose this layer's transform with an extra `offset` translation.
    /// The multiplication order (scale, then offset, then rotation, then
    /// translation, applied right-to-left) is load-bearing — do not reorder.
    pub fn calculate_offset_transform(&self, offset: DVec2) -> DAffine2 {
        // TODO: replace with DAffine2::from_scale_angle_translation and fix the errors
        let offset_transform = DAffine2::from_translation(offset);
        let scale_transform = DAffine2::from_scale(DVec2::new(self.scale, self.scale));
        let angle_transform = DAffine2::from_angle(self.snapped_angle());
        let translation_transform = DAffine2::from_translation(self.translation);
        scale_transform * offset_transform * angle_transform * translation_transform
    }
}

/// Look up the `LayerData` for `path`, inserting a default (collapsed) entry
/// first if none exists yet.
pub fn layer_data<'a>(layer_data: &'a mut HashMap<Vec<LayerId>, LayerData>, path: &[LayerId]) -> &'a mut LayerData {
    if !layer_data.contains_key(path) {
        layer_data.insert(path.to_vec(), LayerData::new(false));
    }
    layer_data.get_mut(path).unwrap()
}

/// Build the frontend panel entry for one layer: display name, an SVG
/// thumbnail rendered from the layer's bounding box, and its path encoded
/// for the frontend.
pub fn layer_panel_entry(layer_data: &LayerData, transform: DAffine2, layer: &Layer, path: Vec<LayerId>) -> LayerPanelEntry {
    let layer_type: LayerType = (&layer.data).into();
    // Fall back to a generated name when the layer is unnamed.
    let name = layer.name.clone().unwrap_or_else(|| format!("Unnamed {}", layer_type));
    // Bounding box corners as (f64, f64) pairs; degenerate box at the origin
    // when the layer has no bounding box.
    let arr = layer.data.bounding_box(transform).unwrap_or([DVec2::ZERO, DVec2::ZERO]);
    let arr = arr.iter().map(|x| (*x).into()).collect::<Vec<(f64, f64)>>();
    let mut thumbnail = String::new();
    layer.data.clone().render(&mut thumbnail, &mut vec![transform]);
    // Column-major affine matrix serialized as a comma-separated list for
    // the SVG `matrix(...)` attribute.
    let transform = transform.to_cols_array().iter().map(ToString::to_string).collect::<Vec<_>>().join(",");
    let thumbnail = if let [(x_min, y_min), (x_max, y_max)] = arr.as_slice() {
        format!(
            r#"<svg xmlns="http://www.w3.org/2000/svg" viewBox="{} {} {} {}"><g transform="matrix({})">{}</g></svg>"#,
            x_min, y_min, x_max - x_min, y_max - y_min, transform, thumbnail,
        )
    } else {
        String::new()
    };
    // LayerIds are sent as (high u32, low u32) pairs because JSON consumers
    // on the JS side cannot represent u64 values losslessly.
    let path = path.iter().map(|id| ((id >> 32) as u32, (id << 32 >> 32) as u32)).collect::<Vec<_>>();
    LayerPanelEntry {
        name,
        visible: layer.visible,
        blend_mode: layer.blend_mode,
        opacity: layer.opacity,
        layer_type: (&layer.data).into(),
        layer_data: *layer_data,
        path,
        thumbnail,
    }
}
/// Classification of a lexed token.
///
/// Derives extended: the original only had `PartialEq, Debug`. A fieldless
/// classification enum should also be `Clone, Copy, Eq, Hash` so it can be
/// freely copied and used as a map key; all are backward compatible.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TokenKind {
    Identifier,
    Space,
    Separator,
    Newline,
    QuotedIdentifier,
    Unknown,
}

/// A lexed token: its kind, the half-open source span it covers, and the
/// raw text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Token {
    /// What kind of token this is.
    pub kind: TokenKind,
    /// Byte offset where the token starts in the source.
    pub start_position: usize,
    /// Byte offset where the token ends in the source.
    pub end_position: usize,
    /// The token's raw text.
    pub token: String,
}

impl Token {
    /// Construct a token from its parts.
    pub fn new(kind: TokenKind, start_position: usize, end_position: usize, token: String) -> Self {
        Token {
            kind,
            start_position,
            end_position,
            token,
        }
    }
}
extern crate advent_of_code_2017_day_15 as advent;

/// Solve both parts of Advent of Code 2017 day 15 with the puzzle's
/// generator seeds and print the answers.
fn main() {
    // Starting values for generators A and B from the puzzle input.
    let seed_a = 516;
    let seed_b = 190;

    let part_1 = advent::solve_puzzle_part_1(seed_a, seed_b);
    println!("the answer for part 1 is {}", part_1);

    let part_2 = advent::solve_puzzle_part_2(seed_a, seed_b);
    println!("the answer for part 2 is {}", part_2);
}
use crate::ast::rules;

use super::utils::interpret_rule;

// NOTE: these tests assert against the Debug formatting of the interpreter's
// environment map, so they are sensitive to the exact `RefCell`/`Number`
// representation.

#[test]
fn test_do_block() {
    // An assignment inside `do ... end` without `local` mutates the outer scope.
    let (_val, env) = interpret_rule("y = 3; do y = 5 end", rules::block);
    assert_eq!(env, r#"{"y": RefCell { value: Number(5.0) }}"#);
}

#[test]
fn test_do_block_local() {
    // `local` shadows `y` inside the block, leaving the outer binding untouched.
    let (_val, env) = interpret_rule("y = 3; do local y = 5 end", rules::block);
    assert_eq!(env, r#"{"y": RefCell { value: Number(3.0) }}"#);
}

#[test]
fn test_while() {
    // Loop body runs until the condition `y ~= 5` becomes false.
    let (_val, env) = interpret_rule("y = 3; while y ~= 5 do y = y + 1 end", rules::block);
    assert_eq!(env, r#"{"y": RefCell { value: Number(5.0) }}"#);
}

#[test]
fn test_if() {
    // Plain `if` with a true condition: body runs.
    let (_val, env) = interpret_rule("y = 3; if 5 == 5 then y = 5 end", rules::block);
    assert_eq!(env, r#"{"y": RefCell { value: Number(5.0) }}"#);
    // False condition: body skipped.
    let (_val, env) = interpret_rule("y = 3; if 5 ~= 5 then y = 5 end", rules::block);
    assert_eq!(env, r#"{"y": RefCell { value: Number(3.0) }}"#);
    // False condition with `else`: else branch runs.
    let (_val, env) = interpret_rule("y = 3; if 5 ~= 5 then y = 5 else y = 7 end", rules::block);
    assert_eq!(env, r#"{"y": RefCell { value: Number(7.0) }}"#);
    // `elseif` branch taken when the first condition is false.
    let (_val, env) = interpret_rule(
        "y = 3; if 5 ~= 5 then y = 5 elseif 5 == 5 then y = 7 end",
        rules::block,
    );
    assert_eq!(env, r#"{"y": RefCell { value: Number(7.0) }}"#);
    // First true branch wins even when a later condition is also true.
    let (_val, env) = interpret_rule(
        "y = 3; if 5 == 5 then y = 5 elseif 5 == 5 then y = 7 end",
        rules::block,
    );
    assert_eq!(env, r#"{"y": RefCell { value: Number(5.0) }}"#);
    // All conditions false: fall through to the final `else`.
    let (_val, env) = interpret_rule(
        "y = 3; if 5 ~= 5 then y = 5 elseif 3 == 5 then y = 7 else y = -1 end",
        rules::block,
    );
    assert_eq!(env, r#"{"y": RefCell { value: Number(-1.0) }}"#);
}

#[test]
fn test_numerical_for() {
    // NOTE(review): 48 = 3 + (0 + 1 + ... + 9), so the loop's upper bound
    // appears to be EXCLUSIVE here — standard Lua's numeric `for` is
    // inclusive of the limit. Confirm that this is the interpreter's
    // intended semantics rather than an off-by-one.
    let (_val, env) = interpret_rule("y = 3 for i = 0, 10 do y = y + i end", rules::block);
    assert_eq!(env, r#"{"y": RefCell { value: Number(48.0) }}"#);
    // Descending loop with explicit -1 step; 48 = 3 + (9 + 8 + ... + 0),
    // consistent with the exclusive-limit behavior above.
    let (_val, env) = interpret_rule("y = 3 for i = 9, -1, -1 do y = y + i end", rules::block);
    assert_eq!(env, r#"{"y": RefCell { value: Number(48.0) }}"#);
}
use crate::*;

/// Window and input events delivered by the platform backend, normalized
/// into a platform-independent representation.
#[derive(Eq, PartialEq, Clone)]
pub enum PlatformEvent {
    /// Close requested for the given window.
    WindowClose(WindowId),
    /// The given window was resized.
    WindowResize(WindowId),
    /// The cursor moved inside a window; `position` uses the crate's
    /// `LogicalPosition` coordinate type.
    CursorMove {
        window: WindowId,
        position: LogicalPosition,
    },
    /// A mouse button was pressed in the given window.
    MouseButtonDown {
        window: WindowId,
        button: MouseButton,
    },
    /// A mouse button was released in the given window.
    MouseButtonUp {
        window: WindowId,
        button: MouseButton,
    },
    /// A key was pressed. `platform_scancode` is the backend's raw code,
    /// `scancode` the normalized one; `vkey` is present only when a
    /// virtual-key mapping exists.
    KeyDown {
        window: WindowId,
        platform_scancode: u32,
        scancode: Scancode,
        vkey: Option<VirtualKey>,
    },
    /// A key was released; fields mirror `KeyDown`.
    KeyUp {
        window: WindowId,
        platform_scancode: u32,
        scancode: Scancode,
        vkey: Option<VirtualKey>,
    },
    /// Text produced by keyboard input for the given window.
    TextInput {
        window: WindowId,
        text: String,
    },
    /// The backend's event queue has been fully drained.
    EventQueueEmpty,
    /// An event this abstraction does not model.
    Unknown,
}
// Tutorial program demonstrating Rust references and borrowing
// (The Rust Book, chapter 4). The commented-out snippets are kept
// deliberately: they show code that does NOT compile.
fn main() {
    println!("Hello, world!");

    // === REFERENCES and BORROWING === //
    let s1 = String::from("hello");
    let len = calculate_len(&s1); // passing a reference to s1
    println!("length of {} is {}", s1, len);
    println!("s1 still usable!: {}", s1);
    /* & creates a reference: it lets us refer to a value without taking
    ownership of it. The &s1 syntax creates a reference that refers to the
    value of s1. Because the reference does not own s1, the value it points
    to is not dropped when the reference goes out of scope. */

    //let s = String::from("hello");
    // change_borrowed(&s); // can't do this, because references are immutable by default

    // The following code works because the reference is declared mutable:
    let mut s = String::from("hello");
    println!("mutable String {}", s);
    can_change_borrowed(&mut s);
    println!("s1 changed to: {}", s);
    /* Mutable references have one big restriction: only one mutable
    reference to a particular piece of data may exist in a given scope.
    This code fails:
        let mut s = String::from("hello");
        let r1 = &mut s; // this is enough
        let r2 = &mut s; // too much
    This rule prevents data races. The following works because the first
    borrow ends at the inner scope's closing brace: */
    let mut s = String::from("hello");
    {
        let r1 = &mut s;
        println!("r1: {}", r1)
    } // scope ends, so r1's borrow ends here
    let r2 = &mut s;
    println!("r2: {}", r2);
    /* A similar rule applies to combining mutable and immutable references.
    This code results in an error:
        let mut s = String::from("hello");
        let r1 = &s; // no problem
        let r2 = &s; // no problem
        let r3 = &mut s; // BIG PROBLEM
        println!("{}, {}, and {}", r1, r2, r3);
    We cannot hold immutable and mutable references to the same data at the
    same time. The following works because the immutable borrows end after
    their last use: */
    let mut s = String::from("hello");
    let r1 = &s;
    let r2 = &s;
    println!("r1: {}, and r2: {}", r1,r2);
    // r1 and r2 are not used after this point, so a mutable borrow is allowed
    let r3 = &mut s;
    println!("r3 before mut: {}", r3);
    r3.push_str(", world!");
    println!("r3 after mut: {}", r3);
    // println!("r1: {}, and r2: {}", r1,r2); // can't use these anymore
    println!("original s: {}",s);

    // === DANGLING REFERENCES === //
    // let reference_to_nothing = dangle();
    /* The line above doesn't compile: when the function returns, its local
    variable is dropped, which would leave a dangling reference. See the
    dangle() definition below. */
}

// fn dangle() -> &String { // returns a reference to a String
//     let s = String::from("hello"); // s is the new string
//     &s; // return a reference
// } // s goes out of scope here and its memory is freed

// Borrows the String immutably; the caller keeps ownership.
// NOTE(review): idiomatic code would take &str — &String is kept for the
// tutorial's narrative.
fn calculate_len(s: &String) -> usize {
    s.len()
}

// fn change_borrowed(some_string: &String) {
//     some_string.push_str(", world"); // fails: the borrow is immutable
// }

// Mutably borrows the String and appends to it in place.
fn can_change_borrowed(some_string: &mut String) {
    some_string.push_str(", world!");
}
use anyhow::{bail, Context, Result};
use bitcoin::{
    consensus::{deserialize, serialize},
    hashes::hex::{FromHex, ToHex},
    BlockHash, Txid,
};
use crossbeam_channel::Receiver;
use rayon::prelude::*;
use serde_derive::Deserialize;
use serde_json::{self, json, Value};
use std::collections::{hash_map::Entry, HashMap};
use std::iter::FromIterator;

use crate::{
    cache::Cache,
    config::{Config, ELECTRS_VERSION},
    daemon::{self, extract_bitcoind_error, Daemon},
    merkle::Proof,
    metrics::{self, Histogram},
    signals::Signal,
    status::ScriptHashStatus,
    tracker::Tracker,
    types::ScriptHash,
};

// Electrum protocol version advertised and required from clients.
const PROTOCOL_VERSION: &str = "1.4";
// Sentinel fee value returned when no estimate is available.
const UNKNOWN_FEE: isize = -1; // (allowed by Electrum protocol)

/// Per-client Electrum protocol state: the chain tip the client last saw
/// and the status of every scripthash it has subscribed to.
#[derive(Default)]
pub struct Client {
    tip: Option<BlockHash>,
    scripthashes: HashMap<ScriptHash, ScriptHashStatus>,
}

/// A single JSON-RPC request; `params` defaults to JSON null when absent.
#[derive(Deserialize)]
struct Request {
    id: Value,
    method: String,
    #[serde(default)]
    params: Value,
}

/// Either one request or a JSON-RPC batch (an array of requests).
#[derive(Deserialize)]
#[serde(untagged)]
enum Requests {
    Single(Request),
    Batch(Vec<Request>),
}

/// Client-supplied protocol version: a single version string or a
/// (min, max) range.
#[derive(Deserialize, Debug, PartialEq, Eq)]
#[serde(untagged)]
enum Version {
    Single(String),
    Range(String, String),
}

/// Parameters of `blockchain.transaction.get`: a bare txid, or a txid with
/// a verbosity flag.
#[derive(Deserialize)]
#[serde(untagged)]
enum TxGetArgs {
    Txid((Txid,)),
    TxidVerbose(Txid, bool),
}

impl From<TxGetArgs> for (Txid, bool) {
    // Normalize both parameter shapes to (txid, verbose); verbosity
    // defaults to false.
    fn from(args: TxGetArgs) -> Self {
        match args {
            TxGetArgs::Txid((txid,)) => (txid, false),
            TxGetArgs::TxidVerbose(txid, verbose) => (txid, verbose),
        }
    }
}

/// Errors defined by the JSON-RPC 2.0 specification.
enum StandardError {
    ParseError,
    InvalidRequest,
    MethodNotFound,
    InvalidParams,
}

enum RpcError {
    // JSON-RPC spec errors
    Standard(StandardError),
    // Electrum-specific errors
    BadRequest(anyhow::Error),
    DaemonError(daemon::RpcError),
}

impl RpcError {
    /// Render this error as a JSON-RPC error object (code + message).
    fn to_value(&self) -> Value {
        match self {
            RpcError::Standard(err) => match err {
                StandardError::ParseError => json!({"code": -32700, "message": "parse error"}),
                StandardError::InvalidRequest => {
                    json!({"code": -32600, "message": "invalid request"})
                }
                StandardError::MethodNotFound => {
                    json!({"code": -32601, "message": "method not found"})
                }
                StandardError::InvalidParams => {
                    json!({"code": -32602, "message": "invalid params"})
                }
            },
            RpcError::BadRequest(err) => json!({"code": 1, "message": err.to_string()}),
            RpcError::DaemonError(err) => json!({"code": 2, "message": err.message}),
        }
    }
}

/// Electrum RPC handler
pub struct Rpc {
    tracker: Tracker,
    cache: Cache,
    rpc_duration: Histogram,
    daemon: Daemon,
    signal: Signal,
    banner: String,
    port: u16,
}

impl Rpc {
    /// Perform initial index sync (may take a while on first run).
    pub fn new(config: &Config) -> Result<Self> {
        let tracker = Tracker::new(config)?;
        let rpc_duration = tracker.metrics().histogram_vec(
            "rpc_duration",
            "RPC duration (in seconds)",
            "method",
            metrics::default_duration_buckets(),
        );
        let signal = Signal::new();
        let daemon = Daemon::connect(config, signal.exit_flag(), tracker.metrics())?;
        let cache = Cache::new(tracker.metrics());
        Ok(Self {
            tracker,
            cache,
            rpc_duration,
            daemon,
            signal,
            banner: config.server_banner.clone(),
            port: config.electrum_rpc_addr.port(),
        })
    }

    /// Shutdown/reload signal handle.
    pub(crate) fn signal(&self) -> &Signal {
        &self.signal
    }

    /// Channel that fires when the daemon reports a new block.
    pub fn new_block_notification(&self) -> Receiver<()> {
        self.daemon.new_block_notification()
    }

    /// Sync the index with the daemon's current chain state.
    pub fn sync(&mut self) -> Result<()> {
        self.tracker.sync(&self.daemon, self.signal.exit_flag())
    }

    /// Recompute all of the client's scripthash statuses (in parallel) and
    /// return the serialized notifications to push: one per changed
    /// scripthash, plus a headers notification when the tip moved since the
    /// client last subscribed.
    pub fn update_client(&self, client: &mut Client) -> Result<Vec<String>> {
        let chain = self.tracker.chain();
        let mut notifications = client
            .scripthashes
            .par_iter_mut()
            .filter_map(|(scripthash, status)| -> Option<Result<Value>> {
                match self
                    .tracker
                    .update_scripthash_status(status, &self.daemon, &self.cache)
                {
                    Ok(true) => Some(Ok(notification(
                        "blockchain.scripthash.subscribe",
                        &[json!(scripthash), json!(status.statushash())],
                    ))),
                    Ok(false) => None, // statushash is the same
                    Err(e) => Some(Err(e)),
                }
            })
            .collect::<Result<Vec<Value>>>()
            .context("failed to update status")?;

        // Only clients that subscribed to headers have a recorded tip.
        if let Some(old_tip) = client.tip {
            let new_tip = self.tracker.chain().tip();
            if old_tip != new_tip {
                client.tip = Some(new_tip);
                let height = chain.height();
                let header = chain.get_block_header(height).unwrap();
                notifications.push(notification(
                    "blockchain.headers.subscribe",
                    &[json!({"hex": serialize(&header).to_hex(), "height": height})],
                ));
            }
        }
        Ok(notifications.into_iter().map(|v| v.to_string()).collect())
    }

    /// `blockchain.headers.subscribe`: record the client's tip and return
    /// the current best header.
    fn headers_subscribe(&self, client: &mut Client) -> Result<Value> {
        let chain = self.tracker.chain();
        client.tip = Some(chain.tip());
        let height = chain.height();
        let header = chain.get_block_header(height).unwrap();
        Ok(json!({"hex": serialize(header).to_hex(), "height": height}))
    }

    /// `blockchain.block.header`: the hex-serialized header at `height`.
    fn block_header(&self, (height,): (usize,)) -> Result<Value> {
        let chain = self.tracker.chain();
        let header = match chain.get_block_header(height) {
            None => bail!("no header at {}", height),
            Some(header) => header,
        };
        Ok(json!(serialize(header).to_hex()))
    }

    /// `blockchain.block.headers`: up to 2016 concatenated hex headers
    /// starting at `start_height`, clamped to the chain tip.
    fn block_headers(&self, (start_height, count): (usize, usize)) -> Result<Value> {
        let chain = self.tracker.chain();
        let max_count = 2016usize;
        // Clamp to both the protocol maximum and the number of headers
        // actually available above start_height.
        let count = std::cmp::min(
            std::cmp::min(count, max_count),
            chain.height() - start_height + 1,
        );
        let heights = start_height..(start_height + count);
        let hex_headers = String::from_iter(
            heights.map(|height| serialize(chain.get_block_header(height).unwrap()).to_hex()),
        );
        Ok(json!({"count": count, "hex": hex_headers, "max": max_count}))
    }

    /// `blockchain.estimatefee`: fee rate for confirmation within `nblocks`
    /// blocks, or -1 when the daemon has no estimate.
    fn estimate_fee(&self, (nblocks,): (u16,)) -> Result<Value> {
        Ok(self
            .daemon
            .estimate_fee(nblocks)?
            .map(|fee_rate| json!(fee_rate.as_btc()))
            .unwrap_or_else(|| json!(UNKNOWN_FEE)))
    }

    /// `blockchain.relayfee`: the daemon's minimum relay fee.
    fn relayfee(&self) -> Result<Value> {
        Ok(json!(self.daemon.get_relay_fee()?.as_btc())) // [BTC/kB]
    }

    /// `blockchain.scripthash.get_balance`. For unsubscribed scripthashes a
    /// fresh status is computed on the fly (with a warning).
    fn scripthash_get_balance(
        &self,
        client: &Client,
        (scripthash,): (ScriptHash,),
    ) -> Result<Value> {
        let balance = match client.scripthashes.get(&scripthash) {
            Some(status) => self.tracker.get_balance(status),
            None => {
                warn!(
                    "blockchain.scripthash.get_balance called for unsubscribed scripthash: {}",
                    scripthash
                );
                self.tracker.get_balance(&self.new_status(scripthash)?)
            }
        };
        Ok(json!(balance))
    }

    /// `blockchain.scripthash.get_history`; same unsubscribed fallback as
    /// `scripthash_get_balance`.
    fn scripthash_get_history(
        &self,
        client: &Client,
        (scripthash,): (ScriptHash,),
    ) -> Result<Value> {
        let history_entries = match client.scripthashes.get(&scripthash) {
            Some(status) => json!(status.get_history()),
            None => {
                warn!(
                    "blockchain.scripthash.get_history called for unsubscribed scripthash: {}",
                    scripthash
                );
                json!(self.new_status(scripthash)?.get_history())
            }
        };
        Ok(history_entries)
    }

    /// `blockchain.scripthash.listunspent`; same unsubscribed fallback as
    /// `scripthash_get_balance`.
    fn scripthash_list_unspent(
        &self,
        client: &Client,
        (scripthash,): (ScriptHash,),
    ) -> Result<Value> {
        let unspent_entries = match client.scripthashes.get(&scripthash) {
            Some(status) => self.tracker.get_unspent(status),
            None => {
                warn!(
                    "blockchain.scripthash.listunspent called for unsubscribed scripthash: {}",
                    scripthash
                );
                self.tracker.get_unspent(&self.new_status(scripthash)?)
            }
        };
        Ok(json!(unspent_entries))
    }

    /// `blockchain.scripthash.subscribe`: register the scripthash (creating
    /// its status on first subscription) and return its status hash.
    fn scripthash_subscribe(
        &self,
        client: &mut Client,
        (scripthash,): (ScriptHash,),
    ) -> Result<Value> {
        let result = match client.scripthashes.entry(scripthash) {
            Entry::Occupied(e) => e.get().statushash(),
            Entry::Vacant(e) => e.insert(self.new_status(scripthash)?).statushash(),
        };
        Ok(json!(result))
    }

    /// Build and populate a fresh status for a scripthash.
    fn new_status(&self, scripthash: ScriptHash) -> Result<ScriptHashStatus> {
        let mut status = ScriptHashStatus::new(scripthash);
        self.tracker
            .update_scripthash_status(&mut status, &self.daemon, &self.cache)?;
        Ok(status)
    }

    /// `blockchain.transaction.broadcast`: decode the hex transaction and
    /// hand it to the daemon; returns the txid.
    fn transaction_broadcast(&self, (tx_hex,): (String,)) -> Result<Value> {
        let tx_bytes = Vec::from_hex(&tx_hex).context("non-hex transaction")?;
        let tx = deserialize(&tx_bytes).context("invalid transaction")?;
        let txid = self.daemon.broadcast(&tx)?;
        Ok(json!(txid))
    }

    /// `blockchain.transaction.get`: verbose requests go straight to the
    /// daemon; non-verbose requests are served from the tx cache when
    /// possible.
    fn transaction_get(&self, args: TxGetArgs) -> Result<Value> {
        let (txid, verbose) = args.into();
        if verbose {
            let blockhash = self.tracker.get_blockhash_by_txid(txid);
            return self.daemon.get_transaction_info(&txid, blockhash);
        }
        let cached = self.cache.get_tx(&txid, |tx| serialize(tx).to_hex());
        Ok(match cached {
            Some(tx_hex) => json!(tx_hex),
            None => {
                debug!("tx cache miss: {}", txid);
                let blockhash = self.tracker.get_blockhash_by_txid(txid);
                json!(self.daemon.get_transaction_hex(&txid, blockhash)?)
            }
        })
    }

    /// `blockchain.transaction.get_merkle`: the merkle proof for `txid` in
    /// the block at `height`, served from the proof cache when possible.
    fn transaction_get_merkle(&self, (txid, height): (Txid, usize)) -> Result<Value> {
        let chain = self.tracker.chain();
        let blockhash = match chain.get_block_hash(height) {
            None => bail!("missing block at {}", height),
            Some(blockhash) => blockhash,
        };
        let proof_to_value = |proof: &Proof| {
            json!({
                "block_height": height,
                "pos": proof.position(),
                "merkle": proof.to_hex(),
            })
        };
        if let Some(result) = self.cache.get_proof(blockhash, txid, proof_to_value) {
            return Ok(result);
        }
        debug!("txids cache miss: {}", blockhash);
        let txids = self.daemon.get_block_txids(blockhash)?;
        match txids.iter().position(|current_txid| *current_txid == txid) {
            None => bail!("missing txid {} in block {}", txid, blockhash),
            Some(position) => Ok(proof_to_value(&Proof::create(&txids, position))),
        }
    }

    /// `mempool.get_fee_histogram`.
    fn get_fee_histogram(&self) -> Result<Value> {
        Ok(json!(self.tracker.fees_histogram()))
    }

    /// Server identification string, e.g. "electrs/<version>".
    fn server_id(&self) -> String {
        format!("electrs/{}", ELECTRS_VERSION)
    }

    /// `server.version`: only an exact protocol-version match is accepted;
    /// anything else (including ranges) is rejected.
    fn version(&self, (client_id, client_version): (String, Version)) -> Result<Value> {
        match client_version {
            Version::Single(v) if v == PROTOCOL_VERSION => {
                Ok(json!([self.server_id(), PROTOCOL_VERSION]))
            }
            _ => {
                bail!(
                    "{} requested {:?}, server supports {}",
                    client_id,
                    client_version,
                    PROTOCOL_VERSION
                );
            }
        }
    }

    /// `server.features`: static server capability description.
    fn features(&self) -> Result<Value> {
        Ok(json!({
            "genesis_hash": self.tracker.chain().get_block_hash(0),
            "hosts": { "tcp_port": self.port },
            "protocol_max": PROTOCOL_VERSION,
            "protocol_min": PROTOCOL_VERSION,
            "pruning": null,
            "server_version": self.server_id(),
            "hash_function": "sha256"
        }))
    }

    /// Parse one line of client input (single request or batch), dispatch
    /// it, and return the serialized JSON-RPC response. Malformed input
    /// produces a spec-compliant error response with a null id.
    pub fn handle_request(&self, client: &mut Client, line: &str) -> String {
        let error_msg_no_id = |err| error_msg(Value::Null, RpcError::Standard(err));
        let response: Value = match serde_json::from_str(line) {
            // parse JSON from str
            Ok(value) => match serde_json::from_value(value) {
                // parse RPC from JSON
                Ok(requests) => match requests {
                    Requests::Single(request) => self.call(client, request),
                    Requests::Batch(requests) => json!(requests
                        .into_iter()
                        .map(|request| self.call(client, request))
                        .collect::<Vec<Value>>()),
                },
                Err(err) => {
                    warn!("invalid RPC request ({:?}): {}", line, err);
                    error_msg_no_id(StandardError::InvalidRequest)
                }
            },
            Err(err) => {
                warn!("invalid JSON ({:?}): {}", line, err);
                error_msg_no_id(StandardError::ParseError)
            }
        };
        response.to_string()
    }

    /// Dispatch a single parsed request to its handler, timing it with the
    /// per-method duration histogram, and wrap the outcome in a JSON-RPC
    /// result or error message.
    fn call(&self, client: &mut Client, request: Request) -> Value {
        let Request { id, method, params } = request;
        let call = match Call::parse(&method, params) {
            Ok(call) => call,
            Err(err) => return error_msg(id, RpcError::Standard(err)),
        };
        self.rpc_duration.observe_duration(&method, || {
            let result = match call {
                Call::Banner => Ok(json!(self.banner)),
                Call::BlockHeader(args) => self.block_header(args),
                Call::BlockHeaders(args) => self.block_headers(args),
                Call::Donation => Ok(Value::Null),
                Call::EstimateFee(args) => self.estimate_fee(args),
                Call::Features => self.features(),
                Call::HeadersSubscribe => self.headers_subscribe(client),
                Call::MempoolFeeHistogram => self.get_fee_histogram(),
                Call::PeersSubscribe => Ok(json!([])),
                Call::Ping => Ok(Value::Null),
                Call::RelayFee => self.relayfee(),
                Call::ScriptHashGetBalance(args) => self.scripthash_get_balance(client, args),
                Call::ScriptHashGetHistory(args) => self.scripthash_get_history(client, args),
                Call::ScriptHashListUnspent(args) => self.scripthash_list_unspent(client, args),
                Call::ScriptHashSubscribe(args) => self.scripthash_subscribe(client, args),
                Call::TransactionBroadcast(args) => self.transaction_broadcast(args),
                Call::TransactionGet(args) => self.transaction_get(args),
                Call::TransactionGetMerkle(args) => self.transaction_get_merkle(args),
                Call::Version(args) => self.version(args),
            };
            match result {
                Ok(value) => result_msg(id, value),
                Err(err) => {
                    warn!("RPC {} failed: {:#}", method, err);
                    // Daemon RPC errors are forwarded with their original
                    // payload; everything else becomes a generic bad request.
                    match err
                        .downcast_ref::<bitcoincore_rpc::Error>()
                        .and_then(extract_bitcoind_error)
                    {
                        Some(e) => error_msg(id, RpcError::DaemonError(e.clone())),
                        None => error_msg(id, RpcError::BadRequest(err)),
                    }
                }
            }
        })
    }
}

/// A parsed Electrum RPC call: method plus deserialized parameters.
#[derive(Deserialize)]
enum Call {
    Banner,
    BlockHeader((usize,)),
    BlockHeaders((usize, usize)),
    TransactionBroadcast((String,)),
    Donation,
    EstimateFee((u16,)),
    Features,
    HeadersSubscribe,
    MempoolFeeHistogram,
    PeersSubscribe,
    Ping,
    RelayFee,
    ScriptHashGetBalance((ScriptHash,)),
    ScriptHashGetHistory((ScriptHash,)),
    ScriptHashListUnspent((ScriptHash,)),
    ScriptHashSubscribe((ScriptHash,)),
    TransactionGet(TxGetArgs),
    TransactionGetMerkle((Txid, usize)),
    Version((String, Version)),
}

impl Call {
    /// Map an Electrum method name to a `Call`, deserializing its params.
    /// Unknown methods yield `MethodNotFound`.
    fn parse(method: &str, params: Value) -> std::result::Result<Call, StandardError> {
        Ok(match method {
            "blockchain.block.header" => Call::BlockHeader(convert(params)?),
            "blockchain.block.headers" => Call::BlockHeaders(convert(params)?),
            "blockchain.estimatefee" => Call::EstimateFee(convert(params)?),
            "blockchain.headers.subscribe" => Call::HeadersSubscribe,
            "blockchain.relayfee" => Call::RelayFee,
            "blockchain.scripthash.get_balance" => Call::ScriptHashGetBalance(convert(params)?),
            "blockchain.scripthash.get_history" => Call::ScriptHashGetHistory(convert(params)?),
            "blockchain.scripthash.listunspent" => Call::ScriptHashListUnspent(convert(params)?),
            "blockchain.scripthash.subscribe" => Call::ScriptHashSubscribe(convert(params)?),
            "blockchain.transaction.broadcast" => Call::TransactionBroadcast(convert(params)?),
            "blockchain.transaction.get" => Call::TransactionGet(convert(params)?),
            "blockchain.transaction.get_merkle" => Call::TransactionGetMerkle(convert(params)?),
            "mempool.get_fee_histogram" => Call::MempoolFeeHistogram,
            "server.banner" => Call::Banner,
            "server.donation_address" => Call::Donation,
            "server.features" => Call::Features,
            "server.peers.subscribe" => Call::PeersSubscribe,
            "server.ping" => Call::Ping,
            "server.version" => Call::Version(convert(params)?),
            _ => {
                warn!("unknown method {}", method);
                return Err(StandardError::MethodNotFound);
            }
        })
    }
}

/// Deserialize RPC params into the handler's parameter type, logging the
/// original params on failure.
fn convert<T>(params: Value) -> std::result::Result<T, StandardError>
where
    T: serde::de::DeserializeOwned,
{
    // Stringify up front: from_value consumes `params`, and we still want
    // to log it if deserialization fails.
    let params_str = params.to_string();
    serde_json::from_value(params).map_err(|err| {
        warn!("invalid params {}: {}", params_str, err);
        StandardError::InvalidParams
    })
}

/// Build a JSON-RPC notification (no id).
fn notification(method: &str, params: &[Value]) -> Value {
    json!({"jsonrpc": "2.0", "method": method, "params": params})
}

/// Build a JSON-RPC success response.
fn result_msg(id: Value, result: Value) -> Value {
    json!({"jsonrpc": "2.0", "id": id, "result": result})
}

/// Build a JSON-RPC error response.
fn error_msg(id: Value, error: RpcError) -> Value {
    json!({"jsonrpc": "2.0", "id": id, "error": error.to_value()})
}
#[macro_use] extern crate diesel; pub mod server; pub mod storage; pub mod configuration;
// Copyright 2019-2021 Parity Technologies (UK) Ltd.
// This file is part of Cumulus.

// Cumulus is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Cumulus is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Cumulus. If not, see <http://www.gnu.org/licenses/>.

use futures::lock::Mutex;
use sc_consensus::import_queue::{BasicQueue, Verifier as VerifierT};
use sc_consensus::{BlockCheckParams, BlockImport, BlockImportParams, ImportResult};
use sp_blockchain::Result as ClientResult;
use sp_consensus::error::Error as ConsensusError;
use sp_core::traits::SpawnEssentialNamed;
use sp_runtime::traits::{Block as BlockT, Header as HeaderT};
use std::marker::PhantomData;
use substrate_prometheus_endpoint::Registry;

/// Domain specific block import.
///
/// We use it to wrap Custom Block import the Core domain would need and
/// share it between tasks: the inner importer is guarded by an async
/// `Mutex` so `BlockImport` can be implemented for `&DomainBlockImport<I>`.
pub struct DomainBlockImport<I> {
    inner: Mutex<I>,
}

impl<I> DomainBlockImport<I> {
    /// Create a new instance wrapping `inner`.
    pub fn new(inner: I) -> Self {
        Self {
            inner: Mutex::new(inner),
        }
    }
}

// Implemented for a shared reference so multiple tasks can hold the same
// wrapper; each call serializes access through the async mutex.
#[async_trait::async_trait]
impl<Block, I> BlockImport<Block> for &DomainBlockImport<I>
where
    Block: BlockT,
    I: BlockImport<Block> + Send,
{
    type Error = I::Error;
    type Transaction = I::Transaction;

    /// Delegate the pre-import check to the inner importer.
    async fn check_block(
        &mut self,
        block: BlockCheckParams<Block>,
    ) -> Result<ImportResult, Self::Error> {
        let mut inner = self.inner.lock().await;
        inner.check_block(block).await
    }

    /// Delegate the actual import to the inner importer.
    async fn import_block(
        &mut self,
        block_import_params: BlockImportParams<Block, Self::Transaction>,
    ) -> Result<ImportResult, Self::Error> {
        let mut inner = self.inner.lock().await;
        inner.import_block(block_import_params).await
    }
}

/// A pass-through verifier: `verify` only records the block's post-hash.
/// No inherent, signature, or state checks are performed here.
pub struct Verifier<Block> {
    _marker: PhantomData<Block>,
}

impl<Block> Default for Verifier<Block> {
    /// Create a new instance.
    #[inline]
    fn default() -> Self {
        Self {
            _marker: PhantomData,
        }
    }
}

#[async_trait::async_trait]
impl<Block> VerifierT<Block> for Verifier<Block>
where
    Block: BlockT,
{
    /// Accept the block unconditionally, filling in `post_hash` from the
    /// header so downstream consumers don't recompute it.
    async fn verify(
        &mut self,
        mut block_params: BlockImportParams<Block, ()>,
    ) -> Result<BlockImportParams<Block, ()>, String> {
        block_params.post_hash = Some(block_params.header.hash());
        Ok(block_params)
    }
}

/// Start an import queue for a Cumulus collator that does not use any
/// special authoring logic: blocks pass through the no-op `Verifier` and
/// into `block_import`.
#[allow(clippy::type_complexity)]
pub fn import_queue<Block: BlockT, I>(
    block_import: I,
    spawner: &impl SpawnEssentialNamed,
    registry: Option<&Registry>,
) -> ClientResult<BasicQueue<Block, I::Transaction>>
where
    I: BlockImport<Block, Error = ConsensusError> + Send + Sync + 'static,
    I::Transaction: Send,
{
    Ok(BasicQueue::new(
        Verifier::default(),
        Box::new(block_import),
        None, // no justification import
        spawner,
        registry,
    ))
}
/*
 * Copyright Stalwart Labs Ltd. See the COPYING
 * file at the top-level directory of this distribution.
 *
 * Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 * https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 * <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
 * option. This file may not be copied, modified, or distributed
 * except according to those terms.
 */

use crate::{
    client::Client,
    core::{
        changes::{ChangesRequest, ChangesResponse},
        get::GetRequest,
        query::{Comparator, Filter, QueryRequest, QueryResponse},
        query_changes::{QueryChangesRequest, QueryChangesResponse},
        request::{Arguments, Request},
        response::{PrincipalGetResponse, PrincipalSetResponse},
        set::{SetObject, SetRequest},
    },
    Get, Method, Set,
};

use super::{Principal, Property, Type, DKIM};

// High-level helpers for managing Principal objects (accounts, domains,
// lists, groups) through the JMAP set/get/query/changes methods.
impl Client {
    /// Create an individual account principal with the given email,
    /// secret, and display name.
    #[maybe_async::maybe_async]
    pub async fn individual_create(
        &self,
        email: impl Into<String>,
        secret: impl Into<String>,
        name: impl Into<String>,
    ) -> crate::Result<Principal> {
        let mut request = self.build();
        let id = request
            .set_principal()
            .create()
            .name(name)
            .secret(secret)
            .email(email)
            .ptype(Type::Individual)
            .create_id()
            .unwrap();
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .created(&id)
    }

    /// Create a domain principal with the given name.
    #[maybe_async::maybe_async]
    pub async fn domain_create(&self, name: impl Into<String>) -> crate::Result<Principal> {
        let mut request = self.build();
        let id = request
            .set_principal()
            .create()
            .name(name)
            .ptype(Type::Domain)
            .create_id()
            .unwrap();
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .created(&id)
    }

    /// Enable DKIM signing for a domain: stores the signing key as the
    /// principal's secret and sets the DKIM selector/expiration.
    #[maybe_async::maybe_async]
    pub async fn domain_enable_dkim(
        &self,
        id: &str,
        key: impl Into<String>,
        selector: impl Into<String>,
        expiration: Option<i64>,
    ) -> crate::Result<Option<Principal>> {
        let mut request = self.build();
        request
            .set_principal()
            .update(id)
            .secret(key)
            .dkim(DKIM::new(Some(selector), expiration));
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .updated(id)
    }

    /// Create a mailing-list principal with the given members.
    #[maybe_async::maybe_async]
    pub async fn list_create(
        &self,
        email: impl Into<String>,
        name: impl Into<String>,
        members: impl IntoIterator<Item = impl Into<String>>,
    ) -> crate::Result<Principal> {
        let mut request = self.build();
        let id = request
            .set_principal()
            .create()
            .name(name)
            .email(email)
            .ptype(Type::List)
            .members(members.into())
            .create_id()
            .unwrap();
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .created(&id)
    }

    /// Create a group principal with the given members.
    #[maybe_async::maybe_async]
    pub async fn group_create(
        &self,
        email: impl Into<String>,
        name: impl Into<String>,
        members: impl IntoIterator<Item = impl Into<String>>,
    ) -> crate::Result<Principal> {
        let mut request = self.build();
        let id = request
            .set_principal()
            .create()
            .name(name)
            .email(email)
            .ptype(Type::Group)
            .members(members.into())
            .create_id()
            .unwrap();
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .created(&id)
    }

    /// Update a principal's display name.
    #[maybe_async::maybe_async]
    pub async fn principal_set_name(
        &self,
        id: &str,
        name: impl Into<String>,
    ) -> crate::Result<Option<Principal>> {
        let mut request = self.build();
        request.set_principal().update(id).name(name);
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .updated(id)
    }

    /// Update a principal's secret (password / key).
    #[maybe_async::maybe_async]
    pub async fn principal_set_secret(
        &self,
        id: &str,
        secret: impl Into<String>,
    ) -> crate::Result<Option<Principal>> {
        let mut request = self.build();
        request.set_principal().update(id).secret(secret);
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .updated(id)
    }

    /// Update a principal's primary email address.
    #[maybe_async::maybe_async]
    pub async fn principal_set_email(
        &self,
        id: &str,
        email: impl Into<String>,
    ) -> crate::Result<Option<Principal>> {
        let mut request = self.build();
        request.set_principal().update(id).email(email);
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .updated(id)
    }

    /// Set or clear (`None`) a principal's timezone.
    #[maybe_async::maybe_async]
    pub async fn principal_set_timezone(
        &self,
        id: &str,
        timezone: Option<impl Into<String>>,
    ) -> crate::Result<Option<Principal>> {
        let mut request = self.build();
        request.set_principal().update(id).timezone(timezone);
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .updated(id)
    }

    /// Replace or clear (`None`) a principal's member list.
    #[maybe_async::maybe_async]
    pub async fn principal_set_members(
        &self,
        id: &str,
        members: Option<impl IntoIterator<Item = impl Into<String>>>,
    ) -> crate::Result<Option<Principal>> {
        let mut request = self.build();
        request.set_principal().update(id).members(members);
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .updated(id)
    }

    /// Replace or clear (`None`) a principal's email aliases.
    #[maybe_async::maybe_async]
    pub async fn principal_set_aliases(
        &self,
        id: &str,
        aliases: Option<impl IntoIterator<Item = impl Into<String>>>,
    ) -> crate::Result<Option<Principal>> {
        let mut request = self.build();
        request.set_principal().update(id).aliases(aliases);
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .updated(id)
    }

    /// Replace or clear (`None`) a principal's capabilities.
    #[maybe_async::maybe_async]
    pub async fn principal_set_capabilities(
        &self,
        id: &str,
        capabilities: Option<impl IntoIterator<Item = impl Into<String>>>,
    ) -> crate::Result<Option<Principal>> {
        let mut request = self.build();
        request
            .set_principal()
            .update(id)
            .capabilities(capabilities);
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .updated(id)
    }

    /// Delete a principal by id.
    #[maybe_async::maybe_async]
    pub async fn principal_destroy(&self, id: &str) -> crate::Result<()> {
        let mut request = self.build();
        request.set_principal().destroy([id]).arguments();
        request
            .send_single::<PrincipalSetResponse>()
            .await?
            .destroyed(id)
    }

    /// Fetch a single principal by id, optionally restricted to the given
    /// properties; `None` when it does not exist.
    #[maybe_async::maybe_async]
    pub async fn principal_get(
        &self,
        id: &str,
        properties: Option<impl IntoIterator<Item = Property>>,
    ) -> crate::Result<Option<Principal>> {
        let mut request = self.build();
        let get_request = request.get_principal().ids([id]);
        if let Some(properties) = properties {
            get_request.properties(properties.into_iter());
        }
        request
            .send_single::<PrincipalGetResponse>()
            .await
            .map(|mut r| r.take_list().pop())
    }

    /// Query principal ids with optional filter and sort comparators.
    #[maybe_async::maybe_async]
    pub async fn principal_query(
        &self,
        filter: Option<impl Into<Filter<super::query::Filter>>>,
        sort: Option<impl IntoIterator<Item = Comparator<super::query::Comparator>>>,
    ) -> crate::Result<QueryResponse> {
        let mut request = self.build();
        let query_request = request.query_principal();
        if let Some(filter) = filter {
            query_request.filter(filter);
        }
        if let Some(sort) = sort {
            query_request.sort(sort.into_iter());
        }
        request.send_single::<QueryResponse>().await
    }

    /// Fetch principal changes since `since_state`, limited to
    /// `max_changes` entries.
    #[maybe_async::maybe_async]
    pub async fn principal_changes(
        &self,
        since_state: impl Into<String>,
        max_changes: usize,
    ) -> crate::Result<ChangesResponse<Principal<Get>>> {
        let mut request = self.build();
        request
            .changes_principal(since_state)
            .max_changes(max_changes);
        request.send_single().await
    }
}

// Low-level builders: each adds one Principal method call to the request
// and returns the mutable call object for further configuration.
impl Request<'_> {
    /// Add a `Principal/get` call to this request.
    pub fn get_principal(&mut self) -> &mut GetRequest<Principal<Set>> {
        self.add_method_call(
            Method::GetPrincipal,
            Arguments::principal_get(self.params(Method::GetPrincipal)),
        )
        .principal_get_mut()
    }

    /// Send this request, expecting a single `Principal/get` response.
    #[maybe_async::maybe_async]
    pub async fn send_get_principal(self) -> crate::Result<PrincipalGetResponse> {
        self.send_single().await
    }

    /// Add a `Principal/changes` call to this request.
    pub fn changes_principal(&mut self, since_state: impl Into<String>) -> &mut ChangesRequest {
        self.add_method_call(
            Method::ChangesPrincipal,
            Arguments::changes(self.params(Method::ChangesPrincipal), since_state.into()),
        )
        .changes_mut()
    }

    /// Send this request, expecting a single `Principal/changes` response.
    #[maybe_async::maybe_async]
    pub async fn send_changes_principal(self) -> crate::Result<ChangesResponse<Principal<Get>>> {
        self.send_single().await
    }

    /// Add a `Principal/query` call to this request.
    pub fn query_principal(&mut self) -> &mut QueryRequest<Principal<Set>> {
        self.add_method_call(
            Method::QueryPrincipal,
            Arguments::principal_query(self.params(Method::QueryPrincipal)),
        )
        .principal_query_mut()
    }

    /// Send this request, expecting a single `Principal/query` response.
    #[maybe_async::maybe_async]
    pub async fn send_query_principal(self) -> crate::Result<QueryResponse> {
        self.send_single().await
    }

    /// Add a `Principal/queryChanges` call to this request.
    pub fn query_principal_changes(
        &mut self,
        since_query_state: impl Into<String>,
    ) -> &mut QueryChangesRequest<Principal<Set>> {
        self.add_method_call(
            Method::QueryChangesPrincipal,
            Arguments::principal_query_changes(
                self.params(Method::QueryChangesPrincipal),
                since_query_state.into(),
            ),
        )
        .principal_query_changes_mut()
    }

    /// Send this request, expecting a single `Principal/queryChanges`
    /// response.
    #[maybe_async::maybe_async]
    pub async fn send_query_principal_changes(self) -> crate::Result<QueryChangesResponse> {
        self.send_single().await
    }

    /// Add a `Principal/set` call to this request.
    pub fn set_principal(&mut self) -> &mut SetRequest<Principal<Set>> {
        self.add_method_call(
            Method::SetPrincipal,
            Arguments::principal_set(self.params(Method::SetPrincipal)),
        )
        .principal_set_mut()
    }

    /// Send this request, expecting a single `Principal/set` response.
    #[maybe_async::maybe_async]
    pub async fn send_set_principal(self) -> crate::Result<PrincipalSetResponse> {
        self.send_single().await
    }
}
use crate::storage::error::Error; use serde_derive::Serialize; use std::convert::Infallible; use warp::http::StatusCode; use warp::reject::{Reject, Rejection}; #[derive(Debug)] struct NotFound { series: String, } impl Reject for NotFound {} pub fn not_found<S: AsRef<str>>(series: S) -> Rejection { warp::reject::custom(NotFound { series: series.as_ref().to_owned(), }) } #[derive(Debug)] struct BadRequest { reason: String, } impl Reject for BadRequest {} pub fn bad_request<S: AsRef<str>>(reason: S) -> Rejection { warp::reject::custom(BadRequest { reason: reason.as_ref().to_owned(), }) } #[derive(Debug)] struct InternalError { error: Error, } impl Reject for InternalError {} pub fn internal(error: Error) -> Rejection { warp::reject::custom(InternalError { error: error }) } #[derive(Debug)] struct Conflict { series: String, } impl Reject for Conflict {} pub fn conflict<S: AsRef<str>>(series: S) -> Rejection { warp::reject::custom(Conflict { series: series.as_ref().to_owned(), }) } #[derive(Serialize)] struct ErrorMessage { code: u16, message: String, } pub async fn handle(err: Rejection) -> Result<impl warp::Reply, Infallible> { let code; let message; if let Some(not_found) = err.find::<NotFound>() { code = StatusCode::NOT_FOUND; message = format!("series '{}' not found", not_found.series); } else if let Some(internal) = err.find::<InternalError>() { code = StatusCode::INTERNAL_SERVER_ERROR; message = format!("internal error: {}", internal.error); } else if let Some(bad_request) = err.find::<BadRequest>() { code = StatusCode::BAD_REQUEST; message = format!("{}", bad_request.reason); } else if let Some(conflict) = err.find::<Conflict>() { code = StatusCode::CONFLICT; message = format!("'{}' already exists", conflict.series); } else if let Some(_) = err.find::<warp::filters::body::BodyDeserializeError>() { message = "invalid json body".to_owned(); code = StatusCode::BAD_REQUEST; } else { code = StatusCode::INTERNAL_SERVER_ERROR; message = "unhandled 
rejection".to_string(); } let json = warp::reply::json(&ErrorMessage { code: code.as_u16(), message: message.into(), }); Ok(warp::reply::with_status(json, code)) }
pub type Error = Box<dyn std::error::Error>; pub use serde::{Serialize,Deserialize}; pub use bincode::{serialize_into,deserialize_from,serialize,deserialize}; pub use math_lib::vec3::*; pub fn random() -> usize { rand::random() }
// This file is part of woff2-sys. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/woff2-sys/master/COPYRIGHT. No part of woff2-sys, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2017 The developers of woff2-sys. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/woff2-sys/master/COPYRIGHT. #![allow(non_upper_case_globals)] #![allow(non_camel_case_types)] #![allow(non_snake_case)] #[macro_use] extern crate cpp; use ::std::mem::uninitialized; cpp! {{ #include "woff2_enc.h" using std::string; using woff2::MaxWOFF2CompressedSize; using woff2::ConvertTTFToWOFF2; using woff2::WOFF2Params; }} #[link(name = "brotli")] extern "C" { } /// brotliQuality should normally be 11, allowTransforms should normally be true pub fn convertTtfToWoff2(ttfFontBytes: &[u8], additionalExtendedMetadataBytes: &[u8], brotliQuality: u8, allowTransforms: bool) -> Result<Vec<u8>, ()> { debug_assert!(brotliQuality < 12, "brotliQuality should be between 0 and 11 inclusive"); let capacity = MaxWOFF2CompressedSize(ttfFontBytes.len(), additionalExtendedMetadataBytes.len()); let mut woffFontBytes = Vec::with_capacity(capacity); let mut woffFontBytesLength = unsafe { uninitialized() }; let success = ConvertTTFToWOFF2(ttfFontBytes.as_ptr(), ttfFontBytes.len(), woffFontBytes.as_mut_ptr(), &mut woffFontBytesLength, additionalExtendedMetadataBytes.as_ptr(), additionalExtendedMetadataBytes.len(), brotliQuality as i32, allowTransforms); if success { unsafe { woffFontBytes.set_len(woffFontBytesLength) }; woffFontBytes.shrink_to_fit(); Ok(woffFontBytes) } else { Err(()) } } #[inline(always)] fn MaxWOFF2CompressedSize(length: usize, extended_metadata_length: usize) -> usize { length + 1024 + extended_metadata_length // 
cpp!([data as "uint8_t *", length as "size_t", extended_metadata as "const char *", extended_metadata_length as "size_t"] -> usize as "size_t" // { // string copyOfExtendedMetadata(extended_metadata, extended_metadata_length); // // return MaxWOFF2CompressedSize(data, length, &copyOfExtendedMetadata); // }) } fn ConvertTTFToWOFF2(data: *const u8, length: usize, result: *mut u8, result_length: *mut usize, extended_metadata: *const u8, extended_metadata_length: usize, brotli_quality: i32, allow_transforms: bool) -> bool { unsafe { cpp!([data as "const uint8_t *", length as "size_t", result as "uint8_t *", result_length as "size_t *", extended_metadata as "const char *", extended_metadata_length as "size_t", brotli_quality as "int", allow_transforms as "bool"] -> bool as "bool" { string copyOfExtendedMetadata(extended_metadata, extended_metadata_length); struct WOFF2Params params; params.extended_metadata = copyOfExtendedMetadata; params.brotli_quality = brotli_quality; params.allow_transforms = allow_transforms; return ConvertTTFToWOFF2(data, length, result, result_length, params); }) } }
extern crate kafka; use kafka::client::KafkaClient; /// This program demonstrates the low level api for fetching messages. /// Please look at examles/consume.rs for an easier to use API. fn main() { let broker = "localhost:9092"; let topic = "my-topic"; let partition = 0; let offset = 0; println!("About to fetch messages at {} from: {} (partition {}, offset {}) ", broker, topic, partition, offset); let mut client = KafkaClient::new(vec!(broker.to_owned())); if let Err(e) = client.load_metadata_all() { println!("Failed to load meta data from {}: {}", broker, e); return; } // ~ make sure to print out a warning message when the target // topic does not yet exist if !client.topic_partitions.contains_key(topic) { println!("No such topic at {}: {}", broker, topic); return; } match client.fetch_messages(topic.to_owned(), partition, offset) { Err(e) => { println!("Failed to fetch messages: {}", e); } Ok(msgs) => { for msg in msgs { println!("{:?}", msg); } println!("No more messages."); } } }
use std::collections::HashSet; use std::io::{self, Read}; type Result<T> = ::std::result::Result<T, Box<::std::error::Error>>; fn units_react(x: u8, y: u8) -> bool { x.to_ascii_uppercase() == y.to_ascii_uppercase() && x.is_ascii_uppercase() != y.is_ascii_uppercase() } fn react_polymer<T>(polymer: T) -> Result<Vec<u8>> where T: Iterator<Item = ::std::result::Result<u8, ::std::io::Error>>, { let mut reacted = Vec::new(); for unit in polymer { reacted.push(unit?); while reacted.len() >= 2 && units_react(reacted[reacted.len() - 1], reacted[reacted.len() - 2]) { reacted.truncate(reacted.len() - 2); } } Ok(reacted) } #[test] fn test_react_polymer() -> Result<()> { assert_eq!(react_polymer("foo".as_bytes().bytes())?, "foo".as_bytes()); assert_eq!(react_polymer("foO".as_bytes().bytes())?, "f".as_bytes()); assert_eq!(react_polymer("foOFoo".as_bytes().bytes())?, "oo".as_bytes()); assert_eq!( react_polymer("dabAcCaCBAcCcaDA".as_bytes().bytes())?, "dabCBAcaDA".as_bytes() ); Ok(()) } fn remove_unit(polymer: &Vec<u8>, unit: u8) -> Vec<u8> { polymer .into_iter() .map(|x| *x) .filter(|u| u.to_ascii_uppercase() != unit) .collect() } fn main() -> Result<()> { let mut polymer = react_polymer(io::stdin().lock().bytes())?; while polymer.last() == Some(&b'\n') { polymer.pop(); } println!("{}", polymer.len()); let mut units = HashSet::new(); for unit in &polymer { units.insert(unit.to_ascii_uppercase()); } let minimized_length = units .into_iter() .map(|unit| { react_polymer(remove_unit(&polymer, unit).bytes()) .unwrap() .len() }) .min() .unwrap(); println!("{}", minimized_length); Ok(()) }
/*======================================= * @FileName: 4寻找两个正序数组的中位数.rs * @Description: * @Author: TonyLaw * @Date: 2021-10-11 00:06:58 Monday * @Copyright: © 2021 TonyLaw. All Rights Reserved. =========================================*/ /*======================================= (题目难度:困难) 给定两个大小分别为 m 和 n 的正序(从小到大)数组 nums1 和 nums2。请你找出并返回这两个正序数组的 中位数 。   示例 1: 输入:nums1 = [1,3], nums2 = [2] 输出:2.00000 解释:合并数组 = [1,2,3] ,中位数 2 示例 2: 输入:nums1 = [1,2], nums2 = [3,4] 输出:2.50000 解释:合并数组 = [1,2,3,4] ,中位数 (2 + 3) / 2 = 2.5 示例 3: 输入:nums1 = [0,0], nums2 = [0,0] 输出:0.00000 示例 4: 输入:nums1 = [], nums2 = [1] 输出:1.00000 示例 5: 输入:nums1 = [2], nums2 = [] 输出:2.00000   提示: nums1.length == m nums2.length == n 0 <= m <= 1000 0 <= n <= 1000 1 <= m + n <= 2000 -106 <= nums1[i], nums2[i] <= 106 =========================================*/ struct Solution; use std::cmp::min; impl Solution { pub fn find_median_sorted_arrays(nums1: Vec<i32>, nums2: Vec<i32>) -> f64 { let min_left = (nums1.len() + nums2.len() + 1) / 2; let min_right = (nums1.len() + nums2.len() + 2) / 2; (find_k(&nums1, 0, &nums2, 0, min_left) + find_k(&nums1, 0, &nums2, 0, min_right)) as f64 / 2.0 } } fn find_k(v1: &Vec<i32>, i: usize, v2: &Vec<i32>, j: usize, k: usize) -> i32 { // 当nums1 删除完, 则直接返回j + k - 1位置的数字,nums2 删完同理 if i >= v1.len() { return v2[j + k - 1]; } if j >= v2.len() { return v1[i + k - 1]; } // 当k==1 时表示找最小的数字 if k == 1 { return min(v1[i], v2[j]); } let max1 = if (i + k / 2 - 1) < v1.len() {v1[i + k/2 -1]} else {i32::MAX}; let max2 = if (j + k/2 -1) < v2.len() {v2[j + k/2 -1]} else {i32::MAX}; return if max1 > max2 { find_k(v1, i, v2, j + k/2, k - k/2) } else { find_k(v1, i + k/2, v2, j, k - k/2) }; } fn main() { let l1 = vec![1, 3]; let l2 = vec![2]; let result = Solution::find_median_sorted_arrays(l1, l2); println!("{:?}", result); }
extern crate reqwest; extern crate semver; extern crate serde; use clap::{App, AppSettings, SubCommand}; mod mirror; mod remote; mod version; fn main() { let matches = App::new("arc") .version(env!("CARGO_PKG_VERSION")) .about("Node version manager") .setting(AppSettings::SubcommandRequiredElseHelp) .subcommand(SubCommand::with_name("ls-remote")) .get_matches(); match matches.subcommand() { ("ls-remote", Some(_)) => remote::ls(), _ => unreachable!(), } }
use std::io::BufReader; use std::io::prelude::*; use std::fs::File; use std::str::FromStr; trait Node { fn parse(&mut self, context: &mut Context); } struct ProgramNode {} impl ProgramNode { fn new() -> ProgramNode { ProgramNode {} } } impl Node for ProgramNode { fn parse(&mut self, context: &mut Context) { context.skip_token("program".to_string()); let mut command_list_node = CommandListNode::new(); print!("node = [program"); command_list_node.parse(context); println!("]"); } } struct CommandListNode {} impl CommandListNode { fn new() -> CommandListNode { CommandListNode {} } } impl Node for CommandListNode { fn parse(&mut self, context: &mut Context) { print!(" ["); loop { let current_token = context.current_token(); if current_token == "".to_string() { panic!(format!("Missing 'end'")); } else if current_token == "end".to_string() { context.skip_token("end".to_string()); break; } else { let mut command_node = CommandNode::new(); command_node.parse(context); print!(", "); } } print!("]"); } } struct CommandNode {} impl CommandNode { fn new() -> CommandNode { CommandNode {} } } impl Node for CommandNode { fn parse(&mut self, context: &mut Context) { if context.current_token() == "repeat".to_string() { let mut node = RepeatCommandNode::new(); print!("["); node.parse(context); print!("]"); } else { let mut node = PrimitiveCommandNode::new(); node.parse(context); } } } struct RepeatCommandNode { number: u32, } impl RepeatCommandNode { fn new() -> RepeatCommandNode { RepeatCommandNode { number: 0, } } } impl Node for RepeatCommandNode { fn parse(&mut self, context: &mut Context) { context.skip_token("repeat".to_string()); self.number = context.current_number(); context.next_token(); let mut command_list_node = CommandListNode::new(); print!("repeat {}", self.number); command_list_node.parse(context); } } struct PrimitiveCommandNode { name: String, } impl PrimitiveCommandNode { fn new() -> PrimitiveCommandNode { PrimitiveCommandNode { name: "".to_string(), } } } impl 
Node for PrimitiveCommandNode { fn parse(&mut self, context: &mut Context) { self.name = context.current_token(); context.skip_token(self.name.clone()); print!("{}", self.name); if self.name != "go".to_string() && self.name != "right".to_string() && self.name != "left".to_string() { panic!(format!("{} is undefined", self.name)); } } } struct Context { tokens: Vec<String>, current_token: String, } impl Context { fn new(text: String) -> Context { let v: Vec<String> = text.split(" ").map(|s| s.to_string()).collect(); let tokens: Vec<String> = v.into_iter().map(|s| s).rev().collect(); let mut c = Context { tokens: tokens, current_token: "".to_string(), }; c.next_token(); c } fn next_token(&mut self) -> String { self.current_token = match self.tokens.pop() { Some(t) => t, None => "".to_string(), }; self.current_token.clone() } fn current_token(&self) -> String { self.current_token.clone() } fn skip_token(&mut self, token: String) { if token != self.current_token { panic!(format!("Warning: {} is expected, but {} is found.", token, self.current_token)); } self.next_token(); } fn current_number(&self) -> u32 { u32::from_str(self.current_token.clone().as_str()).unwrap() } } fn main() { let file = match File::open("program.txt") { Ok(f) => BufReader::new(f), Err(e) => panic!(e), }; for line in file.lines() { let text = line.unwrap(); let mut node = ProgramNode::new(); let mut context = Context::new(text.clone()); println!("text = \"{}\"", text); node.parse(&mut context); } }
use std::fmt; #[derive(Debug, PartialEq)] pub enum Error { ConnectionTerminated, CommandFailed, NoValue, BodyDeleted } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Error::ConnectionTerminated => write!(f, "ConnectionTerminated"), Error::CommandFailed => write!(f, "Command failed, real error probably in bullet's log"), Error::NoValue => write!(f, "No such value"), Error::BodyDeleted => write!(f, "Trying to use deleted body"), } } }