text
stringlengths
8
4.13M
/// Classify one number FizzBuzz-style: "fizz" for multiples of 3,
/// "buzz" for multiples of 5 ("fizzbuzz" for both), otherwise the
/// number's decimal representation.
fn fizzbuzz(n: u32) -> String {
    let mut label = String::new();
    if n % 3 == 0 {
        label.push_str("fizz");
    }
    if n % 5 == 0 {
        label.push_str("buzz");
    }
    if label.is_empty() {
        label = n.to_string();
    }
    label
}

/// Print FizzBuzz for 1..=100.
///
/// Bug fix: the original called `print("fizz")` — `print` is a macro in
/// Rust, so calling it without `!` does not compile. The classification
/// is factored into `fizzbuzz` so it can be tested; output per number is
/// unchanged (label followed by a newline).
fn main() {
    for count in 1..=100 {
        println!("{}", fizzbuzz(count));
    }
}
#[doc = "I2C identification register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`i2c_ipidr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct I2C_IPIDR_SPEC;

impl crate::RegisterSpec for I2C_IPIDR_SPEC {
    // 32-bit register.
    type Ux = u32;
}

#[doc = "`read()` method returns [`i2c_ipidr::R`](R) reader structure"]
impl crate::Readable for I2C_IPIDR_SPEC {}

#[doc = "`reset()` method sets I2C_IPIDR to value 0x0013_0012"]
impl crate::Resettable for I2C_IPIDR_SPEC {
    // Hardware reset value from the device's SVD description.
    const RESET_VALUE: Self::Ux = 0x0013_0012;
}

#[doc = "Register `I2C_IPIDR` reader"]
pub type R = crate::R<I2C_IPIDR_SPEC>;

#[doc = "Field `ID` reader - ID"]
pub type ID_R = crate::FieldReader<u32>;

impl R {
    #[doc = "Bits 0:31 - ID"]
    #[inline(always)]
    pub fn id(&self) -> ID_R {
        // The ID field occupies the full register width.
        ID_R::new(self.bits)
    }
}
/* bind_params_sqlx_postgres!: walks a slice of sea-query `Value`s and
   `.bind()`s each one onto a sqlx Postgres query, returning the rebound
   query. Unsigned integer widths are remapped (u8/u16 -> u32, u64 -> i64)
   because Postgres has no native unsigned integer types; remaining
   variants are probed via is_json/is_date_time/is_uuid and bound through
   the corresponding `as_ref_*` accessor, with anything else hitting
   `unimplemented!()`. */
#[macro_export] macro_rules! bind_params_sqlx_postgres { ( $query:expr, $params:expr ) => {{ let mut query = $query; for value in $params.iter() { query = match value { Value::Null => query.bind(None::<bool>), Value::Bool(v) => query.bind(v), Value::TinyInt(v) => query.bind(v), Value::SmallInt(v) => query.bind(v), Value::Int(v) => query.bind(v), Value::BigInt(v) => query.bind(v), Value::TinyUnsigned(v) => query.bind(*v as u32), Value::SmallUnsigned(v) => query.bind(*v as u32), Value::Unsigned(v) => query.bind(v), Value::BigUnsigned(v) => query.bind(*v as i64), Value::Float(v) => query.bind(v), Value::Double(v) => query.bind(v), Value::String(v) => query.bind(v.as_str()), Value::Bytes(v) => query.bind(v.as_ref()), _ => { if value.is_json() { query.bind(value.as_ref_json()) } else if value.is_date_time() { query.bind(value.as_ref_date_time()) } else if value.is_uuid() { query.bind(value.as_ref_uuid()) } else { unimplemented!(); } } }; } query }}; }
/* sea_query_driver_postgres!: expands to a `sea_query_driver_postgres`
   module exposing `bind_query` / `bind_query_as`, thin wrappers over the
   macro above, type-aliased to sqlx's Query/QueryAs for the Postgres
   backend. Generated as a macro so the host crate supplies the sqlx
   dependency. */
#[macro_export] macro_rules! sea_query_driver_postgres { () => { mod sea_query_driver_postgres { use sqlx::{postgres::PgArguments, query::Query, query::QueryAs, Postgres}; use $crate::{Value, Values}; type SqlxQuery<'a> = sqlx::query::Query<'a, Postgres, PgArguments>; type SqlxQueryAs<'a, T> = sqlx::query::QueryAs<'a, Postgres, T, PgArguments>; pub fn bind_query<'a>(query: SqlxQuery<'a>, params: &'a Values) -> SqlxQuery<'a> { $crate::bind_params_sqlx_postgres!(query, params.0) } pub fn bind_query_as<'a, T>( query: SqlxQueryAs<'a, T>, params: &'a Values, ) -> SqlxQueryAs<'a, T> { $crate::bind_params_sqlx_postgres!(query, params.0) } } }; }
use crate::utils::Side;
// NOTE: `PrefabData` is imported twice on purpose — `assets::PrefabData`
// is the trait, `derive::PrefabData` is the derive macro of the same name.
use amethyst::{ assets::PrefabData, derive::PrefabData, ecs::{Component, DenseVecStorage, Entity, WriteStorage}, Error, };
use serde::{Deserialize, Serialize};

// ECS component tagging a "net" entity with the court side it belongs to.
// The PrefabData derive plus `#[prefab(Component)]` let it be attached to
// entities straight from prefab files via serde.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, PrefabData)]
#[prefab(Component)]
pub struct Net {
    // Which side of the playfield this net is on.
    pub side: Side,
}

impl Component for Net {
    // Dense storage: presumably most entities with this component cluster
    // in low ids — TODO confirm this matters vs. the amethyst default.
    type Storage = DenseVecStorage<Self>;
}
/* Nushell `help` core command. Four behaviors, chosen from the call:
   `help --find <text>` searches names/usage/search-terms, `help commands`
   tabulates every declaration, `help <name...>` renders one command's
   full help page, and bare `help` prints the welcome banner. */
use fancy_regex::Regex; use nu_ansi_term::{ Color::{Default, Red, White}, Style, }; use nu_color_config::get_color_config; use nu_engine::{get_full_help, CallExt}; use nu_protocol::{ ast::Call, engine::{Command, EngineState, Stack}, span, Category, Example, IntoInterruptiblePipelineData, IntoPipelineData, PipelineData, ShellError, Signature, Span, Spanned, SyntaxShape, Value, }; use std::borrow::Borrow; #[derive(Clone)] pub struct Help; impl Command for Help { fn name(&self) -> &str { "help" } fn signature(&self) -> Signature { Signature::build("help") .rest( "rest", SyntaxShape::String, "the name of command to get help on", ) .named( "find", SyntaxShape::String, "string to find in command names, usage, and search terms", Some('f'), ) .category(Category::Core) } fn usage(&self) -> &str { "Display help information about commands." } fn run( &self, engine_state: &EngineState, stack: &mut Stack, call: &Call, _input: PipelineData, ) -> Result<PipelineData, ShellError> { help(engine_state, stack, call) } fn examples(&self) -> Vec<Example> { vec![ Example { description: "show all commands and sub-commands", example: "help commands", result: None, }, Example { description: "show help for single command", example: "help match", result: None, }, Example { description: "show help for single sub-command", example: "help str lpad", result: None, }, Example { description: "search for string in command names, usage and search terms", example: "help --find char", result: None, }, ] } } fn help( engine_state: &EngineState, stack: &mut Stack, call: &Call, ) -> Result<PipelineData, ShellError> { let head = call.head; let find: Option<Spanned<String>> = call.get_flag(engine_state, stack, "find")?; let rest: Vec<Spanned<String>> = call.rest(engine_state, stack, 0)?; let commands = engine_state.get_decl_ids_sorted(false); let config = engine_state.get_config(); let color_hm = get_color_config(config); let default_style = Style::new().fg(Default).on(Default); let string_style = match
/* Continuation: resolve the configured "string" color (falling back to
   the default style). With `--find`, the search string is lowercased for
   matching but the original casing is kept for highlighting; one record
   per matching declaration is built, with matched name/usage fragments
   highlighted via highlight_search_string. */
color_hm.get("string") { Some(style) => style, None => &default_style, }; if let Some(f) = find { let org_search_string = f.item.clone(); let search_string = f.item.to_lowercase(); let mut found_cmds_vec = Vec::new(); for decl_id in commands { let mut cols = vec![]; let mut vals = vec![]; let decl = engine_state.get_decl(decl_id); let sig = decl.signature().update_from_command(decl.borrow()); let key = sig.name; let usage = sig.usage; let search_terms = sig.search_terms; let matches_term = if !search_terms.is_empty() { search_terms .iter() .any(|term| term.to_lowercase().contains(&search_string)) } else { false }; let key_match = key.to_lowercase().contains(&search_string); let usage_match = usage.to_lowercase().contains(&search_string); if key_match || usage_match || matches_term { cols.push("name".into()); vals.push(Value::String { val: if key_match { highlight_search_string(&key, &org_search_string, string_style)? } else { key }, span: head, }); cols.push("category".into()); vals.push(Value::String { val: sig.category.to_string(), span: head, }); cols.push("is_plugin".into()); vals.push(Value::Bool { val: decl.is_plugin().is_some(), span: head, }); cols.push("is_custom".into()); vals.push(Value::Bool { val: decl.is_custom_command(), span: head, }); cols.push("is_keyword".into()); vals.push(Value::Bool { val: decl.is_parser_keyword(), span: head, }); cols.push("usage".into()); vals.push(Value::String { val: if usage_match { highlight_search_string(&usage, &org_search_string, string_style)?
/* Continuation: finish the usage cell, add the search-terms cell (each
   matching term highlighted; highlight failures fall back to the plain
   styled term), then emit the collected records as an interruptible
   pipeline. Without `--find`, `help commands` builds one record per
   declaration with no highlighting. */
} else { usage }, span: head, }); cols.push("search_terms".into()); vals.push(if search_terms.is_empty() { Value::nothing(head) } else { Value::String { val: if matches_term { search_terms .iter() .map(|term| { if term.to_lowercase().contains(&search_string) { match highlight_search_string( term, &org_search_string, string_style, ) { Ok(s) => s, Err(_) => { string_style.paint(term.to_string()).to_string() } } } else { string_style.paint(term.to_string()).to_string() } }) .collect::<Vec<_>>() .join(", ") } else { search_terms.join(", ") }, span: head, } }); found_cmds_vec.push(Value::Record { cols, vals, span: head, }); } } return Ok(found_cmds_vec .into_iter() .into_pipeline_data(engine_state.ctrlc.clone())); } if !rest.is_empty() { let mut found_cmds_vec = Vec::new(); if rest[0].item == "commands" { for decl_id in commands { let mut cols = vec![]; let mut vals = vec![]; let decl = engine_state.get_decl(decl_id); let sig = decl.signature().update_from_command(decl.borrow()); let key = sig.name; let usage = sig.usage; let search_terms = sig.search_terms; cols.push("name".into()); vals.push(Value::String { val: key, span: head, }); cols.push("category".into()); vals.push(Value::String { val: sig.category.to_string(), span: head, }); cols.push("is_plugin".into()); vals.push(Value::Bool { val: decl.is_plugin().is_some(), span: head, }); cols.push("is_custom".into()); vals.push(Value::Bool { val: decl.is_custom_command(), span: head, }); cols.push("is_keyword".into()); vals.push(Value::Bool { val: decl.is_parser_keyword(), span: head, }); cols.push("usage".into()); vals.push(Value::String { val: usage, span: head, }); cols.push("search_terms".into()); vals.push(if search_terms.is_empty() { Value::nothing(head) } else { Value::String { val: search_terms.join(", "), span: head, } }); found_cmds_vec.push(Value::Record { cols, vals, span: head, }); } Ok(found_cmds_vec .into_iter() .into_pipeline_data(engine_state.ctrlc.clone())) } else { let mut name = String::new(); for r
/* Continuation: `help <name...>` joins the rest args into a single
   space-separated command name, renders the matching signatures' full
   help (joined with a separator when several match), and errors with
   CommandNotFound over the span of the rest args when nothing matches.
   The final else-arm is bare `help`: the static welcome banner. */
in &rest { if !name.is_empty() { name.push(' '); } name.push_str(&r.item); } let output = engine_state .get_signatures_with_examples(false) .iter() .filter(|(signature, _, _, _)| signature.name == name) .map(|(signature, examples, _, _)| { get_full_help(signature, examples, engine_state, stack) }) .collect::<Vec<String>>(); if !output.is_empty() { Ok(Value::String { val: output.join("======================\n\n"), span: call.head, } .into_pipeline_data()) } else { Err(ShellError::CommandNotFound(span(&[ rest[0].span, rest[rest.len() - 1].span, ]))) } } } else { let msg = r#"Welcome to Nushell. Here are some tips to help you get started. * help commands - list all available commands * help <command name> - display help about a particular command * help --find <text to search> - search through all of help Nushell works on the idea of a "pipeline". Pipelines are commands connected with the '|' character. Each stage in the pipeline works together to load, parse, and display information to you. [Examples] List the files in the current directory, sorted by size: ls | sort-by size Get information about the current system: sys | get host Get the processes on your system actively using CPU: ps | where cpu > 0 You can also learn more at https://www.nushell.sh/book/"#; Ok(Value::String { val: msg.into(), span: head, } .into_pipeline_data()) } } // Highlight the search string using ANSI escape sequences and regular expressions.
/// Highlight every case-insensitive occurrence of `needle` inside
/// `haystack` with a white-on-red ANSI style, painting the stretches
/// between matches with `string_style`.
///
/// Errors with `ShellError::GenericError` when `needle` is not a valid
/// regular expression or when a capture cannot be produced.
pub fn highlight_search_string(
    haystack: &str,
    needle: &str,
    string_style: &Style,
) -> Result<String, ShellError> {
    // `(?i)` makes the whole pattern case-insensitive.
    let pattern = format!("(?i){}", needle);
    let matcher = Regex::new(&pattern).map_err(|err| {
        ShellError::GenericError(
            "Could not compile regex".into(),
            err.to_string(),
            Some(Span::test_data()),
            None,
            Vec::new(),
        )
    })?;

    let highlight = Style::new().fg(White).on(Red);
    let mut painted = String::new();
    let mut cursor = 0;

    for capture in matcher.captures_iter(haystack) {
        // fancy_regex yields Result per capture; surface failures as shell errors.
        let capture = capture.map_err(|e| {
            ShellError::GenericError(
                "Error with regular expression capture".into(),
                e.to_string(),
                None,
                None,
                Vec::new(),
            )
        })?;
        // Whole-match byte bounds; default to 0 exactly as before.
        let (from, to) = capture.get(0).map_or((0, 0), |m| (m.start(), m.end()));
        painted.push_str(&string_style.paint(&haystack[cursor..from]).to_string());
        painted.push_str(&highlight.paint(&haystack[from..to]).to_string());
        cursor = to;
    }

    // Tail after the last match, styled like the other non-match text.
    painted.push_str(&string_style.paint(&haystack[cursor..]).to_string());
    Ok(painted)
}
/* Groth16 prover/verifier glue for circom-generated circuits, built on
   bellman_ce. The *Json structs mirror snarkjs' circuit / proof /
   proving-key / verifying-key file formats (hence the serde renames like
   nPubInputs, polsA, hExps). */
#![allow(clippy::needless_range_loop)] extern crate bellman_ce; extern crate rand; use std::str; use std::fs::{self, OpenOptions, File}; use std::io::{BufReader, Read, Seek}; use std::collections::BTreeMap; use std::iter::repeat; use std::sync::Arc; use itertools::Itertools; use rand::{Rng, OsRng}; use bellman_ce::{ Circuit, SynthesisError, Variable, Index, ConstraintSystem, LinearCombination, source::QueryDensity, groth16::{ Parameters, Proof, generate_random_parameters as generate_random_parameters2, prepare_verifying_key, create_random_proof, verify_proof, prepare_prover, }, pairing::{ Engine, CurveAffine, ff::PrimeField, ff::ScalarEngine, bn256::{ Bn256, Fq, Fq2, G1Affine, G2Affine, } } }; use crate::utils::{ repr_to_big, proof_to_hex, p1_to_vec, p2_to_vec, pairing_to_vec, }; #[derive(Serialize, Deserialize)] struct CircuitJson { pub constraints: Vec<Vec<BTreeMap<String, String>>>, #[serde(rename = "nPubInputs")] pub num_inputs: usize, #[serde(rename = "nOutputs")] pub num_outputs: usize, #[serde(rename = "nVars")] pub num_variables: usize, } #[derive(Serialize, Deserialize)] struct ProofJson { pub protocol: String, pub proof: Option<String>, pub pi_a: Vec<String>, pub pi_b: Vec<Vec<String>>, pub pi_c: Vec<String>, } #[derive(Serialize, Deserialize)] struct ProvingKeyJson { #[serde(rename = "polsA")] pub pols_a: Vec<BTreeMap<String, String>>, #[serde(rename = "polsB")] pub pols_b: Vec<BTreeMap<String, String>>, #[serde(rename = "polsC")] pub pols_c: Vec<BTreeMap<String, String>>, #[serde(rename = "A")] pub a: Vec<Vec<String>>, #[serde(rename = "B1")] pub b1: Vec<Vec<String>>, #[serde(rename = "B2")] pub b2: Vec<Vec<Vec<String>>>, #[serde(rename = "C")] pub c: Vec<Option<Vec<String>>>, pub vk_alfa_1: Vec<String>, pub vk_beta_1: Vec<String>, pub vk_delta_1: Vec<String>, pub vk_beta_2: Vec<Vec<String>>, pub vk_delta_2: Vec<Vec<String>>, #[serde(rename = "hExps")] pub h: Vec<Vec<String>>, pub protocol: String, #[serde(rename = "nPublic")] pub n_public: usize,
/* Continuation of ProvingKeyJson, then VerifyingKeyJson (NOTE: both the
   `vk_alfa_1`/`vk_alfabeta_12` and `vk_alpha_1`/`vk_alphabeta_12`
   spellings are emitted — presumably for compatibility across snarkjs
   versions; confirm before removing either), the in-memory R1CS model,
   and CircomCircuit with its public-input accessors. Wire index 0 is
   skipped by get_public_inputs — it is the constant-one wire. */
#[serde(rename = "nVars")] pub n_vars: usize, #[serde(rename = "domainBits")] pub domain_bits: usize, #[serde(rename = "domainSize")] pub domain_size: usize, } #[derive(Serialize, Deserialize)] struct VerifyingKeyJson { #[serde(rename = "IC")] pub ic: Vec<Vec<String>>, pub vk_alfa_1: Vec<String>, pub vk_alpha_1: Vec<String>, pub vk_beta_2: Vec<Vec<String>>, pub vk_gamma_2: Vec<Vec<String>>, pub vk_delta_2: Vec<Vec<String>>, pub vk_alfabeta_12: Vec<Vec<Vec<String>>>, pub vk_alphabeta_12: Vec<Vec<Vec<String>>>, pub curve: String, pub protocol: String, #[serde(rename = "nPublic")] pub inputs_count: usize, } pub type Constraint<E> = ( Vec<(usize, <E as ScalarEngine>::Fr)>, Vec<(usize, <E as ScalarEngine>::Fr)>, Vec<(usize, <E as ScalarEngine>::Fr)>, ); #[derive(Clone)] pub struct R1CS<E: Engine> { pub num_inputs: usize, pub num_aux: usize, pub num_variables: usize, pub constraints: Vec<Constraint<E>>, } #[derive(Clone)] pub struct CircomCircuit<E: Engine> { pub r1cs: R1CS<E>, pub witness: Option<Vec<E::Fr>>, pub wire_mapping: Option<Vec<usize>>, // debug symbols } impl<'a, E: Engine> CircomCircuit<E> { pub fn get_public_inputs(&self) -> Option<Vec<E::Fr>> { match &self.witness { None => None, Some(w) => match &self.wire_mapping { None => Some(w[1..self.r1cs.num_inputs].to_vec()), Some(m) => Some(m[1..self.r1cs.num_inputs].iter().map(|i| w[*i]).collect_vec()), } } } pub fn get_public_inputs_json(&self) -> String { let inputs = self.get_public_inputs(); let inputs = match inputs { None => return String::from("[]"), Some(inp) => inp.iter().map(|x| repr_to_big(x.into_repr())).collect_vec(), }; serde_json::to_string_pretty(&inputs).unwrap() } } /// Our demo circuit implements this `Circuit` trait which /// is used during paramgen and proving in order to /// synthesize the constraint system.
/* Circuit synthesis: allocates public-input wires (index 1.. — wire 0 is
   the constant one) and aux wires from the witness, with wire_mapping
   optionally reindexing. Without a witness (paramgen) every wire gets the
   placeholder value 1. Each R1CS constraint becomes an enforce() over
   LinearCombinations; trivial `0 * LC = 0` rows are skipped. */
impl<'a, E: Engine> Circuit<E> for CircomCircuit<E> { //noinspection RsBorrowChecker fn synthesize<CS: ConstraintSystem<E>>( self, cs: &mut CS ) -> Result<(), SynthesisError> { let witness = &self.witness; let wire_mapping = &self.wire_mapping; for i in 1..self.r1cs.num_inputs { cs.alloc_input( || format!("variable {}", i), || { Ok(match witness { None => E::Fr::from_str("1").unwrap(), Some(w) => match wire_mapping { None => w[i], Some(m) => w[m[i]], } }) }, )?; } for i in 0..self.r1cs.num_aux { cs.alloc( || format!("aux {}", i), || { Ok(match witness { None => E::Fr::from_str("1").unwrap(), Some(w) => match wire_mapping { None => w[i + self.r1cs.num_inputs], Some(m) => w[m[i + self.r1cs.num_inputs]], }, }) }, )?; } let make_index = |index| if index < self.r1cs.num_inputs { Index::Input(index) } else { Index::Aux(index - self.r1cs.num_inputs) }; let make_lc = |lc_data: Vec<(usize, E::Fr)>| lc_data.iter().fold( LinearCombination::<E>::zero(), |lc: LinearCombination<E>, (index, coeff)| lc + (*coeff, Variable::new_unchecked(make_index(*index))) ); for (i, constraint) in self.r1cs.constraints.iter().enumerate() { // 0 * LC = 0 must be ignored if !((constraint.0.is_empty() || constraint.1.is_empty()) && constraint.2.is_empty()) { cs.enforce(|| format!("constraint {}", i), |_| make_lc(constraint.0.clone()), |_| make_lc(constraint.1.clone()), |_| make_lc(constraint.2.clone()), ); } } Ok(()) } } pub fn prove<E: Engine, R: Rng>(circuit: CircomCircuit<E>, params: &Parameters<E>, mut rng: R) -> Result<Proof<E>, SynthesisError> { let mut params2 = params.clone(); filter_params(&mut params2); create_random_proof(circuit, &params2, &mut rng) } pub fn generate_random_parameters<E: Engine, R: Rng>(circuit: CircomCircuit<E>, mut rng: R) -> Result<Parameters<E>, SynthesisError> { generate_random_parameters2(circuit, &mut rng) } pub fn verify_circuit<E: Engine>(circuit: &CircomCircuit<E>, params: &Parameters<E>, proof: &Proof<E>) -> Result<bool, SynthesisError> { let inputs = match
/* Continuation: verify_circuit pulls public inputs from the circuit's
   own witness; verify takes them explicitly. create_verifier_sol fills
   the verifier_groth.sol template — note G2 coordinates are emitted
   c1-before-c0, matching the EVM pairing precompile's encoding. */
circuit.get_public_inputs() { None => return Err(SynthesisError::AssignmentMissing), Some(inp) => inp, }; verify_proof(&prepare_verifying_key(&params.vk), proof, &inputs) } pub fn verify<E: Engine>(params: &Parameters<E>, proof: &Proof<E>, inputs: &[E::Fr]) -> Result<bool, SynthesisError> { verify_proof(&prepare_verifying_key(&params.vk), proof, &inputs) } pub fn create_verifier_sol(params: &Parameters<Bn256>) -> String { // TODO: use a simple template engine let bytes = include_bytes!("verifier_groth.sol"); let template = String::from_utf8_lossy(bytes); let p1_to_str = |p: &<Bn256 as Engine>::G1Affine| { if p.is_zero() { // todo: throw instead return String::from("<POINT_AT_INFINITY>"); } let xy = p.into_xy_unchecked(); let x = repr_to_big(xy.0.into_repr()); let y = repr_to_big(xy.1.into_repr()); format!("uint256({}), uint256({})", x, y) }; let p2_to_str = |p: &<Bn256 as Engine>::G2Affine| { if p.is_zero() { // todo: throw instead return String::from("<POINT_AT_INFINITY>"); } let xy = p.into_xy_unchecked(); let x_c0 = repr_to_big(xy.0.c0.into_repr()); let x_c1 = repr_to_big(xy.0.c1.into_repr()); let y_c0 = repr_to_big(xy.1.c0.into_repr()); let y_c1 = repr_to_big(xy.1.c1.into_repr()); format!("[uint256({}), uint256({})], [uint256({}), uint256({})]", x_c1, x_c0, y_c1, y_c0) }; let template = template.replace("<%vk_alfa1%>", &*p1_to_str(&params.vk.alpha_g1)); let template = template.replace("<%vk_beta2%>", &*p2_to_str(&params.vk.beta_g2)); let template = template.replace("<%vk_gamma2%>", &*p2_to_str(&params.vk.gamma_g2)); let template = template.replace("<%vk_delta2%>", &*p2_to_str(&params.vk.delta_g2)); let template = template.replace("<%vk_ic_length%>", &*params.vk.ic.len().to_string()); let template = template.replace("<%vk_input_length%>", &*(params.vk.ic.len() - 1).to_string()); let mut vi = String::from(""); for i in 0..params.vk.ic.len() { vi = format!("{}{}vk.IC[{}] = Pairing.G1Point({});\n", vi, if vi.is_empty() { "" } else { " " }, i,
/* Continuation: finish the IC-points block of the Solidity template, then
   proof/params file helpers. Parameters::read is called with checked=true
   (the `true` argument), so loading validates the curve points. */
&*p1_to_str(&params.vk.ic[i])); } template.replace("<%vk_ic_pts%>", &*vi) } pub fn create_verifier_sol_file(params: &Parameters<Bn256>, filename: &str) -> std::io::Result<()> { fs::write(filename, create_verifier_sol(params).as_bytes()) } pub fn proof_to_json(proof: &Proof<Bn256>) -> Result<String, serde_json::error::Error> { serde_json::to_string_pretty(&ProofJson { protocol: "groth".to_string(), proof: Some(proof_to_hex(&proof)), pi_a: p1_to_vec(&proof.a), pi_b: p2_to_vec(&proof.b), pi_c: p1_to_vec(&proof.c), }) } pub fn proof_to_json_file(proof: &Proof<Bn256>, filename: &str) -> std::io::Result<()> { let str = proof_to_json(proof).unwrap(); // TODO: proper error handling fs::write(filename, str.as_bytes()) } pub fn load_params_file(filename: &str) -> Parameters<Bn256> { let reader = OpenOptions::new() .read(true) .open(filename) .expect("unable to open."); load_params(reader) } pub fn load_params<R: Read>(reader: R) -> Parameters<Bn256> { Parameters::read(reader, true).expect("unable to read params") } pub fn load_inputs_json_file<E: Engine>(filename: &str) -> Vec<E::Fr> { let reader = OpenOptions::new() .read(true) .open(filename) .expect("unable to open."); load_inputs_json::<E, BufReader<File>>(BufReader::new(reader)) } pub fn load_inputs_json<E: Engine, R: Read>(reader: R) -> Vec<E::Fr> { let inputs: Vec<String> = serde_json::from_reader(reader).unwrap(); inputs.into_iter().map(|x| E::Fr::from_str(&x).unwrap()).collect::<Vec<E::Fr>>() } pub fn load_proof_json_file<E: Engine>(filename: &str) -> Proof<Bn256> { let reader = OpenOptions::new() .read(true) .open(filename) .expect("unable to open."); load_proof_json(BufReader::new(reader)) } pub fn load_proof_json<R: Read>(reader: R) -> Proof<Bn256> { let proof: ProofJson = serde_json::from_reader(reader).unwrap(); Proof { a: G1Affine::from_xy_checked( Fq::from_str(&proof.pi_a[0]).unwrap(), Fq::from_str(&proof.pi_a[1]).unwrap(), ).unwrap(), b: G2Affine::from_xy_checked( Fq2 { c0:
/* Continuation: finish proof JSON loading (pi_b rows are [c0, c1] pairs);
   filter_params strips points-at-infinity from ic/h/a/b_g1/b_g2;
   proving_key_json rebuilds the snarkjs proving-key layout from the
   constraint polynomials and the prover's density maps. */
Fq::from_str(&proof.pi_b[0][0]).unwrap(), c1: Fq::from_str(&proof.pi_b[0][1]).unwrap(), }, Fq2 { c0: Fq::from_str(&proof.pi_b[1][0]).unwrap(), c1: Fq::from_str(&proof.pi_b[1][1]).unwrap(), }, ).unwrap(), c: G1Affine::from_xy_checked( Fq::from_str(&proof.pi_c[0]).unwrap(), Fq::from_str(&proof.pi_c[1]).unwrap(), ).unwrap(), } } pub fn filter_params<E: Engine>(params: &mut Parameters<E>) { params.vk.ic = params.vk.ic.clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>(); params.h = Arc::new((*params.h).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>()); params.a = Arc::new((*params.a).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>()); params.b_g1 = Arc::new((*params.b_g1).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>()); params.b_g2 = Arc::new((*params.b_g2).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>()); } pub fn proving_key_json(params: &Parameters<Bn256>, circuit: CircomCircuit<Bn256>) -> Result<String, serde_json::error::Error> { let mut pols_a: Vec<BTreeMap<String, String>> = vec![]; let mut pols_b: Vec<BTreeMap<String, String>> = vec![]; let mut pols_c: Vec<BTreeMap<String, String>> = vec![]; for _ in 0..circuit.r1cs.num_aux + circuit.r1cs.num_inputs { pols_a.push(BTreeMap::new()); pols_b.push(BTreeMap::new()); pols_c.push(BTreeMap::new()); } for c in 0..circuit.r1cs.constraints.len() { for item in circuit.r1cs.constraints[c].0.iter() { pols_a[item.0].insert(c.to_string(), repr_to_big(item.1.into_repr())); } for item in circuit.r1cs.constraints[c].1.iter() { pols_b[item.0].insert(c.to_string(), repr_to_big(item.1.into_repr())); } for item in circuit.r1cs.constraints[c].2.iter() { pols_c[item.0].insert(c.to_string(), repr_to_big(item.1.into_repr())); } } for i in 0..circuit.r1cs.num_inputs { pols_a[i].insert((circuit.r1cs.constraints.len() + i).to_string(), String::from("1")); } let domain_bits = log2_floor(circuit.r1cs.constraints.len() + circuit.r1cs.num_inputs) + 1; let n_public =
/* Continuation: n_public excludes the constant-one wire. A/B1/B2 follow
   the prover's a/b density maps — dense slots consume the next params
   point, sparse slots emit the zero point; C pads the input slots with
   None, then takes params.l for aux wires. */
circuit.r1cs.num_inputs - 1; let n_vars = circuit.r1cs.num_variables; let p = prepare_prover(circuit).unwrap().assignment; let mut a_iter = params.a.iter(); let mut b1_iter = params.b_g1.iter(); let mut b2_iter = params.b_g2.iter(); let zero1 = G1Affine::zero(); let zero2 = G2Affine::zero(); let a = repeat(true).take(params.vk.ic.len()) .chain(p.a_aux_density.iter()) .map(|item| if item { a_iter.next().unwrap() } else { &zero1 }) .map(|e| p1_to_vec(e)) .collect_vec(); let b1 = p.b_input_density.iter() .chain(p.b_aux_density.iter()) .map(|item| if item { b1_iter.next().unwrap() } else { &zero1 }) .map(|e| p1_to_vec(e)) .collect_vec(); let b2 = p.b_input_density.iter() .chain(p.b_aux_density.iter()) .map(|item| if item { b2_iter.next().unwrap() } else { &zero2 }) .map(|e| p2_to_vec(e)) .collect_vec(); let c = repeat(None).take(params.vk.ic.len()) .chain(params.l.iter().map(|e| Some(p1_to_vec(e)))) .collect_vec(); let proving_key = ProvingKeyJson { pols_a, pols_b, pols_c, a, b1, b2, c, vk_alfa_1: p1_to_vec(&params.vk.alpha_g1), vk_beta_1: p1_to_vec(&params.vk.beta_g1), vk_delta_1: p1_to_vec(&params.vk.delta_g1), vk_beta_2: p2_to_vec(&params.vk.beta_g2), vk_delta_2: p2_to_vec(&params.vk.delta_g2), h: params.h.iter().map(|e| p1_to_vec(e)).collect_vec(), protocol: String::from("groth"), n_public, n_vars, domain_bits, domain_size: 1 << domain_bits, }; serde_json::to_string(&proving_key) } fn log2_floor(num: usize) -> usize { assert!(num > 0); let mut pow = 0; while (1 << (pow + 1)) <= num { pow += 1; } pow } pub fn proving_key_json_file(params: &Parameters<Bn256>, circuit: CircomCircuit<Bn256>, filename: &str) -> std::io::Result<()> { let str = proving_key_json(params, circuit).unwrap(); // TODO: proper error handling fs::write(filename, str.as_bytes()) } pub fn verification_key_json(params: &Parameters<Bn256>) -> Result<String, serde_json::error::Error> { let verification_key = VerifyingKeyJson { ic: params.vk.ic.iter().map(|e| p1_to_vec(e)).collect_vec(), vk_alfa_1:
/* Continuation: verifying-key JSON (alpha/beta pairing precomputed; both
   alfa/alpha spellings populated identically) and witness/R1CS loaders
   for the JSON formats. */
p1_to_vec(&params.vk.alpha_g1), vk_alpha_1: p1_to_vec(&params.vk.alpha_g1), vk_beta_2: p2_to_vec(&params.vk.beta_g2), vk_gamma_2: p2_to_vec(&params.vk.gamma_g2), vk_delta_2: p2_to_vec(&params.vk.delta_g2), vk_alfabeta_12: pairing_to_vec(&Bn256::pairing(params.vk.alpha_g1, params.vk.beta_g2)), vk_alphabeta_12: pairing_to_vec(&Bn256::pairing(params.vk.alpha_g1, params.vk.beta_g2)), inputs_count: params.vk.ic.len() - 1, curve: String::from("BN254"), protocol: String::from("groth"), }; serde_json::to_string_pretty(&verification_key) } pub fn verification_key_json_file(params: &Parameters<Bn256>, filename: &str) -> std::io::Result<()> { let str = verification_key_json(params).unwrap(); // TODO: proper error handling fs::write(filename, str.as_bytes()) } pub fn witness_from_json_file<E: Engine>(filename: &str) -> Vec<E::Fr> { let reader = OpenOptions::new() .read(true) .open(filename) .expect("unable to open."); witness_from_json::<E, BufReader<File>>(BufReader::new(reader)) } pub fn witness_from_json<E: Engine, R: Read>(reader: R) -> Vec<E::Fr> { let witness: Vec<String> = serde_json::from_reader(reader).unwrap(); witness.into_iter().map(|x| E::Fr::from_str(&x).unwrap()).collect::<Vec<E::Fr>>() } pub fn witness_from_bin_file<E: Engine>(filename: &str) -> Result<Vec<E::Fr>, std::io::Error> { let reader = OpenOptions::new() .read(true) .open(filename) .expect("unable to open."); witness_from_bin::<E, BufReader<File>>(BufReader::new(reader)) } pub fn witness_from_bin<E: Engine, R: Read>(reader: R) -> Result<Vec<E::Fr>, std::io::Error> { let file = crate::wtns_reader::read::<E, R>(reader)?; Ok(file.witness) } pub fn r1cs_from_json_file<E: Engine>(filename: &str) -> R1CS<E> { let reader = OpenOptions::new() .read(true) .open(filename) .expect("unable to open."); r1cs_from_json(BufReader::new(reader)) } pub fn r1cs_from_json<E: Engine, R: Read>(reader: R) -> R1CS<E> { let circuit_json: CircuitJson = serde_json::from_reader(reader).unwrap(); let num_inputs =
/* Continuation: num_inputs counts public inputs + outputs + the
   constant-one wire; binary readers delegate to the r1cs/wtns reader
   modules; create_rng wraps the OS RNG. */
circuit_json.num_inputs + circuit_json.num_outputs + 1; let num_aux = circuit_json.num_variables - num_inputs; let convert_constraint = |lc: &BTreeMap<String, String>| { lc.iter().map(|(index, coeff)| (index.parse().unwrap(), E::Fr::from_str(coeff).unwrap())).collect_vec() }; let constraints = circuit_json.constraints.iter().map( |c| (convert_constraint(&c[0]), convert_constraint(&c[1]), convert_constraint(&c[2])) ).collect_vec(); R1CS { num_inputs, num_aux, num_variables: circuit_json.num_variables, constraints, } } pub fn r1cs_from_bin<R: Read + Seek>(reader: R) -> Result<(R1CS<Bn256>, Vec<usize>), std::io::Error> { let file = crate::r1cs_reader::read(reader)?; let num_inputs = (1 + file.header.n_pub_in + file.header.n_pub_out) as usize; let num_variables = file.header.n_wires as usize; let num_aux = num_variables - num_inputs; Ok(( R1CS { num_aux, num_inputs, num_variables, constraints: file.constraints, }, file.wire_mapping.iter().map(|e| *e as usize).collect_vec() )) } pub fn r1cs_from_bin_file(filename: &str) -> Result<(R1CS<Bn256>, Vec<usize>), std::io::Error> { let reader = OpenOptions::new() .read(true) .open(filename) .expect("unable to open."); r1cs_from_bin(BufReader::new(reader)) } pub fn create_rng() -> Box<dyn Rng> { Box::new(OsRng::new().unwrap()) }
use std::fmt; // Structure of a Point #[derive(Copy, Clone)] struct Point { x: i32, y: i32, } // Implementing fmt::Display for Point{} impl fmt::Display for Point { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "({}:{})", self.x, self.y) } } impl Point { // Distance between two points fn distance(&self, p: &Point) -> f32{ f32::sqrt((i32::pow(self.y - p.y, 2) + i32::pow(self.x - p.x, 2)) as f32) } } // Structure of a Triangle struct Triangle { a: Point, b: Point, c: Point, } // Implementing fmt::Display for Triangle{} impl fmt::Display for Triangle { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "A{}, B{}, C{}", self.a, self.b, self.c) } } impl Triangle { // Find perimeter of a triangle fn perimeter(&self) -> f32{ self.a.distance(&self.b) + self.b.distance(&self.c) + self.c.distance(&self.a) } // Find area of a triangle fn area(&self) -> f32 { ((self.a.x*(self.b.y-self.c.y) + self.b.x*(self.c.y - self.a.y) + self.c.x*(self.a.y - self.b.y)) as f32 /2.0).abs() } // Find area of a triangle with Heron's Formula fn area_heron(&self, perimeter:f32) -> f32 { let p = perimeter/(2 as f32); f32::sqrt(p * (p-self.a.distance(&self.b)) * (p-self.b.distance(&self.c)) * (p-self.c.distance(&self.a))) } // Check whether a point is inside a triangle. 
// This is done by creating smaller triangles with the point and seeing whether `ABC = ABP + PBC + APC` fn is_point_inside(&self, point: &Point) -> bool { let tr_a = Triangle{a: self.a, b: self.b, c: *point}; let tr_b = Triangle{a: *point, b: self.b, c: self.c}; let tr_c = Triangle{a: self.a, b: *point, c: self.c}; self.area() == tr_a.area() + tr_b.area() + tr_c.area() } fn can_contain_triangle(&self, tr: &Triangle) -> bool { self.area() >= tr.area() } fn is_triangle_inside(&self, tr: &Triangle) -> bool{ self.is_point_inside(&tr.a) && self.is_point_inside(&tr.b) && self.is_point_inside(&tr.c) } } fn main() { println!("\n==== Ultimate Triangle Functions"); let tr = Triangle{ a: Point{x: 5, y: 5}, b: Point{x: 10, y: 0}, c: Point{x: 0, y: 0}, }; println!("Points of the triangle : {}", tr); println!("The length of the three sides of the triangle : {} {} {}", tr.a.distance(&tr.b), tr.b.distance(&tr.c), tr.c.distance(&tr.a)); println!("Perimeter of the triangle is : {}", tr.area()); println!("Area of the triangle is : {}", tr.area()); println!("Area of the triangle is using Heron's Formula : {}", tr.area_heron(tr.perimeter())); let random_point = Point{x: 4, y: 3}; println!("Is the point {} in the triangle? : {}", random_point, tr.is_point_inside(&random_point)); let random_triangle = Triangle{ a: Point{x: 2, y: 3}, b: Point{x: 3, y: 1}, c: Point{x: 5, y: 0}, }; println!("Can the traingle {} be contained in the triangle? : {}", random_triangle, tr.can_contain_triangle(&random_triangle)); println!("Is the triangle {} in the triangle? : {}", random_triangle, tr.is_triangle_inside(&random_triangle)); }
extern crate orbclient;

/// A single UV coordinate (normalized, nominally 0.0..=1.0 on both axes).
#[derive(Clone, Copy, Debug)]
pub struct UVPoint {
    x: f32,
    y: f32,
}

/// UV coordinates for the three corners of a textured triangle.
#[derive(Clone, Copy, Debug)]
pub struct UVData {
    p1: UVPoint,
    p2: UVPoint,
    p3: UVPoint,
}

/// A bitmap texture together with the corner UVs used to sample it.
pub struct UVTexture {
    pub bmp: orbclient::BmpFile,
    pub p1: UVPoint,
    pub p2: UVPoint,
    pub p3: UVPoint,
}

impl UVTexture {
    /// Empty texture with the standard unit-triangle corner UVs.
    pub fn default() -> UVTexture {
        UVTexture {
            bmp: orbclient::BmpFile::default(),
            p1: UVPoint { x: 0.0, y: 0.0 },
            p2: UVPoint { x: 0.0, y: 1.0 },
            p3: UVPoint { x: 1.0, y: 0.0 },
        }
    }

    /// Texture loaded from the BMP file at `path`, same corner UVs.
    pub fn path(path: &str) -> UVTexture {
        UVTexture {
            bmp: orbclient::BmpFile::from_path(path),
            p1: UVPoint { x: 0.0, y: 0.0 },
            p2: UVPoint { x: 0.0, y: 1.0 },
            p3: UVPoint { x: 1.0, y: 0.0 },
        }
    }

    /// Sample the bitmap at normalized coordinates (`x`, `y`).
    ///
    /// Bug fix: the original guarded the flat pixel index against the
    /// magic constant 260000 instead of the bitmap's actual pixel count,
    /// so it could index out of bounds on bitmaps larger than that, and a
    /// `px` past the row width silently bled into the next row. Out-of-
    /// range samples now return plain white, as the old guard intended.
    pub fn get_by_uv(&self, x: f32, y: f32) -> orbclient::Color {
        use std::ops::Deref;
        let img_w = self.bmp.width() as i32;
        let px = (x * self.bmp.width() as f32) as i32;
        let py = (y * self.bmp.height() as f32) as i32;
        let img_slice = self.bmp.deref();
        // Reject coordinates outside the bitmap before forming the index.
        if px < 0 || py < 0 || px >= img_w {
            return orbclient::Color::rgb(255, 255, 255);
        }
        // Checked index against the real pixel buffer length.
        match img_slice.get((px + py * img_w) as usize) {
            Some(color) => *color,
            None => orbclient::Color::rgb(255, 255, 255),
        }
    }
}
use clap::{load_yaml, App};
use colored::*;
use std::fs;

mod day_01;
mod day_02;
mod day_03;
mod day_04;
mod day_05;
mod day_06;
mod day_07;
mod day_08;
mod day_09;
mod day_10;
mod day_11;
mod day_12;
mod day_13;
mod day_14;
mod day_15;
mod day_16;
mod day_17;
mod day_18;
mod day_19;
mod day_20;
mod day_21;
mod day_22;
mod day_23;
mod day_24;
mod day_25;

/// Entry point: parses the day/part/sample options, reads the matching input
/// file, and dispatches to the requested day's solver.
fn main() {
    let yaml = load_yaml!("cli.yaml");
    let matches = App::from(yaml).get_matches();

    // Get and validate the command-line arguments.
    let day = parse_integer_option(&matches, "DAY", 1, 25);
    let part = parse_integer_option(&matches, "PART", 1, 2);
    let sample = matches.is_present("SAMPLE");

    // Display a fancy header.
    println!(
        "{} / {} {}",
        format!("Day {:02}", day).green(),
        format!("Part {}", part).red(),
        if sample { "(Sample)" } else { "" },
    );
    println!("{}", "-".repeat(80));

    // Read the input file into a string.
    let suffix = if sample { "sample" } else { "unique" };
    let filename = format!("input/day_{:02}_{}.txt", day, suffix);
    let input = fs::read_to_string(filename).expect("Failed to read input file");

    // Dispatch table: one `[part_1, part_2]` pair per day. Replaces the old
    // 25-arm nested `match` with a single indexed lookup; `day` and `part`
    // are already validated above, but the panic path is kept for safety.
    const SOLVERS: [[fn(String); 2]; 25] = [
        [day_01::part_1, day_01::part_2],
        [day_02::part_1, day_02::part_2],
        [day_03::part_1, day_03::part_2],
        [day_04::part_1, day_04::part_2],
        [day_05::part_1, day_05::part_2],
        [day_06::part_1, day_06::part_2],
        [day_07::part_1, day_07::part_2],
        [day_08::part_1, day_08::part_2],
        [day_09::part_1, day_09::part_2],
        [day_10::part_1, day_10::part_2],
        [day_11::part_1, day_11::part_2],
        [day_12::part_1, day_12::part_2],
        [day_13::part_1, day_13::part_2],
        [day_14::part_1, day_14::part_2],
        [day_15::part_1, day_15::part_2],
        [day_16::part_1, day_16::part_2],
        [day_17::part_1, day_17::part_2],
        [day_18::part_1, day_18::part_2],
        [day_19::part_1, day_19::part_2],
        [day_20::part_1, day_20::part_2],
        [day_21::part_1, day_21::part_2],
        [day_22::part_1, day_22::part_2],
        [day_23::part_1, day_23::part_2],
        [day_24::part_1, day_24::part_2],
        [day_25::part_1, day_25::part_2],
    ];
    let solver = *SOLVERS
        .get((day - 1) as usize)
        .and_then(|parts| parts.get((part - 1) as usize))
        .unwrap_or_else(|| panic!("No solver is available for day {}.", day));

    solver(input);
}

/// Parses the given command-line option into an integer.
/// Reads `option` from the parsed arguments, parses it as an integer, and
/// checks it falls within `[min, max]`.
///
/// Panics (with a message naming the option) when the option is absent,
/// not a valid integer, or out of range — this is a CLI entry-point helper,
/// so aborting with a clear message is the intended behavior.
fn parse_integer_option(matches: &clap::ArgMatches, option: &str, min: i32, max: i32) -> i32 {
    let string = matches
        .value_of(option)
        .unwrap_or_else(|| panic!("The value of option '{}' could not be retrieved.", option));
    // BUG FIX: this was a bare `.unwrap()`, which aborted with an opaque
    // ParseIntError message on non-numeric user input.
    let value: i32 = string
        .parse()
        .unwrap_or_else(|_| panic!("The value of option '{}' is not a valid integer.", option));
    if value < min || value > max {
        panic!(
            "The value of option '{}' does not fall in the range [{}, {}].",
            option, min, max
        );
    }
    value
}
#[macro_use] extern crate failure; extern crate winapi; #[macro_use] extern crate log; use std::thread; use std::time::{Duration, Instant}; pub use errors::{Error, HcbResult}; pub use procext::{MonitoredProcess, MonitoredThread}; use win::Process; pub mod errors; pub mod procext; pub mod win; /// Returns a handle to the Rocket League process. pub fn rl_process() -> HcbResult<Process> { Process::all()? .find(|p| { p.name() .map(|name| name == "RocketLeague.exe") .unwrap_or(false) }) .ok_or(Error::NoProcess) } fn wait_for_three_threads( process: &mut MonitoredProcess, poll_interval: Duration, ) -> HcbResult<[u32; 3]> { loop { if let Some(active_threads) = process.thread_ids_by_activity().get(0..3) { return Ok([active_threads[0], active_threads[1], active_threads[2]]); } thread::sleep(poll_interval); process.update()?; } } fn top_three_ideal(ids: &[u32; 3], process: &mut MonitoredProcess) -> HcbResult<[u32; 3]> { let mut get_ideal = |i| { process .threads_mut() .get_mut(&ids[i]) .unwrap() .thread() .ideal_processor() }; Ok([get_ideal(0)?, get_ideal(1)?, get_ideal(2)?]) } fn set_top_three_ideal(ids: &[u32; 3], process: &mut MonitoredProcess) -> HcbResult<()> { println!("{:?}", ids); let mut core_num = 0; for core in (0u32..3) { core_num = ((core + 1) * 2) - 1; let s: String = core_num.to_string(); println!("{}", s); process .threads_mut() .get_mut(&ids[core as usize]) .unwrap() .thread_mut() .set_ideal_processor(core_num)?; } Ok(()) } /// Monitors the Rocket League process, assigning its three most active threads to separate cores. pub fn manage_rl_threads(poll_interval: Duration, settling_period: Duration) -> HcbResult<()> { let mut process = rl_process().and_then(|p| MonitoredProcess::new(p))?; info!("Process found."); // The threads which have had affinity assigned. let mut set_top_three: Option<[u32; 3]> = None; // The top three threads at the moment of the last poll. 
let mut prev_top_three = wait_for_three_threads(&mut process, poll_interval)?; // When the thread order last changed. let mut last_changed = Instant::now(); let changing_soon_fraction = settling_period / 10; let changing_soon_period = changing_soon_fraction * 8; let mut notified_changing_soon = false; // Whether the current top three threads equal the ones with set affinities. let mut stable = false; loop { process.update()?; let mut current_top_three = wait_for_three_threads(&mut process, poll_interval)?; current_top_three.sort_unstable(); if prev_top_three != current_top_three { prev_top_three = current_top_three; last_changed = Instant::now(); match set_top_three { Some(set) if set == prev_top_three => { debug!( "Previously set top three threads returned: {:?}", prev_top_three ); stable = true; } Some(_) | None => { debug!("Top three threads changed: {:?}", prev_top_three); stable = false; } } } else { if !stable && !notified_changing_soon && last_changed.elapsed() > changing_soon_period { info!( "Threads appear to have settled. Assigning affinities on the next poll if stable after {} seconds.", (changing_soon_fraction * 2).as_secs() ); notified_changing_soon = true; } if !stable && last_changed.elapsed() > settling_period { info!("Assigning thread affinities."); set_top_three_ideal(&prev_top_three, &mut process)?; set_top_three = Some(prev_top_three); stable = true; notified_changing_soon = false; } } if stable && &top_three_ideal(&current_top_three, &mut process)? != &[1, 3, 5] { info!("Correcting affinities."); set_top_three_ideal(&prev_top_three, &mut process)?; } thread::sleep(poll_interval) } } //#[cfg(test)] //mod tests { // use super::*; // // #[test] // fn finds_rl() { // let process = rl_process().unwrap(); // println!("{:?}", process) // } //}
use std::boxed::Box; use objects::*; use std::str::FromStr; use chrono::{DateTime, TimeZone, NaiveDateTime, Utc,NaiveTime}; //puts informtion in each objects and internal feild and creates objects //takes vectors of string which consist of feild title followed by elements of single words split from next title by newlinw // eg ["Title", "timmy", "is", "cool", "/n", Datetime, "a_date_time", "/n", "List", "tomato", "carrot", "leek", "/n"] pub fn Event_parser< T: entry_type>(input:Vec<&str>,heading_list:Vec<String>) -> T { let mut output:Vec<Option<String>> = Vec::new(); for heading in heading_list{ let mut interim_output: Vec<String> = Vec::new(); let mut catch = false; for value in input.clone() { if value == "\n"{ catch=false; } if catch == true{ interim_output.push(value.to_string()); } if value == heading{ catch = true; } } if interim_output == (Vec::<String>::new()){ output.push(None) }else{ output.push(Some(unifier(interim_output))); } } let Title = output[0].clone(); let DateTime= match output[1].clone(){ Some(a) => Box::new(Some(DateTime::<Utc>::from_str(&a).unwrap())), None => Box::new(None), }; let List = output[2].clone(); let Other = output[3].clone(); return T::new( Title, DateTime, List, Other, ); } //converts vectors of strings into single string fn unifier(vector:Vec<String>)->String{ let mut output = String::new(); for a in vector { output = format!("{} {}", output, a); } output } //only use for values that you know definately have datetime components will panic if none pub fn time_inside(value:&entrys) -> NaiveTime { match value { entrys::Todo(a) => return a.get_date_time().unwrap().time(), entrys::Events(b) =>return b.get_date_time().unwrap().time(), entrys::appointments(c) =>return c.get_date_time().unwrap().time(), } } #[cfg(test)] mod tests { use super::*; #[test] fn it_works() { assert_eq!(2 + 2, 4); } #[test] fn test_unifier(){ let test_vector = vec!["a".to_string(),"cute".to_string(),"cat".to_string()]; let test_string = unifier(test_vector); 
assert_eq!(test_string," a cute cat".to_string()); } #[test] fn test_time_inside(){ let original_date_time = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(61, 0), Utc); let title = "timmy".to_string(); let list ="buy tomato".to_string(); let datetime = Box::new(Some(original_date_time.clone())); let other = "sdnjabjlkvkjbaj".to_string(); let tester = Todo::new(Some(title), datetime, Some(list), Some(other)); let time = time_inside(&entrys::Todo(tester)); assert_eq!(time,original_date_time.time()); } }
use tsukuyomi::{
    endpoint, //
    server::Server,
    App,
};

/// Minimal tsukuyomi example: serves "Hello, world!" at `/` on port 4000.
/// NOTE(review): the bare `//` trailing comments appear to be rustfmt
/// line-break markers from the upstream tsukuyomi examples — confirm.
fn main() -> Result<(), exitfailure::ExitFailure> {
    // Build the application: a single route at `/` answering every request
    // with a static string.
    let app = App::build(|mut scope| {
        scope
            .at("/")? //
            .to(endpoint::call(|| "Hello, world!\n"))
    })?;
    let mut server = Server::new(app)?;
    println!("Listening on http://127.0.0.1:4000/");
    server.bind("127.0.0.1:4000")?;
    // Blocks forever serving requests; `Ok(())` below is reached only if the
    // server returns.
    server.run_forever();
    Ok(())
}
use camera::*; use glutin; /// This function implements the suggested handling of mouse and keyboard input. Feel free to /// intercept RecievedCharacter events if you do not want to use those shortcuts pub fn camera_event_handler<'a>(cam: &'a mut Camera, event: glutin::Event) { match event { glutin::Event::WindowEvent { event, .. } => match event { glutin::WindowEvent::MouseWheel { delta: glutin::MouseScrollDelta::PixelDelta(_, y), .. } => { cam.handle_scroll(y); } glutin::WindowEvent::CursorMoved { position: (x, y), .. } => { cam.handle_mouse_move(x as f32, y as f32); } glutin::WindowEvent::MouseInput { state, button, .. } => match (state, button) { (glutin::ElementState::Pressed, glutin::MouseButton::Left) => { cam.handle_mouse_input(MouseButton::Left, ButtonState::Pressed); } (glutin::ElementState::Pressed, glutin::MouseButton::Right) => { cam.handle_mouse_input(MouseButton::Right, ButtonState::Pressed); } (glutin::ElementState::Released, glutin::MouseButton::Left) => { cam.handle_mouse_input(MouseButton::Left, ButtonState::Released); } (glutin::ElementState::Released, glutin::MouseButton::Right) => { cam.handle_mouse_input(MouseButton::Right, ButtonState::Released); } _ => (), }, glutin::WindowEvent::ReceivedCharacter(c) => match c { 's' => { cam.set_current_as_default(); } 'd' => { cam.transition_to_default(); } _ => (), }, _ => (), }, _ => (), } }
#[warn(dead_code)]
// NOTE(review): `#[warn(dead_code)]` is already the compiler default;
// `#[allow(dead_code)]` was probably intended — confirm before changing.
// use mysql::{Pool, from_row};
use crate::lang::Language;

// Query that lists every table in a database (kept for reference):
// const TABLES_COMMAND_TEXT: &str = "SELECT TABLE_NAME AS Name, TABLE_COMMENT as Description,TABLE_ROWS,CREATE_TIME FROM information_schema.tables where table_schema='{}' and table_type='base table';";

// Query that fetches every column of a given table (kept for reference):
// const COLUMNS_COMMAND_TEXT: &str = "SELECT * FROM information_schema.COLUMNS where table_schema = '{db_name}' AND table_name='{table_name}';";

/// Column metadata, as read from `information_schema.COLUMNS`.
#[derive(Debug, Clone)]
pub struct Column {
    // Column name
    pub column_name: Option<String>,
    // Data type
    pub data_type: Option<String>,
    // Column type
    pub column_type: Option<String>,
    // Column description (comment)
    pub column_comment: Option<String>,
    // Declared column length
    pub column_length: Option<i32>
}

impl Column {
    /// Generate code for this column in the given target language.
    /// Currently a stub: it only prints the language; generation is not
    /// implemented yet (see the sketch in the commented-out `match` below).
    #[allow(dead_code)]
    pub fn get_codes(&self, lang: Language) {
        println!("{:?}", lang);
        // match lang {
        //     // Rust
        //     Rust => {
        //         ""
        //     },
        //     _ => ""
        // }
    }
}

// /// Fetch all column info for the given table.
// pub fn get_columns(conn_str: &str, table_name: &str, db_name: &str) -> Vec<Column> {
//     let pool = Pool::new(conn_str).unwrap();
//     let sql = format!("SELECT column_name, data_type, column_type, column_comment, character_maximum_length FROM information_schema.COLUMNS where table_schema = '{db_name}' AND table_name='{table_name}';", db_name=db_name, table_name=table_name);
//     // let mut columns: Vec<Column> = vec![];
//     // println!("{}", sql);
//     // for row in pool.prep_exec(sql, ()).unwrap() {
//     //     let (column_name, data_type, column_type, column_comment, character_maximum_length) = from_row(row.unwrap());
//     //     let col = Column {
//     //         column_name: column_name,
//     //         data_type: data_type,
//     //         column_type: column_type,
//     //         column_comment: column_comment,
//     //         column_length: character_maximum_length,
//     //     };
//     //     columns.push(col);
//     // }
//     let columns: Vec<Column> = pool.prep_exec(sql, ())
//         .map(|result| {
//             result.map(|x| x.unwrap())
//                 .map(|row| {
//                     let (column_name, data_type, column_type, column_comment, character_maximum_length) = from_row(row);
//                     Column {
//                         column_name: column_name,
//                         data_type: data_type,
//                         column_type: column_type,
//                         column_comment: column_comment,
//                         column_length: character_maximum_length,
//                     }
//                 }).collect()
//         }).unwrap();
//     columns
// }
#[doc = "Register `CSR` reader"] pub type R = crate::R<CSR_SPEC>; #[doc = "Register `CSR` writer"] pub type W = crate::W<CSR_SPEC>; #[doc = "Field `FUNC` reader - Function"] pub type FUNC_R = crate::FieldReader<FUNC_A>; #[doc = "Function\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] #[repr(u8)] pub enum FUNC_A { #[doc = "0: Cosine function"] Cosine = 0, #[doc = "1: Sine function"] Sine = 1, #[doc = "2: Phase function"] Phase = 2, #[doc = "3: Modulus function"] Modulus = 3, #[doc = "4: Arctangent function"] Arctangent = 4, #[doc = "5: Hyperbolic Cosine function"] HyperbolicCosine = 5, #[doc = "6: Hyperbolic Sine function"] HyperbolicSine = 6, #[doc = "7: Arctanh function"] Arctanh = 7, #[doc = "8: Natural Logarithm function"] NaturalLogarithm = 8, #[doc = "9: Square Root function"] SquareRoot = 9, } impl From<FUNC_A> for u8 { #[inline(always)] fn from(variant: FUNC_A) -> Self { variant as _ } } impl crate::FieldSpec for FUNC_A { type Ux = u8; } impl FUNC_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> Option<FUNC_A> { match self.bits { 0 => Some(FUNC_A::Cosine), 1 => Some(FUNC_A::Sine), 2 => Some(FUNC_A::Phase), 3 => Some(FUNC_A::Modulus), 4 => Some(FUNC_A::Arctangent), 5 => Some(FUNC_A::HyperbolicCosine), 6 => Some(FUNC_A::HyperbolicSine), 7 => Some(FUNC_A::Arctanh), 8 => Some(FUNC_A::NaturalLogarithm), 9 => Some(FUNC_A::SquareRoot), _ => None, } } #[doc = "Cosine function"] #[inline(always)] pub fn is_cosine(&self) -> bool { *self == FUNC_A::Cosine } #[doc = "Sine function"] #[inline(always)] pub fn is_sine(&self) -> bool { *self == FUNC_A::Sine } #[doc = "Phase function"] #[inline(always)] pub fn is_phase(&self) -> bool { *self == FUNC_A::Phase } #[doc = "Modulus function"] #[inline(always)] pub fn is_modulus(&self) -> bool { *self == FUNC_A::Modulus } #[doc = "Arctangent function"] #[inline(always)] pub fn is_arctangent(&self) -> bool { *self == FUNC_A::Arctangent } #[doc = "Hyperbolic Cosine 
function"] #[inline(always)] pub fn is_hyperbolic_cosine(&self) -> bool { *self == FUNC_A::HyperbolicCosine } #[doc = "Hyperbolic Sine function"] #[inline(always)] pub fn is_hyperbolic_sine(&self) -> bool { *self == FUNC_A::HyperbolicSine } #[doc = "Arctanh function"] #[inline(always)] pub fn is_arctanh(&self) -> bool { *self == FUNC_A::Arctanh } #[doc = "Natural Logarithm function"] #[inline(always)] pub fn is_natural_logarithm(&self) -> bool { *self == FUNC_A::NaturalLogarithm } #[doc = "Square Root function"] #[inline(always)] pub fn is_square_root(&self) -> bool { *self == FUNC_A::SquareRoot } } #[doc = "Field `FUNC` writer - Function"] pub type FUNC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O, FUNC_A>; impl<'a, REG, const O: u8> FUNC_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, REG::Ux: From<u8>, { #[doc = "Cosine function"] #[inline(always)] pub fn cosine(self) -> &'a mut crate::W<REG> { self.variant(FUNC_A::Cosine) } #[doc = "Sine function"] #[inline(always)] pub fn sine(self) -> &'a mut crate::W<REG> { self.variant(FUNC_A::Sine) } #[doc = "Phase function"] #[inline(always)] pub fn phase(self) -> &'a mut crate::W<REG> { self.variant(FUNC_A::Phase) } #[doc = "Modulus function"] #[inline(always)] pub fn modulus(self) -> &'a mut crate::W<REG> { self.variant(FUNC_A::Modulus) } #[doc = "Arctangent function"] #[inline(always)] pub fn arctangent(self) -> &'a mut crate::W<REG> { self.variant(FUNC_A::Arctangent) } #[doc = "Hyperbolic Cosine function"] #[inline(always)] pub fn hyperbolic_cosine(self) -> &'a mut crate::W<REG> { self.variant(FUNC_A::HyperbolicCosine) } #[doc = "Hyperbolic Sine function"] #[inline(always)] pub fn hyperbolic_sine(self) -> &'a mut crate::W<REG> { self.variant(FUNC_A::HyperbolicSine) } #[doc = "Arctanh function"] #[inline(always)] pub fn arctanh(self) -> &'a mut crate::W<REG> { self.variant(FUNC_A::Arctanh) } #[doc = "Natural Logarithm function"] #[inline(always)] pub fn natural_logarithm(self) -> &'a mut 
crate::W<REG> { self.variant(FUNC_A::NaturalLogarithm) } #[doc = "Square Root function"] #[inline(always)] pub fn square_root(self) -> &'a mut crate::W<REG> { self.variant(FUNC_A::SquareRoot) } } #[doc = "Field `PRECISION` reader - Precision required (number of iterations/cycles), where PRECISION = (number of iterations/4)"] pub type PRECISION_R = crate::FieldReader<PRECISION_A>; #[doc = "Precision required (number of iterations/cycles), where PRECISION = (number of iterations/4)\n\nValue on reset: 5"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] #[repr(u8)] pub enum PRECISION_A { #[doc = "1: 4 iterations"] Iters4 = 1, #[doc = "2: 8 iterations"] Iters8 = 2, #[doc = "3: 12 iterations"] Iters12 = 3, #[doc = "4: 16 iterations"] Iters16 = 4, #[doc = "5: 20 iterations"] Iters20 = 5, #[doc = "6: 24 iterations"] Iters24 = 6, #[doc = "7: 28 iterations"] Iters28 = 7, #[doc = "8: 32 iterations"] Iters32 = 8, #[doc = "9: 36 iterations"] Iters36 = 9, #[doc = "10: 40 iterations"] Iters40 = 10, #[doc = "11: 44 iterations"] Iters44 = 11, #[doc = "12: 48 iterations"] Iters48 = 12, #[doc = "13: 52 iterations"] Iters52 = 13, #[doc = "14: 56 iterations"] Iters56 = 14, #[doc = "15: 60 iterations"] Iters60 = 15, } impl From<PRECISION_A> for u8 { #[inline(always)] fn from(variant: PRECISION_A) -> Self { variant as _ } } impl crate::FieldSpec for PRECISION_A { type Ux = u8; } impl PRECISION_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> Option<PRECISION_A> { match self.bits { 1 => Some(PRECISION_A::Iters4), 2 => Some(PRECISION_A::Iters8), 3 => Some(PRECISION_A::Iters12), 4 => Some(PRECISION_A::Iters16), 5 => Some(PRECISION_A::Iters20), 6 => Some(PRECISION_A::Iters24), 7 => Some(PRECISION_A::Iters28), 8 => Some(PRECISION_A::Iters32), 9 => Some(PRECISION_A::Iters36), 10 => Some(PRECISION_A::Iters40), 11 => Some(PRECISION_A::Iters44), 12 => Some(PRECISION_A::Iters48), 13 => Some(PRECISION_A::Iters52), 14 => Some(PRECISION_A::Iters56), 15 => 
Some(PRECISION_A::Iters60), _ => None, } } #[doc = "4 iterations"] #[inline(always)] pub fn is_iters4(&self) -> bool { *self == PRECISION_A::Iters4 } #[doc = "8 iterations"] #[inline(always)] pub fn is_iters8(&self) -> bool { *self == PRECISION_A::Iters8 } #[doc = "12 iterations"] #[inline(always)] pub fn is_iters12(&self) -> bool { *self == PRECISION_A::Iters12 } #[doc = "16 iterations"] #[inline(always)] pub fn is_iters16(&self) -> bool { *self == PRECISION_A::Iters16 } #[doc = "20 iterations"] #[inline(always)] pub fn is_iters20(&self) -> bool { *self == PRECISION_A::Iters20 } #[doc = "24 iterations"] #[inline(always)] pub fn is_iters24(&self) -> bool { *self == PRECISION_A::Iters24 } #[doc = "28 iterations"] #[inline(always)] pub fn is_iters28(&self) -> bool { *self == PRECISION_A::Iters28 } #[doc = "32 iterations"] #[inline(always)] pub fn is_iters32(&self) -> bool { *self == PRECISION_A::Iters32 } #[doc = "36 iterations"] #[inline(always)] pub fn is_iters36(&self) -> bool { *self == PRECISION_A::Iters36 } #[doc = "40 iterations"] #[inline(always)] pub fn is_iters40(&self) -> bool { *self == PRECISION_A::Iters40 } #[doc = "44 iterations"] #[inline(always)] pub fn is_iters44(&self) -> bool { *self == PRECISION_A::Iters44 } #[doc = "48 iterations"] #[inline(always)] pub fn is_iters48(&self) -> bool { *self == PRECISION_A::Iters48 } #[doc = "52 iterations"] #[inline(always)] pub fn is_iters52(&self) -> bool { *self == PRECISION_A::Iters52 } #[doc = "56 iterations"] #[inline(always)] pub fn is_iters56(&self) -> bool { *self == PRECISION_A::Iters56 } #[doc = "60 iterations"] #[inline(always)] pub fn is_iters60(&self) -> bool { *self == PRECISION_A::Iters60 } } #[doc = "Field `PRECISION` writer - Precision required (number of iterations/cycles), where PRECISION = (number of iterations/4)"] pub type PRECISION_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O, PRECISION_A>; impl<'a, REG, const O: u8> PRECISION_W<'a, REG, O> where REG: crate::Writable + 
crate::RegisterSpec, REG::Ux: From<u8>, { #[doc = "4 iterations"] #[inline(always)] pub fn iters4(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters4) } #[doc = "8 iterations"] #[inline(always)] pub fn iters8(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters8) } #[doc = "12 iterations"] #[inline(always)] pub fn iters12(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters12) } #[doc = "16 iterations"] #[inline(always)] pub fn iters16(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters16) } #[doc = "20 iterations"] #[inline(always)] pub fn iters20(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters20) } #[doc = "24 iterations"] #[inline(always)] pub fn iters24(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters24) } #[doc = "28 iterations"] #[inline(always)] pub fn iters28(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters28) } #[doc = "32 iterations"] #[inline(always)] pub fn iters32(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters32) } #[doc = "36 iterations"] #[inline(always)] pub fn iters36(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters36) } #[doc = "40 iterations"] #[inline(always)] pub fn iters40(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters40) } #[doc = "44 iterations"] #[inline(always)] pub fn iters44(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters44) } #[doc = "48 iterations"] #[inline(always)] pub fn iters48(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters48) } #[doc = "52 iterations"] #[inline(always)] pub fn iters52(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters52) } #[doc = "56 iterations"] #[inline(always)] pub fn iters56(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters56) } #[doc = "60 iterations"] #[inline(always)] pub fn iters60(self) -> &'a mut crate::W<REG> { self.variant(PRECISION_A::Iters60) } } #[doc = "Field `SCALE` reader - Scaling 
factor (2^-n for arguments, 2^n for results)"] pub type SCALE_R = crate::FieldReader; #[doc = "Field `SCALE` writer - Scaling factor (2^-n for arguments, 2^n for results)"] pub type SCALE_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 3, O>; #[doc = "Field `IEN` reader - Enable interrupt"] pub type IEN_R = crate::BitReader<IEN_A>; #[doc = "Enable interrupt\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum IEN_A { #[doc = "0: Disable interrupt request generation"] Disabled = 0, #[doc = "1: Enable interrupt request generation"] Enabled = 1, } impl From<IEN_A> for bool { #[inline(always)] fn from(variant: IEN_A) -> Self { variant as u8 != 0 } } impl IEN_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> IEN_A { match self.bits { false => IEN_A::Disabled, true => IEN_A::Enabled, } } #[doc = "Disable interrupt request generation"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == IEN_A::Disabled } #[doc = "Enable interrupt request generation"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == IEN_A::Enabled } } #[doc = "Field `IEN` writer - Enable interrupt"] pub type IEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, IEN_A>; impl<'a, REG, const O: u8> IEN_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Disable interrupt request generation"] #[inline(always)] pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(IEN_A::Disabled) } #[doc = "Enable interrupt request generation"] #[inline(always)] pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(IEN_A::Enabled) } } #[doc = "Field `DMAREN` reader - Enable DMA wread channel"] pub type DMAREN_R = crate::BitReader<DMAREN_A>; #[doc = "Enable DMA wread channel\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum DMAREN_A { #[doc = "0: No DMA channel reads are generated"] Disabled = 0, #[doc = "1: Read requests are generated on the DMA channel when RRDY 
flag is set"] Enabled = 1, } impl From<DMAREN_A> for bool { #[inline(always)] fn from(variant: DMAREN_A) -> Self { variant as u8 != 0 } } impl DMAREN_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DMAREN_A { match self.bits { false => DMAREN_A::Disabled, true => DMAREN_A::Enabled, } } #[doc = "No DMA channel reads are generated"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self == DMAREN_A::Disabled } #[doc = "Read requests are generated on the DMA channel when RRDY flag is set"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == DMAREN_A::Enabled } } #[doc = "Field `DMAREN` writer - Enable DMA wread channel"] pub type DMAREN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DMAREN_A>; impl<'a, REG, const O: u8> DMAREN_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "No DMA channel reads are generated"] #[inline(always)] pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(DMAREN_A::Disabled) } #[doc = "Read requests are generated on the DMA channel when RRDY flag is set"] #[inline(always)] pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(DMAREN_A::Enabled) } } #[doc = "Field `DMAWEN` reader - Enable DMA write channel"] pub type DMAWEN_R = crate::BitReader<DMAWEN_A>; #[doc = "Enable DMA write channel\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum DMAWEN_A { #[doc = "0: No DMA channel writes are generated"] Disabled = 0, #[doc = "1: Write requests are generated on the DMA channel when no operation is pending"] Enabled = 1, } impl From<DMAWEN_A> for bool { #[inline(always)] fn from(variant: DMAWEN_A) -> Self { variant as u8 != 0 } } impl DMAWEN_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DMAWEN_A { match self.bits { false => DMAWEN_A::Disabled, true => DMAWEN_A::Enabled, } } #[doc = "No DMA channel writes are generated"] #[inline(always)] pub fn is_disabled(&self) -> bool { *self 
== DMAWEN_A::Disabled } #[doc = "Write requests are generated on the DMA channel when no operation is pending"] #[inline(always)] pub fn is_enabled(&self) -> bool { *self == DMAWEN_A::Enabled } } #[doc = "Field `DMAWEN` writer - Enable DMA write channel"] pub type DMAWEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, DMAWEN_A>; impl<'a, REG, const O: u8> DMAWEN_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "No DMA channel writes are generated"] #[inline(always)] pub fn disabled(self) -> &'a mut crate::W<REG> { self.variant(DMAWEN_A::Disabled) } #[doc = "Write requests are generated on the DMA channel when no operation is pending"] #[inline(always)] pub fn enabled(self) -> &'a mut crate::W<REG> { self.variant(DMAWEN_A::Enabled) } } #[doc = "Field `NRES` reader - Number of results in the RDATA register"] pub type NRES_R = crate::BitReader<NRES_A>; #[doc = "Number of results in the RDATA register\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum NRES_A { #[doc = "0: Only single result value will be returned. After a single read RRDY will be automatically cleared"] Num1 = 0, #[doc = "1: Two return reads need to be performed. After two reads RRDY will be automatically cleared"] Num2 = 1, } impl From<NRES_A> for bool { #[inline(always)] fn from(variant: NRES_A) -> Self { variant as u8 != 0 } } impl NRES_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> NRES_A { match self.bits { false => NRES_A::Num1, true => NRES_A::Num2, } } #[doc = "Only single result value will be returned. After a single read RRDY will be automatically cleared"] #[inline(always)] pub fn is_num1(&self) -> bool { *self == NRES_A::Num1 } #[doc = "Two return reads need to be performed. 
After two reads RRDY will be automatically cleared"] #[inline(always)] pub fn is_num2(&self) -> bool { *self == NRES_A::Num2 } } #[doc = "Field `NRES` writer - Number of results in the RDATA register"] pub type NRES_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, NRES_A>; impl<'a, REG, const O: u8> NRES_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Only single result value will be returned. After a single read RRDY will be automatically cleared"] #[inline(always)] pub fn num1(self) -> &'a mut crate::W<REG> { self.variant(NRES_A::Num1) } #[doc = "Two return reads need to be performed. After two reads RRDY will be automatically cleared"] #[inline(always)] pub fn num2(self) -> &'a mut crate::W<REG> { self.variant(NRES_A::Num2) } } #[doc = "Field `NARGS` reader - Number of arguments expected by the WDATA register"] pub type NARGS_R = crate::BitReader<NARGS_A>; #[doc = "Number of arguments expected by the WDATA register\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum NARGS_A { #[doc = "0: Only single argument write is needed for next calculation"] Num1 = 0, #[doc = "1: Two argument writes need to be performed for next calculation"] Num2 = 1, } impl From<NARGS_A> for bool { #[inline(always)] fn from(variant: NARGS_A) -> Self { variant as u8 != 0 } } impl NARGS_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> NARGS_A { match self.bits { false => NARGS_A::Num1, true => NARGS_A::Num2, } } #[doc = "Only single argument write is needed for next calculation"] #[inline(always)] pub fn is_num1(&self) -> bool { *self == NARGS_A::Num1 } #[doc = "Two argument writes need to be performed for next calculation"] #[inline(always)] pub fn is_num2(&self) -> bool { *self == NARGS_A::Num2 } } #[doc = "Field `NARGS` writer - Number of arguments expected by the WDATA register"] pub type NARGS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, NARGS_A>; impl<'a, REG, const O: u8> 
NARGS_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Only single argument write is needed for next calculation"] #[inline(always)] pub fn num1(self) -> &'a mut crate::W<REG> { self.variant(NARGS_A::Num1) } #[doc = "Two argument writes need to be performed for next calculation"] #[inline(always)] pub fn num2(self) -> &'a mut crate::W<REG> { self.variant(NARGS_A::Num2) } } #[doc = "Field `RESSIZE` reader - Width of output data"] pub type RESSIZE_R = crate::BitReader<RESSIZE_A>; #[doc = "Width of output data\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum RESSIZE_A { #[doc = "0: Use 32 bit output values"] Bits32 = 0, #[doc = "1: Use 16 bit output values"] Bits16 = 1, } impl From<RESSIZE_A> for bool { #[inline(always)] fn from(variant: RESSIZE_A) -> Self { variant as u8 != 0 } } impl RESSIZE_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RESSIZE_A { match self.bits { false => RESSIZE_A::Bits32, true => RESSIZE_A::Bits16, } } #[doc = "Use 32 bit output values"] #[inline(always)] pub fn is_bits32(&self) -> bool { *self == RESSIZE_A::Bits32 } #[doc = "Use 16 bit output values"] #[inline(always)] pub fn is_bits16(&self) -> bool { *self == RESSIZE_A::Bits16 } } #[doc = "Field `RESSIZE` writer - Width of output data"] pub type RESSIZE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RESSIZE_A>; impl<'a, REG, const O: u8> RESSIZE_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Use 32 bit output values"] #[inline(always)] pub fn bits32(self) -> &'a mut crate::W<REG> { self.variant(RESSIZE_A::Bits32) } #[doc = "Use 16 bit output values"] #[inline(always)] pub fn bits16(self) -> &'a mut crate::W<REG> { self.variant(RESSIZE_A::Bits16) } } #[doc = "Field `ARGSIZE` reader - Width of input data"] pub type ARGSIZE_R = crate::BitReader<ARGSIZE_A>; #[doc = "Width of input data\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum 
ARGSIZE_A { #[doc = "0: Use 32 bit input values"] Bits32 = 0, #[doc = "1: Use 16 bit input values"] Bits16 = 1, } impl From<ARGSIZE_A> for bool { #[inline(always)] fn from(variant: ARGSIZE_A) -> Self { variant as u8 != 0 } } impl ARGSIZE_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ARGSIZE_A { match self.bits { false => ARGSIZE_A::Bits32, true => ARGSIZE_A::Bits16, } } #[doc = "Use 32 bit input values"] #[inline(always)] pub fn is_bits32(&self) -> bool { *self == ARGSIZE_A::Bits32 } #[doc = "Use 16 bit input values"] #[inline(always)] pub fn is_bits16(&self) -> bool { *self == ARGSIZE_A::Bits16 } } #[doc = "Field `ARGSIZE` writer - Width of input data"] pub type ARGSIZE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ARGSIZE_A>; impl<'a, REG, const O: u8> ARGSIZE_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Use 32 bit input values"] #[inline(always)] pub fn bits32(self) -> &'a mut crate::W<REG> { self.variant(ARGSIZE_A::Bits32) } #[doc = "Use 16 bit input values"] #[inline(always)] pub fn bits16(self) -> &'a mut crate::W<REG> { self.variant(ARGSIZE_A::Bits16) } } #[doc = "Field `RRDY` reader - Result ready flag"] pub type RRDY_R = crate::BitReader<RRDYR_A>; #[doc = "Result ready flag\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum RRDYR_A { #[doc = "0: Results from computation are not read"] NotReady = 0, #[doc = "1: Results are ready, this flag will be automatically cleared once value is read"] Ready = 1, } impl From<RRDYR_A> for bool { #[inline(always)] fn from(variant: RRDYR_A) -> Self { variant as u8 != 0 } } impl RRDY_R { #[doc = "Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RRDYR_A { match self.bits { false => RRDYR_A::NotReady, true => RRDYR_A::Ready, } } #[doc = "Results from computation are not read"] #[inline(always)] pub fn is_not_ready(&self) -> bool { *self == RRDYR_A::NotReady } #[doc = "Results are ready, this 
flag will be automatically cleared once value is read"] #[inline(always)] pub fn is_ready(&self) -> bool { *self == RRDYR_A::Ready } } #[doc = "Field `RRDY` writer - Result ready flag"] pub type RRDY_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RRDYR_A>; impl<'a, REG, const O: u8> RRDY_W<'a, REG, O> where REG: crate::Writable + crate::RegisterSpec, { #[doc = "Results from computation are not read"] #[inline(always)] pub fn not_ready(self) -> &'a mut crate::W<REG> { self.variant(RRDYR_A::NotReady) } #[doc = "Results are ready, this flag will be automatically cleared once value is read"] #[inline(always)] pub fn ready(self) -> &'a mut crate::W<REG> { self.variant(RRDYR_A::Ready) } } impl R { #[doc = "Bits 0:3 - Function"] #[inline(always)] pub fn func(&self) -> FUNC_R { FUNC_R::new((self.bits & 0x0f) as u8) } #[doc = "Bits 4:7 - Precision required (number of iterations/cycles), where PRECISION = (number of iterations/4)"] #[inline(always)] pub fn precision(&self) -> PRECISION_R { PRECISION_R::new(((self.bits >> 4) & 0x0f) as u8) } #[doc = "Bits 8:10 - Scaling factor (2^-n for arguments, 2^n for results)"] #[inline(always)] pub fn scale(&self) -> SCALE_R { SCALE_R::new(((self.bits >> 8) & 7) as u8) } #[doc = "Bit 16 - Enable interrupt"] #[inline(always)] pub fn ien(&self) -> IEN_R { IEN_R::new(((self.bits >> 16) & 1) != 0) } #[doc = "Bit 17 - Enable DMA wread channel"] #[inline(always)] pub fn dmaren(&self) -> DMAREN_R { DMAREN_R::new(((self.bits >> 17) & 1) != 0) } #[doc = "Bit 18 - Enable DMA write channel"] #[inline(always)] pub fn dmawen(&self) -> DMAWEN_R { DMAWEN_R::new(((self.bits >> 18) & 1) != 0) } #[doc = "Bit 19 - Number of results in the RDATA register"] #[inline(always)] pub fn nres(&self) -> NRES_R { NRES_R::new(((self.bits >> 19) & 1) != 0) } #[doc = "Bit 20 - Number of arguments expected by the WDATA register"] #[inline(always)] pub fn nargs(&self) -> NARGS_R { NARGS_R::new(((self.bits >> 20) & 1) != 0) } #[doc = "Bit 21 - Width of output 
data"] #[inline(always)] pub fn ressize(&self) -> RESSIZE_R { RESSIZE_R::new(((self.bits >> 21) & 1) != 0) } #[doc = "Bit 22 - Width of input data"] #[inline(always)] pub fn argsize(&self) -> ARGSIZE_R { ARGSIZE_R::new(((self.bits >> 22) & 1) != 0) } #[doc = "Bit 31 - Result ready flag"] #[inline(always)] pub fn rrdy(&self) -> RRDY_R { RRDY_R::new(((self.bits >> 31) & 1) != 0) } } impl W { #[doc = "Bits 0:3 - Function"] #[inline(always)] #[must_use] pub fn func(&mut self) -> FUNC_W<CSR_SPEC, 0> { FUNC_W::new(self) } #[doc = "Bits 4:7 - Precision required (number of iterations/cycles), where PRECISION = (number of iterations/4)"] #[inline(always)] #[must_use] pub fn precision(&mut self) -> PRECISION_W<CSR_SPEC, 4> { PRECISION_W::new(self) } #[doc = "Bits 8:10 - Scaling factor (2^-n for arguments, 2^n for results)"] #[inline(always)] #[must_use] pub fn scale(&mut self) -> SCALE_W<CSR_SPEC, 8> { SCALE_W::new(self) } #[doc = "Bit 16 - Enable interrupt"] #[inline(always)] #[must_use] pub fn ien(&mut self) -> IEN_W<CSR_SPEC, 16> { IEN_W::new(self) } #[doc = "Bit 17 - Enable DMA wread channel"] #[inline(always)] #[must_use] pub fn dmaren(&mut self) -> DMAREN_W<CSR_SPEC, 17> { DMAREN_W::new(self) } #[doc = "Bit 18 - Enable DMA write channel"] #[inline(always)] #[must_use] pub fn dmawen(&mut self) -> DMAWEN_W<CSR_SPEC, 18> { DMAWEN_W::new(self) } #[doc = "Bit 19 - Number of results in the RDATA register"] #[inline(always)] #[must_use] pub fn nres(&mut self) -> NRES_W<CSR_SPEC, 19> { NRES_W::new(self) } #[doc = "Bit 20 - Number of arguments expected by the WDATA register"] #[inline(always)] #[must_use] pub fn nargs(&mut self) -> NARGS_W<CSR_SPEC, 20> { NARGS_W::new(self) } #[doc = "Bit 21 - Width of output data"] #[inline(always)] #[must_use] pub fn ressize(&mut self) -> RESSIZE_W<CSR_SPEC, 21> { RESSIZE_W::new(self) } #[doc = "Bit 22 - Width of input data"] #[inline(always)] #[must_use] pub fn argsize(&mut self) -> ARGSIZE_W<CSR_SPEC, 22> { ARGSIZE_W::new(self) } #[doc = 
"Bit 31 - Result ready flag"] #[inline(always)] #[must_use] pub fn rrdy(&mut self) -> RRDY_W<CSR_SPEC, 31> { RRDY_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "Control and status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`csr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`csr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct CSR_SPEC; impl crate::RegisterSpec for CSR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`csr::R`](R) reader structure"] impl crate::Readable for CSR_SPEC {} #[doc = "`write(|w| ..)` method takes [`csr::W`](W) writer structure"] impl crate::Writable for CSR_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets CSR to value 0x50"] impl crate::Resettable for CSR_SPEC { const RESET_VALUE: Self::Ux = 0x50; }
use bindings; use std::ffi::{CStr, CString}; /// Checks `input` for SQL injection detection, and returns an option of (is_sqli, fingerprint) pub fn sqli(input: &str) -> Option<(bool, String)> { let mut fingerprint = ['\0'; 8]; let fingerprint_ptr = fingerprint.as_mut_ptr() as *mut i8; let input_cstring = CString::new(input).ok()?; let input_ptr = input_cstring.as_ptr(); let is_sqli = unsafe { bindings::libinjection_sqli(input_ptr, input.len() as u64, fingerprint_ptr) }; let fingerprint = unsafe { CStr::from_ptr(fingerprint_ptr).to_str().ok()?.to_string() }; Some((is_sqli == 1, fingerprint)) } /// Checks `input` for XSS detection, and returns an option of is_xss pub fn xss(input: &str) -> Option<bool> { let input_cstring = CString::new(input).ok()?; let input_ptr = input_cstring.as_ptr(); let is_xss = unsafe { bindings::libinjection_xss(input_ptr, input.len() as u64) }; Some(is_xss == 1) }
use std::{env, error::Error, fs, path::Path}; struct Region { variant: String, id: String, } fn main() -> Result<(), Box<dyn Error>> { println!("cargo:rerun-if-changed=regions.txt"); let regions = include_str!("regions.txt") .lines() .map(|id| Region { variant: id .split('-') .map(|word| { let mut chars = word.chars(); if let Some(a) = chars.next() { a.to_uppercase() .chain(chars.as_str().to_lowercase().chars()) .collect() } else { String::new() } }) .collect::<Vec<_>>() .join(""), id: id.into(), }) .collect::<Vec<_>>(); let dest_path = Path::new(&env::var("OUT_DIR")?).join("region.rs"); // the enum let mut buf = "/// A list of AWS Regions supported by DynamoDB\n#[non_exhaustive]\npub enum Region {\n" .to_string(); for region in &regions { buf.push_str(" "); buf.push_str(&region.variant); buf.push_str(",\n"); } buf.push_str("}\n"); // the impl buf.push_str("\nimpl Region {\n"); buf.push_str(" /// Short region identifier\n"); buf.push_str(" pub fn id(&self) -> &str {\n"); buf.push_str(" match self {\n"); for region in &regions { buf.push_str(" Region::"); buf.push_str(&region.variant); buf.push_str(" => \""); buf.push_str(&region.id); buf.push_str("\",\n"); } buf.push_str(" }\n }\n"); buf.push_str(" /// region specific dynamodb endpoint\n"); buf.push_str(" pub fn endpoint(&self) -> &str {\n"); buf.push_str(" match self {\n"); for region in &regions { buf.push_str(" Region::"); buf.push_str(&region.variant); buf.push_str(" => \"https://dynamodb."); buf.push_str(&region.id); buf.push_str(".amazonaws.com\",\n"); } buf.push_str(" }\n }\n"); buf.push_str("}\n"); // from str buf.push_str("\nimpl std::str::FromStr for Region {\n"); buf.push_str(" type Err = String;\n"); buf.push_str(" fn from_str(s: &str) -> Result<Self, Self::Err> {\n"); buf.push_str(" match s {\n"); for region in &regions { buf.push_str(" \""); buf.push_str(&region.id); buf.push_str("\" => Ok(Region::"); buf.push_str(&region.variant); buf.push_str("),\n"); } buf.push_str(" _ => Err(\"invalid 
region\".into()),\n"); buf.push_str(" }\n }\n"); buf.push_str("}\n"); fs::write(&dest_path, buf)?; Ok(()) }
/// Register `FMC_BCHDSR3` reader
pub type R = crate::R<FMC_BCHDSR3_SPEC>;

/// Field `EBP5` reader - EBP5
pub type EBP5_R = crate::FieldReader<u16>;

/// Field `EBP6` reader - EBP6
pub type EBP6_R = crate::FieldReader<u16>;

impl R {
    /// Bits 0:12 - EBP5
    #[inline(always)]
    pub fn ebp5(&self) -> EBP5_R {
        // Low 13 bits of the register.
        EBP5_R::new((self.bits & 0x1fff) as u16)
    }

    /// Bits 16:28 - EBP6
    #[inline(always)]
    pub fn ebp6(&self) -> EBP6_R {
        // Bits 16..=28 of the register.
        EBP6_R::new(((self.bits >> 16) & 0x1fff) as u16)
    }
}

/// The maximum error correction capability of the BCH block embedded in the FMC is 8 errors.
///
/// You can [`read`](crate::generic::Reg::read) this register and get [`fmc_bchdsr3::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api).
pub struct FMC_BCHDSR3_SPEC;

impl crate::RegisterSpec for FMC_BCHDSR3_SPEC {
    type Ux = u32;
}

/// `read()` method returns [`fmc_bchdsr3::R`](R) reader structure
impl crate::Readable for FMC_BCHDSR3_SPEC {}

/// `reset()` method sets FMC_BCHDSR3 to value 0
impl crate::Resettable for FMC_BCHDSR3_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use std::fs::File;
use std::io::prelude::*;

/// Grid of map cells; `true` marks a tree (`#`).
#[derive(Debug)]
struct Map {
    trees: Vec<Vec<bool>>,
}

impl From<&str> for Map {
    /// Parses the puzzle input: one row per line, `#` = tree.
    fn from(value: &str) -> Map {
        let trees = value
            .lines()
            // `byte == b'#'` is already a bool; no need for if/else.
            .map(|line| line.bytes().map(|byte| byte == b'#').collect())
            .collect();
        Map { trees }
    }
}

impl Map {
    /// Returns whether there is a tree at `(x, y)`. The map repeats
    /// infinitely to the right, so `x` wraps modulo the row width.
    pub fn check_position(&self, x: usize, y: usize) -> bool {
        // we assume the map is rectangular
        let x = x % self.trees[0].len();
        self.trees[y][x]
    }

    /// Counts trees hit stepping by `(dx, dy)` from the origin until the
    /// bottom edge is passed. Safe on an empty map (returns 0).
    fn check_slope(&self, dx: usize, dy: usize) -> usize {
        let mut x = 0;
        let mut y = 0;
        let mut tree_count = 0;
        while y < self.trees.len() {
            if self.check_position(x, y) {
                tree_count += 1;
            }
            x += dx;
            y += dy;
        }
        tree_count
    }

    /// Part 1: trees hit on the slope (3, 1).
    fn part_1(&self) -> usize {
        self.check_slope(3, 1)
    }

    /// Part 2: product of trees hit over the five required slopes.
    fn part_2(&self) -> usize {
        let slopes = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)];
        slopes
            .iter()
            .fold(1, |product, &(dx, dy)| product * self.check_slope(dx, dy))
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut file = File::open("input.txt")?;
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;
    let map = Map::from(contents.as_str());
    println!("part1: {}", map.part_1());
    // Bug fix: this line previously printed the part-2 answer labelled "part1".
    println!("part2: {}", map.part_2());
    Ok(())
}
pub mod irq;

use arch::cortex_m3::isr::VectorTable;

// Cortex-M3 interrupt vector table. Placed in the dedicated `.vector_table`
// linker section and exported without name mangling so the linker script can
// position it at the address the hardware expects. Both handler groups are
// initialized with their `Default` values — presumably default stub handlers;
// TODO(review): confirm in `arch::cortex_m3::isr` and `irq::InterruptVectors`.
#[link_section = ".vector_table"]
#[no_mangle]
pub static VECTOR_TABLE: VectorTable<irq::InterruptVectors> = VectorTable {
    exception_handlers: Default::default(),
    interrupt_handlers: Default::default(),
};
use crate::codegen::{
    unit::{
        function::{find_func, Func, FuncTyMap, FunctionMap},
        Slot,
    },
    AatbeModule,
};
use std::{collections::HashMap, path::PathBuf};

use llvm_sys_wrapper::{Builder, LLVMBasicBlockRef};
use parser::ast::FunctionType;

/// Lexical scope used during codegen: tracks named slots, visible function
/// overload groups, the enclosing function (if any), an optional LLVM IR
/// builder and the directory the scope was created for.
#[derive(Debug)]
pub struct Scope {
    refs: HashMap<String, Slot>,
    functions: FunctionMap,
    name: String,
    function: Option<(String, FunctionType)>,
    builder: Option<Builder>,
    fdir: Option<PathBuf>,
}

impl Scope {
    /// Creates an empty, unnamed scope.
    pub fn new() -> Self {
        Self {
            refs: HashMap::new(),
            functions: HashMap::new(),
            name: String::default(),
            function: None,
            builder: None,
            fdir: None,
        }
    }

    /// Creates an empty scope with the given name.
    ///
    /// Takes `&str` instead of `&String`; existing `&String` call sites
    /// still compile via deref coercion.
    pub fn with_name(name: &str) -> Self {
        Self {
            name: name.to_owned(),
            ..Self::new()
        }
    }

    /// Creates an unnamed scope owning `builder`.
    pub fn with_builder(builder: Builder) -> Self {
        Self {
            builder: Some(builder),
            ..Self::new()
        }
    }

    /// Creates an unnamed scope owning `builder`, rooted at `fdir`.
    pub fn with_builder_and_fdir(builder: Builder, fdir: PathBuf) -> Self {
        Self {
            builder: Some(builder),
            fdir: Some(fdir),
            ..Self::new()
        }
    }

    /// Creates a scope for the function `func`, named after it.
    pub fn with_function(func: (String, FunctionType), builder: Builder) -> Self {
        Self {
            name: func.0.clone(),
            function: Some(func),
            builder: Some(builder),
            ..Self::new()
        }
    }

    /// Looks up the overload group registered under `name`.
    pub fn func_by_name(&self, name: &str) -> Option<&FuncTyMap> {
        self.functions.get(name)
    }

    /// Registers `func` under `name`, creating the overload group on first
    /// use. Entry API: one hash lookup instead of `contains_key` + `get_mut`.
    pub fn add_function(&mut self, name: &str, func: Func) {
        self.functions
            .entry(name.to_owned())
            .or_insert_with(Vec::new)
            .push(func);
    }

    /// Looks up the slot bound to `name` in this scope.
    pub fn find_symbol(&self, name: &str) -> Option<&Slot> {
        self.refs.get(name)
    }

    /// Binds `unit` to `name` in this scope, replacing any previous binding.
    pub fn add_symbol(&mut self, name: &str, unit: Slot) {
        self.refs.insert(name.to_owned(), unit);
    }

    /// The enclosing function, if this scope belongs to one.
    pub fn function(&self) -> Option<(String, FunctionType)> {
        self.function.clone()
    }

    /// The scope's IR builder, if any.
    pub fn builder(&self) -> Option<&Builder> {
        self.builder.as_ref()
    }

    /// The directory this scope was created for, if any.
    pub fn fdir(&self) -> Option<PathBuf> {
        self.fdir.clone()
    }

    /// Appends a basic block named `name` to this scope's enclosing function.
    /// Returns `None` when the scope has no function or the function's group
    /// cannot be found in `module`.
    pub fn bb(&self, module: &AatbeModule, name: &str) -> Option<LLVMBasicBlockRef> {
        let func = self.function.as_ref()?;
        Some(
            find_func(module.get_func_group(&func.0)?, &func.1)
                .unwrap()
                .append_basic_block(name),
        )
    }
}

impl Default for Scope {
    fn default() -> Self {
        Self::new()
    }
}

/* TODO: Implement local dropping
 * impl Drop for Scope {
 *     fn drop(&mut self) {
 *     }
 * }
 */
extern crate diesel;

use juniper;
use juniper::{FieldError, FieldResult, RootNode};

use diesel::prelude::*;

use crate::db::Pool;
use crate::schema::{products, users};

use super::product::{Product, ProductInput};
use super::user::{User, UserInput};

/// GraphQL execution context; carries the database connection pool.
pub struct Context {
    pub dbpool: Pool,
}

impl juniper::Context for Context {}

pub struct QueryRoot;

#[juniper::object(Context = Context)]
impl QueryRoot {
    #[graphql(description = "List of all users")]
    fn users(context: &Context) -> FieldResult<Vec<User>> {
        // NOTE(review): pool exhaustion or a failed query panics here;
        // consider mapping both to FieldError instead.
        let conn = context.dbpool.get().unwrap();
        let all_users = users::table
            .select(users::all_columns)
            .load::<User>(&*conn)
            .unwrap();
        Ok(all_users)
    }

    #[graphql(description = "Get single user reference by user ID")]
    fn user(context: &Context, id: String) -> FieldResult<User> {
        let conn = context.dbpool.get().unwrap();
        match users::table.find(id).first::<User>(&*conn) {
            Ok(user) => Ok(user),
            // The diesel error is deliberately not exposed to clients.
            Err(_) => Err(FieldError::new(
                "User Not Found",
                graphql_value!({"not_found": "user not found"}),
            )),
        }
    }

    #[graphql(description = "List of all products")]
    fn products(context: &Context) -> FieldResult<Vec<Product>> {
        let conn = context.dbpool.get().unwrap();
        let all_products = products::table
            .select(products::all_columns)
            .load::<Product>(&*conn)
            .unwrap();
        Ok(all_products)
    }

    #[graphql(description = "Get single product reference by product ID")]
    fn product(context: &Context, id: String) -> FieldResult<Product> {
        let conn = context.dbpool.get().unwrap();
        match products::table.find(id).first::<Product>(&*conn) {
            Ok(product) => Ok(product),
            Err(_) => Err(FieldError::new(
                "Product Not Found",
                graphql_value!({"not_found": "product not found"}),
            )),
        }
    }
}

pub struct MutationRoot;

#[juniper::object(Context = Context)]
impl MutationRoot {
    /// Inserts a new user with a freshly generated UUID.
    fn create_user(context: &Context, user: UserInput) -> FieldResult<User> {
        let conn = context.dbpool.get().unwrap();
        let new_user = User {
            id: uuid::Uuid::new_v4().to_simple().to_string(),
            name: user.name,
            email: user.email,
        };
        match diesel::insert_into(users::table)
            .values(&new_user)
            .execute(&*conn)
        {
            Ok(_) => Ok(new_user),
            Err(err) => {
                let msg = format!("{}", err);
                Err(FieldError::new(
                    "Failed to create new user",
                    graphql_value!({ "internal_error": msg }),
                ))
            }
        }
    }

    /// Inserts a new product with a freshly generated UUID.
    fn create_product(context: &Context, product: ProductInput) -> FieldResult<Product> {
        let conn = context.dbpool.get().unwrap();
        let new_product = Product {
            id: uuid::Uuid::new_v4().to_simple().to_string(),
            user_id: product.user_id,
            name: product.name,
            price: product.price,
        };
        match diesel::insert_into(products::table)
            .values(&new_product)
            .execute(&*conn)
        {
            Ok(_) => Ok(new_product),
            Err(err) => {
                let msg = format!("{}", err);
                Err(FieldError::new(
                    "Failed to create new product",
                    graphql_value!({ "internal_error": msg }),
                ))
            }
        }
    }
}

pub type Schema = RootNode<'static, QueryRoot, MutationRoot>;

/// Builds the GraphQL schema from the query and mutation roots.
pub fn create_schema() -> Schema {
    Schema::new(QueryRoot, MutationRoot)
}
pub mod io_posix; use crate::env; use crate::util::status::State; pub const k_default_page_size: usize = 4 * 1024; #[derive(PartialEq)] pub enum WALRecoveryMode { // Original levelDB recovery // We tolerate incomplete record in trailing data on all logs // Use case : This is legacy behavior kTolerateCorruptedTailRecords = 0x00, // Recover from clean shutdown // We don't expect to find any corruption in the WAL // Use case : This is ideal for unit tests and rare applications that // can require high consistency guarantee kAbsoluteConsistency = 0x01, // Recover to point-in-time consistency (default) // We stop the WAL playback on discovering WAL inconsistency // Use case : Ideal for systems that have disk controller cache like // hard disk, SSD without super capacitor that store related data kPointInTimeRecovery = 0x02, // Recovery after a disaster // We ignore any corruption in the WAL and try to salvage as much data as // possible // Use case : Ideal for last ditch effort to recover data or systems that // operate with low grade unrelated data kSkipAnyCorruptedRecords = 0x03, } #[derive(Debug, Clone)] pub struct EnvOptions { // If true, then use mmap to read data pub use_mmap_reads: bool, // If true, then use mmap to write data pub use_mmap_writes: bool, // If true, then use O_DIRECT for reading data pub use_direct_reads: bool, // If true, then use O_DIRECT for writing data pub use_direct_writes: bool, // If false, fallocate() calls are bypassed pub allow_fallocate: bool, // If true, set the FD_CLOEXEC on open fd. pub set_fd_cloexec: bool, // If true, we will preallocate the file with FALLOC_FL_KEEP_SIZE flag, which // means that file size won't change as part of preallocation. // If false, preallocation will also change the file size. This option will // improve the performance in workloads where you sync the data on every // write. 
By default, we set it to true for MANIFEST writes and false for // WAL writes pub fallocate_with_keep_size: bool, pub writable_file_max_buffer_size: usize, pub bytes_per_sync: usize, } impl Default for EnvOptions { fn default() -> EnvOptions { EnvOptions { use_mmap_reads: false, use_mmap_writes: true, use_direct_reads: false, use_direct_writes: true, allow_fallocate: true, set_fd_cloexec: true, fallocate_with_keep_size: true, writable_file_max_buffer_size: 1024 * 1024, bytes_per_sync: 0, } } } pub trait WritableFile: Sized { fn new(filename: String, reopen: bool, preallocation_block_size: usize) -> Self; fn append(&mut self, data: Vec<u8>) -> State; fn sync(&self) -> State; fn close(&self) -> State; fn flush(&self) -> State; fn fcntl(&self) -> bool; fn truncate(&mut self, size: usize) -> State; fn get_required_buffer_alignment(&self) -> usize; #[cfg(target_os = "linux")] fn range_sync(&self, offset: i64, nbytes: i64) -> State; #[cfg(not(target_os = "linux"))] fn range_sync(&self, _offset: i64, _nbytes: i64) -> State { return State::ok(); } fn allocate(&self, _offset: i64, _len: i64) -> State { return State::ok(); } fn prepare_write(&mut self, _offset: usize, _len: usize) {} fn positioned_append(&mut self, _data: Vec<u8>, _offset: usize) -> State { return State::not_supported(); } fn fsync(&self) -> State { return self.sync(); } fn get_file_size(&self) -> usize { 0 } fn use_direct_io(&self) -> bool { false } } pub trait SequentialFile<RHS = Self>: Sized { fn new(filename: String, options: env::EnvOptions, ptr: &mut RHS) -> State; fn skip(&self, n: i64) -> State; fn read(&mut self, n: usize, result: &mut Vec<u8>, scratch: &mut Vec<u8>) -> State; fn use_direct_io(&self) -> bool { false } }
// Placeholder module for serde support of `NotNan`; currently contains no
// items, so the re-export below brings nothing into scope yet.
#[cfg(feature = "serde")]
pub mod serde_notnan {
    // NOTE(review): imported but unused — presumably serde impls/helpers for
    // `NotNan` are planned here; confirm before removing the import.
    use ordered_float::NotNan;
}

#[cfg(feature = "serde")]
pub use self::serde_notnan::*;
// Generates `floor`/`ceil`/`round`/`trunc` (plus `fract`,
// `to_int_unchecked` and `round_from_int`) for one SIMD vector type by
// binding the named LLVM intrinsics via `#[link_name]`.
macro_rules! implement {
    {
        impl $type:ident {
            int_type = $int_type:ident,
            floor = $floor_intrinsic:literal,
            ceil = $ceil_intrinsic:literal,
            round = $round_intrinsic:literal,
            trunc = $trunc_intrinsic:literal,
        }
    } => {
        mod $type {
            // SIMD vector types are not FFI-safe in the usual sense; these
            // declarations are resolved to compiler intrinsics, hence the allow.
            #[allow(improper_ctypes)]
            extern "C" {
                #[link_name = $floor_intrinsic]
                fn floor_intrinsic(x: crate::$type) -> crate::$type;
                #[link_name = $ceil_intrinsic]
                fn ceil_intrinsic(x: crate::$type) -> crate::$type;
                #[link_name = $round_intrinsic]
                fn round_intrinsic(x: crate::$type) -> crate::$type;
                #[link_name = $trunc_intrinsic]
                fn trunc_intrinsic(x: crate::$type) -> crate::$type;
            }

            impl crate::$type {
                /// Returns the largest integer less than or equal to each lane.
                #[must_use = "method returns a new vector and does not mutate the original value"]
                #[inline]
                pub fn floor(self) -> Self {
                    unsafe { floor_intrinsic(self) }
                }

                /// Returns the smallest integer greater than or equal to each lane.
                #[must_use = "method returns a new vector and does not mutate the original value"]
                #[inline]
                pub fn ceil(self) -> Self {
                    unsafe { ceil_intrinsic(self) }
                }

                /// Returns the nearest integer to each lane. Round half-way cases away from 0.0.
                #[must_use = "method returns a new vector and does not mutate the original value"]
                #[inline]
                pub fn round(self) -> Self {
                    unsafe { round_intrinsic(self) }
                }

                /// Returns the integer part of each lane.
                #[must_use = "method returns a new vector and does not mutate the original value"]
                #[inline]
                pub fn trunc(self) -> Self {
                    unsafe { trunc_intrinsic(self) }
                }

                /// Returns the fractional part of each lane.
                // Derived from trunc: x - trunc(x).
                #[must_use = "method returns a new vector and does not mutate the original value"]
                #[inline]
                pub fn fract(self) -> Self {
                    self - self.trunc()
                }

                /// Rounds toward zero and converts to the same-width integer type, assuming that
                /// the value is finite and fits in that type.
                ///
                /// # Safety
                /// The value must:
                ///
                /// * Not be NaN
                /// * Not be infinite
                /// * Be representable in the return type, after truncating off its fractional part
                #[inline]
                pub unsafe fn to_int_unchecked(self) -> crate::$int_type {
                    crate::intrinsics::simd_cast(self)
                }

                /// Creates a floating-point vector from an integer vector. Rounds values that are
                /// not exactly representable.
                #[inline]
                pub fn round_from_int(value: crate::$int_type) -> Self {
                    unsafe { crate::intrinsics::simd_cast(value) }
                }
            }
        }
    }
}

// One instantiation per vector width; the intrinsic names encode the lane
// count and element type (e.g. `v4f32` = 4 lanes of f32).
implement! {
    impl f32x2 {
        int_type = i32x2,
        floor = "llvm.floor.v2f32",
        ceil = "llvm.ceil.v2f32",
        round = "llvm.round.v2f32",
        trunc = "llvm.trunc.v2f32",
    }
}

implement! {
    impl f32x4 {
        int_type = i32x4,
        floor = "llvm.floor.v4f32",
        ceil = "llvm.ceil.v4f32",
        round = "llvm.round.v4f32",
        trunc = "llvm.trunc.v4f32",
    }
}

implement! {
    impl f32x8 {
        int_type = i32x8,
        floor = "llvm.floor.v8f32",
        ceil = "llvm.ceil.v8f32",
        round = "llvm.round.v8f32",
        trunc = "llvm.trunc.v8f32",
    }
}

implement! {
    impl f32x16 {
        int_type = i32x16,
        floor = "llvm.floor.v16f32",
        ceil = "llvm.ceil.v16f32",
        round = "llvm.round.v16f32",
        trunc = "llvm.trunc.v16f32",
    }
}

implement! {
    impl f64x2 {
        int_type = i64x2,
        floor = "llvm.floor.v2f64",
        ceil = "llvm.ceil.v2f64",
        round = "llvm.round.v2f64",
        trunc = "llvm.trunc.v2f64",
    }
}

implement! {
    impl f64x4 {
        int_type = i64x4,
        floor = "llvm.floor.v4f64",
        ceil = "llvm.ceil.v4f64",
        round = "llvm.round.v4f64",
        trunc = "llvm.trunc.v4f64",
    }
}

implement! {
    impl f64x8 {
        int_type = i64x8,
        floor = "llvm.floor.v8f64",
        ceil = "llvm.ceil.v8f64",
        round = "llvm.round.v8f64",
        trunc = "llvm.trunc.v8f64",
    }
}
use crate::bomber::gen::item::InteractiveItem; use crate::bomber::gen::utils::SquareType; use rmps::Serializer; use serde::Serialize; pub trait SerializedEvent { fn to_vec(&self) -> Vec<u8>; } #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] pub struct PlayerMove { pub msg_type: String, pub id: i32, pub x: f32, pub y: f32 } impl SerializedEvent for PlayerMove { fn to_vec(&self) -> Vec<u8> { let mut buf = Vec::new(); self.serialize(&mut Serializer::new(&mut buf)).unwrap(); buf } } #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] pub struct BombMove { pub msg_type: String, pub old_x: f32, pub old_y: f32, pub x: f32, pub y: f32 } impl SerializedEvent for BombMove { fn to_vec(&self) -> Vec<u8> { let mut buf = Vec::new(); self.serialize(&mut Serializer::new(&mut buf)).unwrap(); buf } } #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] pub struct PlayerPutBomb { pub msg_type: String, pub id: i32, pub x: usize, pub y: usize } impl SerializedEvent for PlayerPutBomb { fn to_vec(&self) -> Vec<u8> { let mut buf = Vec::new(); self.serialize(&mut Serializer::new(&mut buf)).unwrap(); buf } } #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] pub struct BombExplode { pub msg_type: String, pub w: u64, pub h: u64, } impl SerializedEvent for BombExplode { fn to_vec(&self) -> Vec<u8> { let mut buf = Vec::new(); self.serialize(&mut Serializer::new(&mut buf)).unwrap(); buf } } #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] pub struct PlayerDie { pub msg_type: String, pub id: u64, } impl SerializedEvent for PlayerDie { fn to_vec(&self) -> Vec<u8> { let mut buf = Vec::new(); self.serialize(&mut Serializer::new(&mut buf)).unwrap(); buf } } #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] pub struct PlayerIdentity { pub msg_type: String, pub id: u64, } impl SerializedEvent for PlayerIdentity { fn to_vec(&self) -> Vec<u8> { let mut buf = Vec::new(); self.serialize(&mut Serializer::new(&mut buf)).unwrap(); buf } } #[derive(Clone, 
Debug, Deserialize, Serialize, PartialEq)] pub struct CreateItem { pub msg_type: String, pub item: Option<InteractiveItem>, pub w: u64, pub h: u64, } impl SerializedEvent for CreateItem { fn to_vec(&self) -> Vec<u8> { let mut buf = Vec::new(); self.serialize(&mut Serializer::new(&mut buf)).unwrap(); buf } } #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] pub struct UpdateSquare { pub msg_type: String, pub square: SquareType, pub x: u64, pub y: u64, } impl SerializedEvent for UpdateSquare { fn to_vec(&self) -> Vec<u8> { let mut buf = Vec::new(); self.serialize(&mut Serializer::new(&mut buf)).unwrap(); buf } } #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)] pub struct DestroyItem { pub msg_type: String, pub w: u64, pub h: u64, } impl SerializedEvent for DestroyItem { fn to_vec(&self) -> Vec<u8> { let mut buf = Vec::new(); self.serialize(&mut Serializer::new(&mut buf)).unwrap(); buf } }
use std::collections::BinaryHeap;
use crate::edge::Edge;

/// A graph node holding its outgoing edges in a priority queue.
#[derive(Debug)]
pub struct Node {
    pub edges: BinaryHeap<Edge>
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn get_smallest_edge() {
        // `Edge`'s ordering is expected to make the heap yield the lowest
        // weight first (min-heap behavior) — presumably its `Ord` impl is
        // inverted; the assertions below rely on that.
        let mut node = Node { edges: BinaryHeap::new() };
        node.edges.push(Edge { weight: -1, target: 10 });
        node.edges.push(Edge { weight: 10, target: 11 });

        let popped = node.edges.pop();
        assert_eq!(popped.unwrap().weight, -1);
        assert_eq!(node.edges.len(), 1);
    }
}
/* Copyright 2015 Tyler Neely Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use std::path::{Path, PathBuf}; use std::io::{self, Read, Write, Seek, SeekFrom}; use std::num::{self, ToPrimitive}; use store::ReadStore; use message_and_offset::MessageAndOffset; use logfile::LogFile; use whence::Whence; pub trait Consumer { pub fn read<'b>(&mut self) -> Option<MessageAndOffset<'b>>; } pub struct BasicConsumer<'a> { store: ReadStore<'a>, } pub enum ConsumerStyle<'a> { ClientTxConsumer(&'a str), GlobalTxConsumer, } impl<'a> BasicConsumer<'a> { pub fn new(directory: &str, whence: Whence) -> Result<BasicConsumer, io::Error> { let rs = try!(ReadStore::new(directory, whence)); try!(rs.seek(whence)); Ok(BasicConsumer { store: rs }) } } impl<'a> Consumer for BasicConsumer<'a> { pub fn read<'b>(&mut self) -> Option<MessageAndOffset<'b>> { self.store.read() } } impl<'a,'c> Iterator for BasicConsumer<'c> { type Item = &'a [u8]; #[inline] fn next(&mut self) -> Option<&'a [u8]> { //TODO implement None } #[inline] fn size_hint(&self) -> (usize, Option<usize>) { //TODO implement (0, None) } }
// GPU-accelerated cellular automaton (Conway-style "Game of Life" with
// configurable survival/birth rules) using Vulkan compute shaders through a
// pre-2018 vulkano API. The grid lives in R8Unorm storage images; live cells
// are seeded as 255u8, which reads back as 1.0 in the shader.
extern crate image;
#[macro_use]
extern crate vulkano_shader_derive;
extern crate vulkano;

use image::{ImageBuffer, Luma};
use std::sync::Arc;
use vulkano::buffer::BufferUsage;
use vulkano::buffer::CpuAccessibleBuffer;
use vulkano::command_buffer::AutoCommandBufferBuilder;
use vulkano::command_buffer::CommandBuffer;
use vulkano::descriptor::descriptor_set::PersistentDescriptorSet;
use vulkano::device::Device;
use vulkano::device::DeviceExtensions;
use vulkano::format::Format;
use vulkano::image::Dimensions;
use vulkano::image::StorageImage;
use vulkano::instance::Features;
use vulkano::instance::Instance;
use vulkano::instance::InstanceExtensions;
use vulkano::instance::PhysicalDevice;
use vulkano::instance::PhysicalDeviceType::DiscreteGpu;
use vulkano::pipeline::ComputePipeline;
use vulkano::sync::GpuFuture;

// Edge length (in cells) of the square simulation grid.
const GRID_SIZE: u32 = 16;

// "Next generation" compute shader: reads the current grid from `img_in`,
// counts the 8 neighbors of each cell (with optional toroidal wrap when
// `tor.opt != 0`), applies the survival (`srvl`) / birth (`brth`) rule
// tables, and writes the next generation into `img_out`.
// No comments may be added inside the source string: it is compiled as-is.
mod ngs {
    #[derive(VulkanoShader)]
    #[ty = "compute"]
    #[src = "
#version 450

layout(local_size_x = 8, local_size_y = 8, local_size_z = 1) in;

layout(set = 0, binding = 0, r8) uniform readonly image2D img_in;
layout(set = 0, binding = 1, r8) uniform writeonly image2D img_out;
layout(set = 0, binding = 2) buffer Toroidal { int opt; } tor;
layout(set = 0, binding = 3) buffer Survival { uint rules[]; } srvl;
layout(set = 0, binding = 4) buffer Birth { uint rules[]; } brth;

void main() {
    ivec2 offsets[8] = {
        ivec2(-1, -1), ivec2(0, -1), ivec2(1, -1),
        ivec2(-1, 0), ivec2(1, 0),
        ivec2(-1, 1), ivec2(0, 1), ivec2(1, 1)
    };

    ivec2 grid_size = imageSize(img_in);
    int living_neighbors = 0;
    for (int i = 0; i < 8; i++) {
        ivec2 access_coord = ivec2(gl_GlobalInvocationID.xy) + offsets[i];
        if (tor.opt != 0) {
            if (access_coord.x == -1) {
                access_coord.x = grid_size.x - 1;
            }
            if (access_coord.y == -1) {
                access_coord.y = grid_size.y - 1;
            }
            if (access_coord.x == grid_size.x) {
                access_coord.x = 0;
            }
            if (access_coord.y == grid_size.y) {
                access_coord.y = 0;
            }
        }
        if (access_coord.x >= 0 && access_coord.x < grid_size.x &&
            access_coord.y >= 0 && access_coord.y < grid_size.y) {
            if (imageLoad(img_in, access_coord).x == 1.0) {
                living_neighbors++;
            }
        }
    }

    vec4 to_write = vec4(0.0);
    if (imageLoad(img_in, ivec2(gl_GlobalInvocationID.xy)).x == 1.0) {
        for (int i = 0; i < srvl.rules.length(); i++) {
            if (living_neighbors == srvl.rules[i]) {
                to_write.x = 1.0;
            }
        }
    } else {
        for (int i = 0; i < brth.rules.length(); i++) {
            if (living_neighbors == brth.rules[i]) {
                to_write.x = 1.0;
            }
        }
    }
    imageStore(img_out, ivec2(gl_GlobalInvocationID.xy), to_write);
}
"]
    struct Dummy;
}

// "Flat map" compute shader used for pattern-boundary detection: for every
// live cell it marks the cell's column in `fmx` and row in `fmy`, producing
// 1-D occupancy maps along each axis.
mod fms {
    #[derive(VulkanoShader)]
    #[ty = "compute"]
    #[src = "
#version 450

layout(local_size_x = 8, local_size_y = 8, local_size_z = 1) in;

layout(set = 0, binding = 0, r8) uniform readonly image2D img;
layout(set = 0, binding = 1) buffer FlatMapX { int data[]; } fmx;
layout(set = 0, binding = 2) buffer FlatMapY { int data[]; } fmy;

void main() {
    ivec2 access_coord = ivec2(gl_GlobalInvocationID.xy);
    if (imageLoad(img, access_coord).x == 1.0) {
        fmx.data[access_coord.x] = 1;
        fmy.data[access_coord.y] = 1;
    }
}
"]
    struct Dummy;
}

/// Sets up a Vulkan device on the first discrete GPU, seeds a small pattern,
/// runs one generation of the automaton on the GPU, and writes the input and
/// output grids to `input.png` / `output.png`.
fn main() {
    let instance = Instance::new(None, &InstanceExtensions::none(), None).expect("failed to create instance");
    // Restrict device selection to a discrete GPU; panics when none exists.
    let physical = PhysicalDevice::enumerate(&instance)
        .find(|&dev| dev.ty() == DiscreteGpu)
        .expect("no discrete GPU available");
    let queue_family = physical
        .queue_families()
        .find(|&q| q.supports_graphics())
        .expect("couldn't find a graphical queue family");
    // R8 storage images require the extended storage-image formats feature.
    let img_extended_formats_feature = Features {
        shader_storage_image_extended_formats: true,
        ..Features::none()
    };
    let (device, mut queues) = Device::new(
        physical,
        &img_extended_formats_feature,
        &DeviceExtensions::none(),
        [(queue_family, 0.5)].iter().cloned(),
    ).expect("failed to create device");
    let queue = queues.next().unwrap();

    // Seed pattern: a 3-cell horizontal run on row 0 and a 3-cell vertical
    // run in column 3 (rows 4..7). 255u8 reads back as 1.0 through R8Unorm.
    let mut grid_in = vec![0u8; (GRID_SIZE * GRID_SIZE) as usize];
    for i in 0..3 {
        grid_in[0 * GRID_SIZE as usize + 0 + i] = 255u8;
    }
    for j in 0..3 {
        grid_in[(4 + j) * GRID_SIZE as usize + 3] = 255u8;
    }
    let buff_in =
        CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), grid_in.into_iter())
            .expect("failed to create buffer");
    // NOTE(review): `recenter_pattern` only produces `centered.png`; it never
    // writes back into `buff_in`, so the simulation below still runs on the
    // original (un-centered) grid — confirm that is intended.
    recenter_pattern(device.clone(), queue.clone(), buff_in.clone());
    let image_in = StorageImage::new(
        device.clone(),
        Dimensions::Dim2d {
            width: GRID_SIZE,
            height: GRID_SIZE,
        },
        Format::R8Unorm,
        Some(queue.family()),
    ).expect("failed to create image");
    let image_out = StorageImage::new(
        device.clone(),
        Dimensions::Dim2d {
            width: GRID_SIZE,
            height: GRID_SIZE,
        },
        Format::R8Unorm,
        Some(queue.family()),
    ).expect("failed to create image");
    let buff_out = CpuAccessibleBuffer::from_iter(
        device.clone(),
        BufferUsage::all(),
        (0..GRID_SIZE * GRID_SIZE).map(|_| 0u8),
    ).expect("failed to create buffer");
    // Rule configuration: toroidal wrap on, classic Life rules (S23/B3).
    let toroidal_opt = 1;
    let toroidal_buff =
        CpuAccessibleBuffer::from_data(device.clone(), BufferUsage::all(), toroidal_opt)
            .expect("failed to create buffer");
    let survival_opt: Vec<u32> = vec![2, 3];
    let survival_buff = CpuAccessibleBuffer::from_iter(
        device.clone(),
        BufferUsage::all(),
        survival_opt.into_iter(),
    ).expect("failed to create buffer");
    let birth_opt: Vec<u32> = vec![3];
    let birth_buff =
        CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(), birth_opt.into_iter())
            .expect("failed to create buffer");
    let shader = ngs::Shader::load(device.clone()).expect("failed to create shader module");
    let compute_pipeline = Arc::new(
        ComputePipeline::new(device.clone(), &shader.main_entry_point(), &())
            .expect("failed to create compute pipeline"),
    );
    // Descriptor bindings must match the shader's set 0 layout order.
    let set = Arc::new(
        PersistentDescriptorSet::start(compute_pipeline.clone(), 0)
            .add_image(image_in.clone())
            .unwrap()
            .add_image(image_out.clone())
            .unwrap()
            .add_buffer(toroidal_buff.clone())
            .unwrap()
            .add_buffer(survival_buff.clone())
            .unwrap()
            .add_buffer(birth_buff.clone())
            .unwrap()
            .build()
            .unwrap(),
    );
    // Upload the grid, run one generation (workgroups of 8x8, hence the
    // ceil-divide), and read the result back into `buff_out`.
    let command_buffer = AutoCommandBufferBuilder::new(device.clone(), queue.family())
        .unwrap()
        .copy_buffer_to_image(buff_in.clone(), image_in.clone())
        .unwrap()
        .dispatch(
            [
                (GRID_SIZE as f64 / 8.0).ceil() as u32,
                (GRID_SIZE as f64 / 8.0).ceil() as u32,
                1,
            ],
            compute_pipeline.clone(),
            set.clone(),
            (),
        )
        .unwrap()
        .copy_image_to_buffer(image_out.clone(), buff_out.clone())
        .unwrap()
        .build()
        .unwrap();
    let finished = command_buffer.execute(queue.clone()).unwrap();
    finished
        .then_signal_fence_and_flush()
        .unwrap()
        .wait(None)
        .unwrap();
    let input_content = buff_in.read().unwrap();
    let input =
        ImageBuffer::<Luma<u8>, _>::from_raw(GRID_SIZE, GRID_SIZE, &input_content[..]).unwrap();
    input.save("input.png").unwrap();
    let output_content = buff_out.read().unwrap();
    let output =
        ImageBuffer::<Luma<u8>, _>::from_raw(GRID_SIZE, GRID_SIZE, &output_content[..]).unwrap();
    output.save("output.png").unwrap();
}

/// Runs the `fms` shader over `grid_buff` and returns the bounding box of the
/// live pattern as `(min_x, max_x, min_y, max_y)` — each `None` when the grid
/// holds no live cell at all.
fn compute_pattern_boundaries(
    device: Arc<Device>,
    queue: Arc<vulkano::device::Queue>,
    grid_buff: Arc<CpuAccessibleBuffer<[u8]>>,
) -> (Option<usize>, Option<usize>, Option<usize>, Option<usize>) {
    let grid_img = StorageImage::new(
        device.clone(),
        Dimensions::Dim2d {
            width: GRID_SIZE,
            height: GRID_SIZE,
        },
        Format::R8Unorm,
        Some(queue.family()),
    ).expect("failed to create image");
    // One flag per column / per row, written to by the shader.
    let flat_map_x = CpuAccessibleBuffer::from_iter(
        device.clone(),
        BufferUsage::all(),
        (0..GRID_SIZE).map(|_| 0),
    ).expect("failed to create buffer");
    let flat_map_y = CpuAccessibleBuffer::from_iter(
        device.clone(),
        BufferUsage::all(),
        (0..GRID_SIZE).map(|_| 0),
    ).expect("failed to create buffer");
    let shader = fms::Shader::load(device.clone()).expect("failed to create shader module");
    let compute_pipeline = Arc::new(
        ComputePipeline::new(device.clone(), &shader.main_entry_point(), &())
            .expect("failed to create compute pipeline"),
    );
    let set = Arc::new(
        PersistentDescriptorSet::start(compute_pipeline.clone(), 0)
            .add_image(grid_img.clone())
            .unwrap()
            .add_buffer(flat_map_x.clone())
            .unwrap()
            .add_buffer(flat_map_y.clone())
            .unwrap()
            .build()
            .unwrap(),
    );
    let command_buffer = AutoCommandBufferBuilder::new(device.clone(), queue.family())
        .unwrap()
        .copy_buffer_to_image(grid_buff.clone(), grid_img.clone())
        .unwrap()
        .dispatch(
            [
                (GRID_SIZE as f64 / 8.0).ceil() as u32,
                (GRID_SIZE as f64 / 8.0).ceil() as u32,
                1,
            ],
            compute_pipeline.clone(),
            set.clone(),
            (),
        )
        .unwrap()
        .build()
        .unwrap();
    let finished = command_buffer.execute(queue.clone()).unwrap();
    finished
        .then_signal_fence_and_flush()
        .unwrap()
        .wait(None)
        .unwrap();
    // First/last marked column and row give the pattern's bounding box.
    let min_x = flat_map_x.read().unwrap().iter().position(|&n| n > 0);
    let max_x = flat_map_x.read().unwrap().iter().rposition(|&n| n > 0);
    let min_y = flat_map_y.read().unwrap().iter().position(|&n| n > 0);
    let max_y = flat_map_y.read().unwrap().iter().rposition(|&n| n > 0);
    (min_x, max_x, min_y, max_y)
}

/// Crops the live pattern out of `grid_buff`, pads it with a one-cell empty
/// border, and saves the result to `centered.png`. Does nothing when the grid
/// is empty. Note: `grid_buff` itself is only read, never modified.
fn recenter_pattern(
    device: Arc<Device>,
    queue: Arc<vulkano::device::Queue>,
    grid_buff: Arc<CpuAccessibleBuffer<[u8]>>,
) {
    let (min_x, max_x, min_y, max_y) =
        compute_pattern_boundaries(device.clone(), queue.clone(), grid_buff.clone());
    if min_x.is_none() || max_x.is_none() || min_y.is_none() || max_y.is_none() {
        return;
    }
    let (min_x, max_x, min_y, max_y) = (
        min_x.unwrap(),
        max_x.unwrap(),
        min_y.unwrap(),
        max_y.unwrap(),
    );
    let pattern_origin = (min_x as i32, min_y as i32);
    let pattern_size = ((max_x - min_x + 1) as u32, (max_y - min_y + 1) as u32);
    let grid_img = StorageImage::new(
        device.clone(),
        Dimensions::Dim2d {
            width: GRID_SIZE,
            height: GRID_SIZE,
        },
        Format::R8Unorm,
        Some(queue.family()),
    ).expect("failed to create image");
    // Destination image: pattern size plus a one-cell border on each side.
    let centered_img = StorageImage::new(
        device.clone(),
        Dimensions::Dim2d {
            width: pattern_size.0 + 2,
            height: pattern_size.1 + 2,
        },
        Format::R8Unorm,
        Some(queue.family()),
    ).expect("failed to create image");
    let centered_buff = CpuAccessibleBuffer::from_iter(
        device.clone(),
        BufferUsage::all(),
        (0..(pattern_size.0 + 2) * (pattern_size.1 + 2)).map(|_| 0u8),
    ).expect("failed to create buffer");
    // Clear the destination first, in its own submission.
    let command_buffer = AutoCommandBufferBuilder::new(device.clone(), queue.family())
        .unwrap()
        .clear_color_image(
            centered_img.clone(),
            vulkano::format::ClearValue::Float([0.0, 0.0, 0.0, 0.0]),
        )
        .unwrap()
        .build()
        .unwrap();
    let finished = command_buffer.execute(queue.clone()).unwrap();
    finished
        .then_signal_fence_and_flush()
        .unwrap()
        .wait(None)
        .unwrap();
    // Copy the pattern's bounding box into the bordered image at (1, 1),
    // then read the result back to the CPU.
    let command_buffer = AutoCommandBufferBuilder::new(device.clone(), queue.family())
        .unwrap()
        .copy_buffer_to_image(grid_buff.clone(), grid_img.clone())
        .unwrap()
        .copy_image(
            grid_img.clone(),
            [pattern_origin.0, pattern_origin.1, 0],
            0,
            0,
            centered_img.clone(),
            [1, 1, 0],
            0,
            0,
            [pattern_size.0, pattern_size.1, 1],
            1,
        )
        .unwrap()
        .copy_image_to_buffer(centered_img.clone(), centered_buff.clone())
        .unwrap()
        .build()
        .unwrap();
    let finished = command_buffer.execute(queue.clone()).unwrap();
    finished
        .then_signal_fence_and_flush()
        .unwrap()
        .wait(None)
        .unwrap();
    let centered_content = centered_buff.read().unwrap();
    let output = ImageBuffer::<Luma<u8>, _>::from_raw(
        (pattern_size.0 + 2) as u32,
        (pattern_size.1 + 2) as u32,
        &centered_content[..],
    ).unwrap();
    output.save("centered.png").unwrap();
}
// Type representation for the Laze compiler: `LazeType_` carries a type's
// byte size, whether it escapes (must live in linear memory rather than a
// local), and its `LazeTypeData` variant. Also provides the lowering to
// WebAssembly value types and the binary-operator type-reconciliation rule.
use std::io::{stderr, Write};

use crate::{
    ast::ty::TypeList,
    wasm::il::{
        exp::{Exp, Exp_},
        util::{WasmType, WasmTypeList},
    },
};

pub type LazeTypeList = Vec<LazeType>;
// Types are passed around boxed; `LazeType_` is the unboxed payload.
pub type LazeType = Box<LazeType_>;

#[derive(Clone, Debug, PartialEq)]
pub struct LazeType_ {
    // Size in bytes (see the constructors below: int/real are 8,
    // short/char/bool/pointers are 4, void/none are 0).
    pub size: i32,
    // Whether values of this type escape to memory (true for classes,
    // arrays and templates).
    pub escape: bool,
    pub data: LazeTypeData,
}

#[derive(Clone, Debug, PartialEq)]
pub enum LazeTypeData {
    Void,
    Int,
    Short,
    Real,
    Bool,
    Char,
    Class(String),
    Template(String, LazeTypeList, TypeList),
    Array(LazeType, i32),
    Pointer(LazeType),
    Func(LazeTypeList, LazeType, i32),
    None,
}

impl LazeType_ {
    /// Lowers this type to its WebAssembly value type: `Int` -> i64,
    /// `Real` -> f64, everything reference-like (classes, arrays, pointers,
    /// functions, templates) -> i32 (a linear-memory address / table index),
    /// and `Void`/`None` -> no value.
    pub fn to_wasm_type(&self) -> WasmType {
        match self.data {
            LazeTypeData::Void => WasmType::None,
            LazeTypeData::Int => WasmType::I64,
            LazeTypeData::Bool => WasmType::I32,
            LazeTypeData::Char => WasmType::I32,
            LazeTypeData::Short => WasmType::I32,
            LazeTypeData::Real => WasmType::F64,
            LazeTypeData::Array(_, _) => WasmType::I32,
            LazeTypeData::Class(_) => WasmType::I32,
            LazeTypeData::Func(_, _, _) => WasmType::I32,
            LazeTypeData::Pointer(_) => WasmType::I32,
            LazeTypeData::Template(_, _, _) => WasmType::I32,
            LazeTypeData::None => WasmType::None,
        }
    }
    /// Lowers a whole list of types, preserving order.
    pub fn list_to_wasm_type(list: &LazeTypeList) -> WasmTypeList {
        let mut result = vec![];
        for ty in list {
            result.push(ty.to_wasm_type());
        }
        result
    }
    // --- Constructors: one per `LazeTypeData` variant. ---
    pub fn none_type() -> LazeType {
        Box::new(LazeType_ {
            size: 0,
            escape: false,
            data: LazeTypeData::None,
        })
    }
    pub fn void_type() -> LazeType {
        Box::new(LazeType_ {
            size: 0,
            escape: false,
            data: LazeTypeData::Void,
        })
    }
    pub fn int_type() -> LazeType {
        Box::new(LazeType_ {
            size: 8,
            escape: false,
            data: LazeTypeData::Int,
        })
    }
    pub fn real_type() -> LazeType {
        Box::new(LazeType_ {
            size: 8,
            escape: false,
            data: LazeTypeData::Real,
        })
    }
    pub fn char_type() -> LazeType {
        Box::new(LazeType_ {
            size: 4,
            escape: false,
            data: LazeTypeData::Char,
        })
    }
    pub fn short_type() -> LazeType {
        Box::new(LazeType_ {
            size: 4,
            escape: false,
            data: LazeTypeData::Short,
        })
    }
    pub fn bool_type() -> LazeType {
        Box::new(LazeType_ {
            size: 4,
            escape: false,
            data: LazeTypeData::Bool,
        })
    }
    /// Class instances escape; `size` is the instance footprint in bytes.
    pub fn class_type(name: String, size: i32) -> LazeType {
        Box::new(LazeType_ {
            size,
            escape: true,
            data: LazeTypeData::Class(name),
        })
    }
    /// Arrays escape; total size is element size times element count.
    pub fn array_type(ty: LazeType, size: i32) -> LazeType {
        Box::new(LazeType_ {
            size: ty.size * size,
            escape: true,
            data: LazeTypeData::Array(ty, size),
        })
    }
    // Pointers are 4 bytes, matching their i32 (wasm32 address) lowering.
    pub fn pointer_type(ty: LazeType) -> LazeType {
        Box::new(LazeType_ {
            size: 4,
            escape: false,
            data: LazeTypeData::Pointer(ty),
        })
    }
    pub fn template_type(
        name: String,
        lazetype_params: LazeTypeList,
        type_params: TypeList,
        size: i32,
    ) -> LazeType {
        Box::new(LazeType_ {
            size,
            escape: true,
            data: LazeTypeData::Template(name, lazetype_params, type_params),
        })
    }
    /// `type_index` is the function's index in the wasm type section.
    pub fn func_type(params: LazeTypeList, result: LazeType, type_index: i32) -> LazeType {
        Box::new(LazeType_ {
            size: 4,
            escape: false,
            data: LazeTypeData::Func(params, result, type_index),
        })
    }
}

/// Reconciles the operand types of a binary operator.
///
/// Returns the common result type together with both operand expressions,
/// inserting a `convert_exp` on the narrower operand where needed:
/// identical types pass through unchanged; Int/Short widen to Real when mixed
/// with Real; Short widens to Int; Short<->Char pass through without a
/// conversion (both lower to i32) but emit a warning on stderr.
/// Returns `None` for every other combination (no implicit conversion).
// NOTE(review): the `'a` lifetime parameter is unused — harmless, but it
// could be dropped.
pub fn comp_type_binop<'a>(
    left: LazeType,
    left_exp: Exp,
    right: LazeType,
    right_exp: Exp,
) -> Option<(LazeType, Exp, Exp)> {
    if left == right {
        Some((left, left_exp, right_exp))
    } else {
        let left_wasm_type = left.to_wasm_type();
        let right_wasm_type = right.to_wasm_type();
        match left.data {
            LazeTypeData::Int => match right.data {
                LazeTypeData::Short => {
                    // Widen the right (short, i32) operand to i64.
                    Some((left, left_exp, Exp_::convert_exp(left_wasm_type, right_exp)))
                }
                LazeTypeData::Real => Some((
                    // Mixed Int/Real: the result is Real; convert the left.
                    right,
                    Exp_::convert_exp(right_wasm_type, left_exp),
                    right_exp,
                )),
                _ => None,
            },
            LazeTypeData::Real => match right.data {
                LazeTypeData::Int | LazeTypeData::Short => {
                    // Result stays Real; convert the integer operand.
                    Some((left, left_exp, Exp_::convert_exp(left_wasm_type, right_exp)))
                }
                _ => None,
            },
            LazeTypeData::Short => match right.data {
                LazeTypeData::Int | LazeTypeData::Real => Some((
                    // Short widens to the right-hand type.
                    right,
                    Exp_::convert_exp(right_wasm_type, left_exp),
                    right_exp,
                )),
                LazeTypeData::Char => {
                    // Both lower to i32, so no conversion is emitted — only
                    // a diagnostic. The write result is deliberately ignored.
                    let _ = writeln!(
                        stderr(),
                        "Warning: conversion from char to short is dangerous."
                    );
                    Some((left, left_exp, right_exp))
                }
                _ => None,
            },
            LazeTypeData::Char => match right.data {
                LazeTypeData::Short => {
                    let _ = writeln!(
                        stderr(),
                        "Warning: conversion from short to char is dangerous."
                    );
                    Some((left, left_exp, right_exp))
                }
                _ => None,
            },
            _ => None,
        }
    }
}
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//! Common data structures.

pub use id_map::Entry;
pub(crate) use id_map::IdMap;
pub use id_map_collection::{IdMapCollection, IdMapCollectionKey};

/// Identifier map data structure.
///
/// Defines the [`IdMap`] data structure: A generic container mapped keyed
/// by an internally managed pool of identifiers kept densely packed.
pub(crate) mod id_map {
    // Keys are plain vector indices into `IdMap::data`.
    type Key = usize;

    /// IdMapEntry where all free blocks are linked together.
    #[derive(PartialEq, Eq, Debug)]
    enum IdMapEntry<T> {
        /// The Entry should either be allocated and contains a value...
        Allocated(T),
        /// Or it is not currently used and should be part of a freelist.
        Free(FreeListLink),
    }

    /// The link of the doubly-linked free-list.
    #[derive(PartialEq, Eq, Debug, Clone, Copy)]
    struct FreeListLink {
        /// The index of the previous free block in the list.
        prev: Option<usize>,
        /// The index of the next free block in the list.
        next: Option<usize>,
    }

    impl Default for FreeListLink {
        /// By default, an entry is not linked into the list.
        fn default() -> Self {
            Self { prev: None, next: None }
        }
    }

    /// Stores positions of the head and tail of the free-list
    /// linked by `FreeListLink`.
    #[derive(PartialEq, Eq, Debug, Clone, Copy)]
    struct FreeList {
        /// The index of the first free block.
        head: usize,
        /// The index of the last free block.
        tail: usize,
    }

    impl FreeList {
        /// Construct a freelist with only one element.
        fn singleton(elem: usize) -> FreeList {
            FreeList { head: elem, tail: elem }
        }
    }

    // The following is to mimic the `Option<T>` API.
    impl<T> IdMapEntry<T> {
        /// If the entry is allocated, return the reference,
        /// otherwise, return `None`.
        fn as_ref(&self) -> Option<&T> {
            match self {
                IdMapEntry::Allocated(e) => Some(e),
                IdMapEntry::Free(_) => None,
            }
        }

        /// If the entry is allocated, return the mutable reference,
        /// otherwise, return `None`.
        fn as_mut(&mut self) -> Option<&mut T> {
            match self {
                IdMapEntry::Allocated(e) => Some(e),
                IdMapEntry::Free(_) => None,
            }
        }

        /// Convert the entry into an option.
        fn into_option(self) -> Option<T> {
            match self {
                IdMapEntry::Allocated(e) => Some(e),
                IdMapEntry::Free(_) => None,
            }
        }

        /// Return whether the entry is allocated.
        fn is_allocated(&self) -> bool {
            match self {
                IdMapEntry::Allocated(_) => true,
                IdMapEntry::Free(_) => false,
            }
        }

        /// Return whether the entry is free.
        fn is_free(&self) -> bool {
            !self.is_allocated()
        }

        /// Return the old entry and replace it with `new`.
        fn replace(&mut self, new: T) -> IdMapEntry<T> {
            std::mem::replace(self, IdMapEntry::Allocated(new))
        }

        /// Take the old allocated entry and replace it with a free entry.
        ///
        /// The new free entry's `prev` pointer will be `None`, so the new
        /// free entry is intended to be inserted at the front of the freelist.
        /// If the old entry is already free, this is a no-op.
        fn take_allocated(&mut self, next: Option<usize>) -> Option<T> {
            if self.is_allocated() {
                std::mem::replace(self, IdMapEntry::Free(FreeListLink { prev: None, next }))
                    .into_option()
            } else {
                // If it is currently free, we don't want to unlink
                // the entry and link it back at the head again.
                None
            }
        }

        /// Return the stored value, panic if not allocated.
        fn unwrap(self) -> T {
            self.into_option().unwrap()
        }

        /// Return a mutable reference to the freelist link, panic if allocated.
        fn freelist_link_mut(&mut self) -> &mut FreeListLink {
            match self {
                IdMapEntry::Free(link) => link,
                _ => unreachable!("An allocated block should never be linked into the free list"),
            }
        }

        /// Return a reference to the freelist link, panic if allocated.
        fn freelist_link(&self) -> &FreeListLink {
            match self {
                IdMapEntry::Free(link) => link,
                _ => unreachable!("An allocated block should never be linked into the free list"),
            }
        }
    }

    /// A generic container for `T` keyed by densily packed integers.
    ///
    /// `IdMap` is a generic container keyed by `usize` that manages its own
    /// key pool. `IdMap` reuses keys that are free to keep its key pool as
    /// dense as possible.
    ///
    /// The main guarantee provided by `IdMap` is that all `get` operations are
    /// provided in O(1) without the need to hash the keys. The only operations
    /// of `IdMap` that are used in the hot path are the `get` operations.
    ///
    /// All operations that mutate the `IdMap` are O(log(n)) average.
    ///
    /// `push` will grab the lowest free `id` and assign it to the given value,
    /// returning the assigned `id`. `insert` can be used for assigning a
    /// specific `id` to an object, and returns the previous object at that `id`
    /// if any.
    pub(crate) struct IdMap<T> {
        freelist: Option<FreeList>,
        data: Vec<IdMapEntry<T>>,
    }

    impl<T> IdMap<T> {
        /// Creates a new empty [`IdMap`].
        pub(crate) fn new() -> Self {
            Self { freelist: None, data: Vec::new() }
        }

        /// Returns `true` if there are no items in [`IdMap`].
        pub(crate) fn is_empty(&self) -> bool {
            // Because of `compress`, our map is empty if
            // and only if the underlying vector is empty.
            // If the underlying vector is not empty but our
            // map is empty, it must be the case where the
            // underlying vector contains nothing but free entries,
            // and all these entries should be reclaimed when
            // the last allocated entry is removed.
            self.data.is_empty()
        }

        /// Returns a reference to the item indexed by `key`, or `None` if
        /// the `key` doesn't exist.
        pub(crate) fn get(&self, key: Key) -> Option<&T> {
            self.data.get(key).and_then(|v| v.as_ref())
        }

        /// Returns a mutable reference to the item indexed by `key`, or `None`
        /// if the `key` doesn't exist.
        pub(crate) fn get_mut(&mut self, key: Key) -> Option<&mut T> {
            self.data.get_mut(key).and_then(|v| v.as_mut())
        }

        /// Removes item indexed by `key` from the container.
        ///
        /// Returns the removed item if it exists, or `None` otherwise.
        ///
        /// Note: the worst case complexity of `remove` is O(key) if the
        /// backing data structure of the [`IdMap`] is too sparse.
        pub(crate) fn remove(&mut self, key: Key) -> Option<T> {
            let old_head = self.freelist.map(|l| l.head);
            let r = self.data.get_mut(key).and_then(|v| v.take_allocated(old_head));
            if r.is_some() {
                // If it was allocated, we add the removed entry to the
                // head of the free-list. (`take_allocated` already set the
                // removed slot's link to {prev: None, next: old_head}.)
                match self.freelist.as_mut() {
                    Some(FreeList { head, .. }) => {
                        self.data[*head].freelist_link_mut().prev = Some(key);
                        *head = key;
                    }
                    None => self.freelist = Some(FreeList::singleton(key)),
                }
                self.compress();
            }
            r
        }

        /// Inserts `item` at `key`.
        ///
        /// If the [`IdMap`] already contained an item indexed by `key`,
        /// `insert` returns it, or `None` otherwise.
        ///
        /// Note: The worst case complexity of `insert` is O(key) if `key` is
        /// larger than the number of items currently held by the [`IdMap`].
        pub(crate) fn insert(&mut self, key: Key, item: T) -> Option<T> {
            if key < self.data.len() {
                if self.data[key].is_free() {
                    self.freelist_unlink(key);
                }
                self.data[key].replace(item).into_option()
            } else {
                let start_len = self.data.len();
                // Fill the gap `start_len .. key` with free entries.
                // Currently, the free entries introduced by `insert`
                // is linked at the end of the free list so that hopefully
                // these free entries near the end will get less likely to
                // be allocated than those near the beginning, this may help
                // reduce the memory footprint because we have increased the
                // chance for the underlying vector to be compressed.
                // TODO: explore whether we can reorder the list on the fly
                // to further increase the chance for compressing.
                for idx in start_len..key {
                    // These new free entries will be linked to each other, except:
                    // - the first entry's prev should point to the old tail.
                    // - the last entry's next should be None.
self.data.push(IdMapEntry::Free(FreeListLink { prev: if idx == start_len { self.freelist.map(|l| l.tail) } else { Some(idx - 1) }, next: if idx == key - 1 { None } else { Some(idx + 1) }, })); } // If `key > start_len`, we have inserted at least one free entry, // so we have to update our freelist. if key > start_len { let new_tail = key - 1; match self.freelist.as_mut() { Some(FreeList { tail, .. }) => { self.data[*tail].freelist_link_mut().next = Some(start_len); *tail = new_tail; } None => { self.freelist = Some(FreeList::singleton(new_tail)); } } } // And finally we insert our item into the map. self.data.push(IdMapEntry::Allocated(item)); None } } /// Inserts `item` into the [`IdMap`]. /// /// `push` inserts a new `item` into the [`IdMap`] and returns the /// key value allocated for `item`. `push` will allocate *any* key that /// is currently free in the internal structure, so it may return a key /// that was used previously but has since been removed. /// /// Note: The worst case complexity of `push` is O(n) where n is the /// number of items held by the [`IdMap`]. This can happen if the /// internal structure gets fragmented. pub(crate) fn push(&mut self, item: T) -> Key { if let Some(FreeList { head, .. }) = self.freelist.as_mut() { let ret = *head; let old = std::mem::replace(self.data.get_mut(ret).unwrap(), IdMapEntry::Allocated(item)); // Update the head of the freelist. match old.freelist_link().next { Some(new_head) => *head = new_head, None => self.freelist = None, } ret } else { // If we run out of freelist, we simply push a new entry // into the underlying vector. let key = self.data.len(); self.data.push(IdMapEntry::Allocated(item)); key } } /// Compresses the tail of the internal `Vec`. /// /// `compress` removes all trailing elements in `data` that are `None`, /// shrinking the internal `Vec`. fn compress(&mut self) { // First, find the last non-free entry. 
            if let Some(idx) = self.data.iter().enumerate().rev().find_map(|(k, v)| {
                if v.is_allocated() {
                    Some(k)
                } else {
                    None
                }
            }) {
                // Remove all the trailing free entries.
                for i in idx + 1..self.data.len() {
                    self.freelist_unlink(i);
                }
                self.data.truncate(idx + 1);
            } else {
                // There is nothing left in the vector.
                self.data.clear();
                self.freelist = None;
            }
        }

        /// Creates an iterator over the containing items and their associated
        /// keys.
        pub(crate) fn iter(&self) -> impl Iterator<Item = (Key, &T)> {
            self.data.iter().enumerate().filter_map(|(k, v)| v.as_ref().map(|t| (k, t)))
        }

        /// Creates a mutable iterator over the containing items and their
        /// associated keys.
        pub(crate) fn iter_mut(&mut self) -> impl Iterator<Item = (Key, &mut T)> {
            self.data.iter_mut().enumerate().filter_map(|(k, v)| v.as_mut().map(|t| (k, t)))
        }

        /// Gets the given key's corresponding entry in the map for in-place
        /// manipulation.
        pub(crate) fn entry(&mut self, key: usize) -> Entry<'_, usize, T> {
            // Free slots inside the vector and keys beyond its end are both
            // treated as vacant.
            if key < self.data.len() && self.data[key].is_allocated() {
                Entry::Occupied(OccupiedEntry { key, id_map: self })
            } else {
                Entry::Vacant(VacantEntry { key, id_map: self })
            }
        }

        /// Unlink an entry from the freelist.
        ///
        /// We want to do so whenever a freed block turns allocated.
        fn freelist_unlink(&mut self, idx: usize) {
            // `.clone()` here copies the link out so `self.data` can be
            // mutated below (`FreeListLink` is `Copy`, so this is cheap).
            let FreeListLink { prev, next } = self.data[idx].freelist_link().clone();
            match (prev, next) {
                (Some(prev), Some(next)) => {
                    // A normal node in the middle of a list.
                    self.data[prev].freelist_link_mut().next = Some(next);
                    self.data[next].freelist_link_mut().prev = Some(prev);
                }
                (Some(prev), None) => {
                    // The node at the tail.
                    self.data[prev].freelist_link_mut().next = next;
                    self.freelist.as_mut().unwrap().tail = prev;
                }
                (None, Some(next)) => {
                    // The node at the head.
                    self.data[next].freelist_link_mut().prev = prev;
                    self.freelist.as_mut().unwrap().head = next;
                }
                (None, None) => {
                    // We are the last node.
                    self.freelist = None;
                }
            }
        }
    }

    impl<T> Default for IdMap<T> {
        fn default() -> Self {
            Self::new()
        }
    }

    /// Maps an arbitrary key type onto the underlying `usize` index.
    pub trait EntryKey {
        fn get_key_index(&self) -> usize;
    }

    impl EntryKey for usize {
        fn get_key_index(&self) -> usize {
            *self
        }
    }

    /// A view into a vacant entry in a map. It is part of the [`Entry`] enum.
    pub struct VacantEntry<'a, K, T> {
        key: K,
        id_map: &'a mut IdMap<T>,
    }

    impl<'a, K, T> VacantEntry<'a, K, T> {
        /// Sets the value of the entry with the VacantEntry's key, and returns
        /// a mutable reference to it.
        pub fn insert(self, value: T) -> &'a mut T
        where
            K: EntryKey,
        {
            // The slot was vacant, so `insert` must return `None`.
            assert!(self.id_map.insert(self.key.get_key_index(), value).is_none());
            self.id_map.data[self.key.get_key_index()].as_mut().unwrap()
        }

        /// Gets a reference to the key that would be used when inserting a
        /// value through the `VacantEntry`.
        pub fn key(&self) -> &K {
            &self.key
        }

        /// Take ownership of the key.
        pub fn into_key(self) -> K {
            self.key
        }

        /// Changes the key type of this `VacantEntry` to another key `X` that
        /// still maps to the same index in an `IdMap`.
        ///
        /// # Panics
        ///
        /// Panics if the resulting mapped key from `f` does not return the
        /// same value for [`EntryKey::get_key_index`] as the old key did.
        pub(crate) fn map_key<X, F>(self, f: F) -> VacantEntry<'a, X, T>
        where
            K: EntryKey,
            X: EntryKey,
            F: FnOnce(K) -> X,
        {
            let idx = self.key.get_key_index();
            let key = f(self.key);
            assert_eq!(idx, key.get_key_index());
            VacantEntry { key, id_map: self.id_map }
        }
    }

    /// A view into an occupied entry in a map. It is part of the
    /// [`Entry`] enum.
    pub struct OccupiedEntry<'a, K, T> {
        key: K,
        id_map: &'a mut IdMap<T>,
    }

    impl<'a, K: EntryKey, T> OccupiedEntry<'a, K, T> {
        /// Gets a reference to the key in the entry.
        pub fn key(&self) -> &K {
            &self.key
        }

        /// Gets a reference to the value in the entry.
        pub fn get(&self) -> &T {
            // we can unwrap because value is always Some for OccupiedEntry
            self.id_map.get(self.key.get_key_index()).unwrap()
        }

        /// Gets a mutable reference to the value in the entry.
        ///
        /// If you need a reference to the `OccupiedEntry` which may outlive the
        /// destruction of the entry value, see [`OccupiedEntry::into_mut`].
        pub fn get_mut(&mut self) -> &mut T {
            // we can unwrap because value is always Some for OccupiedEntry
            self.id_map.get_mut(self.key.get_key_index()).unwrap()
        }

        /// Converts the `OccupiedEntry` into a mutable reference to the value
        /// in the entry with a lifetime bound to the map itself.
        ///
        /// If you need multiple references to the `OccupiedEntry`, see
        /// [`OccupiedEntry::get_mut`].
        pub fn into_mut(self) -> &'a mut T {
            // we can unwrap because value is always Some for OccupiedEntry
            self.id_map.get_mut(self.key.get_key_index()).unwrap()
        }

        /// Sets the value of the entry, and returns the entry's old value.
        pub fn insert(&mut self, value: T) -> T {
            // we can unwrap because value is always Some for OccupiedEntry
            self.id_map.insert(self.key.get_key_index(), value).unwrap()
        }

        /// Takes the value out of the entry, and returns it.
        pub fn remove(self) -> T {
            // we can unwrap because value is always Some for OccupiedEntry
            self.id_map.remove(self.key.get_key_index()).unwrap()
        }

        /// Changes the key type of this `OccupiedEntry` to another key `X` that
        /// still maps to the same index in an `IdMap`.
        ///
        /// # Panics
        ///
        /// Panics if the resulting mapped key from `f` does not return the
        /// same value for [`EntryKey::get_key_index`] as the old key did.
        pub(crate) fn map_key<X, F>(self, f: F) -> OccupiedEntry<'a, X, T>
        where
            K: EntryKey,
            X: EntryKey,
            F: FnOnce(K) -> X,
        {
            let idx = self.key.get_key_index();
            let key = f(self.key);
            assert_eq!(idx, key.get_key_index());
            OccupiedEntry { key, id_map: self.id_map }
        }
    }

    /// A view into an in-place entry in a map that can be vacant or occupied.
    pub enum Entry<'a, K, T> {
        Vacant(VacantEntry<'a, K, T>),
        Occupied(OccupiedEntry<'a, K, T>),
    }

    impl<'a, K: EntryKey, T> Entry<'a, K, T> {
        /// Returns a reference to this entry's key.
        pub fn key(&self) -> &K {
            match self {
                Entry::Vacant(e) => e.key(),
                Entry::Occupied(e) => e.key(),
            }
        }

        /// Ensures a value is in the entry by inserting `default` if empty,
        /// and returns a mutable reference to the value in the entry.
        pub fn or_insert(self, default: T) -> &'a mut T
        where
            K: EntryKey,
        {
            match self {
                Entry::Vacant(e) => e.insert(default),
                Entry::Occupied(e) => e.into_mut(),
            }
        }

        /// Ensures a value is in the entry by inserting the result of the
        /// function `f` if empty, and returns a mutable reference to the value
        /// in the entry.
        pub fn or_insert_with<F: FnOnce() -> T>(self, f: F) -> &'a mut T
        where
            K: EntryKey,
        {
            match self {
                Entry::Vacant(e) => e.insert(f()),
                Entry::Occupied(e) => e.into_mut(),
            }
        }

        /// Ensures a value is in the entry by inserting the default value if
        /// empty, and returns a mutable reference to the value in the entry.
        pub fn or_default(self) -> &'a mut T
        where
            T: Default,
            K: EntryKey,
        {
            self.or_insert_with(<T as Default>::default)
        }

        /// Provides in-place mutable access to an occupied entry before any
        /// potential inserts into the map.
        pub fn and_modify<F: FnOnce(&mut T)>(self, f: F) -> Self {
            match self {
                Entry::Vacant(e) => Entry::Vacant(e),
                Entry::Occupied(mut e) => {
                    f(e.get_mut());
                    Entry::Occupied(e)
                }
            }
        }

        /// Changes the key type of this `Entry` to another key `X` that still
        /// maps to the same index in an `IdMap`.
        ///
        /// # Panics
        ///
        /// Panics if the resulting mapped key from `f` does not return the
        /// same value for [`EntryKey::get_key_index`] as the old key did.
        pub(crate) fn map_key<X, F>(self, f: F) -> Entry<'a, X, T>
        where
            K: EntryKey,
            X: EntryKey,
            F: FnOnce(K) -> X,
        {
            match self {
                Entry::Vacant(e) => Entry::Vacant(e.map_key(f)),
                Entry::Occupied(e) => Entry::Occupied(e.map_key(f)),
            }
        }
    }

    #[cfg(test)]
    mod tests {
        use super::IdMapEntry::{self, Allocated, Free};
        use super::{Entry, IdMap};
        use super::{FreeList, FreeListLink};

        // Smart constructors for expected freelist entries in assertions.
        fn free<T>(prev: usize, next: usize) -> IdMapEntry<T> {
            Free(FreeListLink { prev: Some(prev), next: Some(next) })
        }

        fn free_head<T>(next: usize) -> IdMapEntry<T> {
            Free(FreeListLink { prev: None, next: Some(next) })
        }

        fn free_tail<T>(prev: usize) -> IdMapEntry<T> {
            Free(FreeListLink { prev: Some(prev), next: None })
        }

        fn free_none<T>() -> IdMapEntry<T> {
            Free(FreeListLink::default())
        }

        #[test]
        fn test_push() {
            let mut map = IdMap::new();
            map.insert(1, 2);
            assert_eq!(map.data, vec![free_none(), Allocated(2)]);
            assert_eq!(map.freelist, Some(FreeList::singleton(0)));
            assert_eq!(map.push(1), 0);
            assert_eq!(map.data, vec![Allocated(1), Allocated(2)]);
            assert_eq!(map.freelist, None);
            assert_eq!(map.push(3), 2);
            assert_eq!(map.data, vec![Allocated(1), Allocated(2), Allocated(3)]);
            assert_eq!(map.freelist, None);
        }

        #[test]
        fn test_get() {
            let mut map = IdMap::new();
            map.push(1);
            map.insert(2, 3);
            assert_eq!(map.data, vec![Allocated(1), free_none(), Allocated(3)]);
            assert_eq!(map.freelist, Some(FreeList::singleton(1)));
            assert_eq!(*map.get(0).unwrap(), 1);
            assert!(map.get(1).is_none());
            assert_eq!(*map.get(2).unwrap(), 3);
            assert!(map.get(3).is_none());
        }

        #[test]
        fn test_get_mut() {
            let mut map = IdMap::new();
            map.push(1);
            map.insert(2, 3);
            assert_eq!(map.data, vec![Allocated(1), free_none(), Allocated(3)]);
            assert_eq!(map.freelist, Some(FreeList::singleton(1)));
            *map.get_mut(2).unwrap() = 10;
            assert_eq!(*map.get(0).unwrap(), 1);
            assert_eq!(*map.get(2).unwrap(), 10);
            assert!(map.get_mut(1).is_none());
            assert!(map.get_mut(3).is_none());
        }

        #[test]
        fn test_is_empty() {
            let mut map =
IdMap::<i32>::new(); assert!(map.is_empty()); map.push(1); assert!(!map.is_empty()); } #[test] fn test_remove() { let mut map = IdMap::new(); map.push(1); map.push(2); map.push(3); assert_eq!(map.data, vec![Allocated(1), Allocated(2), Allocated(3)]); assert_eq!(map.freelist, None); assert_eq!(map.remove(1).unwrap(), 2); assert!(map.remove(1).is_none()); assert_eq!(map.data, vec![Allocated(1), free_none(), Allocated(3)]); assert_eq!(map.freelist, Some(FreeList::singleton(1))); } #[test] fn test_remove_compress() { let mut map = IdMap::new(); map.insert(0, 1); map.insert(2, 3); assert_eq!(map.data, vec![Allocated(1), free_none(), Allocated(3)]); assert_eq!(map.freelist, Some(FreeList::singleton(1))); assert_eq!(map.remove(2).unwrap(), 3); assert_eq!(map.data, vec![Allocated(1)]); assert_eq!(map.freelist, None); assert_eq!(map.remove(0).unwrap(), 1); assert!(map.data.is_empty()); } #[test] fn test_insert() { let mut map = IdMap::new(); assert!(map.insert(1, 2).is_none()); assert_eq!(map.data, vec![free_none(), Allocated(2)]); assert_eq!(map.freelist, Some(FreeList::singleton(0))); assert!(map.insert(3, 4).is_none()); assert_eq!(map.data, vec![free_head(2), Allocated(2), free_tail(0), Allocated(4)]); assert_eq!(map.freelist, Some(FreeList { head: 0, tail: 2 })); assert!(map.insert(0, 1).is_none()); assert_eq!(map.data, vec![Allocated(1), Allocated(2), free_none(), Allocated(4)]); assert_eq!(map.freelist, Some(FreeList::singleton(2))); assert_eq!(map.insert(3, 5).unwrap(), 4); assert_eq!(map.data, vec![Allocated(1), Allocated(2), free_none(), Allocated(5)]); assert_eq!(map.freelist, Some(FreeList::singleton(2))); } #[test] fn test_iter() { let mut map = IdMap::new(); map.insert(1, 0); map.insert(3, 1); map.insert(6, 2); assert_eq!( map.data, vec![ free_head(2), Allocated(0), free(0, 4), Allocated(1), free(2, 5), free_tail(4), Allocated(2), ] ); assert_eq!(map.freelist, Some(FreeList { head: 0, tail: 5 })); let mut c = 0; for (i, (k, v)) in map.iter().enumerate() { 
assert_eq!(i, *v as usize); assert_eq!(map.get(k).unwrap(), v); c += 1; } assert_eq!(c, 3); } #[test] fn test_iter_mut() { let mut map = IdMap::new(); map.insert(1, 0); map.insert(3, 1); map.insert(6, 2); assert_eq!( map.data, vec![ free_head(2), Allocated(0), free(0, 4), Allocated(1), free(2, 5), free_tail(4), Allocated(2), ] ); assert_eq!(map.freelist, Some(FreeList { head: 0, tail: 5 })); for (k, v) in map.iter_mut() { *v += k as u32; } assert_eq!( map.data, vec![ free_head(2), Allocated(1), free(0, 4), Allocated(4), free(2, 5), free_tail(4), Allocated(8), ] ); assert_eq!(map.freelist, Some(FreeList { head: 0, tail: 5 })); } #[test] fn test_entry() { let mut map = IdMap::new(); assert_eq!(*map.entry(1).or_insert(2), 2); assert_eq!(map.data, vec![free_none(), Allocated(2)]); assert_eq!(map.freelist, Some(FreeList::singleton(0))); assert_eq!( *map.entry(1) .and_modify(|v| { *v = 10; }) .or_insert(5), 10 ); assert_eq!(map.data, vec![free_none(), Allocated(10)]); assert_eq!(map.freelist, Some(FreeList::singleton(0))); assert_eq!( *map.entry(2) .and_modify(|v| { *v = 10; }) .or_insert(5), 5 ); assert_eq!(map.data, vec![free_none(), Allocated(10), Allocated(5)]); assert_eq!(map.freelist, Some(FreeList::singleton(0))); assert_eq!(*map.entry(4).or_default(), 0); assert_eq!( map.data, vec![free_head(3), Allocated(10), Allocated(5), free_tail(0), Allocated(0)] ); assert_eq!(map.freelist, Some(FreeList { head: 0, tail: 3 })); assert_eq!(*map.entry(3).or_insert_with(|| 7), 7); assert_eq!( map.data, vec![free_none(), Allocated(10), Allocated(5), Allocated(7), Allocated(0)] ); assert_eq!(map.freelist, Some(FreeList::singleton(0))); assert_eq!(*map.entry(0).or_insert(1), 1); assert_eq!( map.data, vec![Allocated(1), Allocated(10), Allocated(5), Allocated(7), Allocated(0)] ); assert_eq!(map.freelist, None); match map.entry(0) { Entry::Occupied(mut e) => { assert_eq!(*e.key(), 0); assert_eq!(*e.get(), 1); *e.get_mut() = 2; assert_eq!(*e.get(), 2); assert_eq!(e.remove(), 2); } _ 
=> panic!("Wrong entry type, should be occupied"), } assert_eq!( map.data, vec![free_none(), Allocated(10), Allocated(5), Allocated(7), Allocated(0)] ); assert_eq!(map.freelist, Some(FreeList::singleton(0))); match map.entry(0) { Entry::Vacant(mut e) => { assert_eq!(*e.key(), 0); assert_eq!(*e.insert(4), 4); } _ => panic!("Wrong entry type, should be vacant"), } assert_eq!( map.data, vec![Allocated(4), Allocated(10), Allocated(5), Allocated(7), Allocated(0)] ); assert_eq!(map.freelist, None) } #[test] fn test_freelist_order() { let mut rng = crate::testutil::new_rng(1234981); use rand::seq::SliceRandom; const NELEMS: usize = 1_000; for _ in 0..1_000 { let mut map = IdMap::new(); for i in 0..NELEMS { assert_eq!(map.push(i), i); } // don't remove the last one to prevent compressing. let mut remove_seq: Vec<usize> = (0..NELEMS - 1).collect(); remove_seq.shuffle(&mut rng); for i in &remove_seq { map.remove(*i); } for i in remove_seq.iter().rev() { // We should be able to push into the array in the same order. assert_eq!(map.push(*i), *i); } map.remove(NELEMS - 1); for i in &remove_seq { map.remove(*i); } assert!(map.is_empty()); } } #[test] fn test_compress_freelist() { let mut map = IdMap::new(); for _ in 0..100 { map.push(0); } for i in 0..100 { map.remove(i); } assert_eq!(map.data.len(), 0); assert_eq!(map.freelist, None); } #[test] fn test_insert_beyond_end_freelist() { let mut map = IdMap::new(); for i in 0..10 { map.insert(2 * i + 1, 0); } for i in 0..10 { assert_eq!(map.push(1), 2 * i); } } #[test] fn test_double_free() { const MAX_KEY: usize = 100; let mut map1 = IdMap::new(); map1.insert(MAX_KEY, 2); let mut map2 = IdMap::new(); map2.insert(MAX_KEY, 2); for i in 0..MAX_KEY { assert!(map1.remove(i).is_none()); // Removing an already free entry should be a no-op. assert_eq!(map1.data, map2.data); assert_eq!(map1.freelist, map2.freelist); } } } } /// Identifier map collection data structure. 
/// /// Defines [`IdMapCollection`], which is a generic map collection that can be /// keyed on [`IdMapCollectionKey`], which is a two-level key structure. /// /// Used to provide collections keyed on [`crate::DeviceId`] that match hot path /// performance requirements. pub mod id_map_collection { use super::id_map::{Entry, EntryKey}; use super::IdMap; /// A key that can index items in [`IdMapCollection`]. /// /// An `IdMapCollectionKey` is a key with two levels: `variant` and `id`. /// The number of `variant`s must be fixed and known at compile time, and is /// typically mapped to a number of `enum` variants (nested or not). pub trait IdMapCollectionKey { /// The number of variants this key supports. const VARIANT_COUNT: usize; /// Get the variant index for this key. /// /// # Panics /// /// Callers may assume that `get_variant` returns a value in the range /// `[0, VARIANT_COUNT)`, and may panic if that assumption is violated. fn get_variant(&self) -> usize; /// Get the id index for this key. fn get_id(&self) -> usize; } impl<O> EntryKey for O where O: IdMapCollectionKey, { fn get_key_index(&self) -> usize { <O as IdMapCollectionKey>::get_id(self) } } /// A generic collection indexed by an [`IdMapCollectionKey`]. /// /// `IdMapCollection` provides the same performance guarantees as [`IdMap`], /// but provides a two-level keying scheme that matches the pattern used /// in [`crate::DeviceId`]. pub struct IdMapCollection<K: IdMapCollectionKey, T> { // TODO(brunodalbo): we define a vector container here because we can't // just define a fixed array length based on an associated const in // IdMapCollectionKey. When rust issue #43408 gets resolved we can // switch this to use the associated const and just have a fixed length // array. data: Vec<IdMap<T>>, _marker: std::marker::PhantomData<K>, } impl<K: IdMapCollectionKey, T> IdMapCollection<K, T> { /// Creates a new empty `IdMapCollection`. 
pub fn new() -> Self { let mut data = Vec::new(); data.resize_with(K::VARIANT_COUNT, IdMap::default); Self { data, _marker: std::marker::PhantomData } } fn get_map(&self, key: &K) -> &IdMap<T> { &self.data[key.get_variant()] } fn get_map_mut(&mut self, key: &K) -> &mut IdMap<T> { &mut self.data[key.get_variant()] } /// Returns `true` if the `IdMapCollection` holds no items. pub fn is_empty(&self) -> bool { self.data.iter().all(|d| d.is_empty()) } /// Returns a reference to the item indexed by `key`, or `None` if /// the `key` doesn't exist. pub fn get(&self, key: &K) -> Option<&T> { self.get_map(key).get(key.get_id()) } /// Returns a mutable reference to the item indexed by `key`, or `None` /// if the `key` doesn't exist. pub fn get_mut(&mut self, key: &K) -> Option<&mut T> { self.get_map_mut(key).get_mut(key.get_id()) } /// Removes item indexed by `key` from the container. /// /// Returns the removed item if it exists, or `None` otherwise. pub fn remove(&mut self, key: &K) -> Option<T> { self.get_map_mut(key).remove(key.get_id()) } /// Inserts `item` at `key`. /// /// If the [`IdMapCollection`] already contained an item indexed by /// `key`, `insert` returns it, or `None` otherwise. pub fn insert(&mut self, key: &K, item: T) -> Option<T> { self.get_map_mut(key).insert(key.get_id(), item) } /// Creates an iterator over the containing items. pub fn iter(&self) -> impl Iterator<Item = &T> { self.data.iter().flat_map(|m| m.iter()).map(|(_, v)| v) } /// Creates a mutable iterator over the containing items. pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut T> { self.data.iter_mut().flat_map(|m| m.iter_mut()).map(|(_, v)| v) } /// Gets the given key's corresponding entry in the map for in-place /// manipulation. 
pub fn entry(&mut self, key: K) -> Entry<'_, K, T> { self.get_map_mut(&key).entry(key.get_id()).map_key(|_| key) } } impl<K: IdMapCollectionKey, T> Default for IdMapCollection<K, T> { fn default() -> Self { Self::new() } } #[cfg(test)] mod tests { use super::*; #[derive(Copy, Clone, Eq, PartialEq, Debug)] enum MockVariants { A, B, C, } #[derive(Copy, Clone, Eq, PartialEq, Debug)] struct MockKey { id: usize, var: MockVariants, } impl MockKey { const fn new(id: usize, var: MockVariants) -> Self { Self { id, var } } } impl IdMapCollectionKey for MockKey { const VARIANT_COUNT: usize = 3; fn get_variant(&self) -> usize { match self.var { MockVariants::A => 0, MockVariants::B => 1, MockVariants::C => 2, } } fn get_id(&self) -> usize { self.id } } type TestCollection = IdMapCollection<MockKey, i32>; const KEY_A: MockKey = MockKey::new(0, MockVariants::A); const KEY_B: MockKey = MockKey::new(2, MockVariants::B); const KEY_C: MockKey = MockKey::new(4, MockVariants::C); #[test] fn test_insert_and_get() { let mut t = TestCollection::new(); assert!(t.data[0].is_empty()); assert!(t.data[1].is_empty()); assert!(t.data[2].is_empty()); assert!(t.insert(&KEY_A, 1).is_none()); assert!(!t.data[0].is_empty()); assert!(t.insert(&KEY_B, 2).is_none()); assert!(!t.data[1].is_empty()); assert_eq!(*t.get(&KEY_A).unwrap(), 1); assert!(t.get(&KEY_C).is_none()); *t.get_mut(&KEY_B).unwrap() = 3; assert_eq!(*t.get(&KEY_B).unwrap(), 3); } #[test] fn test_remove() { let mut t = TestCollection::new(); assert!(t.insert(&KEY_B, 15).is_none()); assert_eq!(t.remove(&KEY_B).unwrap(), 15); assert!(t.remove(&KEY_B).is_none()); } #[test] fn test_iter() { let mut t = TestCollection::new(); assert!(t.insert(&KEY_A, 15).is_none()); assert!(t.insert(&KEY_B, -5).is_none()); assert!(t.insert(&KEY_C, -10).is_none()); let mut c = 0; let mut sum = 0; for i in t.iter() { c += 1; sum += *i; } assert_eq!(c, 3); assert_eq!(sum, 0); } #[test] fn test_is_empty() { let mut t = TestCollection::new(); assert!(t.is_empty()); 
assert!(t.insert(&KEY_B, 15).is_none()); assert!(!t.is_empty()); } #[test] fn test_iter_mut() { let mut t = TestCollection::new(); assert!(t.insert(&KEY_A, 15).is_none()); assert!(t.insert(&KEY_B, -5).is_none()); assert!(t.insert(&KEY_C, -10).is_none()); for i in t.iter_mut() { *i *= 2; } assert_eq!(*t.get(&KEY_A).unwrap(), 30); assert_eq!(*t.get(&KEY_B).unwrap(), -10); assert_eq!(*t.get(&KEY_C).unwrap(), -20); } #[test] fn test_entry() { let mut t = TestCollection::new(); assert_eq!(*t.entry(KEY_A).or_insert(2), 2); assert_eq!( *t.entry(KEY_A) .and_modify(|v| { *v = 10; }) .or_insert(5), 10 ); assert_eq!( *t.entry(KEY_B) .and_modify(|v| { *v = 10; }) .or_insert(5), 5 ); assert_eq!(*t.entry(KEY_C).or_insert_with(|| 7), 7); assert_eq!(*t.entry(KEY_C).key(), KEY_C); assert_eq!(*t.get(&KEY_A).unwrap(), 10); assert_eq!(*t.get(&KEY_B).unwrap(), 5); assert_eq!(*t.get(&KEY_C).unwrap(), 7); } } }
use std::env; use std::path; pub fn path() -> path::PathBuf { let file_path = file_path(); let path = std::path::PathBuf::from(&file_path); if path.is_absolute() { return path; } let current_dir = current_dir(); let mut path = std::path::PathBuf::from(current_dir); path.push(file_path); path } fn current_dir() -> String { let current_dir = env::current_dir().expect("Failed to get current dir"); let current_dir = current_dir.to_str().expect("Failed to get current dir"); String::from(current_dir) } fn file_path() -> String { let args: Vec<_> = std::env::args().collect(); args.get(1).unwrap_or(&String::from("sudoku.txt")).to_owned() }
use std::io::{self, BufRead}; fn main() { let mut vec: Vec<_> = io::stdin() .lock() .lines() .filter_map(|x| x.ok()) .map(|x| { x.replace("F", "0") .replace("B", "1") .replace("L", "0") .replace("R", "1") }) .filter_map(|x| u32::from_str_radix(&x, 2).ok()) .collect(); vec.sort_unstable(); println!("Part 1: {}", vec.last().unwrap()); let result = vec .iter() .zip(vec[1..].iter()) .filter(|(&x, &y)| x + 2 == y) .map(|(x, _)| x + 1) .next() .unwrap(); println!("Part 2: {}", result) }
#[macro_use] extern crate structopt; use structopt::StructOpt; #[derive(StructOpt, Debug)] #[structopt(name = "jabir", about = "Chemical elements analyser")] enum Jabir { #[structopt(name = "show", about = "Show element")] Show { element: String, } } fn main() { let matches = Jabir::from_args(); println!("{:?}", matches); match matches { Jabir::Show { element } => { println!("Element is {}", element); } } }
use std::cmp::{max, min}; fn main() { let input = include_str!("../data/2015-14.txt"); println!("Part 1: {}", part1(input, 2503)); println!("Part 2: {}", part2(input, 2503)); } fn part1(input: &str, secs: i32) -> i32 { let deers = parse(input); deers.iter().fold(0, |best, deer| { let cycles = secs / (deer.fly_max + deer.rest_max); let remaining_secs = secs % (deer.fly_max + deer.rest_max); let mut dist = cycles * (deer.speed * deer.fly_max); dist += deer.speed * min(deer.fly_max, remaining_secs); max(best, dist) }) } fn part2(input: &str, secs: i32) -> i32 { let mut deers = parse(input); for _ in 0..secs { let best = deers.iter_mut().fold(0, |best, deer| { deer.status += 1; if deer.status <= deer.fly_max { deer.distance += deer.speed; } if deer.status == deer.fly_max + deer.rest_max { deer.status = 0; } max(best, deer.distance) }); for deer in &mut deers { if deer.distance == best { deer.score += 1; } } } deers.iter().fold(0, |best, deer| max(best, deer.score)) } struct Deer { speed: i32, fly_max: i32, rest_max: i32, status: i32, distance: i32, score: i32 } fn parse(input: &str) -> Vec<Deer> { let mut deers = Vec::new(); for line in input.lines() { let parts: Vec<_> = line.split(' ').collect(); deers.push(Deer { speed: parts[3].parse().unwrap(), fly_max: parts[6].parse().unwrap(), rest_max: parts[13].parse().unwrap(), status: 0, distance: 0, score: 0 }); } deers } #[test] fn test1() { let input = "\ Comet can fly 14 km/s for 10 seconds, but then must rest for 127 seconds. Dancer can fly 16 km/s for 11 seconds, but then must rest for 162 seconds."; assert_eq!(part1(input, 1000), 1120); } #[test] fn test2() { let input = "\ Comet can fly 14 km/s for 10 seconds, but then must rest for 127 seconds. Dancer can fly 16 km/s for 11 seconds, but then must rest for 162 seconds."; assert_eq!(part2(input, 1000), 689); }
//! Light setup structure. use arctk::clone; use arctk_attr::input; /// Lighting structure. #[input] pub struct Light { /// Ambient lighting fraction. ambient: f64, /// Diffuse lighting fraction. diffuse: f64, /// Specular lighting fraction. specular: f64, /// Specular lighting power. spec_pow: i32, } impl Light { clone!(ambient, f64); clone!(diffuse, f64); clone!(specular, f64); clone!(spec_pow, i32); /// Construct a new instance. #[inline] #[must_use] pub const fn new(ambient: f64, diffuse: f64, specular: f64, spec_pow: i32) -> Self { Self { ambient, diffuse, specular, spec_pow, } } }
// mod tests; // use tests::BOOK; mod book; use book::BOOK; mod exth_header { use super::*; use mobi::{BookInfo, ExtHeader, Header, HeaderData}; use std::collections::HashMap; #[test] fn parse() { let records: HashMap<u32, String> = [ (101, String::from("HarperCollins Publishers Ltd")), (103, String::from("<h3>From Library Journal</h3><p>New Line Cinema will be releasing \"The Lord of the Rings\" trilogy in three separate installments, and Houghton Mifflin Tolkien\'s U.S. publisher since the release of The Hobbit in 1938 will be re-releasing each volume of the trilogy separately and in a boxed set (ISBN 0-618-15397-7. $22; pap. ISBN 0-618-15396-9. $12). <br />Copyright 2001 Reed Business Information, Inc. </p><h3>Review</h3><p>\'An extraordinary book. It deals with a stupendous theme. It leads us through a succession of strange and astonishing episodes, some of them magnificent, in a region where everything is invented, forest, moor, river, wilderness, town and the races which inhabit them.\' The Observer \'Among the greatest works of imaginative fiction of the twentieth century.\' Sunday Telegraph </p>")), (100, String::from("J. R. R. Tolkien")), (503, String::from("Lord of the Rings - Fellowship of the Ring")), (106, String::from("2010-12-21T00:00:00+00:00")), (108, String::from("calibre (0.7.31) [http://calibre-ebook.com]")), (104, String::from("9780261102316")), (106, String::from("2010-12-21T00:00:00+00:00")), (201, String::from("\u{0}\u{0}\u{0}\u{0}")), (203, String::from("\u{0}\u{0}\u{0}\u{0}")), (202, String::from("\u{0}\u{0}\u{0}\u{1}")), ].iter().cloned().collect(); let extheader = ExtHeader { identifier: 1163416648, header_length: 1109, record_count: 11, records, }; let parsed_header = ExtHeader::parse( BOOK, Header::get_headers_u16(BOOK, HeaderData::NumOfRecords).unwrap(), ) .unwrap(); assert_eq!(extheader, parsed_header); } mod records { use super::*; macro_rules! 
info { ($t: ident, $s: expr) => { let exth = ExtHeader::parse(BOOK, 292).unwrap(); let data = exth.get_book_info(BookInfo::$t); assert_eq!(data, Some(&String::from($s))); }; } #[test] fn author() { info!(Author, "J. R. R. Tolkien"); } #[test] fn publisher() { info!(Publisher, "HarperCollins Publishers Ltd"); } #[test] fn description() { info!(Description, "<h3>From Library Journal</h3><p>New Line Cinema will be releasing \"The Lord of the Rings\" trilogy in three separate installments, and Houghton Mifflin Tolkien\'s U.S. publisher since the release of The Hobbit in 1938 will be re-releasing each volume of the trilogy separately and in a boxed set (ISBN 0-618-15397-7. $22; pap. ISBN 0-618-15396-9. $12). <br />Copyright 2001 Reed Business Information, Inc. </p><h3>Review</h3><p>\'An extraordinary book. It deals with a stupendous theme. It leads us through a succession of strange and astonishing episodes, some of them magnificent, in a region where everything is invented, forest, moor, river, wilderness, town and the races which inhabit them.\' The Observer \'Among the greatest works of imaginative fiction of the twentieth century.\' Sunday Telegraph </p>"); } #[test] fn isbn() { info!(Isbn, "9780261102316"); } #[test] fn publish_date() { info!(PublishDate, "2010-12-21T00:00:00+00:00"); } #[test] fn contributor() { info!(Contributor, "calibre (0.7.31) [http://calibre-ebook.com]"); } #[test] fn title() { info!(Title, "Lord of the Rings - Fellowship of the Ring"); } } }
use svm_types::{Account, SpawnAccount, TemplateAddr}; use crate::spawn; /// Builds a binary representation for [`SpawnAccount`] /// /// Should be used mainly for testing only. pub struct SpawnBuilder { version: Option<u16>, template: Option<TemplateAddr>, name: Option<String>, ctor_name: Option<String>, calldata: Option<Vec<u8>>, } /// /// # Examples /// /// ```rust /// use std::io::Cursor; /// /// use svm_types::{Account, SpawnAccount, TemplateAddr}; /// use svm_codec::api::builder::SpawnBuilder; /// use svm_codec::spawn; /// /// let template_addr = TemplateAddr::of("@template"); /// let name = "My Account".to_string(); /// let ctor_name = "initialize"; /// let calldata = vec![0x10, 0x20, 0x30]; /// /// let bytes = SpawnBuilder::new() /// .with_version(0) /// .with_template(&template_addr) /// .with_name(&name) /// .with_ctor(ctor_name) /// .with_calldata(&calldata) /// .build(); /// /// let mut cursor = Cursor::new(&bytes[..]); /// let actual = spawn::decode(&mut cursor).unwrap(); /// let expected = SpawnAccount { /// version: 0, /// account: Account { name, template_addr }, /// ctor_name: ctor_name.to_string(), /// calldata, /// }; /// //// assert_eq!(expected, actual); /// ``` /// #[allow(missing_docs)] impl SpawnBuilder { #[allow(clippy::new_without_default)] pub fn new() -> Self { Self { version: None, template: None, name: None, ctor_name: None, calldata: None, } } pub fn with_version(mut self, version: u16) -> Self { self.version = Some(version); self } pub fn with_template(mut self, template: &TemplateAddr) -> Self { self.template = Some(template.clone()); self } pub fn with_name(mut self, name: &str) -> Self { self.name = Some(name.to_string()); self } pub fn with_ctor(mut self, ctor_name: &str) -> Self { self.ctor_name = Some(ctor_name.to_string()); self } pub fn with_calldata(mut self, calldata: &[u8]) -> Self { self.calldata = Some(calldata.to_vec()); self } pub fn build(self) -> Vec<u8> { let version = self.version.unwrap(); let template_addr = 
self.template.unwrap(); let name = self.name.unwrap(); let ctor_name = self.ctor_name.unwrap(); let calldata = match self.calldata { None => vec![], Some(calldata) => calldata.to_vec(), }; let spawn = SpawnAccount { version, account: Account::new(template_addr, name), ctor_name, calldata, }; let mut w = Vec::new(); spawn::encode(&spawn, &mut w); w } }
//! This crate implements a structure that can be used as a generic array type.use //! Core Rust array types `[T; N]` can't be used generically with respect to `N`, so for example this: //! //! ```{should_fail} //! struct Foo<T, N> { //! data: [T; N] //! } //! ``` //! //! won't work. //! //! **generic-array** exports a `GenericArray<T,N>` type, which lets the above be implemented as: //! //! ``` //!# use generic_array::{ArrayLength, GenericArray}; //! struct Foo<T, N: ArrayLength<T>> { //! data: GenericArray<T,N> //! } //! ``` //! //! The `ArrayLength<T>` trait is implemented by default for [unsigned integer types](../typenum/uint/index.html) from [typenum](../typenum/index.html). //! //! For ease of use, an `arr!` macro is provided - example below: //! //! ``` //! # #[macro_use] //! # extern crate generic_array; //! # extern crate typenum; //! # fn main() { //! let array = arr![u32; 1, 2, 3]; //! assert_eq!(array[2], 3); //! # } //! ``` #![cfg_attr(feature="no_std", no_std)] #[cfg(feature="no_std")] extern crate core as std; extern crate typenum; pub mod arr; use typenum::uint::{Unsigned, UTerm, UInt}; use typenum::bit::{B0, B1}; use std::marker::PhantomData; use std::mem; use std::ops::{Deref, DerefMut}; use std::ptr; use std::slice; /// Trait making GenericArray work, marking types to be used as length of an array pub unsafe trait ArrayLength<T> : Unsigned { /// Associated type representing the array type for the number type ArrayType; } unsafe impl<T> ArrayLength<T> for UTerm { type ArrayType = (); } /// Internal type used to generate a struct of appropriate size #[allow(dead_code)] #[repr(C)] pub struct GenericArrayImplEven<T, U> { parent1: U, parent2: U, _marker: PhantomData<T> } impl<T: Clone, U: Clone> Clone for GenericArrayImplEven<T, U> { fn clone(&self) -> GenericArrayImplEven<T, U> { GenericArrayImplEven { parent1: self.parent1.clone(), parent2: self.parent2.clone(), _marker: PhantomData } } } impl<T: Copy, U: Copy> Copy for GenericArrayImplEven<T, U> 
{} /// Internal type used to generate a struct of appropriate size #[allow(dead_code)] #[repr(C)] pub struct GenericArrayImplOdd<T, U> { parent1: U, parent2: U, data: T } impl<T: Clone, U: Clone> Clone for GenericArrayImplOdd<T, U> { fn clone(&self) -> GenericArrayImplOdd<T, U> { GenericArrayImplOdd { parent1: self.parent1.clone(), parent2: self.parent2.clone(), data: self.data.clone() } } } impl<T: Copy, U: Copy> Copy for GenericArrayImplOdd<T, U> {} unsafe impl<T, N: ArrayLength<T>> ArrayLength<T> for UInt<N, B0> { type ArrayType = GenericArrayImplEven<T, N::ArrayType>; } unsafe impl<T, N: ArrayLength<T>> ArrayLength<T> for UInt<N, B1> { type ArrayType = GenericArrayImplOdd<T, N::ArrayType>; } /// Struct representing a generic array - GenericArray<T, N> works like [T; N] #[allow(dead_code)] pub struct GenericArray<T, U: ArrayLength<T>> { data: U::ArrayType } impl<T, N> Deref for GenericArray<T, N> where N: ArrayLength<T> { type Target = [T]; fn deref(&self) -> &[T] { unsafe { slice::from_raw_parts(self as *const Self as *const T, N::to_usize()) } } } impl<T, N> DerefMut for GenericArray<T, N> where N: ArrayLength<T> { fn deref_mut(&mut self) -> &mut [T] { unsafe { slice::from_raw_parts_mut(self as *mut Self as *mut T, N::to_usize()) } } } impl<T: Default, N> GenericArray<T, N> where N: ArrayLength<T> { /// Function constructing an array filled with default values pub fn new() -> GenericArray<T, N> { unsafe { let mut res: GenericArray<T, N> = mem::uninitialized(); for r in res.iter_mut() { ptr::write(r, T::default()) } res } } } impl<T: Clone, N> GenericArray<T, N> where N: ArrayLength<T> { /// Function constructing an array from a slice; the length of the slice must be equal to the length of the array pub fn from_slice(list: &[T]) -> GenericArray<T, N> { assert_eq!(list.len(), N::to_usize()); unsafe { let mut res: GenericArray<T, N> = mem::uninitialized(); for i in 0..N::to_usize() { ptr::write(&mut res[i], list[i].clone()) } res } } } impl<T: Clone, N> Clone for 
GenericArray<T, N> where N: ArrayLength<T> { fn clone(&self) -> GenericArray<T, N> { unsafe { let mut res: GenericArray<T, N> = mem::uninitialized(); for i in 0..N::to_usize() { ptr::write(&mut res[i], self[i].clone()) } res } } } impl<T: Copy, N> Copy for GenericArray<T, N> where N: ArrayLength<T>, N::ArrayType: Copy {}
extern crate libc; extern crate x11; pub mod config; pub mod windowsystem; use windowsystem::WindowSystem; #[allow(while_true)] fn main() { let mut window_system = WindowSystem::new(); window_system.grab_keys(); window_system.grab_buttons(); let mut exit_event = false; while !exit_event { exit_event = window_system.on_update(); } }
use std::env; use image::GenericImageView; use image::imageops; fn main() { let mut scale=1u32; let args: Vec<String> = env::args().collect(); if args.len() ==1 {println!("Please provide atleast one argument"); return} else if args.len() ==3 { scale = args[2].parse::<u32>().unwrap()} let path = &args[1]; let mut img = image::open(path).unwrap(); let w = img.dimensions().0/scale; let h = img.dimensions().1/scale; let pixel_ascii_map = "`^\",:;Il!i~+_-?][}{1)(|\\/tfjrxnuvczXYUJCLQ0OZmwqpdbkhao*#MW&8%B@$"; img =img.resize( w,h,imageops::FilterType::Lanczos3); for y in 0..h-1 { for x in 0..w-1 { let pixel = img.get_pixel(x,y); let bright:i32 = (pixel[0] as i32+pixel[1] as i32+pixel[2] as i32)/3; let brightindex = (bright * pixel_ascii_map.len() as i32 -1)/255; print!("{}",pixel_ascii_map.as_bytes()[brightindex as usize] as char); } print!("\n") } }
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2016 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. #[repr(C)] pub union ib_addr__bindgen_ty_1 { pub uib_addr8: [__u8; 16usize], pub uib_addr16: [__be16; 8usize], pub uib_addr32: [__be32; 4usize], pub uib_addr64: [__be64; 2usize], _bindgen_union_align: [u64; 2usize], pub _address: u8, }
use cocoa::base::{id, nil};
use cocoa::foundation::NSString;
use error::NSError;
use objc_bringup::NSArray;
use std::convert::From;
use std::error::Error;
use std::ffi::CStr;
use std::fmt;
use std::io::Error as IoError;
use std::mem;
use std::sync::Arc;
use sys::MTLLibrary;
use {Device, FromRaw, FromRawError, Function};

/// Safe-ish wrapper around a raw Objective-C `MTLLibrary` object pointer.
pub struct Library(id);

impl Library {
    /// Looks up a compiled function in the library by name.
    /// Returns `None` if no function with that name exists or the raw
    /// pointer returned by the runtime cannot be converted.
    pub fn new_function_with_name(&mut self, function_name: &str) -> Option<Function> {
        unsafe {
            let func_name_nsstr = NSString::alloc(nil).init_str(function_name);
            let function = self.0.newFunctionWithName(func_name_nsstr);
            FromRaw::from_raw(function).ok()
        }
    }

    /// Returns the names of all functions in this library.
    /// A name that is not valid UTF-8 is replaced by an empty string.
    pub fn function_names(&self) -> Vec<&str> {
        let names_array = unsafe { self.0.functionNames() };
        let names_len = unsafe { names_array.count() };
        let mut names_vec = vec![];
        for i in 0..names_len {
            // NOTE(review): the returned &str slices borrow storage owned by
            // the Objective-C runtime (the NSString elements), not `self` —
            // lifetime soundness relies on those objects outliving the Vec.
            // TODO confirm.
            let name = unsafe {
                CStr::from_ptr(names_array.objectAtIndex(i).UTF8String()).to_str().unwrap_or(&"")
            };
            names_vec.push(name);
        }
        names_vec
    }

    /// Returns the device this library belongs to.
    /// Panics (via the assert) if the underlying object reports nil.
    pub fn device(&self) -> &Device {
        let device = unsafe { self.0.device() };
        assert!(device != nil);
        // Reinterprets the raw `id` as `&Device`; relies on `Device` being a
        // transparent wrapper over `id` — TODO confirm that layout.
        unsafe { mem::transmute(device) }
    }

    /// Sets the debug label on the underlying Metal object.
    pub fn set_label(&mut self, label: &str) {
        unsafe { self.0.setLabel(NSString::alloc(nil).init_str(label)) }
    }

    /// Returns the debug label, or an empty string if it is not valid UTF-8.
    pub fn label(&self) -> &str {
        unsafe { CStr::from_ptr(self.0.label().UTF8String()).to_str().unwrap_or(&"") }
    }
}

impl_from_into_raw!(Library, of protocol "MTLLibrary");

/// Errors that can occur while creating or loading a `Library`.
#[derive(Debug)]
pub enum LibraryError {
    /// Shader-source compilation failed; carries the underlying `NSError`
    /// when the runtime provided one.
    SourceError(Option<Arc<NSError>>),
    /// A raw pointer could not be converted into a `Library`.
    FromRaw(FromRawError),
    /// An I/O error occurred while loading the library.
    Io(IoError)
}

impl fmt::Display for LibraryError {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // Delegates to Error::description below.
        write!(formatter, "{}", self.description())
    }
}

impl Error for LibraryError {
    fn description(&self) -> &str {
        match *self {
            // For a source error carrying an NSError, surface its domain string.
            LibraryError::SourceError(Some(ref e)) => e.domain(),
            LibraryError::SourceError(None) => "Could not parse source code.",
            LibraryError::FromRaw(_) => "Error converting library from pointer",
            LibraryError::Io(_) => "Io error while loading library",
        }
    }

    fn cause(&self) -> Option<&Error> {
        match *self {
            LibraryError::FromRaw(ref e) => {
                let e: &Error = e;
                Some(e)
            }
            LibraryError::Io(ref e) => {
                let e: &Error = e;
                Some(e)
            }
            // SourceError variants report no cause here (the wrapped NSError
            // is not exposed as a std::error::Error).
            _ => None,
        }
    }
}

impl From<NSError> for LibraryError {
    fn from(error: NSError) -> Self {
        LibraryError::SourceError(Some(Arc::new(error)))
    }
}

impl From<Option<NSError>> for LibraryError {
    fn from(error: Option<NSError>) -> Self {
        if let Some(error) = error {
            LibraryError::SourceError(Some(Arc::new(error)))
        } else {
            LibraryError::SourceError(None)
        }
    }
}

impl From<FromRawError> for LibraryError {
    fn from(error: FromRawError) -> Self {
        LibraryError::FromRaw(error)
    }
}

impl From<IoError> for LibraryError {
    fn from(error: IoError) -> Self {
        LibraryError::Io(error)
    }
}
pub use fc_consensus::FrontierBlockImport;
pub use fc_db::kv::frontier_database_dir;
use fc_mapping_sync::kv::MappingSyncWorker;
use fc_mapping_sync::SyncStrategy;
use fc_rpc::{EthTask, OverrideHandle};
pub use fc_rpc_core::types::{FeeHistoryCache, FeeHistoryCacheLimit, FilterPool};
use futures::{future, StreamExt};
use sc_client_api::{BlockchainEvents, StorageProvider};
use sc_network_sync::SyncingService;
use sc_service::error::Error as ServiceError;
use sp_api::{BlockT, ProvideRuntimeApi};
use sp_blockchain::HeaderBackend;
use sp_core::traits::SpawnEssentialNamed;
use sp_runtime::traits::{NumberFor, Zero};
use std::collections::BTreeMap;
use std::sync::{Arc, Mutex};
use std::time::Duration;

/// The ethereum-compatibility configuration used to run a node.
// NOTE: the `///` comments on the fields below double as clap `--help` text,
// so adding or editing them changes the CLI output.
#[derive(Clone, Debug, clap::Parser)]
pub struct EthConfiguration {
    /// Maximum number of logs in a query.
    #[arg(long, default_value = "10000")]
    pub max_past_logs: u32,
    /// Maximum fee history cache size.
    #[arg(long, default_value = "2048")]
    pub fee_history_limit: u64,
    // Boolean flag (no value); presumably enables the unlocked development
    // Ethereum signer — TODO confirm where it is consumed.
    #[arg(long)]
    pub enable_dev_signer: bool,
    /// The dynamic-fee pallet target gas price set by block author
    #[arg(long, default_value = "1")]
    pub target_gas_price: u64,
    /// Maximum allowed gas limit will be `block.gas_limit * execute_gas_limit_multiplier`
    /// when using eth_call/eth_estimateGas.
    #[arg(long, default_value = "10")]
    pub execute_gas_limit_multiplier: u64,
    /// Size in bytes of the LRU cache for block data.
    #[arg(long, default_value = "50")]
    pub eth_log_block_cache: usize,
    /// Size in bytes of the LRU cache for transactions statuses data.
    #[arg(long, default_value = "50")]
    pub eth_statuses_cache: usize,
}

// Frontier-specific pieces constructed up front and consumed when the
// background tasks are spawned.
pub(crate) struct FrontierPartialComponents {
    pub(crate) filter_pool: Option<FilterPool>,
    pub(crate) fee_history_cache: FeeHistoryCache,
    pub(crate) fee_history_cache_limit: FeeHistoryCacheLimit,
}

// Builds the shared filter pool and fee-history cache (both start empty).
// Always returns Ok today; the Result keeps the signature uniform with other
// service constructors.
pub(crate) fn new_frontier_partial(
    fee_history_cache_limit: u64,
) -> Result<FrontierPartialComponents, ServiceError> {
    Ok(FrontierPartialComponents {
        filter_pool: Some(Arc::new(Mutex::new(BTreeMap::new()))),
        fee_history_cache: Arc::new(Mutex::new(BTreeMap::new())),
        fee_history_cache_limit,
    })
}

// Spawns the essential Frontier background tasks: the block mapping-sync
// worker, the EthFilterApi pool maintenance task (only when a filter pool was
// built) and the fee-history cache maintenance task. Each is registered as an
// "essential" task, so its termination brings the node down.
#[allow(clippy::too_many_arguments)]
pub(crate) fn spawn_frontier_tasks<Block, Client, Backend, SE>(
    essential_task_spawner: SE,
    client: Arc<Client>,
    backend: Arc<Backend>,
    frontier_backend: Arc<fc_db::kv::Backend<Block>>,
    overrides: Arc<OverrideHandle<Block>>,
    frontier_partial_components: FrontierPartialComponents,
    sync: Arc<SyncingService<Block>>,
    pubsub_notification_sinks: Arc<
        fc_mapping_sync::EthereumBlockNotificationSinks<
            fc_mapping_sync::EthereumBlockNotification<Block>,
        >,
    >,
) where
    Block: BlockT,
    Backend: sc_client_api::Backend<Block> + 'static,
    Client: ProvideRuntimeApi<Block>
        + BlockchainEvents<Block>
        + HeaderBackend<Block>
        + StorageProvider<Block, Backend>
        + Send
        + Sync
        + 'static,
    Client::Api: sp_api::ApiExt<Block>
        + fp_rpc::EthereumRuntimeRPCApi<Block>
        + fp_rpc::ConvertTransactionRuntimeApi<Block>,
    SE: SpawnEssentialNamed,
{
    essential_task_spawner.spawn_essential(
        "frontier-mapping-sync-worker",
        Some("frontier"),
        Box::pin(
            // Drives the substrate<->ethereum block mapping. The literal
            // arguments: Duration::new(6, 0) is presumably the polling
            // interval and 3 a retry/batch parameter — TODO confirm against
            // MappingSyncWorker::new's signature.
            MappingSyncWorker::new(
                client.import_notification_stream(),
                Duration::new(6, 0),
                client.clone(),
                backend,
                overrides.clone(),
                frontier_backend,
                3,
                NumberFor::<Block>::zero(),
                SyncStrategy::Normal,
                sync,
                pubsub_notification_sinks,
            )
            // The worker is a Stream of (); drain it forever.
            .for_each(|()| future::ready(())),
        ),
    );
    let FrontierPartialComponents {
        filter_pool,
        fee_history_cache,
        fee_history_cache_limit,
    } = frontier_partial_components;
    // Spawn Frontier EthFilterApi maintenance task.
    if let Some(filter_pool) = filter_pool {
        // Each filter is allowed to stay in the pool for 100 blocks.
        const FILTER_RETAIN_THRESHOLD: u64 = 100;
        essential_task_spawner.spawn_essential(
            "frontier-filter-pool",
            Some("frontier"),
            Box::pin(EthTask::filter_pool_task(
                client.clone(),
                filter_pool,
                FILTER_RETAIN_THRESHOLD,
            )),
        );
    }
    // Spawn Frontier FeeHistory cache maintenance task.
    essential_task_spawner.spawn_essential(
        "frontier-fee-history",
        Some("frontier"),
        Box::pin(EthTask::fee_history_task(
            client,
            overrides,
            fee_history_cache,
            fee_history_cache_limit,
        )),
    );
}
use super::{AtomicLocation, AtomicUpdate, FileOperation};
use crate::error::{Error, UnderlyingError};
use std::fs;
use std::path;

impl<'a> AtomicUpdate<'a> {
    /// Initialize atomic file storage
    ///
    /// Creates the atomic base directory inside the repository plus one
    /// sub-directory per stage (replace/create/store, working and complete
    /// variants), then returns an `AtomicUpdate` holding the resolved path
    /// of each directory and an empty job queue.
    pub fn new(
        path_to_repository: &'a path::Path,
        path_to_working: &'a path::Path,
    ) -> Result<AtomicUpdate<'a>, Error> {
        // TODO: create_if_needed should return the path it created?
        // Creates `atomic_path` under `path_to_repository` if missing,
        // wrapping any IO failure in a debug+user Error.
        fn create_if_needed(
            path_to_repository: &path::Path,
            atomic_path: AtomicLocation,
        ) -> Result<(), Error> {
            let test_directory = path_to_repository.join(atomic_path.get_path());
            if test_directory.exists() == false {
                if let Err(error) = fs::create_dir(test_directory.as_path()) {
                    return Err(Error::file_error(Some(UnderlyingError::from(error)))
                        .add_debug_message(format!("Failed to create a directory when initializing the Atomic Updater, path was {}", test_directory.display()))
                        .add_user_message(format!("Failed to create a directory when initializing the repository, the path was {}", test_directory.display())));
                }
            } else {
                // This case is problematic because it means that atomic has data from an incomplete operation but did not detect it as such
            }
            Ok(())
        }
        create_if_needed(path_to_repository, AtomicLocation::Base)?;
        let path_to_atomic = path_to_repository.join(AtomicLocation::Base.get_path());
        create_if_needed(path_to_atomic.as_path(), AtomicLocation::ReplaceWorking)?;
        create_if_needed(path_to_atomic.as_path(), AtomicLocation::ReplaceComplete)?;
        create_if_needed(path_to_atomic.as_path(), AtomicLocation::ReplacePrevious)?;
        create_if_needed(path_to_atomic.as_path(), AtomicLocation::ReplaceRemove)?;
        create_if_needed(path_to_atomic.as_path(), AtomicLocation::CreateWorking)?;
        create_if_needed(path_to_atomic.as_path(), AtomicLocation::CreateComplete)?;
        create_if_needed(path_to_atomic.as_path(), AtomicLocation::StoreWorking)?;
        create_if_needed(path_to_atomic.as_path(), AtomicLocation::StoreComplete)?;
        let au = AtomicUpdate {
            path_to_create_complete: path_to_atomic.join(AtomicLocation::CreateComplete.get_path()),
            path_to_create_working: path_to_atomic.join(AtomicLocation::CreateWorking.get_path()),
            path_to_replace_working: path_to_atomic.join(AtomicLocation::ReplaceWorking.get_path()),
            path_to_replace_complete: path_to_atomic
                .join(AtomicLocation::ReplaceComplete.get_path()),
            path_to_replace_previous: path_to_atomic
                .join(AtomicLocation::ReplacePrevious.get_path()),
            path_to_replace_remove: path_to_atomic.join(AtomicLocation::ReplaceRemove.get_path()),
            path_to_store_working: path_to_atomic.join(AtomicLocation::StoreWorking.get_path()),
            path_to_store_complete: path_to_atomic.join(AtomicLocation::StoreComplete.get_path()),
            path_to_repository,
            path_to_working,
            atomic_jobs: Vec::new(),
        };
        Ok(au)
    }

    /// Builds an `AtomicUpdate` for an already-initialized repository.
    /// Performs no IO and cannot fail; assumes the stage directories exist.
    // NOTE(review): parameter order here is (working, repository) — the
    // reverse of `new` — easy to mix up at call sites; verify callers.
    pub fn load(
        path_to_working: &'a path::Path,
        path_to_repository: &'a path::Path,
    ) -> AtomicUpdate<'a> {
        let path_to_atomic = path_to_repository.join(AtomicLocation::Base.get_path());
        AtomicUpdate {
            path_to_create_complete: path_to_atomic.join(AtomicLocation::CreateComplete.get_path()),
            path_to_create_working: path_to_atomic.join(AtomicLocation::CreateWorking.get_path()),
            path_to_replace_working: path_to_atomic.join(AtomicLocation::ReplaceWorking.get_path()),
            path_to_replace_complete: path_to_atomic
                .join(AtomicLocation::ReplaceComplete.get_path()),
            path_to_replace_previous: path_to_atomic
                .join(AtomicLocation::ReplacePrevious.get_path()),
            path_to_replace_remove: path_to_atomic.join(AtomicLocation::ReplaceRemove.get_path()),
            path_to_store_working: path_to_atomic.join(AtomicLocation::StoreWorking.get_path()),
            path_to_store_complete: path_to_atomic.join(AtomicLocation::StoreComplete.get_path()),
            path_to_repository,
            path_to_working,
            atomic_jobs: Vec::new(),
        }
    }

    // Queue a replace file, returns a path that the new file should be written to
    pub fn queue_replace<S: Into<path::PathBuf>>(
        &mut self,
        file_to_replace: S,
    ) -> Result<path::PathBuf, Error> {
        // TODO: All queue operations must be done on absolute paths
        // TODO: All paths should be relative to the working path
        let file_to_replace = file_to_replace.into();
        if file_to_replace.is_absolute() == false {
            return Err(Error::invalid_parameter(None)
                .add_generic_message(format!("A path to a file to be replaced was not absolute, all paths to be processed by atomic must be absolute, the path was {}", file_to_replace.as_path().display())
            ));
        }
        // NOTE(review): `ok_or` builds the Error eagerly even on success;
        // `ok_or_else` would defer it.
        let file_name = file_to_replace.file_name().ok_or(
            Error::invalid_parameter(None).add_generic_message("Path did not have a file name"),
        )?;
        let utf8_file_name = file_name.to_str().ok_or(
            Error::invalid_parameter(None).add_generic_message("Path was not a valid UTF8 string"),
        )?;
        // The caller writes the replacement file here; the queued job records
        // the final destination.
        let path_to_return = self.path_to_replace_working.join(utf8_file_name);
        self.atomic_jobs
            .push(FileOperation::Replace(file_to_replace));
        Ok(path_to_return)
    }

    // TODO: All these functions should take a path
    // Queue an atomic file create, returns a path that should be used as the path to the new file
    pub fn queue_create<S: Into<path::PathBuf>>(
        &mut self,
        file_to_create: S,
    ) -> Result<path::PathBuf, Error> {
        // TODO: All queue operations must be done on absolute paths
        let file_to_create = file_to_create.into();
        if file_to_create.is_absolute() == false {
            return Err(Error::invalid_parameter(None)
                .add_generic_message(format!("A path to a file to be created was not absolute, all paths to be processed by atomic must be absolute, the path was {}", file_to_create.as_path().display())
            ));
        }
        let file_name = file_to_create.file_name().ok_or(
            Error::invalid_parameter(None).add_generic_message("Path did not have a file name"),
        )?;
        let utf8_file_name = file_name.to_str().ok_or(
            Error::invalid_parameter(None).add_generic_message("Path was not a valid UTF8 string"),
        )?;
        let path_to_return = self.path_to_create_working.join(utf8_file_name);
        self.atomic_jobs.push(FileOperation::Create(file_to_create));
        Ok(path_to_return)
    }

    // Queue an atomic file store, returns a PathBuf that is where the file should be stored so that atomicUpdater can find it and place it into the repository atomically
    // NOTE(review): unlike queue_replace/queue_create this does not reject
    // relative paths — TODO confirm whether that is intentional.
    pub fn queue_store<S: Into<path::PathBuf>>(
        &mut self,
        file_to_store: S,
    ) -> Result<path::PathBuf, Error> {
        // TODO: All queue operations must be done on absolute paths
        // FIXME: This is complicated since later a file that is being stored could consist of many different files
        let file_to_store = file_to_store.into();
        let file_name = file_to_store.file_name().ok_or(
            Error::invalid_parameter(None).add_generic_message("Path did not have a file name"),
        )?;
        let utf8_file_name = file_name.to_str().ok_or(
            Error::invalid_parameter(None).add_generic_message("Path was not a valid UTF8 string"),
        )?;
        let path_to_return = self.path_to_store_working.join(utf8_file_name);
        self.atomic_jobs.push(FileOperation::Store(file_to_store));
        Ok(path_to_return)
    }

    /// Consumes the AtomicUpdate and updates each file pair that was
    /// registered atomically, call this when all file IO has been
    /// completed on the temporary files.
    pub fn complete(self) -> Result<(), Error> {
        // We process each stage one at a time progressing all files through them
        if let Err(error) = self.process_first_stage() {
            return Err(error.add_debug_message(format!("Stage one of the atomic update process failed, the repository is unchanged, the operation might be recoverable"))
                .add_user_message(format!("Atomic update process failed, the repository is unchanged but the operation failed")));
        }
        // Stage 2 - move all snapshot files to previous
        self.process_second_stage()?;
        // Stage 3 - move all complete files to snapshot
        self.process_third_stage()?;
        // Stage 4 - move all previous files to bad
        self.process_fourth_stage()?;
        // Stage 5 - delete all bad files
        self.process_fifth_stage()?;
        Ok(())
    }

    // Stage 1: move every queued file from its "working" directory to the
    // matching "complete" directory. The repository itself is untouched, so a
    // failure here leaves everything recoverable.
    fn process_first_stage(&self) -> Result<(), Error> {
        for job in self.atomic_jobs.as_slice() {
            let file_name = job
                .get_filename()
                .map_err(|err| err.add_generic_message("During a stage one atomic operation"))?;
            match job {
                FileOperation::Create(_) => {
                    // Move from working to complete
                    let source_file = self.path_to_create_working.join(file_name);
                    let destination_file = self.path_to_create_complete.join(file_name);
                    fs::rename(source_file.as_path(), destination_file.as_path())
                        .map_err(|err| Error::file_error(Some(UnderlyingError::from(err)))
                            .add_debug_message(format!("A file rename failed while processing stage one of an atomic update, renaming {} to {} failed, the file operation was {:#?}", source_file.display(), destination_file.display(), job))
                            .add_user_message(format!("A file rename failed, failed to rename {} to {}", source_file.display(), destination_file.display()))
                        )?;
                }
                FileOperation::Replace(_) => {
                    // Move from working to complete
                    fs::rename(
                        self.path_to_replace_working.join(file_name),
                        self.path_to_replace_complete.join(file_name),
                    )
                    .map_err(|err| {
                        Error::file_error(Some(UnderlyingError::from(err))).add_generic_message(
                            "File rename failed during a stage one replace operation",
                        )
                    })?;
                }
                FileOperation::Store(_) => {
                    fs::rename(
                        self.path_to_store_working.join(file_name),
                        self.path_to_store_complete.join(file_name),
                    )
                    .map_err(|err| {
                        Error::file_error(Some(UnderlyingError::from(err))).add_generic_message(
                            "File rename failed during a stage one store operation",
                        )
                    })?;
                }
            }
        }
        Ok(())
    }

    // Move the file being replaced
    // Stage 2: for Replace jobs only, move the file currently in place into
    // the "previous" directory so it can be restored or later removed.
    fn process_second_stage(&self) -> Result<(), Error> {
        for job in self.atomic_jobs.as_slice() {
            match job {
                FileOperation::Create(_) => {
                    // This is a no-op since we are not replacing a file
                }
                FileOperation::Replace(path_to_file) => {
                    // Move from current to previous
                    let file_name = job.get_filename()?;
                    let destination = self.path_to_replace_previous.join(file_name);
                    fs::rename(path_to_file, destination.as_path())
                        .map_err(|err| Error::file_error(Some(UnderlyingError::from(err)))
                            .add_user_message("An atomic operation failed (specifically a file rename) while trying to update the repository, the repository was not changed")
                            .add_debug_message(format!("A file rename failed while performing an atomic operation, the file {} was being renamed to {}",path_to_file.display(), destination.as_path().display())))?;
                }
                FileOperation::Store(_) => {
                    // No op since there is no file we are replacing
                }
            }
        }
        Ok(())
    }

    // The third stage will move files into the repository, which means after this point the operation is considered a success since only cleanup may be required
    fn process_third_stage(&self) -> Result<(), Error> {
        // TODO: Ensure that all paths are resolved to UNC paths
        for job in self.atomic_jobs.as_slice() {
            // FIXME: This doesn't include sub directories in the path that the file may have been restored from
            // let relative_path = job.get_relative_path(self.path_to_working)?;
            let file_name = job.get_filename()?;
            match job {
                FileOperation::Create(path_to_file) => {
                    // Move from complete to current
                    fs::rename(self.path_to_create_complete.join(file_name), path_to_file)
                        .map_err(|err| {
                            Error::file_error(Some(UnderlyingError::from(err))).add_generic_message(
                                "File rename failed during a stage three create operation",
                            )
                        })?;
                }
                FileOperation::Replace(path_to_file) => {
                    // Move from complete to current
                    fs::rename(self.path_to_replace_complete.join(file_name), path_to_file)
                        .map_err(|err| {
                            Error::file_error(Some(UnderlyingError::from(err))).add_generic_message(
                                "File rename failed during a stage three replace operation",
                            )
                        })?;
                }
                FileOperation::Store(_) => {
                    // FIXME: The original files path is not the same as the destination path in the case of storage
                    // Move from complete to storage
                    use crate::storage::LocalStorage;
                    let path_to_storage = self
                        .path_to_repository
                        .join(LocalStorage::DIRECTORY)
                        .join(file_name);
                    let path_to_complete_file = self.path_to_store_complete.join(file_name);
                    fs::rename(path_to_complete_file, path_to_storage.as_path()).map_err(
                        |err| {
                            Error::file_error(Some(UnderlyingError::from(err))).add_generic_message(
                                "File rename failed during a stage three store operation",
                            )
                        },
                    )?;
                }
            }
        }
        Ok(())
    }

    // The fourth stage moves files that have been replaced into a location to be removed
    // Stage 4: for Replace jobs, move the superseded file from "previous"
    // into the "remove" directory, staging it for deletion in stage 5.
    fn process_fourth_stage(&self) -> Result<(), Error> {
        // TODO: Ensure that all paths are resolved to UNC paths
        for job in self.atomic_jobs.as_slice() {
            match job {
                FileOperation::Create(_) => {
                    // Nothing to remove so no-op
                }
                FileOperation::Replace(_) => {
                    // Move from previous to remove
                    let file_name = job.get_filename()?;
                    fs::rename(
                        self.path_to_replace_previous.join(file_name),
                        self.path_to_replace_remove.join(file_name),
                    )
                    .map_err(|err| {
                        Error::file_error(Some(UnderlyingError::from(err))).add_generic_message(
                            "Rename failed during the fourth stage of an atomic operation",
                        )
                    })?;
                }
                FileOperation::Store(_) => {
                    // Nothing to remove so no op
                }
            }
        }
        Ok(())
    }

    // The fifth stage removes files that were replaced
    fn process_fifth_stage(&self) -> Result<(), Error> {
        // TODO: Ensure that all paths are resolved to UNC paths
        for job in self.atomic_jobs.as_slice() {
            match job {
                FileOperation::Create(_) => {
                    // Nothing to remove so no-op
                }
                FileOperation::Replace(_) => {
                    let file_name = job.get_filename()?;
                    // Remove the file that was replaced
                    fs::remove_file(self.path_to_replace_remove.join(file_name)).map_err(
                        |err| {
                            Error::file_error(Some(UnderlyingError::from(err))).add_generic_message(
                                "Remove file failed during a stage five replace operation",
                            )
                        },
                    )?;
                }
                FileOperation::Store(_) => {
                    // Nothing to remove so no op
                }
            }
        }
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::{AtomicLocation, AtomicUpdate};
    use testspace::TestSpace;

    // Initialization should create all stage directories without error.
    #[test]
    fn test_atomic_init() {
        let ts = TestSpace::new().allow_cleanup(false);
        let ts2 = ts.create_child();
        let path_to_working = ts.get_path();
        let path_to_repository = ts2.get_path();
        let atomic =
            AtomicUpdate::new(path_to_repository, path_to_working).expect("Atomic init failed");
    }

    #[test]
    fn test_first_stage_queue_create() {
        // Queue some file creates and then write random data to them
        let mut ts = TestSpace::new();
        let ts2 = ts.create_child();
        let working_path = ts.get_path().to_path_buf();
        let repository_path = ts2.get_path().to_path_buf();
        let mut atomic = AtomicUpdate::new(repository_path.as_path(), working_path.as_path())
            .expect("Failed to initialize repository");
        for random_file in 0..5 {
            // Returns the path that we write data to
            // NOTE(review): this queues a relative path, but queue_create
            // rejects non-absolute paths — verify this test still passes.
            let file_to_create = atomic
                .queue_create(format!("test{}", random_file))
                .expect("Failed to queue a create file");
            println!("Writing to path {:?}", file_to_create.as_path());
            ts.create_file(file_to_create, 2048);
        }
        // First stage will copy files that are being created from the cw directory to the cc directory
        atomic.process_first_stage().expect("First Stage failed");
        // Ensure the files exist
        for test_file in 0..5 {
            let path_to_test = repository_path
                .join(AtomicLocation::CreateComplete.get_path())
                .join(format!("test{}", test_file.to_string()));
            println!("Testing for file {}", path_to_test.display());
            assert!(path_to_test.exists());
        }
    }

    #[test]
    fn test_third_stage_queue_create() {
        // Queue some files to create
        let mut ts = TestSpace::new();
        let ts2 = ts.create_child();
        let working_path = ts.get_path().to_path_buf();
        let repository_path = ts2.get_path().to_path_buf();
        let mut atomic = AtomicUpdate::new(repository_path.as_path(), working_path.as_path())
            .expect("Failed to initialize repository");
        for random_file in 0..5 {
            // Returns the path that we write data to
            let file_to_create = atomic
                .queue_create(working_path.join(format!("test{}", random_file)))
                .expect("Failed to queue a create file");
            ts.create_file(file_to_create, 2048);
        }
        // Move from working to complete
        atomic.process_first_stage().expect("First Stage failed");
        // Does nothing with files being created
        atomic.process_second_stage().expect("Second Stage failed");
        // Move from complete to current
        // NOTE(review): expect message says "Second Stage" but this is the third stage.
        atomic.process_third_stage().expect("Second Stage failed");
        for test_file in 0..5 {
            let path_to_test = working_path.join(format!("test{}", test_file));
            println!("Testing for file {}", path_to_test.display());
            assert!(path_to_test.exists());
        }
    }

    #[test]
    fn test_first_stage_queue_replace() {
        let mut ts = TestSpace::new().allow_cleanup(false);
        let ts2 = ts.create_child();
        let working_path = ts.get_path().to_path_buf();
        let repository_path = ts2.get_path().to_path_buf();
        let mut atomic = AtomicUpdate::new(repository_path.as_path(), working_path.as_path())
            .expect("Failed to initialize repository");
        // Create the files that will be replaced
        for random_file in 0..5 {
            ts.create_file(working_path.join(format!("test{}", random_file)), 4096);
        }
        // Create the files that will replace the files in repository
        for random_file in 0..5 {
            // Returns the path that we write data to
            let file_to_replace = atomic
                .queue_replace(working_path.join(format!("test{}", random_file)))
                .expect("Failed to queue a replace file");
            println!("File that is replacing {:?}", file_to_replace);
            ts.create_file(file_to_replace, 2048);
        }
        // First stage only moves from working to complete
        atomic.process_first_stage().expect("First Stage failed");
        for test_file in 0..5 {
            let path_to_test = repository_path
                .join(AtomicLocation::ReplaceComplete.get_path())
                .join(format!("test{}", test_file.to_string()));
            println!("Testing for file {}", path_to_test.display());
            assert!(path_to_test.exists());
        }
    }

    #[test]
    fn test_second_stage_queue_replace() {
        let mut ts = TestSpace::new().allow_cleanup(false);
        let ts2 = ts.create_child();
        let working_path = ts.get_path().to_path_buf();
        let repository_path = ts2.get_path().to_path_buf();
        let mut atomic = AtomicUpdate::new(repository_path.as_path(), working_path.as_path())
            .expect("Failed to initialize repository");
        // Create the files that will be replaced
        for random_file in 0..5 {
            let test_path = working_path.join(format!("test{}", random_file));
            println!("Creating test file in {:?}", test_path.as_path());
            ts.create_file(test_path, 4096);
        }
        // Create the files that will replace the files in repository
        for random_file in 0..5 {
            // Returns the path that we write data to
            let file_to_replace = atomic
                .queue_replace(working_path.join(format!("test{}", random_file)))
                .expect("Failed to queue a file replace");
            println!("Replacement at {:?}", file_to_replace);
            ts.create_file(file_to_replace, 2048);
        }
        // First stage moves from working to complete
        atomic.process_first_stage().expect("First Stage failed");
        // Second stage moves from current to previous
        atomic.process_second_stage().expect("Second Stage failed");
        // Here we expect the files that were in working to now be in rp and working to be empty
        for test_file in 0..5 {
            // Repository/atomic/rp/test0
            let path_to_test = repository_path
                .join(AtomicLocation::ReplacePrevious.get_path())
                .join(format!("test{}", test_file.to_string()));
            println!("Testing for file {}", path_to_test.display());
            assert!(path_to_test.exists());
        }
    }

    #[test]
    fn test_third_stage_queue_replace() {
        let mut ts = TestSpace::new().allow_cleanup(false);
        let ts2 = ts.create_child();
        let working_path = ts.get_path().to_path_buf();
        let repository_path = ts2.get_path().to_path_buf();
        let mut atomic = AtomicUpdate::new(repository_path.as_path(), working_path.as_path())
            .expect("Failed to initialize repository");
        // Create the files that will be replaced
        for random_file in 0..5 {
            ts.create_file(working_path.join(format!("test{}", random_file)), 4096);
        }
        // Create the files that will replace the files in repository
        for random_file in 0..5 {
            // Returns the path that we write data to
            let file_to_create = atomic
                .queue_replace(working_path.join(format!("test{}", random_file)))
                .expect("Failed to queue a file replace");
            ts.create_file(file_to_create, 2048);
        }
        // First stage only moves from working to complete
        atomic.process_first_stage().expect("First Stage failed");
        // Second stage moves from current to previous
        atomic.process_second_stage().expect("Second Stage failed");
        // Repository should have no files
        for test_file in 0..5 {
            let path_to_test = repository_path.join(format!("test{}", test_file));
            println!("Testing for file {}", path_to_test.display());
            assert_eq!(path_to_test.exists(), false);
        }
        // Move from complete to current
        atomic.process_third_stage().expect("Third stage failed");
        // Files have been replaced in the working directory, old files are in rp
        for test_file in 0..5 {
            let path_to_test = working_path.join(format!("test{}", test_file));
            println!("Testing for file {}", path_to_test.display());
            assert!(path_to_test.exists());
        }
    }

    #[test]
    fn test_fourth_stage_queue_replace() {
        let mut ts = TestSpace::new();
        let ts2 = ts.create_child();
        let working_path = ts.get_path().to_path_buf();
        let repository_path = ts2.get_path().to_path_buf();
        let mut atomic = AtomicUpdate::new(repository_path.as_path(), working_path.as_path())
            .expect("Failed to initialize repository");
        // Create the files that will be replaced
        for random_file in 0..5 {
            ts.create_file(working_path.join(format!("test{}", random_file)), 4096);
        }
        // Create the files that will replace the files in repository
        for random_file in 0..5 {
            // Returns the path that we write data to
            let file_to_create = atomic
                .queue_replace(working_path.join(format!("test{}", random_file)))
                .expect("Failed to queue a file replace");
            ts.create_file(file_to_create, 2048);
        }
        // First stage only moves from working to complete
        atomic.process_first_stage().expect("First Stage failed");
        // TODO: Write a function that checks each stages success
        // Second stage moves from current to previous
        atomic.process_second_stage().expect("Second Stage failed");
        // Move from complete to current
        atomic.process_third_stage().expect("Third stage failed");
        // Move from previous to remove
        atomic.process_fourth_stage().expect("Fourth stage failed");
        // Check that files are waiting to be removed
        for test_file in 0..5 {
            let path_to_test = repository_path.join(format!(
                "{}/test{}",
                AtomicLocation::ReplaceRemove.get_str(),
                test_file
            ));
            println!("Testing for file {}", path_to_test.display());
            assert!(path_to_test.exists());
        }
    }

    #[test]
    fn test_fifth_stage_queue_replace() {
        let mut ts = TestSpace::new();
        let ts2 = ts.create_child();
        let working_path = ts.get_path().to_path_buf();
        let repository_path = ts2.get_path().to_path_buf();
        let mut atomic = AtomicUpdate::new(repository_path.as_path(), working_path.as_path())
            .expect("Failed to initialize repository");
        // Create the files that will be replaced
        for random_file in 0..5 {
            ts.create_file(working_path.join(format!("test{}", random_file)), 4096);
        }
        // Create the files that will replace the files in repository
        for random_file in 0..5 {
            // Returns the path that we write data to
            let file_to_create = atomic
                .queue_replace(working_path.join(format!("test{}", random_file)))
                .expect("Failed to queue a file replace");
            ts.create_file(file_to_create, 2048);
        }
        // First stage only moves from working to complete
        atomic.process_first_stage().expect("First Stage failed");
        // Second stage moves from current to previous
        atomic.process_second_stage().expect("Second Stage failed");
        // Move from complete to current
        atomic.process_third_stage().expect("Third stage failed");
        // Move from previous to remove
        atomic.process_fourth_stage().expect("Fourth stage failed");
        // Remove the files
        // NOTE(review): expect message says "Fourth stage" but this is the fifth stage.
        atomic.process_fifth_stage().expect("Fourth stage failed");
        for test_file in 0..5 {
            // NOTE(review): path built with a literal backslash (Windows-only
            // separator); the fourth-stage test uses '/' — confirm intended
            // platform.
            let path_to_test = repository_path.join(format!(
                "{}\\test{}",
                AtomicLocation::ReplaceRemove.get_str(),
                test_file
            ));
            println!("Testing for file {}", path_to_test.display());
            assert_eq!(path_to_test.exists(), false);
        }
    }

    // Runs the whole pipeline through `complete` with a mix of replace and
    // create jobs, then checks the working directory holds every file.
    #[test]
    fn test_atomic_complete() {
        let mut ts = TestSpace::new();
        let ts2 = ts.create_child();
        let working_path = ts.get_path().to_path_buf();
        let repository_path = ts2.get_path().to_path_buf();
        let mut atomic = AtomicUpdate::new(repository_path.as_path(), working_path.as_path())
            .expect("Failed to initialize repository");
        // Create the files that will be replaced
        for random_file in 0..5 {
            ts.create_file(working_path.join(format!("test{}", random_file)), 4096);
        }
        // Create the files that will replace the files in repository
        for random_file in 0..5 {
            // Returns the path that we write data to
            let file_to_create = atomic
                .queue_replace(working_path.join(format!("test{}", random_file)))
                .expect("Failed to queue a file replace");
            ts.create_file(file_to_create, 2048);
        }
        // Create the files that will be created in the repository
        for random_file in 0..5 {
            // Returns the path that we write data to
            let file_to_create = atomic
                .queue_create(working_path.join(format!("test_create{}", random_file)))
                .expect("Failed to queue a file create");
            ts.create_file(file_to_create, 4096);
        }
        atomic.complete().expect("Atomic operation failed");
        // Check for created files
        for random_file in 0..5 {
            let file_to_create = working_path.join(format!("test_create{}", random_file));
            assert!(file_to_create.exists());
        }
        // Check for replaced files
        for random_file in 0..5 {
            let file_to_create = working_path.join(format!("test{}", random_file));
            assert!(file_to_create.exists());
        }
    }

    // Store pipeline test — the body is currently commented out pending a
    // rework (see the TODO below); only initialization runs today.
    #[test]
    fn test_atomic_store() {
        use crate::storage::LocalStorage;
        let mut ts = TestSpace::new().allow_cleanup(false);
        let ts2 = ts.create_child();
        let working_path = ts.get_path().to_path_buf();
        let repository_path = ts2.get_path().to_path_buf();
        let mut atomic = AtomicUpdate::new(repository_path.as_path(), working_path.as_path())
            .expect("Failed to initialize repository");
        let fs = LocalStorage::initialize(repository_path.as_path())
            .expect("Failed to init file storage");
        // TODO: Redo this
        // Create the files that will be stored
        // for random_file in 0..5 {
        //     ts.create_file(working_path.join(format!("test{}", random_file)), 4096);
        //     let file_to_store = working_path.join(format!("test{}", random_file));
        //     // Queuing a store means that the files ends up in storage rather than in a relative path in the working directory
        //     let place_to_store = atomic.queue_store(working_path.join(format!("test{}", random_file))).expect("Failed to queue a file store");
        //     fs.store_file(file_to_store.as_path(), place_to_store.as_path()).expect("Failed to store file");
        // }
        // // Process stage one
        // atomic.process_first_stage().expect("Failed first stage");
        // // All files should have been moved to storage complete
        // for file in 0..5 {
        //     let path_to_check = atomic.path_to_store_complete.join(format!("test{}", file));
        //     assert!(path_to_check.exists());
        // }
        // atomic.process_third_stage().expect("All files should have been moved to storage");
        // // process stage three - Files should be in the storage folder in the repository
        // let storage_path = repository_path.join(LocalStorage::DIRECTORY);
        // for file in 0..5 {
        //     let path_to_check = storage_path.join(format!("test{}", file));
        //     assert!(path_to_check.exists());
        // }
    }

    // TODO: Advanced create
    // TODO: Advanced replace
    // TODO: Advanced Store
}
use crate::common; use crate::vector2::Vector2; use crate::vector4::Vector4; use std::cmp; use std::convert::From; use std::f32::EPSILON; use std::fmt; use std::fmt::{Display, Formatter}; use std::ops::{ Index, IndexMut, Neg, Add, AddAssign, Sub, SubAssign, Mul, MulAssign, Div, DivAssign, }; #[repr(C, packed)] #[derive(Copy, Clone, Debug)] pub struct Vector3 { pub x: f32, pub y: f32, pub z: f32, } impl Vector3 { /// Creates a vector <0.0, 0.0, 0.0> /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::new(); /// let expected = Vector3 { x: 0.0, y: 0.0, z: 0.0 }; /// assert_eq!(actual, expected); /// ``` #[inline] pub fn new() -> Vector3 { Vector3 { x: 0.0, y: 0.0, z: 0.0 } } /// Creates a vector <1.0, 1.0, 1.0> /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::one(); /// let expected = Vector3 { x: 1.0, y: 1.0, z: 1.0 }; /// assert_eq!(actual, expected); /// ``` #[inline] pub fn one() -> Vector3 { Vector3 { x: 1.0, y: 1.0, z: 1.0 } } /// Creates a right vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::right(); /// let expected = Vector3 { x: 1.0, y: 0.0, z: 0.0 }; /// assert_eq!(actual, expected); /// ``` #[inline] pub fn right() -> Vector3 { Vector3 { x: 1.0, y: 0.0, z: 0.0 } } /// Creates an up vector /// /// # Examples /// ``` /// use vex::Vector3; /// let actual = Vector3::up(); /// let expected = Vector3 { x: 0.0, y: 1.0, z: 0.0 }; /// assert_eq!(actual, expected); /// ``` #[inline] pub fn up() -> Vector3 { Vector3 { x: 0.0, y: 1.0, z: 0.0 } } /// Creates a forward vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::forward(); /// let expected = Vector3 { x: 0.0, y: 0.0, z: -1.0 }; /// assert_eq!(actual, expected); /// ``` #[inline] pub fn forward() -> Vector3 { Vector3 { x: 0.0, y: 0.0, z: -1.0 } } /// Creates a vector from the provided values /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::make(1.0, 2.0, 
3.0); /// let expected = Vector3 { x: 1.0, y: 2.0, z: 3.0 }; /// assert_eq!(actual, expected); /// ``` #[inline] pub fn make(x: f32, y: f32, z: f32) -> Vector3 { Vector3 { x, y, z } } /// Find the dot product between two vectors /// /// # Examples /// ``` /// use vex::Vector3; /// /// let a = Vector3::make(1.0, 0.0, 0.0); /// let b = Vector3::make(0.0, 0.0, 1.0); /// let actual = Vector3::dot(&a, &b); /// let expected = 0.0; /// assert_eq!(actual, expected); /// ``` #[inline] pub fn dot(a: &Vector3, b: &Vector3) -> f32 { a.x * b.x + a.y * b.y + a.z * b.z } /// Find the cross product between two vectors /// /// # Examples /// ``` /// use vex::Vector3; /// /// let a = Vector3::make(0.0, 0.0, 1.0); /// let b = Vector3::make(1.0, 0.0, 0.0); /// let actual = Vector3::cross(&a, &b); /// let expected = Vector3::make(0.0, 1.0, 0.0); /// assert_eq!(actual, expected); /// ``` #[inline] pub fn cross(a: &Vector3, b: &Vector3) -> Vector3 { Vector3::make( a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y - a.y * b.x, ) } /// Find the minimum (component-wise) vector between two vectors /// /// # Examples /// ``` /// use vex::Vector3; /// /// let a = Vector3::make(1.0, 4.0, 5.0); /// let b = Vector3::make(2.0, 3.0, 6.0); /// let actual = Vector3::min(&a, &b); /// let expected = Vector3::make(1.0, 3.0, 5.0); /// assert_eq!(actual, expected); /// ``` #[inline] pub fn min(a: &Vector3, b: &Vector3) -> Vector3 { Vector3::make(a.x.min(b.x), a.y.min(b.y), a.z.min(b.z)) } /// Find the maximum (component-wise) vector between two vectors /// /// # Examples /// ``` /// use vex::Vector3; /// /// let a = Vector3::make(1.0, 4.0, 5.0); /// let b = Vector3::make(2.0, 3.0, 6.0); /// let actual = Vector3::max(&a, &b); /// let expected = Vector3::make(2.0, 4.0, 6.0); /// assert_eq!(actual, expected); /// ``` #[inline] pub fn max(a: &Vector3, b: &Vector3) -> Vector3 { Vector3::make(a.x.max(b.x), a.y.max(b.y), a.z.max(b.z)) } /// Find the clamped (component-wise) vector between two vectors /// 
/// # Examples /// ``` /// use vex::Vector3; /// /// let a = Vector3::make(1.0, 3.0, 5.0); /// let b = Vector3::make(2.0, 4.0, 6.0); /// let mut actual = Vector3::make(0.0, 5.0, 10.0); /// actual.clamp(&a, &b); /// let expected = Vector3::make(1.0, 4.0, 6.0); /// assert_eq!(actual, expected); /// ``` #[inline] pub fn clamp(&mut self, a: &Vector3, b: &Vector3) { let low = Self::min(a, b); let high = Self::max(a, b); let result = Self::max(&low, &Self::min(self, &high)); self.set(result.x, result.y, result.z); } /// Set the components of a vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut actual = Vector3::new(); /// actual.set(1.0, 2.0, 3.0); /// let expected = Vector3::make(1.0, 2.0, 3.0); /// assert_eq!(actual, expected); /// ``` #[inline] pub fn set(&mut self, x: f32, y: f32, z: f32) { self.x = x; self.y = y; self.z = z; } /// Get the magnitude of the vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::make(1.0, 2.0, 3.0).mag(); /// let expected = 3.74165738677; /// assert_eq!(actual, expected); /// ``` #[inline] pub fn mag(&self) -> f32 { self.mag_sq().sqrt() } /// Get the squared magnitude of the vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::make(1.0, 2.0, 3.0).mag_sq(); /// let expected = 14.0; /// assert_eq!(actual, expected); /// ``` #[inline] pub fn mag_sq(&self) -> f32 { self.x * self.x + self.y * self.y + self.z * self.z } /// Normalize the vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut actual = Vector3::make(1.0, 2.0, 3.0); /// actual.norm(); /// let expected = Vector3::make(0.26726124191, 0.53452248382, 0.8017837); /// assert_eq!(actual, expected); /// ``` #[inline] pub fn norm(&mut self) -> f32 { let length = self.mag(); if length > EPSILON { self.x /= length; self.y /= length; self.z /= length; length } else { 0.0 } } /// Set the components of a vector to their absolute values /// /// # Examples /// ``` /// use vex::Vector3; /// /// 
let mut actual = Vector3::make(-1.0, -2.0, -3.0); /// actual.abs(); /// let expected = Vector3::make(1.0, 2.0, 3.0); /// assert_eq!(actual, expected); /// ``` #[inline] pub fn abs(&mut self) { self.x = self.x.abs(); self.y = self.y.abs(); self.z = self.z.abs(); } /// Determine whether or not all components of the vector are valid /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::make(1.0, 2.0, 3.0); /// assert!(actual.is_valid()); /// ``` #[inline] pub fn is_valid(&self) -> bool { for i in 0..3 { if !common::is_valid(self[i]) { return false; } } true } } impl From<Vector2> for Vector3 { /// Creates a Vector3 from the components of a Vector2 /// /// # Examples /// ``` /// use vex::Vector2; /// use vex::Vector3; /// /// let input = Vector2::make(1.0, 2.0); /// let actual = Vector3::from(input); /// let expected = Vector3 { x: 1.0, y: 2.0, z: 0.0 }; /// assert_eq!(actual, expected); /// ``` #[inline] fn from(item: Vector2) -> Vector3 { Vector3 { x: item.x, y: item.y, z: 0.0, } } } impl From<Vector4> for Vector3 { /// Creates a Vector3 from the components of a Vector4 /// /// # Examples /// ``` /// use vex::Vector3; /// use vex::Vector4; /// /// let input = Vector4::make(1.0, 2.0, 3.0, 4.0); /// let actual = Vector3::from(input); /// let expected = Vector3 { x: 1.0, y: 2.0, z: 3.0 }; /// assert_eq!(actual, expected); /// ``` #[inline] fn from(item: Vector4) -> Vector3 { Vector3 { x: item.x, y: item.y, z: item.z, } } } impl Index<u32> for Vector3 { type Output = f32; /// Looks up a component by index /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut v = Vector3::make(1.0, 2.0, 3.0); /// assert_eq!(v[0], 1.0); /// assert_eq!(v[1], 2.0); /// assert_eq!(v[2], 3.0); /// ``` #[inline] fn index(&self, index: u32) -> &f32 { unsafe { match index { 0 => &self.x, 1 => &self.y, 2 => &self.z, _ => panic!("Invalid index for Vector3: {}", index), } } } } impl IndexMut<u32> for Vector3 { /// Mutate a component by index /// /// # Examples /// 
``` /// use vex::Vector3; /// /// let mut v = Vector3::new(); /// v[0] = 4.0; /// v[1] = 5.0; /// v[2] = 6.0; /// assert_eq!(v[0], 4.0); /// assert_eq!(v[1], 5.0); /// assert_eq!(v[2], 6.0); /// ``` #[inline] fn index_mut<'a>(&'a mut self, index: u32) -> &'a mut f32 { unsafe { match index { 0 => &mut self.x, 1 => &mut self.y, 2 => &mut self.z, _ => panic!("Invalid index for Vector3: {}", index), } } } } impl Neg for Vector3 { type Output = Vector3; /// Negates all components in a vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = -Vector3::make(1.0, 2.0, 3.0); /// let expected = Vector3::make(-1.0, -2.0, -3.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn neg(self) -> Vector3 { Vector3::make(-self.x, -self.y, -self.z) } } impl Add<f32> for Vector3 { type Output = Vector3; /// Find the resulting vector by adding a scalar to a vector's components /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::make(1.0, 2.0, 3.0) + 1.0; /// let expected = Vector3::make(2.0, 3.0, 4.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn add(self, _rhs: f32) -> Vector3 { Vector3::make(self.x + _rhs, self.y + _rhs, self.z + _rhs) } } impl Add<Vector3> for Vector3 { type Output = Vector3; /// Add two vectors /// /// # Examples /// ``` /// use vex::Vector3; /// /// let a = Vector3::make(1.0, 2.0, 3.0); /// let b = Vector3::make(4.0, 5.0, 6.0); /// let actual = a + b; /// let expected = Vector3::make(5.0, 7.0, 9.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn add(self, _rhs: Vector3) -> Vector3 { Vector3::make(self.x + _rhs.x, self.y + _rhs.y, self.z + _rhs.z) } } impl AddAssign<f32> for Vector3 { /// Increment a vector by a scalar /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut actual = Vector3::make(1.0, 2.0, 3.0); /// actual += 10.0; /// let expected = Vector3::make(11.0, 12.0, 13.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn add_assign(&mut self, _rhs: f32) { self.x += _rhs; 
self.y += _rhs; self.z += _rhs; } } impl AddAssign<Vector3> for Vector3 { /// Increment a vector by another vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut actual = Vector3::make(1.0, 2.0, 3.0); /// actual += Vector3::make(1.0, 2.0, 3.0); /// let expected = Vector3::make(2.0, 4.0, 6.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn add_assign(&mut self, _rhs: Vector3) { self.x += _rhs.x; self.y += _rhs.y; self.z += _rhs.z; } } impl Sub<f32> for Vector3 { type Output = Vector3; /// Find the resulting vector by subtracting a scalar from a vector's components /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::make(1.0, 2.0, 3.0) - 10.0; /// let expected = Vector3::make(-9.0, -8.0, -7.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn sub(self, _rhs: f32) -> Vector3 { Vector3::make(self.x - _rhs, self.y - _rhs, self.z - _rhs) } } impl Sub<Vector3> for Vector3 { type Output = Vector3; /// Subtract two vectors /// /// # Examples /// ``` /// use vex::Vector3; /// /// let a = Vector3::make(1.0, 2.0, 3.0); /// let b = Vector3::make(5.0, 4.0, 3.0); /// let actual = a - b; /// let expected = Vector3::make(-4.0, -2.0, 0.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn sub(self, _rhs: Vector3) -> Vector3 { Vector3::make(self.x - _rhs.x, self.y - _rhs.y, self.z - _rhs.z) } } impl SubAssign<f32> for Vector3 { /// Decrement a vector by a scalar /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut actual = Vector3::make(1.0, 2.0, 3.0); /// actual -= 1.0; /// let expected = Vector3::make(0.0, 1.0, 2.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn sub_assign(&mut self, _rhs: f32) { self.x -= _rhs; self.y -= _rhs; self.z -= _rhs; } } impl SubAssign<Vector3> for Vector3 { /// Decrement a vector by another vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut actual = Vector3::make(1.0, 2.0, 3.0); /// actual -= Vector3::make(1.0, 2.0, 3.0); /// assert_eq!(actual, 
Vector3::new()); /// ``` #[inline] fn sub_assign(&mut self, _rhs: Vector3) { self.x -= _rhs.x; self.y -= _rhs.y; self.z -= _rhs.z; } } impl Mul<f32> for Vector3 { type Output = Vector3; /// Find the resulting vector by multiplying a scalar to a vector's components /// /// # Examples /// ``` /// use vex::Vector3; /// /// let actual = Vector3::make(1.0, 2.0, 3.0) * 2.0; /// let expected = Vector3::make(2.0, 4.0, 6.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn mul(self, _rhs: f32) -> Vector3 { Vector3::make(self.x * _rhs, self.y * _rhs, self.z * _rhs) } } impl Mul<Vector3> for Vector3 { type Output = Vector3; /// Multiply two vectors /// /// # Examples /// ``` /// use vex::Vector3; /// /// let a = Vector3::make(1.0, 2.0, 3.0); /// let b = Vector3::make(3.0, 4.0, 5.0); /// let actual = a * b; /// let expected = Vector3::make(3.0, 8.0, 15.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn mul(self, _rhs: Vector3) -> Vector3 { Vector3::make(self.x * _rhs.x, self.y * _rhs.y, self.z * _rhs.z) } } impl MulAssign<f32> for Vector3 { /// Multiply a vector by a scalar /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut actual = Vector3::make(1.0, 2.0, 3.0); /// actual *= 2.0; /// let expected = Vector3::make(2.0, 4.0, 6.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn mul_assign(&mut self, _rhs: f32) { self.x *= _rhs; self.y *= _rhs; self.z *= _rhs; } } impl MulAssign<Vector3> for Vector3 { /// Multiply a vector by another vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut actual = Vector3::make(1.0, 2.0, 3.0); /// actual *= Vector3::make(2.0, 3.0, 6.0); /// let expected = Vector3::make(2.0, 6.0, 18.0); /// assert_eq!(actual, expected); /// ``` #[inline] fn mul_assign(&mut self, _rhs: Vector3) { self.x *= _rhs.x; self.y *= _rhs.y; self.z *= _rhs.z; } } impl Div<f32> for Vector3 { type Output = Vector3; /// Find the resulting vector by dividing a scalar to a vector's components /// /// # Examples /// ``` /// use 
vex::Vector3; /// /// let actual = Vector3::make(1.0, 2.0, 3.0) / 2.0; /// let expected = Vector3::make(0.5, 1.0, 1.5); /// assert_eq!(actual, expected); /// ``` #[inline] fn div(self, _rhs: f32) -> Vector3 { Vector3::make(self.x / _rhs, self.y / _rhs, self.z / _rhs) } } impl Div<Vector3> for Vector3 { type Output = Vector3; /// Divide two vectors /// /// # Examples /// ``` /// use vex::Vector3; /// /// let a = Vector3::make(1.0, 2.0, 4.0); /// let b = Vector3::make(2.0, 8.0, 32.0); /// let actual = a / b; /// let expected = Vector3::make(0.5, 0.25, 0.125); /// assert_eq!(actual, expected); /// ``` #[inline] fn div(self, _rhs: Vector3) -> Vector3 { Vector3::make(self.x / _rhs.x, self.y / _rhs.y, self.z / _rhs.z) } } impl DivAssign<f32> for Vector3 { /// Divide a vector by a scalar /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut actual = Vector3::make(1.0, 2.0, 3.0); /// actual /= 2.0; /// let expected = Vector3::make(0.5, 1.0, 1.5); /// assert_eq!(actual, expected); /// ``` #[inline] fn div_assign(&mut self, _rhs: f32) { self.x /= _rhs; self.y /= _rhs; self.z /= _rhs; } } impl DivAssign<Vector3> for Vector3 { /// Divide a vector by another vector /// /// # Examples /// ``` /// use vex::Vector3; /// /// let mut actual = Vector3::make(1.0, 2.0, 4.0); /// actual /= Vector3::make(2.0, 8.0, 32.0); /// let expected = Vector3::make(0.5, 0.25, 0.125); /// assert_eq!(actual, expected); /// ``` #[inline] fn div_assign(&mut self, _rhs: Vector3) { self.x /= _rhs.x; self.y /= _rhs.y; self.z /= _rhs.z; } } impl cmp::PartialEq for Vector3 { /// Determines if two vectors' components are equivalent /// /// # Examples /// ``` /// use vex::Vector3; /// /// assert!(Vector3::new() == Vector3::new()); /// ``` #[inline] fn eq(&self, _rhs: &Vector3) -> bool { for i in 0..3 { if self[i] != _rhs[i] { return false; } } true } } impl Display for Vector3 { #[inline] fn fmt(&self, f: &mut Formatter) -> fmt::Result { unsafe { write!(f, "<{} {} {}>", self.x, self.y, self.z) } } }
// Public parser submodule; implementation lives in `minaparser.rs`.
pub mod minaparser;
pub use crate::pool::ProtocolVersion;

use crate::utils::validation::{Validatable, ValidationError};

use super::constants;

/// Tuning knobs for the node connection pool. Every field falls back to its
/// `constants::DEFAULT_*` value when absent from the serialized form.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub struct PoolConfig {
    #[serde(default = "PoolConfig::default_protocol_version")]
    pub protocol_version: ProtocolVersion,
    #[serde(default = "PoolConfig::default_freshness_threshold")]
    pub freshness_threshold: u64,
    #[serde(default = "PoolConfig::default_ack_timeout")]
    pub ack_timeout: i64,
    #[serde(default = "PoolConfig::default_reply_timeout")]
    pub reply_timeout: i64,
    #[serde(default = "PoolConfig::default_conn_request_limit")]
    pub conn_request_limit: usize,
    #[serde(default = "PoolConfig::default_conn_active_timeout")]
    pub conn_active_timeout: i64,
    #[serde(default = "PoolConfig::default_request_read_nodes")]
    pub request_read_nodes: usize,
}

impl Validatable for PoolConfig {
    /// Every numeric knob must be strictly positive; the first offending
    /// field (in declaration order) produces the error.
    fn validate(&self) -> Result<(), ValidationError> {
        if self.freshness_threshold == 0 {
            Err(invalid!("`freshness_threshold` must be greater than 0"))
        } else if self.ack_timeout <= 0 {
            Err(invalid!("`ack_timeout` must be greater than 0"))
        } else if self.reply_timeout <= 0 {
            Err(invalid!("`reply_timeout` must be greater than 0"))
        } else if self.conn_request_limit == 0 {
            Err(invalid!("`conn_request_limit` must be greater than 0"))
        } else if self.conn_active_timeout <= 0 {
            Err(invalid!("`conn_active_timeout` must be greater than 0"))
        } else if self.request_read_nodes == 0 {
            Err(invalid!("`request_read_nodes` must be greater than 0"))
        } else {
            Ok(())
        }
    }
}

impl PoolConfig {
    // serde `default = "..."` callbacks; the names are referenced by string
    // in the field attributes above, so they must not be renamed.
    fn default_protocol_version() -> ProtocolVersion {
        constants::DEFAULT_PROTOCOL_VERSION
    }

    fn default_freshness_threshold() -> u64 {
        constants::DEFAULT_FRESHNESS_TIMEOUT
    }

    fn default_ack_timeout() -> i64 {
        constants::DEFAULT_ACK_TIMEOUT
    }

    fn default_reply_timeout() -> i64 {
        constants::DEFAULT_REPLY_TIMEOUT
    }

    fn default_conn_request_limit() -> usize {
        constants::DEFAULT_CONN_REQUEST_LIMIT
    }

    fn default_conn_active_timeout() -> i64 {
        constants::DEFAULT_CONN_ACTIVE_TIMEOUT
    }

    fn default_request_read_nodes() -> usize {
        constants::DEFAULT_REQUEST_READ_NODES
    }
}

impl Default for PoolConfig {
    /// A configuration populated entirely from the `constants` defaults.
    fn default() -> PoolConfig {
        PoolConfig {
            protocol_version: Self::default_protocol_version(),
            freshness_threshold: Self::default_freshness_threshold(),
            ack_timeout: Self::default_ack_timeout(),
            reply_timeout: Self::default_reply_timeout(),
            conn_request_limit: Self::default_conn_request_limit(),
            conn_active_timeout: Self::default_conn_active_timeout(),
            request_read_nodes: Self::default_request_read_nodes(),
        }
    }
}
use std::ops::{AddAssign, SubAssign, Mul};
use std::mem;
use std::fmt::Debug;

/// Builds a tuple of calculus adaptors — one `$variant` per `$iter` — where
/// every adaptor receives its own factor stream via
/// `$no_consume_iter.subscribe()`.
#[macro_export]
macro_rules! calculus {
    ($no_consume_iter:expr, $variant:ident, $($iter:expr),+) => {
        ($( $variant::from($iter, $no_consume_iter.subscribe()) ),+)
    };
}

/// Convenience alias for the common `f32` case.
pub type DifferentiateF32<I, F> = Differentiate<I, F, f32>;

/// Iterator adaptor yielding the scaled difference between consecutive items
/// of `inner`, with the scale factor drawn from `factor` in lock-step.
pub struct Differentiate<I, F, T> {
    // Previously seen item; `None` before the first `next()` call.
    last: Option<T>,
    inner: I,
    factor: F,
}

impl<I, F, T> Differentiate<I, F, T> {
    /// Wraps `inner`, pairing it with the `factor` stream.
    ///
    /// (The original impl declared an unused `'a` lifetime and `Iterator`
    /// bounds here; neither is needed by the constructor, so both were
    /// removed — a strictly more permissive, backward-compatible signature.)
    pub fn from(inner: I, factor: F) -> Self {
        Differentiate {
            last: None,
            inner,
            factor,
        }
    }
}

impl<I, F, T> Iterator for Differentiate<I, F, T>
where
    I: Iterator<Item = T>,
    F: Iterator<Item = T>,
    T: SubAssign + Copy + Debug + Default + Mul,
    <T as Mul>::Output: Into<T>,
{
    type Item = T;

    /// The first item is `T::default()`; every later item is
    /// `(previous - current) * factor`. Ends as soon as either stream ends.
    ///
    /// NOTE(review): the sign convention here is `previous - current`, not
    /// the usual `current - previous` — confirm downstream consumers expect
    /// this (possibly compensated by a negative factor stream).
    fn next(&mut self) -> Option<Self::Item> {
        let cur = self.inner.next()?;
        let factor = self.factor.next()?;
        match self.last {
            Some(ref mut slot) => {
                // Stash the current value and diff against the previous one.
                let mut prev = mem::replace(slot, cur);
                prev -= cur;
                Some((prev * factor).into())
            }
            None => {
                self.last = Some(cur);
                Some(T::default())
            }
        }
    }
}

/// Convenience alias for the common `f32` case.
pub type IntegrateF32<I, F> = Integrate<I, F, f32>;

/// Iterator adaptor yielding the running sum of `item * factor`.
pub struct Integrate<I, F, T> {
    accumulator: T,
    inner: I,
    factor: F,
}

impl<I, F, T: Default> Integrate<I, F, T> {
    /// Wraps `inner`, starting the accumulator at `T::default()`.
    pub fn from(inner: I, factor: F) -> Self {
        Integrate {
            accumulator: T::default(),
            inner,
            factor,
        }
    }
}

impl<I, F, T> Iterator for Integrate<I, F, T>
where
    I: Iterator<Item = T>,
    F: Iterator<Item = T>,
    T: AddAssign + Mul + Copy + Debug,
    <T as Mul>::Output: Into<T>,
{
    type Item = T;

    /// Accumulates `item * factor` and yields the running total; ends as
    /// soon as either stream ends.
    fn next(&mut self) -> Option<Self::Item> {
        let item = self.inner.next()?;
        let factor = self.factor.next()?;
        self.accumulator += (item * factor).into();
        Some(self.accumulator)
    }
}
use cocoa::base::{class, id};
use cocoa::foundation::NSUInteger;

/// Objective-C binding for Metal's `MTLRenderPassDescriptor`, implemented
/// directly on raw `id` pointers via `msg_send!`. All methods are `unsafe`
/// because they message an unchecked Objective-C object.
pub trait MTLRenderPassDescriptor {
    /// Creates a default render pass descriptor.
    ///
    /// # Description
    ///
    /// Set the desired color attachments with the `setObject:atIndexedSubscript:`
    /// method of the `colorAttachments` property. Set the desired depth and stencil
    /// attachments with the `depthAttachment` and `stencilAttachment` properties,
    /// respectively.
    unsafe fn renderPassDescriptor(_: Self) -> id {
        msg_send![class("MTLRenderPassDescriptor"), renderPassDescriptor]
    }

    /// An array of state information for attachments that store color data.
    unsafe fn colorAttachments(self) -> id;
    unsafe fn setColorAttachments(self, colorAttachments: id);

    /// State information for an attachment that stores depth data.
    unsafe fn depthAttachment(self) -> id;
    unsafe fn setDepthAttachment(self, depthAttachment: id);

    /// State information for an attachment that stores stencil data.
    unsafe fn stencilAttachment(self) -> id;
    unsafe fn setStencilAttachment(self, stencilAttachment: id);

    /// The destination for the GPU to write visibility information when samples
    /// pass the depth and stencil tests.
    unsafe fn visibilityResultBuffer(self) -> id;
    unsafe fn setVisibilityResultBuffer(self, visibilityResultBuffer: id);

    /// Number of render-target array slices; only exposed on macOS.
    #[cfg(target_os = "macos")]
    unsafe fn renderTargetArrayLength(self) -> NSUInteger;
    #[cfg(target_os = "macos")]
    unsafe fn setRenderTargetArrayLength(self, renderTargetArrayLength: NSUInteger);

    /// Copies the descriptor object.
    unsafe fn copy(self) -> id;
}

// Every accessor forwards straight to the underlying Objective-C object with
// a selector of the same name; there is no Rust-side state.
impl MTLRenderPassDescriptor for id {
    unsafe fn colorAttachments(self) -> id {
        msg_send![self, colorAttachments]
    }

    unsafe fn setColorAttachments(self, colorAttachments: id) {
        msg_send![self, setColorAttachments:colorAttachments]
    }

    unsafe fn depthAttachment(self) -> id {
        msg_send![self, depthAttachment]
    }

    unsafe fn setDepthAttachment(self, depthAttachment: id) {
        msg_send![self, setDepthAttachment:depthAttachment]
    }

    unsafe fn stencilAttachment(self) -> id {
        msg_send![self, stencilAttachment]
    }

    unsafe fn setStencilAttachment(self, stencilAttachment: id) {
        msg_send![self, setStencilAttachment:stencilAttachment]
    }

    unsafe fn visibilityResultBuffer(self) -> id {
        msg_send![self, visibilityResultBuffer]
    }

    unsafe fn setVisibilityResultBuffer(self, visibilityResultBuffer: id) {
        msg_send![self, setVisibilityResultBuffer:visibilityResultBuffer]
    }

    #[cfg(target_os = "macos")]
    unsafe fn renderTargetArrayLength(self) -> NSUInteger {
        msg_send![self, renderTargetArrayLength]
    }

    #[cfg(target_os = "macos")]
    unsafe fn setRenderTargetArrayLength(self, renderTargetArrayLength: NSUInteger) {
        msg_send![self, setRenderTargetArrayLength:renderTargetArrayLength]
    }

    unsafe fn copy(self) -> id {
        msg_send![self, copy]
    }
}
use parser::{FileHash, TypeDef, Unit};

use crate::print::{self, DiffState, PrintHeader, PrintState, ValuePrinter};
use crate::Result;

// Prints the typedef's namespace-qualified name, substituting a placeholder
// when the typedef is anonymous.
fn print_name(ty: &TypeDef, w: &mut dyn ValuePrinter) -> Result<()> {
    if let Some(namespace) = ty.namespace() {
        print::namespace::print(namespace, w)?;
    }
    w.name(ty.name().unwrap_or("<anon-typedef>"))?;
    Ok(())
}

// Prints the typedef name as a link to its definition (`id`).
pub(crate) fn print_ref(ty: &TypeDef, w: &mut dyn ValuePrinter, id: usize) -> Result<()> {
    w.link(id, &mut |w| print_name(ty, w))
}

// Prints the full `type NAME = TARGET` declaration line.
fn print_def(ty: &TypeDef, w: &mut dyn ValuePrinter, hash: &FileHash) -> Result<()> {
    write!(w, "type ")?;
    print_name(ty, w)?;
    write!(w, " = ")?;
    print::types::print_ref(ty.ty(hash), w, hash)?;
    Ok(())
}

// Prints the source location of the typedef declaration.
fn print_source(ty: &TypeDef, w: &mut dyn ValuePrinter, unit: &Unit) -> Result<()> {
    print::source::print(ty.source(), w, unit)
}

// Prints the byte size of the aliased type, or nothing when unknown.
fn print_byte_size(ty: &TypeDef, w: &mut dyn ValuePrinter, hash: &FileHash) -> Result<()> {
    if let Some(byte_size) = ty.byte_size(hash) {
        write!(w, "{}", byte_size)?;
    }
    Ok(())
}

impl<'input> PrintHeader for TypeDef<'input> {
    // Header line: the `type NAME = TARGET` declaration itself.
    fn print_header(&self, state: &mut PrintState) -> Result<()> {
        state.line(|w, state| print_def(self, w, state))
    }

    // Body: optional source location, size, and — for anonymous aliased
    // types, which have no definition of their own — the expanded members.
    fn print_body(&self, state: &mut PrintState, unit: &Unit) -> Result<()> {
        let ty = self.ty(state.hash());
        if state.options().print_source {
            state.field("source", |w, _state| print_source(self, w, unit))?;
        }
        state.field("size", |w, state| print_byte_size(self, w, state))?;
        if let Some(ref ty) = ty {
            if ty.is_anon() {
                state.field_expanded("members", |state| {
                    print::types::print_members(state, unit, Some(ty))
                })?;
            }
        }
        Ok(())
    }

    fn diff_header(state: &mut DiffState, a: &Self, b: &Self) -> Result<()> {
        state.line(a, b, |w, state, x| print_def(x, w, state))
    }

    // Diff counterpart of `print_body`: compares source, size, and — for
    // anonymous aliased types only — the member lists of both sides.
    fn diff_body(
        state: &mut DiffState,
        unit_a: &parser::Unit,
        a: &Self,
        unit_b: &parser::Unit,
        b: &Self,
    ) -> Result<()> {
        if state.options().print_source {
            state.field(
                "source",
                (unit_a, a),
                (unit_b, b),
                |w, _state, (unit, x)| print_source(x, w, unit),
            )?;
        }
        state.field("size", a, b, |w, state, x| print_byte_size(x, w, state))?;
        // Drop non-anonymous types so only inline members get diffed.
        let ty_a = filter_option(a.ty(state.hash_a()), |ty| ty.is_anon());
        let ty_a = ty_a.as_deref();
        let ty_b = filter_option(b.ty(state.hash_b()), |ty| ty.is_anon());
        let ty_b = ty_b.as_deref();
        state.field_expanded("members", |state| {
            print::types::diff_members(state, unit_a, ty_a, unit_b, ty_b)
        })
    }
}

// Local equivalent of `Option::filter`: keeps the value only when the
// predicate holds.
fn filter_option<T, F>(o: Option<T>, f: F) -> Option<T>
where
    F: FnOnce(&T) -> bool,
{
    o.and_then(|v| if f(&v) { Some(v) } else { None })
}
/* chapter 4 functions early returns */

/// Returns `n + 1`.
fn foo(n: i32) -> i32 {
    // An explicit `return` works here, but the idiomatic form for the final
    // value of a function is a tail expression.
    n + 1
}

fn main() {
    let n = foo(5);
    println!("{}", n);
}

// output should be:
/*
6
*/
use std::{ fs::File, io::{BufReader, Read, Seek}, }; use std::{io::SeekFrom, path::Path}; use anyhow::Result; use las_rs::Header; use crate::base::{PointReader, SeekToPoint}; use pasture_core::{containers::PointBufferWriteable, layout::PointLayout, meta::Metadata}; use super::{path_is_compressed_las_file, LASReaderBase, RawLASReader, RawLAZReader}; trait AnyLASReader: PointReader + SeekToPoint + LASReaderBase {} impl<T: PointReader + SeekToPoint + LASReaderBase> AnyLASReader for T {} /// `PointReader` implementation for LAS/LAZ files pub struct LASReader<'a> { raw_reader: Box<dyn AnyLASReader + 'a>, } impl<'a> LASReader<'a> { // TODO LAS files store 32-bit integer coordinates in local space internally, but we almost always want // floating point values in world space instead. The conversion from integer to float in world space will // have to happen automatically during reading, and the default datatype of attribute POSITION_3D will be // Vector3<f64>. However, we should make it possible to switch this default conversion off for cases where // we want to read just the raw i32 data! /// Creates a new `LASReader` by opening the file at the given `path`. Tries to determine whether /// the file is compressed from the file extension (i.e. files with extension `.laz` are assumed to be /// compressed). /// /// # Errors /// /// If `path` does not exist, cannot be opened or does not point to a valid LAS/LAZ file, an error is returned. pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self> { let is_compressed = path_is_compressed_las_file(path.as_ref())?; let file = BufReader::new(File::open(path)?); Self::from_read(file, is_compressed) } /// Creates a new `LASReader` from the given `read`. This method has to know whether /// the `read` points to a compressed LAZ file or a regular LAS file. /// /// # Errors /// /// If the given `Read` does not represent a valid LAS/LAZ file, an error is returned. 
pub fn from_read<R: Read + Seek + Send + 'a>(read: R, is_compressed: bool) -> Result<Self> { let raw_reader: Box<dyn AnyLASReader> = if is_compressed { Box::new(RawLAZReader::from_read(read)?) } else { Box::new(RawLASReader::from_read(read)?) }; Ok(Self { raw_reader: raw_reader, }) } pub fn remaining_points(&mut self) -> usize { self.raw_reader.remaining_points() } /// Returns the LAS header for the associated `LASReader` pub fn header(&self) -> &Header { self.raw_reader.header() } } impl<'a> PointReader for LASReader<'a> { fn read(&mut self, count: usize) -> Result<Box<dyn pasture_core::containers::PointBuffer>> { self.raw_reader.read(count) } fn read_into( &mut self, point_buffer: &mut dyn PointBufferWriteable, count: usize, ) -> Result<usize> { self.raw_reader.read_into(point_buffer, count) } fn get_metadata(&self) -> &dyn Metadata { self.raw_reader.get_metadata() } fn get_default_point_layout(&self) -> &PointLayout { self.raw_reader.get_default_point_layout() } } impl<'a> SeekToPoint for LASReader<'a> { fn seek_point(&mut self, position: SeekFrom) -> Result<usize> { self.raw_reader.seek_point(position) } }
use amethyst::{ assets::{AssetStorage, Handle, Loader}, core::{math::Vector3, Transform}, ecs::prelude::{Component, DenseVecStorage, Entity}, prelude::*, renderer::{ formats::texture::ImageFormat, sprite::{SpriteRender, SpriteSheet, SpriteSheetFormat}, Texture, }, }; use ron::de::from_str; use serde::Deserialize; use std::fs; use crate::resources::get_scale; #[derive(Debug, Eq, PartialEq, Clone)] pub struct Tile; impl Default for Tile { fn default() -> Self { Tile {} } } #[derive(Debug, Deserialize)] pub struct TileMapConfig { pub tile_width: usize, pub tile_height: usize, pub size_x: usize, pub size_y: usize, } impl TileMapConfig { pub fn from_path(path: &str) -> TileMapConfig { let file_content = fs::read_to_string(path).expect("reading tilemap setting"); from_str(&file_content).expect("parsing tile config") } } #[derive(Component, Debug)] #[storage(DenseVecStorage)] pub struct TileMap { pub entities: Vec<Entity>, pub tile_set: Handle<SpriteSheet>, len: usize, } impl TileMap { pub fn new( world: &mut World, asset_path: &str, asset_config: &str, config: &TileMapConfig, ) -> TileMap { let texture_handle = { let loader = world.read_resource::<Loader>(); let texture_storage = world.read_resource::<AssetStorage<Texture>>(); loader.load(asset_path, ImageFormat::default(), (), &texture_storage) }; let sprite_sheet_handle = { let loader = world.read_resource::<Loader>(); let sprite_sheet_store = world.read_resource::<AssetStorage<SpriteSheet>>(); loader.load( asset_config, SpriteSheetFormat(texture_handle), (), &sprite_sheet_store, ) }; let mut entities: Vec<Entity> = vec![]; for x in 0..config.size_x { for y in 0..config.size_y { let sprite_render = SpriteRender { sprite_sheet: sprite_sheet_handle.clone(), sprite_number: 0, //default }; let mut transform = Transform::default(); let scale = get_scale(world); transform.set_translation_xyz( (x as f32 + 0.5) * scale * config.tile_width as f32, (y as f32 + 0.5) * scale * config.tile_height as f32, 0.0, ); 
transform.set_scale(Vector3::new(scale, scale, scale)); let entity = world .create_entity() .with(transform) .with(sprite_render.clone()) .build(); entities.push(entity); } } let len = entities.len(); TileMap { entities, tile_set: sprite_sheet_handle, len, } } pub fn len(&self) -> usize { self.len } }
use std::{cmp::Reverse, convert::Infallible, str::FromStr};

/// Version parser for Kubernetes version patterns
///
/// This type implements two orderings for sorting by:
///
/// - [`Version::priority`] for [Kubernetes/kubectl version priority](https://kubernetes.io/docs/tasks/extend-kubernetes/custom-resources/custom-resource-definition-versioning/#version-priority)
/// - [`Version::generation`] for sorting strictly by version generation in a semver style
///
/// To get the api versions sorted by `kubectl` priority:
///
/// ```
/// use kube_core::Version;
/// use std::cmp::Reverse; // for DESCENDING sort
/// let mut versions = vec![
///     "v10beta3",
///     "v2",
///     "foo10",
///     "v1",
///     "v3beta1",
///     "v11alpha2",
///     "v11beta2",
///     "v12alpha1",
///     "foo1",
///     "v10",
/// ];
/// versions.sort_by_cached_key(|v| Reverse(Version::parse(v).priority()));
/// assert_eq!(versions, vec![
///     "v10",
///     "v2",
///     "v1",
///     "v11beta2",
///     "v10beta3",
///     "v3beta1",
///     "v12alpha1",
///     "v11alpha2",
///     "foo1",
///     "foo10",
/// ]);
/// ```
///
#[derive(PartialEq, Eq, Debug, Clone)]
pub enum Version {
    /// A major/GA release
    ///
    /// Always considered higher priority than a beta release.
    Stable(u32),
    /// A beta release for a specific major version
    ///
    /// Always considered higher priority than an alpha release.
    Beta(u32, Option<u32>),
    /// An alpha release for a specific major version
    ///
    /// Always considered higher priority than a nonconformant version
    Alpha(u32, Option<u32>),
    /// A non-conformant api string
    ///
    /// CRDs and APIServices can use arbitrary strings as versions.
    Nonconformant(String),
}

impl Version {
    /// Attempts to parse a conformant `v<major>[alpha|beta[<minor>]]` string.
    /// Returns `None` for anything that does not match that shape.
    fn try_parse(v: &str) -> Option<Version> {
        // Conformant versions always start with a literal 'v'.
        let v = v.strip_prefix('v')?;
        // The leading digit run is the major version; `next()?` rejects an
        // empty run (e.g. "v", "valpha").
        let major = v.split_terminator(|ch: char| !ch.is_ascii_digit()).next()?;
        let v = &v[major.len()..];
        let major: u32 = major.parse().ok()?;
        if v.is_empty() {
            return Some(Version::Stable(major));
        }
        if let Some(suf) = v.strip_prefix("alpha") {
            return if suf.is_empty() {
                Some(Version::Alpha(major, None))
            } else {
                // Any trailing garbage after the minor number fails the
                // parse and makes the whole string nonconformant.
                Some(Version::Alpha(major, Some(suf.parse().ok()?)))
            };
        }
        if let Some(suf) = v.strip_prefix("beta") {
            return if suf.is_empty() {
                Some(Version::Beta(major, None))
            } else {
                Some(Version::Beta(major, Some(suf.parse().ok()?)))
            };
        }
        None
    }

    /// An infallible parse of a Kubernetes version string
    ///
    /// ```
    /// use kube_core::Version;
    /// assert_eq!(Version::parse("v10beta12"), Version::Beta(10, Some(12)));
    /// ```
    pub fn parse(v: &str) -> Version {
        match Self::try_parse(v) {
            Some(ver) => ver,
            None => Version::Nonconformant(v.to_string()),
        }
    }
}

/// An infallible FromStr implementation for more generic users
impl FromStr for Version {
    type Err = Infallible;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(Version::parse(s))
    }
}

/// Release-channel ranking shared by both orderings.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
enum Stability {
    Nonconformant,
    Alpha,
    Beta,
    Stable,
}

/// See [`Version::priority`]
// Field order matters: derived `Ord` is lexicographic, so stability is
// compared before the major version.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct Priority {
    stability: Stability,
    major: u32,
    minor: Option<u32>,
    nonconformant: Option<Reverse<String>>,
}

/// See [`Version::generation`]
// Same fields as `Priority`, but major is compared before stability.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct Generation {
    major: u32,
    stability: Stability,
    minor: Option<u32>,
    nonconformant: Option<Reverse<String>>,
}

impl Version {
    /// Decomposes a version into `(stability, major, minor, nonconformant)`.
    ///
    /// `Priority` and `Generation` share these exact components and differ
    /// only in field order, so both accessors below are built from this one
    /// match instead of duplicating it (as the previous code did).
    /// Nonconformant strings are wrapped in `Reverse` so a plain ascending
    /// `Ord` ranks them alphabetically descending ("foo1" above "foo10").
    fn components(&self) -> (Stability, u32, Option<u32>, Option<Reverse<String>>) {
        match self {
            &Self::Stable(major) => (Stability::Stable, major, None, None),
            &Self::Beta(major, minor) => (Stability::Beta, major, minor, None),
            &Self::Alpha(major, minor) => (Stability::Alpha, major, minor, None),
            Self::Nonconformant(nonconformant) => (
                Stability::Nonconformant,
                0,
                None,
                Some(Reverse(nonconformant.clone())),
            ),
        }
    }

    /// An [`Ord`] for `Version` that orders by [Kubernetes version priority](https://kubernetes.io/docs/tasks/extend-kubernetes/custom-resources/custom-resource-definition-versioning/#version-priority)
    ///
    /// This order will favour stable versions over newer pre-releases and is used by `kubectl`.
    ///
    /// For example:
    ///
    /// ```
    /// # use kube_core::Version;
    /// assert!(Version::Stable(2).priority() > Version::Stable(1).priority());
    /// assert!(Version::Stable(1).priority() > Version::Beta(1, None).priority());
    /// assert!(Version::Stable(1).priority() > Version::Beta(2, None).priority());
    /// assert!(Version::Stable(2).priority() > Version::Alpha(1, Some(2)).priority());
    /// assert!(Version::Stable(1).priority() > Version::Alpha(2, Some(2)).priority());
    /// assert!(Version::Beta(1, None).priority() > Version::Nonconformant("ver3".into()).priority());
    /// ```
    ///
    /// Note that the type of release matters more than the version numbers:
    /// `Stable(x)` > `Beta(y)` > `Alpha(z)` > `Nonconformant(w)` for all `x`,`y`,`z`,`w`
    ///
    /// `Nonconformant` versions are ordered alphabetically.
    pub fn priority(&self) -> impl Ord {
        let (stability, major, minor, nonconformant) = self.components();
        Priority {
            stability,
            major,
            minor,
            nonconformant,
        }
    }

    /// An [`Ord`] for `Version` that orders by version generation
    ///
    /// This order will favour higher version numbers even if it's a pre-release.
    ///
    /// For example:
    ///
    /// ```
    /// # use kube_core::Version;
    /// assert!(Version::Stable(2).generation() > Version::Stable(1).generation());
    /// assert!(Version::Stable(1).generation() > Version::Beta(1, None).generation());
    /// assert!(Version::Beta(2, None).generation() > Version::Stable(1).generation());
    /// assert!(Version::Stable(2).generation() > Version::Alpha(1, Some(2)).generation());
    /// assert!(Version::Alpha(2, Some(2)).generation() > Version::Stable(1).generation());
    /// assert!(Version::Beta(1, None).generation() > Version::Nonconformant("ver3".into()).generation());
    /// ```
    pub fn generation(&self) -> impl Ord {
        let (stability, major, minor, nonconformant) = self.components();
        Generation {
            major,
            stability,
            minor,
            nonconformant,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::Version;
    use std::{cmp::Reverse, str::FromStr};

    #[test]
    fn test_stable() {
        assert_eq!(Version::parse("v1"), Version::Stable(1));
        assert_eq!(Version::parse("v3"), Version::Stable(3));
        assert_eq!(Version::parse("v10"), Version::Stable(10));
    }

    #[test]
    fn test_prerelease() {
        assert_eq!(Version::parse("v1beta"), Version::Beta(1, None));
        assert_eq!(Version::parse("v2alpha1"), Version::Alpha(2, Some(1)));
        assert_eq!(Version::parse("v10beta12"), Version::Beta(10, Some(12)));
    }

    fn check_not_parses(s: &str) {
        assert_eq!(Version::parse(s), Version::Nonconformant(s.to_string()))
    }

    #[test]
    fn test_nonconformant() {
        check_not_parses("");
        check_not_parses("foo");
        check_not_parses("v");
        check_not_parses("v-1");
        check_not_parses("valpha");
        check_not_parses("vbeta3");
        check_not_parses("vv1");
        check_not_parses("v1alpha1hi");
        check_not_parses("v1zeta3");
    }

    #[test]
    fn test_version_fromstr() {
        assert_eq!(
            Version::from_str("infallible").unwrap(),
            Version::Nonconformant("infallible".to_string())
        );
    }

    #[test]
    fn test_version_priority_ord() {
        // sorting makes sense from a "greater than" generation perspective:
        assert!(Version::Stable(2).priority() > Version::Stable(1).priority());
        assert!(Version::Stable(1).priority() > Version::Beta(1, None).priority());
        assert!(Version::Stable(1).priority() > Version::Beta(2, None).priority());
        assert!(Version::Stable(2).priority() > Version::Alpha(1, Some(2)).priority());
        assert!(Version::Stable(1).priority() > Version::Alpha(2, Some(2)).priority());
        assert!(Version::Beta(1, None).priority() > Version::Nonconformant("ver3".into()).priority());

        assert!(Version::Stable(2).priority() > Version::Stable(1).priority());
        assert!(Version::Stable(1).priority() > Version::Beta(2, None).priority());
        assert!(Version::Stable(1).priority() > Version::Beta(2, Some(2)).priority());
        assert!(Version::Stable(1).priority() > Version::Alpha(2, None).priority());
        assert!(Version::Stable(1).priority() > Version::Alpha(2, Some(3)).priority());
        assert!(Version::Stable(1).priority() > Version::Nonconformant("foo".to_string()).priority());
        assert!(Version::Beta(1, Some(1)).priority() > Version::Beta(1, None).priority());
        assert!(Version::Beta(1, Some(2)).priority() > Version::Beta(1, Some(1)).priority());
        assert!(Version::Beta(1, None).priority() > Version::Alpha(1, None).priority());
        assert!(Version::Beta(1, None).priority() > Version::Alpha(1, Some(3)).priority());
        assert!(Version::Beta(1, None).priority() > Version::Nonconformant("foo".to_string()).priority());
        assert!(Version::Beta(1, Some(2)).priority() > Version::Nonconformant("foo".to_string()).priority());
        assert!(Version::Alpha(1, Some(1)).priority() > Version::Alpha(1, None).priority());
        assert!(Version::Alpha(1, Some(2)).priority() > Version::Alpha(1, Some(1)).priority());
        assert!(Version::Alpha(1, None).priority() > Version::Nonconformant("foo".to_string()).priority());
        assert!(Version::Alpha(1, Some(2)).priority() > Version::Nonconformant("foo".to_string()).priority());
        assert!(
            Version::Nonconformant("bar".to_string()).priority()
                > Version::Nonconformant("foo".to_string()).priority()
        );
        assert!(
            Version::Nonconformant("foo1".to_string()).priority()
                > Version::Nonconformant("foo10".to_string()).priority()
        );

        // sort orders by default are ascending
        // sorting with std::cmp::Reverse on priority gives you the highest priority first
        let mut vers = vec![
            Version::Beta(2, Some(2)),
            Version::Stable(1),
            Version::Nonconformant("hi".into()),
            Version::Alpha(3, Some(2)),
            Version::Stable(2),
            Version::Beta(2, Some(3)),
        ];
        vers.sort_by_cached_key(|x| Reverse(x.priority()));
        assert_eq!(vers, vec![
            Version::Stable(2),
            Version::Stable(1),
            Version::Beta(2, Some(3)),
            Version::Beta(2, Some(2)),
            Version::Alpha(3, Some(2)),
            Version::Nonconformant("hi".into()),
        ]);
    }

    #[test]
    fn test_version_generation_ord() {
        assert!(Version::Stable(2).generation() > Version::Stable(1).generation());
        assert!(Version::Stable(1).generation() > Version::Beta(1, None).generation());
        assert!(Version::Stable(1).generation() < Version::Beta(2, None).generation());
        assert!(Version::Stable(2).generation() > Version::Alpha(1, Some(2)).generation());
        assert!(Version::Stable(1).generation() < Version::Alpha(2, Some(2)).generation());
        assert!(Version::Beta(1, None).generation() > Version::Nonconformant("ver3".into()).generation());

        assert!(Version::Stable(2).generation() > Version::Stable(1).generation());
        assert!(Version::Stable(1).generation() < Version::Beta(2, None).generation());
        assert!(Version::Stable(1).generation() < Version::Beta(2, Some(2)).generation());
        assert!(Version::Stable(1).generation() < Version::Alpha(2, None).generation());
        assert!(Version::Stable(1).generation() < Version::Alpha(2, Some(3)).generation());
        assert!(Version::Stable(1).generation() > Version::Nonconformant("foo".to_string()).generation());
        assert!(Version::Beta(1, Some(1)).generation() > Version::Beta(1, None).generation());
        assert!(Version::Beta(1, Some(2)).generation() > Version::Beta(1, Some(1)).generation());
        assert!(Version::Beta(1, None).generation() > Version::Alpha(1, None).generation());
        assert!(Version::Beta(1, None).generation() > Version::Alpha(1, Some(3)).generation());
        assert!(Version::Beta(1, None).generation() > Version::Nonconformant("foo".to_string()).generation());
        assert!(
            Version::Beta(1, Some(2)).generation() > Version::Nonconformant("foo".to_string()).generation()
        );
        assert!(Version::Alpha(1, Some(1)).generation() > Version::Alpha(1, None).generation());
        assert!(Version::Alpha(1, Some(2)).generation() > Version::Alpha(1, Some(1)).generation());
        assert!(
            Version::Alpha(1, None).generation() > Version::Nonconformant("foo".to_string()).generation()
        );
        assert!(
            Version::Alpha(1, Some(2)).generation() > Version::Nonconformant("foo".to_string()).generation()
        );
        assert!(
            Version::Nonconformant("bar".to_string()).generation()
                > Version::Nonconformant("foo".to_string()).generation()
        );
        assert!(
            Version::Nonconformant("foo1".to_string()).generation()
                > Version::Nonconformant("foo10".to_string()).generation()
        );

        // sort orders by default is ascending
        // sorting with std::cmp::Reverse on generation gives you the latest generation versions first
        let mut vers = vec![
            Version::Beta(2, Some(2)),
            Version::Stable(1),
            Version::Nonconformant("hi".into()),
            Version::Alpha(3, Some(2)),
            Version::Stable(2),
            Version::Beta(2, Some(3)),
        ];
        vers.sort_by_cached_key(|x| Reverse(x.generation()));
        assert_eq!(vers, vec![
            Version::Alpha(3, Some(2)),
            Version::Stable(2),
            Version::Beta(2, Some(3)),
            Version::Beta(2, Some(2)),
            Version::Stable(1),
            Version::Nonconformant("hi".into()),
        ]);
    }
}
// Copyright (c) Facebook, Inc. and its affiliates. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::path::PathBuf; use std::{env, process::Command}; macro_rules! get(($name:expr) => (ok!(env::var($name)))); macro_rules! ok(($expression:expr) => ($expression.unwrap())); pub const FIAT_REPO: &str = "https://github.com/mit-plv/fiat-crypto.git"; pub const FIAT_HASH: &str = "c96f983228d08c74254004b0bc101d3f6ff8b051"; pub const FIAT_FILE_HASH: [u8; 32] = [ 0xde, 0x8f, 0x2c, 0x6a, 0x1d, 0x20, 0xc2, 0x53, 0x81, 0x85, 0x82, 0xfc, 0x6c, 0x78, 0x91, 0x96, 0x27, 0xd3, 0xf, 0xc7, 0x5a, 0x27, 0x7b, 0x57, 0x5c, 0xb1, 0x58, 0x5d, 0x6b, 0xd9, 0xa2, 0x2d, ]; use sha2::{Digest, Sha256}; use std::{fs, io}; fn run<F>(name: &str, mut configure: F) where F: FnMut(&mut Command) -> &mut Command, { let mut command = Command::new(name); let configured = configure(&mut command); if !ok!(configured.status()).success() { panic!("failed to execute {:?}", configured); } } fn main() { // this folder let basedir = PathBuf::from(&get!("CARGO_MANIFEST_DIR")); let src_dir = basedir.join("src"); println!("Dir is: {:?}", basedir); // the fiat-crypto submodule let fiat_crypto = basedir.join("external").join("fiat-crypto"); // this ised to do a submodule update, see // https://github.com/google/shaderc-rs/issues/15 for why it does not any // more let src_path = fiat_crypto .join("fiat-rust") .join("src") .join("curve25519_64.rs"); let cond = if let Ok(mut file) = fs::File::open(src_path) { let mut 
sha256 = Sha256::new(); io::copy(&mut file, &mut sha256).expect("failed to copy file"); let hash = sha256.result(); hash[..] == FIAT_FILE_HASH } else { false }; // Go to the fiat directory if !cond && !fiat_crypto.join(".git").exists() { std::fs::remove_dir_all(&fiat_crypto).expect("could not remove fiat-crypto"); std::fs::create_dir_all(&basedir.join("external")) .expect("could not create external subdirectory"); assert!(env::set_current_dir(&basedir.join("external")).is_ok()); run("git", |command| { command .arg("clone") .arg("--recurse-submodules") .arg("https://github.com/mit-plv/fiat-crypto.git") .arg("fiat-crypto") }) } assert!(env::set_current_dir(&fiat_crypto).is_ok()); if !cond { // Checkout a particular dalek commit run("git", |command| command.arg("checkout").arg(FIAT_HASH)); } let long_file = fiat_crypto.join("coqprime").join("src").join("Coqprime").join("examples").join("prime216656403549020227250327256032933021325435259861468456540459488823774358486649614451547405419273433458932168893949521787.v"); let diag = format!("could not remove unarchivable file at {:?}", &long_file); if long_file.exists() { std::fs::remove_file(long_file).expect(&diag[..]); }; // Go to the base directory assert!(env::set_current_dir(&basedir).is_ok()); // copy the curve25519_64 file from fiat to src/ let origin_file = fiat_crypto .join("fiat-rust") .join("src") .join("curve25519_64.rs"); let diag = format!( "could not copy source file from {:?} to {:?}", &origin_file, &src_dir, ); std::fs::copy(origin_file, &src_dir.join("curve25519_64.rs")).expect(&diag[..]); }
use parser::{Expression, WhileCondition, Statement}; use instructions::Instructions; use memory::MemoryLayout; use super::errors::Error; use super::statements::expand; use super::input::read_input; pub fn while_loop( instructions: &mut Instructions, mem: &mut MemoryLayout, condition: WhileCondition, body: Vec<Statement>, ) -> Result<(), Error> { loop_condition(instructions, mem, &condition)?; instructions.jump_forward_if_zero(); loop_body(instructions, mem, body)?; loop_condition(instructions, mem, &condition)?; instructions.jump_backward_unless_zero(); Ok(()) } pub fn loop_condition( instructions: &mut Instructions, mem: &mut MemoryLayout, condition: &WhileCondition, ) -> Result<(), Error> { let size = match condition { &WhileCondition::Input {ref name, slice} => { // We only need to undeclare if we did the declaration in the loop condition if slice.is_some() { // Since we're going to read input anyway, we don't need to // zero out this cell because it will be completely overwritten in the read mem.undeclare(name); } read_input(instructions, mem, name.clone(), slice)?; let (position, size) = mem.get_cell_contents(name).expect("read_input didn't declare name"); // Make sure we're at the right cell for the result of the condition instructions.move_right_by(position); size }, &WhileCondition::Expression(Expression::StringLiteral(_)) => { return Err(Error::LoopStringLiteralUnsupported {}); }, &WhileCondition::Expression(Expression::Identifier(ref name)) => { let (position, size) = mem.get_cell_contents(name).ok_or_else(|| Error::UndeclaredIdentifier {name: name.clone()})?; // Make sure we're at the right cell for the result of the condition instructions.move_right_by(position); size }, }; if size != 1 { return Err(Error::ConditionSizeInvalid { expected: 1, actual: size, }); } Ok(()) } fn loop_body( instructions: &mut Instructions, mem: &mut MemoryLayout, body: Vec<Statement>, ) -> Result<(), Error> { for stmt in body { expand(instructions, mem, stmt)?; } Ok(()) }
use std::arch::x86_64::CpuidResult; use std::io::{Read, Write}; use std::io::{stdin, stdout}; use std::process::Command; fn main () { loop { print!("🥺 "); stdout().flush().unwrap(); let mut input = String::new(); stdin() .read_line(&mut input) .expect("Unable to read input"); let command = input.trim(); let args: Vec<&str> = command.split_whitespace().collect(); let mut child = Command::new(args[0]) .args(&args[1..]) .spawn() .expect("Unable to run command"); child.wait().unwrap(); } }
pub mod collide; pub mod quickhull;
use rand::{Rng, SeedableRng}; use rand::distributions::Standard; use rand_pcg::Pcg64Mcg; pub fn new_u32_vec(n: usize) -> Vec<u32> { // init RNG let mut rng = Pcg64Mcg::from_seed([0; 16]); rng.sample_iter(&Standard).take(n).collect() } // listが昇順かの確認 pub fn is_sorted_ascending<T: Ord>(x: &[T]) -> bool { // windows(2) はリストを1要素刻みで2要素づつ返す // [ 1, 2 ,3 ,4 ] なら [1, 2] [2, 3] [3, 4] を返す x.windows(2).all(|pair| pair[0] <= pair[1]) } pub fn is_sorted_decending<T: Ord>(x: &[T]) -> bool { x.windows(2).all(|pair| pair[0] >= pair[1]) } mod tests { use crate::utils::{new_u32_vec,is_sorted_ascending,is_sorted_decending}; use crate::third::sort; use crate::SortOrder::*; #[test] fn sort_32_large() { { let mut x = new_u32_vec(65536); assert_eq!(sort(&mut x,&Ascending),Ok(())); assert!(is_sorted_ascending(&x)) } { let mut x = new_u32_vec(65536); assert_eq!(sort(&mut x,&Decending),Ok(())); assert!(is_sorted_decending(&x)) } } }
// 1.2 Check Permutation fn check_permutation(string_a: &str, string_b: &str) -> bool { // Expects ascii characters. O(a + b) time, O(1) extra space // Similar to ex1.1's solution, we keep one character counter for each // string. In the end, we compare the counters; permutations should // generate the same counter array. if string_a.len() != string_b.len() { return false; } let mut char_counter_a: [u8; 256] = [0; 256]; let mut char_counter_b: [u8; 256] = [0; 256]; for character in string_a.bytes() { let index: usize = character as usize; char_counter_a[index] += 1; } for character in string_b.bytes() { let index: usize = character as usize; char_counter_b[index] += 1; } for index in 0..256 { if char_counter_a[index] != char_counter_b[index] { return false; } } true } #[test] fn test() { assert!(check_permutation("", "")); assert!(check_permutation("a", "a")); assert!(check_permutation("ab", "ba")); assert!(!check_permutation("aa", "ab")); assert!(!check_permutation("aa", "a")); println!("Ex 1.2 ok!"); }
use crate::{ errors::SmileError, models::{post::Post, user::User}, schema::comment, }; use chrono::prelude::*; use diesel::{delete, insert_into, prelude::*, update as UpdateDiesel}; #[derive(Debug, Associations, Clone, Queryable, Identifiable, Serialize, Deserialize, PartialEq)] #[belongs_to(Post, foreign_key = "postId")] #[belongs_to(User, foreign_key = "userId")] #[belongs_to(Comment, foreign_key = "reply_for_id")] #[table_name = "comment"] pub struct Comment { pub id: i32, pub postId: i32, pub userId: Option<String>, pub createdAt: Option<NaiveDateTime>, pub content: Option<String>, pub reply_for_id: Option<i32>, } #[derive( Debug, Clone, Insertable, Serialize, AsChangeset, Deserialize, PartialEq, juniper::GraphQLInputObject, )] #[table_name = "comment"] pub struct CommentInput { pub postId: i32, pub userId: Option<String>, pub content: Option<String>, pub reply_for_id: Option<i32>, } impl Comment { pub fn input(input: CommentInput, connection: &MysqlConnection) -> Result<bool, SmileError> { insert_into(comment::table) .values(input) .execute(connection) .map(|_| true) .map_err(SmileError::from) } pub fn update( input: CommentInput, commentId: i32, connection: &MysqlConnection, ) -> Result<bool, SmileError> { use crate::schema::comment::dsl::*; let result = UpdateDiesel( comment .filter(id.eq(commentId)) .filter(postId.eq(&input.postId)) .filter(userId.eq(&input.userId)), ) .set(content.eq(&input.content)) .execute(connection) .map_err(SmileError::from)?; return if result == 0 { Err(SmileError::AccessDenied) } else { Ok(true) }; } pub fn delete( user_id: &String, commentId: i32, connection: &MysqlConnection, ) -> Result<bool, SmileError> { use crate::schema::comment::dsl::*; delete(comment.filter(id.eq(commentId)).filter(userId.eq(user_id))) .execute(connection) .map(|_| true) .map_err(SmileError::from) } }
use super::cpu;
use super::ppu;
use super::apu;
use super::cartridge;
use std::fs::{DirEntry, File};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};

/// Serializable snapshot of the whole machine: CPU plus the PPU, APU and
/// cartridge mapper it owns.
#[derive(serde::Serialize, serde::Deserialize)]
struct SaveState {
    cpu: cpu::serialize::CpuData,
    ppu: ppu::serialize::PpuData,
    apu: apu::serialize::ApuData,
    mapper: cartridge::serialize::MapperData,
}

/// Serializes the current emulator state to `save_file` as JSON.
///
/// Returns an error string if serialization or the final write fails;
/// panics if the output file cannot be created.
pub fn save_state(cpu: &cpu::Cpu, save_file: &PathBuf) -> Result<(), String> {
    let data = SaveState{
        cpu: cpu.save_state(),
        ppu: cpu.ppu.save_state(),
        apu: cpu.apu.save_state(),
        mapper: cpu.mapper.borrow().save_state(),
    };
    let serialized = serde_json::to_string(&data)
        .map_err(|e| e.to_string())?;
    let mut f = File::create(&save_file)
        .expect("could not create output file for save state");
    f.write_all(serialized.as_bytes())
        .map_err(|_| "couldn't write serialized data to file".to_string())?;
    println!("state saved to file: {:?}", save_file);
    Ok(())
}

/// Restores emulator state from the JSON produced by [`save_state`].
///
/// Returns an error string if the file is missing, unreadable, or does not
/// deserialize into a `SaveState`.
pub fn load_state(cpu: &mut cpu::Cpu, save_file: &PathBuf) -> Result<(), String> {
    if Path::new(&save_file).exists() {
        let mut f = File::open(save_file.clone())
            .map_err(|e| e.to_string())?;
        let mut serialized_data = vec![];
        f.read_to_end(&mut serialized_data)
            .map_err(|e| e.to_string())?;
        let serialized_string = std::str::from_utf8(&serialized_data)
            .map_err(|e| e.to_string())?;
        let state: SaveState = serde_json::from_str(serialized_string)
            .map_err(|e| e.to_string())?;
        // Apply the pieces in the same grouping they were captured in.
        cpu.load_state(state.cpu);
        cpu.ppu.load_state(state.ppu);
        cpu.apu.load_state(state.apu);
        cpu.mapper.borrow_mut().load_state(state.mapper);
        println!("loading save state from file: {:?}", save_file);
        Ok(())
    } else {
        Err(format!("no save state file at {:?}", save_file))
    }
}

/// Returns the first `{stem}-{i}.{ext}` path (i = 0, 1, 2, ...) in
/// `filepath`'s directory that does not exist yet. `new_ext` overrides the
/// extension when given. Returns `None` if any path component is not valid
/// UTF-8 (or there is no extension to fall back on).
pub fn find_next_filename(filepath: &PathBuf, new_ext: Option<&str>) -> Option<PathBuf> {
    // An empty parent means `filepath` is a bare relative name; use ".".
    let path = match filepath.parent()?.to_str()?
    {
        "" => ".",
        x => x,
    };
    let stem = filepath.file_stem()?.to_str()?;
    let ext = new_ext.or(Some(filepath.extension()?.to_str()?)).unwrap();
    let sep = std::path::MAIN_SEPARATOR.to_string();
    let mut i = 0;
    // Probe sequentially until an unused name is found.
    loop {
        let current_name = format!("{}{}{}-{}.{}", path, sep, stem, i, ext);
        let save_file = PathBuf::from(&current_name);
        if !save_file.exists() {
            return Some(save_file)
        }
        i += 1;
    }
}

/// Finds the most recently modified file next to `filepath` whose name
/// starts with `filepath`'s stem and ends with the given extension
/// (`new_ext`, or `filepath`'s own extension).
///
/// NOTE(review): matching slices the name by byte offsets, which assumes
/// ASCII filenames (a multi-byte UTF-8 name could panic on a char
/// boundary); the prefix test could also match unrelated files sharing the
/// stem — confirm acceptable.
pub fn find_last_save_state(filepath: &PathBuf, new_ext: Option<&str>) -> Option<PathBuf> {
    let path = match filepath.parent()?.to_str()? {
        "" => Path::new("."),
        _ => filepath.parent()?,
    };
    let stem = filepath.file_stem()?.to_str()?;
    let ext = new_ext.or(Some(filepath.extension()?.to_str()?)).unwrap();
    let files = std::fs::read_dir(path).expect("couldn't read directory");
    let mut save_states = files
        .filter(|dir_entry| {
            let os_name = dir_entry.as_ref().unwrap().file_name();
            let name = os_name.to_str().unwrap();
            name.len() >= stem.len() && name.len() >= ext.len()
                && &name[..stem.len()] == stem
                && &name[name.len()-ext.len()..] == ext
        })
        .collect::<Vec<std::io::Result<DirEntry>>>();
    save_states.sort_by(|a, b| {
        let a_mod_time = a.as_ref().unwrap().metadata().unwrap().modified().unwrap();
        let b_mod_time = b.as_ref().unwrap().metadata().unwrap().modified().unwrap();
        b_mod_time.cmp(&a_mod_time) // puts in reverse order by last modified time
    });
    match save_states.len() {
        0 => None,
        _ => Some(save_states[0].as_ref().unwrap().path()),
    }
}
#[derive(Debug, Clone, Copy)] struct Range { lower: u32, upper: u32, } #[derive(Debug, Clone, Copy)] struct Seat { row: Range, col: Range, } pub fn a(input: String) -> String { let max = input .lines() .map(|l| l.chars()) .map(|chars| { let take_upper = |mut r: &mut Range| r.lower += (r.upper - r.lower) / 2 + 1; let take_lower = |mut r: &mut Range| r.upper -= (r.upper - r.lower) / 2 + 1; let mut seat = Seat { row: Range { lower: 0, upper: 127, }, col: Range { lower: 0, upper: 7 }, }; seat = chars.fold(seat, |mut s, c| { match c { 'F' => take_lower(&mut s.row), 'B' => take_upper(&mut s.row), 'L' => take_lower(&mut s.col), 'R' => take_upper(&mut s.col), _ => unreachable!(), }; s }); 8 * seat.row.lower + seat.col.upper }) .max() .unwrap(); format!("{}", max) } pub fn b(input: String) -> String { let ids = input .lines() .map(|l| l.chars()) .map(|chars| { let take_upper = |mut r: &mut Range| r.lower += (r.upper - r.lower) / 2 + 1; let take_lower = |mut r: &mut Range| r.upper -= (r.upper - r.lower) / 2 + 1; let mut seat = Seat { row: Range { lower: 0, upper: 127, }, col: Range { lower: 0, upper: 7 }, }; seat = chars.fold(seat, |mut s, c| { match c { 'F' => take_lower(&mut s.row), 'B' => take_upper(&mut s.row), 'L' => take_lower(&mut s.col), 'R' => take_upper(&mut s.col), _ => unreachable!(), }; s }); 8 * seat.row.lower + seat.col.upper }) .collect::<Vec<u32>>(); let max = ids.iter().max().unwrap(); let seat_id = (1..=*max) .find(|id| !ids.contains(&id) && ids.contains(&(id - 1)) && ids.contains(&(id + 1))) .unwrap(); format!("{}", seat_id) }
mod point; use std::{ fmt, ops::{Add, Deref, Sub}, }; pub use self::point::*; /// A struct representing a location in time as milliseconds (i32) #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct Millis(pub i32); impl Millis { /// Converts from seconds to Milliseconds pub fn from_seconds(secs: f64) -> Millis { Millis((secs * 1000.0) as i32) } /// Converts this Milliseconds to seconds pub fn as_seconds(&self) -> f64 { self.0 as f64 / 1000.0 } } impl fmt::Display for Millis { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_fmt(format_args!("{}ms", self.0)) } } impl From<i32> for Millis { fn from(v: i32) -> Self { Self(v) } } impl Deref for Millis { type Target = i32; fn deref(&self) -> &Self::Target { &self.0 } } impl Add<Millis> for Millis { type Output = Millis; fn add(self, rhs: Millis) -> Self::Output { Millis(self.0 + rhs.0) } } impl Sub<Millis> for Millis { type Output = i32; fn sub(self, rhs: Millis) -> Self::Output { self.0 - rhs.0 } }
// Copyright 2017 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. //! //! A compiler of ttrpc-rust. //! //! *generate rust version ttrpc code from proto files.* //! //! //! Usage //! //!- [Manual Generation](https://github.com/containerd/ttrpc-rust#1-generate-with-protoc-command) uses ttrpc-compiler as a protoc plugin //! //!- [Programmatic Generation](https://github.com/containerd/ttrpc-rust#2-generate-programmatically) uses ttrpc-compiler as a rust crate pub mod codegen; pub mod prost_codegen; mod util; /// Customize generated code. #[derive(Default, Debug, Clone)] pub struct Customize { /// Indicates whether to generate async code for both server and client. pub async_all: bool, /// Indicates whether to generate async code for client. pub async_client: bool, /// Indicates whether to generate async code for server. pub async_server: bool, }
#![no_std] //! Asynchronous versions of HAL support. //! //! This module uses the built-in Rust language support for asynchronous programming. //! //! This module is unfortunately not called `async`, because that's a reserved keyword. pub mod gpio; pub mod i2c; pub mod io; pub mod prelude; pub mod serial; pub mod spi; pub mod timer;
#![feature(fn_traits)] pub mod collections; pub mod enums; pub mod guessing_game; pub mod hello_cargo; pub mod modules; pub mod ownership; pub mod slices; pub mod structs; pub mod variables;
use crate::{error::ServerError, server::Session, x509::ClientCertificateRetriever, App};
use bytes::Bytes;
use bytestring::ByteString;
use drogue_cloud_endpoint_common::{
    sender::{Publish, PublishOutcome, Publisher},
    sink::Sink,
};
use drogue_cloud_service_api::auth::device::authn::Outcome as AuthOutcome;
use drogue_cloud_service_common::Id;
use ntex_mqtt::{
    types::QoS,
    v3,
    v5::{
        self,
        codec::{Auth, ConnectAckReason, DisconnectReasonCode, PublishAckReason},
    },
};
use std::fmt::Debug;

// Topic the device subscribes to in order to receive commands.
const TOPIC_COMMAND_INBOX: &str = "command/inbox";
const TOPIC_COMMAND_INBOX_PATTERN: &str = "command/inbox/#";
// const TOPIC_COMMAND_OUTBOX: &str = "command/outbox";

// Shared CONNECT handling for v3 and v5: authenticate the device with the
// credentials from the CONNECT packet (plus any client certificates) and, on
// success, build the per-connection `Session`. Returns `Err("Failed")` on
// authentication failure; the callers map that to the protocol-specific NACK.
macro_rules! connect {
    ($connect:expr, $app:expr, $certs:expr) => {{
        log::info!("new connection: {:?}", $connect);
        match $app
            .authenticate(
                &$connect.packet().username,
                &$connect.packet().password,
                &$connect.packet().client_id,
                $certs,
            )
            .await?
        {
            AuthOutcome::Pass {
                application,
                device,
                r#as: _,
            } => {
                let app_id = application.metadata.name.clone();
                let device_id = device.metadata.name.clone();
                let session = Session::new(
                    $app.downstream,
                    application,
                    Id::new(app_id.clone(), device_id.clone()),
                    $app.commands.clone(),
                );
                Ok(session)
            }
            AuthOutcome::Fail => Err("Failed"),
        }
    }};
}

/// Handles an MQTT v3.1.1 CONNECT handshake.
///
/// On authentication failure the connection is acknowledged with
/// "bad user name or password" rather than returning an error.
pub async fn connect_v3<Io, S>(
    mut connect: v3::Handshake<Io>,
    app: App<S>,
) -> Result<v3::HandshakeAck<Io, Session<S>>, ServerError>
where
    Io: ClientCertificateRetriever + 'static,
    S: Sink,
{
    let certs = connect.io().client_certs();
    log::debug!("Certs: {:?}", certs);
    // handle connect
    match connect!(connect, app, certs) {
        Ok(session) => Ok(connect.ack(session, false)),
        Err(_) => Ok(connect.bad_username_or_pwd()),
    }
}

/// Handles an MQTT v5 CONNECT handshake.
///
/// On success the CONNACK advertises that wildcard subscriptions are NOT
/// available (only the exact command-inbox pattern is accepted, see
/// `subscribe!`).
pub async fn connect_v5<Io, S>(
    mut connect: v5::Handshake<Io>,
    app: App<S>,
) -> Result<v5::HandshakeAck<Io, Session<S>>, ServerError>
where
    Io: ClientCertificateRetriever + 'static,
    S: Sink,
{
    let certs = connect.io().client_certs();
    log::debug!("Certs: {:?}", certs);
    match connect!(connect, app, certs) {
        Ok(session) => Ok(connect.ack(session).with(|ack| {
            ack.wildcard_subscription_available = Some(false);
        })),
        Err(_) => Ok(connect.failed(ConnectAckReason::BadUserNameOrPassword)),
    }
}

// Shared PUBLISH handling for v3 and v5: forward the message downstream,
// using the topic path as the channel. Yields a future; the outcome mapping
// to protocol responses is done by the callers.
macro_rules! publish {
    ($session: expr, $publish:expr) => {{
        log::debug!(
            "incoming publish: {:?} -> {:?} / {:?}",
            $publish.id(),
            $publish.topic(),
            $publish.packet(),
        );
        let channel = $publish.topic().path();
        let id = $session.device_id.clone();
        $session.state().sender.publish(
            Publish {
                channel: channel.into(),
                application: &$session.application,
                device_id: id.device_id,
                options: Default::default(),
            },
            $publish.payload(),
        )
    }};
}

/// Handles an incoming v3 PUBLISH.
///
/// v3 has no per-publish negative acknowledgement, so any non-accepted
/// outcome is surfaced as a `ServerError`, which closes the connection.
pub async fn publish_v3<S>(
    session: v3::Session<Session<S>>,
    publish: v3::Publish,
) -> Result<(), ServerError>
where
    S: Sink,
{
    match publish!(session, publish).await {
        Ok(PublishOutcome::Accepted) => Ok(()),
        Ok(PublishOutcome::Rejected) => Err(ServerError {
            // with MQTTv3, we can only close the connection
            msg: "Rejected".into(),
        }),
        Ok(PublishOutcome::QueueFull) => Err(ServerError {
            // with MQTTv3, we can only close the connection
            msg: "QueueFull".into(),
        }),
        Err(e) => Err(ServerError { msg: e.to_string() }),
    }
}

/// Handles an incoming v5 PUBLISH.
///
/// v5 has PUBACK reason codes, so rejection and back-pressure are reported
/// to the device without closing the connection.
pub async fn publish_v5<S>(
    session: v5::Session<Session<S>>,
    publish: v5::Publish,
) -> Result<v5::PublishAck, ServerError>
where
    S: Sink,
{
    match publish!(session, publish).await {
        Ok(PublishOutcome::Accepted) => Ok(publish.ack()),
        Ok(PublishOutcome::Rejected) => Ok(publish
            .ack()
            .reason_code(PublishAckReason::UnspecifiedError)),
        Ok(PublishOutcome::QueueFull) => {
            Ok(publish.ack().reason_code(PublishAckReason::QuotaExceeded))
        }
        Err(e) => Err(ServerError { msg: e.to_string() }),
    }
}

// Shared SUBSCRIBE handling: only the command-inbox pattern may be
// subscribed to. For it, a task is spawned that pumps received commands to
// the device sink; every other topic is failed via the protocol-specific
// `$fail` closure. NOTE(review): `cmd.payload.unwrap()` will panic on a
// command without a payload — confirm upstream guarantees a payload.
macro_rules! subscribe {
    ($s: expr, $session: expr, $fail: expr) => {{
        for mut sub in $s.iter_mut() {
            if sub.topic() == TOPIC_COMMAND_INBOX_PATTERN {
                let device_id = $session.state().device_id.clone();
                let mut rx = $session.state().commands.subscribe(device_id.clone()).await;
                let sink = $session.sink().clone();
                ntex::rt::spawn(async move {
                    while let Some(cmd) = rx.recv().await {
                        match sink
                            .publish(
                                ByteString::from(format!(
                                    "{}/{}",
                                    TOPIC_COMMAND_INBOX, cmd.command
                                )),
                                Bytes::from(cmd.payload.unwrap()),
                            )
                            .send_at_least_once()
                            .await
                        {
                            Ok(_) => {
                                log::debug!("Command sent to device subscription {:?}", device_id)
                            }
                            Err(e) => log::error!(
                                "Failed to send a command to device subscription {:?}",
                                e
                            ),
                        }
                    }
                });
                sub.subscribe(QoS::AtLeastOnce);
                log::debug!(
                    "Device '{:?}' subscribed to receive commands",
                    $session.state().device_id
                );
            } else {
                log::info!("Subscribing to topic {:?} not allowed", sub.topic());
                $fail(sub);
            }
        }
        Ok($s.ack())
    }};
}

// Shared teardown: drop the command subscription for this device, log with
// the caller-supplied format string, and acknowledge the control packet.
macro_rules! unsubscribe {
    ($ack: expr, $session: expr, $log: expr) => {{
        $session
            .state()
            .commands
            .unsubscribe(&$session.state().device_id)
            .await;
        log::debug!($log, $session.state().device_id);
        Ok($ack.ack())
    }};
}

/// Dispatches v3 control packets (ping/subscribe/unsubscribe/disconnect/closed).
pub async fn control_v3<S>(
    session: v3::Session<Session<S>>,
    control: v3::ControlMessage,
) -> Result<v3::ControlResult, ServerError>
where
    S: Sink,
{
    match control {
        v3::ControlMessage::Ping(p) => Ok(p.ack()),
        v3::ControlMessage::Disconnect(d) => unsubscribe!(d, session, "Disconnecting device {:?}"),
        v3::ControlMessage::Subscribe(mut s) => {
            subscribe!(s, session, |mut sub: v3::control::Subscription| sub.fail())
        }
        v3::ControlMessage::Unsubscribe(u) => unsubscribe!(u, session, "Unsubscribing device {:?}"),
        v3::ControlMessage::Closed(c) => unsubscribe!(c, session, "Closing device connection {:?}"),
    }
}

/// Dispatches v5 control packets; disallowed subscriptions are failed with
/// `NotAuthorized` instead of closing the connection.
pub async fn control_v5<E: Debug, S>(
    session: v5::Session<Session<S>>,
    control: v5::ControlMessage<E>,
) -> Result<v5::ControlResult, ServerError>
where
    S: Sink,
{
    match control {
        v5::ControlMessage::Auth(a) => Ok(a.ack(Auth::default())),
        v5::ControlMessage::Error(e) => Ok(e.ack(DisconnectReasonCode::UnspecifiedError)),
        v5::ControlMessage::ProtocolError(pe) => Ok(pe.ack()),
        v5::ControlMessage::Ping(p) => Ok(p.ack()),
        v5::ControlMessage::Disconnect(d) => unsubscribe!(d, session, "Disconnecting device {:?}"),
        v5::ControlMessage::Subscribe(mut s) => {
            subscribe!(s, session, |mut sub: v5::control::Subscription| sub
                .fail(v5::codec::SubscribeAckReason::NotAuthorized))
        }
        v5::ControlMessage::Unsubscribe(u) => unsubscribe!(u, session, "Unsubscribing device {:?}"),
        v5::ControlMessage::Closed(c) => unsubscribe!(c, session, "Closing device connection {:?}"),
    }
}
//! Benchmark for neptune's `ColumnTreeBuilder` (optionally GPU-batched).

use ff::Field;
use generic_array::sequence::GenericSequence;
use generic_array::typenum::{U11, U8};
use generic_array::GenericArray;
use log::info;
use neptune::batch_hasher::BatcherType;
use neptune::column_tree_builder::{ColumnTreeBuilder, ColumnTreeBuilderTrait};
use neptune::error::Error;
use neptune::BatchHasher;
use paired::bls12_381::Fr;
use std::result::Result;
use std::time::Instant;

/// Builds a column tree over `leaves` constant (all-zero) columns, timing the
/// whole commitment, and cross-checks the resulting root and tree size
/// against `compute_uniform_tree_root`/`tree_size`. Returns the root.
fn bench_column_building(
    batcher_type: Option<BatcherType>,
    leaves: usize,
    max_column_batch_size: usize,
    max_tree_batch_size: usize,
) -> Fr {
    info!("Creating ColumnTreeBuilder");
    let mut builder = ColumnTreeBuilder::<U11, U8>::new(
        batcher_type,
        leaves,
        max_column_batch_size,
        max_tree_batch_size,
    )
    .unwrap();
    info!("ColumnTreeBuilder created");

    // Simplify computing the expected root.
    let constant_element = Fr::zero();
    let constant_column = GenericArray::<Fr, U11>::generate(|_| constant_element);

    // Without a batcher, feed everything in one batch.
    let max_batch_size = if let Some(batcher) = &builder.column_batcher {
        batcher.max_batch_size()
    } else {
        leaves
    };
    let effective_batch_size = usize::min(leaves, max_batch_size);
    info!(
        "Using effective batch size {} to build columns",
        effective_batch_size
    );

    info!("adding column batches");
    info!("start commitment");
    let start = Instant::now();

    // Add full batches with `add_columns`; `<` (not `<=`) guarantees at least
    // one column is left over for the final `add_final_columns` call below.
    let mut total_columns = 0;
    while total_columns + effective_batch_size < leaves {
        print!(".");
        let columns: Vec<GenericArray<Fr, U11>> =
            (0..effective_batch_size).map(|_| constant_column).collect();
        let _ = builder.add_columns(columns.as_slice()).unwrap();
        total_columns += columns.len();
    }
    println!("");

    let final_columns: Vec<_> = (0..leaves - total_columns)
        .map(|_| GenericArray::<Fr, U11>::generate(|_| constant_element))
        .collect();

    info!("adding final column batch and building tree");
    let (_, res) = builder.add_final_columns(final_columns.as_slice()).unwrap();
    info!("end commitment");
    let elapsed = start.elapsed();
    info!("commitment time: {:?}", elapsed);
    total_columns += final_columns.len();
    assert_eq!(total_columns, leaves);

    // The root is the last element of the flattened result tree.
    let computed_root = res[res.len() - 1];
    let expected_root = builder.compute_uniform_tree_root(final_columns[0]).unwrap();
    let expected_size = builder.tree_size();
    assert_eq!(
        expected_size,
        res.len(),
        "result tree was not expected size"
    );
    assert_eq!(
        expected_root, computed_root,
        "computed root was not the expected one"
    );
    res[res.len() - 1]
}

fn main() -> Result<(), Error> {
    env_logger::init();
    // 4 GiB of 32-byte leaves.
    let kib = 1024 * 1024 * 4; // 4GiB
    // let kib = 1024 * 512; // 512MiB
    let bytes = kib * 1024;
    let leaves = bytes / 32;
    let max_column_batch_size = 400000;
    let max_tree_batch_size = 700000;
    info!("KiB: {}", kib);
    info!("leaves: {}", leaves);
    info!("max column batch size: {}", max_column_batch_size);
    info!("max tree batch size: {}", max_tree_batch_size);
    // Three runs to smooth out warm-up effects.
    for i in 0..3 {
        println!("--> Run {}", i);
        bench_column_building(
            Some(BatcherType::GPU),
            leaves,
            max_column_batch_size,
            max_tree_batch_size,
        );
    }
    info!("end");
    // Leave time to verify GPU memory usage goes to zero before exiting.
    std::thread::sleep(std::time::Duration::from_millis(15000));
    Ok(())
}
use std::collections::HashMap;
use std::io;
use std::io::BufRead;

/// A parsed user command for the employee-directory REPL.
enum Command {
    /// Add `name` to `department`.
    Add { department: String, name: String },
    /// List the employees of one department.
    List(String),
    /// List every employee, grouped by department.
    All,
    /// Exit the program.
    Quit,
}

impl Command {
    /// Parses one input line into a `Command`.
    ///
    /// Returns `None` for anything that doesn't match a known form. Names and
    /// departments are single whitespace-delimited words, so
    /// "Add Amir Smith to Sales" does not parse.
    fn from_input(s: &str) -> Option<Self> {
        let words: Vec<&str> = s.trim().split_whitespace().collect();
        match words.as_slice() {
            ["All"] => Some(Command::All),
            ["Quit"] => Some(Command::Quit),
            ["List", department] => Some(Command::List(department.to_string())),
            ["Add", name, "to", department] => Some(Command::Add {
                name: name.to_string(),
                department: department.to_string(),
            }),
            _ => None,
        }
    }
}

fn main() {
    // Fixed typo: `emloyees` -> `employees`.
    let mut employees: HashMap<String, Vec<String>> = HashMap::new();
    let stdin = io::stdin();

    println!("Type 'Add <name> to <department>' to add an employee");
    println!("Type 'List <department>' to list the employees of a department");
    println!("Type 'All' to list all employees by department");
    println!("Type 'Quit' to quit");

    for line in stdin.lock().lines() {
        let input = line.expect("[Error]: unable to read user input");
        match Command::from_input(&input) {
            None => println!("[Error]: input error"),
            Some(Command::Add { name, department }) => {
                employees.entry(department).or_default().push(name)
            }
            Some(Command::List(department)) => match employees.get(&department) {
                Some(names) => {
                    for name in names {
                        println!("{}: {}", department, name);
                    }
                }
                None => println!("I don't recognize that department!"),
            },
            Some(Command::All) => {
                // Sort departments as well as names so the full listing is
                // deterministic (HashMap iteration order is unspecified).
                let mut departments: Vec<&String> = employees.keys().collect();
                departments.sort();
                for department in departments {
                    let mut names = employees[department].clone();
                    names.sort();
                    for name in names {
                        println!("{}: {}", department, name);
                    }
                }
            }
            Some(Command::Quit) => break,
        }
    }

    println!("Have a nice day!");
}
//! Build script: parses TeX hyphenation pattern/exception files and
//! serializes the resulting tries with bincode for embedding as resources.

extern crate bincode;
extern crate pocket_resources;
extern crate serde;
extern crate unicode_normalization;

extern crate hyphenation_commons;

use std::env;
use std::error;
use std::fmt;
use std::fs::{self, File};
use std::io;
use std::io::prelude::*;
use std::path::Path;
use serde::ser;
use bincode::SizeLimit;
use bincode::serde as bin;
use hyphenation_commons::*;

// User configuration
use configurable::*;

mod configurable {
    use unicode_normalization::*;
    use std::str::Chars;

    // In service of configurable normalization forms, a type alias and a function
    // are defined via conditional compilation.
    //
    // If no feature is explicitly set, we default to the declarations for NFC.
    // Neither Cargo nor rustc allows us to set exclusive features; we must indulge
    // them with this clumsy branle of cfg declarations.

    #[cfg(any(any(feature = "nfc", feature = "nfkc"),
              not(any(feature = "nfc", feature = "nfd", feature = "nfkc", feature = "nfkd", feature = "none"))))]
    pub type Normalizer<'a> = Recompositions<Chars<'a>>;
    #[cfg(any(feature = "nfd", feature = "nfkd"))]
    pub type Normalizer<'a> = Decompositions<Chars<'a>>;
    #[cfg(feature = "none")]
    pub type Normalizer<'a> = Chars<'a>;

    #[cfg(any(feature = "nfc",
              not(any(feature = "nfc", feature = "nfd", feature = "nfkc", feature = "nfkd", feature = "none"))))]
    pub fn normalize<'a>(s: &'a str) -> Normalizer<'a> { s.nfc() }
    #[cfg(feature = "nfd")]
    pub fn normalize<'a>(s: &'a str) -> Normalizer<'a> { s.nfd() }
    #[cfg(feature = "nfkc")]
    pub fn normalize<'a>(s: &'a str) -> Normalizer<'a> { s.nfkc() }
    #[cfg(feature = "nfkd")]
    pub fn normalize<'a>(s: &'a str) -> Normalizer<'a> { s.nfkd() }
    #[cfg(feature = "none")]
    pub fn normalize<'a>(s: &'a str) -> Normalizer<'a> { s.chars() }
}

// Pattern parsing

// Turns one textual knuth-liang pattern into a (letters, scores) pair.
// `value` decides which characters carry a score (and what it is); the
// remaining characters form the alphabetical key.
trait Parse<'a> : KLPTrie<'a> {
    fn value(char) -> Option<u8>;

    // A character is non-scoring when `value` assigns it no score.
    fn non_scoring(c: &char) -> bool { Self::value(c.clone()) == None }

    fn tally<I>(bytes: I) -> Self::Tally where I: Iterator<Item = u8>;

    // Normalizes the raw pattern, then splits it into the letter-only key
    // and the positional score tally.
    fn klpair(str_klp: &str) -> (String, Self::Tally) {
        let normalized: String = normalize(str_klp).collect();
        let alphabetical: String = normalized.chars().filter(Self::non_scoring).collect();
        let score = Self::tally(normalized.bytes());
        (alphabetical, score)
    }
}

impl<'a> Parse<'a> for Patterns {
    // Patterns score with decimal digits embedded in the pattern.
    fn value(c: char) -> Option<u8> { c.to_digit(10).map(|n| n as u8) }

    // Collects (position, score) pairs; subtracting the running count `j`
    // re-expresses each digit's byte offset relative to the letters only.
    // NOTE(review): positions are byte offsets cast to u8 — assumes patterns
    // are short and effectively ASCII after normalization; confirm.
    fn tally<I>(bytes: I) -> Self::Tally where I: Iterator<Item = u8> {
        bytes.enumerate()
             .filter_map(|(i, b)| Self::value(b as char).map(|v| (i as u8, v)))
             .enumerate()
             .map(|(j, (i, v))| (i - j as u8, v))
             .collect()
    }
}

impl<'a> Parse<'a> for Exceptions {
    // Exceptions mark break opportunities with '-' (implicit score 1).
    fn value(c: char) -> Option<u8> {
        match c == '-' {
            true => Some(1),
            false => None
        }
    }

    // Collects the hyphen positions, adjusted for the hyphens already seen.
    fn tally<I>(bytes: I) -> Self::Tally where I: Iterator<Item = u8> {
        bytes.enumerate()
             .filter_map(|(i, b)| Self::value(b as char).map(|_| i))
             .enumerate()
             .map(|(j, i)| i - j)
             .collect()
    }
}

// Pattern IO and serialization

// Opens `patterns-tex/hyph-<lang>.<suffix>.txt` under the crate root.
pub fn source_klp_file(lang: &str, suffix: &str) -> Result<File, Error> {
    let _wdir = &env::var("CARGO_MANIFEST_DIR") ?;
    let work_dir = Path::new(_wdir);
    let fname = format!("hyph-{}.{}.txt", lang, suffix);
    let fpath = work_dir.join("patterns-tex").join(fname);
    Ok( File::open(fpath) ? )
}

// Line iterator over a source pattern file.
pub fn load_by_line(lang: &str, suffix: &str) -> Result<io::Lines<io::BufReader<File>>, Error> {
    let file = source_klp_file(lang, suffix) ?;
    let reader = io::BufReader::new(file);
    Ok(reader.lines())
}

// Build a trie from the textual source and serialize it to
// `patterns/<lang>.<suffix>.bincode`.
trait KLPTrieIO<'a> : KLPTrie<'a> + Parse<'a> + ser::Serialize {
    fn suffix_in() -> &'static str;
    fn suffix_out() -> &'static str;

    // Parses every line, sorts and dedups the pairs, then inserts them.
    fn build(lang: &str) -> Self where Self: Sized {
        let textual_klps = load_by_line(lang, Self::suffix_in()).unwrap();
        let mut klpairs: Vec<_> = textual_klps.map(|res| Self::klpair(&res.unwrap())).collect();
        klpairs.sort_by_key(|&(ref ptn, _)| ptn.clone());
        klpairs.dedup();

        let mut trie = Self::new();
        for klp in klpairs.into_iter() {
            trie.insert(klp);
        }
        trie
    }

    // Bincode-serializes the trie (bounded at 10 MB) plus a trailing newline.
    fn write(&self, lang: &'a str) -> Result<&'a str, Error> {
        let str_workdir = &env::var("CARGO_MANIFEST_DIR") ?;
        let work_dir = Path::new(str_workdir);
        let fname = format!("{}.{}.bincode", lang, Self::suffix_out());
        let fpath = work_dir.join("patterns").join(fname);
        let mut buffer = io::BufWriter::new( File::create(fpath) ? );
        bin::serialize_into(&mut buffer, &self, SizeLimit::Bounded(10_000_000)) ?;
        buffer.write("\n".as_bytes()) ?;
        Ok(lang)
    }
}

impl<'a> KLPTrieIO<'a> for Patterns {
    fn suffix_in() -> &'static str { "pat" }
    fn suffix_out() -> &'static str { "patterns" }
}

impl<'a> KLPTrieIO<'a> for Exceptions {
    fn suffix_in() -> &'static str { "hyp" }
    fn suffix_out() -> &'static str { "exceptions" }
}

fn main() {
    // Skip regeneration when pocket-resources already produced its output.
    let out_dir = env::var_os("OUT_DIR").unwrap();
    if Path::new(&out_dir).join("pocket-resources.rs").exists() { return; }

    let output_suffixes = vec![Patterns::suffix_out(), Exceptions::suffix_out()];
    // Language tags of the bundled TeX hyphenation sources.
    let langs = vec![ "af", "hy", "as", "eu", "bn", "bg", "ca", "zh-latn-pinyin", "cop", "hr", "cs", "da", "nl", "en-gb", "en-us", "eo", "et", "mul-ethi", "fi", "fr", "fur", "gl", "ka", "de-1901", "de-1996", "de-ch-1901", "grc", "el-monoton", "el-polyton", "gu", "hi", "hu", "is", "id", "ia", "ga", "it", "kn", "kmr", "la", "la-x-classic", "lv", "lt", "ml", "mr", "mn-cyrl", "nb", "nn", "oc", "or", "pa", "pms", "pl", "pt", "ro", "rm", "ru", "sa", "sr-cyrl", "sh-cyrl", "sh-latn", "cu", "sk", "sl", "es", "sv", "ta", "te", "th", "tr", "tk", "uk", "hsb", "cy" ];

    for lang in langs.iter() {
        let patterns = Patterns::build(lang);
        let exceptions = Exceptions::build(lang);
        fs::create_dir_all("patterns").unwrap();
        patterns.write(lang).unwrap();
        exceptions.write(lang).unwrap();
    }

    // Register every generated .bincode file as an embedded resource.
    let resource_paths = langs.iter().flat_map(|tag| output_suffixes.iter().map(move |suffix| ("patterns", format!("{}.{}.bincode", tag, suffix))));
    pocket_resources::package(resource_paths.collect::<Vec<_>>().iter()).unwrap();
}

// Error type boilerplate

#[derive(Debug)]
pub enum Error {
    Env(env::VarError),
    IO(io::Error),
    Serialization(bin::SerializeError),
    Resource
}

impl error::Error for Error {
    fn description(&self) -> &str {
        match *self {
            Error::Env(ref e) => e.description(),
            Error::IO(ref e) => e.description(),
            Error::Serialization(ref e) => e.description(),
            Error::Resource => "Pattern resource creation failed"
        }
    }
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Error::Env(ref e) => e.fmt(f),
            Error::IO(ref e) => e.fmt(f),
            Error::Serialization(ref e) => e.fmt(f),
            Error::Resource => {
                let e = self as &error::Error;
                e.description().fmt(f)
            }
        }
    }
}

impl From<io::Error> for Error {
    fn from(err: io::Error) -> Error { Error::IO(err) }
}
impl From<env::VarError> for Error {
    fn from(err: env::VarError) -> Error { Error::Env(err) }
}
impl From<bin::SerializeError> for Error {
    fn from(err: bin::SerializeError) -> Error { Error::Serialization(err) }
}
use std::collections::HashMap;
use std::fmt;

/// A runtime value: scalar, string, record, or nil.
#[derive(Debug, Clone, PartialEq)]
pub enum Object {
    Number(f64),
    Str(String),
    Bool(bool),
    Record(Record),
    Nil,
}

impl Object {
    /// Converts this value to its `Str` representation.
    ///
    /// `Nil` becomes `Str("<nil>")`; `Record`s have no string form and
    /// yield `None`.
    pub fn to_str_object(&self) -> Option<Self> {
        match self {
            Object::Number(n) => Some(Object::Str(n.to_string())),
            Object::Bool(b) => Some(Object::Str(b.to_string())),
            Object::Str(s) => Some(Object::Str(s.clone())),
            Object::Nil => Some(Object::Str(String::from("<nil>"))),
            Object::Record(_) => None,
        }
    }
}

impl fmt::Display for Object {
    /// Only `Str` values display their content; every other variant
    /// intentionally renders as the empty string.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Object::Str(text) = self {
            write!(f, "{}", text)
        } else {
            Ok(())
        }
    }
}

/// An ordered sequence of values plus named sub-records.
#[derive(Debug, Clone, PartialEq)]
pub struct Record {
    pub content: Vec<Object>,
    pub map: HashMap<String, Record>,
}

impl Record {
    /// Assembles a record from its parts.
    pub fn new(content: Vec<Object>, map: HashMap<String, Record>) -> Self {
        Self { content, map }
    }
}
// Demonstrates ownership, moves, clones, and borrowing on String vs stack data.
fn main() {
    let mut s = String::from("hello"); // allocated on heap and can be mutable
    s.push_str(", world!");
    println!("{}", s);

    let rs = "hello"; // string literal gets allocated on stack
    println!("String literal on stack: {}", rs);

    // MOVING
    // doesn't compile because ownership is moved and s1 is invalidated
    //let s1 = String::from("hello");
    //let s2 = s1; // double free error in most languages
    // To ensure memory safety, after the line let s2 = s1, Rust considers s1 as no longer valid.
    // Therefore, Rust doesn’t need to free anything when s1 goes out of scope
    //println!("{}, world!", s1);

    // can clone heap data (expensive)
    let s1 = String::from("hello");
    let s2 = s1.clone(); // deep copy: both s1 and s2 remain valid
    println!("s1 = {}, s2 = {}", s1, s2);

    let x = 5;
    let y = x; // on stack, so we can Copy
    println!("x = {}, y = {}", x, y);

    // BORROWING
    // passing &r1 lends the String without transferring ownership,
    // so r1 is still usable after the call
    let r1 = String::from("hello");
    let len = calc_length(&r1);
    println!("The length of '{}' is {}", r1, len);

    fn calc_length(s: &String) -> usize {
        s.len()
    }

    // mutable references
    // references are immutable by default; &mut grants write access
    // (and only one &mut to a value may exist at a time)
    let mut r = String::from("hello");
    change(&mut r);

    fn change(some_string: &mut String) {
        some_string.push_str(", world");
    }
}
//!
//! ## packet_headers.rs
//!
//! One variant per protocol header type this crate can parse.

use crate::arp::ArpPacket;
use crate::ethernet::{EtherSnapPacket, EthernetFrame, LlcPacket};
use crate::ipv4::Ipv4Header;
use crate::ipv6::Ipv6Header;
use crate::tcp::TcpHeader;
use crate::udp::UdpHeader;

/// A single parsed protocol header, tagged by protocol.
pub enum PacketHeader {
    /// Ethernet frame header.
    Ethernet(EthernetFrame),
    /// Ethernet SNAP extension header.
    Snap(EtherSnapPacket),
    /// LLC header.
    Llc(LlcPacket),
    /// ARP packet.
    Arp(ArpPacket),
    /// IPv4 header.
    Ipv4(Ipv4Header),
    /// UDP header.
    Udp(UdpHeader),
    /// IPv6 header.
    Ipv6(Ipv6Header),
    /// TCP header.
    Tcp(TcpHeader),
}
// END OF FILE
use simple_error::SimpleError; // Listed in reverse order of precedence. #[derive(PartialEq, Eq, PartialOrd, Ord, Debug)] pub enum Token { True, // 'T' False, // 'F' Identifier(String), // Alphanumeric chars Str(String), // Literal string OpenParen, // '(' ClosedParen, // ')' Not, // '!' And, // '&' Or, // '|' Implies, // '=>' Biconditional, // '<=>' } pub fn tokenize(expr: &str) -> Result<Vec<Token>, SimpleError> { let mut tokens: Vec<Token> = Vec::new(); let mut rest: &str = expr; while !rest.is_empty() { // Ignore whitespace if let " " | "\t" | "\n" = &rest[0..1] { rest = &rest[1..]; continue; } // Do all the single-character operators first // If we've found a single-character token, advance. if let Some(tok) = match &rest[0..1] { "&" => Some(Token::And), "|" => Some(Token::Or), "!" => Some(Token::Not), "(" => Some(Token::OpenParen), ")" => Some(Token::ClosedParen), _ => None, } { tokens.push(tok); rest = &rest[1..]; continue; } if rest.starts_with("=>") { rest = &rest[2..]; tokens.push(Token::Implies); continue; } if rest.starts_with("<=>") { rest = &rest[3..]; tokens.push(Token::Biconditional); continue; } // Scan strings if rest.starts_with("\"") { let contents: String = rest.chars().skip(1).take_while(|c| *c != '"').collect(); rest = &rest[contents.len() + 2..]; tokens.push(Token::Str(contents)); continue; } // Finally, look for possible identifiers. let ident: String = rest.chars().take_while(|c| c.is_alphanumeric()).collect(); tokens.push(match ident.as_str() { "T" => Token::True, "F" => Token::False, "" => bail!("unable to tokenize, rest: {}", rest), _ => Token::Identifier(String::from(&ident)), }); rest = &rest[ident.len()..] 
} Ok(tokens) } #[cfg(test)] mod test { use super::*; #[test] fn test_tokenize() { let test_str = "What a&|!=><=>T F( )"; let tokens = tokenize(test_str).unwrap(); assert_eq!( tokens, vec![ Token::Identifier("What".into()), Token::Identifier("a".into()), Token::And, Token::Or, Token::Not, Token::Implies, Token::Biconditional, Token::True, Token::False, Token::OpenParen, Token::ClosedParen, ] ); } #[test] fn test_tokenize_fail() { let fail = tokenize("$"); println!("{:?}", fail); match fail { Err(_) => (), _ => panic!("invalid expression tokenized without error"), } } #[test] fn test_ordering() { assert!(Token::Or < Token::Implies); assert!(Token::And < Token::Or); assert!(Token::Identifier("".into()) < Token::And); } }
use recap::Recap;
use serde::Deserialize;
use std::fs;

/// Day 2: count passwords that satisfy each of two policy rule-sets.
pub fn day_two() {
    println!("-----DAY2------");
    let entries: Vec<Entry> = convert_line_to_entry("inputs/input_day2.txt");
    println!("Part 1");
    part_one(entries.clone());
    println!("Part 2");
    part_two(entries);
}

/// One policy line, e.g. `1-3 a: abcde`.
#[derive(Debug, Deserialize, Recap, Clone)]
#[recap(regex = r"(?P<first_num>.*)-(?P<second_num>.*) (?P<char>[a-z]): (?P<pwd>.*)")]
struct Entry {
    first_num: i32,
    second_num: i32,
    char: char,
    pwd: String,
}

impl Entry {
    /// Rule-set 1: `char` must occur between `first_num` and `second_num`
    /// times (inclusive) in `pwd`.
    fn validate_rule1(self) -> bool {
        let count = self.pwd.matches(self.char).count() as i32;
        count >= self.first_num && count <= self.second_num
    }

    /// Rule-set 2: exactly one of the 1-based positions `first_num` and
    /// `second_num` holds `char`.
    ///
    /// Positions outside the password (or < 1) now count as "no match";
    /// previously `nth(..).unwrap()` panicked on short passwords.
    fn validate_rule2(self) -> bool {
        // True iff the 1-based position `pos` exists and holds `expected`.
        fn char_at_is(pwd: &str, pos: i32, expected: char) -> bool {
            pos >= 1 && pwd.chars().nth((pos - 1) as usize) == Some(expected)
        }
        char_at_is(&self.pwd, self.first_num, self.char)
            ^ char_at_is(&self.pwd, self.second_num, self.char)
    }
}

fn part_one(entries: Vec<Entry>) {
    let mut count: i32 = 0;
    for entry in entries {
        if entry.validate_rule1() {
            count += 1;
        }
    }
    println!("Valid password count (Rule-set 1): {}", count);
}

fn part_two(entries: Vec<Entry>) {
    let mut count: i32 = 0;
    for entry in entries {
        if entry.validate_rule2() {
            count += 1;
        }
    }
    println!("Valid password count (Rule-set 2): {}", count);
}

/// Reads `filename` and parses each line into an `Entry` via the Recap regex.
/// Panics if the file is missing or a line does not match the pattern.
fn convert_line_to_entry(filename: &str) -> Vec<Entry> {
    let contents = fs::read_to_string(filename).expect("Could not read file");
    let mut entries: Vec<Entry> = Vec::new();
    for line in contents.lines() {
        let entry: Entry = line.parse().unwrap();
        entries.push(entry);
    }
    entries
}
//! Aggregates the per-entity submodules and re-exports their contents (plus
//! the parent's `schema` and `prelude`) for crate-internal use.

pub(crate) use super::schema;

// Entity modules.
pub(crate) mod data_reports;
pub(crate) mod executor_group;
pub(crate) mod executor_processor;
pub(crate) mod executor_processor_bind;
pub(crate) mod task;
pub(crate) mod task_bind;
pub(crate) mod task_log;
pub(crate) mod user;

pub(crate) use super::prelude;

// Flat re-exports so callers can write `model::Foo` directly.
pub(crate) use data_reports::*;
pub(crate) use executor_group::*;
pub(crate) use executor_processor::*;
pub(crate) use executor_processor_bind::*;
pub(crate) use task::*;
pub(crate) use task_bind::*;
pub(crate) use task_log::*;
pub(crate) use user::*;
use parity_scale_codec::{Encode, MaxEncodedLen};

// Unit struct exercising the `Encode`/`MaxEncodedLen` derives with an
// explicit crate path override.
#[derive(Encode, MaxEncodedLen)]
#[codec(crate = "parity_scale_codec")]
struct Example;

fn main() {
    // Only checks that the derived method is callable; the value is discarded.
    let _ = Example::max_encoded_len();
}
use ::phi::gfx::{Sprite, CopySprite};
use ::phi::data::Rectangle;
use ::sdl2::render::Renderer;

/// The three parallax layers making up the scrolling backdrop.
#[derive(Clone)]
pub struct BgSet {
    pub bg_back: Background,
    pub bg_middle: Background,
    pub bg_front: Background,
}

impl BgSet {
    /// Loads the three layer sprites and assigns fixed scroll velocities:
    /// back 20, middle 40, front 60 (nearer layers scroll faster).
    /// Panics if any sprite fails to load.
    pub fn new(renderer: &mut Renderer,
               bg_path: &'static str,
               mid_path: &'static str,
               fg_path: &'static str) -> BgSet {
        BgSet {
            bg_back: Background {
                pos: 0.,
                vel: 20.,
                sprite: Sprite::load(renderer, bg_path).unwrap(),
            },
            bg_middle: Background {
                pos: 0.,
                vel: 40.,
                sprite: Sprite::load(renderer, mid_path).unwrap(),
            },
            bg_front: Background {
                pos: 0.,
                vel: 60.,
                sprite: Sprite::load(renderer, fg_path).unwrap(),
            },
        }
    }

    /// Renders all three layers back-to-front.
    pub fn _render(&mut self, renderer: &mut Renderer, elapsed: f64) {
        self.render_bg(renderer, elapsed);
        self.render_fg(renderer, elapsed);
    }

    /// Renders the two rear layers (drawn behind the gameplay sprites).
    pub fn render_bg(&mut self, renderer: &mut Renderer, elapsed: f64) {
        self.bg_back.render(renderer, elapsed);
        self.bg_middle.render(renderer, elapsed);
    }

    /// Renders the foreground layer (drawn over the gameplay sprites).
    pub fn render_fg(&mut self, renderer: &mut Renderer, elapsed: f64) {
        self.bg_front.render(renderer, elapsed);
    }
}

/// One horizontally scrolling, endlessly tiled background layer.
#[derive(Clone)]
pub struct Background {
    // Current scroll offset in sprite-space pixels.
    pos: f64,
    // Scroll speed in sprite-space pixels per second.
    vel: f64,
    sprite: Sprite,
}

impl Background {
    /// Advances the scroll position by `vel * elapsed` and tiles the sprite
    /// across the window, scaled so it always fills the window height.
    pub fn render(&mut self, renderer: &mut Renderer, elapsed: f64) {
        let size = self.sprite.size();

        // Advance, wrapping once we have scrolled a full sprite width.
        self.pos += self.vel * elapsed;
        if self.pos > size.0 {
            self.pos -= size.0;
        }

        let (win_w, win_h) = renderer.output_size().unwrap();
        // Scale factor that maps the sprite height onto the window height.
        let scale = win_h as f64 / size.1;

        // Draw copies left-to-right until the window is fully covered.
        let mut physical_left = -self.pos * scale;
        while physical_left < win_w as f64 {
            renderer.copy_sprite(&self.sprite, Rectangle {
                x: physical_left,
                y: 0.,
                w: size.0 * scale,
                h: win_h as f64,
            });
            physical_left += size.0 * scale;
        }
    }
}
// Copyright 2020 <盏一 w@hidva.com>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::marker::PhantomData;
use std::ptr::{self, NonNull};
use std::sync::atomic::{self, AtomicUsize, Ordering, Ordering::Relaxed};

// An explicit, context-taking destructor ("Destory" [sic] throughout this
// file). Unlike `Drop`, it receives a caller-provided context.
pub trait Destory {
    type DestoryCtx;
    fn destory(&mut self, ctx: &Self::DestoryCtx);
}

// Heap block shared by all handles: atomic refcount + payload.
struct Inner<T: Destory> {
    rc: AtomicUsize,
    data: T,
}

/// An `Arc`-like shared pointer whose payload is destroyed EXPLICITLY via
/// [`Marc::unref`] (which needs a `DestoryCtx`), not via `Drop`.
/// Dropping a `Marc` without calling `unref` frees the allocation but does
/// NOT run `destory` (see the `id2` case in the test below).
pub struct Marc<T: Destory> {
    ptr: NonNull<Inner<T>>,
    // Marks logical ownership of `Inner<T>` for the compiler.
    phantom: PhantomData<Inner<T>>,
}

impl<T: Destory> Marc<T> {
    /// Allocates a new shared block with refcount 1.
    pub fn new(v: T) -> Self {
        let b = Box::new(Inner {
            rc: AtomicUsize::new(1),
            data: v,
        });
        // Box::leak: ownership is now tracked manually through `rc`.
        Self::from_inner(Box::leak(b).into())
    }

    fn from_inner(ptr: NonNull<Inner<T>>) -> Self {
        Self {
            ptr,
            phantom: PhantomData,
        }
    }

    fn inner(&self) -> &Inner<T> {
        unsafe { self.ptr.as_ref() }
    }

    // Caller must guarantee exclusive access to the payload (rc effectively 1).
    unsafe fn get_mut_unchecked(&mut self) -> &mut T {
        &mut (*self.ptr.as_ptr()).data
    }

    // Decrements the refcount; the handle that drops it to zero runs
    // `destory` and frees the block. Release on the decrement plus an
    // Acquire fence mirrors `Arc::drop`'s synchronization.
    fn do_unref(&mut self, ctx: &T::DestoryCtx) {
        if self.inner().rc.fetch_sub(1, Ordering::Release) != 1 {
            return;
        }
        atomic::fence(Ordering::Acquire);
        unsafe { self.get_mut_unchecked() }.destory(ctx);
        unsafe {
            Box::from_raw(self.ptr.as_ptr());
        }
        return;
    }

    // Arc::drop
    /// Releases this handle. `mem::forget` prevents `Marc::drop` from
    /// running a second time on the same handle.
    pub fn unref(mut self, ctx: &T::DestoryCtx) {
        self.do_unref(ctx);
        std::mem::forget(self);
        return;
    }
}

impl<T: Destory + Clone> Marc<T> {
    /// Clone-on-write access, like `Arc::make_mut`: if this handle is the
    /// sole owner, hand out `&mut T`; otherwise clone the payload into a
    /// fresh `Marc` and release our share of the old one.
    pub fn make_mut(&mut self, ctx: &T::DestoryCtx) -> &mut T {
        // As Arc said:
        // > Use Acquire to ensure that we see any writes to `weak`...
        // Since we have no weak refcount, we use Relaxed instead of Acquire here.
        if self
            .inner()
            .rc
            .compare_exchange(1, 0, Relaxed, Relaxed)
            .is_err()
        {
            // Shared: copy the data, drop our reference to the old block,
            // and overwrite `self` in place without running its destructor.
            let bak = Marc::new((**self).clone());
            self.do_unref(ctx);
            unsafe { ptr::write(self as *mut _, bak) };
        } else {
            // Unique: the CAS parked rc at 0 to lock out clones; restore it.
            self.inner().rc.store(1, Relaxed);
        }
        debug_assert_eq!(1, self.inner().rc.load(Relaxed));
        unsafe { self.get_mut_unchecked() }
    }
}

impl<T: Destory> Drop for Marc<T> {
    /// Runs only when a handle is dropped WITHOUT `unref` (unref forgets
    /// `self`). Frees the block but does not call `destory`; more than one
    /// outstanding reference at this point is a usage bug.
    fn drop(&mut self) {
        // synchronizes with unref() according to
        // [Release-Acquire ordering](https://en.cppreference.com/w/cpp/atomic/memory_order#Release-Acquire_ordering)
        let rc = self.inner().rc.load(Ordering::Acquire);
        assert!(rc <= 1, "Marc::Drop: rc: {}", rc);
        unsafe {
            Box::from_raw(self.ptr.as_ptr());
        }
    }
}

// Guards against refcount overflow, as Arc does.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;

impl<T: Destory> std::clone::Clone for Marc<T> {
    fn clone(&self) -> Self {
        let rc = self.inner().rc.fetch_add(1, Ordering::Relaxed);
        assert!(rc <= MAX_REFCOUNT, "Marc::clone: rc: {}", rc);
        Self::from_inner(self.ptr)
    }
}

impl<T: Destory> std::ops::Deref for Marc<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.inner().data
    }
}

#[cfg(test)]
mod test {
    use super::{Destory, Marc};
    use std::sync::atomic::{AtomicU32, Ordering};

    #[test]
    fn t() {
        #[derive(Default)]
        struct MarcTestCtx {
            destory_cnt: [AtomicU32; 8],
        }
        struct TestObj {
            id: usize,
        }
        impl Destory for TestObj {
            type DestoryCtx = MarcTestCtx;
            fn destory(&mut self, ctx: &Self::DestoryCtx) {
                ctx.destory_cnt[self.id].fetch_add(1, Ordering::Relaxed);
            }
        }
        let destory_ctx = MarcTestCtx::default();
        let marc1 = Marc::new(TestObj { id: 3 });
        {
            let marc2 = marc1.clone();
            marc2.unref(&destory_ctx);
        }
        // Only the LAST unref runs destory.
        let id = marc1.id;
        assert_eq!(0, destory_ctx.destory_cnt[id].load(Ordering::Relaxed));
        marc1.unref(&destory_ctx);
        assert_eq!(1, destory_ctx.destory_cnt[id].load(Ordering::Relaxed));
        // Plain drop (no unref) never calls destory.
        let id2 = 7;
        {
            let _ = Marc::new(TestObj { id: id2 });
        }
        assert_eq!(0, destory_ctx.destory_cnt[id2].load(Ordering::Relaxed));
    }
}

// SAFETY: like `Arc`, handles may be sent/shared across threads when the
// payload is `Send + Sync`; the refcount itself is an `AtomicUsize`.
unsafe impl<T: Sync + Send + Destory> Send for Marc<T> {}
unsafe impl<T: Sync + Send + Destory> Sync for Marc<T> {}
// Any copyright is dedicated to the Public Domain.
// http://creativecommons.org/publicdomain/zero/1.0/

//! A simple rkv demo that showcases the basic usage (put/get/delete) of rkv.
//!
//! You can test this out by running:
//!
//!     cargo run --example simple-store

use std::fs;

use tempfile::Builder;

use rkv::{
    backend::{SafeMode, SafeModeDatabase, SafeModeEnvironment, SafeModeRwTransaction},
    Manager, Rkv, StoreOptions, Value,
};

type MultiStore = rkv::MultiStore<SafeModeDatabase>;
type Writer<'w> = rkv::Writer<SafeModeRwTransaction<'w>>;

/// Reads every value stored under "str1"/"str2"/"str3" into `ids`, then
/// writes a blob back under each collected string (exercises read + write
/// through the same writer).
fn getput<'w, 's>(store: MultiStore, writer: &'w mut Writer, ids: &'s mut Vec<String>) {
    let keys = vec!["str1", "str2", "str3"];
    // we convert the writer into a cursor so that we can safely read
    for k in keys.iter() {
        // this is a multi-valued database, so get returns an iterator
        let mut iter = store.get(writer, k).unwrap();
        while let Some(Ok((_key, val))) = iter.next() {
            if let Value::Str(s) = val {
                ids.push(s.to_owned());
            } else {
                panic!("didn't get a string back!");
            }
        }
    }
    for id in ids {
        store.put(writer, &id, &Value::Blob(b"weeeeeee")).unwrap();
    }
}

/// Deletes one specific (key, value) pair from each of the three multi-value
/// keys; in a multi-store the value must be named to pick which entry dies.
fn delete(store: MultiStore, writer: &mut Writer) {
    let keys = vec!["str1", "str2", "str3"];
    let vals = vec!["string uno", "string quatro", "string siete"];
    // we convert the writer into a cursor so that we can safely read
    for i in 0..keys.len() {
        store
            .delete(writer, &keys[i], &Value::Str(vals[i]))
            .unwrap();
    }
}

fn main() {
    // Environment lives in a throwaway temp dir for the demo.
    let root = Builder::new().prefix("simple-db").tempdir().unwrap();
    fs::create_dir_all(root.path()).unwrap();
    let p = root.path();

    // The manager enforces that each process opens the same lmdb environment at most once
    let mut manager = Manager::<SafeModeEnvironment>::singleton().write().unwrap();
    let created_arc = manager.get_or_create(p, Rkv::new::<SafeMode>).unwrap();
    let k = created_arc.read().unwrap();

    // Creates a store called "store"
    let store = k.open_single("store", StoreOptions::create()).unwrap();

    let multistore = k.open_multi("multistore", StoreOptions::create()).unwrap();

    println!("Inserting data...");
    {
        // Use a writer to mutate the store; one put per supported Value variant.
        let mut writer = k.write().unwrap();
        store.put(&mut writer, "int", &Value::I64(1234)).unwrap();
        store
            .put(&mut writer, "uint", &Value::U64(1234_u64))
            .unwrap();
        store
            .put(&mut writer, "float", &Value::F64(1234.0.into()))
            .unwrap();
        store
            .put(&mut writer, "instant", &Value::Instant(1_528_318_073_700))
            .unwrap();
        store
            .put(&mut writer, "boolean", &Value::Bool(true))
            .unwrap();
        store
            .put(&mut writer, "string", &Value::Str("héllo, yöu"))
            .unwrap();
        store
            .put(
                &mut writer,
                "json",
                &Value::Json(r#"{"foo":"bar", "number": 1}"#),
            )
            .unwrap();
        store
            .put(&mut writer, "blob", &Value::Blob(b"blob"))
            .unwrap();
        writer.commit().unwrap();
    }

    println!("Testing getput");
    {
        // Seed three values under each of three keys, then round-trip them
        // through getput() and remove one value per key via delete().
        let mut ids = Vec::new();
        let mut writer = k.write().unwrap();
        multistore
            .put(&mut writer, "str1", &Value::Str("string uno"))
            .unwrap();
        multistore
            .put(&mut writer, "str1", &Value::Str("string dos"))
            .unwrap();
        multistore
            .put(&mut writer, "str1", &Value::Str("string tres"))
            .unwrap();
        multistore
            .put(&mut writer, "str2", &Value::Str("string quatro"))
            .unwrap();
        multistore
            .put(&mut writer, "str2", &Value::Str("string cinco"))
            .unwrap();
        multistore
            .put(&mut writer, "str2", &Value::Str("string seis"))
            .unwrap();
        multistore
            .put(&mut writer, "str3", &Value::Str("string siete"))
            .unwrap();
        multistore
            .put(&mut writer, "str3", &Value::Str("string ocho"))
            .unwrap();
        multistore
            .put(&mut writer, "str3", &Value::Str("string nueve"))
            .unwrap();
        getput(multistore, &mut writer, &mut ids);
        writer.commit().unwrap();
        let mut writer = k.write().unwrap();
        delete(multistore, &mut writer);
        writer.commit().unwrap();
    }

    println!("Looking up keys...");
    {
        // Use a reader to query the store
        let reader = k.read().unwrap();
        println!("Get int {:?}", store.get(&reader, "int").unwrap());
        println!("Get uint {:?}", store.get(&reader, "uint").unwrap());
        println!("Get float {:?}", store.get(&reader, "float").unwrap());
        println!("Get instant {:?}", store.get(&reader, "instant").unwrap());
        println!("Get boolean {:?}", store.get(&reader, "boolean").unwrap());
        println!("Get string {:?}", store.get(&reader, "string").unwrap());
        println!("Get json {:?}", store.get(&reader, "json").unwrap());
        println!("Get blob {:?}", store.get(&reader, "blob").unwrap());
        println!(
            "Get non-existent {:?}",
            store.get(&reader, "non-existent").unwrap()
        );
    }

    println!("Looking up keys via Writer.get()...");
    {
        // Uncommitted writes are visible through the writer itself.
        let mut writer = k.write().unwrap();
        store.put(&mut writer, "foo", &Value::Str("bar")).unwrap();
        store.put(&mut writer, "bar", &Value::Str("baz")).unwrap();
        store.delete(&mut writer, "foo").unwrap();
        println!(
            "It should be None! ({:?})",
            store.get(&writer, "foo").unwrap()
        );
        println!("Get bar ({:?})", store.get(&writer, "bar").unwrap());
        writer.commit().unwrap();
        let reader = k.read().expect("reader");
        println!(
            "It should be None! ({:?})",
            store.get(&reader, "foo").unwrap()
        );
        println!("Get bar {:?}", store.get(&reader, "bar").unwrap());
    }

    println!("Aborting transaction...");
    {
        // Aborting a write transaction rollbacks the change(s)
        let mut writer = k.write().unwrap();
        store.put(&mut writer, "foo", &Value::Str("bar")).unwrap();
        writer.abort();
        let reader = k.read().expect("reader");
        println!(
            "It should be None! ({:?})",
            store.get(&reader, "foo").unwrap()
        );
        // Explicitly aborting a transaction is not required unless an early
        // abort is desired, since both read and write transactions will
        // implicitly be aborted once they go out of scope.
    }

    println!("Deleting keys...");
    {
        // Deleting a key/value also requires a write transaction
        let mut writer = k.write().unwrap();
        store.put(&mut writer, "foo", &Value::Str("bar")).unwrap();
        store.delete(&mut writer, "foo").unwrap();
        println!(
            "It should be None! ({:?})",
            store.get(&writer, "foo").unwrap()
        );
        writer.commit().unwrap();
        // Committing a transaction consumes the writer, preventing you
        // from reusing it by failing and reporting a compile-time error.
        // This line would report error[E0382]: use of moved value: `writer`.
        // store.put(&mut writer, "baz", &Value::Str("buz")).unwrap();
    }

    println!("Clearing store...");
    {
        // Clearing a store deletes all the entries in that store
        let mut writer = k.write().unwrap();
        store.put(&mut writer, "foo", &Value::Str("bar")).unwrap();
        store.put(&mut writer, "bar", &Value::Str("baz")).unwrap();
        store.clear(&mut writer).unwrap();
        writer.commit().unwrap();
        let reader = k.read().expect("reader");
        println!(
            "It should be None! ({:?})",
            store.get(&reader, "foo").unwrap()
        );
        println!(
            "It should be None! ({:?})",
            store.get(&reader, "bar").unwrap()
        );
    }

    println!("Write and read on multiple stores...");
    {
        // A single writer can span several stores in the same environment.
        let another_store = k
            .open_single("another_store", StoreOptions::create())
            .unwrap();
        let mut writer = k.write().unwrap();
        store.put(&mut writer, "foo", &Value::Str("bar")).unwrap();
        another_store
            .put(&mut writer, "foo", &Value::Str("baz"))
            .unwrap();
        writer.commit().unwrap();

        let reader = k.read().unwrap();
        println!(
            "Get from store value: {:?}",
            store.get(&reader, "foo").unwrap()
        );
        println!(
            "Get from another store value: {:?}",
            another_store.get(&reader, "foo").unwrap()
        );
    }
}
use crate::directory::Directory;
use crate::error::{Error, Result};
use crate::manifest::Name;
use crate::run::Project;
use crate::rustflags;
use serde_derive::Deserialize;
use std::path::PathBuf;
use std::process::{Command, Output, Stdio};
use std::{env, fs, iter};

/// Subset of `cargo metadata --format-version=1` output consumed by this crate.
#[derive(Deserialize)]
pub struct Metadata {
    pub target_directory: Directory,
    pub workspace_root: Directory,
    pub packages: Vec<PackageMetadata>,
}

/// Per-package slice of the cargo metadata.
#[derive(Deserialize)]
pub struct PackageMetadata {
    pub name: String,
    pub targets: Vec<BuildTarget>,
    pub manifest_path: PathBuf,
}

#[derive(Deserialize)]
pub struct BuildTarget {
    pub crate_types: Vec<String>,
}

/// Builds a `Command` for the cargo binary named by `$CARGO` (cargo sets this
/// when it invokes us), falling back to whatever `cargo` is on PATH.
fn raw_cargo() -> Command {
    match env::var_os("CARGO") {
        Some(cargo) => Command::new(cargo),
        None => Command::new("cargo"),
    }
}

/// Cargo invocation preconfigured for the scratch project: runs in the
/// project dir, pins CARGO_TARGET_DIR, forwards the captured RUSTFLAGS,
/// disables incremental compilation, and stays offline.
fn cargo(project: &Project) -> Command {
    let mut cmd = raw_cargo();
    cmd.current_dir(&project.dir);
    cmd.envs(cargo_target_dir(project));
    cmd.envs(rustflags::envs());
    cmd.env("CARGO_INCREMENTAL", "0");
    cmd.arg("--offline");
    cmd
}

/// Env pair pointing the scratch build at `<target_dir>/tests/trybuild`.
fn cargo_target_dir(project: &Project) -> impl Iterator<Item = (&'static str, PathBuf)> {
    iter::once((
        "CARGO_TARGET_DIR",
        path!(project.target_dir / "tests" / "trybuild"),
    ))
}

/// Locates the manifest directory: `$CARGO_MANIFEST_DIR` when cargo set it,
/// otherwise the nearest ancestor of the current dir containing a Cargo.toml.
pub fn manifest_dir() -> Result<Directory> {
    if let Some(manifest_dir) = env::var_os("CARGO_MANIFEST_DIR") {
        return Ok(Directory::from(manifest_dir));
    }
    let mut dir = Directory::current()?;
    loop {
        if dir.join("Cargo.toml").exists() {
            return Ok(dir);
        }
        // Error out once we run off the top of the filesystem.
        dir = dir.parent().ok_or(Error::ProjectDir)?;
    }
}

/// Primes the scratch project: obtains a lockfile, builds/checks the test
/// binary, and probes whether this cargo supports `--keep-going`.
pub fn build_dependencies(project: &mut Project) -> Result<()> {
    // Prefer the workspace lockfile so dependency versions match the real
    // workspace; both fallbacks are best-effort (errors deliberately ignored).
    let workspace_cargo_lock = path!(project.workspace / "Cargo.lock");
    if workspace_cargo_lock.exists() {
        let _ = fs::copy(workspace_cargo_lock, path!(project.dir / "Cargo.lock"));
    } else {
        let _ = cargo(project).arg("generate-lockfile").status();
    }

    let mut command = cargo(project);
    command
        .arg(if project.has_pass { "build" } else { "check" })
        .args(target())
        .arg("--bin")
        .arg(&project.name)
        .args(features(project));

    let status = command.status().map_err(Error::Cargo)?;
    if !status.success() {
        return Err(Error::CargoFail);
    }

    // Check if this Cargo contains https://github.com/rust-lang/cargo/pull/10383
    // by re-running the same (already successful) command with the extra flags;
    // failure just means the flag is unsupported, so default to false.
    project.keep_going = command
        .arg("-Zunstable-options")
        .arg("--keep-going")
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status()
        .map(|status| status.success())
        .unwrap_or(false);

    Ok(())
}

/// Builds one test binary and returns its JSON diagnostic output.
/// The preceding `clean` (best-effort) forces fresh diagnostics.
pub fn build_test(project: &Project, name: &Name) -> Result<Output> {
    let _ = cargo(project)
        .arg("clean")
        .arg("--package")
        .arg(&project.name)
        .arg("--color=never")
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status();

    cargo(project)
        .arg(if project.has_pass { "build" } else { "check" })
        .args(target())
        .arg("--bin")
        .arg(name)
        .args(features(project))
        .arg("--quiet")
        .arg("--color=never")
        .arg("--message-format=json")
        .output()
        .map_err(Error::Cargo)
}

/// Builds every test binary in one cargo invocation, using `--keep-going`
/// so one failing test does not abort the rest.
pub fn build_all_tests(project: &Project) -> Result<Output> {
    let _ = cargo(project)
        .arg("clean")
        .arg("--package")
        .arg(&project.name)
        .arg("--color=never")
        .stdout(Stdio::null())
        .stderr(Stdio::null())
        .status();

    cargo(project)
        .arg(if project.has_pass { "build" } else { "check" })
        .args(target())
        .arg("--bins")
        .args(features(project))
        .arg("--quiet")
        .arg("--color=never")
        .arg("--message-format=json")
        .arg("-Zunstable-options")
        .arg("--keep-going")
        .output()
        .map_err(Error::Cargo)
}

/// Runs one (expected-to-pass) test binary and captures its output.
pub fn run_test(project: &Project, name: &Name) -> Result<Output> {
    cargo(project)
        .arg("run")
        .args(target())
        .arg("--bin")
        .arg(name)
        .args(features(project))
        .arg("--quiet")
        .arg("--color=never")
        .output()
        .map_err(Error::Cargo)
}

/// Invokes `cargo metadata` for the current package and deserializes the
/// fields we care about; cargo's stderr is surfaced on parse failure.
pub fn metadata() -> Result<Metadata> {
    let output = raw_cargo()
        .arg("metadata")
        .arg("--no-deps")
        .arg("--format-version=1")
        .output()
        .map_err(Error::Cargo)?;

    serde_json::from_slice(&output.stdout).map_err(|err| {
        print!("{}", String::from_utf8_lossy(&output.stderr));
        Error::Metadata(err)
    })
}

/// Feature flags for the scratch build: explicit feature list when the user
/// configured one (default features off), otherwise nothing.
fn features(project: &Project) -> Vec<String> {
    match &project.features {
        Some(features) => vec![
            "--no-default-features".to_owned(),
            "--features".to_owned(),
            features.join(","),
        ],
        None => vec![],
    }
}

/// `--target` flags to forward, as captured by the build script.
fn target() -> Vec<&'static str> {
    const TARGET: Option<&str> = include!(concat!(env!("OUT_DIR"), "/target"));

    // When --target flag is passed, cargo does not pass RUSTFLAGS to rustc when
    // building proc-macro and build script even if the host and target triples
    // are the same. Therefore, if we always pass --target to cargo, tools such
    // as coverage that require RUSTFLAGS do not work for tests run by trybuild.
    //
    // To avoid that problem, do not pass --target to cargo if we know that it
    // has not been passed.
    //
    // Currently, cargo does not have a way to tell the build script whether
    // --target has been passed or not, and there is no heuristic that can
    // handle this well.
    //
    // Therefore, expose a cfg to always treat the target as host.
    if cfg!(trybuild_no_target) {
        vec![]
    } else if let Some(target) = TARGET {
        vec!["--target", target]
    } else {
        vec![]
    }
}
use core::{
    num::ParseIntError,
    str::FromStr,
};

use problem::{Problem, solve};

/// One-letter navigation instruction from the puzzle input
/// (Advent of Code 2020 day 12 style).
enum Instruction {
    North,
    South,
    East,
    West,
    Left,
    Right,
    Forward,
}

#[derive(Debug)]
enum ParseInstructionError {
    InvalidInstruction(String),
}

impl FromStr for Instruction {
    type Err = ParseInstructionError;

    /// Maps a single-letter code to its instruction; any other input is an error.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(match s {
            "N" => Instruction::North,
            "S" => Instruction::South,
            "E" => Instruction::East,
            "W" => Instruction::West,
            "L" => Instruction::Left,
            "R" => Instruction::Right,
            "F" => Instruction::Forward,
            d => return Err(ParseInstructionError::InvalidInstruction(d.to_string())),
        })
    }
}

/// An instruction plus its integer argument, e.g. "F10".
struct Action {
    instruction: Instruction,
    argument: i32,
}

#[derive(Debug)]
enum ParseActionError {
    InvalidInstruction(ParseInstructionError),
    InvalidArgument(ParseIntError),
}

impl From<ParseInstructionError> for ParseActionError {
    fn from(e: ParseInstructionError) -> Self {
        Self::InvalidInstruction(e)
    }
}

impl From<ParseIntError> for ParseActionError {
    fn from(e: ParseIntError) -> Self {
        Self::InvalidArgument(e)
    }
}

impl FromStr for Action {
    type Err = ParseActionError;

    /// First byte is the instruction letter, the rest is the numeric argument.
    /// NOTE(review): `s[0..1]` / `s[1..]` panic on an empty line — assumes
    /// well-formed puzzle input.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let instruction = s[0..1].parse()?;
        let argument = s[1..].parse()?;
        Ok(Self {
            instruction,
            argument,
        })
    }
}

struct Day12;

impl Problem for Day12 {
    type Input = Vec<Action>;
    type Part1Output = i32;
    type Part2Output = i32;
    type Error = ();

    /// Moves the ship directly. Heading index `r`: 0 = East, 1 = North,
    /// 2 = West, 3 = South (counter-clockwise). Answer is the Manhattan
    /// distance from the origin.
    fn part_1(input: &Self::Input) -> Result<Self::Part1Output, Self::Error> {
        const OFFSET_X: [i32; 4] = [1, 0, -1, 0];
        const OFFSET_Y: [i32; 4] = [0, 1, 0, -1];

        let mut x = 0;
        let mut y = 0;
        let mut r = 0;

        for action in input {
            match action.instruction {
                Instruction::North => y += action.argument,
                Instruction::South => y -= action.argument,
                Instruction::East => x += action.argument,
                Instruction::West => x -= action.argument,
                // Rotations are in multiples of 90°; `+ 4 -` keeps the index
                // non-negative before the final `% 4`.
                Instruction::Right => r = (r + 4 - action.argument as usize / 90 % 4) % 4,
                Instruction::Left => r = (r + action.argument as usize / 90) % 4,
                Instruction::Forward => {
                    x += OFFSET_X[r] * action.argument;
                    y += OFFSET_Y[r] * action.argument;
                },
            }
        }

        Ok(i32::abs(x) + i32::abs(y))
    }

    /// Waypoint variant: (x, y) is the ship, (wx, wy) the waypoint relative
    /// to it. `rx`/`ry` precompute the waypoint rotated counter-clockwise by
    /// 0/90/180/270°, so both turn directions become an index lookup.
    fn part_2(input: &Self::Input) -> Result<Self::Part2Output, Self::Error> {
        let mut x = 0;
        let mut y = 0;
        let mut wx = 10;
        let mut wy = 1;

        for action in input {
            let rx = [wx, -wy, -wx, wy];
            let ry = [wy, wx, -wy, -wx];

            match action.instruction {
                Instruction::North => wy += action.argument,
                Instruction::South => wy -= action.argument,
                Instruction::East => wx += action.argument,
                Instruction::West => wx -= action.argument,
                // Clockwise turn = complementary counter-clockwise index.
                Instruction::Right => {
                    let r = (4 - action.argument as usize / 90 % 4) % 4;
                    wx = rx[r];
                    wy = ry[r];
                },
                Instruction::Left => {
                    let r = action.argument as usize / 90 % 4;
                    wx = rx[r];
                    wy = ry[r];
                },
                Instruction::Forward => {
                    x += action.argument * wx;
                    y += action.argument * wy;
                },
            }
        }

        Ok(i32::abs(x) + i32::abs(y))
    }
}

fn main() {
    solve::<Day12>("input").unwrap();
}
use crate::{Error, ErrorKind};
use failure::{err_msg, Backtrace, Context, Fail};
use std::fmt::{self, Debug, Display};

/// Internal (non-user-facing) error: an `InternalErrorKind` wrapped in a
/// failure `Context`, which carries the underlying cause and backtrace.
#[derive(Debug)]
pub struct InternalError {
    kind: Context<InternalErrorKind>,
}

#[derive(Debug, PartialEq, Eq, Clone, Display)]
pub enum InternalErrorKind {
    /// An arithmetic overflow occurs during capacity calculation,
    /// e.g. `Capacity::safe_add`
    CapacityOverflow,

    /// The transaction_pool is already full
    TransactionPoolFull,

    /// The transaction already exist in transaction_pool
    PoolTransactionDuplicated,

    /// Persistent data had corrupted
    DataCorrupted,

    /// Database exception
    Database,

    /// VM internal error
    VM,

    /// Unknown system error
    System,
}

impl fmt::Display for InternalError {
    // Renders as "Kind(cause)" when a cause is attached, else just "Kind".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if let Some(cause) = self.cause() {
            write!(f, "{}({})", self.kind(), cause)
        } else {
            write!(f, "{}", self.kind())
        }
    }
}

impl From<InternalError> for Error {
    // Promote to the crate-level Error under the Internal kind.
    fn from(error: InternalError) -> Self {
        error.context(ErrorKind::Internal).into()
    }
}

impl From<InternalErrorKind> for InternalError {
    // A bare kind becomes an InternalError with no attached cause.
    fn from(kind: InternalErrorKind) -> Self {
        InternalError {
            kind: Context::new(kind),
        }
    }
}

impl From<InternalErrorKind> for Error {
    // Convenience: kind -> InternalError -> Error in one hop.
    fn from(kind: InternalErrorKind) -> Self {
        Into::<InternalError>::into(kind).into()
    }
}

impl InternalErrorKind {
    /// Attaches a failure `cause` beneath this kind.
    pub fn cause<F: Fail>(self, cause: F) -> InternalError {
        InternalError {
            kind: cause.context(self),
        }
    }

    /// Builds an `InternalError` of this kind from a plain message,
    /// via `failure::err_msg` (compat-wrapped so it implements `Fail`).
    pub fn reason<S: Display + Debug + Sync + Send + 'static>(self, reason: S) -> InternalError {
        InternalError {
            kind: err_msg(reason).compat().context(self),
        }
    }
}

impl InternalError {
    /// Returns the error kind.
    // NOTE(review): `get_context()` already returns a reference; the extra
    // `&` only compiles via deref coercion (clippy: needless_borrow).
    pub fn kind(&self) -> &InternalErrorKind {
        &self.kind.get_context()
    }
}

impl Fail for InternalError {
    fn cause(&self) -> Option<&dyn Fail> {
        self.kind.cause()
    }

    fn backtrace(&self) -> Option<&Backtrace> {
        self.kind.backtrace()
    }
}
#[macro_use]
extern crate clap;
extern crate regex;
extern crate syllables;

use clap::App;
use std::fs::File;
use std::io::Read;

/// CLI entry point: resolves the input text (file arg, positional arg, or
/// interactive stdin, in that priority order) and pretty-prints syllable
/// counts for it.
fn main() {
    // The YAML file is found relative to the current file, similar to how modules are found
    let yaml = load_yaml!("cli.yml");
    let matches = App::from_yaml(yaml).get_matches();

    let mut text = String::from("");

    // File args take priority
    let path = matches.value_of("file").unwrap_or("");
    if path != "" {
        // Unreadable file silently degrades to empty text and falls through
        // to the other input sources.
        text = handle_file_input(path).unwrap_or(String::from(""));
    }

    // If nothing was read from a file, get input through command line
    if text == "" {
        text = match matches.value_of("INPUT") {
            Some(v) => v.to_string(),
            None => {
                println!("Enter a line of words to count syllables in: \n");
                let mut input = String::new();
                std::io::stdin()
                    .read_line(&mut input)
                    .expect("Failed to read input");
                input.trim().to_string()
            }
        };
    }

    print_syllable_info(&text);
}

/// Safely attempts to read the text in a file at a given path.
fn handle_file_input(path: &str) -> Result<String, std::io::Error> {
    let mut text = String::new();
    match File::open(path) {
        Ok(mut f) => {
            f.read_to_string(&mut text)?;
            Ok(text.trim().to_string())
        }
        Err(e) => Err(e),
    }
}

/// Count the number of syllables in each word in a single line of text.
/// Returns (word, syllable_count) pairs in input order.
fn count_line_syllables_per_word(line: &str) -> Vec<(&str, usize)> {
    let words = line.split_whitespace();
    let mut syllable_counts: Vec<(&str, usize)> = vec![];
    for word in words {
        let count = syllables::syllables_in_word(word);
        syllable_counts.push((word, count));
    }
    syllable_counts
}

/// Count the number of syllables in each "line" of text,
/// split by newline characters.
fn count_line_syllables(text: &str) -> Vec<(&str, usize)> {
    let lines = text.split('\n');
    let mut syllables: Vec<(&str, usize)> = vec![];
    for line in lines {
        let count = syllables::syllables_in_words(&line);
        syllables.push((&line, count));
    }
    syllables
}

/// Calculates the number of syllables in each word and line,
/// and pretty prints the results.
fn print_syllable_info(text: &str) {
    let lines = text.split('\n');
    let counts = count_line_syllables(&text);

    // Find length of longest line (used to right-align the totals column).
    let mut longest = 4; // min spacing for header
    for count in counts {
        if count.0.len() > longest {
            longest = count.0.len();
        }
    }

    // Header row plus a dashed underline sized to the widest line.
    print!(
        "\n{} {:>spaces$}",
        "Text",
        "Syllables",
        spaces = longest + 5
    );
    print!(
        "\n{:-<width1$} {:-<width2$}\n",
        "",
        "",
        width1 = longest,
        width2 = "Syllables".len()
    );

    for line in lines {
        // Per-word counts are printed centered above the line they describe.
        let word_counts = count_line_syllables_per_word(line);
        print_syllable_counts_per_word(&word_counts);

        // NOTE(review): recomputes this line's count because `counts` was
        // moved by the max-length loop above; index 0 is the whole line
        // since `line` contains no '\n'.
        let total = count_line_syllables(&line)[0].1;
        // Don't print anything if syllable count is 0 (i. e. empty line)
        if total == 0 { continue }
        let spacing = longest - line.len() + 1;
        print!("{} {:spaces$}", line, total, spaces = spacing);
        //print!("{}", total[0].1);
        print!("\n\n");
    }
}

/// Prints the number of syllables in each word in input.
/// The numbers are spaced so as to be centered above their
/// respective word if the line was printed below.
///
/// Takes one line at a time.
fn print_syllable_counts_per_word(counts: &Vec<(&str, usize)>) {
    for count in counts {
        let word_len = count.0.len();
        print!("{:^1$} ", count.1, word_len);
    }
    println!();
}
// chapter 4: syntax and semantics — match arms with guards on enum variants.

enum OptionalInt {
    Value(i32),
    Missing,
}

/// Describes `n` on stdout using a guarded match:
/// values above five, any other value, or no value at all.
fn report(n: &OptionalInt) {
    match *n {
        OptionalInt::Value(i) if i > 5 => println!("got an int bigger than five!"),
        OptionalInt::Value(..) => println!("got an int!"),
        OptionalInt::Missing => println!("no such luck."),
    }
}

fn main() {
    report(&OptionalInt::Value(5));
    report(&OptionalInt::Missing);
}

// expected output:
//   got an int!
//   no such luck.
use trigram::similarity;

use super::Sim;

/// Trigram similarity of `s1` and `s2`, scaled from the library's
/// fractional score up to a 0–100 range and truncated into a `Sim`.
pub fn str(s1: &str, s2: &str) -> Sim {
    let scaled = 100.0 * similarity(s1, s2);
    log::info!("similarity between '{}' and '{}': {}", s1, s2, scaled);
    Sim(scaled as u8)
}
#[macro_use]
extern crate neon;
extern crate markdown;

use neon::vm::{Call, JsResult,Module};
use neon::js::JsString;
// JsString is for String values and JsInteger is for pulling ints.
use neon::js::JsInteger;
use neon::mem::Handle;

/// Neon binding: renders the first JS string argument (markdown) to HTML
/// and returns it as a JS string.
fn render(call: Call) -> JsResult<JsString> {
    let scope = call.scope;
    // Require argument 0 and check it really is a JS string.
    let md: Handle<JsString> = try!(try!(call.arguments.require(scope,0)).check::<JsString>());
    let string = md.value();
    let html: String = markdown::to_html(&string);
    Ok(JsString::new(scope, &html).unwrap())
}

/// 1-indexed Fibonacci: fibonacci(1) == fibonacci(2) == 1.
/// NOTE(review): naive double recursion is exponential in `n`;
/// only suitable for small inputs. Non-positive `n` recurses without end.
fn fibonacci(n: i32) -> i32 {
    return match n {
        1 | 2 => 1,
        n => fibonacci(n - 1) + fibonacci(n - 2)
    }
}

/// Neon binding: reads a JS integer argument and returns fibonacci(n).
fn method(call: Call) -> JsResult<JsInteger> {
    let scope = call.scope;
    let x = try!(try!(call.arguments.require(scope, 0)).check::<JsInteger>()).value();
    Ok(JsInteger::new(scope, fibonacci(x as i32)))
}

// Module registration: only `fibonacci` is currently exported;
// the `render` export is left disabled.
register_module!(m, {
    m.export("fibonacci", method)
    //m.export("render",render)
});
#![recursion_limit = "256"]
#![cfg_attr(test, deny(warnings))]

#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;

use syn::export::{Span, ToTokens, TokenStream, TokenStream2};
use syn::{
    Attribute, Data, DataEnum, DeriveInput, Error, Field, Fields, Ident, Index, Lit, Member, Meta,
    NestedMeta, Path, Result, Type, TypeSlice,
};

/// Pieces shared by the struct-oriented derives: the struct's name, the
/// wrapped field's name and type, and the resolved `::std`/`::core` path.
struct Details<'a> {
    struct_name: &'a Ident,
    // Tokens naming the field: its identifier, or `0` for a tuple struct.
    field_name: TokenStream2,
    field_type: &'a Type,
    std: Path,
}

impl<'a> Details<'a> {
    /// Gathers the derive details for `field` of struct `struct_name`.
    pub fn from_input(struct_name: &'a Ident, field: &'a Field) -> Self {
        // Unnamed (tuple) fields are addressed as `.0`.
        let field_name = field
            .ident
            .as_ref()
            .map_or_else(|| quote!(0), ToTokens::into_token_stream);
        Details {
            struct_name,
            field_name,
            field_type: &field.ty,
            std: std(),
        }
    }
}

/// Derives `AsRef<FieldType>`, delegating to the wrapped field.
#[proc_macro_derive(AsRef, attributes(wrap))]
pub fn derive_asref(input: TokenStream) -> TokenStream {
    let derive_input = parse_macro_input!(input as DeriveInput);
    aserf_inner(derive_input)
        .unwrap_or_else(|e| e.to_compile_error())
        .into()
}

/// Derives `Index` (usize plus the range flavors) over the wrapped field.
#[proc_macro_derive(Index, attributes(wrap, index_output))]
pub fn derive_index(input: TokenStream) -> TokenStream {
    let derive_input = parse_macro_input!(input as DeriveInput);
    index_inner(derive_input)
        .unwrap_or_else(|e| e.to_compile_error())
        .into()
}

/// Derives `fmt::LowerHex`, delegating to the wrapped field.
#[proc_macro_derive(LowerHex, attributes(wrap))]
pub fn derive_lowerhex(input: TokenStream) -> TokenStream {
    let derive_input = parse_macro_input!(input as DeriveInput);
    lowerhex_inner(derive_input)
        .unwrap_or_else(|e| e.to_compile_error())
        .into()
}

/// Derives `fmt::LowerHex` by hex-formatting each element the wrapped
/// field yields from `.iter()`.
#[proc_macro_derive(LowerHexIter, attributes(wrap))]
pub fn derive_lowerhex_iter(input: TokenStream) -> TokenStream {
    let derive_input = parse_macro_input!(input as DeriveInput);
    lowerhexiter_inner(derive_input)
        .unwrap_or_else(|e| e.to_compile_error())
        .into()
}

/// Derives `fmt::Display` by forwarding to the trait named in the
/// `#[display_from(...)]` attribute (e.g. `Debug`).
#[proc_macro_derive(Display, attributes(wrap, display_from))]
pub fn derive_display(input: TokenStream) -> TokenStream {
    let derive_input = parse_macro_input!(input as DeriveInput);
    display_inner(derive_input)
        .unwrap_or_else(|e| e.to_compile_error())
        .into()
}
#[proc_macro_derive(From, attributes(wrap, derive_from))] pub fn derive_from(input: TokenStream) -> TokenStream { let derive_input = parse_macro_input!(input as DeriveInput); from_inner(derive_input) .unwrap_or_else(|e| e.to_compile_error()) .into() } #[proc_macro_derive(Error)] pub fn derive_error(input: TokenStream) -> TokenStream { let derive_input = parse_macro_input!(input as DeriveInput); error_inner(derive_input) .unwrap_or_else(|e| e.to_compile_error()) .into() } fn error_inner(input: DeriveInput) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let type_name = &input.ident; let std = std(); Ok(quote! { #[allow(unused_qualifications)] impl #impl_generics #std::error::Error for #type_name #ty_generics #where_clause { #[inline] fn description(&self) -> &str { "description() is deprecated; use Display" } } }) } fn from_inner(input: DeriveInput) -> Result<TokenStream2> { match input.data { Data::Struct(_) => from_inner_struct(&input), Data::Enum(ref data) => from_inner_enum(&input, &data), Data::Union(_) => Err(Error::new_spanned( &input, "Deriving From is not supported in unions", )), } } fn from_inner_enum(input: &DeriveInput, data: &DataEnum) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let std = std(); let mut res = TokenStream2::default(); let enum_name = &input.ident; for variant in &data.variants { let variant_name = &variant.ident; for attr in &variant.attrs { let mv = find_meta_value( attr, "derive_from", "#[derive_from(FirstType, SecondType)])", )?; if mv.found { if variant.fields.iter().len() > 1 { return Err(Error::new_spanned( &variant, "Deriving From for an enum variant with multiple fields isn't supported", )); } let mut froms = Vec::new(); let optional_field = variant.fields.iter().next(); match optional_field { Some(field) => match field.ident { Some(ref field_name) => froms.push(( field.ty.clone().into_token_stream(), 
quote! {{#field_name: inner}}, )), None => { froms.push((field.ty.clone().into_token_stream(), quote! {(inner)})) } }, None => { for name in mv.name { froms.push((name.into_token_stream(), quote! {})) } } } for from in froms { let field_type = from.0; let postfix = from.1; res = quote! { #res #[allow(unused_qualifications)] impl #impl_generics #std::convert::From<#field_type> for #enum_name #ty_generics #where_clause { #[inline] fn from(inner: #field_type) -> Self { #enum_name::#variant_name#postfix } } }; } break; } } } Ok(res) } fn from_inner_struct(input: &DeriveInput) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let field = get_field(&input, "From")?; let Details { struct_name, field_name, field_type, std, .. } = Details::from_input(&input.ident, field); Ok(quote! { #[allow(unused_qualifications)] impl #impl_generics #std::convert::From<#field_type> for #struct_name #ty_generics #where_clause { #[inline] fn from(wrap: #field_type) -> Self { #struct_name {#field_name: wrap} } } }) } fn display_inner(input: DeriveInput) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let struct_name = &input.ident; let std = std(); let display_from = get_meta_value( &input.attrs, "Display", "display_from", Some("#[display_from(Debug)]`"), )? .expect("provided example, should always return a value if succeeded."); Ok(quote! { #[allow(unused_qualifications)] impl #impl_generics #std::fmt::Display for #struct_name #ty_generics #where_clause { #[inline] fn fmt(&self, f: &mut #std::fmt::Formatter) -> #std::fmt::Result { #std::fmt::#display_from::fmt(&self, f) } } }) } fn lowerhexiter_inner(input: DeriveInput) -> Result<TokenStream2> { let field = get_field(&input, "LowerHexIter")?; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let Details { struct_name, field_name, std, .. } = Details::from_input(&input.ident, field); Ok(quote! 
{ #[allow(unused_qualifications)] impl #impl_generics #std::fmt::LowerHex<> for #struct_name #ty_generics #where_clause { #[inline] fn fmt(&self, f: &mut #std::fmt::Formatter) -> #std::fmt::Result { for ch in self.#field_name.iter() { #std::fmt::LowerHex::fmt(&ch, f)?; } #std::result::Result::Ok(()) } } }) } fn lowerhex_inner(input: DeriveInput) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let field = get_field(&input, "LowerHex")?; let Details { struct_name, field_name, std, .. } = Details::from_input(&input.ident, field); Ok(quote! { #[allow(unused_qualifications)] impl #impl_generics #std::fmt::LowerHex for #struct_name #ty_generics #where_clause { #[inline] fn fmt(&self, f: &mut #std::fmt::Formatter) -> #std::fmt::Result { #std::fmt::LowerHex::fmt(&self.#field_name, f) } } }) } #[allow(non_snake_case)] fn generate_index_from_T( output: Option<TokenStream2>, T: TokenStream2, input: &DeriveInput, field: &Field, ) -> TokenStream2 { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let Details { struct_name, field_name, field_type, std, } = Details::from_input(&input.ident, field); let field_type = array_to_slice(field_type.clone()); let output = output.unwrap_or_else(|| quote!(<#field_type as #std::ops::Index<#T>>::Output)); quote! { #[allow(unused_qualifications)] impl #impl_generics #std::ops::Index<#T> for #struct_name #ty_generics #where_clause { type Output = #output; #[inline] fn index(&self, index: #T) -> &Self::Output { &self.#field_name[index] } } } } fn index_inner(input: DeriveInput) -> Result<TokenStream2> { let field = get_field(&input, "Index")?; let index_output = get_meta_value(&input.attrs, "Index", "index_output", None)? 
.map(ToTokens::into_token_stream); let std = std(); let slice_output = index_output.clone().map(|index| quote!([#index])); let index_usize = generate_index_from_T(index_output, quote!(usize), &input, field); let index_range_usize = generate_index_from_T( slice_output.clone(), quote!(#std::ops::Range<usize>), &input, field, ); let index_range_to_usize = generate_index_from_T( slice_output.clone(), quote!(#std::ops::RangeTo<usize>), &input, field, ); let index_range_from_usize = generate_index_from_T( slice_output.clone(), quote!(#std::ops::RangeFrom<usize>), &input, field, ); let index_range_full = generate_index_from_T(slice_output, quote!(#std::ops::RangeFull), &input, field); Ok(quote! { #index_usize #index_range_usize #index_range_to_usize #index_range_from_usize #index_range_full }) } fn get_meta_value( attrs: &[Attribute], trait_name: &str, attribute_name: &str, example_if_required: Option<&str>, ) -> Result<Option<Member>> { let mut traits_found = Vec::with_capacity(attrs.len()); for attr in attrs { let mv = find_meta_value( attr, attribute_name, example_if_required.unwrap_or_default(), ) .unwrap_or_default(); if mv.multiple() { return Err(Error::new_spanned( attr, format!( "derive_wrapper: {} doesn't nested attributes", attribute_name ), )); } if mv.found { if let Some(trait_name) = mv.name.get(0) { traits_found.push(trait_name.clone()); } else { return Err(Error::new_spanned(attr, format!("derive_wrapper: when using the {} attribute on the struct you must specify the trait you want to use to implement {}", attribute_name, trait_name))); } } } match traits_found.len() { 1 => Ok(traits_found.pop()), 0 => { if let Some(example) = example_if_required { Err(Error::new(Span::call_site(), format!("Deriving {} requires specifying which trait to use using the `{}` attribute. 
Try: `{}`", trait_name, attribute_name, example))) } else { Ok(None) } } _ => Err(Error::new( Span::call_site(), format!( "Deriving {} supports only a single {} attribute", trait_name, attribute_name ), )), } } fn array_to_slice(ty: Type) -> Type { if let Type::Array(arr) = ty { Type::Slice(TypeSlice { bracket_token: arr.bracket_token, elem: arr.elem, }) } else { ty } } fn aserf_inner(input: DeriveInput) -> Result<TokenStream2> { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let field = get_field(&input, "AsRef")?; let Details { struct_name, field_name, field_type, std, } = Details::from_input(&input.ident, field); Ok(quote! { #[allow(unused_qualifications)] impl #impl_generics #std::convert::AsRef<#field_type> for #struct_name #ty_generics #where_clause { #[inline] fn as_ref(&self) -> &#field_type { &self.#field_name } } }) } fn get_field<'a>(input: &'a DeriveInput, trait_name: &str) -> Result<&'a Field> { let fields = match input.data { Data::Struct(ref data) => &data.fields, _ => { return Err(Error::new_spanned( &input, format!("Deriving {} is supported only in structs", trait_name), )) } }; if fields.iter().len() > 1 { let mut marked_fields = parse_outer_attributes(&input.attrs, &fields)?; marked_fields.extend(parse_field_attributes(&fields)?); match marked_fields.len() { 1 => Ok(marked_fields.pop().unwrap()), 0 => Err(Error::new_spanned(&input, format!("Deriving {} for a struct with multiple fields requires specifying a wrap attribute", trait_name))), _ => Err(Error::new_spanned(&input, format!("Deriving {} supports only a single wrap attribute", trait_name))), } } else { fields.iter().next().ok_or_else(|| { Error::new_spanned( &input, format!( "Deriving {} for an empty struct isn't supported", trait_name ), ) }) } } #[derive(Default)] struct MetaValue { pub found: bool, pub name: Vec<Member>, } impl MetaValue { pub fn push_name_ident(&mut self, ident: Ident) { self.name.push(Member::Named(ident)); } pub fn multiple(&self) 
-> bool { self.name.len() > 1 } pub fn push_name_index(&mut self, index: u32, span: Span) { self.name.push(Member::Unnamed(Index { index, span })); } pub fn push_name_from_lit(&mut self, lit: Lit) -> Result<()> { match lit { Lit::Str(l) => { if let Ok(index) = l.value().parse::<u32>() { self.push_name_index(index, l.span()); } else { self.push_name_ident(l.parse::<Ident>()?); } } Lit::Int(int) => self.push_name_index(int.value() as u32, int.span()), _ => (), } Ok(()) } pub fn get_first_name(&self) -> Option<String> { self.name.get(0).map(|name| match *name { Member::Unnamed(ref index) => index.index.to_string(), Member::Named(ref ident) => ident.to_string(), }) } pub fn get_first_index(&self) -> Option<u32> { self.name.get(0).and_then(|n| match *n { Member::Unnamed(ref i) => Some(i.index), Member::Named(_) => None, }) } } fn find_meta_value(attr: &Attribute, name: &str, example: &str) -> Result<MetaValue> { let mut res = MetaValue::default(); match attr.parse_meta() { Ok(meta) => { if meta.name() == name { res.found = true; match meta { Meta::NameValue(nv) => res.push_name_from_lit(nv.lit)?, Meta::List(list) => { for nestedmeta in list.nested.into_iter() { match nestedmeta { NestedMeta::Literal(lit) => res.push_name_from_lit(lit)?, NestedMeta::Meta(meta) => { if let Meta::Word(ident) = meta { res.push_name_ident(ident) } } } } } Meta::Word(_) => (), } } } Err(e) => return Err(Error::new(e.span(), format!("{}. Try: `{}`", e, example))), } Ok(res) } fn parse_outer_attributes<'a>(attrs: &[Attribute], fields: &'a Fields) -> Result<Vec<&'a Field>> { let mut res = Vec::with_capacity(attrs.len()); for attr in attrs { let mv = find_meta_value(attr, "wrap", "#[wrap]")?; if mv.found { if let Some(index) = mv.get_first_index() { if let Some(field) = fields.iter().nth(index as usize) { res.push(field); } else { return Err(Error::new_spanned(&fields, format!("derive_wrapper: there's no field no. 
{} in the struct or it's not a tuple", index))); } } else if let Some(lit_name) = mv.get_first_name() { let mut found = false; for f in fields { if let Some(ref field_name) = f.ident { if field_name == &lit_name { res.push(f); found = true; break; } } } if !found { return Err(Error::new_spanned( &fields, format!("derive_wrapper: field {} doesn't exist", lit_name), )); } } else { return Err(Error::new_spanned(&fields, "derive_wrapper: when using the wrap attribute on the struct you must specify the field name")); } } } Ok(res) } fn parse_field_attributes(fields: &Fields) -> Result<Vec<&Field>> { let mut res = Vec::with_capacity(fields.iter().len()); for field in fields.iter() { for attr in &field.attrs { let mv = find_meta_value(attr, "wrap", "#[wrap = \"first_field\"]")?; if mv.found { if let Some(ref ident) = field.ident { if let Some(lit) = mv.get_first_name() { if ident != &lit { return Err(Error::new_spanned(&field, format!("derive_wrapper: The provided field name doesn't match the field name it's above: `{} != {}`", lit, ident))); } } res.push(field) } else { return Err(Error::new_spanned(&field, "derive_wrapper doesn't yet support attributes on unnamed fields (Please file an issue)")); } } } } Ok(res) } #[inline(always)] fn std() -> Path { #[cfg(feature = "std")] return parse_quote!(::std); #[cfg(not(feature = "std"))] return parse_quote!(::core); }
//! Extraction of playable video URLs for the supported providers
//! (YouTube and Douyin).

use super::Provider;
use decipher::decipher;
use nom::types::CompleteStr;
use reqwest;
use serde_json;
use std::collections::HashMap;
use std::error::Error;
use std::io::prelude::*;
use std::process::{Command, Stdio};
use url::Url;

// nom parser: skips ahead to the `ytplayer.config` assignment embedded in
// a YouTube watch page and captures everything up to `;ytplayer.load`,
// i.e. the raw player-config JSON blob.
named!(
    ytconfig<CompleteStr, CompleteStr>,
    do_parse!(
        take_until_and_consume!("ytplayer.config")
            >> ws!(tag!("="))
            >> obj: take_until!(";ytplayer.load")
            >> (obj)
    )
);

// Subset of the YouTube player config that gets deserialized.
#[derive(Debug, Deserialize)]
struct Assets {
    // Path of the player JS asset (presumably holds the signature
    // decipher routine — TODO confirm against the decipher module).
    js: String,
}

#[derive(Debug, Deserialize)]
struct Args {
    // Comma-separated adaptive (DASH) format descriptors.
    adaptive_fmts: String,
}

#[derive(Debug, Deserialize)]
struct PlayerConfig {
    assets: Assets,
    args: Args,
}

/// Result of URL extraction: either a single direct stream URL, or a
/// pair of DASH stream URLs (video, audio).
#[derive(Debug)]
pub enum VideoUrl {
    Direct(String),
    Dash(String, String),
}

/// Resolve a provider reference to playable URL(s).
pub fn parse(p: &Provider) -> Result<VideoUrl, Box<Error>> {
    // let body = reqwest::get(&format!(
    //     "http://youtube.com/get_video_info?video_id={}",
    //     vid
    // ))?.text()?;
    // let mapping = parse_url(body.as_str())?;
    // if let Some(v) = mapping.get("url_encoded_fmt_stream_map") {
    //     for d in v.split(",") {
    //         let m2 = parse_url(d)?;
    //         if let Some(url) = m2.get("url") {
    //             return Ok(VideoUrl::Direct(url.to_string()));
    //         } else {
    //             let pair = parse_dash(vid);
    //             return Ok(VideoUrl::Dash(pair.0, pair.1));
    //         }
    //     }
    // } else {
    //     unreachable!("dash")
    // }
    match p {
        Provider::Youtube(id) => {
            // NOTE(review): `parse_dash` is defined elsewhere in this
            // module; it is assumed to return a (video, audio) URL pair
            // for the given video id — confirm at its definition.
            let pair = parse_dash(id);
            return Ok(VideoUrl::Dash(pair.0, pair.1));
        }
        Provider::Douyin(id) => {
            return Ok(parse_douyin(id));
        }
    }
}

// NOTE(review): stub implementation — fetches the share page (panicking
// on any network/decoding error via `unwrap`), dumps the body for
// debugging, then returns a hard-coded playback URL instead of one
// actually parsed from `body`.
fn parse_douyin(url: &str) -> VideoUrl {
    let body = reqwest::get(url).unwrap().text().unwrap();
    println!("{}", body);
    VideoUrl::Direct("https://aweme.snssdk.com/aweme/v1/playwm/?video_id=44e3f61f5b2542e1b600be67bd4ee584&line=0".to_string())
}

/// Parse a bare query string into a key/value map by grafting it onto a
/// dummy URL and reusing the `url` crate's query-pair parser.
fn parse_url(qs: &str) -> Result<HashMap<String, String>, Box<Error>> {
    let url = Url::parse(format!("https://example.com?{}", qs).as_str())?;
    let mapping: HashMap<_, _> = url.query_pairs().into_owned().collect();
    return Ok(mapping);
}

#[test]
fn test_parse_douyin() {
    // Exercises the stub above; performs a real network request.
    parse_douyin("https://www.tiktokv.com/i18n/share/video/6560042923969219841");
}
use bigneon_db::models::TicketPricing;
use chrono::NaiveDateTime;
use uuid::Uuid;

/// API-facing projection of a `TicketPricing` model row.
#[derive(Debug, Deserialize, PartialEq, Serialize)]
pub struct DisplayTicketPricing {
    pub id: Uuid,
    pub name: String,
    pub status: String,
    pub start_date: NaiveDateTime,
    pub end_date: NaiveDateTime,
    pub price_in_cents: i64,
}

impl From<TicketPricing> for DisplayTicketPricing {
    /// Converts an owned `TicketPricing` into its display form.
    ///
    /// Because `from` consumes the model, the `name` field can be moved
    /// instead of cloned (the original `.clone()` was a redundant String
    /// allocation). The status is rendered *before* the struct literal so
    /// that `status()` can still borrow the whole value; the struct
    /// literal then moves the remaining fields out one by one.
    fn from(ticket_pricing: TicketPricing) -> Self {
        let status = ticket_pricing.status().to_string();
        DisplayTicketPricing {
            id: ticket_pricing.id,
            name: ticket_pricing.name,
            status,
            start_date: ticket_pricing.start_date,
            end_date: ticket_pricing.end_date,
            price_in_cents: ticket_pricing.price_in_cents,
        }
    }
}
use crate::utils::wait_until; use crate::{Net, Spec, TestProtocol}; use ckb_sync::NetworkProtocol; use ckb_types::{ bytes::Bytes, packed::{GetHeaders, SyncMessage}, prelude::*, }; use log::info; pub struct MalformedMessage; impl Spec for MalformedMessage { crate::name!("malformed_message"); crate::setup!(protocols: vec![TestProtocol::sync()]); fn run(&self, net: &mut Net) { info!("Run malformed message"); info!("Connect node0"); let node0 = &net.nodes[0]; net.exit_ibd_mode(); net.connect(node0); info!("Test node should receive GetHeaders message from node0"); let peer_id = net.should_receive( |data: &Bytes| { SyncMessage::from_slice(&data) .map(|message| message.to_enum().item_name() == GetHeaders::NAME) .unwrap_or(false) }, "Test node should receive GetHeaders message from node0", ); info!("Send malformed message to node0 twice"); net.send( NetworkProtocol::SYNC.into(), peer_id, vec![0, 0, 0, 0].into(), ); net.send( NetworkProtocol::SYNC.into(), peer_id, vec![0, 1, 2, 3].into(), ); let rpc_client = net.nodes[0].rpc_client(); let ret = wait_until(10, || rpc_client.get_peers().is_empty()); assert!(ret, "Node0 should disconnect test node"); let ret = wait_until(10, || { rpc_client .get_banned_addresses() .iter() .any(|ban| ban.address == "127.0.0.1/32") }); assert!(ret, "Node0 should ban test node"); } } pub struct MalformedMessageWithWhitelist; impl Spec for MalformedMessageWithWhitelist { crate::name!("malformed_message_with_whitelist"); crate::setup!(num_nodes: 2, protocols: vec![TestProtocol::sync()]); fn run(&self, net: &mut Net) { info!("Run malformed message with whitelist"); let node1 = net.nodes.pop().unwrap(); net.exit_ibd_mode(); let mut node0 = net.nodes.pop().unwrap(); net.connect(&node0); info!("Test node should receive GetHeaders message from node0"); let peer_id = net.should_receive( |data: &Bytes| { SyncMessage::from_slice(&data) .map(|message| message.to_enum().item_name() == GetHeaders::NAME) .unwrap_or(false) }, "Test node should receive 
GetHeaders message from node0", ); let net_listen = format!( "/ip4/127.0.0.1/tcp/{}/p2p/{}", net.p2p_port(), net.node_id() ); node0.stop(); node0.edit_config_file( Box::new(|_| ()), Box::new(move |config| { config.network.whitelist_peers = vec![net_listen.parse().unwrap()] }), ); node0.start(); net.connect(&node0); let rpc_client = node0.rpc_client(); let ret = wait_until(10, || rpc_client.get_peers().len() == 1); assert!(ret, "Node0 should connect test node"); info!("Send malformed message to node0 twice"); net.send( NetworkProtocol::SYNC.into(), peer_id, vec![0, 0, 0, 0].into(), ); net.send( NetworkProtocol::SYNC.into(), peer_id, vec![0, 1, 2, 3].into(), ); node1.connect(&node0); let rpc_client = node0.rpc_client(); let ret = wait_until(10, || rpc_client.get_peers().len() == 2); assert!(ret, "Node0 should keep connection with test node"); let ret = wait_until(10, || rpc_client.get_banned_addresses().is_empty()); assert!(ret, "Node0 should not ban test node"); } }
use anyhow::Result;
use cargo_scaffold::cli_init;

/// Binary entry point: all CLI parsing and work is delegated to
/// `cargo_scaffold::cli_init`; its `anyhow::Result` propagates straight
/// out as the process exit status.
fn main() -> Result<()> {
    cli_init()
}
//! Fields based on user input evaluation /// Fields based on user input evaluation #[derive(Default)] pub struct InputCommands { pub move_camera_right: bool, pub move_camera_left: bool, pub move_camera_bottom: bool, pub move_camera_up: bool, pub entity_move_up: bool, pub entity_move_down: bool, pub entity_move_left: bool, pub entity_move_right: bool, }
/* Binary search trees (originally French: "arbres binaires de recherche").
   Test fixture for the "Petit Rust" mini-language: a null tree is encoded
   as a node whose `sub` vector is empty; an internal node always has
   exactly two children. */
struct BST {
    value: i32,
    sub: Vec<BST> // length 0 (null node) or 2 (left child, right child)
}

// Sentinel "null" tree: no children; the 42 payload is never read.
fn null() -> BST {
    let r = BST { value: 42, sub: vec![] };
    r
}

// A tree is the null sentinel iff it has no children.
fn is_null(a: & BST) -> bool {
    a.sub.len() == 0
}

// Petit Rust does not allow defining these shortcuts; too bad...
// fn left(a: & BST) -> & BST { & a.sub[0] }
// fn right(a: & BST) -> & BST { & a.sub[1] }
// fn left_mut(a: &mut BST) -> &mut BST { &mut a.sub[0] }
// fn right_mut(a: &mut BST) -> &mut BST { &mut a.sub[1] }

// Leaf node holding `v`: two null children.
fn leaf(v: i32) -> BST {
    let r = BST { value: v, sub: vec![null(), null()] };
    r
}

// Insert `x` into the (non-null) tree `a`, keeping the BST invariant
// (smaller values left, larger values right); duplicates are ignored.
fn insert(a: &mut BST, x: i32) {
    if x == a.value { return; }
    if x < a.value {
        if is_null(& a.sub[0]) {
            a.sub[0] = leaf(x);
        } else {
            insert(&mut a.sub[0], x);
        }
    } else {
        if is_null(& a.sub[1]) {
            a.sub[1] = leaf(x);
        } else {
            insert(&mut a.sub[1], x);
        }
    }
}

// Membership test.
// NOTE(review): when x < a.value and the left child is null, control falls
// through and searches the right subtree too; by the BST invariant that
// search can never succeed, so the result stays correct — just slower.
// Possibly intentional for this fixture; confirm before "fixing".
fn contient(a: & BST, x: i32) -> bool {
    if x == a.value { return true; }
    if x < a.value && !is_null(& a.sub[0]) {
        return contient(& a.sub[0], x);
    }
    if !is_null(& a.sub[1]) {
        return contient(& a.sub[1], x);
    }
    return false;
}

// Print "true\n" or "false\n".
fn print_bool(b: bool) {
    if b { print!("true\n") } else { print!("false\n") }
}

// Prints only the SIGN of x ("-", "+", or "0"), not its digits —
// deliberate: keeps the fixture's expected output simple.
fn print_int(x: i32) {
    if x < 0 { print!("-") } else if x > 0 { print!("+") } else { print!("0") }
}

// In-order traversal: each subtree is wrapped in parentheses, with each
// node rendered as its sign via print_int.
fn print(a: & BST) {
    print!("(");
    if !is_null(& a.sub[0]) { print(& a.sub[0]) }
    print_int(a.value);
    if !is_null(& a.sub[1]) { print(& a.sub[1]) }
    print!(")");
}

// Builds a small tree, probes membership, then prints its shape.
fn main() {
    let mut d = leaf(1);
    insert(&mut d, 17);
    insert(&mut d, -5);
    insert(&mut d, 8);
    print_bool(contient(& d, -5));
    print_bool(contient(& d, 0));
    print_bool(contient(& d, 17));
    print_bool(contient(& d, 3));
    insert(&mut d, 42);
    insert(&mut d, 8);
    insert(&mut d, -1000);
    insert(&mut d, 0);
    print(& d);
    print!("\n")
}
#![deny(warnings, clippy::all)] use std::{fmt::Debug, io::Cursor}; use cdr::{ BigEndian, Bounded, CdrBe, CdrLe, Error, Infinite, LittleEndian, PlCdrBe, PlCdrLe, Result, }; use serde_derive::{Deserialize, Serialize}; const ENCAPSULATION_HEADER_SIZE: u64 = 4; fn check<'de, T>(element: T, maybe_size: Option<u64>) where T: serde::Serialize + serde::Deserialize<'de> + PartialEq + Debug, { check_serialized_size(&element, maybe_size); check_round_trip(&element, maybe_size); check_capacity_shortage(&element, maybe_size); check_size_limit(&element, maybe_size); } fn check_serialized_size<'de, T>(element: &T, maybe_size: Option<u64>) where T: serde::Serialize + serde::Deserialize<'de> + PartialEq + Debug, { if let Some(serialized_size) = maybe_size { { let size = cdr::size::calc_serialized_data_size(&element); assert_eq!(serialized_size, size); } { let size = cdr::calc_serialized_size(&element); assert_eq!(serialized_size + ENCAPSULATION_HEADER_SIZE, size); } } } fn check_round_trip<'de, T>(element: &T, maybe_size: Option<u64>) where T: serde::Serialize + serde::Deserialize<'de> + PartialEq + Debug, { let size = match maybe_size { Some(v) => v, None => cdr::calc_serialized_size(&element), }; { let encoded = cdr::ser::serialize_data::<_, _, BigEndian>(element, Infinite).unwrap(); let decoded = cdr::de::deserialize_data::<T, BigEndian>(&encoded).unwrap(); assert_eq!(*element, decoded); assert_eq!(size, encoded.len() as u64); } { let encoded = cdr::ser::serialize_data::<_, _, LittleEndian>(element, Infinite).unwrap(); let decoded = cdr::de::deserialize_data::<T, LittleEndian>(&encoded).unwrap(); assert_eq!(*element, decoded); assert_eq!(size, encoded.len() as u64); } { let encoded = cdr::serialize::<_, _, CdrBe>(element, Infinite).unwrap(); let decoded = cdr::deserialize(&encoded).unwrap(); assert_eq!(*element, decoded); assert_eq!(size + ENCAPSULATION_HEADER_SIZE, encoded.len() as u64); } { let encoded = cdr::serialize::<_, _, CdrLe>(element, Infinite).unwrap(); let decoded = 
cdr::deserialize(&encoded).unwrap(); assert_eq!(*element, decoded); assert_eq!(size + ENCAPSULATION_HEADER_SIZE, encoded.len() as u64); } { let encoded = cdr::serialize::<_, _, PlCdrBe>(element, Infinite).unwrap(); let decoded = cdr::deserialize(&encoded).unwrap(); assert_eq!(*element, decoded); assert_eq!(size + ENCAPSULATION_HEADER_SIZE, encoded.len() as u64); } { let encoded = cdr::serialize::<_, _, PlCdrLe>(element, Infinite).unwrap(); let decoded = cdr::deserialize(&encoded).unwrap(); assert_eq!(*element, decoded); assert_eq!(size + ENCAPSULATION_HEADER_SIZE, encoded.len() as u64); } } fn check_capacity_shortage<'de, T>(element: &T, maybe_size: Option<u64>) where T: serde::Serialize + serde::Deserialize<'de> + PartialEq + Debug, { let mut buf = [0u8; 2000]; if let Some(bound) = calc_invalid_size(element, maybe_size) { { let mut buf = Cursor::new(&mut buf[0..bound as usize]); assert!(cdr::ser::serialize_data_into::<_, _, _, BigEndian>( &mut buf, &element, Infinite ) .is_err()); } { let mut buf = Cursor::new(&mut buf[0..bound as usize]); assert!(cdr::ser::serialize_data_into::<_, _, _, LittleEndian>( &mut buf, &element, Infinite ) .is_err()); } { let mut buf = Cursor::new(&mut buf[0..bound as usize]); assert!(cdr::serialize_into::<_, _, _, CdrBe>(&mut buf, &element, Infinite).is_err()); } { let mut buf = Cursor::new(&mut buf[0..bound as usize]); assert!(cdr::serialize_into::<_, _, _, CdrLe>(&mut buf, &element, Infinite).is_err()); } { let mut buf = Cursor::new(&mut buf[0..bound as usize]); assert!(cdr::serialize_into::<_, _, _, PlCdrBe>(&mut buf, &element, Infinite).is_err()); } { let mut buf = Cursor::new(&mut buf[0..bound as usize]); assert!(cdr::serialize_into::<_, _, _, PlCdrLe>(&mut buf, &element, Infinite).is_err()); } } else { { let mut buf = Cursor::new(&mut buf[0..0]); assert!(cdr::ser::serialize_data_into::<_, _, _, BigEndian>( &mut buf, &element, Infinite ) .is_ok()); } { let mut buf = Cursor::new(&mut buf[0..0]); 
assert!(cdr::ser::serialize_data_into::<_, _, _, LittleEndian>( &mut buf, &element, Infinite ) .is_ok()); } { let mut buf = Cursor::new(&mut buf[0..ENCAPSULATION_HEADER_SIZE as usize]); assert!(cdr::serialize_into::<_, _, _, CdrBe>(&mut buf, &element, Infinite).is_ok()); } { let mut buf = Cursor::new(&mut buf[0..ENCAPSULATION_HEADER_SIZE as usize]); assert!(cdr::serialize_into::<_, _, _, CdrLe>(&mut buf, &element, Infinite).is_ok()); } { let mut buf = Cursor::new(&mut buf[0..ENCAPSULATION_HEADER_SIZE as usize]); assert!(cdr::serialize_into::<_, _, _, PlCdrBe>(&mut buf, &element, Infinite).is_ok()); } { let mut buf = Cursor::new(&mut buf[0..ENCAPSULATION_HEADER_SIZE as usize]); assert!(cdr::serialize_into::<_, _, _, PlCdrLe>(&mut buf, &element, Infinite).is_ok()); } } } fn check_size_limit<'de, T>(element: &T, maybe_size: Option<u64>) where T: serde::Serialize + serde::Deserialize<'de> + PartialEq + Debug, { if let Some(bound) = calc_invalid_size(element, maybe_size) { assert!(cdr::ser::serialize_data::<_, _, BigEndian>(&element, Bounded(bound)).is_err()); assert!(cdr::ser::serialize_data::<_, _, LittleEndian>(&element, Bounded(bound)).is_err()); assert!(cdr::serialize::<_, _, CdrBe>(&element, Bounded(bound)).is_err()); assert!(cdr::serialize::<_, _, CdrLe>(&element, Bounded(bound)).is_err()); assert!(cdr::serialize::<_, _, PlCdrBe>(&element, Bounded(bound)).is_err()); assert!(cdr::serialize::<_, _, PlCdrLe>(&element, Bounded(bound)).is_err()); { let encoded = cdr::ser::serialize_data::<_, _, BigEndian>(&element, Infinite).unwrap(); let mut encoded = encoded.as_slice(); assert!(cdr::de::deserialize_data_from::<_, T, _, BigEndian>( &mut encoded, Bounded(bound) ) .is_err()); } { let encoded = cdr::ser::serialize_data::<_, _, LittleEndian>(&element, Infinite).unwrap(); let mut encoded = encoded.as_slice(); assert!(cdr::de::deserialize_data_from::<_, T, _, LittleEndian>( &mut encoded, Bounded(bound) ) .is_err()); } { let encoded = cdr::serialize::<_, _, CdrBe>(&element, 
Infinite).unwrap(); let mut encoded = encoded.as_slice(); assert!(cdr::deserialize_from::<_, T, _>(&mut encoded, Bounded(bound)).is_err()); } { let encoded = cdr::serialize::<_, _, CdrLe>(&element, Infinite).unwrap(); let mut encoded = encoded.as_slice(); assert!(cdr::deserialize_from::<_, T, _>(&mut encoded, Bounded(bound)).is_err()); } { let encoded = cdr::serialize::<_, _, PlCdrBe>(&element, Infinite).unwrap(); let mut encoded = encoded.as_slice(); assert!(cdr::deserialize_from::<_, T, _>(&mut encoded, Bounded(bound)).is_err()); } { let encoded = cdr::serialize::<_, _, PlCdrLe>(&element, Infinite).unwrap(); let mut encoded = encoded.as_slice(); assert!(cdr::deserialize_from::<_, T, _>(&mut encoded, Bounded(bound)).is_err()); } } else { { let encoded = cdr::ser::serialize_data::<_, _, BigEndian>(&element, Bounded(0)).unwrap(); let mut encoded = encoded.as_slice(); let decoded = cdr::de::deserialize_data_from::<_, T, _, BigEndian>(&mut encoded, Bounded(0)) .unwrap(); assert_eq!(*element, decoded); } { let encoded = cdr::ser::serialize_data::<_, _, LittleEndian>(&element, Bounded(0)).unwrap(); let mut encoded = encoded.as_slice(); let decoded = cdr::de::deserialize_data_from::<_, T, _, LittleEndian>(&mut encoded, Bounded(0)) .unwrap(); assert_eq!(*element, decoded); } { let encoded = cdr::serialize::<_, _, CdrBe>(&element, Bounded(ENCAPSULATION_HEADER_SIZE)) .unwrap(); let mut encoded = encoded.as_slice(); let decoded = cdr::deserialize_from::<_, T, _>(&mut encoded, Bounded(ENCAPSULATION_HEADER_SIZE)) .unwrap(); assert_eq!(*element, decoded); } { let encoded = cdr::serialize::<_, _, CdrLe>(&element, Bounded(ENCAPSULATION_HEADER_SIZE)) .unwrap(); let mut encoded = encoded.as_slice(); let decoded = cdr::deserialize_from::<_, T, _>(&mut encoded, Bounded(ENCAPSULATION_HEADER_SIZE)) .unwrap(); assert_eq!(*element, decoded); } { let encoded = cdr::serialize::<_, _, PlCdrBe>(&element, Bounded(ENCAPSULATION_HEADER_SIZE)) .unwrap(); let mut encoded = encoded.as_slice(); let 
decoded = cdr::deserialize_from::<_, T, _>(&mut encoded, Bounded(ENCAPSULATION_HEADER_SIZE)) .unwrap(); assert_eq!(*element, decoded); } { let encoded = cdr::serialize::<_, _, PlCdrLe>(&element, Bounded(ENCAPSULATION_HEADER_SIZE)) .unwrap(); let mut encoded = encoded.as_slice(); let decoded = cdr::deserialize_from::<_, T, _>(&mut encoded, Bounded(ENCAPSULATION_HEADER_SIZE)) .unwrap(); assert_eq!(*element, decoded); } } } fn calc_invalid_size<'de, T>(element: &T, maybe_size: Option<u64>) -> Option<u64> where T: serde::Serialize + serde::Deserialize<'de> + PartialEq + Debug, { match maybe_size { Some(v) if v > 0 => Some(v - 1), Some(_) => None, None => { let size = cdr::size::calc_serialized_data_size(&element); if size > 0 { Some(size - 1) } else { None } } } } #[test] fn test_octet() { check(std::u8::MIN, Some(1)); check(std::u8::MAX, Some(1)); } #[test] fn test_char() { check('a', Some(1)); check('Z', Some(1)); } #[test] fn test_unsigned_short() { check(std::u16::MIN, Some(2)); check(std::u16::MAX, Some(2)); } #[test] fn test_short() { check(std::i16::MIN, Some(2)); check(std::i16::MAX, Some(2)); } #[test] fn test_unsigned_long() { check(std::u32::MIN, Some(4)); check(std::u32::MAX, Some(4)); } #[test] fn test_long() { check(std::i32::MIN, Some(4)); check(std::i32::MAX, Some(4)); } #[test] fn test_unsigned_long_long() { check(std::u64::MIN, Some(8)); check(std::u64::MAX, Some(8)); } #[test] fn test_long_long() { check(std::i64::MIN, Some(8)); check(std::i64::MAX, Some(8)); } #[test] fn test_float() { check(std::f32::MIN, Some(4)); check(std::f32::MAX, Some(4)); } #[test] fn test_double() { check(std::f64::MIN, Some(8)); check(std::f64::MAX, Some(8)); } #[test] fn test_bool() { check(false, Some(1)); check(true, Some(1)); } #[test] fn test_string() { check("".to_string(), Some(5)); check("a".to_string(), Some(6)); } #[test] fn test_unsigned_short_alignment() { check(('a', 1u16), Some(1 + 1 + 2)); check((1u8, 1u16), Some(1 + 1 + 2)); check((1i8, 1u16), Some(1 + 1 + 
2)); check((1u16, 1u16), Some(2 + 2)); check((1i16, 1u16), Some(2 + 2)); check((1u32, 1u16), Some(4 + 2)); check((1i32, 1u16), Some(4 + 2)); check((1f32, 1u16), Some(4 + 2)); check((1f64, 1u16), Some(8 + 2)); check((true, 1u16), Some(1 + 1 + 2)); check(("a".to_string(), 1u16), Some(6 + 2)); } #[test] fn test_short_alignment() { check(('a', 1i16), Some(1 + 1 + 2)); check((1u8, 1i16), Some(1 + 1 + 2)); check((1i8, 1i16), Some(1 + 1 + 2)); check((1u16, 1i16), Some(2 + 2)); check((1i16, 1i16), Some(2 + 2)); check((1u32, 1i16), Some(4 + 2)); check((1i32, 1i16), Some(4 + 2)); check((1f32, 1i16), Some(4 + 2)); check((1f64, 1i16), Some(8 + 2)); check((true, 1i16), Some(1 + 1 + 2)); check(("a".to_string(), 1i16), Some(6 + 2)); } #[test] fn test_unsigned_long_alignment() { check(('a', 1u32), Some(1 + 3 + 4)); check((1u8, 1u32), Some(1 + 3 + 4)); check((1i8, 1u32), Some(1 + 3 + 4)); check((1u16, 1u32), Some(2 + 2 + 4)); check((1i16, 1u32), Some(2 + 2 + 4)); check((1u32, 1u32), Some(4 + 4)); check((1i32, 1u32), Some(4 + 4)); check((1f32, 1u32), Some(4 + 4)); check((1f64, 1u32), Some(8 + 4)); check((true, 1u32), Some(1 + 3 + 4)); check(("a".to_string(), 1u32), Some(6 + 2 + 4)); } #[test] fn test_long_alignment() { check(('a', 1i32), Some(1 + 3 + 4)); check((1u8, 1i32), Some(1 + 3 + 4)); check((1i8, 1i32), Some(1 + 3 + 4)); check((1u16, 1i32), Some(2 + 2 + 4)); check((1i16, 1i32), Some(2 + 2 + 4)); check((1u32, 1i32), Some(4 + 4)); check((1i32, 1i32), Some(4 + 4)); check((1f32, 1i32), Some(4 + 4)); check((1f64, 1i32), Some(8 + 4)); check((true, 1i32), Some(1 + 3 + 4)); check(("a".to_string(), 1i32), Some(6 + 2 + 4)); } #[test] fn test_unsigned_long_long_alignment() { check(('a', 1u64), Some(1 + 7 + 8)); check((1u8, 1u64), Some(1 + 7 + 8)); check((1i8, 1u64), Some(1 + 7 + 8)); check((1u16, 1u64), Some(2 + 6 + 8)); check((1i16, 1u64), Some(2 + 6 + 8)); check((1u32, 1u64), Some(4 + 4 + 8)); check((1i32, 1u64), Some(4 + 4 + 8)); check((1f32, 1u64), Some(4 + 4 + 8)); check((1f64, 
1u64), Some(8 + 8)); check((true, 1u64), Some(1 + 7 + 8)); check(("a".to_string(), 1u64), Some(6 + 2 + 8)); } #[test] fn test_long_long_alignment() { check(('a', 1i64), Some(1 + 7 + 8)); check((1u8, 1i64), Some(1 + 7 + 8)); check((1i8, 1i64), Some(1 + 7 + 8)); check((1u16, 1i64), Some(2 + 6 + 8)); check((1i16, 1i64), Some(2 + 6 + 8)); check((1u32, 1i64), Some(4 + 4 + 8)); check((1i32, 1i64), Some(4 + 4 + 8)); check((1f32, 1i64), Some(4 + 4 + 8)); check((1f64, 1i64), Some(8 + 8)); check((true, 1i64), Some(1 + 7 + 8)); check(("a".to_string(), 1i64), Some(6 + 2 + 8)); } #[test] fn test_float_alignment() { check(('a', 1f32), Some(1 + 3 + 4)); check((1u8, 1f32), Some(1 + 3 + 4)); check((1i8, 1f32), Some(1 + 3 + 4)); check((1u16, 1f32), Some(2 + 2 + 4)); check((1i16, 1f32), Some(2 + 2 + 4)); check((1u32, 1f32), Some(4 + 4)); check((1f32, 1f32), Some(4 + 4)); check((1f32, 1f32), Some(4 + 4)); check((1f64, 1f32), Some(8 + 4)); check((true, 1f32), Some(1 + 3 + 4)); check(("a".to_string(), 1f32), Some(6 + 2 + 4)); } #[test] fn test_double_alignment() { check(('a', 1f64), Some(1 + 7 + 8)); check((1u8, 1f64), Some(1 + 7 + 8)); check((1i8, 1f64), Some(1 + 7 + 8)); check((1u16, 1f64), Some(2 + 6 + 8)); check((1i16, 1f64), Some(2 + 6 + 8)); check((1u32, 1f64), Some(4 + 4 + 8)); check((1i32, 1f64), Some(4 + 4 + 8)); check((1f32, 1f64), Some(4 + 4 + 8)); check((1f64, 1f64), Some(8 + 8)); check((true, 1f64), Some(1 + 7 + 8)); check(("a".to_string(), 1f64), Some(6 + 2 + 8)); } #[test] fn test_seq_octet() { check(Vec::<u8>::new(), Some(4)); check(vec![0u8, 1, 2], Some(4 + 3)); } #[test] fn test_seq_char() { check(Vec::<char>::new(), Some(4)); check(vec!['a', 'b', 'c'], Some(4 + 3)); } #[test] fn test_seq_unsigned_short() { check(Vec::<u16>::new(), Some(4)); check(vec![0u16, 1, 2], Some(4 + 2 * 3)); } #[test] fn test_seq_short() { check(Vec::<i16>::new(), Some(4)); check(vec![0i16, 1, 2], Some(4 + 2 * 3)); } #[test] fn test_seq_unsigned_long() { check(Vec::<u32>::new(), Some(4)); 
check(vec![0u32, 1, 2], Some(4 + 4 * 3)); } #[test] fn test_seq_long() { check(Vec::<i32>::new(), Some(4)); check(vec![0i32, 1, 2], Some(4 + 4 * 3)); } #[test] fn test_seq_unsigned_long_long() { check(Vec::<u64>::new(), Some(4)); check(vec![0u64, 1, 2], Some(4 + 4 + 8 * 3)); } #[test] fn test_seq_long_long() { check(Vec::<i64>::new(), Some(4)); check(vec![0i64, 1, 2], Some(4 + 4 + 8 * 3)); } #[test] fn test_seq_float() { check(Vec::<f32>::new(), Some(4)); check(vec![0f32, 1., 2.], Some(4 + 4 * 3)); } #[test] fn test_seq_double() { check(Vec::<f64>::new(), Some(4)); check(vec![0f64, 1., 2.], Some(4 + 4 + 8 * 3)); } #[test] fn test_seq_bool() { check(Vec::<bool>::new(), Some(4)); check(vec![false, true, false], Some(4 + 3)); } #[test] fn test_seq_string() { check(Vec::<String>::new(), Some(4)); check( vec!["".to_string(), "a".to_string(), "b".to_string()], Some(4 + 4 + 1 + 3 + 4 + 2 + 2 + 4 + 2), ); } #[test] fn test_seq_in_seq() { check(vec![Vec::<usize>::new()], Some(8)); check(vec![vec![1i64, 3, 5], vec![-1, -3, -5]], Some(64)); } #[test] fn test_array_octet() { check([] as [u8; 0], Some(0)); check([0u8, 1, 2], Some(3)); } #[test] fn test_array_char() { check([] as [char; 0], Some(0)); check(['a', 'b', 'c'], Some(3)); } #[test] fn test_array_unsigned_short() { check([] as [u16; 0], Some(0)); check([0u16, 1, 2], Some(6)); } #[test] fn test_array_short() { check([] as [i16; 0], Some(0)); check([0i16, 1, 2], Some(6)); } #[test] fn test_array_unsigned_long() { check([] as [u32; 0], Some(0)); check([0u32, 1, 2], Some(12)); } #[test] fn test_array_long() { check([] as [i32; 0], Some(0)); check([0i32, 1, 2], Some(12)); } #[test] fn test_array_unsigned_long_long() { check([] as [u64; 0], Some(0)); check([0u64, 1, 2], Some(24)); } #[test] fn test_array_long_long() { check([] as [i64; 0], Some(0)); check([0i64, 1, 2], Some(24)); } #[test] fn test_array_float() { check([] as [f32; 0], Some(0)); check([0f32, 1., 2.], Some(12)); } #[test] fn test_array_double() { check([] as 
[f64; 0], Some(0)); check([0f64, 1., 2.], Some(24)); } #[test] fn test_array_bool() { check([] as [bool; 0], Some(0)); check([false, true, false], Some(3)); } #[test] fn test_array_string() { check([] as [String; 0], Some(0)); check( ["".to_string(), "a".to_string(), "b".to_string()], Some(5 + 3 + 6 + 2 + 6), ); } #[test] fn test_array_in_array() { check([[]] as [[usize; 0]; 1], Some(0)); check( [[std::f64::consts::PI, 2.71, 1.41], [1.73, 2.23, 2.44]], Some(48), ); } #[test] fn test_tuple() { check((1u32,), Some(4)); check((1u32, 2i32), Some(4 + 4)); check( (1u16, 2i16, std::f32::consts::PI, "hi".to_string()), Some(2 + 2 + 4 + 7), ); } #[test] fn test_tuple_containing_padding() { check((true, 1u64, 'z', 2.71f32), Some(24)); } #[test] fn test_struct() { #[derive(Serialize, Deserialize, PartialEq, Debug)] struct S { c: char, n: i32, b: bool, m: u64, s: String, } check( S { c: 'x', n: -7, b: true, m: 17, s: "hello".to_string(), }, Some(34), ); } #[test] fn test_struct_in_struct() { #[derive(Serialize, Deserialize, PartialEq, Debug)] struct Outer { i: Inner1, ii: Inner2, iii: Inner3, } #[derive(Serialize, Deserialize, PartialEq, Debug)] struct Inner1 { a: i32, b: u64, } #[derive(Serialize, Deserialize, PartialEq, Debug)] struct Inner2 { a: bool, b: f64, } #[derive(Serialize, Deserialize, PartialEq, Debug)] struct Inner3 { a: char, b: f32, } check( Outer { i: Inner1 { a: -3, b: 5 }, ii: Inner2 { a: false, b: 1.414 }, iii: Inner3 { a: 'a', b: 1.732 }, }, Some(40), ); } #[test] fn test_enum() { #[derive(Serialize, Deserialize, PartialEq, Debug)] enum E { One = 0, Two, Three, } check(vec![E::One, E::Two, E::Three], Some(4 + 4 * 3)); check( vec![E::One as u32, E::Two as u32, E::Three as u32], Some(4 + 4 * 3), ); } #[test] fn test_union() { #[derive(Serialize, Deserialize, PartialEq, Debug)] enum U { A(u32), B(i16, u32, u64), C { c: char, n: u32, b: bool, v: Vec<u8>, }, D, } check(U::A(3), Some(4 + 4)); check(U::B(1, 2, 3), Some(4 + 2 + 2 + 4 + 4 + 8)); check( U::C { c: 'a', 
n: 5, b: true, v: vec![1, 1, 2, 3, 5], }, Some(4 + 1 + 3 + 4 + 1 + 3 + 4 + 5), ); check(U::D, Some(4)); } #[test] fn test_unsupported() { use std::collections::{BTreeMap, HashMap}; fn check_error_kind<T: Debug>(res: Result<T>) { match res { Err(e) => match e { Error::TypeNotSupported => (), e => panic!("unexpected error kind: {}", e), }, _ => panic!("should be error"), } } check_error_kind(cdr::ser::serialize_data::<_, _, BigEndian>( &Some(1usize), Infinite, )); check_error_kind(cdr::ser::serialize_data::<_, _, BigEndian>( &None::<usize>, Infinite, )); check_error_kind(cdr::ser::serialize_data::<_, _, BigEndian>( &HashMap::<usize, usize>::new(), Infinite, )); check_error_kind(cdr::ser::serialize_data::<_, _, BigEndian>( &BTreeMap::<usize, usize>::new(), Infinite, )); check_error_kind(cdr::de::deserialize_data::<Option<usize>, BigEndian>( Vec::new().as_slice(), )); check_error_kind( cdr::de::deserialize_data::<HashMap<usize, usize>, BigEndian>(Vec::new().as_slice()), ); check_error_kind( cdr::de::deserialize_data::<BTreeMap<usize, usize>, BigEndian>(Vec::new().as_slice()), ); }
// 5 10 123 456 struct A { int x; int y; } A* alloc_a(int num) { A* a = allocate(8 * num); return a; } int main(int arg) { A* a = alloc_a(2); (*a).x = 5; (*a).y = 10; (*(a + 1)).x = 123; (*(a + 1)).y = 456; A* b = a + 1; printf("%d %d %d %d\n", (*a).x, (*a).y, (*b).x, (*b).y); free(a); return 0; }
// Copyright 2019, 2020 Wingchain // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::fs; use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; use tempfile::tempdir; use crypto::address::AddressImpl; use crypto::dsa::DsaImpl; use node_chain::{module, Chain, ChainConfig, DBConfig}; use node_txpool::support::DefaultTxPoolSupport; use node_txpool::{TxPool, TxPoolConfig}; use primitives::{Address, FullTransaction}; use utils_test::test_accounts; #[tokio::test] async fn test_txpool() { let dsa = Arc::new(DsaImpl::Ed25519); let address = Arc::new(AddressImpl::Blake2b160); let test_accounts = test_accounts(dsa, address); let (account1, account2) = (&test_accounts[0], &test_accounts[1]); let chain = get_chain(&account1.address); let config = TxPoolConfig { pool_capacity: 1024, }; let txpool_support = Arc::new(DefaultTxPoolSupport::new(chain.clone())); let txpool = TxPool::new(config, txpool_support).unwrap(); let tx = chain .build_transaction( Some((account1.secret_key.clone(), 0, 1)), chain .build_call( "balance".to_string(), "transfer".to_string(), module::balance::TransferParams { recipient: account2.address.clone(), value: 2, }, ) .unwrap(), ) .unwrap(); let expected_queue = vec![Arc::new(FullTransaction { tx: tx.clone(), tx_hash: chain.hash_transaction(&tx.clone()).unwrap(), })]; txpool.insert(tx).unwrap(); loop { { let queue = txpool.get_queue().read(); if queue.len() > 0 { assert_eq!(*queue, expected_queue); break; } } 
// (continuation of the polling loop in `test_txpool`)
        // Back off briefly between polls; insertion completes asynchronously.
        tokio::time::sleep(Duration::from_millis(10)).await;
    }
}

/// Inserting the identical tx twice must fail with a "Duplicated tx" error.
#[tokio::test]
async fn test_txpool_dup() {
    let dsa = Arc::new(DsaImpl::Ed25519);
    let address = Arc::new(AddressImpl::Blake2b160);
    let test_accounts = test_accounts(dsa, address);
    let (account1, account2) = (&test_accounts[0], &test_accounts[1]);
    let chain = get_chain(&account1.address);
    let config = TxPoolConfig {
        pool_capacity: 1024,
    };
    let txpool_support = Arc::new(DefaultTxPoolSupport::new(chain.clone()));
    let txpool = TxPool::new(config, txpool_support).unwrap();
    let tx = chain
        .build_transaction(
            Some((account1.secret_key.clone(), 0, 1)),
            chain
                .build_call(
                    "balance".to_string(),
                    "transfer".to_string(),
                    module::balance::TransferParams {
                        recipient: account2.address.clone(),
                        value: 2,
                    },
                )
                .unwrap(),
        )
        .unwrap();
    txpool.insert(tx.clone()).unwrap();
    // Second insert of the very same tx must be rejected.
    let result = txpool.insert(tx);
    assert!(format!("{}", result.unwrap_err()).contains("Duplicated tx"));
}

/// Validation failures on insert: a call tampered with after signing
/// (signature mismatch) and `until` values outside the allowed window.
#[tokio::test]
async fn test_txpool_validate() {
    let dsa = Arc::new(DsaImpl::Ed25519);
    let address = Arc::new(AddressImpl::Blake2b160);
    let test_accounts = test_accounts(dsa, address);
    let (account1, account2) = (&test_accounts[0], &test_accounts[1]);
    let chain = get_chain(&account1.address);
    let config = TxPoolConfig {
        pool_capacity: 1024,
    };
    let txpool_support = Arc::new(DefaultTxPoolSupport::new(chain.clone()));
    let txpool = TxPool::new(config, txpool_support).unwrap();
    let mut tx = chain
        .build_transaction(
            Some((account1.secret_key.clone(), 0, 1)),
            chain
                .build_call(
                    "balance".to_string(),
                    "transfer".to_string(),
                    module::balance::TransferParams {
                        recipient: account2.address.clone(),
                        value: 2,
                    },
                )
                .unwrap(),
        )
        .unwrap();
    // Mutating the call after signing invalidates the witness signature.
    tx.call.module = "unknown".to_string();
    let result = txpool.insert(tx.clone());
    assert_eq!(
        format!("{}", result.unwrap_err()),
        "TxPool Error: Insert error: Invalid tx: Invalid tx witness: Invalid signature"
    );
    // `until` = 21 exceeds the genesis `max_until_gap` of 20 (see `init`).
    let tx = chain
        .build_transaction(
            Some((account1.secret_key.clone(), 0, 21)),
            chain
                .build_call(
                    "balance".to_string(),
// (continuation of the over-the-max `until` case in `test_txpool_validate`)
                    "transfer".to_string(),
                    module::balance::TransferParams {
                        recipient: account2.address.clone(),
                        value: 2,
                    },
                )
                .unwrap(),
        )
        .unwrap();
    let result = txpool.insert(tx);
    assert_eq!(
        format!("{}", result.unwrap_err()),
        "TxPool Error: Insert error: Invalid tx: Invalid tx until: Exceed max until: 21"
    );
    // `until` = 0 is below the minimum: the tx has already expired.
    let tx = chain
        .build_transaction(
            Some((account1.secret_key.clone(), 0, 0)),
            chain
                .build_call(
                    "balance".to_string(),
                    "transfer".to_string(),
                    module::balance::TransferParams {
                        recipient: account2.address.clone(),
                        value: 2,
                    },
                )
                .unwrap(),
        )
        .unwrap();
    let result = txpool.insert(tx);
    assert_eq!(
        format!("{}", result.unwrap_err()),
        "TxPool Error: Insert error: Invalid tx: Invalid tx until: Exceed min until: 0"
    );
}

/// With `pool_capacity` = 2, a third distinct tx must be rejected with an
/// "Exceed capacity" error.
#[tokio::test]
async fn test_txpool_capacity() {
    let dsa = Arc::new(DsaImpl::Ed25519);
    let address = Arc::new(AddressImpl::Blake2b160);
    let test_accounts = test_accounts(dsa, address);
    let (account1, account2) = (&test_accounts[0], &test_accounts[1]);
    let chain = get_chain(&account1.address);
    // Deliberately tiny pool so the third insert overflows it.
    let config = TxPoolConfig { pool_capacity: 2 };
    let txpool_support = Arc::new(DefaultTxPoolSupport::new(chain.clone()));
    let txpool = TxPool::new(config, txpool_support).unwrap();
    // Three txs distinguished only by their nonce (0, 1, 2).
    let tx = chain
        .build_transaction(
            Some((account1.secret_key.clone(), 0, 1)),
            chain
                .build_call(
                    "balance".to_string(),
                    "transfer".to_string(),
                    module::balance::TransferParams {
                        recipient: account2.address.clone(),
                        value: 2,
                    },
                )
                .unwrap(),
        )
        .unwrap();
    let tx2 = chain
        .build_transaction(
            Some((account1.secret_key.clone(), 1, 1)),
            chain
                .build_call(
                    "balance".to_string(),
                    "transfer".to_string(),
                    module::balance::TransferParams {
                        recipient: account2.address.clone(),
                        value: 2,
                    },
                )
                .unwrap(),
        )
        .unwrap();
    let tx3 = chain
        .build_transaction(
            Some((account1.secret_key.clone(), 2, 1)),
            chain
                .build_call(
                    "balance".to_string(),
                    "transfer".to_string(),
                    module::balance::TransferParams {
                        recipient: account2.address.clone(),
                        value: 2,
                    },
                )
                .unwrap(),
        )
        .unwrap();
    txpool.insert(tx).unwrap();
// (continuation of `test_txpool_capacity`: the first tx is already inserted)
    txpool.insert(tx2).unwrap();
    // Third insert exceeds pool_capacity = 2 and must be refused.
    let result = txpool.insert(tx3);
    assert!(format!("{}", result.unwrap_err()).contains("Exceed capacity"));
}

/// Builds a throw-away chain rooted in a temp dir whose genesis endows
/// `address` with an initial balance (see `init`).
fn get_chain(address: &Address) -> Arc<Chain> {
    let path = tempdir().expect("Could not create a temp dir");
    // `into_path` leaks the TempDir guard so the directory outlives it for
    // the duration of the test.
    let home = path.into_path();

    init(&home, address);

    let db = DBConfig {
        memory_budget: 1 * 1024 * 1024, // 1 MiB is plenty for a test chain
        path: home.join("data").join("db"),
        partitions: vec![],
    };

    let chain_config = ChainConfig { home, db };

    let chain = Arc::new(Chain::new(chain_config).unwrap());
    chain
}

/// Writes a minimal `config/spec.toml` under `home`: blake2b hashing,
/// ed25519 signatures, poa consensus, `max_until_gap` = 20 (relied on by
/// `test_txpool_validate`) and a genesis endowment of 10 for `address`.
fn init(home: &PathBuf, address: &Address) {
    let config_path = home.join("config");
    fs::create_dir_all(&config_path).unwrap();

    // `{{`/`}}` are literal braces in `format!`; only `{}` interpolates the
    // endowed address into the genesis balance params.
    let spec = format!(
        r#"
[basic]
hash = "blake2b_256"
dsa = "ed25519"
address = "blake2b_160"
[genesis]
[[genesis.txs]]
module = "system"
method = "init"
params = '''
{{
    "chain_id": "chain-test",
    "timestamp": "2020-04-29T15:51:36.502+08:00",
    "max_until_gap": 20,
    "max_execution_gap": 8,
    "consensus": "poa"
}}
'''
[[genesis.txs]]
module = "balance"
method = "init"
params = '''
{{
    "endow": [
        ["{}", 10]
    ]
}}
'''
"#,
        address
    );

    fs::write(config_path.join("spec.toml"), &spec).unwrap();
}
use tide::log; //use tide::prelude::*; use tide::{Response, StatusCode}; mod charts; mod config; //use charts::Chart; #[async_std::main] async fn main() -> tide::Result<()> { log::start(); let conf = config::make_config(); let app = routes(tide::new()); app.listen(conf.bind_address()).await?; Ok(()) } fn routes(mut app: tide::Server<()>) -> tide::Server<()> { app.at("/").get(index); app.at("/fsm/new").post(new_fsm); app } async fn index(_req: tide::Request<()>) -> tide::Result { let mut graph = charts::Chart::new( "dot-server".into(), charts::ChartUnits::KiloBytes, false, "#8ff0a4".into(), 15, ); graph.add_point(1.0, 1.0); graph.add_point(1.5, 2.0); graph.add_point(2.0, 3.0); graph.add_point(3.0, 8.5); graph.add_point(3.5, 3.0); graph.add_point(4.0, 6.0); graph.add_point(4.5, 5.0); graph.add_point(5.0, 3.0); graph.add_point(5.5, 2.0); let svg: String = graph.draw_svg(1280, 1024)?; let response = Response::builder(StatusCode::Ok) .body(svg.to_string()) .content_type("image/svg+xml") .build(); Ok(response) } async fn new_fsm(_req: tide::Request<()>) -> tide::Result { Ok(format!("Hello, {}! I've put in an order for {} shoes", "lol", "lol").into()) }