text
stringlengths
8
4.13M
use std::io::prelude::*;
use std::fs::File;

// NOTE(Lito): We are ignoring Normals and Objects for now.

/// A single 3-D vertex position parsed from a `v` line of an OBJ file.
#[derive(Clone, Copy, Debug)]
pub struct Vertex {
    pub x: f64,
    pub y: f64,
    pub z: f64,
}

/// A polygonal face: 1-based indices into `Model::verts`, exactly as
/// they appear in the OBJ file (OBJ indices start at 1, not 0).
#[derive(Debug)]
pub struct Face {
    pub verts: Vec<usize>,
}

/// An in-memory OBJ model: the vertex pool plus the faces indexing it.
#[derive(Debug)]
pub struct Model {
    pub faces: Vec<Face>,
    pub verts: Vec<Vertex>,
    // normals
    // objects
}

impl Model {
    /// Scale all vertices by the same factor on every axis.
    pub fn scale_uniform(&mut self, s: f64) {
        // Delegate to the per-axis version instead of duplicating the loop.
        self.scale(s, s, s);
    }

    /// Scale all vertices by a per-axis factor.
    pub fn scale(&mut self, x: f64, y: f64, z: f64) {
        for v in &mut self.verts {
            v.x *= x;
            v.y *= y;
            v.z *= z;
        }
    }

    /// Translate all vertices by the given per-axis offsets.
    pub fn shift(&mut self, x: f64, y: f64, z: f64) {
        for v in &mut self.verts {
            v.x += x;
            v.y += y;
            v.z += z;
        }
    }
}

/// Parse an OBJ face line of the form `f v1/vt1/vn1 v2/vt2/vn2 ...`,
/// keeping only the vertex index of each group (texture/normal indices
/// are ignored for now).
///
/// Panics if a vertex index does not parse as a positive integer (e.g.
/// the negative relative indices the OBJ spec also allows).
fn parse_obj_face(line: &str) -> Face {
    let verts: Vec<usize> = line
        .split_whitespace()
        .skip(1) // skip the leading "f" keyword
        .map(|vgroup| {
            // `split` always yields at least one item, so `next` cannot fail;
            // the old code collected a whole Vec just to take the first item.
            let vertex = vgroup.split('/').next().unwrap();
            match vertex.parse::<usize>() {
                Ok(v) => v,
                Err(e) => panic!("{}, {}", e, vertex),
            }
        })
        .collect();
    Face { verts }
}

/// Parse an OBJ vertex line of the form `v x y z`.
///
/// The leading `v` keyword fails to parse as a float and is silently
/// dropped by `filter_map`. Panics if fewer than three numeric
/// coordinates are present on the line.
fn parse_obj_vert(line: &str) -> Vertex {
    let coords: Vec<f64> = line
        .split_whitespace()
        .filter_map(|c| c.parse::<f64>().ok())
        .collect();
    Vertex {
        x: coords[0],
        y: coords[1],
        z: coords[2],
    }
}

/// Parse the whole text of an OBJ file into a `Model`.
/// Lines other than `v` and `f` (normals, objects, comments, ...) are ignored.
pub fn parse(file: String) -> Model {
    let mut verts: Vec<Vertex> = Vec::new();
    let mut faces: Vec<Face> = Vec::new();
    for line in file.lines() {
        // `split_whitespace` also copes with tabs and runs of spaces,
        // which the previous `split(" ")` did not.
        match line.split_whitespace().next() {
            Some("v") => verts.push(parse_obj_vert(line)),
            Some("f") => faces.push(parse_obj_face(line)),
            _ => continue,
        }
    }
    Model { verts, faces }
}

/// Read a whole file into a `String`. Panics on any I/O error.
pub fn read(path: &str) -> String {
    let mut f = File::open(path).unwrap();
    let mut s = String::new();
    f.read_to_string(&mut s).unwrap();
    s
}

/// Load and parse the OBJ file at `path`.
pub fn load(path: &str) -> Model {
    parse(read(path))
}
use model::Grid;

/// Renders a `Grid` as ASCII art on stdout.
pub struct TextRenderer;

impl TextRenderer {
    /// Print the whole grid: a top wall, the cell rows (with a wall
    /// between consecutive rows), and a bottom wall.
    pub fn render(&self, grid: &Grid) {
        self.render_horizontal_wall(grid);
        self.render_rows(grid);
        self.render_horizontal_wall(grid);
    }

    /// Print one `| | | ... |` line per grid row, separating consecutive
    /// rows with a horizontal wall (no wall after the last row — the
    /// caller prints the closing one).
    fn render_rows(&self, grid: &Grid) {
        let last_row = grid.rows - 1;
        for row in 0..grid.rows {
            let mut line = String::from("|");
            for _ in 0..grid.columns {
                line.push_str(" |");
            }
            println!("{}", line);
            if row != last_row {
                self.render_horizontal_wall(grid);
            }
        }
    }

    /// Print a full-width horizontal wall: `+---+---+...+`.
    fn render_horizontal_wall(&self, grid: &Grid) {
        let mut wall = String::from("+");
        for _ in 0..grid.columns {
            wall.push_str("---+");
        }
        println!("{}", wall);
    }
}
use futures::ready; use std::io::{Error, ErrorKind, Result}; use std::pin::Pin; use std::task::{Context, Poll}; use crate::{AsyncReadAll, AsyncWriteAll, Request, Response}; use protocol::{MetaType, Protocol}; pub struct MetaStream<P, B> { instances: Vec<B>, idx: usize, parser: P, } impl<P, B> MetaStream<P, B> { pub fn from(parser: P, instances: Vec<B>) -> Self { Self { idx: 0, instances: instances, parser: parser, } } } impl<P, B> AsyncReadAll for MetaStream<P, B> where P: Unpin, B: AsyncReadAll + Unpin, { fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<Response>> { let me = &mut *self; unsafe { Pin::new(me.instances.get_unchecked_mut(me.idx)).poll_next(cx) } } } impl<P, B> AsyncWriteAll for MetaStream<P, B> where P: Protocol, B: AsyncWriteAll + Unpin, { fn poll_write(mut self: Pin<&mut Self>, cx: &mut Context, buf: &Request) -> Poll<Result<()>> { let me = &mut *self; match me.parser.meta_type(buf.data()) { MetaType::Version => { // 只需要发送请求到一个backend即可 for (i, b) in me.instances.iter_mut().enumerate() { ready!(Pin::new(b).poll_write(cx, buf))?; me.idx = i; return Poll::Ready(Ok(())); } } } return Poll::Ready(Err(Error::new( ErrorKind::Other, "all meta instance failed", ))); } }
//! The Credentials Provider for Credentials stored in a profile inside of a Credentials file. use regex::Regex; use std::ascii::AsciiExt; use std::collections::HashMap; use std::env::{home_dir, var as env_var}; use std::fs; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::{Path, PathBuf}; use {AwsCredentials, CredentialsError, ProvideAwsCredentials, in_ten_minutes}; /// Provides AWS credentials from a profile in a credentials file. #[derive(Clone, Debug)] pub struct ProfileProvider { /// The File Path the Credentials File is located at. file_path: PathBuf, /// The Profile Path to parse out of the Credentials File. profile: String, } impl ProfileProvider { /// Create a new `ProfileProvider` for the default credentials file path and profile name. pub fn new() -> Result<ProfileProvider, CredentialsError> { let profile_location = match env_var("AWS_SHARED_CREDENTIALS_FILE") { Ok(path) => PathBuf::from(path), Err(_) => { match ProfileProvider::default_profile_location() { Ok(path) => path, Err(err) => return Err(err), } } }; Ok(ProfileProvider { file_path: profile_location, profile: "default".to_owned(), }) } /// Default credentials file location: /// `~/.aws/credentials` (Linux/Mac) /// `%USERPROFILE%\.aws\credentials` (Windows) fn default_profile_location() -> Result<PathBuf, CredentialsError> { match home_dir() { Some(home_path) => { let mut credentials_path = PathBuf::from(".aws"); credentials_path.push("credentials"); Ok(home_path.join(credentials_path)) } None => Err(CredentialsError::new( "The environment variable HOME must be set.", )), } } /// Create a new `ProfileProvider` for the credentials file at the given path, using /// the given profile. pub fn with_configuration<F, P>(file_path: F, profile: P) -> ProfileProvider where F: Into<PathBuf>, P: Into<String>, { ProfileProvider { file_path: file_path.into(), profile: profile.into(), } } /// Get a reference to the credentials file path. 
pub fn file_path(&self) -> &Path { self.file_path.as_ref() } /// Get a reference to the profile name. pub fn profile(&self) -> &str { &self.profile } /// Set the credentials file path. pub fn set_file_path<F>(&mut self, file_path: F) where F: Into<PathBuf>, { self.file_path = file_path.into(); } /// Set the profile name. pub fn set_profile<P>(&mut self, profile: P) where P: Into<String>, { self.profile = profile.into(); } } impl ProvideAwsCredentials for ProfileProvider { fn credentials(&self) -> Result<AwsCredentials, CredentialsError> { parse_credentials_file(self.file_path()).and_then(|mut profiles| { profiles.remove(self.profile()).ok_or_else(|| { CredentialsError::new("profile not found") }) }) } } /// Parses a Credentials file into a Map of <ProfileName, AwsCredentials> fn parse_credentials_file( file_path: &Path, ) -> Result<HashMap<String, AwsCredentials>, CredentialsError> { match fs::metadata(file_path) { Err(_) => { return Err(CredentialsError::new(format!( "Couldn't stat credentials file: [ {:?} ]. 
Non existant, or no permission.", file_path ))) } Ok(metadata) => { if !metadata.is_file() { return Err(CredentialsError::new(format!( "Credentials file: [ {:?} ] is not a file.", file_path ))); } } }; let file = try!(File::open(file_path)); let profile_regex = Regex::new(r"^\[([^\]]+)\]$").expect("Failed to compile regex"); let mut profiles: HashMap<String, AwsCredentials> = HashMap::new(); let mut access_key: Option<String> = None; let mut secret_key: Option<String> = None; let mut token: Option<String> = None; let mut profile_name: Option<String> = None; let file_lines = BufReader::new(&file); for (line_no, line) in file_lines.lines().enumerate() { let unwrapped_line: String = line.expect(&format!( "Failed to read credentials file, line: {}", line_no )); // skip empty lines if unwrapped_line.is_empty() { continue; } // skip comments if unwrapped_line.starts_with('#') { continue; } // handle the opening of named profile blocks if profile_regex.is_match(&unwrapped_line) { if profile_name.is_some() && access_key.is_some() && secret_key.is_some() { let creds = AwsCredentials::new( access_key.unwrap(), secret_key.unwrap(), token, in_ten_minutes(), ); profiles.insert(profile_name.unwrap(), creds); } access_key = None; secret_key = None; token = None; let caps = profile_regex.captures(&unwrapped_line).unwrap(); profile_name = Some(caps.get(1).unwrap().as_str().to_string()); continue; } // otherwise look for key=value pairs we care about let lower_case_line = unwrapped_line.to_ascii_lowercase().to_string(); if lower_case_line.contains("aws_access_key_id") && access_key.is_none() { let v: Vec<&str> = unwrapped_line.split('=').collect(); if !v.is_empty() { access_key = Some(v[1].trim_matches(' ').to_string()); } } else if lower_case_line.contains("aws_secret_access_key") && secret_key.is_none() { let v: Vec<&str> = unwrapped_line.split('=').collect(); if !v.is_empty() { secret_key = Some(v[1].trim_matches(' ').to_string()); } } else if 
lower_case_line.contains("aws_session_token") && token.is_none() { let v: Vec<&str> = unwrapped_line.split('=').collect(); if !v.is_empty() { token = Some(v[1].trim_matches(' ').to_string()); } } else if lower_case_line.contains("aws_security_token") { if token.is_none() { let v: Vec<&str> = unwrapped_line.split('=').collect(); if !v.is_empty() { token = Some(v[1].trim_matches(' ').to_string()); } } } else { // Ignore unrecognized fields continue; } } if profile_name.is_some() && access_key.is_some() && secret_key.is_some() { let creds = AwsCredentials::new( access_key.unwrap(), secret_key.unwrap(), token, in_ten_minutes(), ); profiles.insert(profile_name.unwrap(), creds); } if profiles.is_empty() { return Err(CredentialsError::new("No credentials found.")); } Ok(profiles) } #[cfg(test)] mod tests { use std::env; use std::path::Path; use {CredentialsError, ProvideAwsCredentials}; use super::*; #[test] fn parse_credentials_file_default_profile() { let result = super::parse_credentials_file( Path::new("tests/sample-data/default_profile_credentials"), ); assert!(result.is_ok()); let profiles = result.ok().unwrap(); assert_eq!(profiles.len(), 1); let default_profile = profiles.get("default").expect( "No Default profile in default_profile_credentials", ); assert_eq!(default_profile.aws_access_key_id(), "foo"); assert_eq!(default_profile.aws_secret_access_key(), "bar"); } #[test] fn parse_credentials_file_multiple_profiles() { let result = super::parse_credentials_file( Path::new("tests/sample-data/multiple_profile_credentials"), ); assert!(result.is_ok()); let profiles = result.ok().unwrap(); assert_eq!(profiles.len(), 2); let foo_profile = profiles.get("foo").expect( "No foo profile in multiple_profile_credentials", ); assert_eq!(foo_profile.aws_access_key_id(), "foo_access_key"); assert_eq!(foo_profile.aws_secret_access_key(), "foo_secret_key"); let bar_profile = profiles.get("bar").expect( "No bar profile in multiple_profile_credentials", ); 
assert_eq!(bar_profile.aws_access_key_id(), "bar_access_key"); assert_eq!(bar_profile.aws_secret_access_key(), "bar_secret_key"); } #[test] fn parse_all_values_credentials_file() { let result = super::parse_credentials_file(Path::new("tests/sample-data/full_profile_credentials")); assert!(result.is_ok()); let profiles = result.ok().unwrap(); assert_eq!(profiles.len(), 1); let default_profile = profiles.get("default").expect( "No default profile in full_profile_credentials", ); assert_eq!(default_profile.aws_access_key_id(), "foo"); assert_eq!(default_profile.aws_secret_access_key(), "bar"); } #[test] fn profile_provider_happy_path() { let provider = ProfileProvider::with_configuration( "tests/sample-data/multiple_profile_credentials", "foo", ); let result = provider.credentials(); assert!(result.is_ok()); let creds = result.ok().unwrap(); assert_eq!(creds.aws_access_key_id(), "foo_access_key"); assert_eq!(creds.aws_secret_access_key(), "foo_secret_key"); } #[test] fn profile_provider_via_environment_variable() { let credentials_path = "tests/sample-data/default_profile_credentials"; env::set_var("AWS_SHARED_CREDENTIALS_FILE", credentials_path); let result = ProfileProvider::new(); assert!(result.is_ok()); let provider = result.unwrap(); assert_eq!(provider.file_path().to_str().unwrap(), credentials_path); env::remove_var("AWS_SHARED_CREDENTIALS_FILE"); } #[test] fn profile_provider_bad_profile() { let provider = ProfileProvider::with_configuration( "tests/sample-data/multiple_profile_credentials", "not_a_profile", ); let result = provider.credentials(); assert!(result.is_err()); assert_eq!( result.err(), Some(CredentialsError::new("profile not found")) ); } #[test] fn profile_provider_profile_name() { let mut provider = ProfileProvider::new().unwrap(); assert_eq!("default", provider.profile()); provider.set_profile("foo"); assert_eq!("foo", provider.profile()); } #[test] fn existing_file_no_credentials() { let result = 
super::parse_credentials_file(Path::new("tests/sample-data/no_credentials")); assert_eq!( result.err(), Some(CredentialsError::new("No credentials found.")) ) } #[test] fn parse_credentials_bad_path() { let result = super::parse_credentials_file(Path::new("/bad/file/path")); assert_eq!( result.err(), Some(CredentialsError::new( "Couldn\'t stat credentials file: [ \"/bad/file/path\" ]. Non existant, or no permission.", )) ); } #[test] fn parse_credentials_directory_path() { let result = super::parse_credentials_file(Path::new("tests/")); assert_eq!( result.err(), Some(CredentialsError::new( "Credentials file: [ \"tests/\" ] is not a file.", )) ); } #[test] fn parse_credentials_unrecognized_field() { let result = super::parse_credentials_file(Path::new( "tests/sample-data/unrecognized_field_profile_credentials", )); assert!(result.is_ok()); let profiles = result.ok().unwrap(); assert_eq!(profiles.len(), 1); let default_profile = profiles.get("default").expect( "No default profile in full_profile_credentials", ); assert_eq!(default_profile.aws_access_key_id(), "foo"); assert_eq!(default_profile.aws_secret_access_key(), "bar"); } }
//! This module implements a parser for the TSPLIB95 specification. This
//! specification can be found
//! [here](http://comopt.ifi.uni-heidelberg.de/software/TSPLIB95/)
//! under the link "Documentation". Currently this parser only supports
//! symmetric TSPs.
use nom::*;
use std::str::{self, FromStr};
use itertools::Itertools;

/// Essentially a deserialization of the TSPLIB95 file from specification
/// From Section 1
#[derive(Debug, Clone, PartialEq)]
pub struct DataFile {
    /// Identifies the data file, Section 1.1.1
    pub name: String,
    /// Specifies the type of data, Section 1.1.2
    pub data_type: DataType,
    /// Additional comments (usually contributor or creator of
    /// the problem instance), Section 1.1.3
    pub comment: Option<String>,
    /// For TSP or ATSP data type, the number of nodes (cities). For
    /// CVRP it is the total number of nodes and depots. For a TOUR
    /// it is the dimension of the corresponding problem. Unsure whether
    /// this is required across all data types, thus the Optional.
    /// Section 1.1.4
    pub dimension: Option<u64>,
    /// Specifies the truck capacity in a CVRP, Section 1.1.5
    pub capacity: Option<u64>,
    /// Specifies how the edge weights (or distances) are given. Generally
    /// this corresponds to a metric (though not always). Section 1.1.6
    pub edge_weight_type: Option<EdgeWeightType>,
    /// Describes the format of the edge weights if they are given explicitly
    /// Section 1.1.7
    pub edge_weight_format: Option<EdgeWeightFormat>,
    /// Describes the format in which the edges of a graph are given, if the graph
    /// is not complete, Section 1.1.7 [sic]
    pub edge_data_format: Option<EdgeDataFormat>,
    /// Specify coordinate types associated with each node (which, for example,
    /// may be used for either graphical displays or distance computations).
    /// It is stated that the default is No Coords but problems with coords
    /// commonly do not include this attribute, Section 1.1.9
    pub node_coord_type: Option<NodeCoordType>,
    /// Specifies how a graphical display of the nodes can be obtained, Section 1.1.10
    pub display_data_type: Option<DisplayDataType>,
    /// Hold each data section in the fact enum DataSection. Up to caller on how to handle
    pub data_section: Vec<DataSection>,
}

// We parse the file by doing the following:
// 1. Get the values of the keywords from the specification section. The parsers are written
//    such that none of them consume the input. Thus the keywords may be in any particular order.
// 2. Consume the specification part
// 3. Take until EOF and map into a many which matches on keyword, invoking a parser
//    on a chunk that is taken until the next keyword. The data section parser is defined
//    further below (`identify_data_section`).
// NOTE(review): `name_field`, `*_field`, and `get_spec` are defined elsewhere
// in this file and are not visible in this chunk.
named!(pub file_parser<DataFile>,
    do_parse!(
        name: name_field >>
        data_type: data_type_field >>
        comment: comment_field >>
        dimension: dimension_field >>
        capacity: capacity_field >>
        edge_weight_type: edge_weight_type_field >>
        edge_weight_format: edge_weight_format_field >>
        edge_data_format: edge_data_format_field >>
        node_coord_type: node_coord_type_field >>
        display_data_type: display_data_type_field >>
        get_spec >>
        data_section: apply!(identify_data_section, edge_weight_format) >>
        (DataFile {
            name: name,
            data_type: data_type,
            comment: comment,
            dimension: dimension,
            capacity: capacity,
            edge_weight_type: edge_weight_type,
            edge_weight_format: edge_weight_format,
            edge_data_format: edge_data_format,
            node_coord_type: node_coord_type,
            display_data_type: display_data_type,
            data_section: data_section
        })
    )
);

/// Enumerates the possible kinds of data found in a DataFile
/// Identified by the keyword "TYPE"
/// Found in Section 1.1.2
#[derive(Debug, Clone, PartialEq)]
pub enum DataType {
    /// Symmetric Traveling Salesman Problem
    TSP,
    /// Asymmetric Traveling Salesman Problem
    ATSP,
    /// Sequential Ordering Problem
    SOP,
    /// Hamilton Cycle Problem
    HCP,
    /// Capacitated Vehicle Routing Problem
    CVRP,
    /// A collection of tours
    TOUR,
}

// Parser for the enum value (case-insensitive tags)
named!(identify_data_type<&[u8], DataType>,
    alt_complete!(
        tag_no_case!("tsp") => {|_| DataType::TSP} |
        tag_no_case!("atsp") => {|_| DataType::ATSP} |
        tag_no_case!("sop") => {|_| DataType::SOP} |
        tag_no_case!("hcp") => {|_| DataType::HCP} |
        tag_no_case!("cvrp") => {|_| DataType::CVRP} |
        tag_no_case!("tour") => {|_| DataType::TOUR}
    )
);

/// Enumerates the ways that edge weights are given, Section 1.1.6
#[derive(Debug, Clone, Copy, PartialEq)]
#[allow(non_camel_case_types)]
pub enum EdgeWeightType {
    /// Weights are listed explicitly in the corresponding section
    Explicit,
    /// Weights are Euclidean distances in 2-D
    Euc_2D,
    /// Weights are Euclidean distances in 3-D
    Euc_3D,
    /// Weights are Maximum distances in 2-D
    Max_2D,
    /// Weights are Maximum distances in 3-D
    Max_3D,
    /// Weights are Manhattan distances in 2-D
    Man_2D,
    /// Weights are Manhattan distances in 3-D
    Man_3D,
    /// Weights are in Euclidean distances in 2-D, rounded up
    Ceil_2D,
    /// Weights are geographical distances
    Geo,
    /// Special distance function for att48 and att532
    Att,
    /// Special distance function for crystallography problems (V1)
    Xray1,
    /// Special distance function for crystallography problems (V2)
    Xray2,
    /// There is a special distance function documented elsewhere
    Special,
}

named!(identify_edge_weight_type<&[u8], EdgeWeightType>,
    alt_complete!(
        map!(tag_no_case!("explicit"), |_| EdgeWeightType::Explicit) |
        map!(tag_no_case!("euc_2d"), |_| EdgeWeightType::Euc_2D) |
        map!(tag_no_case!("euc_3d"), |_| EdgeWeightType::Euc_3D) |
        map!(tag_no_case!("max_2d"), |_| EdgeWeightType::Max_2D) |
        map!(tag_no_case!("max_3d"), |_| EdgeWeightType::Max_3D) |
        map!(tag_no_case!("man_2d"), |_| EdgeWeightType::Man_2D) |
        map!(tag_no_case!("man_3d"), |_| EdgeWeightType::Man_3D) |
        map!(tag_no_case!("ceil_2d"), |_| EdgeWeightType::Ceil_2D) |
        map!(tag_no_case!("geo"), |_| EdgeWeightType::Geo) |
        map!(tag_no_case!("att"), |_| EdgeWeightType::Att) |
        map!(tag_no_case!("xray1"), |_| EdgeWeightType::Xray1) |
        map!(tag_no_case!("xray2"), |_| EdgeWeightType::Xray2) |
        map!(tag_no_case!("special"), |_| EdgeWeightType::Special)
    )
);

/// Enumerate the ways the edge weights are given explicitly, Section 1.1.7
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum EdgeWeightFormat {
    /// Weights are given by a function
    Function,
    /// Weights are given by a full matrix
    FullMatrix,
    /// Upper Triangular Matrix (row-wise without diagonal entries)
    UpperRow,
    /// Lower Triangular Matrix (row-wise without diagonal entries)
    LowerRow,
    /// Upper Triangular Matrix (col-wise without diagonal entries)
    UpperCol,
    /// Lower Triangular Matrix (col-wise without diagonal entries)
    LowerCol,
    /// Upper Triangular Matrix (row-wise with diagonal entries)
    UpperDiagRow,
    /// Lower Triangular Matrix (row-wise with diagonal entries)
    LowerDiagRow,
    /// Upper Triangular Matrix (col-wise with diagonal entries)
    UpperDiagCol,
    /// Lower Triangular Matrix (col-wise with diagonal entries)
    LowerDiagCol,
}

named!(identify_edge_weight_format<&[u8], EdgeWeightFormat>,
    alt_complete!(
        map!(tag_no_case!("function"), |_| EdgeWeightFormat::Function) |
        map!(tag_no_case!("full_matrix"), |_| EdgeWeightFormat::FullMatrix) |
        map!(tag_no_case!("upper_row"), |_| EdgeWeightFormat::UpperRow) |
        map!(tag_no_case!("lower_row"), |_| EdgeWeightFormat::LowerRow) |
        map!(tag_no_case!("upper_diag_row"), |_| EdgeWeightFormat::UpperDiagRow) |
        map!(tag_no_case!("lower_diag_row"), |_| EdgeWeightFormat::LowerDiagRow) |
        map!(tag_no_case!("upper_col"), |_| EdgeWeightFormat::UpperCol) |
        map!(tag_no_case!("lower_col"), |_| EdgeWeightFormat::LowerCol) |
        map!(tag_no_case!("upper_diag_col"), |_| EdgeWeightFormat::UpperDiagCol) |
        map!(tag_no_case!("lower_diag_col"), |_| EdgeWeightFormat::LowerDiagCol)
    )
);

/// Enumeration of the ways in which edges of a graph are given if
/// the graph is not complete, Section 1.1.7 [sic]
#[derive(Debug, Clone, PartialEq)]
pub enum EdgeDataFormat {
    /// The graph is given by an edge list
    EdgeList,
    /// The graph is given as an adjacency list
    AdjList,
}

named!(identify_edge_data_format<&[u8], EdgeDataFormat>,
    alt_complete!(
        map!(tag_no_case!("edge_list"), |_| EdgeDataFormat::EdgeList) |
        map!(tag_no_case!("adj_list"), |_| EdgeDataFormat::AdjList)
    )
);

/// Enumeration of the coordinate types associated with each node
/// (which, for example, may be used for either graphical displays
/// or distance computations). It is stated that the default is
/// No Coords but problems with coords commonly do not include
/// this attribute, Section 1.1.9
#[derive(Debug, Clone, PartialEq)]
pub enum NodeCoordType {
    /// Nodes are specified by coordinates in 2-D
    TwoDCoords,
    /// Nodes are specified by coordinates in 3-D
    ThreeDCoords,
    /// The nodes do not have associated coordinates
    NoCoords,
}

named!(identify_node_coord_type<NodeCoordType>,
    alt_complete!(
        map!(tag_no_case!("twod_coords"), |_| NodeCoordType::TwoDCoords) |
        map!(tag_no_case!("threed_coords"), |_| NodeCoordType::ThreeDCoords) |
        map!(tag_no_case!("no_coords"), |_| NodeCoordType::NoCoords)
    )
);

/// Enumeration of ways to graphically display the nodes
#[derive(Debug, Clone, PartialEq)]
pub enum DisplayDataType {
    /// Display is generated from the node coordinates
    CoordDisplay,
    /// Explicit coordinates in 2-D are given
    TwoDDisplay,
    /// No graphical display is possible
    NoDisplay,
}

named!(identify_display_data_type<&[u8], DisplayDataType>,
    alt_complete!(
        map!(tag_no_case!("coord_display"), |_| DisplayDataType::CoordDisplay) |
        map!(tag_no_case!("twod_display"), |_| DisplayDataType::TwoDDisplay) |
        map!(tag_no_case!("no_display"), |_| DisplayDataType::NoDisplay)
    )
);

/// Data Section Enum specifier, Section 1.2
/// I don't think an enum is the right way to
/// represent this.
///
/// TODO: Investigate a different way.
///
/// Actually, on second thought, I am coming around to the idea.
#[derive(Debug, Clone, PartialEq)]
pub enum DataSection {
    /// Data holder for NodeCoordSection
    NodeCoordBlock(NodeCoordSection),
    // TODO: Implement
    // /// Data holder for DepotSection
    // DepotBlock(DepotSection),
    //
    // /// Data holder for DemandSection
    // DemandBlock(DemandSection),
    //
    // /// Data holder for EdgeDataSection
    // EdgeDataBlock(EdgeDataSection),
    //
    // /// Data holder for FixedEdgesSection
    // FixedEdgesBlock(FixedEdgesSection),
    /// Data holder for DisplayDataSection
    DisplayDataBlock(DisplayDataSection),
    /// Data holder for TourSection
    TourBlock(TourSection),
    /// Data holder for EdgeWeightSection
    EdgeWeightBlock(EdgeWeightSection),
    /// Sentinel value produced when the "EOF" keyword is reached
    EOF,
}

// The text passed into this parser does not include the Specification part or the EOF tag.
// This parser returns a vector of data sections. It passes each data section, including
// its keyword, up until the keyword of the next section or the end of buffer to the
// respective parser.

// First a convenience parser and enum to recognize data section keywords case-insensitive
// NOTE(review): the comment above says "case-insensitive" but the parser below uses
// `tag!`, which is case-sensitive — confirm which is intended.
enum DataSectionKeywords {
    NodeCoord,
    DisplayData,
    Tour,
    EdgeWeight,
    EOF,
}

named!(identify_data_section_keywords<DataSectionKeywords>,
    alt_complete!(
        tag!("NODE_COORD_SECTION") => {|_| DataSectionKeywords::NodeCoord} |
        tag!("DISPLAY_DATA_SECTION") => {|_| DataSectionKeywords::DisplayData} |
        tag!("TOUR_SECTION") => {|_| DataSectionKeywords::Tour} |
        tag!("EDGE_WEIGHT_SECTION") => {|_| DataSectionKeywords::EdgeWeight} |
        tag!("EOF") => {|_| DataSectionKeywords::EOF}
    )
);

// Recognizes one whole data-section block: either the literal "EOF" tag, or a
// section keyword followed by everything up to the next section keyword / EOF.
named!(recognize_data_section_block,
    recognize!(alt!(
        tag!("EOF") |
        preceded!(identify_data_section_keywords,
            alt!(
                take_until!("NODE_COORD_SECTION") |
                take_until!("DISPLAY_DATA_SECTION") |
                take_until!("TOUR_SECTION") |
                take_until!("EDGE_WEIGHT_SECTION") |
                take_until!("EOF")
            ))
    ))
);

// Dispatches one recognized block (keyword still at the front of `input`)
// to the parser for that section kind.
fn forward_to_identifier<'a>(input: &'a [u8],
                             edge_weight_format: Option<EdgeWeightFormat>)
                             -> IResult<&'a [u8], DataSection> {
    match identify_data_section_keywords(input) {
        IResult::Done(rem, key) => {
            use self::DataSectionKeywords as dsk;
            match key {
                dsk::DisplayData => identify_display_data_section(rem),
                // NOTE(review): unwraps `edge_weight_format` — this panics if an
                // EDGE_WEIGHT_SECTION appears in a file with no EDGE_WEIGHT_FORMAT.
                dsk::EdgeWeight => identify_edge_weight_section(rem, edge_weight_format.unwrap()),
                dsk::NodeCoord => identify_node_coord_section(rem),
                dsk::Tour => identify_tour_section(rem),
                // Return signal
                dsk::EOF => IResult::Done(rem, DataSection::EOF),
            }
        }
        IResult::Incomplete(i) => IResult::Incomplete(i),
        IResult::Error(err) => IResult::Error(err),
    }
}

// Repeatedly recognize a block and dispatch it, collecting the parsed
// sections until EOF / error — a hand-customized version of nom's `many0!`.
fn identify_data_section<'a>(input: &'a [u8],
                             edge_weight_format: Option<EdgeWeightFormat>)
                             -> IResult<&'a [u8], Vec<DataSection>> {
    // Take the many0 implementation and customize
    let ret;
    let mut res = ::std::vec::Vec::new();
    let mut new_input = input;

    loop {
        // NOTE(review): this checks `input` (the original buffer), not
        // `new_input` — termination therefore relies on the recognize/EOF
        // paths below rather than this length test; confirm intent.
        if input.input_len() == 0 {
            ret = IResult::Done(new_input, res);
            break;
        }

        match recognize_data_section_block(new_input) {
            // Data section is finished on error. (Data section keyword not at top of buffer)
            IResult::Error(_) => {
                ret = IResult::Done(new_input, res);
                break;
            }
            IResult::Incomplete(Needed::Unknown) => {
                ret = IResult::Incomplete(Needed::Unknown);
                break;
            }
            IResult::Incomplete(Needed::Size(i)) => {
                // Rebase the needed size onto the original input
                let size = i + (input).input_len() - new_input.input_len();
                ret = IResult::Incomplete(Needed::Size(size));
                break;
            }
            IResult::Done(i, o) => {
                // Check to make sure the input is actually being reduced
                if i == new_input {
                    ret = IResult::Error(error_position!(ErrorKind::Many0, input));
                    break;
                }

                // A whole data section should be passed in, so non-Done is bad.
                match forward_to_identifier(o, edge_weight_format) {
                    IResult::Error(err) => {
                        ret = IResult::Error(err);
                        break;
                    }
                    IResult::Incomplete(Needed::Unknown) => {
                        ret = IResult::Incomplete(Needed::Unknown);
                        break;
                    }
                    IResult::Incomplete(Needed::Size(i)) => {
                        let size = i + (input).input_len() - new_input.input_len();
                        ret = IResult::Incomplete(Needed::Size(size));
                        break;
                    }
                    IResult::Done(_, data) => {
                        // No check to be sure input has been consumed
                        if data == DataSection::EOF {
                            ret = IResult::Done(i, res);
                            break;
                        }
                        res.push(data);
                    }
                }

                // Advance the input
                new_input = i;
            }
        }
    }

    ret
}

/// Node coordinates are given in this section. Each line is of the form
/// > <integer> <real> <real>
/// if NodeCoordType is TwoDCoords, or
/// > <integer> <real> <real> <real>
/// if NodeCoordType is ThreeDCoords. The integers give the identifier
/// of the respective nodes and the real numbers give the associated coordinates
/// Section 1.2.1
#[derive(Debug, Clone, PartialEq)]
pub struct NodeCoordSection(pub Vec<Coord>);

// Parse each remaining line as a `Coord`, then wrap the collection in the
// DataSection enum.
named!(identify_node_coord_section<DataSection>,
    map!(
        map!(
            ws!(many0!(flat_map!(terminated!(not_line_ending, line_ending), identify_coord))),
            NodeCoordSection),
        DataSection::NodeCoordBlock)
);

/// Each coordinate has an identifier and then a collection of
/// coordinates. We keep the coordinates in a Vec since we
/// aren't sure if there will be two or three (and we can't
/// parameterize dynamically)
#[derive(Debug, Clone, PartialEq)]
pub struct Coord {
    /// First element in each line
    pub identifier: u64,
    /// Remaining elements in each line
    pub coords: Vec<f64>,
}

// The first element is taken as a u64
// and then the rest are taken as f64s.
// do_parse automatically unwraps singular tuples
// NOTE(review): `u64_from_bytes` is defined elsewhere in this file.
named!(identify_coord<Coord>,
    ws!(do_parse!(
        identifier: u64_from_bytes >>
        coordinates: many0!(float) >>
        (Coord {identifier: identifier, coords: coordinates})
    ))
);

/// Edge weights are given in the format specified by
/// EdgeWeightFormat. Presently all explicit data is integral
/// and is given in one of the self-explanatory matrix formats.
/// Only ever call when edge_weight_format is Some(..)
#[derive(Debug, Clone, PartialEq)]
pub struct EdgeWeightSection(pub Vec<Vec<f64>>);

fn identify_edge_weight_section<'a>(input: &'a [u8],
                                    edge_weight_format: EdgeWeightFormat)
                                    -> IResult<&'a [u8], DataSection> {
    // Match only on Full, UpperRow, UpperDiagRow, and LowerDiagRow
    // as they are the only ones used in the symmetric TSPs. Transform non-Full
    // formats to a FullMatrix
    // NOTE(review): debug print left in — consider removing before release.
    println!("{}", str::from_utf8(input).unwrap());
    // Match on the type of matrix, parse that data section, then transform it into
    // an explicit full matrix, returning just the data wrapped in a newtype
    match edge_weight_format {
        EdgeWeightFormat::FullMatrix => {
            map!(input,
                 map!(call!(identify_full_matrix_data),
                      |m: FullMatrixData| EdgeWeightSection(m.0)),
                 DataSection::EdgeWeightBlock)
        }
        EdgeWeightFormat::UpperRow => {
            map!(input,
                 map!(call!(identify_upper_row_data),
                      |m: UpperRowData| EdgeWeightSection(<UpperRowData as Into<FullMatrixData>>::into(m).0)),
                 DataSection::EdgeWeightBlock)
        }
        EdgeWeightFormat::UpperDiagRow => {
            map!(input,
                 map!(call!(identify_upper_diag_row_data),
                      |m: UpperDiagRowData| EdgeWeightSection(<UpperDiagRowData as Into<FullMatrixData>>::into(m).0)),
                 DataSection::EdgeWeightBlock)
        }
        EdgeWeightFormat::LowerDiagRow => {
            map!(input,
                 map!(call!(identify_lower_diag_row_data),
                      |m: LowerDiagRowData| EdgeWeightSection(<LowerDiagRowData as Into<FullMatrixData>>::into(m).0)),
                 DataSection::EdgeWeightBlock)
        }
        // Other formats do not occur in symmetric TSP instances
        _ => unimplemented!(),
    }
}

// General parsers for 2-dimensional, line-delimited data
named!(get_2d_data< Vec< Vec<f64> > >,
    many0!(flat_map!(terminated!(not_line_ending, line_ending), dbg!(ws!(many0!(float)))))
);

/// Hold the data in a 2 dimensional vector. This is row-major.
/// It will likely need to be converted to column major for
/// arrayfire but is not the responsibility of this module.
/// Actually since this is a symmetric TSP, the data will be correctly oriented
#[derive(Debug, Clone, PartialEq)]
pub struct FullMatrixData(pub Vec<Vec<f64>>);

// Rows are line delimited, columns are space delimited
named!(identify_full_matrix_data<FullMatrixData>, map!(get_2d_data, FullMatrixData));

// Make other data types convertible to a full matrix.
// We convert everything first to an upper diagonal row matrix
// and then convert that to a full matrix.
// To convert from Upper Diagonal row format we need to
// Vec-wise extend an Upper Diagonal row with a Lower Row format. So we
// 1. Clone data into a Lower Row format (inline transformation)
// 2. Iterate over the outer pairwise extending lower with upper
impl From<UpperDiagRowData> for FullMatrixData {
    fn from(data: UpperDiagRowData) -> FullMatrixData {
        // Get the data we care about
        let data = data.0;
        // Create a Lower Row format matrix
        let mut lower: Vec<Vec<f64>> = data.clone();
        lower.reverse();
        for (mut lower_inner, upper_inner) in izip!(&mut lower, data) {
            lower_inner.reverse();
            lower_inner.pop();
            // Now extend lower by upper
            lower_inner.extend(upper_inner);
        }
        FullMatrixData(lower)
    }
}

// Now just reuse existing trait impls
impl From<UpperRowData> for FullMatrixData {
    fn from(data: UpperRowData) -> FullMatrixData {
        <UpperRowData as Into<UpperDiagRowData>>::into(data).into()
    }
}

// Now just reuse existing trait impls
impl From<LowerDiagRowData> for FullMatrixData {
    fn from(data: LowerDiagRowData) -> FullMatrixData {
        <LowerDiagRowData as Into<UpperDiagRowData>>::into(data).into()
    }
}

/// Coordinates are as follows
/// + left -> right : ascending
/// + top -> bottom : descending
/// Rows are line separated
#[derive(Debug, Clone, PartialEq)]
pub struct UpperRowData(Vec<Vec<f64>>);

named!(identify_upper_row_data<UpperRowData>, map!(get_2d_data, UpperRowData));

/// Coordinates are as follows
/// + left -> right : ascending
/// + top -> bottom : ascending
/// Rows are not line separated but each new row begins with a 0
#[derive(Debug, Clone, PartialEq)]
pub struct UpperDiagRowData(Vec<Vec<f64>>);

// place in an explicit function for closure usage
pub fn identify_upper_diag_row_data<'a>(input: &'a [u8]) -> IResult<&'a [u8], UpperDiagRowData> {
    let res = ws!(input, many0!(float));
    // Toggled every time a 0 (row delimiter) is seen, so `group_by` splits
    // the flat float stream into rows.
    let mut key = false;
    match res {
        IResult::Incomplete(need) => IResult::Incomplete(need),
        IResult::Error(err) => IResult::Error(err),
        IResult::Done(rem, v) => {
            // Take the vector and group them by runs after 0, map each group to its own
            // vector and then collect into one final vector
            let res = v.into_iter()
                .group_by(|e| {
                    if *e == 0f64 {
                        key = !key;
                    }
                    key
                })
                .into_iter()
                .map(|e| e.1.collect::<Vec<f64>>())
                .collect::<Vec<_>>();
            IResult::Done(rem, UpperDiagRowData(res))
        }
    }
}

// For UpperRow -> UpperDiagRow format we need to
// 1. Place a new vector on the back
// 2. Place a 0 on the end of every vector
// 3. Reverse every inner vector
impl From<UpperRowData> for UpperDiagRowData {
    fn from(data: UpperRowData) -> UpperDiagRowData {
        // Take out the part we care about
        let mut data = data.0;
        // Append a new empty vector to the end
        data.push(Vec::new());
        // Append a 0 to the end of every inner vector
        for inner in data.iter_mut() {
            inner.push(0f64);
            // And finally reverse every inner vector
            inner.reverse();
        }
        UpperDiagRowData(data)
    }
}

// For LowerDiagRow -> UpperDiagRow format we need to
// reverse both the inner and outer vector
impl From<LowerDiagRowData> for UpperDiagRowData {
    fn from(data: LowerDiagRowData) -> UpperDiagRowData {
        // Take out the part we care about
        let mut data = data.0;
        // First reverse the outer vector
        data.reverse();
        // then reverse the inner vector
        for inner in data.iter_mut() {
            inner.reverse();
        }
        UpperDiagRowData(data)
    }
}

/// Coordinates are as follows
/// + left -> right : ascending
/// + top -> bottom : ascending
/// Rows are not line separated but each new row ends with a 0
#[derive(Debug, Clone, PartialEq)]
pub struct LowerDiagRowData(Vec<Vec<f64>>);
named!(identify_lower_diag_row_data<LowerDiagRowData>, map!(map!(ws!(many0!(float)), |v: Vec<f64>| { // Split at 0s and then pop off the end let mut ret = v.split(|e: &f64| *e == 0f64) .map(|i| i.to_vec()) .map(|mut i| {i.push(0f64); i}) .collect::<Vec<_>>(); ret.pop(); ret }), LowerDiagRowData)); /// Holds the tours given in a Tour Section /// Tours are separated by a -1 #[derive(Debug, Clone, PartialEq)] pub struct TourSection(pub Vec<Tour>); named!(identify_tour_section<DataSection>, map!(map!(ws!(many0!(terminated!(identify_tour, tag!("-1")))), TourSection), DataSection::TourBlock)); /// Holds the Data for each individual tour #[derive(Debug, Clone, PartialEq)] pub struct Tour(pub Vec<u64>); // Tours are white-space-delimited integers terminated by a -1. However, // the parent parser will strip the -1 named!(identify_tour<Tour>, map!(ws!(many0!(u64_from_bytes)), Tour)); /// Holds data for the Display Data Section. For symmetric TSPs only /// two coords are given. We reuse the Coord struct #[derive(Debug, Clone, PartialEq)] pub struct DisplayDataSection(pub Vec<Coord>); named!(identify_display_data_section<DataSection>, map!(map!(ws!(many0!(flat_map!(terminated!(not_line_ending, line_ending), identify_coord))), DisplayDataSection), DataSection::DisplayDataBlock)); // BEGIN SECTION : PARSERS // We begin first by introducing general parsers // Recognizes Keyword : Value from section 1.1 named!(key_value<(&[u8], &[u8])>, ws!(separated_pair!(take_until!(":"), tag!(":"), not_line_ending))); // Recognize the specification part, we take keyword : values until the parser fails named!(get_spec, recognize!(many0!(key_value))); // // Get the data part. The data is the last section so will finish upon reach EOF // // Each file is terminated by the tag EOF // named!(take_until_eof, terminated!(take_until!("EOF"), tag!("EOF"))); // Identify a u64 anf f64 from bytes. u64 is composed entirely of digits, and f64 is // digits potentially with a "." 
somewhere named!(u64_from_bytes<u64>, map_res!(map_res!(digit, str::from_utf8),u64::from_str)); // Taken from nom examples. // TODO: Study and figure out how this works named!(unsigned_float <f64>, map_res!(map_res!( recognize!(alt_complete!(delimited!(digit, tag!("."), opt!(complete!(digit))) | delimited!(opt!(digit), tag!("."), digit) | digit)), str::from_utf8), f64::from_str)); // TODO: Study and figure out how this works named!(float <f64>, map!(pair!(opt!(alt!(tag!("+") | tag!("-"))), unsigned_float), |(sign, value): (Option<&[u8]>, f64)| { (sign.and_then(|s| if s[0] == ('-' as u8) { Some(-1f64) } else { None }) .unwrap_or(1f64)) * value })); /// Convenience tuple accessor function fn get_second<T, U>(t: (T, U)) -> U { t.1 } // Get the 'value' of the pair ('keyword', 'value') returned from key_value named!(get_val_from_keyword, map!(key_value, get_second)); /// Generic return of value for specified keyword. The keyword is assumed to be at the front of /// the buffer fn get_val_of_tag_from_peek<'a>(input: &'a [u8], tag: &str) -> IResult<&'a [u8], &'a [u8]> { peek!(input, preceded!(take_until!(tag), get_val_from_keyword)) } // Parameterize for each keyword. 
List of keywords: // + NAME // + TYPE // + COMMENT // + DIMENSION // + CAPACITY // + EDGE_WEIGHT_TYPE // + EDGE_WEIGHT_FORMAT // + EDGE_DATA_FORMAT // + NODE_COORD_TYPE // + DISPLAY_DATA_TYPE named!(val_of_name, apply!(get_val_of_tag_from_peek, "NAME")); named!(val_of_data_type, apply!(get_val_of_tag_from_peek, "TYPE")); named!(val_of_comment, apply!(get_val_of_tag_from_peek, "COMMENT")); named!(val_of_dimension, apply!(get_val_of_tag_from_peek, "DIMENSION")); named!(val_of_capacity, apply!(get_val_of_tag_from_peek, "CAPACITY")); named!(val_of_edge_weight_type, apply!(get_val_of_tag_from_peek, "EDGE_WEIGHT_TYPE")); named!(val_of_edge_weight_format, apply!(get_val_of_tag_from_peek, "EDGE_WEIGHT_FORMAT")); named!(val_of_edge_data_format, apply!(get_val_of_tag_from_peek, "EDGE_DATA_FORMAT")); named!(val_of_node_coord_type, apply!(get_val_of_tag_from_peek, "NODE_COORD_TYPE")); named!(val_of_display_data_type, apply!(get_val_of_tag_from_peek, "DISPLAY_DATA_TYPE")); // These parsers will return the types as specified in the file struct and // will not consume the data fed to them named!(name_field<String>, map_res!(map!(val_of_name, <[u8] as ToOwned>::to_owned), String::from_utf8)); named!(data_type_field<DataType>, flat_map!(val_of_data_type, identify_data_type)); named!(comment_field<Option<String> >, opt!(map_res!(map!(val_of_comment, <[u8] as ToOwned>::to_owned), String::from_utf8))); named!(dimension_field<Option<u64> >, opt!(map_res!(map_res!(val_of_dimension, str::from_utf8), u64::from_str))); named!(capacity_field<Option<u64> >, opt!(map_res!(map_res!(val_of_capacity, str::from_utf8), u64::from_str))); named!(edge_weight_type_field<Option<EdgeWeightType> >, opt!(flat_map!(val_of_edge_weight_type, identify_edge_weight_type))); named!(edge_weight_format_field<Option<EdgeWeightFormat> >, opt!(flat_map!(val_of_edge_weight_format, identify_edge_weight_format))); named!(edge_data_format_field<Option<EdgeDataFormat> >, opt!(flat_map!(val_of_edge_data_format, 
identify_edge_data_format))); named!(node_coord_type_field<Option<NodeCoordType> >, opt!(flat_map!(val_of_node_coord_type, identify_node_coord_type))); named!(display_data_type_field<Option<DisplayDataType> >, opt!(flat_map!(val_of_display_data_type, identify_display_data_type))); // BEGIN SECTION PARSER TESTING extern crate glob; #[test] #[ignore] fn test_parsing_assets_folder() { use self::glob::glob; use std::fs::File; use std::io::Read; for test_path in glob(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/*.tsp")) .expect("Failed to find paths with glob") { let my_path = test_path.unwrap(); println!("{:?}", my_path); let mut my_file = File::open(my_path).unwrap(); let mut my_bytes: Vec<u8> = Vec::new(); my_file.read_to_end(&mut my_bytes); file_parser(&my_bytes).unwrap(); } } #[test] fn test_node_coord_section() { let test_buf = include_bytes!(concat!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets"), "/a280.tsp")); let (left, res) = file_parser(test_buf).unwrap(); assert_eq!(left, b"\n"); assert_eq!(res.name, "a280"); assert_eq!(res.dimension, Some(280)); } #[test] fn test_swiss_42() { let test_buf = include_bytes!(concat!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets"), "/swiss42.tsp")); let (left, res) = file_parser(test_buf).unwrap(); }
use anyhow::{anyhow, Error}; use css_in_rust::Style; use log::{debug, trace}; use std::collections::hash_map::HashMap; use std::time::Duration; use yew::services::timeout::TimeoutService; use yew::services::Task; use yew::{html, Component, ComponentLink, Html, ShouldRender}; use yewtil::NeqAssign; use crate::components::direction_control::{ DirectionControl, DirectionControlMode, DirectionModuleMode, }; use crate::components::sensors_data::SensorsData; use crate::services::rover_service::RoverService; #[derive(Debug)] pub enum Msg { RequestSensors, SensorDirectionUpdate((i32, i32)), SensorDirectionUpdateError(Error, (i32, i32)), MoveDirectionUpdate((i32, i32)), DistanceUpdate(f32), DistanceUpdateError(Error), ObstaclesUpdate(Vec<bool>), ObstaclesUpdateError(Error), LinesUpdate(Vec<bool>), LinesUpdateError(Error), } #[derive(Default)] pub struct State { pub sensor_direction: (i32, i32), pub sensor_direction_error: Option<Error>, pub move_direction: (i32, i32), pub distance: f32, pub distance_error: Option<Error>, pub lines: Vec<bool>, pub lines_error: Option<Error>, pub obstacles: Vec<bool>, pub obstacles_error: Option<Error>, } const REQUEST_SENSORS_TASK: &str = "task/timeout/request_sensors"; const GET_DISTANCE_TASK: &str = "task/sense/distance"; const GET_LINES_TASK: &str = "task/sense/lines"; const GET_OBSTACLES_TASK: &str = "task/sense/obstacles"; const MOVE_TASK: &str = "task/move"; const LOOK_TASK: &str = "task/look"; pub struct App { link: ComponentLink<Self>, state: State, style: Style, rover_service: RoverService, backend_tasks: HashMap<&'static str, Box<dyn Task>>, } impl App { fn current_sensor_direction(&self) -> Html { html! 
{ <p> {"Sensor direction "}<b>{"[ "}{self.state.sensor_direction.0}{" ; "}{self.state.sensor_direction.1}{" ]"}</b> </p> } } fn current_move_direction(&self) -> Html { let mut direction = "■"; if self.state.move_direction.1 > 0 { direction = "↑"; } else if self.state.move_direction.1 < 0 { direction = "↓"; } else if self.state.move_direction.0 > 0 { direction = "↻"; } else if self.state.move_direction.0 < 0 { direction = "↺"; } let mut speed = 0; if self.state.move_direction.0 != 0 { speed = self.state.move_direction.0; } else if self.state.move_direction.1 != 0 { speed = self.state.move_direction.1; } return html! { <p> {"Move direction "}<b>{direction}</b>{" Speed "}<b>{speed}</b> </p> }; } fn update_sensor_direction(&mut self, new_direction: (i32, i32)) -> ShouldRender { let old_direction = self.state.sensor_direction.clone(); if self.state.sensor_direction.neq_assign(new_direction) { self.backend_tasks.remove(LOOK_TASK); match self .rover_service .look_at( - self.state.sensor_direction.0 as i16, - self.state.sensor_direction.1 as i16, self.link.callback(move |r| match r { Ok(()) => Msg::SensorDirectionUpdate(new_direction), Err(e) => Msg::SensorDirectionUpdateError(e, old_direction) })) { Ok(task) => { self.backend_tasks.insert(LOOK_TASK, Box::new(task)); } Err(e) => { self.link.send_message( Msg::SensorDirectionUpdateError( anyhow!("Failed to request a look: {}", e), old_direction ) ); } } return true; } else { self.state.sensor_direction_error.take(); } false } fn update_sensor_direction_error(&mut self, e: Error, prev_direction: (i32, i32)) -> ShouldRender { self.backend_tasks.remove(LOOK_TASK); self.state.sensor_direction_error = Some(e); self.state.sensor_direction = prev_direction; true } fn update_move_direction(&mut self, new_direction: (i32, i32)) -> ShouldRender { self.state.move_direction.neq_assign(new_direction) } fn update_distance(&mut self, new_distance: f32) -> ShouldRender { self.backend_tasks.remove(GET_DISTANCE_TASK); 
self.state.distance_error.take(); self.reschedule_sensors_update(); self.state.distance.neq_assign(new_distance) } fn update_distance_error(&mut self, e: Error) -> ShouldRender { self.backend_tasks.remove(GET_DISTANCE_TASK); self.state.distance_error = Some(e); self.reschedule_sensors_update(); true } fn update_lines_state(&mut self, new_lines: Vec<bool>) -> ShouldRender { self.backend_tasks.remove(GET_LINES_TASK); self.state.lines_error.take(); self.state.lines = new_lines; self.reschedule_sensors_update(); true } fn update_lines_error(&mut self, e: Error) -> ShouldRender { self.backend_tasks.remove(GET_LINES_TASK); self.state.lines_error = Some(e); self.reschedule_sensors_update(); true } fn update_obstacles_state(&mut self, new_obstacles: Vec<bool>) -> ShouldRender { self.backend_tasks.remove(GET_OBSTACLES_TASK); self.state.obstacles_error.take(); self.state.obstacles = new_obstacles; self.reschedule_sensors_update(); true } fn update_obstacles_error(&mut self, e: Error) -> ShouldRender { self.backend_tasks.remove(GET_OBSTACLES_TASK); self.state.obstacles_error = Some(e); self.reschedule_sensors_update(); true } fn reschedule_sensors_update(&mut self) { if !self.backend_tasks.contains_key(GET_DISTANCE_TASK) && !self.backend_tasks.contains_key(GET_LINES_TASK) && !self.backend_tasks.contains_key(GET_OBSTACLES_TASK) { self.request_sensors_update(); } } fn request_sensors_update(&mut self) -> ShouldRender { match self .rover_service .get_distance(self.link.callback(|r| match r { Ok(d) => Msg::DistanceUpdate(d), Err(e) => Msg::DistanceUpdateError(e), })) { Ok(task) => { self.backend_tasks.insert(GET_DISTANCE_TASK, Box::new(task)); } Err(e) => { self.link.send_message(Msg::DistanceUpdateError(anyhow!( "Failed to request distance: {}", e ))); } }; match self .rover_service .get_lines(self.link.callback(|r| match r { Ok(ls) => Msg::LinesUpdate(ls), Err(e) => Msg::LinesUpdateError(e), })) { Ok(task) => { self.backend_tasks.insert(GET_LINES_TASK, Box::new(task)); } Err(e) 
=> { self.link.send_message(Msg::LinesUpdateError(anyhow!( "Failed to request line detections: {}", e ))); } }; match self .rover_service .get_obstacles(self.link.callback(|r| match r { Ok(os) => Msg::ObstaclesUpdate(os), Err(e) => Msg::ObstaclesUpdateError(e), })) { Ok(task) => { self.backend_tasks.insert(GET_OBSTACLES_TASK, Box::new(task)); } Err(e) => { self.link.send_message(Msg::ObstaclesUpdateError(anyhow!( "Failed to request line detections: {}", e ))); } }; false } } impl Component for App { type Message = Msg; type Properties = (); fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self { let state = State::default(); let style = Style::create( "App", r" width: 100%; height: 100%; display: flex; flex-direction: column; justify-content: center; align-items: center; .error { color: red; } .controls { display: flex; flex-direction: row; justify-content: space-between; box-sizing: border-box; width: 100%; padding: 10px 20px; position: fixed; bottom: 0; } .controls>div { width: 50%; display: flex; flex-direction: column; justify-content: space-between; align-items: center; } .controls>div>h5 { margin: 10px auto; text-align: center; } ", ) .unwrap(); let rover_service = RoverService::new("http://rover/api"); let sensor_update_handle = TimeoutService::spawn( Duration::from_secs(1), link.callback(|_| Msg::RequestSensors), ); let mut web_tasks = HashMap::<&str, Box<dyn Task>>::new(); web_tasks.insert(REQUEST_SENSORS_TASK, Box::new(sensor_update_handle)); trace!("Created."); App { link, state, style, rover_service, backend_tasks: web_tasks, } } fn update(&mut self, msg: Self::Message) -> ShouldRender { debug!("Processing message: {:#?}", msg); let should_render = match msg { Msg::RequestSensors => self.request_sensors_update(), Msg::LinesUpdate(ls) => self.update_lines_state(ls), Msg::LinesUpdateError(e) => self.update_lines_error(e), Msg::ObstaclesUpdate(os) => self.update_obstacles_state(os), Msg::ObstaclesUpdateError(e) => 
self.update_obstacles_error(e), Msg::DistanceUpdate(d) => self.update_distance(d), Msg::DistanceUpdateError(e) => self.update_distance_error(e), Msg::SensorDirectionUpdate(sd) => self.update_sensor_direction(sd), Msg::SensorDirectionUpdateError(e, sd) => self.update_sensor_direction_error(e, sd), Msg::MoveDirectionUpdate(md) => self.update_move_direction(md), }; trace!( "{} re-render.", if should_render { "Skipping" } else { "Will" } ); true } fn change(&mut self, _props: Self::Properties) -> ShouldRender { false } fn view(&self) -> Html { trace!("Rendering."); let mut extra_messages: Vec<String> = vec![]; if let Some(ref distance_err) = self.state.distance_error { extra_messages.push(format!("Distance/{}", distance_err)); } if let Some(ref obstactles_err) = self.state.obstacles_error { extra_messages.push(format!("Obstacles/{}", obstactles_err)); } if let Some(ref look_err) = self.state.sensor_direction_error { extra_messages.push(format!("Sensors/{}", look_err)) } return html! { <div class=self.style.clone()> <SensorsData left_obstacle={self.state.obstacles.get(0).unwrap_or(&false)} right_obstacle={self.state.obstacles.get(1).unwrap_or(&false)} distance={self.state.distance} messages={extra_messages} /> <div class="controls"> <div> <h5>{"Sensor Direction"}</h5> {self.current_sensor_direction()} <DirectionControl controller_id="sensor" control_mode={DirectionControlMode::Multidirectional} module_mode={DirectionModuleMode::Cumulative} on_direction_change=self.link.callback(|dir| Msg::SensorDirectionUpdate(dir)) size={50} /> </div> <div> <h5>{"Move Control"}</h5> {self.current_move_direction()} <DirectionControl controller_id="platform" on_direction_change=self.link.callback(|dir| Msg::MoveDirectionUpdate(dir)) size={50} x_step={10} y_step={10} xinc_title="↻" xdec_title="↺" has_reset={true} /> </div> </div> </div> }; } }
use apllodb_server::{ApllodbCommandSuccess, ApllodbServer};
use apllodb_shared_components::Session;

use self::step_res::StepRes;

pub(crate) mod step_res;
pub(crate) mod steps;

/// A single scripted SQL statement paired with the outcome the test
/// expects from executing it.
#[derive(Debug)]
pub struct Step {
    sql: String,
    expected: StepRes,
}

impl Step {
    /// Build a step from any string-like SQL text plus an expectation.
    pub fn new(sql: impl Into<String>, expected: StepRes) -> Self {
        Self {
            sql: sql.into(),
            expected,
        }
    }

    /// Run this step's SQL on `server` within `session`, panic if the
    /// outcome does not match `self.expected`, and hand back the session
    /// for the next step in the scenario.
    pub(super) async fn run(&self, server: &ApllodbServer, session: Session) -> Session {
        let outcome = server.command(session, self.sql.clone()).await;

        match outcome {
            // SELECT results: only StepRes::OkQuery may inspect them.
            Ok(ApllodbCommandSuccess::QueryResponse {
                session: query_session,
                records,
            }) => {
                match &self.expected {
                    StepRes::Ok => {
                        panic!(
                            "use StepRes::OkQuery for Step with SELECT SQL - step: {:#?}",
                            self
                        )
                    }
                    StepRes::OkQuery(f) => f(records).unwrap_or_else(|e| {
                        panic!("closure in StepRes::OkQuery caused error: {:#?}", e)
                    }),
                    StepRes::Err(_) => {
                        panic!("SELECT SQL has unexpectedly succeeded - step: {:#?}", self)
                    }
                }
                Session::from(query_session)
            }
            // Statements without a result set must expect a plain Ok.
            Ok(ApllodbCommandSuccess::ModificationResponse { session })
            | Ok(ApllodbCommandSuccess::DdlResponse { session })
            | Ok(ApllodbCommandSuccess::BeginTransactionResponse { session }) => {
                match &self.expected {
                    StepRes::Ok => {}
                    StepRes::OkQuery(_) => {
                        panic!(
                            "StepRes::OkQuery is only for SELECT SQL - step: {:#?}",
                            self
                        )
                    }
                    StepRes::Err(_) => {
                        panic!("SQL has unexpectedly succeeded - step: {:#?}", self)
                    }
                }
                Session::from(session)
            }
            Ok(ApllodbCommandSuccess::CreateDatabaseResponse { session }) => session,
            Ok(ApllodbCommandSuccess::UseDatabaseResponse { session })
            | Ok(ApllodbCommandSuccess::TransactionEndResponse { session }) => {
                Session::from(session)
            }
            // Failures must have been declared via StepRes::Err with the
            // matching error kind.
            Err(err_with_session) => {
                let apllodb_error = err_with_session.err;
                match &self.expected {
                    StepRes::Err(kind) => {
                        assert_eq!(
                            kind,
                            apllodb_error.kind(),
                            "\nexpected {:?} but got {:?} (got error detail follows)\n{:#?}\n",
                            kind,
                            apllodb_error.kind(),
                            apllodb_error
                        );
                    }
                    _ => panic!(
                        "unexpected error {} on ApllodbServer::command() - step: {:#?}",
                        apllodb_error, self
                    ),
                }
                err_with_session.session
            }
        }
    }
}
//! # chan_downloader
//!
//! `chan_downloader` is a collection of utilities to
//! download images/webms from a 4chan thread

use log::info;
use reqwest::{Client, Error};
use std::{
    fs::File,
    io::{self, Cursor},
};

/// Represents a 4chan thread
#[derive(Debug)]
pub struct Thread {
    // Board short name, e.g. "wg"
    pub board: String,
    // Numeric thread id parsed from the URL
    pub id: u32,
}

/// A single image link extracted from a thread page.
#[derive(Debug)]
pub struct Link {
    // Full capture of the link (scheme-relative, starts with "//")
    pub url: String,
    // Just the file name portion, e.g. "1489266570954.jpg"
    pub name: String,
}

/// Saves the image from the url to the given path.
/// Returns the path on success
///
/// # Examples
///
/// ```
/// use reqwest::Client;
/// use std::{env, fs::remove_file};
/// let client = Client::builder().user_agent("reqwest").build().unwrap();
/// let workpath = env::current_dir().unwrap().join("1489266570954.jpg");
/// let url = "https://i.4cdn.org/wg/1489266570954.jpg";
/// async {
///     let answer = chan_downloader::save_image(url, workpath.to_str().unwrap(), &client)
///         .await
///         .unwrap();
///     assert_eq!(workpath.to_str().unwrap(), answer);
///     remove_file(answer).unwrap();
/// };
/// ```
pub async fn save_image(url: &str, path: &str, client: &Client) -> Result<String, Error> {
    info!(target: "image_events", "Saving image to: {}", path);
    let response = client.get(url).send().await?;
    // Non-success statuses are silently skipped: the path is still
    // returned but no file is written.
    if response.status().is_success() {
        // NOTE(review): File::create / io::copy use unwrap(), so a
        // filesystem error panics — the signature can only carry
        // reqwest::Error. Consider a broader error type.
        let mut dest = File::create(path).unwrap();
        let mut content = Cursor::new(response.bytes().await?);
        io::copy(&mut content, &mut dest).unwrap();
    }
    info!("Saved image to: {}", path);
    Ok(String::from(path))
}

/// Returns the page content from the given url.
///
/// # Examples
///
/// ```
/// use reqwest::Client;
/// use std::io;
/// let client = Client::builder().user_agent("reqwest").build().unwrap();
/// let url = "https://raw.githubusercontent.com/mariot/chan-downloader/master/.gitignore";
/// async {
///     let result = chan_downloader::get_page_content(url, &client)
///         .await
///         .unwrap();
///     assert_eq!(result, "/target/\nCargo.lock\n**/*.rs.bk\n");
/// };
/// ```
pub async fn get_page_content(url: &str, client: &Client) -> Result<String, Error> {
    info!(target: "page_events", "Loading page: {}", url);
    let response = client.get(url).send().await?;
    let content = response.text().await?;
    info!("Loaded page: {}", url);
    Ok(content)
}

/// Returns the board name and thread id.
///
/// # Examples
///
/// ```
/// let url = "https://boards.4chan.org/wg/thread/6872254";
/// let thread = chan_downloader::get_thread_info(url);
///
/// assert_eq!(thread.board, "wg");
/// assert_eq!(thread.id, 6872254);
/// ```
#[must_use]
pub fn get_thread_info(url: &str) -> Thread {
    info!(target: "thread_events", "Getting thread info from: {}", url);
    // NOTE(review): indices 3 and 5 assume a URL shaped exactly like
    // https://host/<board>/thread/<id>[#anchor]; anything shorter panics.
    let url_vec: Vec<&str> = url.split('/').collect();
    let board_name = url_vec[3];
    // Strip an optional "#post" fragment from the id segment.
    let thread_vec: Vec<&str> = url_vec[5].split('#').collect();
    let thread_id = thread_vec[0];
    info!("Got thread info from: {}", url);
    Thread {
        board: board_name.to_owned(),
        id: thread_id.parse::<u32>().expect("failed to parse thread id"),
    }
}

/// Returns the links and the number of links from a page.
/// Note that the links are doubled
///
/// # Examples
///
/// ```
/// use reqwest::Client;
/// let client = Client::builder().user_agent("reqwest").build().unwrap();
/// let url = "https://boards.4chan.org/wg/thread/6872254";
/// async {
///     match chan_downloader::get_page_content(url, &client).await {
///         Ok(page_string) => {
///             let links_iter = chan_downloader::get_image_links(page_string.as_str());
///
///             for link in links_iter {
///                 println!("{} and {}", link.name, link.url);
///             }
///         },
///         Err(err) => eprintln!("Error: {}", err),
///     }
/// };
/// ```
///
/// Sample image links:
// - https://img.4plebs.org/boards/x/image/1660/66/1660662319160984.png
// - https://i.4cdn.org/sp/1661019073822058.jpg
#[must_use]
pub fn get_image_links(page_content: &str) -> Vec<Link> {
    info!(target: "link_events", "Getting image links");
    // Capture group 1: the whole scheme-relative link; group 2: the
    // bare file name.
    let reg = regex!(
        r"(//i(?:s|mg)?(?:\d*)?\.(?:4cdn|4chan|4plebs)\.org/(?:\w+/){1,3}(?:\d+/){0,2}(\d+\.(?:jpg|png|gif|webm)))"
    );
    let links_iter = reg.captures_iter(page_content);
    // NOTE(review): both the /2 here and the step_by(2) below assume
    // every link occurs exactly twice in the page markup — confirm.
    let number_of_links = reg.captures_iter(page_content).count() / 2;
    info!("Got {} image links from page", number_of_links);
    let mut links_v: Vec<Link> = Vec::new();
    for cap in links_iter.step_by(2) {
        links_v.push(Link {
            url: String::from(&cap[1]),
            name: String::from(&cap[2]),
        });
    }
    links_v
}

/// Initialize a [`Regex`] once
// Compiles the pattern lazily on first use and caches it in a static
// OnceCell, so repeated calls never recompile.
#[macro_export]
macro_rules! regex {
    ($re:expr $(,)?) => {{
        static RE: once_cell::sync::OnceCell<regex::Regex> = once_cell::sync::OnceCell::new();
        RE.get_or_init(|| regex::Regex::new($re).unwrap())
    }};
}

#[cfg(test)]
mod tests {
    use super::*;
    use reqwest::Client;

    #[test]
    fn it_gets_4chan_thread_info() {
        let url = "https://boards.4chan.org/wg/thread/6872254";
        let thread = get_thread_info(url);
        assert_eq!(thread.board, "wg");
        assert_eq!(thread.id, 6872254);
    }

    #[test]
    fn it_gets_4plebs_thread_info() {
        let url = "https://archive.4plebs.org/x/thread/32661196";
        let thread = get_thread_info(url);
        assert_eq!(thread.board, "x");
        assert_eq!(thread.id, 32661196);
    }

    #[test]
    fn it_gets_4chan_image_links() {
        // The link appears twice on purpose: get_image_links dedups
        // pairs via step_by(2).
        let links_iter = get_image_links(
            r#"
            <a href="//i.4cdn.org/wg/1489266570954.jpg" target="_blank">stickyop.jpg</a>
            <a href="//i.4cdn.org/wg/1489266570954.jpg" target="_blank">stickyop.jpg</a>
            "#,
        );
        for link in links_iter {
            assert_eq!(link.url, "//i.4cdn.org/wg/1489266570954.jpg");
            assert_eq!(link.name, "1489266570954.jpg");
        }
    }

    #[test]
    fn it_gets_4plebs_image_links() {
        // The regex match starts at "//", so the "https:" prefix in the
        // input is not part of the captured url.
        let links_iter = get_image_links(
            r#"
            <a href="https://img.4plebs.org/boards/x/image/1660/66/1660662319160984.png" target="_blank"></a>
            <a href="https://img.4plebs.org/boards/x/image/1660/66/1660662319160984.png" target="_blank"></a>
            "#,
        );
        for link in links_iter {
            assert_eq!(link.url, "//img.4plebs.org/boards/x/image/1660/66/1660662319160984.png");
            assert_eq!(link.name, "1660662319160984.png");
        }
    }

    #[tokio::test]
    async fn it_gets_page_content() {
        let client = Client::builder().user_agent("reqwest").build().unwrap();
        let url = "https://raw.githubusercontent.com/mariot/chan-downloader/master/.gitignore";
        let result = get_page_content(url, &client).await.unwrap();
        assert_eq!(result, "/target/\nCargo.lock\n**/*.rs.bk\n.idea/");
    }

    #[tokio::test]
    async fn it_saves_4chan_image() {
        use std::{env, fs};
        let client = Client::builder().user_agent("reqwest").build().unwrap();
        let workpath = env::current_dir().unwrap().join("1489266570954.jpg");
        let url = "https://i.4cdn.org/wg/1489266570954.jpg";
        let answer = save_image(url, workpath.to_str().unwrap(), &client)
            .await
            .unwrap();
        assert_eq!(workpath.to_str().unwrap(), answer);
        fs::remove_file(answer).unwrap();
    }

    #[tokio::test]
    async fn it_saves_4plebs_image() {
        use std::{env, fs};
        let client = Client::builder().user_agent("reqwest").build().unwrap();
        let workpath = env::current_dir().unwrap().join("1614942709612.jpg");
        let url = "https://img.4plebs.org/boards/x/image/1614/94/1614942709612.jpg";
        let answer = save_image(url, workpath.to_str().unwrap(), &client)
            .await
            .unwrap();
        assert_eq!(workpath.to_str().unwrap(), answer);
        fs::remove_file(answer).unwrap();
    }
}
// NOTE(review): this file looks svd2rust-generated (reader/writer proxy
// pattern) — confirm before hand-editing any logic.
#[doc = "Reader of register CH6_TOP"]
pub type R = crate::R<u32, super::CH6_TOP>;
#[doc = "Writer for register CH6_TOP"]
pub type W = crate::W<u32, super::CH6_TOP>;
#[doc = "Register CH6_TOP `reset()`'s with value 0xffff"]
impl crate::ResetValue for super::CH6_TOP {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Hardware reset value of the register.
        0xffff
    }
}
#[doc = "Reader of field `CH6_TOP`"]
pub type CH6_TOP_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `CH6_TOP`"]
pub struct CH6_TOP_W<'a> {
    w: &'a mut W,
}
impl<'a> CH6_TOP_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    // Replaces only the low 16 bits of the register word, leaving the
    // upper half untouched.
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:15"]
    #[inline(always)]
    pub fn ch6_top(&self) -> CH6_TOP_R {
        CH6_TOP_R::new((self.bits & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15"]
    #[inline(always)]
    pub fn ch6_top(&mut self) -> CH6_TOP_W {
        CH6_TOP_W { w: self }
    }
}
#[macro_use] extern crate log; use async_pq::Client; use async_std::task; use futures::future::join_all; use std::{thread, time}; fn main() { env_logger::init(); let st = time::Instant::now(); let conc = 10; let client = Client::new("postgresql://myuser:secret@localhost:15432/mydb", conc).unwrap(); let mut futures = vec![]; for i in 0..conc { let pool = client.pool.clone(); futures.push(query_something(pool, i)) } task::block_on(async { let res = join_all(futures).await; info!("{:?}", res); }); info!("All finished"); info!("Elapsed: {:?}", st.elapsed()); thread::sleep(time::Duration::from_secs(3600)) } async fn query_something(p: async_pq::Pool, i: usize) -> Result<(), Box<dyn std::error::Error>> { debug!("Futures: {} getting connection", i); let mut conn = p.get_conn().await?; Ok(()) }
/* * Binomial heap test (Rust) * * Copyright (c) 2022 Project Nayuki. (MIT License) * https://www.nayuki.io/page/binomial-heap * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * - The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * - The Software is provided "as is", without warranty of any kind, express or * implied, including but not limited to the warranties of merchantability, * fitness for a particular purpose and noninfringement. In no event shall the * authors or copyright holders be liable for any claim, damages or other * liability, whether in an action of contract, tort or otherwise, arising from, * out of or in connection with the Software or the use or other dealings in the * Software. 
*/


extern crate rand;
use rand::distributions::IndependentSample;
use rand::distributions::range::Range;

mod binomialheap;
use binomialheap::BinomialHeap;


/// Run every scenario; any failure panics via assert, so reaching the
/// final println means all tests passed.
fn main() {
    test_size_1();
    test_size_2();
    test_size_7();
    test_iterator();
    test_against_vec_randomly();
    test_against_rust_binary_heap_randomly();
    println!("Test passed");
}


// Single push/pop round-trip on a one-element heap.
fn test_size_1() {
    let mut h = BinomialHeap::<i32>::new();
    h.push(3);
    h.check_structure();
    assert_eq!(h.len(), 1);
    assert_eq!(*h.peek().unwrap(), 3);
    assert_eq!(h.pop(), Some(3));
    h.check_structure();
    assert_eq!(h.len(), 0);
}


// Two elements must come out in ascending (min-heap) order.
fn test_size_2() {
    let mut h = BinomialHeap::<i32>::new();
    h.push(4);
    h.push(2);
    h.check_structure();
    assert_eq!(h.len(), 2);
    assert_eq!(*h.peek().unwrap(), 2);
    assert_eq!(h.pop(), Some(2));
    h.check_structure();
    assert_eq!(h.len(), 1);
    assert_eq!(*h.peek().unwrap(), 4);
    assert_eq!(h.pop(), Some(4));
    h.check_structure();
    assert_eq!(h.len(), 0);
}


// Seven elements including a duplicate (1 twice); pops must be sorted.
fn test_size_7() {
    let mut h = BinomialHeap::<i32>::new();
    h.push(2);
    h.push(7);
    h.push(1);
    h.push(8);
    h.push(3);
    h.check_structure();
    h.push(1);
    h.push(4);
    h.check_structure();
    assert_eq!(h.len(), 7);
    assert_eq!(h.pop(), Some(1));  assert_eq!(h.len(), 6);
    assert_eq!(h.pop(), Some(1));  assert_eq!(h.len(), 5);
    assert_eq!(h.pop(), Some(2));  assert_eq!(h.len(), 4);
    assert_eq!(h.pop(), Some(3));  assert_eq!(h.len(), 3);
    h.check_structure();
    assert_eq!(h.pop(), Some(4));  assert_eq!(h.len(), 2);
    assert_eq!(h.pop(), Some(7));  assert_eq!(h.len(), 1);
    assert_eq!(h.pop(), Some(8));  assert_eq!(h.len(), 0);
    h.check_structure();
}


// For every prefix of SEQUENCE, the heap's into_iter must yield exactly
// the sorted prefix (compared element-wise against Vec's sort).
fn test_iterator() {
    const SEQUENCE: &[i32] = &[
        21, 62, 99, 22, 26, 48, 25, 33, 85, 58,
        57, 31, 47, 32, 59, 41, 27, 42, 95, 94,
        67, 90, 80, 52, 50, 13, 17, 19, 66, 11,
        44, 51, 68, 89, 64, 53, 65, 49, 34, 39,
        16, 37, 71, 96, 74, 46, 18, 36, 30, 38,
        75, 61, 29, 84, 87, 73, 15, 40, 82, 83,
        63, 86, 54, 77, 55, 14, 70, 45, 92, 93,
        43, 12, 24, 35, 23, 91, 10, 76, 98, 69,
        28, 88, 81, 79, 72, 97, 78, 56, 60, 20,
    ];
    for i in 0 .. SEQUENCE.len() {
        let mut vec: Vec<i32> = SEQUENCE[ .. i].to_vec();
        let mut heap = BinomialHeap::<i32>::new();
        for &val in &vec {
            heap.push(val);
        }
        vec.sort();
        let mut iter0 = vec.into_iter();
        let mut iter1 = heap.into_iter();
        loop {
            match (iter0.next(), iter1.next()) {
                (None, None) => break,
                (Some(x), Some(y)) => assert_eq!(x, y),
                // Length mismatch between the two iterators.
                _ => panic!(),
            }
        }
    }
}


// Randomized oracle test: push a random batch, then pops must match the
// sorted batch exactly.
fn test_against_vec_randomly() {
    let trials = 10_000;
    let maxsize: usize = 1000;
    let range: i32 = 1000;
    let rng = &mut rand::thread_rng();
    let sizedist = Range::new(0, maxsize);
    let valuedist = Range::new(0, range);
    let mut heap = BinomialHeap::<i32>::new();
    for _ in 0 .. trials {
        let size = sizedist.ind_sample(rng);
        let mut values = Vec::<i32>::with_capacity(size);
        for _ in 0 .. size {
            let val = valuedist.ind_sample(rng);
            values.push(val);
            heap.push(val);
        }
        values.sort();
        for val in values {
            assert_eq!(heap.pop(), Some(val));
        }
        heap.clear();
    }
}


// Randomized oracle test against std's BinaryHeap. BinaryHeap is a
// max-heap, so values are stored negated to emulate min-heap order.
fn test_against_rust_binary_heap_randomly() {
    let trials = 100_000;
    let iterops: usize = 100;
    let range: i32 = 10_000;
    let rng = &mut rand::thread_rng();
    let opcountdist = Range::new(1, iterops + 1);
    let valuedist = Range::new(0, range);
    let mut heap = BinomialHeap::<i32>::new();
    let mut queue = std::collections::binary_heap::BinaryHeap::<i32>::new();
    let mut size: usize = 0;
    for _ in 0 .. trials {
        // NOTE(review): this Range is rebuilt every iteration; it could
        // be hoisted above the loop.
        let op = Range::new(0, 100).ind_sample(rng);
        if op < 1 {  // Clear
            heap.check_structure();
            for _ in 0 .. size {
                assert_eq!(queue.pop().map(|x| -x), heap.pop());
            }
            size = 0;
        } else if op < 2 {  // Peek
            heap.check_structure();
            assert_eq!(queue.peek().map(|x| -x), heap.peek().cloned());
        } else if op < 70 {  // Enqueue/merge
            // ops 60..70 exercise merge() via a temporary heap.
            let merge = !(op < 60);
            let mut temp = BinomialHeap::<i32>::new();
            let n = opcountdist.ind_sample(rng);
            for _ in 0 .. n {
                let val = valuedist.ind_sample(rng);
                queue.push(-val);
                if merge {
                    temp.push(val);
                } else {
                    heap.push(val);
                }
            }
            if merge {
                heap.merge(temp);
            }
            size += n;
        } else if op < 100 {  // Dequeue
            let n = std::cmp::min(opcountdist.ind_sample(rng), size);
            for _ in 0 .. n {
                assert_eq!(queue.pop().map(|x| -x), heap.pop());
            }
            size -= n;
        } else {
            unreachable!();
        }
        // Invariants shared by both heaps after every operation.
        assert_eq!(queue.len(), size);
        assert_eq!(heap.len(), size);
        assert_eq!(queue.is_empty(), size == 0);
        assert_eq!(heap.is_empty(), size == 0);
    }
}
use super::*;

/// Sound mixing control word, packed into a single `u16`.
///
/// Holds a master stereo volume pair plus per-channel left/right routing
/// flags for the four PSG channels (tone1, tone2, wave, noise).
/// NOTE(review): presumably this mirrors the GBA `SOUNDCNT_L` register —
/// confirm against the hardware register map before relying on bit meanings.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[repr(transparent)]
pub struct SoundControl(u16);

impl SoundControl {
  const_new!();
  // Bits 0..=2: master volume for the right output (bit 3 is not mapped here).
  bitfield_int!(u16; 0..=2: u16, right_volume, with_right_volume, set_right_volume);
  // Bits 4..=6: master volume for the left output (bit 7 is not mapped here).
  bitfield_int!(u16; 4..=6: u16, left_volume, with_left_volume, set_left_volume);
  // Bits 8..=11: route each PSG channel to the right output.
  bitfield_bool!(u16; 8, tone1_right, with_tone1_right, set_tone1_right);
  bitfield_bool!(u16; 9, tone2_right, with_tone2_right, set_tone2_right);
  bitfield_bool!(u16; 10, wave_right, with_wave_right, set_wave_right);
  bitfield_bool!(u16; 11, noise_right, with_noise_right, set_noise_right);
  // Bits 12..=15: route each PSG channel to the left output.
  bitfield_bool!(u16; 12, tone1_left, with_tone1_left, set_tone1_left);
  bitfield_bool!(u16; 13, tone2_left, with_tone2_left, set_tone2_left);
  bitfield_bool!(u16; 14, wave_left, with_wave_left, set_wave_left);
  bitfield_bool!(u16; 15, noise_left, with_noise_left, set_noise_left);
}
//
// Copyright 2020 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

use crate::{
    io::{channel_create, ReceiverExt, SenderExt},
    node::{http::util::Pipe, CreatedNode, Node, NodeIsolation},
    permissions::PermissionsConfiguration,
    proto::oak::invocation::{HttpInvocation, HttpInvocationSender},
    NodePrivilege, RuntimeProxy,
};
use log::{error, info};
use maplit::hashmap;
use oak_abi::{
    label::{confidentiality_label, public_key_identity_tag, tls_endpoint_tag, Label},
    proto::oak::application::{
        node_configuration::ConfigType, ApplicationConfiguration, HttpClientConfiguration,
        HttpServerConfiguration, NodeConfiguration,
    },
    OakStatus,
};
use oak_io::{handle::ReadHandle, OakError, Receiver};
use oak_services::proto::oak::encap::{HttpRequest, HttpResponse};
use prost::Message;
use std::{fs, io, sync::mpsc};
use tokio::sync::oneshot;

// Test CA bundles; paths are relative to the crate's test working directory.
const LOCAL_CA: &str = "../examples/certs/local/ca.pem";
const GCP_CA: &str = "../examples/certs/gcp/ca.pem";

/// A router node that creates a per-request [`EchoNode`] for each incoming request.
struct RouterNode;

impl Node for RouterNode {
    fn node_type(&self) -> &'static str {
        "test-router"
    }

    fn isolation(&self) -> NodeIsolation {
        // Even though this node is not actually sandboxed, we are simulating a Wasm node during
        // testing.
        NodeIsolation::Sandboxed
    }

    fn run(
        self: Box<Self>,
        runtime: RuntimeProxy,
        handle: oak_abi::Handle,
        _notify_receiver: oneshot::Receiver<()>,
    ) {
        // Get invocation message that contains the response_writer handle.
        let invocation_receiver = Receiver::<HttpInvocation>::new(ReadHandle { handle });
        // Serve invocations until the sending side closes the channel (or a receive fails).
        while let Ok(invocation) = invocation_receiver.receive(&runtime) {
            // Compare the request and response labels. If the echo-node (which gets the
            // request-label) can write to the response-channel, let the `EchoNode` send the
            // response. Otherwise, the RouterNode should send the response to the caller.
            let request_label = invocation
                .clone()
                .receiver
                .unwrap()
                .label(&runtime)
                .unwrap();
            let response_label = invocation.clone().sender.unwrap().label(&runtime).unwrap();
            let can_reply = request_label.flows_to(&response_label);
            let echo_node = EchoNode { can_reply };
            // Create a public init channel to send the invocation to the `EchoNode`.
            let (echo_sender, echo_receiver) =
                channel_create(&runtime, "echo-init", &Label::public_untrusted())
                    .expect("Couldn't create invocation channel");
            // Send the newly created invocation to the request channel.
            echo_sender.send(invocation.clone(), &runtime).unwrap();
            // Close our copy of the sender so the EchoNode sees the channel end.
            if let Err(error) = echo_sender.close(&runtime) {
                panic!("Couldn't close the `invocation_sender` channel: {}", error);
            }
            // Register the per-request EchoNode, labelled with the request label
            // so it is allowed to read the request contents.
            runtime
                .node_register(
                    CreatedNode {
                        instance: Box::new(echo_node),
                        privilege: NodePrivilege::default(),
                    },
                    "echo_node",
                    &request_label,
                    echo_receiver.handle.handle,
                )
                .unwrap();
            if !can_reply {
                // If the `EchoNode` cannot respond, send a 200 (OK) response to the user.
                let resp = HttpResponse {
                    body: vec![],
                    status: http::status::StatusCode::OK.as_u16() as i32,
                    headers: None,
                };
                invocation
                    .sender
                    .expect("Empty sender on invocation.")
                    .send(resp, &runtime)
                    .unwrap();
            }
        }
    }
}

/// A simple Oak node that responds with 200 (OK) to every request it receives, and echos the
/// request body and headers in the response.
struct EchoNode { can_reply: bool, } impl Node for EchoNode { fn node_type(&self) -> &'static str { "test-echo" } fn isolation(&self) -> NodeIsolation { // Even though this node is not actually sandboxed, we are simulating a Wasm node during // testing. NodeIsolation::Sandboxed } fn run( self: Box<Self>, runtime: RuntimeProxy, handle: oak_abi::Handle, _notify_receiver: oneshot::Receiver<()>, ) { let invocation_receiver = Receiver::<HttpInvocation>::new(ReadHandle { handle }); if let Ok(invocation) = invocation_receiver.receive(&runtime) { let request = invocation .receiver .unwrap() .receive(&runtime) .expect("Couldn't receive the request"); info!("Got the request: {:?}", request); if self.can_reply { let resp = HttpResponse { body: request.body, status: http::status::StatusCode::OK.as_u16() as i32, headers: request.headers, }; invocation .sender .expect("Empty sender on invocation.") .send(resp, &runtime) .unwrap(); } } } } struct HttpServerTester { runtime: RuntimeProxy, } impl HttpServerTester { /// Test setup. Creates an Oak runtime, a test HTTP server node on the given port, and an Oak /// node simulator thread. fn new(port: u32) -> HttpServerTester { let runtime = create_runtime(get_permissions()); let invocation_receiver = create_server_node(&runtime, port).expect("Couldn't create HTTP server node!"); let _ = env_logger::builder().is_test(true).try_init(); // Create an Oak node that responds with 200 (OK) to every request it receives. runtime .node_register( CreatedNode { instance: Box::new(RouterNode), privilege: NodePrivilege::default(), }, "oak_node_for_test", &Label::public_untrusted(), invocation_receiver.handle.handle, ) .expect("Couldn't create Oak node!"); HttpServerTester { runtime } } fn cleanup(&mut self) { // stop the runtime and any servers it is running self.runtime.runtime.stop(); } } fn init_logger() { // Ignore the result. We don't want to panic if the logger cannot be initialized, or is being // initialized more than once. 
Also, if the logger is not initialized, we cannot log an // error! let _res = env_logger::builder().is_test(true).try_init(); } #[cfg(not(feature = "oak-unsafe"))] #[test] fn test_cannot_create_server_node_if_not_permitted() { init_logger(); let runtime = create_runtime(PermissionsConfiguration::default()); let result = create_server_node(&runtime, 8080); assert!(result.is_err()) } #[cfg(not(feature = "oak-unsafe"))] #[test] fn test_cannot_create_insecure_http_client_node_if_not_permitted() { init_logger(); let runtime = create_runtime(PermissionsConfiguration::default()); let result = create_client_node(&runtime, "".to_string()); assert!(result.is_err()) } #[tokio::test] async fn test_https_server_can_serve_https_requests() { init_logger(); // Start a runtime with an HTTP server node, and a thread simulating an Oak node to respond to // HTTP requests. let mut http_server_tester = HttpServerTester::new(2525); let client_with_valid_tls = create_client(LOCAL_CA); // Send an HTTPS request with an empty label, and check that response has StatusCode::OK let resp = send_request( client_with_valid_tls, "https://localhost:2525", create_signature(), Label::public_untrusted(), ) .await; assert!(resp.is_ok()); let resp = resp.unwrap(); assert_eq!(resp.status(), http::status::StatusCode::OK.as_u16()); assert!(resp .headers() .contains_key(oak_abi::OAK_LABEL_HTTP_PROTOBUF_KEY)); // Stop the runtime and the servers http_server_tester.cleanup(); } #[tokio::test] async fn test_https_server_cannot_serve_http_requests() { init_logger(); // Start a runtime with an HTTP server node. The HTTP server in this case rejects the requests, // and would not send anything to the Oak node. 
let mut http_server_tester = HttpServerTester::new(2526); let client_with_valid_tls = create_client(LOCAL_CA); // Send an HTTP request with empty label, and check that the server responds with an error let resp = send_request( client_with_valid_tls, "http://localhost:2526", create_signature(), Label::public_untrusted(), ) .await; assert!(resp.is_err()); // Stop the runtime and the servers http_server_tester.cleanup(); } #[tokio::test] async fn test_https_server_does_not_terminate_after_a_bad_request() { init_logger(); // Start a runtime with an HTTP server node, and a test Oak node to respond to HTTP requests. let mut http_server_tester = HttpServerTester::new(2527); let client_with_valid_tls = create_client(LOCAL_CA); let client_with_invalid_tls = create_client(GCP_CA); // Send a valid request, making sure that the server is started let resp = send_request( client_with_valid_tls.clone(), "https://localhost:2527", create_signature(), Label::public_untrusted(), ) .await; assert!(resp.is_ok()); // Send an HTTPS request with invalid certificate, and check that the server responds with error let resp = send_request( client_with_invalid_tls, "https://localhost:2527", create_signature(), Label::public_untrusted(), ) .await; assert!(resp.is_err()); // Send another valid request, and check that the server is alive and responsive // let client_with_valid_tls = create_client(LOCAL_CA); let resp = send_request( client_with_valid_tls, "https://localhost:2527", create_signature(), Label::public_untrusted(), ) .await; assert!(resp.is_ok()); // Stop the runtime and the servers http_server_tester.cleanup(); } #[tokio::test] async fn test_https_server_can_serve_https_requests_with_non_empty_request_label() { init_logger(); // Start a runtime with an HTTP server node, and a thread simulating an Oak node to respond to // HTTP requests. 
let mut http_server_tester = HttpServerTester::new(2528); let client_with_valid_tls = create_client(LOCAL_CA); let label = confidentiality_label(tls_endpoint_tag("localhost")); // Send an HTTPS request, and check that response has StatusCode::OK let resp = send_request( client_with_valid_tls, "https://localhost:2528", create_signature(), label, ) .await; assert!(resp.is_ok()); let resp = resp.unwrap(); assert_eq!(resp.status(), http::status::StatusCode::OK.as_u16()); // Stop the runtime and the servers http_server_tester.cleanup(); } #[tokio::test] async fn test_https_server_can_serve_https_requests_with_user_identity_as_request_label() { init_logger(); // Start a runtime with an HTTP server node, and a thread simulating an Oak node to respond to // HTTP requests. let mut http_server_tester = HttpServerTester::new(2529); let client_with_valid_tls = create_client(LOCAL_CA); let signature = create_signature(); let label = confidentiality_label(public_key_identity_tag(&signature.clone().public_key)); // Send an HTTPS request, and check that response has StatusCode::OK let resp = send_request( client_with_valid_tls, "https://localhost:2529", signature, label, ) .await; assert!(resp.is_ok()); let resp = resp.unwrap(); assert_eq!(resp.status(), http::status::StatusCode::OK.as_u16()); assert!(resp .headers() .contains_key(oak_abi::OAK_LABEL_HTTP_PROTOBUF_KEY)); // Stop the runtime and the servers http_server_tester.cleanup(); } #[test] fn test_https_client_can_handle_https_requests_to_an_external_service() { init_logger(); let runtime = create_runtime(get_permissions()); // Create an HTTP client pseudo-node to serve requests to `https://www.google.com/`. A valid // authority is expected in the URI, so we cannot instead start a server on localhost and use // that as the external service. The HTTP client pseudo-node is created with a communication // channel. A handle to the read half of the channel is returned. 
The Oak node will use it // to fetch the write handle to an invocation channel for sending the invocations to the // HTTP client pseudo-node. let authority = "www.google.com"; let oak_node_init_receiver = create_client_node(&runtime, authority.to_string()) .expect("Couldn't create HTTP client node!"); // Create a sync_channel to be notified when the Oak Node is completed. let (result_sender, result_receiver) = mpsc::sync_channel(1); // Create a test Oak node that sends requests to the HTTP client pseudo-node. let client_test_node = ClientTesterNode { uri: "https://www.google.com/".to_string(), result_sender, authority: authority.to_string(), }; // Register the test Oak node in the runtime. runtime .node_register( CreatedNode { instance: Box::new(client_test_node), // The node requires this privilege to be able to create a [`Pipe`] for interaction // with the HTTP client pseudo-node. In a more realistic scenario, // such a node should not have a privilege like this, and the // [`Pipe`] has to be created by the planner node. privilege: crate::node::http::client::get_privilege(authority), }, "client_tester_node", &confidentiality_label(tls_endpoint_tag(authority)), oak_node_init_receiver.handle.handle, ) .unwrap(); // Wait for the test Node to complete execution before terminating the Runtime. let result_value = result_receiver.recv().expect("test node disconnected"); assert!(result_value.is_ok()); let resp = result_value.unwrap(); // The test uses a self signed certificate. So, we expect an error that is converted to an // INTERNAL_SERVER_ERROR response in the HTTP client node. assert_eq!( resp.status, http::StatusCode::INTERNAL_SERVER_ERROR.as_u16() as i32 ); runtime.runtime.stop(); } #[test] fn test_https_client_can_handle_http_requests_to_an_external_service() { init_logger(); let runtime = create_runtime(get_permissions()); // This is a public node, so the authority can be empty. 
let empty_authority = "".to_string(); // Create an HTTP client pseudo-node to serve `HTTP` requests`. let oak_node_init_receiver = create_client_node(&runtime, empty_authority).expect("Couldn't create HTTP client node!"); // Create a sync_channel to be notified when the Oak Node is completed. let (result_sender, result_receiver) = mpsc::sync_channel(1); // Create a test Oak node that sends requests to the HTTP client pseudo-node. let client_test_node = ClientTesterNode { uri: "http://www.google.com".to_string(), result_sender, authority: "".to_string(), }; // Register the test Oak node in the runtime. runtime .node_register( CreatedNode { instance: Box::new(client_test_node), privilege: NodePrivilege::default(), }, "client_tester_node", &Label::public_untrusted(), oak_node_init_receiver.handle.handle, ) .unwrap(); // Wait for the test Node to complete execution before terminating the Runtime. let result_value = result_receiver.recv().expect("test node disconnected"); assert!(result_value.is_ok()); let resp = result_value.unwrap(); assert_eq!(resp.status, http::StatusCode::OK.as_u16() as i32); runtime.runtime.stop(); } fn create_runtime(permissions: PermissionsConfiguration) -> RuntimeProxy { let configuration = ApplicationConfiguration { wasm_modules: hashmap! 
{}, initial_node_configuration: None, module_signatures: vec![], }; let tls_config = crate::tls::TlsConfig::new( "../examples/certs/local/local.pem", "../examples/certs/local/local.key", ) .expect("Couldn't create TLS config from local certs."); let secure_server_config = crate::SecureServerConfiguration { grpc_config: None, http_config: Some(crate::HttpConfiguration { tls_config, http_client_root_tls_certificate: crate::tls::Certificate::parse( include_bytes!("../../../../examples/certs/local/ca.pem").to_vec(), ) .ok(), }), }; let signature_table = crate::SignatureTable::default(); info!("Create runtime for test"); crate::RuntimeProxy::create_runtime( &configuration, &permissions, &secure_server_config, &signature_table, None, ) } fn get_permissions() -> PermissionsConfiguration { PermissionsConfiguration { allow_http_server_nodes: true, allow_insecure_http_egress: true, allow_egress_https_authorities: vec!["www.google.com".to_string()], ..Default::default() } } fn create_server_node( runtime: &RuntimeProxy, port: u32, ) -> Result<Receiver<HttpInvocation>, OakStatus> { let (init_receiver, invocation_receiver) = create_communication_channel(runtime); let server_config = NodeConfiguration { config_type: Some(ConfigType::HttpServerConfig(HttpServerConfiguration { address: format!("[::]:{}", port), })), }; // TODO(#1631): When we have a separate top for each sub-lattice, this should be changed to // the top of the identity sub-lattice. let top_label = oak_abi::label::confidentiality_label(oak_abi::label::top()); runtime.node_create( "test_server", &server_config, &top_label, init_receiver.handle.handle, )?; Ok(invocation_receiver) } fn create_communication_channel( runtime: &RuntimeProxy, ) -> (Receiver<HttpInvocationSender>, Receiver<HttpInvocation>) { // Create channel: one end to server_node::run; the other to the Oak node. 
let (init_sender, init_receiver) = channel_create::<HttpInvocationSender>( runtime, "HTTP server init", &Label::public_untrusted(), ) .expect("Couldn't create channel"); // At the start the HTTP server pseudo-Node expects to receive an invocation channel, with // exactly one handle in it. // // Create a channel for receiving invocations to pass to the HTTP server pseudo-Node. let (invocation_sender, invocation_receiver) = channel_create::<HttpInvocation>( runtime, "HTTP server invocation", &Label::public_untrusted(), ) .expect("Couldn't create channel"); let http_invocation_sender = HttpInvocationSender { sender: Some(invocation_sender), }; if let Err(error) = init_sender.send(http_invocation_sender, runtime) { panic!("Couldn't write to the `init_sender` channel: {}", error); } if let Err(error) = init_sender.close(runtime) { panic!("Couldn't close the `init_sender` channel: {}", error); } (init_receiver, invocation_receiver) } // Build a TLS client, using the given CA store fn create_client( ca_path: &str, ) -> hyper::client::Client<hyper_rustls::HttpsConnector<hyper::client::HttpConnector>> { let ca_file = fs::File::open(ca_path).unwrap_or_else(|e| panic!("Failed to open {}: {}", ca_path, e)); let mut ca = io::BufReader::new(ca_file); // Build an HTTP connector which supports HTTPS too. let mut http = hyper::client::HttpConnector::new(); http.enforce_http(false); // Build a TLS client, using the custom CA store for lookups. let mut tls = rustls::ClientConfig::new(); tls.root_store .add_pem_file(&mut ca) .expect("Failed to load custom CA store"); // Join the above part into an HTTPS connector. 
let https = hyper_rustls::HttpsConnector::from((http, tls)); hyper::client::Client::builder().build(https) } fn create_signature() -> oak_abi::proto::oak::identity::SignedChallenge { let key_pair = oak_sign::KeyPair::generate().unwrap(); let signature = oak_sign::SignatureBundle::create(oak_abi::OAK_CHALLENGE.as_bytes(), &key_pair).unwrap(); oak_abi::proto::oak::identity::SignedChallenge { signed_hash: signature.signed_hash, public_key: key_pair.public_key_der().unwrap(), } } async fn send_request( client: hyper::client::Client<hyper_rustls::HttpsConnector<hyper::client::HttpConnector>>, uri: &str, signature: oak_abi::proto::oak::identity::SignedChallenge, request_label: Label, ) -> Result<http::response::Response<hyper::Body>, hyper::Error> { // Send a request, and wait for the response let mut label_bytes = vec![]; if let Err(err) = request_label.encode(&mut label_bytes) { panic!("Failed to encode label: {}", err); } let label_bytes = base64::encode(label_bytes); let mut sig_bytes = vec![]; if let Err(err) = signature.encode(&mut sig_bytes) { panic!("Failed to encode signature: {}", err); } let sig_bytes = base64::encode(sig_bytes); // The client thread may start sending the requests before the server is up. In this case, the // request will be rejected with a "ConnectError". To make the tests are stable, we need to // retry sending the requests until the server is up. To distinguish between these cases and // actual errors (e.g., errors due to invalid TLS certificates), we need to check the cause of // the error. loop { let request = hyper::Request::builder() .method(http::Method::GET) .uri(uri) .header(oak_abi::OAK_LABEL_HTTP_PROTOBUF_KEY, label_bytes.clone()) .header( oak_abi::OAK_SIGNED_CHALLENGE_HTTP_PROTOBUF_KEY, sig_bytes.clone(), ) .body(hyper::Body::empty()) .unwrap(); match client.request(request).await { Ok(reps) => return Ok(reps), Err(error) => { // We cannot access the cause of the error, so we need to check the string instead. 
let error_str = format!("{:?}", error); // If the cause is `ConnectError` (https://github.com/hyperium/hyper/blob/66fc127c8d4f81aed9300c9d0f13246b8206067a/src/client/connect/http.rs#L392) // it means that a connection to the server cannot be made. Retry sending the // request in this case. if error_str.contains("ConnectError") { continue; } else { return Err(error); } } } } } /// Creates an HTTP client pseudo-node in the given Runtime. fn create_client_node( runtime: &RuntimeProxy, authority: String, ) -> Result<Receiver<HttpInvocationSender>, OakStatus> { let label = if authority.is_empty() { Label::public_untrusted() } else { confidentiality_label(tls_endpoint_tag(&authority)) }; let (init_receiver, invocation_receiver) = create_http_client_communication_channel(runtime); let client_config = NodeConfiguration { config_type: Some(ConfigType::HttpClientConfig(HttpClientConfiguration { authority, })), }; runtime.node_create( "test_http_client", &client_config, &label, invocation_receiver.handle.handle, )?; Ok(init_receiver) } /// An HTTP client pseudo-node needs a channel to read `HttpInvocation`s from. /// We wrap the write half of the channel in an `HttpInvocationSender` and hand it to the Oak node, /// which will use the write half of the channel to send the requests to the HTTP client /// pseudo-node. /// This function creates two channels, and returns their receiver (read) ends. The first receiver /// will be used as the initial handle to the Oak node. The second receiver is the initial handle /// to the HTTP client node. fn create_http_client_communication_channel( runtime: &RuntimeProxy, ) -> (Receiver<HttpInvocationSender>, Receiver<HttpInvocation>) { // Create HttpInvocation channel: The receiver end goes to the Oak node. The other end goes to // the HTTP client pseudo-node. 
let (init_sender, init_receiver) = channel_create::<HttpInvocationSender>( runtime, "Oak node init", &Label::public_untrusted(), ) .expect("Couldn't create channel"); // Create HttpInvocationSender channel: At the start, the Oak Node expects to receive an // invocation channel, with exactly one handle in it. // // Create a channel for sending invocations to the HTTP client pseudo-Node. let (invocation_sender, invocation_receiver) = channel_create::<HttpInvocation>( runtime, "HTTP client invocation", &Label::public_untrusted(), ) .expect("Couldn't create channel"); let http_invocation_sender = HttpInvocationSender { sender: Some(invocation_sender), }; if let Err(error) = init_sender.send(http_invocation_sender, runtime) { panic!("Couldn't write to the `init_sender` channel: {}", error); } if let Err(error) = init_sender.close(runtime) { panic!("Couldn't close the `init_sender` channel: {}", error); } (init_receiver, invocation_receiver) } /// Struct representing an Oak node that sends requests to an external server via an HTTP client /// pseudo-node, collects the response and sends it back to the test method using a `SyncSender`. struct ClientTesterNode { uri: String, /// SyncSender to send the response from the external service back to the test method. result_sender: mpsc::SyncSender<Result<HttpResponse, OakError>>, authority: String, } impl Node for ClientTesterNode { fn node_type(&self) -> &'static str { "client-tester" } fn isolation(&self) -> NodeIsolation { // Even though this node is not actually sandboxed, we are simulating a Wasm node during // testing. NodeIsolation::Sandboxed } fn run( self: Box<Self>, runtime: RuntimeProxy, handle: oak_abi::Handle, _notify_receiver: oneshot::Receiver<()>, ) { // At start-of-day we need/expect to receive a write handle for an invocation channel // to use for all subsequent activity. 
let startup_receiver = Receiver::<HttpInvocationSender>::new(ReadHandle { handle }); let invocation_sender = startup_receiver .receive(&runtime) .and_then(|invocation_sender| { invocation_sender .sender .ok_or(OakError::OakStatus(OakStatus::ErrBadHandle)) }) .expect("Failed to retrieve invocation channel write handle"); if let Err(err) = startup_receiver.close(&runtime) { error!( "Failed to close initial inbound channel {}: {:?}", handle, err ); } // create request. // NOTE: Method is case sensitive. let request = HttpRequest { uri: self.uri.clone(), method: http::Method::GET.to_string(), body: vec![], headers: None, }; let label = if self.authority.is_empty() { Label::public_untrusted() } else { confidentiality_label(tls_endpoint_tag(&self.authority)) }; // create channel let pipe = Pipe::new(&runtime, &label, &label).expect("Couldn't create the Pipe"); // send the request on invocation_sender pipe.insert_message(&runtime, request) .expect("Couldn't insert HTTP request in the pipe"); // send the invocation to the HTTP client pseudo-node pipe.send_invocation(&runtime, invocation_sender.handle) .expect("Couldn't send the invocation"); // wait for the response to come let response = pipe.response_receiver.receive(&runtime); pipe.close(&runtime); // notify the test self.result_sender .send(response) .expect("could not send the result"); } }
use crate::chunk::{Chunk, OpCode}; pub fn disassemble_chunk(chunk: &Chunk, name: String) { println!("== {} ==", name); for (index, op_code) in chunk.code.iter().enumerate() { disassemble_instruction(index, op_code) } } fn disassemble_instruction(index: usize, instruction: &OpCode) { println!( // "{:04} L{:03} {:?}", "{:04} {:?}", index, instruction ); }
use std::io;

// `str` is the native string-slice type: a borrowed, immutable view,
// as opposed to the owned, growable `String`.

/// Returns the prefix of `string` up to and including the first `'L'` byte,
/// or the entire string when no `'L'` occurs.
///
/// Takes `&str` rather than `&String` (the idiomatic borrowed form);
/// existing `&String` call sites still work via deref coercion.
fn find_tdl_in_string(string: &str) -> &str {
    match string.bytes().position(|b| b == b'L') {
        // 'L' is ASCII, so `i + 1` is always a valid char boundary.
        Some(i) => &string[..=i],
        None => string,
    }
}

fn main() -> io::Result<()> {
    let mut string = String::from("TDLLLLLLLLLL");

    let tdl = find_tdl_in_string(&string);
    println!("{}", tdl);

    // The borrow held by `tdl` has ended, so mutating `string` is allowed here.
    string.push_str("HOLA");
    println!("{}", string);

    Ok(())
}
use druid::{FontDescriptor, FontFamily, TextLayout};

/// Wrapper around a druid [`FontDescriptor`] used by Lapce.
pub struct LapceFont {
    // The configured font descriptor (family + size).
    font: FontDescriptor,
}

impl LapceFont {
    /// Builds a descriptor for `font_name` at `size` points and wraps it.
    ///
    /// NOTE(review): the previous version ignored `font_name` (hard-coding
    /// "Cascadia Code"), built a throwaway `TextLayout`, and returned `()`,
    /// dropping the constructed descriptor. It now honors both parameters and
    /// returns the value — confirm the hard-coded family was not intentional.
    pub fn new(font_name: &str, size: f64) -> Self {
        let font = FontDescriptor::new(FontFamily::new_unchecked(font_name))
            .with_size(size);
        LapceFont { font }
    }
}
#[macro_use]
extern crate log;

// Emits a single debug-level log record with the text "a" — a minimal smoke
// test that the `log` macros are wired to whatever logger the host installed.
pub fn log_a() {
    debug!("a");
}

#[cfg(test)]
mod tests {
    // Sanity check that the test harness itself runs.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
// Empty placeholder type — no fields or methods yet.
// NOTE(review): Rust type names are conventionally UpperCamelCase
// (`SpaceStation`); renaming would change the public interface, so it is
// only flagged here.
struct Space_Station{
}

impl Space_Station{
}
use std::iter::{Peekable}; use tokens::*; use position::*; use ast::*; use utils::*; pub fn parse<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut T) -> Tree { let mut peekable = tokens.peekable(); let s = parse_tree(&mut peekable); expect_eof(&mut peekable); s } fn parse_tree<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Tree { match tokens.peek() { Some(&Token::Let(_)) => parse_let(tokens), Some(&Token::If(_)) => parse_if(tokens), Some(&Token::While(_)) => parse_while(tokens), Some(&Token::For(_)) => parse_for(tokens), Some(&Token::LeftBrace(_)) => parse_block(tokens), _ => parse_expr(parse_simple_expr(tokens), tokens) } } fn parse_block<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Tree { let pos = expect_leftbrace(tokens); let mut stmts = Vec::new(); loop { match tokens.peek() { Some(&Token::RightBrace(_)) => { tokens.next(); let len = stmts.len(); return match len { 0 => Tree::Unit(pos), 1 => { match stmts.first() { Some(&Tree::DeclByName(_, _, _)) => Tree::Block(pos, stmts), _ => stmts.pop().unwrap() } } _ => Tree::Block(pos, stmts) } }, _ => stmts.push(parse_tree(tokens)), } } } fn parse_let<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Tree { let pos = expect_let(tokens); let name = expect_ident(tokens); expect_assign(tokens); Tree::DeclByName(pos, name, box_(parse_tree(tokens))) } fn parse_if<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Tree { let pos = expect_if(tokens); let cond = box_(parse_tree(tokens)); let th = box_(parse_tree(tokens)); let el = box_( match tokens.peek().cloned() { Some(Token::Else(_)) => { tokens.next(); parse_tree(tokens) }, _ => Tree::Unit(pos.clone()) }); Tree::If(pos, cond, th, el) } fn parse_while<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Tree { let pos = expect_while(tokens); let cond = box_(parse_tree(tokens)); Tree::While(pos, cond, box_(parse_tree(tokens))) } fn parse_for<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) 
-> Tree { let pos = expect_for(tokens); let name = expect_ident(tokens); expect_colon(tokens); let lst = box_(parse_tree(tokens)); Tree::ForByName(pos, name, lst, box_(parse_tree(tokens))) } // ----------------------------------------- Tree ----------------------------------------- fn parse_simple_expr<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Tree { let e = match tokens.next() { Some(Token::StrLit(p, val)) => Tree::StrLit(p, val.to_string()), Some(Token::NumLit(p, val)) => Tree::NumLit(p, val.parse().unwrap()), Some(Token::Ident(p, name)) => { let lhs = parse_ident(name, p, tokens); try_parse_def(vec![lhs], tokens) } Some(Token::LeftPar(_)) => { let lst = parse_paren(tokens); expect_rightpar(tokens); try_parse_def(lst, tokens) }, Some(Token::LeftBracket(p)) => { let lst = parse_list(tokens); expect_rightbracket(tokens); Tree::ListLit(p, lst) }, x => fatal_tk("Expected identifier, number or '('", x) }; if let Some(Token::LeftPar(p)) = tokens.peek().cloned() { tokens.next(); let args = parse_paren(tokens); expect_rightpar(tokens); Tree::Call(p.clone(), box_(e), args) } else { e } } fn try_parse_def<'a, T: Iterator<Item = Token<'a>>>(args: Vec<Tree>, tokens: &mut Peekable<T>) -> Tree { let t = tokens.peek().cloned(); if let Some(Token::Arrow(pos)) = t { tokens.next(); let args = args.into_iter().map(|arg| { match arg { Tree::IdentByName(_, n) => n, t => fatal_pos("Expected identifier, got expression", t.position()) } }).collect(); Tree::FuncByName(pos, args, box_(parse_tree(tokens))) } else { match args.len() { 1 => args.into_iter().next().unwrap(), _ => fatal_tk("Expected '=>'", t) } } } fn parse_ident<'a, T: Iterator<Item = Token<'a>>>(name: &str, pos: Position, tokens: &mut Peekable<T>) -> Tree { let tk = tokens.peek().cloned(); if let Some(Token::Assign(p)) = tk { tokens.next(); Tree::AssignByName(p, name.to_owned(), box_(parse_tree(tokens))) } else { Tree::IdentByName(pos, name.to_owned()) } } fn parse_paren<'a, T: Iterator<Item = 
Token<'a>>>(tokens: &mut Peekable<T>) -> Vec<Tree> { if let Some(&Token::RightPar(_)) = tokens.peek() { return Vec::new() } let mut lst = Vec::new(); loop { lst.push(parse_tree(tokens)); match tokens.peek().cloned() { Some(Token::Comma(_)) => tokens.next(), Some(Token::RightPar(_)) => return lst, x => fatal_tk("Expected ',' or ')'", x) }; } } fn parse_list<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Vec<Tree> { if let Some(&Token::RightBracket(_)) = tokens.peek() { return Vec::new() } let mut lst = Vec::new(); loop { lst.push(parse_tree(tokens)); match tokens.peek().cloned() { Some(Token::Comma(_)) => tokens.next(), Some(Token::RightBracket(_)) => return lst, x => fatal_tk("Expected ',' or ']'", x) }; } } fn parse_expr<'a, T: Iterator<Item = Token<'a>>>(lhs: Tree, tokens: &mut Peekable<T>) -> Tree { fn is_op(tk: &Option<Token>) -> bool { match tk { &Some(Token::Plus(_)) | &Some(Token::Minus(_)) | &Some(Token::Times(_)) | &Some(Token::Div(_)) => true, &Some(Token::Eq(_)) | &Some(Token::Neq(_)) => true, &Some(Token::Assign(_)) => true, _ => false } } fn create_op(lhs: Tree, rhs: Tree, tk: Option<Token>) -> Tree { match tk { Some(Token::Plus(p)) => Tree::Add(p, box_(lhs), box_(rhs)), Some(Token::Minus(p)) => Tree::Sub(p, box_(lhs), box_(rhs)), Some(Token::Times(p)) => Tree::Mul(p, box_(lhs), box_(rhs)), Some(Token::Div(p)) => Tree::Div(p, box_(lhs), box_(rhs)), Some(Token::Eq(p)) => Tree::Eq(p, box_(lhs), box_(rhs)), Some(Token::Neq(p)) => Tree::Neq(p, box_(lhs), box_(rhs)), Some(Token::Assign(pos)) => { match lhs { Tree::IdentByName(_, name) => Tree::AssignByName(pos, name, box_(rhs)), t => fatal_pos("Expected identifier as left operand of assignation", t.position()) } }, _ => unreachable!() } } fn assoc(tk: &Option<Token>) -> i32 { match tk { &Some(Token::Eq(_)) | &Some(Token::Neq(_)) => 0, &Some(Token::Plus(_)) | &Some(Token::Minus(_)) => 1, &Some(Token::Times(_)) | &Some(Token::Div(_)) => 2, _ => unreachable!() } } let mut first_op = 
tokens.peek().cloned(); if !is_op(&first_op) { return lhs; } tokens.next(); let mut lhs = lhs; let mut mhs = parse_simple_expr(tokens); loop { let second_op = tokens.peek().cloned(); if is_op(&second_op) { tokens.next(); } else { return create_op(lhs, mhs, first_op) } let rhs = parse_simple_expr(tokens); if assoc(&second_op) > assoc(&first_op) { mhs = create_op(mhs, rhs, second_op); } else { lhs = create_op(lhs, mhs, first_op); mhs = rhs; first_op = second_op; } } } // ----------------------------------------- HELPERS ----------------------------------------- fn expect_ident<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> String { match tokens.next() { Some(Token::Ident(_, name)) => name.to_string(), x => fatal_tk("Expected identifier", x) } } fn expect_let<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Position { match tokens.next() { Some(Token::Let(p)) => p, x => fatal_tk("Expected 'let'", x) } } fn expect_if<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Position { match tokens.next() { Some(Token::If(p)) => p, x => fatal_tk("Expected 'if'", x) } } fn expect_while<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Position { match tokens.next() { Some(Token::While(p)) => p, x => fatal_tk("Expected 'while'", x) } } fn expect_for<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Position { match tokens.next() { Some(Token::For(p)) => p, x => fatal_tk("Expected 'ForByName'", x) } } fn expect_colon<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) { match tokens.next() { Some(Token::Colon(_)) => {}, x => fatal_tk("Expected ':'", x) } } fn expect_rightpar<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) { match tokens.next() { Some(Token::RightPar(_)) => {}, x => fatal_tk("Expected ')'", x) } } fn expect_rightbracket<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) { match tokens.next() { Some(Token::RightBracket(_)) => {}, x => fatal_tk("Expected ']'", x) 
} } fn expect_leftbrace<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) -> Position { match tokens.next() { Some(Token::LeftBrace(p)) => p, x => fatal_tk("Expected '{{'", x) } } fn expect_assign<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) { match tokens.next() { Some(Token::Assign(_)) => {} x => fatal_tk("Expected '='", x) } } fn expect_eof<'a, T: Iterator<Item = Token<'a>>>(tokens: &mut Peekable<T>) { match tokens.next() { Some(x) => fatal(&format!("Unexpected {:?} at end of stream", x)), None => {} } }
//! Parse HTML documents into [HtmlDocuments](HtmlDocument). //! //! # Example: parse HTML text into a document //! ```rust //! use skyscraper::html::{self, parse::ParseError}; //! # fn main() -> Result<(), ParseError> { //! let html_text = r##" //! <html> //! <body> //! <div>Hello world</div> //! </body> //! </html>"##; //! //! let document = html::parse(html_text)?; //! # Ok(()) //! # } pub mod parse; mod tokenizer; use std::{collections::HashMap, fmt}; use indextree::{Arena, NodeId}; pub use crate::html::parse::parse; lazy_static! { /// List of HTML tags that do not have end tags and cannot have any content. static ref VOID_TAGS: Vec<&'static str> = vec![ "meta", "link", "img", "input", "br", "hr", "col", "area", "base", "embed", "keygen", "param", "source", "track", "wbr" ]; } type TagAttributes = HashMap<String, String>; /// An HTML tag and its attributes. #[derive(Debug, PartialEq, Clone)] pub struct HtmlTag { /// Name of the tag. pub name: String, /// Map of the tag's attributes and their corresponding values. /// Example: Attributes of <div class="hello" id="world"/> pub attributes: TagAttributes, } impl HtmlTag { /// Creates a new tag with the given name and no attributes. pub fn new(name: String) -> HtmlTag { HtmlTag { name, attributes: HashMap::new(), } } /// Gets any direct HtmlNode::Text children and concatenates them into a single string /// separated by a space character if no whitespace already separates them. pub fn get_text(&self, doc_node: &DocumentNode, document: &HtmlDocument) -> Option<String> { self.internal_get_text(doc_node, document, false) } /// Gets all HtmlNode::Text children and concatenates them into a single string separated /// by a space character if no whitespace already separates them. 
pub fn get_all_text(&self, doc_node: &DocumentNode, document: &HtmlDocument) -> Option<String> { self.internal_get_text(doc_node, document, true) } fn internal_get_text( &self, doc_node: &DocumentNode, document: &HtmlDocument, recurse: bool, ) -> Option<String> { let mut o_text: Option<String> = None; let children = doc_node.children(document); // Iterate through this tag's children for child in children { let child_node = document.get_html_node(&child); if let Some(child_node) = child_node { match child_node { HtmlNode::Text(text) => { // If the child is a text, simply append its text. o_text = Some(HtmlTag::append_text(o_text, text.to_string())); } HtmlNode::Tag(_) => { // If the child is a tag, only append its text if recurse=true was passed, // otherwise skip this node. if recurse { let o_child_text = child_node.internal_get_text(&child, document, true); if let Some(child_text) = o_child_text { o_text = Some(HtmlTag::append_text(o_text, child_text)); } } } } } } o_text } fn append_text(o_text: Option<String>, append_text: String) -> String { match o_text { Some(t) => { // If whitespace is already separating them, do not add another. if t.ends_with(|ch: char| ch.is_whitespace()) || append_text.starts_with(|ch: char| ch.is_whitespace()) { format!("{}{}", t, append_text) } else { format!("{} {}", t, append_text) } } None => append_text, } } } /// An HTML node can be either a tag or raw text. #[derive(Clone)] pub enum HtmlNode { /// An HTML tag. Tag(HtmlTag), /// Text content contained within [HtmlNode::Tag]. /// /// Kept as separate enum value rather than a field on [HtmlTag] so /// that order can be maintained in nodes containing a mix of text /// and tags. /// /// # Example: order of mixed text and tag contents is preserved /// ```html /// <div> /// Hello <span style="bold">world</span>! /// </div> /// ``` /// Where the inner contents of `div` would be: `Text("Hello ")`, `Tag(span)`, `Text("!")`. 
/// Text(String), } impl HtmlNode { /// Gets any direct HtmlNode::Text children and concatenates them into a single string /// separated by a space character if no whitespace already separates them. pub fn get_text(&self, doc_node: &DocumentNode, document: &HtmlDocument) -> Option<String> { self.internal_get_text(doc_node, document, false) } /// Gets all HtmlNode::Text children and concatenates them into a single string separated /// by a space character if no whitespace already separates them. pub fn get_all_text(&self, doc_node: &DocumentNode, document: &HtmlDocument) -> Option<String> { self.internal_get_text(doc_node, document, true) } /// Gets any direct HtmlNode::Text children and concatenates them into a single string /// separated by a space character if no whitespace already separates them. fn internal_get_text( &self, doc_node: &DocumentNode, document: &HtmlDocument, recurse: bool, ) -> Option<String> { match self { HtmlNode::Tag(tag) => { if recurse { tag.get_all_text(doc_node, document) } else { tag.get_text(doc_node, document) } } HtmlNode::Text(text) => Some(text.to_string()), } } /// Gets attributes. /// If Node is a `Text` return None pub fn get_attributes(&self) -> Option<&TagAttributes> { match self { HtmlNode::Tag(tag) => Some(&tag.attributes), &HtmlNode::Text(_) => None, } } /// If this is node is an [HtmlNode::Tag], unwrap and return it, otherwise panic. /// /// Prefer using a `match`; only use this if you *know* this is a [HtmlNode::Tag], or /// if panicking is ok, such as during testing. pub fn unwrap_tag(&self) -> &HtmlTag { match self { HtmlNode::Tag(tag) => tag, _ => panic!("node is not a tag"), } } /// If this is node is an [HtmlNode::Text], unwrap and return it, otherwise panic. /// /// Prefer using a `match`; only use this if you *know* this is a [HtmlNode::Text], or /// if panicking is ok, such as during testing. 
pub fn unwrap_text(&self) -> &str { match self { HtmlNode::Text(text) => text, _ => panic!("node is not text"), } } } /// HTML document tree represented by an indextree arena and a root node. /// /// Documents must have a single root node to be valid. #[derive(Clone)] pub struct HtmlDocument { arena: Arena<HtmlNode>, /// The root node of the document. pub root_node: DocumentNode, } impl HtmlDocument { /// Create a new [HtmlDocument] with the given `arena` contents and `root_node`. pub fn new(arena: Arena<HtmlNode>, root_node: DocumentNode) -> HtmlDocument { HtmlDocument { arena, root_node } } /// Get the [HtmlNode] associated with the given [DocumentNode]. pub fn get_html_node(&self, node: &DocumentNode) -> Option<&HtmlNode> { self.arena.get(node.id).map(|x| x.get()) } } impl fmt::Display for HtmlDocument { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn display_indent(indent: u8, f: &mut fmt::Formatter<'_>) -> fmt::Result { for _ in 0..indent { write!(f, " ")?; } Ok(()) } fn display_node( indent: u8, doc: &HtmlDocument, doc_node: &DocumentNode, f: &mut fmt::Formatter<'_>, ) -> fmt::Result { let html_node = doc.get_html_node(doc_node).unwrap(); match html_node { HtmlNode::Tag(tag) => { // display begin tag display_indent(indent, f)?; write!(f, "<{}", tag.name)?; for attribute in &tag.attributes { write!(f, r#" {}="{}""#, attribute.0, attribute.1)?; } writeln!(f, ">")?; // self-closing tags cannot have content or an end tag if !VOID_TAGS.contains(&tag.name.as_str()) { // recursively display all children let children = doc_node.children(doc); for child in children { display_node(indent + 1, doc, &child, f)?; } // display end tag display_indent(indent, f)?; writeln!(f, "</{}>", tag.name)?; } Ok(()) } HtmlNode::Text(text) => { display_indent(indent, f)?; writeln!(f, "{}", text) } } } display_node(0, self, &self.root_node, f) } } /// A key representing a single [HtmlNode] contained in a [HtmlDocument]. 
/// /// Contains tree information such as parents and children. /// /// Implements [Copy] so that it can be easily passed around, unlike its associated [HtmlNode]. /// /// # Example: get associated [HtmlNode] /// /// ```rust /// # use skyscraper::html::{self, DocumentNode, HtmlNode, parse::ParseError}; /// # fn main() -> Result<(), ParseError> { /// // Parse the HTML text into a document /// let text = r#"<div/>"#; /// let document = html::parse(text)?; /// /// // Get the root document node's associated HTML node /// let doc_node: DocumentNode = document.root_node; /// let html_node = document.get_html_node(&doc_node).expect("root node must be in document"); /// /// // Check we got the right node /// match html_node { /// HtmlNode::Tag(tag) => assert_eq!(String::from("div"), tag.name), /// HtmlNode::Text(_) => panic!("expected tag, got text instead") /// } /// # Ok(()) /// # } /// ``` /// /// # Example: get children and parents /// /// ```rust /// # use skyscraper::html::{self, DocumentNode, HtmlNode, parse::ParseError}; /// # fn main() -> Result<(), ParseError> { /// // Parse the HTML text into a document /// let text = r#"<parent><child/><child/></parent>"#; /// let document = html::parse(text)?; /// /// // Get the children of the root node /// let parent_node: DocumentNode = document.root_node; /// let children: Vec<DocumentNode> = parent_node.children(&document).collect(); /// assert_eq!(2, children.len()); /// /// // Get the parent of both child nodes /// let parent_of_child0: DocumentNode = children[0].parent(&document).expect("parent of child 0 missing"); /// let parent_of_child1: DocumentNode = children[1].parent(&document).expect("parent of child 1 missing"); /// /// assert_eq!(parent_node, parent_of_child0); /// assert_eq!(parent_node, parent_of_child1); /// # Ok(()) /// # } /// ``` #[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Debug, Hash)] pub struct DocumentNode { id: NodeId, } impl DocumentNode { /// Create a new [DocumentNode] from the given 
arena key `id`. pub fn new(id: NodeId) -> DocumentNode { DocumentNode { id } } /// Get the concatenated text of this node and all of its children. /// /// Adds a space between elements for better readability. /// /// # Example: get the text of a node /// /// ```rust /// use skyscraper::html::{self, parse::ParseError}; /// # fn main() -> Result<(), ParseError> { /// // Parse the text into a document. /// let text = r##"<parent>foo<child>bar</child>baz</parent>"##; /// let document = html::parse(text)?; /// /// // Get all text of the root node. /// let doc_node = document.root_node; /// let text = doc_node.get_all_text(&document).expect("text missing"); /// /// assert_eq!("foo bar baz", text); /// # Ok(()) /// # } pub fn get_all_text(&self, document: &HtmlDocument) -> Option<String> { match document.get_html_node(self) { Some(html_node) => html_node.get_all_text(self, document), None => None, } } /// Get the concatenated text of this node. /// /// Adds a space between elements for better readability. /// /// # Example: get the text of a node /// /// ```rust /// use skyscraper::html::{self, parse::ParseError}; /// # fn main() -> Result<(), ParseError> { /// // Parse the text into a document. /// let html_text = r##"<parent>foo<child>bar</child>baz</parent>"##; /// let document = html::parse(html_text)?; /// /// // Get all text of the root node. /// let doc_node = document.root_node; /// let text = doc_node.get_text(&document).expect("text missing"); /// /// assert_eq!("foo baz", text); /// # Ok(()) /// # } pub fn get_text(&self, document: &HtmlDocument) -> Option<String> { match document.get_html_node(self) { Some(html_node) => html_node.get_text(self, document), None => None, } } /// Get attributes. /// /// If Node is a `Text` return None /// /// ```rust /// use skyscraper::html::{self, parse::ParseError}; /// # fn main() -> Result<(), ParseError> { /// // Parse the text into a document. 
/// let html_text = r##"<div attr1="attr1_value"></div>"##; /// let document = html::parse(html_text)?; /// /// // Get root node. /// let doc_node = document.root_node; /// let attributes = doc_node.get_attributes(&document).expect("No attributes"); /// /// assert_eq!("attr1_value", attributes["attr1"]); /// # Ok(()) /// # } pub fn get_attributes<'a>(&'a self, document: &'a HtmlDocument) -> Option<&'a TagAttributes> { match document.get_html_node(self) { Some(html_node) => html_node.get_attributes(), None => None, } } /// Get the children of this node as an iterator. pub fn children<'a>( &self, document: &'a HtmlDocument, ) -> impl Iterator<Item = DocumentNode> + 'a { Box::new(self.id.children(&document.arena).map(DocumentNode::new)) } /// Get the parent of this node if it exists. pub fn parent(&self, document: &HtmlDocument) -> Option<DocumentNode> { self.id .ancestors(&document.arena) .nth(1) .map(DocumentNode::new) } } #[cfg(test)] mod tests { use indoc::indoc; use super::*; #[test] fn html_node_get_text_should_work_on_text_node() { // arrange let mut arena = Arena::new(); let text_node = HtmlNode::Text(String::from("hello world")); let text_doc_node = DocumentNode::new(arena.new_node(text_node)); let document = HtmlDocument::new(arena, text_doc_node); // act let text_node = document.get_html_node(&text_doc_node).unwrap(); let result = text_node.get_text(&text_doc_node, &document).unwrap(); // assert assert_eq!("hello world", result); } #[test] fn html_node_get_text_should_work_on_tag_node_with_one_text_child() { // arrange let mut arena = Arena::new(); let text_node = HtmlNode::Text(String::from("hello world")); let text_node_id = arena.new_node(text_node); let tag_node = HtmlNode::Tag(HtmlTag::new(String::from("tag"))); let tag_node_id = arena.new_node(tag_node); let tag_doc_node = DocumentNode::new(tag_node_id); tag_node_id.append(text_node_id, &mut arena); let document = HtmlDocument::new(arena, tag_doc_node); // act let tag_node = 
document.get_html_node(&tag_doc_node).unwrap(); let result = tag_node.get_text(&tag_doc_node, &document).unwrap(); // assert assert_eq!("hello world", result); } #[test] fn html_node_get_text_should_work_on_tag_node_with_two_text_children() { // arrange let mut arena = Arena::new(); let text_node = HtmlNode::Text(String::from("hello")); let text_node_id = arena.new_node(text_node); let text_node2 = HtmlNode::Text(String::from("world")); let text_node2_id = arena.new_node(text_node2); let tag_node = HtmlNode::Tag(HtmlTag::new(String::from("tag"))); let tag_node_id = arena.new_node(tag_node); tag_node_id.append(text_node_id, &mut arena); tag_node_id.append(text_node2_id, &mut arena); let tag_doc_node = DocumentNode::new(tag_node_id); let document = HtmlDocument::new(arena, tag_doc_node); // act let tag_node = document.get_html_node(&tag_doc_node).unwrap(); let result = tag_node.get_text(&tag_doc_node, &document).unwrap(); // assert assert_eq!("hello world", result); } #[test] fn html_node_get_text_should_ignore_nested_text() { // arrange let mut arena = Arena::new(); let text_node = HtmlNode::Text(String::from("hello")); let text_node_id = arena.new_node(text_node); let text_node2 = HtmlNode::Text(String::from("world")); let text_node2_id = arena.new_node(text_node2); let tag_node = HtmlNode::Tag(HtmlTag::new(String::from("tag"))); let tag_node_id = arena.new_node(tag_node); tag_node_id.append(text_node_id, &mut arena); let tag_node2 = HtmlNode::Tag(HtmlTag::new(String::from("tag2"))); let tag_node2_id = arena.new_node(tag_node2); tag_node2_id.append(text_node2_id, &mut arena); tag_node_id.append(tag_node2_id, &mut arena); let tag_doc_node = DocumentNode::new(tag_node_id); let document = HtmlDocument::new(arena, tag_doc_node); // act let tag_node = document.get_html_node(&tag_doc_node).unwrap(); let result = tag_node.get_text(&tag_doc_node, &document).unwrap(); // assert assert_eq!("hello", result); } #[test] fn html_node_get_all_text_should_include_nested_text() { 
// arrange let mut arena = Arena::new(); let text_node = HtmlNode::Text(String::from("hello")); let text_node_id = arena.new_node(text_node); let text_node2 = HtmlNode::Text(String::from("world")); let text_node2_id = arena.new_node(text_node2); let tag_node = HtmlNode::Tag(HtmlTag::new(String::from("tag"))); let tag_node_id = arena.new_node(tag_node); tag_node_id.append(text_node_id, &mut arena); let tag_node2 = HtmlNode::Tag(HtmlTag::new(String::from("tag2"))); let tag_node2_id = arena.new_node(tag_node2); tag_node2_id.append(text_node2_id, &mut arena); tag_node_id.append(tag_node2_id, &mut arena); let tag_doc_node = DocumentNode::new(tag_node_id); let document = HtmlDocument::new(arena, tag_doc_node); // act let tag_node = document.get_html_node(&tag_doc_node).unwrap(); let result = tag_node.get_all_text(&tag_doc_node, &document).unwrap(); // assert assert_eq!("hello world", result); } #[test] fn html_node_get_attributes_for_tag() { // arrange let node = HtmlNode::Tag(HtmlTag { name: "div".to_string(), attributes: HashMap::from([("attr_name".to_string(), "attr_value".to_string())]), }); // assert assert!(node.get_attributes().is_some()); assert_eq!(node.get_attributes().unwrap()["attr_name"], "attr_value"); } #[test] fn html_node_get_attributes_for_text() { // arrange let node = HtmlNode::Text(String::from("hello world")); // assert assert!(node.get_attributes().is_none()) } #[test] fn document_node_get_attributes_for_tag() { // arrange let mut arena = Arena::new(); let html_node = HtmlNode::Tag(HtmlTag { name: "div".to_string(), attributes: HashMap::from([("attr_name".to_string(), "attr_value".to_string())]), }); let doc_node = DocumentNode::new(arena.new_node(html_node)); let html_document = HtmlDocument::new(arena, doc_node); // act let node = html_document.get_html_node(&doc_node).unwrap(); let attributes = node.get_attributes(); // assert assert!(attributes.is_some()); assert_eq!(attributes.unwrap()["attr_name"], "attr_value"); } #[test] fn 
document_node_get_attributes_for_text() { // arrange let mut arena = Arena::new(); let html_node = HtmlNode::Text(String::from("hello world")); let doc_node = DocumentNode::new(arena.new_node(html_node)); let html_document = HtmlDocument::new(arena, doc_node); // act let node = html_document.get_html_node(&doc_node).unwrap(); let attributes = node.get_attributes(); // assert assert!(attributes.is_none()); } #[test] fn html_document_display_should_output_same_text() { // arrange let text = indoc!( r#" <html> <a> the </a> <b> quick <c> brown </c> fox </b> jumps over <d> </d> the lazy <f> dog </f> </html> "#, ); let document = parse(&text).unwrap(); // act let html_output = document.to_string(); // assert assert_eq!(html_output, text); } #[test] fn html_document_display_should_handle_attributes() { // arrange let text = indoc!( r#" <html @class="foo" @id="bar"> </html> "#, ); let document = parse(&text).unwrap(); // act let html_output = document.to_string(); // assert // the order of the attributes is undefined, so it must be deserialized and compared programatically let result_document = parse(&html_output).unwrap(); let node = result_document .get_html_node(&result_document.root_node) .unwrap() .unwrap_tag(); assert_eq!("html", node.name); assert_eq!("foo", node.attributes["@class"]); assert_eq!("bar", node.attributes["@id"]); } #[test] fn html_document_display_should_expand_self_closing_tags() { // arrange let text = indoc!( r#" <html> <a /> </html> "#, ); let document = parse(&text).unwrap(); // act let html_output = document.to_string(); // assert let expected_text = indoc!( r#" <html> <a> </a> </html> "#, ); assert_eq!(html_output, expected_text); } #[test] fn html_document_display_should_handle_void_tags() { // arrange let text = indoc!( r#" <html> <br> </html> "#, ); let document = parse(&text).unwrap(); // act let html_output = document.to_string(); // assert assert_eq!(html_output, text); } }
// NOTE(review): svd2rust-generated register accessor for FMC_BCHPBR1.
// Do not hand-edit logic here — regenerate from the SVD instead.
#[doc = "Register `FMC_BCHPBR1` reader"]
pub type R = crate::R<FMC_BCHPBR1_SPEC>;
#[doc = "Field `BCHPB` reader - BCHPB"]
pub type BCHPB_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - BCHPB"]
    #[inline(always)]
    pub fn bchpb(&self) -> BCHPB_R {
        // Field spans the full 32-bit register, so no masking/shifting needed.
        BCHPB_R::new(self.bits)
    }
}
#[doc = "These registers contain the BCH parity bits (BCHPB). For the BCH 4-bit, only BCHPB\\[51:0\\] are significant and for the BCH 8-bit BCHPB\\[103:0\\] are significant.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fmc_bchpbr1::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FMC_BCHPBR1_SPEC;
impl crate::RegisterSpec for FMC_BCHPBR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`fmc_bchpbr1::R`](R) reader structure"]
impl crate::Readable for FMC_BCHPBR1_SPEC {}
#[doc = "`reset()` method sets FMC_BCHPBR1 to value 0"]
impl crate::Resettable for FMC_BCHPBR1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use crate::ethcore_network::*; use ed25519_dalek::{Keypair}; #[allow(deprecated)] use super::error::*; use parking_lot::{ Mutex }; use std; use std::cell::{Cell as StdCell, RefCell}; use std::collections::{HashMap, VecDeque}; use std::convert::From; use std::clone::Clone; use std::path::PathBuf; use std::sync::mpsc; use std::sync::{ Arc }; use std::thread::{JoinHandle}; use std::time::{Duration, SystemTime, UNIX_EPOCH}; use ton_labs_assembler::compile_code; use ton_block::{ Account, BlkPrevInfo, Block, CommonMsgInfo, CurrencyCollection, ExtBlkRef, ExternalInboundMessageHeader, GetRepresentationHash, Grams, InternalMessageHeader, Message, MsgAddressExt, MsgAddressInt, Serializable, Deserializable, ShardStateUnsplit, ShardIdent, StateInit, Transaction, SignedBlock, }; use ton_types::{ Cell, SliceData }; use ton_types::types::{ UInt256, AccountId, ByteOrderRead }; pub mod block_builder; pub use self::block_builder::*; pub mod file_based_storage; use self::file_based_storage::*; pub mod messages; pub use self::messages::*; pub mod new_block_applier; use self::new_block_applier::*; pub mod blocks_finality; use self::blocks_finality::*; pub mod ton_node_engine; use self::ton_node_engine::*; pub mod ton_node_handlers; pub mod config; use self::config::*; pub mod routing_table; use self::routing_table::*; use std::{io::Read, thread}; pub struct StubReceiver { stop_tx: Option<mpsc::Sender<bool>>, join_handle: Option<JoinHandle<()>>, workchain_id: i8, block_seqno: u32, timeout: u64 } lazy_static! 
{ static ref ACCOUNTS: Mutex<Vec<AccountId>> = Mutex::new(vec![]); static ref SUPER_ACCOUNT_ID: AccountId = AccountId::from([0;32]); } static ACCOUNTS_COUNT: u8 = 255; impl MessagesReceiver for StubReceiver { fn run(&mut self, queue: Arc<InMessagesQueue>) -> NodeResult<()> { if self.block_seqno == 1 { Self::deploy_contracts(self.workchain_id, &queue)?; } if self.timeout == 0 { return Ok(()); } for acc in 0..ACCOUNTS_COUNT { ACCOUNTS.lock().push(AccountId::from([acc + 1;32])); } if self.join_handle.is_none() { let (tx, rx) = mpsc::channel(); self.stop_tx = Some(tx); let mut log_time_gen = LogicalTimeGenerator::with_init_value(0); let workchain_id = self.workchain_id; let timeout = self.timeout; self.join_handle = Some(std::thread::spawn(move || { loop { if let Some(msg) = Self::try_receive_message(workchain_id, &mut log_time_gen) { let _res = queue.queue(QueuedMessage::with_message(msg).unwrap()); } if rx.try_recv().is_ok() { println!("append message loop break"); break; } thread::sleep(Duration::from_micros(timeout)); } // Creation of special account zero to give money for new accounts queue.queue(QueuedMessage::with_message(Self::create_transfer_message( workchain_id, SUPER_ACCOUNT_ID.clone(), SUPER_ACCOUNT_ID.clone(), 1_000_000, log_time_gen.get_current_time() )).unwrap()).unwrap(); queue.queue(QueuedMessage::with_message(Self::create_account_message_with_code( workchain_id, SUPER_ACCOUNT_ID.clone() )).unwrap()).unwrap(); })); } Ok(()) } } const GIVER_BALANCE: u128 = 5_000_000_000_000_000_000; const MULTISIG_BALANCE: u128 = 1_000_000_000_000_000; const GIVER_ABI1_DEPLOY_MSG: &[u8] = include_bytes!("../../data/giver_abi1_deploy_msg.boc"); const DEPRECATED_GIVER_ABI2_DEPLOY_MSG: &[u8] = include_bytes!("../../data/deprecated_giver_abi2_deploy_msg.boc"); const GIVER_ABI2_DEPLOY_MSG: &[u8] = include_bytes!("../../data/giver_abi2_deploy_msg.boc"); const MULTISIG_DEPLOY_MSG: &[u8] = include_bytes!("../../data/safemultisig_deploy_msg.boc"); #[allow(dead_code)] impl 
StubReceiver { pub fn with_params(workchain_id: i8, block_seqno: u32, timeout: u64) -> Self { StubReceiver { stop_tx: None, join_handle: None, workchain_id, block_seqno, timeout } } pub fn stop(&mut self) { if self.join_handle.is_some() { if let Some(ref stop_tx) = self.stop_tx { stop_tx.send(true).unwrap(); self.join_handle.take().unwrap().join().unwrap(); } } } fn create_account_message_with_code(workchain_id: i8, account_id: AccountId) -> Message { let code = " ; s0 - function selector ; s1 - body slice IFNOTRET ACCEPT DUP SEMPTY IFRET ACCEPT BLOCKLT LTIME INC ; increase logical time by 1 PUSH s2 ; body to top PUSHINT 96 ; internal header in body, cut unixtime and lt SDSKIPLAST NEWC STSLICE STU 64 ; store tr lt STU 32 ; store unixtime STSLICECONST 0 ; no init STSLICECONST 0 ; body (Either X) ENDC PUSHINT 0 SENDRAWMSG "; Self::create_account_message(workchain_id, account_id, code, SliceData::new_empty().into_cell(), None) } fn deploy_contracts(workchain_id: i8, queue: &InMessagesQueue) -> NodeResult<()> { Self::deploy_contract(workchain_id, GIVER_ABI1_DEPLOY_MSG, GIVER_BALANCE, 1, queue)?; Self::deploy_contract(workchain_id, GIVER_ABI2_DEPLOY_MSG, GIVER_BALANCE, 3, queue)?; Self::deploy_contract(workchain_id, MULTISIG_DEPLOY_MSG, MULTISIG_BALANCE, 5, queue)?; Self::deploy_contract(workchain_id, DEPRECATED_GIVER_ABI2_DEPLOY_MSG, GIVER_BALANCE, 7, queue)?; Ok(()) } fn deploy_contract( workchain_id: i8, deploy_msg_boc: &[u8], initial_balance: u128, transfer_lt: u64, queue: &InMessagesQueue ) -> NodeResult<AccountId> { let (deploy_msg, deploy_addr) = Self::create_contract_deploy_message(workchain_id, deploy_msg_boc); let transfer_msg = Self::create_transfer_message( workchain_id, deploy_addr.clone(), deploy_addr.clone(), initial_balance, transfer_lt ); Self::queue_with_retry(queue, transfer_msg)?; Self::queue_with_retry(queue, deploy_msg)?; Ok(deploy_addr) } fn queue_with_retry(queue: &InMessagesQueue, message: Message) -> NodeResult<()> { let mut message = 
QueuedMessage::with_message(message)?; while let Err(msg) = queue.queue(message) { message = msg; std::thread::sleep(std::time::Duration::from_micros(100)); } Ok(()) } fn create_contract_deploy_message(workchain_id: i8, msg_boc: &[u8]) -> (Message, AccountId) { let mut msg = Message::construct_from_bytes(msg_boc).unwrap(); if let CommonMsgInfo::ExtInMsgInfo(ref mut header) = msg.header_mut() { match header.dst { MsgAddressInt::AddrStd(ref mut addr) => addr.workchain_id = workchain_id, _ => panic!("Contract deploy message has invalid destination address") } } let address = msg.int_dst_account_id().unwrap(); (msg, address) } // create external message with init field, so-called "constructor message" pub fn create_account_message( workchain_id: i8, account_id: AccountId, code: &str, data: Cell, body: Option<SliceData> ) -> Message { let code_cell = compile_code(code).unwrap().into_cell(); let mut msg = Message::with_ext_in_header( ExternalInboundMessageHeader { src: MsgAddressExt::default(), dst: MsgAddressInt::with_standart(None, workchain_id, account_id.clone()).unwrap(), import_fee: Grams::zero(), } ); let mut state_init = StateInit::default(); state_init.set_code(code_cell); state_init.set_data(data); *msg.state_init_mut() = Some(state_init); *msg.body_mut() = body; msg } // create transfer funds message for initialize balance pub fn create_transfer_message( workchain_id: i8, src: AccountId, dst: AccountId, value: u128, lt: u64 ) -> Message { let mut balance = CurrencyCollection::default(); balance.grams = value.into(); let mut msg = Message::with_int_header( InternalMessageHeader::with_addresses_and_bounce( MsgAddressInt::with_standart(None, workchain_id, src).unwrap(), MsgAddressInt::with_standart(None, workchain_id, dst).unwrap(), balance, false, ) ); msg.set_at_and_lt(SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() as u32, lt); msg } // Create message "from wallet" to transfer some funds // from one account to another pub fn 
create_external_transfer_funds_message( workchain_id: i8, src: AccountId, dst: AccountId, value: u128, _lt: u64 ) -> Message { let mut msg = Message::with_ext_in_header( ExternalInboundMessageHeader { src: MsgAddressExt::default(), dst: MsgAddressInt::with_standart(None, workchain_id, src.clone()).unwrap(), import_fee: Grams::zero(), } ); *msg.body_mut() = Some(Self::create_transfer_int_header(workchain_id, src, dst, value) .serialize() .unwrap() .into() ); msg } pub fn create_transfer_int_header( workchain_id: i8, src: AccountId, dest: AccountId, value: u128 ) -> InternalMessageHeader { let msg = Self::create_transfer_message(workchain_id, src, dest, value, 0); match msg.withdraw_header() { CommonMsgInfo::IntMsgInfo(int_hdr) => int_hdr, _ => panic!("must be internal message header"), } } fn try_receive_message(workchain_id: i8, log_time_gen: &mut LogicalTimeGenerator) -> Option<Message> { let time = log_time_gen.get_next_time(); Some(match time - 1 { x if x < (ACCOUNTS_COUNT as u64) => { Self::create_transfer_message( workchain_id, SUPER_ACCOUNT_ID.clone(), ACCOUNTS.lock()[x as usize].clone(), 1000000, log_time_gen.get_current_time() ) }, x if x >= (ACCOUNTS_COUNT as u64) && x < (ACCOUNTS_COUNT as u64)*2 => { Self::create_transfer_message( workchain_id, SUPER_ACCOUNT_ID.clone(), ACCOUNTS.lock()[(x - ACCOUNTS_COUNT as u64) as usize].clone(), 1000000, log_time_gen.get_current_time() ) } x if x >= (ACCOUNTS_COUNT as u64)*2 && x < (ACCOUNTS_COUNT as u64)*3 => { let index = (x - (ACCOUNTS_COUNT as u64)*2) as usize; Self::create_account_message_with_code(workchain_id, ACCOUNTS.lock()[index].clone()) } x => { // send funds from 1 to 2, after from 2 to 3 and etc let acc_src = (x%ACCOUNTS_COUNT as u64) as usize; let acc_dst = (acc_src + 1) % ACCOUNTS_COUNT as usize; let src_acc_id = ACCOUNTS.lock()[acc_src].clone(); let dst_acc_id = ACCOUNTS.lock()[acc_dst].clone(); Self::create_external_transfer_funds_message( workchain_id, src_acc_id, dst_acc_id, rand::random::<u8>() as 
u128, log_time_gen.get_current_time()
                )
            }
        })
    }
}

///
/// Information about last block in shard
///
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct ShardStateInfo {
    /// Last block sequence number
    pub seq_no: u64,
    /// Last block end logical time
    pub lt: u64,
    /// Last block hash
    pub hash: UInt256,
}

impl ShardStateInfo {
    /// Builds a `ShardStateInfo` from its three components.
    pub fn with_params(seq_no: u64, lt: u64, hash: UInt256) -> Self {
        Self {
            seq_no, lt, hash,
        }
    }

    /// Serializes as big-endian `seq_no` (8 bytes), big-endian `lt` (8 bytes),
    /// then the raw 32-byte hash — the inverse of `deserialize` below.
    pub fn serialize(&self) -> Vec<u8> {
        let mut data = vec![];
        data.extend_from_slice(&(self.seq_no).to_be_bytes());
        data.extend_from_slice(&(self.lt).to_be_bytes());
        data.append(&mut self.hash.as_slice().to_vec());
        data
    }

    /// Reads back the `serialize` layout: two big-endian u64s followed by a
    /// 256-bit hash. Errors from the reader propagate as `NodeResult`.
    pub fn deserialize<R: Read>(rdr: &mut R) -> NodeResult<Self> {
        let seq_no = rdr.read_be_u64()?;
        let lt = rdr.read_be_u64()?;
        let hash = UInt256::from(rdr.read_u256()?);
        Ok(ShardStateInfo {seq_no, lt, hash})
    }
}

/// Trait for shard state storage
pub trait ShardStateStorage {
    // Returns the current (unsplit) shard state.
    fn shard_state(&self) -> NodeResult<ShardStateUnsplit>;
    // Returns the shard state as a cell bag root.
    fn shard_bag(&self) -> NodeResult<Cell>;
    fn save_shard_state(&self, shard_state: &ShardStateUnsplit) -> NodeResult<()>;
    // Pre-serialized variants used on the hot path to avoid re-serializing.
    fn serialized_shardstate(&self) -> NodeResult<Vec<u8>>;
    fn save_serialized_shardstate(&self, data: Vec<u8>) -> NodeResult<()>;
    fn save_serialized_shardstate_ex(&self, shard_state: &ShardStateUnsplit,
        shard_data: Option<Vec<u8>>, shard_hash: &UInt256,
        shard_state_info: ShardStateInfo) -> NodeResult<()>;
}

// Trait for blocks storage (key-value)
pub trait BlocksStorage {
    fn block(&self, seq_no: u32, vert_seq_no: u32 ) -> NodeResult<SignedBlock>;
    fn raw_block(&self, seq_no: u32, vert_seq_no: u32 ) -> NodeResult<Vec<u8>>;
    fn save_block(&self, block: &SignedBlock) -> NodeResult<()>;
    fn save_raw_block(&self, block: &SignedBlock, block_data: Option<&Vec<u8>>) -> NodeResult<()>;
}

/// Trait for transactions storage (this storage has to support difficult queries)
pub trait TransactionsStorage {
    fn save_transaction(&self, tr: Arc<Transaction>) -> NodeResult<()>;
    fn find_by_lt(&self, _lt: u64,
_acc_id: &AccountId) -> NodeResult<Option<Transaction>> {unimplemented!()} } /// Trait for save finality states blockchain pub trait BlockFinality { fn finalize_without_new_block(&mut self, finality_hash: Vec<UInt256>) -> NodeResult<()>; fn put_block_with_info(&mut self, sblock: SignedBlock, sblock_data:Option<Vec<u8>>, block_hash: Option<UInt256>, shard_state: Arc<ShardStateUnsplit>, finality_hashes: Vec<UInt256>, is_sync: bool, ) -> NodeResult<()>; fn get_last_seq_no(&self) -> u32; fn get_last_block_info(&self) -> NodeResult<BlkPrevInfo>; fn get_last_shard_state(&self) -> Arc<ShardStateUnsplit>; fn find_block_by_hash(&self, hash: &UInt256) -> u64; fn rollback_to(&mut self, hash: &UInt256) -> NodeResult<()>; fn get_raw_block_by_seqno(&self, seq_no: u32, vert_seq_no: u32 ) -> NodeResult<Vec<u8>>; fn get_last_finality_shard_hash(&self) -> NodeResult<(u64, UInt256)>; fn reset(&mut self) -> NodeResult<()>; } pub trait MessagesReceiver: Send { fn run(&mut self, queue: Arc<InMessagesQueue>) -> NodeResult<()>; } pub trait DocumentsDb: Send + Sync { fn put_account(&self, acc: Account) -> NodeResult<()>; fn put_deleted_account(&self, workchain_id: i32, account_id: AccountId) -> NodeResult<()>; fn put_block(&self, block: Block) -> NodeResult<()>; fn put_message( &self, msg: Message, transaction_id: Option<UInt256>, transaction_now: Option<u32>, block_id: Option<UInt256> ) -> NodeResult<()>; fn put_transaction( &self, tr: Transaction, block_id: Option<UInt256>, workchain_id: i32 ) -> NodeResult<()>; fn has_delivery_problems(&self) -> bool; } pub struct DocumentsDbMock; impl DocumentsDb for DocumentsDbMock { fn put_account(&self, _: Account) -> NodeResult<()> { Ok(()) } fn put_deleted_account(&self, _: i32, _: AccountId) -> NodeResult<()> { Ok(()) } fn put_block(&self, _: Block) -> NodeResult<()> { Ok(()) } fn put_message(&self, _: Message, _: Option<UInt256>, _: Option<u32>, _: Option<UInt256>) -> NodeResult<()> { Ok(()) } fn put_transaction(&self, _: Transaction, _: 
Option<UInt256>, _: i32) -> NodeResult<()> { Ok(()) } fn has_delivery_problems(&self) -> bool { false } } struct TestStorage { shard_ident: ShardIdent, shard_state: StdCell<ShardStateUnsplit>, blocks: RefCell<HashMap<UInt256, SignedBlock>>, transactions: RefCell<Vec<Transaction>>, finality_by_hash: RefCell<HashMap<UInt256, Vec<u8>>>, finality_by_no: RefCell<HashMap<u64, Vec<u8>>>, finality_by_str: RefCell<HashMap<String, Vec<u8>>>, } impl TestStorage { #[allow(dead_code)] pub fn new(shard_ident: ShardIdent) -> Self { TestStorage { shard_ident, shard_state: StdCell::new(ShardStateUnsplit::default()), blocks: RefCell::new(HashMap::new()), transactions: RefCell::new(Vec::new()), finality_by_hash: RefCell::new(HashMap::new()), finality_by_no: RefCell::new(HashMap::new()), finality_by_str: RefCell::new(HashMap::new()), } } /// /// Get hash-identifier form shard ident and sequence numbers /// fn get_hash_from_ident_and_seq(shard_ident: &ShardIdent, seq_no: u32, vert_seq_no: u32) -> UInt256 { let mut hash = vec![]; // TODO: check here hash.extend_from_slice(&(shard_ident.shard_prefix_with_tag()).to_be_bytes()); hash.extend_from_slice(&(seq_no).to_be_bytes()); hash.extend_from_slice(&(vert_seq_no).to_be_bytes()); UInt256::from_slice(&hash) } } impl ShardStateStorage for TestStorage { fn shard_state( &self ) -> NodeResult<ShardStateUnsplit> { let ss = self.shard_state.take(); self.shard_state.set(ss.clone()); Ok(ss) } fn shard_bag( &self ) -> NodeResult<Cell> { let ss = self.shard_state.take(); self.shard_state.set(ss.clone()); Ok(Cell::default()) } fn save_shard_state(&self, shard_state: &ShardStateUnsplit) -> NodeResult<()> { self.shard_state.set(shard_state.clone()); Ok(()) } fn serialized_shardstate(&self) -> NodeResult<Vec<u8>>{ Ok(vec![]) } fn save_serialized_shardstate(&self, _data: Vec<u8>) -> NodeResult<()>{ Ok(()) } fn save_serialized_shardstate_ex(&self, _shard_state: &ShardStateUnsplit, _shard_data: Option<Vec<u8>>, _shard_hash: &UInt256, _shard_state_info: 
ShardStateInfo) -> NodeResult<()>{ Ok(()) } } impl BlocksStorage for TestStorage { /// /// Get block from memory storage by ID /// fn block(&self, seq_no: u32, vert_seq_no: u32) -> NodeResult<SignedBlock>{ let hash = Self::get_hash_from_ident_and_seq(&self.shard_ident, seq_no, vert_seq_no); match self.blocks.borrow().get(&hash) { Some(b) => Ok(b.clone()), _ => Err(NodeError::from_kind(NodeErrorKind::NotFound)) } } fn raw_block(&self, _seq_no: u32, _vert_seq_no: u32 ) -> NodeResult<Vec<u8>>{ Ok(vec![]) } /// /// Save block to memory storage /// fn save_block(&self, block: &SignedBlock) -> NodeResult<()>{ let info = block.block().read_info()?; let hash = Self::get_hash_from_ident_and_seq(&info.shard(), info.seq_no(), info.vert_seq_no()); self.blocks.try_borrow_mut().unwrap().insert(hash, block.clone()); Ok(()) } fn save_raw_block(&self, _block: &SignedBlock, _block_data: Option<&Vec<u8>>) -> NodeResult<()> { info!(target: "node", "save block with seq_no: {}", _block.block().read_info()?.seq_no()); Ok(()) } } impl TransactionsStorage for TestStorage { fn save_transaction(&self, tr: Arc<Transaction>) -> NodeResult<()>{ self.transactions.borrow_mut().push((*tr).clone()); Ok(()) } fn find_by_lt(&self, lt: u64, _acc_id: &AccountId) -> NodeResult<Option<Transaction>> { for tr in self.transactions.borrow().iter() { if tr.logical_time() == lt { return Ok(Some(tr.clone())) } } Ok(None) } } impl FinalityStorage for TestStorage { fn save_non_finalized_block(&self, hash: UInt256, seq_no: u64, data: Vec<u8>) -> NodeResult<()> { println!("save block {:?}", hash); self.finality_by_hash.try_borrow_mut().unwrap().insert(hash, data.clone()); self.finality_by_no.try_borrow_mut().unwrap().insert(seq_no, data); Ok(()) } fn load_non_finalized_block_by_seq_no(&self, seq_no: u64) -> NodeResult<Vec<u8>> { println!("load block {:?}", seq_no); if self.finality_by_no.borrow().contains_key(&seq_no) { Ok(self.finality_by_no.try_borrow_mut().unwrap().get(&seq_no).unwrap().clone()) } else { 
Err(NodeError::from_kind(NodeErrorKind::NotFound)) } } fn load_non_finalized_block_by_hash(&self, hash: UInt256) -> NodeResult<Vec<u8>> { println!("load block {:?}", hash); if self.finality_by_hash.borrow().contains_key(&hash) { Ok(self.finality_by_hash.try_borrow_mut().unwrap().get(&hash).unwrap().clone()) } else { Err(NodeError::from_kind(NodeErrorKind::NotFound)) } } fn remove_form_finality_storage(&self, hash: UInt256) -> NodeResult<()> { println!("remove block {:?}", hash); self.finality_by_hash.try_borrow_mut().unwrap().remove(&hash).unwrap(); Ok(()) } fn save_custom_finality_info(&self, key: String, data: Vec<u8>) -> NodeResult<()> { println!("save custom {}", key); self.finality_by_str.try_borrow_mut().unwrap().insert(key, data); Ok(()) } fn load_custom_finality_info(&self, key: String) -> NodeResult<Vec<u8>> { println!("load custom {}", key); if self.finality_by_str.borrow().contains_key(&key) { Ok(self.finality_by_str.try_borrow_mut().unwrap().remove(&key).unwrap()) } else { Err(NodeError::from_kind(NodeErrorKind::NotFound)) } } } pub fn hexdump(d: &[u8]) { let mut str = String::new(); for i in 0..d.len() { str.push_str(&format!("{:02x}{}", d[i], if (i + 1) % 16 == 0 { '\n' } else { ' ' })); } debug!(target: "node", "{}", str); } /// /// Struct LogicalTime Generator /// pub struct LogicalTimeGenerator { current: Arc<Mutex<u64>> } impl Default for LogicalTimeGenerator { fn default() -> Self { LogicalTimeGenerator { current: Arc::new(Mutex::new(0)) } } } /// /// Implementation of Logical Time Generator /// impl LogicalTimeGenerator { /// /// Initialize new instance with current value /// pub fn with_init_value(current: u64) -> Self { LogicalTimeGenerator { current : Arc::new(Mutex::new(current)) } } /// /// Get next value of logical time /// pub fn get_next_time(&mut self) -> u64 { let mut current = self.current.lock(); *current += 1; *current } /// /// Get current value of logical time /// pub fn get_current_time(&self) -> u64 { let current = 
self.current.lock(); *current } }
use mongodb::{options::ClientOptions, Client}; use std::sync::Arc; use warp::Filter; mod api; mod user; mod generate_maps; use api::*; const MONGODB_URL: &str = "mongodb://localhost:27017"; const DATABASE: &str = "mvp"; #[tokio::main] async fn main() { // prints initial map data generate_maps::generate(); let client_options = ClientOptions::parse(MONGODB_URL).unwrap(); let client = Client::with_options(client_options).unwrap(); let db = Arc::new(client.database(DATABASE)); let db = warp::any().map(move || Arc::clone(&db)); // /GET / serves index.html let index = warp::get() .and(warp::path::end()) .and(warp::fs::file("./static/index.html")); // /GET /static/* serves from ./static dir let static_res = warp::path("static").and(warp::fs::dir("./static/")); let register = warp::post() .and(warp::path("register")) .and(warp::body::json()) .and(db.clone()) .and_then(register); let login = warp::post() .and(warp::path("login")) .and(warp::body::json()) .and(db.clone()) .and_then(login); let logout = warp::post().and(warp::path("logout")).and_then(logout); let api = register.or(login).or(logout); let api = warp::path("api").and(api); let routes = index.or(static_res).or(api); warp::serve(routes).run(([127, 0, 0, 1], 3030)).await; }
use failure;
use failure::ResultExt;
use serde_json;
use std::collections::HashMap;
use std::fs;

/// A city: (latitude, longitude) plus the names of its neighbouring cities.
#[derive(Deserialize, Debug)]
struct City {
    location: (f64, f64),
    neighbours: Vec<String>,
}

/// Map from city name to its data, mirroring the JSON input layout.
#[derive(Deserialize, Debug)]
pub struct Data(HashMap<String, City>);

/// Load and deserialize the city data file at `path`.
///
/// # Errors
///
/// Fails if the file cannot be read or if its contents do not match the
/// expected JSON schema; in both cases the error is annotated with the
/// file path so the offending file is easy to locate.
pub fn from_file(path: &str) -> Result<Data, failure::Error> {
    let contents = fs::read_to_string(path).context(path.to_string())?;
    // Attach the path to parse failures too — previously only I/O errors
    // carried it, leaving schema problems without a file reference.
    let cities = serde_json::from_str(&contents).context(path.to_string())?;
    Ok(cities)
}
use ethane::contract::{CallOpts, CallResult, Caller}; use ethane::types::{Address, H256, U256}; use ethane::{Connection, Http}; use ethane_abi::*; use std::convert::TryFrom; use std::path::Path; use test_helper::{ deploy_contract, wait_for_transaction, ConnectionWrapper, TEST_ERC20_NAME, TEST_ERC20_PATH, }; const ADDRESS1: &str = "0x007ccffb7916f37f7aeef05e8096ecfbe55afc2f"; const ADDRESS2: &str = "0x99429f64cf4d5837620dcc293c1a537d58729b68"; #[test] fn test_eth_call_contract() { // deploy contract let mut client = ConnectionWrapper::new_from_env(None); let address = Address::try_from(ADDRESS1).unwrap(); let (contract_address, _) = deploy_contract( &mut client, address, &Path::new(TEST_ERC20_PATH), TEST_ERC20_NAME, ); let conn = Connection::new(Http::new("http://localhost:8545", None)); let mut caller = Caller::new_from_path( conn, "../ethane/test-helper/src/fixtures/TestERC20.abi", contract_address, ); let result = caller.call("balanceOf", vec![Parameter::from(address)], None); match result { CallResult::Transaction(_) => panic!("Should be eth_call"), CallResult::Call(r) => match r[0] { Parameter::Uint(data, 256) => { assert_eq!(data, H256::from_int_unchecked(1000000000_u64)) } _ => panic!("Invalid data received!"), }, } } #[test] fn test_eth_call_contract_transfer() { // deploy contract let mut client = ConnectionWrapper::new_from_env(None); let address = Address::try_from(ADDRESS1).unwrap(); let to_address = Address::try_from(ADDRESS2).unwrap(); let (contract_address, _) = deploy_contract( &mut client, address, &Path::new(TEST_ERC20_PATH), TEST_ERC20_NAME, ); let conn = Connection::new(Http::new("http://localhost:8545", None)); let mut caller = Caller::new_from_path( conn, "../ethane/test-helper/src/fixtures/TestERC20.abi", contract_address, ); let result = caller.call( "transfer", vec![ Parameter::from(to_address), Parameter::from(U256::from_int_unchecked(1000_u16)), ], Some(CallOpts { force_call_type: None, from: Some(address), }), ); match result { 
CallResult::Call(_) => panic!("Should be a transaction"), CallResult::Transaction(tx_hash) => { wait_for_transaction(&mut client, tx_hash); let result = caller.call("balanceOf", vec![Parameter::from(to_address)], None); match result { CallResult::Transaction(_) => panic!("Should be eth_call"), CallResult::Call(r) => match r[0] { Parameter::Uint(data, 256) => { assert_eq!(data, H256::from_int_unchecked(1000_u64)) } _ => panic!("Invalid data received!"), }, } } } }
use std::sync::Arc; use eyre::Report; use hashbrown::HashMap; use rosu_v2::prelude::{GameMode, OsuError, Username}; use twilight_model::{ application::interaction::{ application_command::{CommandDataOption, CommandOptionValue}, ApplicationCommand, }, id::{marker::UserMarker, Id}, }; use crate::{ commands::{ check_user_mention, osu::{get_user_and_scores, ScoreArgs, UserArgs}, parse_discord, parse_mode_option, DoubleResultCow, MyCommand, MyCommandOption, }, database::UserConfig, embeds::{EmbedData, TopEmbed}, pagination::{Pagination, TopPagination}, tracking::process_osu_tracking, util::{ constants::{ common_literals::{DISCORD, MODE, NAME}, GENERAL_ISSUE, OSU_API_ISSUE, }, matcher, numbers, osu::ScoreOrder, ApplicationCommandExt, CowUtils, InteractionExt, MessageExt, }, Args, BotResult, CommandData, Context, Error, MessageBuilder, }; use super::{option_discord, option_mode, option_name, TopOrder}; async fn _mapper(ctx: Arc<Context>, data: CommandData<'_>, args: MapperArgs) -> BotResult<()> { let MapperArgs { config, mapper } = args; let mode = config.mode.unwrap_or(GameMode::STD); let user = match config.into_username() { Some(name) => name, None => return super::require_link(&ctx, &data).await, }; let mapper = mapper.cow_to_ascii_lowercase(); // Retrieve the user and their top scores let user_args = UserArgs::new(user.as_str(), mode); let score_args = ScoreArgs::top(100).with_combo(); let (mut user, mut scores) = match get_user_and_scores(&ctx, user_args, &score_args).await { Ok((user, scores)) => (user, scores), Err(OsuError::NotFound) => { let content = format!("User `{user}` was not found"); return data.error(&ctx, content).await; } Err(err) => { let _ = data.error(&ctx, OSU_API_ISSUE).await; return Err(err.into()); } }; // Overwrite default mode user.mode = mode; // Process user and their top scores for tracking process_osu_tracking(&ctx, &mut scores, Some(&user)).await; let scores: Vec<_> = scores .into_iter() .enumerate() .filter(|(_, score)| { let map = 
&score.map.as_ref().unwrap(); let mapset = &score.mapset.as_ref().unwrap(); // Filter converts if map.mode != mode { return false; } // Either the version contains the mapper name (guest diff'd by mapper) // or the map is created by mapper name and not guest diff'd by someone else let version = map.version.to_lowercase(); version.contains(mapper.as_ref()) || (mapset.creator_name.to_lowercase().as_str() == mapper.as_ref() && !matcher::is_guest_diff(&version)) }) .collect(); // Accumulate all necessary data let content = match mapper.as_ref() { "sotarks" => { let amount = scores.len(); let mut content = format!( "I found {amount} Sotarks map{plural} in `{name}`'s top100, ", amount = amount, plural = if amount != 1 { "s" } else { "" }, name = user.username, ); let to_push = match amount { 0 => "I'm proud \\:)", 1..=4 => "that's already too many...", 5..=8 => "kinda sad \\:/", 9..=15 => "pretty sad \\:(", 16..=25 => "this is so sad \\:((", 26..=35 => "this needs to stop this", 36..=49 => "that's a serious problem...", 50 => "that's half. HALF.", 51..=79 => "how do you sleep at night...", 80..=89 => "so close to ultimate disaster...", 90..=99 => "i'm not even mad, that's just impressive", 100 => "you did it. 
\"Congrats\".", _ => "wait how did you do that", }; content.push_str(to_push); content } _ => format!( "{} of `{}`'{} top score maps were mapped by `{mapper}`", scores.len(), user.username, if user.username.ends_with('s') { "" } else { "s" }, ), }; let sort_by = ScoreOrder::Pp; let farm = HashMap::new(); let builder = if scores.is_empty() { MessageBuilder::new().embed(content) } else { let pages = numbers::div_euclid(5, scores.len()); let embed_fut = TopEmbed::new( &user, scores.iter().take(5), &ctx, sort_by, &farm, (1, pages), ); let data = embed_fut.await; let embed = data.into_builder().build(); MessageBuilder::new().content(content).embed(embed) }; let response_raw = data.create_message(&ctx, builder).await?; // Skip pagination if too few entries if scores.len() <= 5 { return Ok(()); } let response = response_raw.model().await?; // Pagination let sort_by = TopOrder::Other(sort_by); let pagination = TopPagination::new(response, user, scores, sort_by, farm, Arc::clone(&ctx)); let owner = data.author()?.id; tokio::spawn(async move { if let Err(err) = pagination.start(&ctx, owner, 60).await { warn!("{:?}", Report::new(err)); } }); Ok(()) } #[command] #[short_desc("How many maps of a user's top100 are made by the given mapper?")] #[long_desc( "Display the top plays of a user which were mapped by the given mapper.\n\ Specify the __user first__ and the __mapper second__.\n\ Unlike the mapper count of the profile command, this command considers not only \ the map's creator, but also tries to check if the map is a guest difficulty." 
)] #[usage("[username] [mapper]")] #[example("badewanne3 \"Hishiro Chizuru\"", "monstrata monstrata")] pub async fn mapper(ctx: Arc<Context>, data: CommandData) -> BotResult<()> { match data { CommandData::Message { msg, mut args, num } => { match MapperArgs::args(&ctx, &mut args, msg.author.id, None).await { Ok(Ok(mut mapper_args)) => { mapper_args.config.mode.get_or_insert(GameMode::STD); _mapper(ctx, CommandData::Message { msg, args, num }, mapper_args).await } Ok(Err(content)) => msg.error(&ctx, content).await, Err(why) => { let _ = msg.error(&ctx, GENERAL_ISSUE).await; Err(why) } } } CommandData::Interaction { command } => super::slash_top(ctx, *command).await, } } #[command] #[short_desc("How many maps of a mania user's top100 are made by the given mapper?")] #[long_desc( "Display the top plays of a mania user which were mapped by the given mapper.\n\ Specify the __user first__ and the __mapper second__.\n\ Unlike the mapper count of the profile command, this command considers not only \ the map's creator, but also tries to check if the map is a guest difficulty.\n\ If the `-convert` / `-c` argument is specified, I will __not__ count any maps \ that aren't native mania maps." 
)] #[usage("[username] [mapper] [-convert]")] #[example("badewanne3 \"Hishiro Chizuru\"", "monstrata monstrata")] #[aliases("mapperm")] pub async fn mappermania(ctx: Arc<Context>, data: CommandData) -> BotResult<()> { match data { CommandData::Message { msg, mut args, num } => { match MapperArgs::args(&ctx, &mut args, msg.author.id, None).await { Ok(Ok(mut mapper_args)) => { mapper_args.config.mode = Some(GameMode::MNA); _mapper(ctx, CommandData::Message { msg, args, num }, mapper_args).await } Ok(Err(content)) => msg.error(&ctx, content).await, Err(why) => { let _ = msg.error(&ctx, GENERAL_ISSUE).await; Err(why) } } } CommandData::Interaction { command } => super::slash_top(ctx, *command).await, } } #[command] #[short_desc("How many maps of a taiko user's top100 are made by the given mapper?")] #[long_desc( "Display the top plays of a taiko user which were mapped by the given mapper.\n\ Specify the __user first__ and the __mapper second__.\n\ Unlike the mapper count of the profile command, this command considers not only \ the map's creator, but also tries to check if the map is a guest difficulty.\n\ If the `-convert` / `-c` argument is specified, I will __not__ count any maps \ that aren't native taiko maps." 
)] #[usage("[username] [mapper] [-convert]")] #[example("badewanne3 \"Hishiro Chizuru\"", "monstrata monstrata")] #[aliases("mappert")] pub async fn mappertaiko(ctx: Arc<Context>, data: CommandData) -> BotResult<()> { match data { CommandData::Message { msg, mut args, num } => { match MapperArgs::args(&ctx, &mut args, msg.author.id, None).await { Ok(Ok(mut mapper_args)) => { mapper_args.config.mode = Some(GameMode::TKO); _mapper(ctx, CommandData::Message { msg, args, num }, mapper_args).await } Ok(Err(content)) => msg.error(&ctx, content).await, Err(why) => { let _ = msg.error(&ctx, GENERAL_ISSUE).await; Err(why) } } } CommandData::Interaction { command } => super::slash_top(ctx, *command).await, } } #[command] #[short_desc("How many maps of a ctb user's top100 are made by the given mapper?")] #[long_desc( "Display the top plays of a ctb user which were mapped by the given mapper.\n\ Specify the __user first__ and the __mapper second__.\n\ Unlike the mapper count of the profile command, this command considers not only \ the map's creator, but also tries to check if the map is a guest difficulty.\n\ If the `-convert` / `-c` argument is specified, I will __not__ count any maps \ that aren't native ctb maps." 
)] #[usage("[username] [mapper] [-convert]")] #[example("badewanne3 \"Hishiro Chizuru\"", "monstrata monstrata")] #[aliases("mapperc")] async fn mapperctb(ctx: Arc<Context>, data: CommandData) -> BotResult<()> { match data { CommandData::Message { msg, mut args, num } => { match MapperArgs::args(&ctx, &mut args, msg.author.id, None).await { Ok(Ok(mut mapper_args)) => { mapper_args.config.mode = Some(GameMode::CTB); _mapper(ctx, CommandData::Message { msg, args, num }, mapper_args).await } Ok(Err(content)) => msg.error(&ctx, content).await, Err(why) => { let _ = msg.error(&ctx, GENERAL_ISSUE).await; Err(why) } } } CommandData::Interaction { command } => super::slash_top(ctx, *command).await, } } #[command] #[short_desc("How many maps of a user's top100 are made by Sotarks?")] #[long_desc( "How many maps of a user's top100 are made by Sotarks?\n\ Unlike the mapper count of the profile command, this command considers not only \ the map's creator, but also tries to check if the map is a guest difficulty." )] #[usage("[username]")] #[example("badewanne3")] pub async fn sotarks(ctx: Arc<Context>, data: CommandData) -> BotResult<()> { match data { CommandData::Message { msg, mut args, num } => { match MapperArgs::args(&ctx, &mut args, msg.author.id, Some("sotarks")).await { Ok(Ok(mut mapper_args)) => { mapper_args.config.mode.get_or_insert(GameMode::STD); _mapper(ctx, CommandData::Message { msg, args, num }, mapper_args).await } Ok(Err(content)) => msg.error(&ctx, content).await, Err(why) => { let _ = msg.error(&ctx, GENERAL_ISSUE).await; Err(why) } } } CommandData::Interaction { command } => super::slash_top(ctx, *command).await, } } pub async fn slash_mapper(ctx: Arc<Context>, mut command: ApplicationCommand) -> BotResult<()> { let options = command.yoink_options(); match MapperArgs::slash(&ctx, &command, options).await? 
{ Ok(args) => _mapper(ctx, command.into(), args).await, Err(content) => command.error(&ctx, content).await, } } struct MapperArgs { config: UserConfig, mapper: Username, } impl MapperArgs { async fn args( ctx: &Context, args: &mut Args<'_>, author_id: Id<UserMarker>, mapper: Option<&str>, ) -> DoubleResultCow<Self> { let mut config = ctx.user_config(author_id).await?; let (name, mapper) = match args.next() { Some(first) => match mapper { Some(mapper) => (Some(first), mapper), None => match args.next() { Some(second) => (Some(first), second), None => (None, first), }, }, None => match mapper { Some(mapper) => (None, mapper), None => { let content = "You need to specify at least one osu username for the mapper. \ If you're not linked, you must specify at least two names."; return Ok(Err(content.into())); } }, }; if let Some(name) = name { match check_user_mention(ctx, name).await? { Ok(osu) => config.osu = Some(osu), Err(content) => return Ok(Err(content)), } } let mapper = match check_user_mention(ctx, mapper).await? { Ok(osu) => osu.into_username(), Err(content) => return Ok(Err(content)), }; Ok(Ok(Self { config, mapper })) } async fn slash( ctx: &Context, command: &ApplicationCommand, options: Vec<CommandDataOption>, ) -> DoubleResultCow<Self> { let mut config = ctx.user_config(command.user_id()?).await?; let mut mapper = None; for option in options { match option.value { CommandOptionValue::String(value) => match option.name.as_str() { NAME => config.osu = Some(value.into()), "mapper" => mapper = Some(value.into()), MODE => config.mode = parse_mode_option(&value), _ => return Err(Error::InvalidCommandOptions), }, CommandOptionValue::User(value) => match option.name.as_str() { DISCORD => match parse_discord(ctx, value).await? 
{ Ok(osu) => config.osu = Some(osu), Err(content) => return Ok(Err(content)), }, _ => return Err(Error::InvalidCommandOptions), }, _ => return Err(Error::InvalidCommandOptions), } } let args = Self { mapper: mapper.ok_or(Error::InvalidCommandOptions)?, config, }; Ok(Ok(args)) } } pub fn define_mapper() -> MyCommand { let mapper = MyCommandOption::builder("mapper", "Specify a mapper username").string(Vec::new(), false); let mode = option_mode(); let name = option_name(); let discord = option_discord(); let mapper_help = "Count the top plays on maps of the given mapper.\n\ It will try to consider guest difficulties so that if a map was created by someone else \ but the given mapper made the guest diff, it will count.\n\ Similarly, if the given mapper created the mapset but someone else guest diff'd, \ it will not count.\n\ This does not always work perfectly, like when mappers renamed or when guest difficulties don't have \ common difficulty labels like `X's Insane`"; MyCommand::new("mapper", "Count the top plays on maps of the given mapper") .help(mapper_help) .options(vec![mapper, mode, name, discord]) }
use super::Point;
use std::io::{self, Write};

/// Write `segments` to `w` as a minimal GPX 1.1 document.
///
/// Each `&[Point]` becomes one `<trkseg>` wrapped in its own `<trk>`.
/// Latitude, longitude and altitude are emitted verbatim from the point;
/// the point's fractional UNIX timestamp is rendered as an RFC 3339
/// `<time>` with millisecond precision (UTC, `Z` suffix).
pub fn write_gpx<'a>(mut w: impl Write, segments: impl Iterator<Item=&'a [Point]>) -> io::Result<()> {
    writeln!(w, r#"<?xml version="1.0" encoding="utf-8"?>
<gpx xmlns="http://www.topografix.com/GPX/1/1" version="1.1" creator="alpinereplay-rs/1">"#)?;
    for seg in segments {
        writeln!(w, "<trk><trkseg>")?;
        for point in seg {
            // Split the f64 timestamp into whole seconds + nanoseconds for
            // chrono. NOTE(review): assumes point.time is a non-negative
            // UNIX timestamp — for negative values floor/fract would split
            // inconsistently; confirm upstream guarantees this.
            let time = chrono::offset::TimeZone::timestamp(&chrono::Utc, point.time.floor() as i64, (point.time.fract() * 1_000_000_000.).floor() as u32);
            writeln!(w, r#"<trkpt lat="{}" lon="{}"><ele>{}</ele><time>{}</time></trkpt>"#, point.lat, point.lon, point.alt, time.to_rfc3339_opts(chrono::SecondsFormat::Millis, true))?;
        }
        writeln!(w, "</trkseg></trk>")?;
    }
    writeln!(w, "</gpx>")?;
    Ok(())
}
use util::*;

/// Count records (blank-line-separated blocks in the input file named by the
/// first CLI argument) that mention every required field key.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let timer = Timer::new();

    // Required field keys; a record is valid only if it contains all of them.
    const REQUIRED_FIELDS: [&str; 7] = ["byr", "iyr", "eyr", "hgt", "hcl", "ecl", "pid"];

    let count = input::vec::<String>(&std::env::args().nth(1).unwrap(), "\n\n")
        .iter()
        .map(|s| s.replace("\n", " "))
        // `all` short-circuits on the first missing field, unlike the old
        // `res = res && ...` fold which kept scanning; the const array also
        // avoids re-allocating a Vec for every record.
        .filter(|s| REQUIRED_FIELDS.iter().all(|field| s.contains(field)))
        .count();

    println!("{}", count);
    timer.print();
    Ok(())
}
use serde::Serialize; use uvm_core::unity::{Version, Manifest, Modules, Module}; use std::collections::HashSet; use std::iter::FromIterator; use std::path::Path; use stringreader::StringReader; mod fixures; fn save_json_output<P: AsRef<Path>, T: ?Sized + Serialize>(dir:P, file_name: &str, value:&T) -> std::io::Result<()> { use std::fs::OpenOptions; use std::fs::DirBuilder; use std::io::{Error,ErrorKind}; let base = dir.as_ref(); DirBuilder::new().recursive(true).create(base)?; OpenOptions::new().write(true).create(true).open(base.join(file_name)) .and_then(|f| serde_json::to_writer_pretty(f, value).map_err(|err| Error::new(ErrorKind::Other, err))) } macro_rules! generate_modules_json { ($($id:ident, $version:expr, $fixture_name:ident),*) => { $( #[test] fn $id() { let version = $version; let reader = StringReader::new(fixures::manifest::$fixture_name); let manifest = Manifest::from_reader(&version, reader).expect("a manifest"); let mut a:Modules = manifest.into_modules(); let mut b:Modules = serde_json::from_str(fixures::module::$fixture_name).expect("a deserialized module"); a.sort(); b.sort(); let name = format!("outputs/{}", stringify!($id)); let base = Path::new(&name); save_json_output(&base, &format!("{}_l.json", &version), &a).expect("a saved output file"); save_json_output(&base, &format!("{}_r.json", &version), &b).expect("a saved output file"); let hash_set_a:HashSet<Module> = HashSet::from_iter(a.into_iter()); let hash_set_b:HashSet<Module> = HashSet::from_iter(b.into_iter()); let diff = hash_set_a.difference(&hash_set_b); assert_eq!(hash_set_a.len(), hash_set_b.len(), "has same length"); assert_eq!(diff.count(), 0, "has no differences"); } )* }; } #[cfg(not(target_os = "linux"))] generate_modules_json![ generates_modules_2022_2, Version::f(2022, 2, 6, 1), UNITY_2022_2_6_F_1, generates_modules_2022_1, Version::a(2022, 1, 0, 13), UNITY_2022_1_0_A_13, generates_modules_2021_1, Version::f(2021, 1, 28, 1), UNITY_2021_1_28_F_1, generates_modules_2021_2, 
Version::f(2021, 2, 2, 1), UNITY_2021_2_2_F_1, generates_modules_2020_1, Version::f(2020, 1, 17, 1), UNITY_2020_1_17_F_1, generates_modules_2020_2, Version::f(2020, 2, 7, 1), UNITY_2020_2_7_F_1, generates_modules_2020_3, Version::f(2020, 3, 22, 1), UNITY_2020_3_22_F_1, generates_modules_2019_1, Version::f(2019, 1, 14, 1), UNITY_2019_1_14_F_1, generates_modules_2019_2, Version::f(2019, 2, 21, 1), UNITY_2019_2_21_F_1, generates_modules_2019_3, Version::f(2019, 3, 15, 1), UNITY_2019_3_15_F_1, generates_modules_2019_4, Version::f(2019, 4, 32, 1), UNITY_2019_4_32_F_1, generates_modules_2018_4, Version::f(2018, 4, 36, 1), UNITY_2018_4_36_F_1, generates_modules_2018_3, Version::f(2018, 3, 14, 1), UNITY_2018_3_14_F_1, generates_modules_2018_2, Version::f(2018, 2, 21, 1), UNITY_2018_2_21_F_1, generates_modules_2018_1, Version::f(2018, 1, 9, 2), UNITY_2018_1_9_F_2, generates_modules_2017_4, Version::f(2017, 4, 40, 1), UNITY_2017_4_40_F_1, generates_modules_2017_3, Version::f(2017, 3, 1, 1), UNITY_2017_3_1_F_1, generates_modules_2017_2, Version::f(2017, 2, 5, 1), UNITY_2017_2_5_F_1, generates_modules_2017_1, Version::f(2017, 1, 5, 1), UNITY_2017_1_5_F_1 ]; #[cfg(target_os = "linux")] generate_modules_json![ generates_modules_2022_2, Version::f(2022, 2, 6, 1), UNITY_2022_2_6_F_1, generates_modules_2022_1, Version::a(2022, 1, 0, 13), UNITY_2022_1_0_A_13, generates_modules_2021_1, Version::f(2021, 1, 28, 1), UNITY_2021_1_28_F_1, generates_modules_2021_2, Version::f(2021, 2, 2, 1), UNITY_2021_2_2_F_1, generates_modules_2020_1, Version::f(2020, 1, 17, 1), UNITY_2020_1_17_F_1, generates_modules_2020_2, Version::f(2020, 2, 7, 1), UNITY_2020_2_7_F_1, generates_modules_2020_3, Version::f(2020, 3, 22, 1), UNITY_2020_3_22_F_1, generates_modules_2019_1, Version::f(2019, 1, 14, 1), UNITY_2019_1_14_F_1, generates_modules_2019_2, Version::f(2019, 2, 21, 1), UNITY_2019_2_21_F_1, generates_modules_2019_3, Version::f(2019, 3, 15, 1), UNITY_2019_3_15_F_1, generates_modules_2019_4, 
Version::f(2019, 4, 32, 1), UNITY_2019_4_32_F_1, generates_modules_2018_4, Version::f(2018, 4, 36, 1), UNITY_2018_4_36_F_1, generates_modules_2018_3, Version::f(2018, 3, 14, 1,), UNITY_2018_3_14_F_1, generates_modules_2018_2, Version::f(2018, 2, 21, 1,), UNITY_2018_2_21_F_1, generates_modules_2018_1, Version::f(2018, 1, 9, 2,), UNITY_2018_1_9_F_2, generates_modules_2017_4, Version::f(2017, 4, 40, 1), UNITY_2017_4_40_F_1 ];
// Crate-visible hash-construction modules.
pub(crate) mod sponge;
pub(crate) mod poseidon;
pub(crate) mod rescue;
pub(crate) mod rescue_prime;

// Private helper modules.
mod sbox;
mod matrix;

#[cfg(test)]
mod tests;
//! # libinjection //! //! Rust bindings for (libinjection)][1] //! //! [1]: https://github.com/client9/libinjection //! //! ## How to Use //! //! ``` //! extern crate libinjection; //! //! use libinjection::{sqli, xss}; //! //! fn main() { //! let (is_sqli, fingerprint) = sqli("' OR '1'='1' --").unwrap(); //! assert!(is_sqli); //! assert_eq!("s&sos", fingerprint); //! //! let is_xss = xss("<script type='text/javascript'>alert('xss');</script>").unwrap(); //! assert!(is_xss); //! } //! ``` //! #![doc(html_root_url = "https://docs.rs/libinjection")] #![deny(missing_docs)] #![allow(non_upper_case_globals)] #![allow(non_camel_case_types)] #![allow(non_snake_case)] mod bindings; mod wrapper; pub use wrapper::{sqli, xss};
// Auto-generated register-access code in the svd2rust style (see the API link
// in the PDCRB_SPEC doc below) for the PWR Port B pull-down control register.
#[doc = "Register `PDCRB` reader"] pub type R = crate::R<PDCRB_SPEC>;
#[doc = "Register `PDCRB` writer"] pub type W = crate::W<PDCRB_SPEC>;
#[doc = "Field `PD6` reader - Port B pull-down bit i (i = 15 to 0) Setting PDi bit while the APC bit of the PWR_CR3 register is set activates a pull-down device on the PB\\[i\\] I/O. On STM32C011xx, only PD7 and PD6 are available"]
pub type PD6_R = crate::BitReader;
#[doc = "Field `PD6` writer - Port B pull-down bit i (i = 15 to 0) Setting PDi bit while the APC bit of the PWR_CR3 register is set activates a pull-down device on the PB\\[i\\] I/O. On STM32C011xx, only PD7 and PD6 are available"]
pub type PD6_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PD7` reader - Port B pull-down bit i (i = 15 to 0) Setting PDi bit while the APC bit of the PWR_CR3 register is set activates a pull-down device on the PB\\[i\\] I/O. On STM32C011xx, only PD7 and PD6 are available"]
pub type PD7_R = crate::BitReader;
#[doc = "Field `PD7` writer - Port B pull-down bit i (i = 15 to 0) Setting PDi bit while the APC bit of the PWR_CR3 register is set activates a pull-down device on the PB\\[i\\] I/O. On STM32C011xx, only PD7 and PD6 are available"]
pub type PD7_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read-proxy accessors: extract the PD6/PD7 bits from a snapshot of the register.
impl R {
    #[doc = "Bit 6 - Port B pull-down bit i (i = 15 to 0) Setting PDi bit while the APC bit of the PWR_CR3 register is set activates a pull-down device on the PB\\[i\\] I/O. On STM32C011xx, only PD7 and PD6 are available"]
    #[inline(always)]
    pub fn pd6(&self) -> PD6_R { PD6_R::new(((self.bits >> 6) & 1) != 0) }
    #[doc = "Bit 7 - Port B pull-down bit i (i = 15 to 0) Setting PDi bit while the APC bit of the PWR_CR3 register is set activates a pull-down device on the PB\\[i\\] I/O. On STM32C011xx, only PD7 and PD6 are available"]
    #[inline(always)]
    pub fn pd7(&self) -> PD7_R { PD7_R::new(((self.bits >> 7) & 1) != 0) }
}
// Write-proxy accessors: obtain bit writers for PD6/PD7, plus raw-bits escape hatch.
impl W {
    #[doc = "Bit 6 - Port B pull-down bit i (i = 15 to 0) Setting PDi bit while the APC bit of the PWR_CR3 register is set activates a pull-down device on the PB\\[i\\] I/O. On STM32C011xx, only PD7 and PD6 are available"]
    #[inline(always)]
    #[must_use]
    pub fn pd6(&mut self) -> PD6_W<PDCRB_SPEC, 6> { PD6_W::new(self) }
    #[doc = "Bit 7 - Port B pull-down bit i (i = 15 to 0) Setting PDi bit while the APC bit of the PWR_CR3 register is set activates a pull-down device on the PB\\[i\\] I/O. On STM32C011xx, only PD7 and PD6 are available"]
    #[inline(always)]
    #[must_use]
    pub fn pd7(&mut self) -> PD7_W<PDCRB_SPEC, 7> { PD7_W::new(self) }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Marker type tying the R/W proxies to this register's address and width.
#[doc = "PWR Port B pull-down control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pdcrb::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pdcrb::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct PDCRB_SPEC;
impl crate::RegisterSpec for PDCRB_SPEC {
    type Ux = u32; // 32-bit register
}
#[doc = "`read()` method returns [`pdcrb::R`](R) reader structure"]
impl crate::Readable for PDCRB_SPEC {}
#[doc = "`write(|w| ..)` method takes [`pdcrb::W`](W) writer structure"]
impl crate::Writable for PDCRB_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets PDCRB to value 0"]
impl crate::Resettable for PDCRB_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
#[macro_use] extern crate log; #[macro_use] extern crate serde_derive; use rsocket_rust::extension::MimeType; use rsocket_rust_messaging::*; fn init() { let _ = env_logger::builder() .format_timestamp_millis() .is_test(true) .try_init(); } #[derive(Serialize, Deserialize, Debug, Default)] pub struct Token { app: String, access: String, } #[derive(Serialize, Deserialize, Debug, Default)] pub struct Student { id: i64, name: String, birth: String, } #[derive(Serialize, Deserialize, Debug, Default)] pub struct Tracing { id: String, ts: i64, } #[derive(Serialize, Deserialize, Debug)] pub struct Response<T> { code: i32, message: Option<String>, data: T, } #[tokio::main] #[test] #[ignore] async fn test_messaging() { init(); let token = || Token { app: "xxx".to_owned(), access: "yyy".to_owned(), }; let requester = Requester::builder() .setup_metadata(token(), MimeType::APPLICATION_JSON) .setup_data(token()) .connect_tcp("127.0.0.1", 7878) .build() .await .expect("Connect failed!"); // TEST MONO BEGIN let res: Response<Student> = requester .route("student.v1.upsert") .metadata(Tracing::default(), "application/json") .metadata_raw("foobar", "message/x.rsocket.authentication.bearer.v0") .data(next_post()) .retrieve_mono() .await .block() .expect("Retrieve failed!") .expect("Empty result!"); info!("------> RESPONSE: {:?}", res); // TEST FLUX BEGIN let res: Vec<Student> = requester .route("students.v1") .data(next_post()) .retrieve_flux() .block() .await .expect("Retrieve failed!"); for it in res.iter() { info!("===> NEXT: {:?}", it); } requester .route("students.v1") .data(next_post()) .retrieve_flux() .foreach(|it: Student| { info!("===> FOREACH: {:?}", it); }) .await .expect("Retrieve failed!"); } fn next_post() -> Student { Student { id: 1234, name: "Jeffsky".to_owned(), birth: "2020-01-01".to_owned(), } }
// Diesel schema for a Sakila-style DVD-rental database: one `table!` block per
// table (primary key in parentheses), then the foreign-key `joinable!`
// declarations and the shared query whitelist.
table! {
    actor (actor_id) {
        actor_id -> Int4,
        first_name -> Text,
        last_name -> Text,
        last_update -> Timestamptz,
    }
}

table! {
    address (address_id) {
        address_id -> Int4,
        // Column renamed to avoid clashing with the table name in Rust.
        #[sql_name = "address"]
        address_name -> Text,
        address2 -> Nullable<Text>,
        district -> Text,
        city_id -> Int4,
        postal_code -> Nullable<Text>,
        phone -> Text,
        last_update -> Timestamptz,
    }
}

table! {
    category (category_id) {
        category_id -> Int4,
        name -> Text,
        last_update -> Timestamptz,
    }
}

table! {
    city (city_id) {
        city_id -> Int4,
        #[sql_name = "city"]
        city_name -> Text,
        country_id -> Int4,
        last_update -> Timestamptz,
    }
}

table! {
    country (country_id) {
        country_id -> Int4,
        #[sql_name = "country"]
        country_name -> Text,
        last_update -> Timestamptz,
    }
}

table! {
    customer (customer_id) {
        customer_id -> Int4,
        store_id -> Int4,
        first_name -> Text,
        last_name -> Text,
        email -> Nullable<Text>,
        address_id -> Int4,
        activebool -> Bool,
        create_date -> Date,
        last_update -> Nullable<Timestamptz>,
        active -> Nullable<Int4>,
    }
}

table! {
    // Extra imports are needed inside this block because `rating` uses a
    // custom SQL type mapping.
    use crate::models::MyEnumMapping;
    use diesel::sql_types::*;

    film (film_id) {
        film_id -> Int4,
        title -> Text,
        description -> Nullable<Text>,
        release_year -> Nullable<Int4>,
        language_id -> Int4,
        original_language_id -> Nullable<Int4>,
        rental_duration -> Int4,
        rental_rate -> Numeric,
        length -> Nullable<Int4>,
        replacement_cost -> Numeric,
        rating -> MyEnumMapping,
        last_update -> Timestamptz,
        special_features -> Nullable<Array<Text>>,
        fulltext -> Text,
    }
}

table! {
    film_actor (actor_id, film_id) {
        actor_id -> Int4,
        film_id -> Int4,
        last_update -> Timestamptz,
    }
}

table! {
    film_category (film_id, category_id) {
        film_id -> Int4,
        category_id -> Int4,
        last_update -> Timestamptz,
    }
}

table! {
    inventory (inventory_id) {
        inventory_id -> Int4,
        film_id -> Int4,
        store_id -> Int4,
        last_update -> Timestamptz,
    }
}

table! {
    language (language_id) {
        language_id -> Int4,
        name -> Bpchar,
        last_update -> Timestamptz,
    }
}

table! {
    rental (rental_id) {
        rental_id -> Int4,
        rental_date -> Timestamptz,
        inventory_id -> Int4,
        customer_id -> Int4,
        return_date -> Nullable<Timestamptz>,
        staff_id -> Int4,
        last_update -> Timestamptz,
    }
}

table! {
    staff (staff_id) {
        staff_id -> Int4,
        first_name -> Text,
        last_name -> Text,
        address_id -> Int4,
        email -> Nullable<Text>,
        store_id -> Int4,
        active -> Bool,
        username -> Text,
        password -> Nullable<Text>,
        last_update -> Timestamptz,
        picture -> Nullable<Bytea>,
    }
}

table! {
    store (store_id) {
        store_id -> Int4,
        manager_staff_id -> Int4,
        address_id -> Int4,
        last_update -> Timestamptz,
    }
}

// Foreign-key relationships, enabling `.inner_join()` between these tables.
joinable!(address -> city (city_id));
joinable!(city -> country (country_id));
joinable!(customer -> address (address_id));
joinable!(customer -> store (store_id));
joinable!(film_actor -> actor (actor_id));
joinable!(film_actor -> film (film_id));
joinable!(film_category -> category (category_id));
joinable!(film_category -> film (film_id));
joinable!(inventory -> film (film_id));
joinable!(inventory -> store (store_id));
joinable!(rental -> customer (customer_id));
joinable!(rental -> inventory (inventory_id));
joinable!(rental -> staff (staff_id));
joinable!(staff -> address (address_id));
joinable!(staff -> store (store_id));
joinable!(store -> address (address_id));

// Lets any of these tables appear together in a single query.
allow_tables_to_appear_in_same_query!(
    actor,
    address,
    category,
    city,
    country,
    customer,
    film,
    film_actor,
    film_category,
    inventory,
    language,
    rental,
    staff,
    store,
);
use crate::tokenizer::{PreTokenizedString, PreTokenizer, Result, SplitDelimiterBehavior}; use unicode_categories::UnicodeCategories; fn is_bert_punc(x: char) -> bool { char::is_ascii_punctuation(&x) || x.is_punctuation() } #[derive(Copy, Clone, Debug)] pub struct BertPreTokenizer; impl_serde_unit_struct!(BertVisitor, BertPreTokenizer); impl PreTokenizer for BertPreTokenizer { fn pre_tokenize(&self, pretokenized: &mut PreTokenizedString) -> Result<()> { pretokenized.split(|_, sub| { Ok(sub .split(char::is_whitespace, SplitDelimiterBehavior::Removed)? .into_iter() .flat_map(|sub| { let result = sub.split(is_bert_punc, SplitDelimiterBehavior::Isolated); if let Err(e) = result { itertools::Either::Left(std::iter::once(Err(e))) } else { itertools::Either::Right(result.unwrap().into_iter().map(Ok)) } }) .collect::<Result<Vec<_>>>()?) }) } } #[cfg(test)] mod tests { use super::*; use crate::OffsetReferential; #[test] fn basic() { let pretok = BertPreTokenizer; let mut pretokenized: PreTokenizedString = "Hey friend! How are you?!?".into(); pretok.pre_tokenize(&mut pretokenized).unwrap(); assert_eq!( pretokenized.get_normalized(OffsetReferential::Original), vec![ ("Hey", (0, 3)), ("", (3, 4)), ("friend", (4, 10)), ("!", (10, 11)), ("", (11, 12)), ("", (12, 13)), ("", (13, 14)), ("", (14, 15)), ("", (15, 16)), ("How", (16, 19)), ("", (19, 20)), ("are", (20, 23)), ("", (23, 24)), ("you", (24, 27)), ("?", (27, 28)), ("!", (28, 29)), ("?", (29, 30)), ] ); } }
// Per-frame driving AI: picks each car's target navmesh node and applies
// simple steering/acceleration toward it.
use crate::cars::data::CarObjective::{Route, Simple, Temporary};
use crate::cars::data::{CarComponent, CarObjective};
use crate::engine_interaction::TimeInfo;
use crate::map_model::{Map, NavMesh};
use crate::physics::PhysicsWorld;
use crate::physics::{Kinematics, Transform};
use cgmath::MetricSpace;
use cgmath::{Angle, InnerSpace, Vector2};
use specs::prelude::*;
use specs::shred::PanicHandler;

#[derive(Default)]
pub struct CarDecision;

// Tuning constants. Units are presumably world-units and seconds (accelerations
// per second, distances in world units) — confirm against the physics layer.
pub const CAR_ACCELERATION: f32 = 3.0;
pub const CAR_DECELERATION: f32 = 9.0;
pub const MIN_TURNING_RADIUS: f32 = 3.0;
pub const OBJECTIVE_OK_DIST: f32 = 4.0;
pub const ANG_ACC: f32 = 1.0;

// Resources and component storages this system needs each frame.
#[derive(SystemData)]
pub struct CarDecisionSystemData<'a> {
    map: Read<'a, Map, PanicHandler>,
    time: Read<'a, TimeInfo>,
    coworld: Read<'a, PhysicsWorld, PanicHandler>,
    transforms: WriteStorage<'a, Transform>,
    kinematics: WriteStorage<'a, Kinematics>,
    cars: WriteStorage<'a, CarComponent>,
}

impl<'a> System<'a> for CarDecision {
    type SystemData = CarDecisionSystemData<'a>;

    fn run(&mut self, mut data: Self::SystemData) {
        let cow = data.coworld;
        let navmesh = &data.map.navmesh;
        let time = data.time;
        // For every entity with all three components: retarget, then steer.
        (&mut data.transforms, &mut data.kinematics, &mut data.cars)
            .join()
            .for_each(|(trans, kin, car)| {
                car_objective_update(car, &time, trans, &navmesh);
                car_physics(&cow, &navmesh, &time, trans, kin, car);
            });
    }
}

// Choose or refresh the car's target node on the navmesh.
fn car_objective_update(
    car: &mut CarComponent,
    time: &TimeInfo,
    trans: &Transform,
    navmesh: &NavMesh,
) {
    match car.objective {
        // No usable objective (Simple/Route are treated the same as None
        // here): retarget to the closest navmesh node, if any exists.
        CarObjective::None | Simple(_) | Route(_) => {
            car.objective = navmesh
                .closest_node(trans.position())
                .map_or(CarObjective::None, Temporary);
        }
        CarObjective::Temporary(x) => {
            if let Some(p) = navmesh.get(x).map(|x| x.pos) {
                // Node reached (squared-distance check) and its traffic
                // control is not red: hop to a random neighbour.
                if p.distance2(trans.position()) < OBJECTIVE_OK_DIST * OBJECTIVE_OK_DIST
                    && !navmesh[&x].control.get_behavior(time.time_seconds).is_red()
                {
                    let neighs = navmesh.get_neighs(x);
                    // rand::random::<f32>() is in [0, 1), so after the
                    // emptiness check below `r as usize` is a valid index.
                    let r = rand::random::<f32>() * (neighs.len() as f32);
                    if neighs.is_empty() {
                        return;
                    }
                    let new_obj = &neighs[r as usize].to;
                    car.objective = Temporary(*new_obj);
                }
            } else {
                // Target node no longer exists in the mesh.
                car.objective = CarObjective::None;
            }
        }
    }
}

// Steering + longitudinal control: decide desired speed/direction (delegated
// to CarComponent::calc_decision) and integrate toward them for this frame.
fn car_physics(
    coworld: &PhysicsWorld,
    navmesh: &NavMesh,
    time: &TimeInfo,
    trans: &mut Transform,
    kin: &mut Kinematics,
    car: &mut CarComponent,
) {
    // Speed signed by whether velocity points along the car's heading.
    let speed: f32 = kin.velocity.magnitude() * kin.velocity.dot(car.direction).signum();
    // NOTE(review): when speed == 0 this divides by zero, producing NaN; the
    // comparison below is then false so the branch is skipped — confirm that
    // this is the intended behavior rather than an oversight.
    let dot = (kin.velocity / speed).dot(car.direction);
    if speed > 1.0 && dot.abs() < 0.9 {
        // Velocity strongly misaligned with heading: damp it instead of steering.
        let coeff = speed.max(1.0).min(9.0) / 9.0;
        kin.acceleration -= kin.velocity / coeff;
        return;
    }
    let pos = trans.position();
    // Lookahead radius grows with braking distance v^2 / (2a), capped at 40.
    let danger_length = (speed * speed / (2.0 * CAR_DECELERATION)).min(40.0);
    let neighbors = coworld.query_around(pos, 10.0 + danger_length);
    let objs = neighbors.map(|obj| (obj.pos, coworld.get_obj(obj.id)));
    car.calc_decision(navmesh, speed, time, pos, objs);
    // Move toward the desired speed, clamped by per-frame accel/decel limits.
    let speed = speed
        + ((car.desired_speed - speed)
            .min(time.delta * CAR_ACCELERATION)
            .max(-time.delta * CAR_DECELERATION));
    // Angular velocity limited by the minimum turning radius at this speed.
    let max_ang_vel = (speed.abs() / MIN_TURNING_RADIUS).min(2.0);
    let delta_ang = car.direction.angle(car.desired_dir);
    let mut ang = Vector2::unit_x().angle(car.direction);
    car.ang_velocity += time.delta * ANG_ACC;
    car.ang_velocity = car
        .ang_velocity
        .min(max_ang_vel)
        .min(3.0 * delta_ang.0.abs());
    // Rotate toward the desired direction, clamped by angular velocity.
    ang.0 += delta_ang
        .0
        .min(car.ang_velocity * time.delta)
        .max(-car.ang_velocity * time.delta);
    car.direction = Vector2::new(ang.cos(), ang.sin());
    trans.set_direction(car.direction);
    // Velocity is re-aligned to the (new) heading each frame.
    kin.velocity = car.direction * speed;
}
fn fib(x: u32) -> u32 { if x < 2 { 1 } else { fib(x - 1) + fib(x - 2) } } fn main() { println!("{}", fib(40)); }
pub struct Timer { divider_register: u8, // div timer_counter: u8, // tima timer_modulo: u8, // tma timer_control: u8, // tac internal_counter: u32, divider_counter: u32, } impl Timer { pub fn new() -> Timer { Timer { divider_register: 0, timer_counter: 0, timer_modulo: 0, timer_control: 0, internal_counter: 0, divider_counter: 0, } } pub fn read_byte(&self, address: u16) -> u8 { match address { 0xFF04 => self.divider_register, 0xFF05 => self.timer_counter, 0xFF06 => self.timer_modulo, 0xFF07 => self.timer_control, _ => unreachable!("Invalid address accessed in timer: {}", address) } } pub fn write_byte(&mut self, address: u16, value: u8) { match address { 0xFF04 => self.divider_register = 0, 0xFF05 => self.timer_counter = value, 0xFF06 => self.timer_modulo = value, 0xFF07 => self.timer_control = value, _ => unreachable!("Invalid address accessed in timer: {}", address) } } pub fn tick(&mut self, elapsed: u32) -> bool { self.divider_counter += elapsed; self.divider_register = self.divider_register.wrapping_add((self.divider_counter / 256) as u8); self.divider_counter %= 256; if self.is_enabled() { self.internal_counter += elapsed; let (updated, overflow) = self.timer_counter.overflowing_add((self.internal_counter / self.get_speed()) as u8); self.internal_counter %= self.get_speed(); self.timer_counter = if overflow { self.timer_modulo + self.internal_counter as u8 } else { updated }; if overflow { return true } } false } fn is_enabled(&self) -> bool { self.timer_control & 0b100 != 0 } fn get_speed(&self) -> u32 { match self.timer_control & 0b11 { 0b00 => 256, 0b01 => 4, 0b10 => 16, 0b11 => 64, _ => unreachable!() } } }
use std::{ collections::HashSet, net::IpAddr, sync::{Arc, Mutex}, thread::{Builder, JoinHandle}, }; use tokio::{ runtime::Runtime, sync::mpsc::{self, Sender}, }; use crate::network::dns::{resolver::Lookup, IpTable}; type PendingAddrs = HashSet<IpAddr>; const CHANNEL_SIZE: usize = 1_000; pub struct Client { cache: Arc<Mutex<IpTable>>, pending: Arc<Mutex<PendingAddrs>>, tx: Option<Sender<Vec<IpAddr>>>, handle: Option<JoinHandle<()>>, } impl Client { pub fn new<R>(resolver: R, runtime: Runtime) -> anyhow::Result<Self> where R: Lookup + Send + Sync + 'static, { let cache = Arc::new(Mutex::new(IpTable::new())); let pending = Arc::new(Mutex::new(PendingAddrs::new())); let (tx, mut rx) = mpsc::channel::<Vec<IpAddr>>(CHANNEL_SIZE); let handle = Builder::new().name("resolver".into()).spawn({ let cache = cache.clone(); let pending = pending.clone(); move || { runtime.block_on(async { let resolver = Arc::new(resolver); while let Some(ips) = rx.recv().await { for ip in ips { tokio::spawn({ let resolver = resolver.clone(); let cache = cache.clone(); let pending = pending.clone(); async move { if let Some(name) = resolver.lookup(ip).await { cache.lock().unwrap().insert(ip, name); } pending.lock().unwrap().remove(&ip); } }); } } }); } })?; Ok(Self { cache, pending, tx: Some(tx), handle: Some(handle), }) } pub fn resolve(&mut self, ips: Vec<IpAddr>) { // Remove ips that are already being resolved let ips = ips .into_iter() .filter(|ip| self.pending.lock().unwrap().insert(*ip)) .collect::<Vec<_>>(); if !ips.is_empty() { // Discard the message if the channel is full; it will be retried eventually let _ = self.tx.as_mut().unwrap().try_send(ips); } } pub fn cache(&mut self) -> IpTable { let cache = self.cache.lock().unwrap(); cache.clone() } } impl Drop for Client { fn drop(&mut self) { // Do the Option dance to be able to drop the sender so that the receiver finishes and the thread can be joined drop(self.tx.take().unwrap()); self.handle.take().unwrap().join().unwrap(); } }
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2016 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. #[repr(C)] #[derive(Debug, Copy)] pub struct mxm_ib_ep_opts { pub tl: mxm_tl_ep_opts_t, pub map_mode: mxm_ib_map_mode_t, pub drain_cq: c_int, pub cq_wmark: c_uint, pub resize_cq: c_int, pub first_sl: c_int, pub num_sls: c_uint, pub int_mode: c_uint, pub int_thresh: f64, pub exp_connectib: mxm_ternary_value_t, pub wc_mode: c_uint, pub cq_stall: mxm_ternary_value_t, pub cq_stall_loops: c_uint, pub lid_path: mxm_ib_ep_opts__bindgen_ty_1, pub max_path_bits: c_uint, pub lid_path_policy: mxm_ib_lid_path_policy_t, pub rx: mxm_ib_ep_opts__bindgen_ty_2, pub tx: mxm_ib_ep_opts__bindgen_ty_3, pub min_chunk: c_int, pub gid_index: c_uint, pub use_grh: mxm_ternary_value_t, }
// CosmWasm message/response types for a Spectrum-style yield-farming contract.
// NOTE(review): enum variants are lowercase, presumably so serde emits
// snake_case JSON keys without a rename attribute — confirm before renaming.
use cosmwasm_std::{Decimal, HumanAddr, Uint128};
use cw20::Cw20ReceiveMsg;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

// We define a custom struct for each query response
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct ConfigInfo {
    pub owner: HumanAddr,
    pub terraswap_factory: HumanAddr,
    pub spectrum_token: HumanAddr,
    pub spectrum_gov: HumanAddr,
    pub anchor_token: HumanAddr,
    pub anchor_staking: HumanAddr,
    pub anchor_gov: HumanAddr,
    pub platform: Option<HumanAddr>,
    pub controller: Option<HumanAddr>,
    pub base_denom: String,
    pub community_fee: Decimal,
    pub platform_fee: Decimal,
    pub controller_fee: Decimal,
    pub deposit_fee: Decimal,
    pub lock_start: u64,
    pub lock_end: u64,
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub enum HandleMsg {
    receive(Cw20ReceiveMsg), // Bond lp token
    // Update config
    update_config {
        owner: Option<HumanAddr>,
        platform: Option<HumanAddr>,
        controller: Option<HumanAddr>,
        community_fee: Option<Decimal>,
        platform_fee: Option<Decimal>,
        controller_fee: Option<Decimal>,
        deposit_fee: Option<Decimal>,
        lock_start: Option<u64>,
        lock_end: Option<u64>,
    },
    // Unbond lp token
    unbond {
        asset_token: HumanAddr,
        amount: Uint128,
    },
    register_asset {
        asset_token: HumanAddr,
        staking_token: HumanAddr,
        weight: u32,
        auto_compound: bool,
    },
    // Withdraw rewards
    withdraw {
        // If the asset token is not given, then all rewards are withdrawn
        asset_token: Option<HumanAddr>,
    },
    stake {
        asset_token: HumanAddr,
    },
    compound {}
}

// Payload carried inside a Cw20 `receive` message.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub enum Cw20HookMsg {
    bond {
        staker_addr: Option<HumanAddr>,
        asset_token: HumanAddr,
        compound_rate: Option<Decimal>,
    },
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub enum QueryMsg {
    config {}, // get config
    // get all vault settings
    pools {},
    // get deposited balances
    reward_info {
        staker_addr: HumanAddr,
        height: u64,
    },
    state {},
}

// We define a custom struct for each query response
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct PoolsResponse {
    pub pools: Vec<PoolItem>,
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct PoolItem {
    pub asset_token: HumanAddr,
    pub staking_token: HumanAddr,
    pub total_auto_bond_share: Uint128, // share auto bond
    pub total_stake_bond_share: Uint128,
    pub total_stake_bond_amount: Uint128, // amount stake
    pub weight: u32,
    pub auto_compound: bool,
    pub farm_share: Uint128, // MIR share
    pub state_spec_share_index: Decimal,
    pub farm_share_index: Decimal, // per stake bond share
    pub stake_spec_share_index: Decimal, // per stake bond share
    pub auto_spec_share_index: Decimal, // per auto bond share
    pub reinvest_allowance: Uint128,
}

// We define a custom struct for each query response
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct RewardInfoResponse {
    pub staker_addr: HumanAddr,
    pub reward_infos: Vec<RewardInfoResponseItem>,
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct RewardInfoResponseItem {
    pub asset_token: HumanAddr,
    pub farm_share_index: Decimal,
    pub auto_spec_share_index: Decimal,
    pub stake_spec_share_index: Decimal,
    pub bond_amount: Uint128,
    pub auto_bond_amount: Uint128,
    pub stake_bond_amount: Uint128,
    pub farm_share: Uint128,
    pub spec_share: Uint128,
    pub auto_bond_share: Uint128,
    pub stake_bond_share: Uint128,
    pub pending_farm_reward: Uint128,
    pub pending_spec_reward: Uint128,
    pub accum_spec_share: Uint128,
    pub locked_spec_share: Uint128,
    pub locked_spec_reward: Uint128,
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, JsonSchema)]
pub struct StateInfo {
    pub previous_spec_share: Uint128,
    pub spec_share_index: Decimal, // per weight
    pub total_farm_share: Uint128,
    pub total_weight: u32,
}

#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct MigrateMsg {}
use crate::data::{Primitive, Value}; use crate::prelude::*; use derive_new::new; use indexmap::IndexMap; use serde::{Deserialize, Serialize}; use std::cmp::{Ordering, PartialOrd}; use std::fmt; #[derive(Debug, Default, Eq, PartialEq, Serialize, Deserialize, Clone, new)] pub struct Dictionary { pub entries: IndexMap<String, Tagged<Value>>, } impl PartialOrd for Dictionary { fn partial_cmp(&self, other: &Dictionary) -> Option<Ordering> { let this: Vec<&String> = self.entries.keys().collect(); let that: Vec<&String> = other.entries.keys().collect(); if this != that { return this.partial_cmp(&that); } let this: Vec<&Value> = self.entries.values().map(|v| v.item()).collect(); let that: Vec<&Value> = self.entries.values().map(|v| v.item()).collect(); this.partial_cmp(&that) } } impl From<IndexMap<String, Tagged<Value>>> for Dictionary { fn from(input: IndexMap<String, Tagged<Value>>) -> Dictionary { let mut out = IndexMap::default(); for (key, value) in input { out.insert(key, value); } Dictionary::new(out) } } impl Ord for Dictionary { fn cmp(&self, other: &Dictionary) -> Ordering { let this: Vec<&String> = self.entries.keys().collect(); let that: Vec<&String> = other.entries.keys().collect(); if this != that { return this.cmp(&that); } let this: Vec<&Value> = self.entries.values().map(|v| v.item()).collect(); let that: Vec<&Value> = self.entries.values().map(|v| v.item()).collect(); this.cmp(&that) } } impl PartialOrd<Value> for Dictionary { fn partial_cmp(&self, _other: &Value) -> Option<Ordering> { Some(Ordering::Less) } } impl PartialEq<Value> for Dictionary { fn eq(&self, other: &Value) -> bool { match other { Value::Row(d) => self == d, _ => false, } } } impl Dictionary { pub fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> { match self.entries.get(desc) { Some(v) => MaybeOwned::Borrowed(v), None => MaybeOwned::Owned(Value::Primitive(Primitive::Nothing)), } } pub(crate) fn get_data_by_key(&self, name: &str) -> Option<&Tagged<Value>> { match self .entries 
.iter() .find(|(desc_name, _)| *desc_name == name) { Some((_, v)) => Some(v), None => None, } } pub(crate) fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut debug = f.debug_struct("Dictionary"); for (desc, value) in self.entries.iter() { debug.field(desc, &value.debug()); } debug.finish() } } #[derive(Debug)] pub struct TaggedListBuilder { tag: Tag, list: Vec<Tagged<Value>>, } impl TaggedListBuilder { pub fn new(tag: impl Into<Tag>) -> TaggedListBuilder { TaggedListBuilder { tag: tag.into(), list: vec![], } } pub fn push(&mut self, value: impl Into<Value>) { self.list.push(value.into().tagged(self.tag)); } pub fn insert_tagged(&mut self, value: impl Into<Tagged<Value>>) { self.list.push(value.into()); } pub fn into_tagged_value(self) -> Tagged<Value> { Value::Table(self.list).tagged(self.tag) } } impl From<TaggedListBuilder> for Tagged<Value> { fn from(input: TaggedListBuilder) -> Tagged<Value> { input.into_tagged_value() } } #[derive(Debug)] pub struct TaggedDictBuilder { tag: Tag, dict: IndexMap<String, Tagged<Value>>, } impl TaggedDictBuilder { pub fn new(tag: impl Into<Tag>) -> TaggedDictBuilder { TaggedDictBuilder { tag: tag.into(), dict: IndexMap::default(), } } pub fn with_capacity(tag: impl Into<Tag>, n: usize) -> TaggedDictBuilder { TaggedDictBuilder { tag: tag.into(), dict: IndexMap::with_capacity(n), } } pub fn insert(&mut self, key: impl Into<String>, value: impl Into<Value>) { self.dict.insert(key.into(), value.into().tagged(self.tag)); } pub fn insert_tagged(&mut self, key: impl Into<String>, value: impl Into<Tagged<Value>>) { self.dict.insert(key.into(), value.into()); } pub fn into_tagged_value(self) -> Tagged<Value> { self.into_tagged_dict().map(Value::Row) } pub fn into_tagged_dict(self) -> Tagged<Dictionary> { Dictionary { entries: self.dict }.tagged(self.tag) } pub fn is_empty(&self) -> bool { self.dict.is_empty() } } impl From<TaggedDictBuilder> for Tagged<Value> { fn from(input: TaggedDictBuilder) -> Tagged<Value> { 
input.into_tagged_value() } }
// Auto-generated (svd2rust-style) access code for the VCTR11 register: one
// single-bit field per interrupt vector B352..B383, mapped to bits 0..31.
// (The `impl W` block continues past this region of the file.)
#[doc = "Register `VCTR11` reader"] pub type R = crate::R<VCTR11_SPEC>;
#[doc = "Register `VCTR11` writer"] pub type W = crate::W<VCTR11_SPEC>;
#[doc = "Field `B352` reader - B352"] pub type B352_R = crate::BitReader;
#[doc = "Field `B352` writer - B352"] pub type B352_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B353` reader - B353"] pub type B353_R = crate::BitReader;
#[doc = "Field `B353` writer - B353"] pub type B353_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B354` reader - B354"] pub type B354_R = crate::BitReader;
#[doc = "Field `B354` writer - B354"] pub type B354_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B355` reader - B355"] pub type B355_R = crate::BitReader;
#[doc = "Field `B355` writer - B355"] pub type B355_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B356` reader - B356"] pub type B356_R = crate::BitReader;
#[doc = "Field `B356` writer - B356"] pub type B356_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B357` reader - B357"] pub type B357_R = crate::BitReader;
#[doc = "Field `B357` writer - B357"] pub type B357_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B358` reader - B358"] pub type B358_R = crate::BitReader;
#[doc = "Field `B358` writer - B358"] pub type B358_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B359` reader - B359"] pub type B359_R = crate::BitReader;
#[doc = "Field `B359` writer - B359"] pub type B359_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B360` reader - B360"] pub type B360_R = crate::BitReader;
#[doc = "Field `B360` writer - B360"] pub type B360_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B361` reader - B361"] pub type B361_R = crate::BitReader;
#[doc = "Field `B361` writer - B361"] pub type B361_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B362` reader - B362"] pub type B362_R = crate::BitReader;
#[doc = "Field `B362` writer - B362"] pub type B362_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B363` reader - B363"] pub type B363_R = crate::BitReader;
#[doc = "Field `B363` writer - B363"] pub type B363_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B364` reader - B364"] pub type B364_R = crate::BitReader;
#[doc = "Field `B364` writer - B364"] pub type B364_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B365` reader - B365"] pub type B365_R = crate::BitReader;
#[doc = "Field `B365` writer - B365"] pub type B365_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B366` reader - B366"] pub type B366_R = crate::BitReader;
#[doc = "Field `B366` writer - B366"] pub type B366_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B367` reader - B367"] pub type B367_R = crate::BitReader;
#[doc = "Field `B367` writer - B367"] pub type B367_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B368` reader - B368"] pub type B368_R = crate::BitReader;
#[doc = "Field `B368` writer - B368"] pub type B368_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B369` reader - B369"] pub type B369_R = crate::BitReader;
#[doc = "Field `B369` writer - B369"] pub type B369_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B370` reader - B370"] pub type B370_R = crate::BitReader;
#[doc = "Field `B370` writer - B370"] pub type B370_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B371` reader - B371"] pub type B371_R = crate::BitReader;
#[doc = "Field `B371` writer - B371"] pub type B371_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B372` reader - B372"] pub type B372_R = crate::BitReader;
#[doc = "Field `B372` writer - B372"] pub type B372_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B373` reader - B373"] pub type B373_R = crate::BitReader;
#[doc = "Field `B373` writer - B373"] pub type B373_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B374` reader - B374"] pub type B374_R = crate::BitReader;
#[doc = "Field `B374` writer - B374"] pub type B374_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B375` reader - B375"] pub type B375_R = crate::BitReader;
#[doc = "Field `B375` writer - B375"] pub type B375_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B376` reader - B376"] pub type B376_R = crate::BitReader;
#[doc = "Field `B376` writer - B376"] pub type B376_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B377` reader - B377"] pub type B377_R = crate::BitReader;
#[doc = "Field `B377` writer - B377"] pub type B377_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B378` reader - B378"] pub type B378_R = crate::BitReader;
#[doc = "Field `B378` writer - B378"] pub type B378_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B379` reader - B379"] pub type B379_R = crate::BitReader;
#[doc = "Field `B379` writer - B379"] pub type B379_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B380` reader - B380"] pub type B380_R = crate::BitReader;
#[doc = "Field `B380` writer - B380"] pub type B380_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B381` reader - B381"] pub type B381_R = crate::BitReader;
#[doc = "Field `B381` writer - B381"] pub type B381_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B382` reader - B382"] pub type B382_R = crate::BitReader;
#[doc = "Field `B382` writer - B382"] pub type B382_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `B383` reader - B383"] pub type B383_R = crate::BitReader;
#[doc = "Field `B383` writer - B383"] pub type B383_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read-proxy accessors: one bit test per field (bit n holds B(352 + n)).
impl R {
    #[doc = "Bit 0 - B352"] #[inline(always)] pub fn b352(&self) -> B352_R { B352_R::new((self.bits & 1) != 0) }
    #[doc = "Bit 1 - B353"] #[inline(always)] pub fn b353(&self) -> B353_R { B353_R::new(((self.bits >> 1) & 1) != 0) }
    #[doc = "Bit 2 - B354"] #[inline(always)] pub fn b354(&self) -> B354_R { B354_R::new(((self.bits >> 2) & 1) != 0) }
    #[doc = "Bit 3 - B355"] #[inline(always)] pub fn b355(&self) -> B355_R { B355_R::new(((self.bits >> 3) & 1) != 0) }
    #[doc = "Bit 4 - B356"] #[inline(always)] pub fn b356(&self) -> B356_R { B356_R::new(((self.bits >> 4) & 1) != 0) }
    #[doc = "Bit 5 - B357"] #[inline(always)] pub fn b357(&self) -> B357_R { B357_R::new(((self.bits >> 5) & 1) != 0) }
    #[doc = "Bit 6 - B358"] #[inline(always)] pub fn b358(&self) -> B358_R { B358_R::new(((self.bits >> 6) & 1) != 0) }
    #[doc = "Bit 7 - B359"] #[inline(always)] pub fn b359(&self) -> B359_R { B359_R::new(((self.bits >> 7) & 1) != 0) }
    #[doc = "Bit 8 - B360"] #[inline(always)] pub fn b360(&self) -> B360_R { B360_R::new(((self.bits >> 8) & 1) != 0) }
    #[doc = "Bit 9 - B361"] #[inline(always)] pub fn b361(&self) -> B361_R { B361_R::new(((self.bits >> 9) & 1) != 0) }
    #[doc = "Bit 10 - B362"] #[inline(always)] pub fn b362(&self) -> B362_R { B362_R::new(((self.bits >> 10) & 1) != 0) }
    #[doc = "Bit 11 - B363"] #[inline(always)] pub fn b363(&self) -> B363_R { B363_R::new(((self.bits >> 11) & 1) != 0) }
    #[doc = "Bit 12 - B364"] #[inline(always)] pub fn b364(&self) -> B364_R { B364_R::new(((self.bits >> 12) & 1) != 0) }
    #[doc = "Bit 13 - B365"] #[inline(always)] pub fn b365(&self) -> B365_R { B365_R::new(((self.bits >> 13) & 1) != 0) }
    #[doc = "Bit 14 - B366"] #[inline(always)] pub fn b366(&self) -> B366_R { B366_R::new(((self.bits >> 14) & 1) != 0) }
    #[doc = "Bit 15 - B367"] #[inline(always)] pub fn b367(&self) -> B367_R { B367_R::new(((self.bits >> 15) & 1) != 0) }
    #[doc = "Bit 16 - B368"] #[inline(always)] pub fn b368(&self) -> B368_R { B368_R::new(((self.bits >> 16) & 1) != 0) }
    #[doc = "Bit 17 - B369"] #[inline(always)] pub fn b369(&self) -> B369_R { B369_R::new(((self.bits >> 17) & 1) != 0) }
    #[doc = "Bit 18 - B370"] #[inline(always)] pub fn b370(&self) -> B370_R { B370_R::new(((self.bits >> 18) & 1) != 0) }
    #[doc = "Bit 19 - B371"] #[inline(always)] pub fn b371(&self) -> B371_R { B371_R::new(((self.bits >> 19) & 1) != 0) }
    #[doc = "Bit 20 - B372"] #[inline(always)] pub fn b372(&self) -> B372_R { B372_R::new(((self.bits >> 20) & 1) != 0) }
    #[doc = "Bit 21 - B373"] #[inline(always)] pub fn b373(&self) -> B373_R { B373_R::new(((self.bits >> 21) & 1) != 0) }
    #[doc = "Bit 22 - B374"] #[inline(always)] pub fn b374(&self) -> B374_R { B374_R::new(((self.bits >> 22) & 1) != 0) }
    #[doc = "Bit 23 - B375"] #[inline(always)] pub fn b375(&self) -> B375_R { B375_R::new(((self.bits >> 23) & 1) != 0) }
    #[doc = "Bit 24 - B376"] #[inline(always)] pub fn b376(&self) -> B376_R { B376_R::new(((self.bits >> 24) & 1) != 0) }
    #[doc = "Bit 25 - B377"] #[inline(always)] pub fn b377(&self) -> B377_R { B377_R::new(((self.bits >> 25) & 1) != 0) }
    #[doc = "Bit 26 - B378"] #[inline(always)] pub fn b378(&self) -> B378_R { B378_R::new(((self.bits >> 26) & 1) != 0) }
    #[doc = "Bit 27 - B379"] #[inline(always)] pub fn b379(&self) -> B379_R { B379_R::new(((self.bits >> 27) & 1) != 0) }
    #[doc = "Bit 28 - B380"] #[inline(always)] pub fn b380(&self) -> B380_R { B380_R::new(((self.bits >> 28) & 1) != 0) }
    #[doc = "Bit 29 - B381"] #[inline(always)] pub fn b381(&self) -> B381_R { B381_R::new(((self.bits >> 29) & 1) != 0) }
    #[doc = "Bit 30 - B382"] #[inline(always)] pub fn b382(&self) -> B382_R { B382_R::new(((self.bits >> 30) & 1) != 0) }
    #[doc = "Bit 31 - B383"] #[inline(always)] pub fn b383(&self) -> B383_R { B383_R::new(((self.bits >> 31) & 1) != 0) }
}
// Write-proxy accessors (block continues beyond this region of the file).
impl W {
    #[doc = "Bit 0 - B352"] #[inline(always)] #[must_use] pub fn b352(&mut self) -> B352_W<VCTR11_SPEC, 0> { B352_W::new(self) }
    #[doc = "Bit 1 - B353"] #[inline(always)] #[must_use] pub fn b353(&mut self) -> B353_W<VCTR11_SPEC, 1> {
B353_W::new(self) } #[doc = "Bit 2 - B354"] #[inline(always)] #[must_use] pub fn b354(&mut self) -> B354_W<VCTR11_SPEC, 2> { B354_W::new(self) } #[doc = "Bit 3 - B355"] #[inline(always)] #[must_use] pub fn b355(&mut self) -> B355_W<VCTR11_SPEC, 3> { B355_W::new(self) } #[doc = "Bit 4 - B356"] #[inline(always)] #[must_use] pub fn b356(&mut self) -> B356_W<VCTR11_SPEC, 4> { B356_W::new(self) } #[doc = "Bit 5 - B357"] #[inline(always)] #[must_use] pub fn b357(&mut self) -> B357_W<VCTR11_SPEC, 5> { B357_W::new(self) } #[doc = "Bit 6 - B358"] #[inline(always)] #[must_use] pub fn b358(&mut self) -> B358_W<VCTR11_SPEC, 6> { B358_W::new(self) } #[doc = "Bit 7 - B359"] #[inline(always)] #[must_use] pub fn b359(&mut self) -> B359_W<VCTR11_SPEC, 7> { B359_W::new(self) } #[doc = "Bit 8 - B360"] #[inline(always)] #[must_use] pub fn b360(&mut self) -> B360_W<VCTR11_SPEC, 8> { B360_W::new(self) } #[doc = "Bit 9 - B361"] #[inline(always)] #[must_use] pub fn b361(&mut self) -> B361_W<VCTR11_SPEC, 9> { B361_W::new(self) } #[doc = "Bit 10 - B362"] #[inline(always)] #[must_use] pub fn b362(&mut self) -> B362_W<VCTR11_SPEC, 10> { B362_W::new(self) } #[doc = "Bit 11 - B363"] #[inline(always)] #[must_use] pub fn b363(&mut self) -> B363_W<VCTR11_SPEC, 11> { B363_W::new(self) } #[doc = "Bit 12 - B364"] #[inline(always)] #[must_use] pub fn b364(&mut self) -> B364_W<VCTR11_SPEC, 12> { B364_W::new(self) } #[doc = "Bit 13 - B365"] #[inline(always)] #[must_use] pub fn b365(&mut self) -> B365_W<VCTR11_SPEC, 13> { B365_W::new(self) } #[doc = "Bit 14 - B366"] #[inline(always)] #[must_use] pub fn b366(&mut self) -> B366_W<VCTR11_SPEC, 14> { B366_W::new(self) } #[doc = "Bit 15 - B367"] #[inline(always)] #[must_use] pub fn b367(&mut self) -> B367_W<VCTR11_SPEC, 15> { B367_W::new(self) } #[doc = "Bit 16 - B368"] #[inline(always)] #[must_use] pub fn b368(&mut self) -> B368_W<VCTR11_SPEC, 16> { B368_W::new(self) } #[doc = "Bit 17 - B369"] #[inline(always)] #[must_use] pub fn b369(&mut self) -> 
B369_W<VCTR11_SPEC, 17> { B369_W::new(self) } #[doc = "Bit 18 - B370"] #[inline(always)] #[must_use] pub fn b370(&mut self) -> B370_W<VCTR11_SPEC, 18> { B370_W::new(self) } #[doc = "Bit 19 - B371"] #[inline(always)] #[must_use] pub fn b371(&mut self) -> B371_W<VCTR11_SPEC, 19> { B371_W::new(self) } #[doc = "Bit 20 - B372"] #[inline(always)] #[must_use] pub fn b372(&mut self) -> B372_W<VCTR11_SPEC, 20> { B372_W::new(self) } #[doc = "Bit 21 - B373"] #[inline(always)] #[must_use] pub fn b373(&mut self) -> B373_W<VCTR11_SPEC, 21> { B373_W::new(self) } #[doc = "Bit 22 - B374"] #[inline(always)] #[must_use] pub fn b374(&mut self) -> B374_W<VCTR11_SPEC, 22> { B374_W::new(self) } #[doc = "Bit 23 - B375"] #[inline(always)] #[must_use] pub fn b375(&mut self) -> B375_W<VCTR11_SPEC, 23> { B375_W::new(self) } #[doc = "Bit 24 - B376"] #[inline(always)] #[must_use] pub fn b376(&mut self) -> B376_W<VCTR11_SPEC, 24> { B376_W::new(self) } #[doc = "Bit 25 - B377"] #[inline(always)] #[must_use] pub fn b377(&mut self) -> B377_W<VCTR11_SPEC, 25> { B377_W::new(self) } #[doc = "Bit 26 - B378"] #[inline(always)] #[must_use] pub fn b378(&mut self) -> B378_W<VCTR11_SPEC, 26> { B378_W::new(self) } #[doc = "Bit 27 - B379"] #[inline(always)] #[must_use] pub fn b379(&mut self) -> B379_W<VCTR11_SPEC, 27> { B379_W::new(self) } #[doc = "Bit 28 - B380"] #[inline(always)] #[must_use] pub fn b380(&mut self) -> B380_W<VCTR11_SPEC, 28> { B380_W::new(self) } #[doc = "Bit 29 - B381"] #[inline(always)] #[must_use] pub fn b381(&mut self) -> B381_W<VCTR11_SPEC, 29> { B381_W::new(self) } #[doc = "Bit 30 - B382"] #[inline(always)] #[must_use] pub fn b382(&mut self) -> B382_W<VCTR11_SPEC, 30> { B382_W::new(self) } #[doc = "Bit 31 - B383"] #[inline(always)] #[must_use] pub fn b383(&mut self) -> B383_W<VCTR11_SPEC, 31> { B383_W::new(self) } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } } #[doc = "MPCBBx vector 
register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`vctr11::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`vctr11::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."] pub struct VCTR11_SPEC; impl crate::RegisterSpec for VCTR11_SPEC { type Ux = u32; } #[doc = "`read()` method returns [`vctr11::R`](R) reader structure"] impl crate::Readable for VCTR11_SPEC {} #[doc = "`write(|w| ..)` method takes [`vctr11::W`](W) writer structure"] impl crate::Writable for VCTR11_SPEC { const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0; } #[doc = "`reset()` method sets VCTR11 to value 0xffff_ffff"] impl crate::Resettable for VCTR11_SPEC { const RESET_VALUE: Self::Ux = 0xffff_ffff; }
/// Converts a Roman numeral string to its integer value.
///
/// Scans the numeral right-to-left: a symbol that is smaller than the one
/// processed before it (i.e. the one to its right) is subtracted, otherwise
/// it is added. This handles subtractive notation such as `IV` (4) and
/// `CM` (900). The empty string yields `0`.
///
/// # Panics
/// Panics if `s` contains a character that is not a Roman numeral symbol.
pub fn roman_to_int(s: String) -> i32 {
    // Value of a single Roman symbol. A `match` avoids allocating a
    // `HashMap` on every call and panics with context on bad input
    // (the old `unwrap()` gave no hint which character was invalid).
    fn symbol_value(c: char) -> i32 {
        match c {
            'I' => 1,
            'V' => 5,
            'X' => 10,
            'L' => 50,
            'C' => 100,
            'D' => 500,
            'M' => 1000,
            _ => panic!("invalid Roman numeral symbol: {:?}", c),
        }
    }

    let mut total = 0;
    let mut last = 0;
    // `chars()` is already an iterator; the old `.into_iter()` was a no-op.
    for c in s.chars().rev() {
        let v = symbol_value(c);
        if v >= last {
            total += v;
        } else {
            // Subtractive notation: smaller symbol preceding a larger one.
            total -= v;
        }
        last = v;
    }
    total
}

#[test]
fn test_romain_to_int() {
    assert_eq!(roman_to_int("MCMXCIV".to_string()), 1994);
}
/// Cursor movement and insertion operations over a sequence.
pub trait NodeBasicMovement<T> {
    fn move_to_left(&mut self);
    fn move_to_right(&mut self);
    fn insert_to_left(&mut self, item: T);
    fn insert_to_right(&mut self, item: T);
}

/// A zipper-like cursor over a sequence: the current element plus the
/// elements on each side, both stored nearest-first (index 0 is the
/// element adjacent to the cursor).
#[derive(Debug)]
pub struct NodeInSequence<T> {
    pub current_node: T,
    pub left_list: Vec<T>,
    pub right_list: Vec<T>,
}

impl<T> NodeInSequence<T> {
    /// Creates a one-element sequence with `item` under the cursor.
    #[allow(dead_code)]
    pub fn sequence(item: T) -> Self {
        NodeInSequence {
            current_node: item,
            left_list: vec![],
            right_list: vec![],
        }
    }

    /// Returns a reference to the element currently under the cursor.
    #[allow(dead_code)]
    pub fn current_element(&self) -> Option<&T> {
        Some(&self.current_node)
    }
}

// `mem::replace` moves the current node out without copying it, so the
// previous `T: Clone` bound (and the per-move `.clone()`) is unnecessary.
// Dropping the bound is backward-compatible: the impl now covers strictly
// more types.
impl<T> NodeBasicMovement<T> for NodeInSequence<T> {
    /// Moves the cursor one step to the left.
    ///
    /// # Panics
    /// Panics if there is no element to the left.
    fn move_to_left(&mut self) {
        // Take the new current element first so a panic on an empty
        // `left_list` leaves the structure unmodified.
        let new_current = self.left_list.remove(0);
        let old = std::mem::replace(&mut self.current_node, new_current);
        self.right_list.insert(0, old);
    }

    /// Moves the cursor one step to the right.
    ///
    /// # Panics
    /// Panics if there is no element to the right.
    fn move_to_right(&mut self) {
        let new_current = self.right_list.remove(0);
        let old = std::mem::replace(&mut self.current_node, new_current);
        self.left_list.insert(0, old);
    }

    /// Inserts `item` immediately to the left of the cursor.
    fn insert_to_left(&mut self, item: T) {
        self.left_list.insert(0, item)
    }

    /// Inserts `item` immediately to the right of the cursor.
    fn insert_to_right(&mut self, item: T) {
        self.right_list.insert(0, item)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_node_basic_movements() {
        let mut node = NodeInSequence {
            current_node: 6,
            left_list: vec![5, 4, 3, 2, 1],
            right_list: vec![7, 8, 9],
        };
        assert_eq!(node.current_element(), Some(&6));

        node.move_to_left();
        assert_eq!(node.current_element(), Some(&5));
        assert_eq!(node.left_list, vec![4, 3, 2, 1]);
        assert_eq!(node.right_list, vec![6, 7, 8, 9]);
        node.move_to_right(); // moving back to original formation

        node.move_to_right();
        assert_eq!(node.current_element(), Some(&7));
        assert_eq!(node.left_list, vec![6, 5, 4, 3, 2, 1]);
        assert_eq!(node.right_list, vec![8, 9]);
        node.move_to_left(); // moving back to original formation

        node.insert_to_left(13);
        assert_eq!(node.current_element(), Some(&6));
        assert_eq!(node.right_list, vec![7, 8, 9]);
        assert_eq!(node.left_list, vec![13, 5, 4, 3, 2, 1]);

        node.insert_to_right(13);
        assert_eq!(node.current_element(), Some(&6));
        assert_eq!(node.right_list, vec![13, 7, 8, 9]);
        assert_eq!(node.left_list, vec![13, 5, 4, 3, 2, 1]);
    }
}
// Copyright 2020 <盏一 w@hidva.com> // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use kuiba::access::wal; fn main() { let ctl = wal::Ctl::load().unwrap(); println!("kb_control version number: {}", wal::KB_CTL_VER); println!("Catalog version number: {}", wal::KB_CAT_VER); println!("kb_control last modified: {:?}", ctl.time); let ckpt = ctl.ckpt; println!("Latest checkpoint location: {}", ckpt); let v = ctl.ckptcpy.redo; println!("Latest checkpoint's REDO location: {}", v); let v = ctl.ckptcpy.curtli; println!("Latest checkpoint's TimeLineID: {}", v); let v = ctl.ckptcpy.prevtli; println!("Latest checkpoint's PrevTimeLineID: {}", v); let v = ctl.ckptcpy.nextxid; println!("Latest checkpoint's NextXID: {}", v); let v = ctl.ckptcpy.nextoid; println!("Latest checkpoint's NextOID: {}", v); println!("Time of latest checkpoint: {:?}", ctl.ckptcpy.time); }
use super::*; use crate::model::enums::Era; use serenity::model::id::UserId; #[test] fn remove_started_server() { let db_conn = &DbConnection::test(); let alias = "foo".to_owned(); db_conn .insert_game_server(&GameServer { alias: alias.clone(), state: GameServerState::StartedState( StartedState { address: "foo.bar:3000".to_owned(), last_seen_turn: 23, }, Some(LobbyState { owner: UserId(0), era: Era::Early, player_count: 12, description: None, }), ), }) .unwrap(); db_conn .insert_game_server(&GameServer { alias: "foo2".to_owned(), state: GameServerState::StartedState( StartedState { address: "foo2.bar:3000".to_owned(), last_seen_turn: 23, }, Some(LobbyState { owner: UserId(0), era: Era::Early, player_count: 12, description: None, }), ), }) .unwrap(); db_conn.remove_started_state(&alias).unwrap(); let result = db_conn.game_for_alias(&alias).unwrap(); if let GameServerState::StartedState(_, _) = result.state { panic!("Did not stop started state!") } if let GameServerState::Lobby(_) = db_conn.game_for_alias("foo2").unwrap().state { panic!("messed with a different game!"); } } #[test] fn should_error_when_game_not_found() { let db_conn = &DbConnection::test(); let alias = "foo".to_owned(); db_conn .insert_game_server(&GameServer { alias: "foo2".to_owned(), state: GameServerState::StartedState( StartedState { address: "foo2.bar:3000".to_owned(), last_seen_turn: 23, }, Some(LobbyState { owner: UserId(0), era: Era::Early, player_count: 12, description: None, }), ), }) .unwrap(); assert!(db_conn.remove_started_state(&alias).is_err()); if let GameServerState::Lobby(_) = db_conn.game_for_alias("foo2").unwrap().state { panic!("messed with a different game!"); } } #[test] fn should_error_when_game_not_lobby() { let db_conn = &DbConnection::test(); let alias = "foo".to_owned(); db_conn .insert_game_server(&GameServer { alias: alias.clone(), state: GameServerState::StartedState( StartedState { address: "foo.bar:3000".to_owned(), last_seen_turn: 23, }, None, ), }) .unwrap(); 
db_conn .insert_game_server(&GameServer { alias: "foo2".to_owned(), state: GameServerState::StartedState( StartedState { address: "foo2.bar:3000".to_owned(), last_seen_turn: 23, }, Some(LobbyState { owner: UserId(0), era: Era::Early, player_count: 12, description: None, }), ), }) .unwrap(); assert!(db_conn.remove_started_state(&alias).is_err()); if let GameServerState::Lobby(_) = db_conn.game_for_alias("foo2").unwrap().state { panic!("messed with a different game!"); } } #[test] fn should_not_error_when_game_not_started() { let db_conn = &DbConnection::test(); let alias = "foo".to_owned(); db_conn .insert_game_server(&GameServer { alias: alias.clone(), state: GameServerState::Lobby(LobbyState { owner: UserId(0), era: Era::Early, player_count: 12, description: None, }), }) .unwrap(); db_conn .insert_game_server(&GameServer { alias: "foo2".to_owned(), state: GameServerState::StartedState( StartedState { address: "foo2.bar:3000".to_owned(), last_seen_turn: 23, }, Some(LobbyState { owner: UserId(0), era: Era::Early, player_count: 12, description: None, }), ), }) .unwrap(); db_conn.remove_started_state(&alias).unwrap(); if let GameServerState::Lobby(_) = db_conn.game_for_alias("foo2").unwrap().state { panic!("messed with a different game!"); } }
use serenity::prelude::*; use serenity::model::channel::Message; use serenity::framework::standard::{ CommandResult, Args, macros::{ command, group } }; group!({ name: "frack_you", options: { prefix: "frack" }, commands: [frack_you] }); #[command("you")] fn frack_you(context: &mut Context, message: &Message, _args: Args) -> CommandResult { // match args.current() // { // Some("you") | Some("you!") => { message.reply(context, "no, frack _you_")?; }, // _ => () // } message.reply(context, "no, frack _you_")?; Ok(()) }
use macros::list;

/// Builds a one-element list via the `list!` macro and debug-prints it.
fn main() {
    // `mut` was unnecessary: the value is never modified after construction.
    let l = list![8];
    println!("{:?}", l);
}
/// Returns the first space-delimited word of `s`.
///
/// If `s` contains no ASCII space the whole string is returned (including
/// the empty string for empty input). Works on bytes, so only the literal
/// `b' '` byte delimits — not other Unicode whitespace, matching the
/// original behavior.
fn first_word(s: &str) -> &str {
    // `position` replaces the manual enumerate loop, and `map_or` folds in
    // the "no space found" fallback without a trailing `return`.
    s.as_bytes()
        .iter()
        .position(|&b| b == b' ')
        .map_or(s, |i| &s[..i])
}

fn main() {
    let my_string = String::from("sss asdf sdfd");
    let my_str = "sdfsld alksdjflka a";

    // `first_word` takes `&str`, so it accepts slices of a `String`,
    // a whole `String` (via deref coercion), and string literals alike.
    let _word = first_word(&my_string[..5]);
    let _word = first_word(&my_string[..]);
    let _word = first_word(&my_string);
    let _word = first_word(&my_str[..5]);
    let _word = first_word(&my_str[..]);
    let _word = first_word(my_str);
}
use opencv::prelude::*; use opencv::imgproc::*; use opencv::core::*; use vision_traits::{Configurable, DynErrResult, input::InputSingular, Node, output::OutputSingular, types::range::RangeU8}; #[derive(Configurable)] pub struct HsvFilterS { hue: RangeU8<0, 180>, saturation: RangeU8<0, 255>, value: RangeU8<0, 255>, } pub struct HsvFilter { settings: HsvFilterS, } impl Node for HsvFilter { const NAME: &'static str = "HsvFilter"; type S = HsvFilterS; type I<'a> = InputSingular<'a, Mat>; type O = OutputSingular<Mat>; fn make(settings: Self::S) -> DynErrResult<Self> { Ok(Self { settings }) } fn process(&mut self, mat: Self::I<'_>) -> DynErrResult<Self::O> { let mut hsv = Mat::default()?; cvt_color(&(mat.val), &mut hsv, COLOR_BGR2HSV, 0)?; let mut filtered = Mat::default()?; in_range(&hsv, &Mat::from_slice(&[self.settings.hue.min, self.settings.saturation.min, self.settings.value.min])?, &Mat::from_slice(&[self.settings.hue.max, self.settings.saturation.max, self.settings.value.max])?, &mut filtered)?; Ok(filtered.into()) } }
use serde_json::{Value};
use crate::ofn_2_thick::axiom_translation as axiom_translation;

/// Dispatches an OFN expression — a JSON array whose first element names the
/// axiom type — to the matching axiom translation.
///
/// # Panics
/// Panics when the axiom type is missing, not a string, or not one of the
/// supported axioms.
pub fn ofn_2_thick(v : &Value) -> Value {
    match v[0].as_str() {
        Some("SubClassOf") => axiom_translation::translate_subclass_of_axiom(v),
        Some("DisjointClasses") => axiom_translation::translate_disjoint_classes_axiom(v),
        Some("DisjointUnionOf") => axiom_translation::translate_disjoint_union_of_axiom(v),
        Some("EquivalentClasses") => axiom_translation::translate_equivalent_classes_axiom(v),
        // Fail loudly WITH context: the previous bare `panic!()` arms
        // (one for unknown strings, one for non-strings) gave no hint
        // about which input could not be translated.
        _ => panic!("unsupported or missing OFN axiom type in: {}", v),
    }
}
pub use core::f32::consts::PI;
pub use core::f32::consts::FRAC_PI_2 as PI_HALF;

pub const PI_TWO: f32 = PI * 2f32;
pub const PI_SQUARED: f32 = PI * PI;
pub const TAU: f32 = core::f32::consts::TAU;
pub const TAU_SQUARED: f32 = TAU * TAU;

/// Taylor series of cosine around 0, truncated after the `a^4` term.
/// Only accurate for small `|a|`.
pub const fn cos_approx_a1(a: f32) -> f32 {
    let a2 = a * a;
    let a4 = a2 * a2;
    1.0 - a2 / 2.0 + a4 / 24.0
}

/// Bhaskara I's sine approximation formula, shifted by `PI_HALF` so it
/// approximates cosine instead of sine.
///
/// Source: https://en.wikipedia.org/wiki/Bhaskara_I%27s_sine_approximation_formula
pub const fn cos_bhaskara_1(a: f32) -> f32 {
    const FIVE_PI: f32 = 5f32 * PI_SQUARED;
    let a = a + PI_HALF;
    let a_times_pi_minus_a = a * (PI - a);
    (16.0 * a_times_pi_minus_a) / (FIVE_PI - 4.0 * a_times_pi_minus_a)
}

/// Rational cosine approximation (Bhaskara-style variant).
pub const fn cos_bhaskara_2(a: f32) -> f32 {
    let a2 = a * a;
    (PI_SQUARED - 4.0 * a2) / (PI_SQUARED + a2)
}

/// Rational cosine approximation (Bhaskara-style variant, TAU form).
pub const fn cos_bhaskara_3(a: f32) -> f32 {
    let a2 = a * a;
    1.0 - (20.0 * a2) / (4.0 * a2 + TAU_SQUARED)
}

/// Sine for any `f32` angle in radians, via `sin(a) = cos(PI/2 - a)`.
pub fn sin32(a: f32) -> f32 {
    cos32(PI_HALF - a)
}

/// Cosine for any `f32` angle in radians.
///
/// Reduces the argument into `[0, 2*PI)` using `cos(-x) = cos(x)` and
/// periodicity, picks the quadrant, and evaluates the `cos73s` polynomial
/// on the reduced angle with the appropriate reflection/sign.
pub fn cos32(a: f32) -> f32 {
    let a = a.abs(); // cos(-x) = cos(x)
    let a = a % PI_TWO; // 0 <= a <= 2*PI
    // Quadrant of the reduced angle: 0..=3.
    let quad = (a / PI_HALF) as u8;
    match quad {
        1 => -cos73s(PI - a),
        2 => -cos73s(a - PI),
        3 => cos73s(PI_TWO - a),
        _ => cos73s(a),
    }
}

/// Low-order polynomial cosine for `[0, PI/2]`.
/// NOTE(review): the `32s`/`52s`/`73s` suffixes presumably denote decimal
/// digits of accuracy (3.2, 5.2, 7.3) — confirm against the constants'
/// provenance.
pub const fn cos32s(a: f32) -> f32 {
    const C1: f32 = 0.99940307;
    const C2: f32 = -0.49558072;
    const C3: f32 = 0.03679168;
    let a2 = a * a;
    C1 + a2 * (C2 + a2 * C3)
}

/// Mid-order polynomial cosine for `[0, PI/2]` (see note on `cos32s`).
pub const fn cos52s(a: f32) -> f32 {
    const C1: f32 = 0.9999932946;
    const C2: f32 = -0.4999124376;
    const C3: f32 = 0.0414877472;
    const C4: f32 = -0.0012712095;
    let a2 = a * a;
    C1 + a2 * (C2 + a2 * (C3 + a2 * C4))
}

/// High-order polynomial cosine for `[0, PI/2]`; the workhorse of `cos32`
/// (see note on `cos32s`).
pub const fn cos73s(a: f32) -> f32 {
    const C1: f32 = 0.999999953464;
    const C2: f32 = -0.4999999053455;
    const C3: f32 = 0.0416635846769;
    const C4: f32 = -0.0013853704264;
    const C5: f32 = 0.000023233;
    let a2 = a * a;
    C1 + a2 * (C2 + a2 * (C3 + a2 * (C4 + a2 * C5)))
}

// Benchmarks comparing the approximation against `f32::cos`.
// Gated behind `cfg(test)` so ordinary builds do not require the unstable
// `test` crate — previously this module was compiled unconditionally.
#[cfg(test)]
mod benchmarks {
    extern crate test;
    use test::Bencher;
    use super::*;

    #[bench]
    fn bench_cos_std(b: &mut Bencher) {
        b.iter(|| accumulate(f32::cos));
    }

    #[bench]
    fn bench_cos_32(b: &mut Bencher) {
        b.iter(|| accumulate(cos32));
    }

    const ITERATIONS: u32 = 1000;

    /// XORs the bit patterns of `f` over a fixed ramp of inputs so the
    /// optimizer cannot discard the benchmarked work.
    fn accumulate(f: fn(a: f32) -> f32) -> u32 {
        let mut ret = 0;
        let mut number = 0f32;
        for _ in 0..ITERATIONS {
            ret ^= f(number).to_bits();
            number += 0.01f32;
        }
        ret
    }
}
use specs::{self, Component};
use components::InitFromBlueprint;

/// ECS component carrying an entity's damage value.
///
/// `Deserialize` lets it be read straight out of blueprint data.
#[derive(Clone, Debug, Deserialize)]
pub struct Attacker {
    // Damage value — presumably applied per attack; confirm with the
    // combat system that consumes this component.
    pub damage: u32,
}

impl Component for Attacker {
    // Dense vector storage; suited to components present on many entities.
    type Storage = specs::VecStorage<Self>;
}

// Marker impl: no custom blueprint initialization beyond deserialization.
impl InitFromBlueprint for Attacker {}
use super::link::Link;
use eos::*;
use prelude::*;
use std::cmp::min;
use views::svg;

/// Yew component that renders the list of new donations fetched from an
/// EOS chain via the `EosAgent` bridge.
pub struct DonationList {
    props: Props,
    // Remote data lifecycle (NotAsked/Loading/Success/Failure) around the
    // fetched donations.
    donations: EosData<Vec<Donation>>,
    eos_agent: Box<Bridge<EosAgent>>,
}

/// Messages delivered to this component: only agent output.
pub enum Msg {
    Eos(EosOutput),
}

#[derive(PartialEq, Clone, Default)]
pub struct Props {
    pub context: Context,
    // NOTE(review): `lower_bound`/`upper_bound` are never read in this
    // file — possibly intended for a future paging query; confirm.
    pub lower_bound: Option<String>,
    pub upper_bound: Option<String>,
    // Maximum number of items to render; defaults to 10 in `view_items`.
    pub limit: Option<usize>,
    pub chain: Chain,
}

impl Component for DonationList {
    type Message = Msg;
    type Properties = Props;

    // Bridges to the EOS agent for the configured chain and immediately
    // requests the donation list.
    fn create(props: Self::Properties, mut link: ComponentLink<Self>) -> Self {
        let mut eos_agent = EosAgent::new(props.chain.clone(), link.send_back(Msg::Eos));
        eos_agent.send(EosInput::GetNewDonations);
        DonationList {
            props,
            donations: EosData::default(),
            eos_agent,
        }
    }

    // Only `NewDonations` output triggers a re-render; every other agent
    // output is ignored.
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::Eos(output) => match output {
                EosOutput::NewDonations(donations) => {
                    self.donations = donations;
                    true
                }
                _ => false,
            },
        }
    }

    // On new props (e.g. a chain switch): show the loading state,
    // reconfigure the agent for the new chain, and refetch.
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        let chain = props.chain.clone();
        self.donations = EosData::Loading;
        self.props = props;
        self.eos_agent.send(EosInput::Configure(chain));
        self.eos_agent.send(EosInput::GetNewDonations);
        true
    }
}

impl Renderable<DonationList> for DonationList {
    // Dispatches on the remote-data state; an empty success renders the
    // same placeholder as NotAsked.
    fn view(&self) -> Html<Self> {
        match &self.donations {
            EosData::NotAsked => self.view_empty(),
            EosData::Loading => self.view_loading(),
            EosData::Success(data) => {
                if data.is_empty() {
                    self.view_empty()
                } else {
                    self.view_items(&data)
                }
            }
            EosData::Failure(error) => self.view_error(error),
        }
    }
}

impl DonationList {
    fn view_loading(&self) -> Html<Self> {
        html! {
            <div class="donation_list -loading", >
                { "Loading..." }
            </div>
        }
    }

    // NOTE(review): `error` is unused — the error view is just a generic
    // broken-link icon; consider surfacing the message.
    fn view_error(&self, error: &EosError) -> Html<Self> {
        html! {
            <div class="donation_list -error", >
                { svg::link_cross() }
            </div>
        }
    }

    // Renders at most `props.limit` (default 10) donations.
    fn view_items(&self, donations: &[Donation]) -> Html<Self> {
        let limit = min(donations.len(), self.props.limit.unwrap_or_else(|| 10));
        html! {
            <ul class="donation_list_items", >
                { for donations[0..limit].iter().map(|donation| self.view_item(donation)) }
            </ul>
        }
    }

    // One list row: donor name linking to their profile, plus the amount.
    // `donated` is divided by 10^4 and shown with 4 decimals — presumably
    // the amount is stored in smallest units of the core token; confirm.
    fn view_item(&self, donation: &Donation) -> Html<Self> {
        let donor_route = Route::Profile(
            self.props.chain.to_chain_id_prefix(),
            donation.account.clone(),
        );
        let donated = donation.donated as f64;
        html! {
            <li class="donation_list_item", >
                <Link: class="donation_account", route=donor_route, text=donation.account.clone(), />
                <div class="donation_donated", >
                    { format!("{:.*} {}", 4, donated / 10000., self.props.chain.core_symbol) }
                </div>
            </li>
        }
    }

    fn view_empty(&self) -> Html<Self> {
        html! {
            <div class="donation_list -empty", >
                { svg::eos() }
            </div>
        }
    }
}
// NOTE(review): module contents are not visible from this file; the names
// suggest hashing primitives and Merkle-tree logic respectively — confirm.
pub mod hash;
pub mod merkle;
//! Chia proof of space implementation
use crate::chiapos::Tables;
// NOTE(review): this import is gated on `parallel`/test, but
// `ChiaTableGenerator` below uses `TablesCache` unconditionally — without
// either cfg this file would not compile; confirm the intended gating.
#[cfg(any(feature = "parallel", test))]
use crate::chiapos::TablesCache;
use crate::{PosTableType, Quality, Table, TableGenerator};
use core::mem;
use subspace_core_primitives::{PosProof, PosQualityBytes, PosSeed};

// Space parameter `k` used for every table created in this module.
const K: u8 = 17;

/// Abstraction that represents quality of the solution in the table.
///
/// Chia implementation.
#[derive(Debug)]
#[must_use]
pub struct ChiaQuality<'a> {
    bytes: PosQualityBytes,
    // The 32-byte challenge the quality was found for; kept so the proof
    // can be derived later in `create_proof`.
    challenge: [u8; 32],
    tables: &'a Tables<K>,
}

impl<'a> Quality for ChiaQuality<'a> {
    fn to_bytes(&self) -> PosQualityBytes {
        self.bytes
    }

    // Takes the first proof for the stored challenge; by construction a
    // quality only exists when at least one proof does (see `expect`).
    fn create_proof(&self) -> PosProof {
        self.tables
            .find_proof(&self.challenge)
            .next()
            .map(PosProof::from)
            .expect("Proof always exists if quality exists; qed")
    }
}

/// Subspace proof of space table generator.
///
/// Chia implementation.
#[derive(Debug, Default, Clone)]
pub struct ChiaTableGenerator {
    // Scratch space reused across generations to avoid re-allocating.
    tables_cache: TablesCache<K>,
}

impl TableGenerator<ChiaTable> for ChiaTableGenerator {
    fn generate(&mut self, seed: &PosSeed) -> ChiaTable {
        ChiaTable {
            tables: Tables::<K>::create((*seed).into(), &mut self.tables_cache),
        }
    }

    fn generate_parallel(&mut self, seed: &PosSeed) -> ChiaTable {
        ChiaTable {
            tables: Tables::<K>::create_parallel((*seed).into(), &mut self.tables_cache),
        }
    }
}

/// Subspace proof of space table.
///
/// Chia implementation.
#[derive(Debug)]
pub struct ChiaTable {
    tables: Tables<K>,
}

impl Table for ChiaTable {
    const TABLE_TYPE: PosTableType = PosTableType::Chia;
    type Generator = ChiaTableGenerator;
    type Quality<'a> = ChiaQuality<'a>;

    // Stand-alone generation without a shared cache (cf. the generator,
    // which reuses one across calls).
    fn generate(seed: &PosSeed) -> ChiaTable {
        Self {
            tables: Tables::<K>::create_simple((*seed).into()),
        }
    }

    #[cfg(any(feature = "parallel", test))]
    fn generate_parallel(seed: &PosSeed) -> ChiaTable {
        Self {
            tables: Tables::<K>::create_parallel((*seed).into(), &mut TablesCache::default()),
        }
    }

    // Builds a 32-byte challenge with the little-endian index in its first
    // 4 bytes (rest zero), then looks up the first matching quality.
    fn find_quality(&self, challenge_index: u32) -> Option<Self::Quality<'_>> {
        let mut challenge = [0; 32];
        challenge[..mem::size_of::<u32>()].copy_from_slice(&challenge_index.to_le_bytes());
        let maybe_quality = self.tables.find_quality(&challenge).next();
        maybe_quality.map(|quality| ChiaQuality {
            bytes: PosQualityBytes::from(quality),
            challenge,
            tables: &self.tables,
        })
    }

    // Stateless verification: rebuilds the same challenge layout as
    // `find_quality` and asks chiapos to verify the proof against it.
    fn is_proof_valid(
        seed: &PosSeed,
        challenge_index: u32,
        proof: &PosProof,
    ) -> Option<PosQualityBytes> {
        let mut challenge = [0; 32];
        challenge[..mem::size_of::<u32>()].copy_from_slice(&challenge_index.to_le_bytes());
        Tables::<K>::verify(**seed, &challenge, proof).map(PosQualityBytes::from)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn basic() {
        let seed = PosSeed::from([
            35, 2, 52, 4, 51, 55, 23, 84, 91, 10, 111, 12, 13, 222, 151, 16, 228, 211, 254, 45,
            92, 198, 204, 10, 9, 10, 11, 129, 139, 171, 15, 23,
        ]);

        let table = ChiaTable::generate(&seed);
        let table_parallel = ChiaTable::generate_parallel(&seed);
        // Index known to have no solution for this seed.
        assert!(table.find_quality(1232460437).is_none());
        assert!(table_parallel.find_quality(1232460437).is_none());

        {
            // Index known to have a solution: sequential and parallel
            // generation must agree, and the produced proof must verify
            // back to the same quality.
            let challenge_index = 124537303;
            let quality = table.find_quality(challenge_index).unwrap();
            assert_eq!(
                quality.to_bytes(),
                table_parallel
                    .find_quality(challenge_index)
                    .unwrap()
                    .to_bytes()
            );
            let proof = quality.create_proof();
            let maybe_quality = ChiaTable::is_proof_valid(&seed, challenge_index, &proof);
            assert_eq!(maybe_quality, Some(quality.to_bytes()));
        }
    }
}
// Crate root for a `no_std` embedded driver — presumably for the ST
// LIS3DSH accelerometer, judging by the module name; confirm.
#![no_std]
// Nightly feature enabling `[expr; N]` array-repeat expressions with
// non-`Copy` const operands (required by code in `lis3dsh` at the time
// this was written).
#![feature(const_in_array_repeat_expressions)]

pub mod lis3dsh;
use fst::raw::{Fst, Node, Output};

/// Wraps a Map to add the `get_before` method.
///
/// Keys are stored as their big-endian byte encoding, so the FST's
/// lexicographic order matches the numeric order of the `u64` keys —
/// this is what makes the predecessor search below correct.
pub struct Map<D> {
    map: fst::Map<D>,
}

impl Map<Vec<u8>> {
    /// Builds the map from `(key, value)` pairs. The iterator must yield
    /// keys in ascending order (an `fst` construction requirement).
    pub fn from_iter(iter: impl Iterator<Item = (u64, u64)>) -> Result<Self, fst::Error> {
        let map = fst::Map::from_iter(iter.map(|(key, value)| (key.to_be_bytes(), value)))?;
        Ok(Self { map })
    }
}

// Saved resume point for the search: the node, the output accumulated up
// to it, and the index of the largest transition strictly below the
// consumed input byte. `None` means there is nothing smaller to fall
// back to.
type Backtrack<'a> = Option<(Node<'a>, Output, usize)>;

// State while matching the key byte-by-byte.
#[derive(Debug, Clone, Copy)]
struct CompareNext<'a> {
    node: Node<'a>,
    output: Output,
    backtrack: Backtrack<'a>,
    // Remaining (unconsumed) key bytes.
    input: &'a [u8],
}

// State once we are strictly below the key: always take the largest branch.
#[derive(Debug, Clone, Copy)]
struct ChooseBiggest<'a> {
    node: Node<'a>,
    output: Output,
}

// One step of the search state machine driven by `get_before`.
#[derive(Debug, Clone, Copy)]
enum Outcome<'a> {
    CompareNext(CompareNext<'a>),
    ChooseBiggest(ChooseBiggest<'a>),
    /// Go back to the first byte that is lowest, then ChooseBiggest
    Backtrack(Backtrack<'a>),
    /// Key is lower than every key in the map, abort the search
    Abort,
    /// Reached a final state, output contains the value
    Final(Output),
}

impl<D: AsRef<[u8]>> Map<D> {
    // Resumes at the saved smaller branch (if any) and switches to the
    // "largest key under this subtree" phase.
    fn backtrack<'a>(raw: &'a Fst<D>, state: Backtrack<'a>) -> Outcome<'a> {
        match state {
            Some((node, output, index)) => {
                let t = node.transition(index);
                let output = output.cat(t.out);
                let next = raw.node(t.addr);
                Outcome::ChooseBiggest(ChooseBiggest { node: next, output })
            }
            None => Outcome::Abort,
        }
    }

    // Descends along the last (largest) transition until a leaf, yielding
    // the largest key in the subtree.
    fn choose_biggest<'a>(raw: &'a Fst<D>, state: ChooseBiggest<'a>) -> Outcome<'a> {
        if state.node.len() == 0 {
            return Outcome::Final(state.output.cat(state.node.final_output()));
        }
        let t = state.node.transition(state.node.len() - 1);
        let output = state.output.cat(t.out);
        let next = raw.node(t.addr);
        return Outcome::ChooseBiggest(ChooseBiggest { node: next, output });
    }

    // Consumes one key byte. Exact match → keep comparing (remembering the
    // next-smaller sibling as a backtrack point). No exact match → take
    // the largest transition below the byte, or backtrack if none exists.
    fn compare_next<'a>(raw: &'a Fst<D>, state: CompareNext<'a>) -> Outcome<'a> {
        let input = match state.input.first() {
            Some(&input) => input,
            // Whole key consumed: fixed-width (8-byte) keys mean this node
            // is final, and its output is the answer.
            None => return Outcome::Final(state.output.cat(state.node.final_output())),
        };
        match state.node.find_input(input) {
            None => {
                if state.node.len() == 0 {
                    return Outcome::Abort;
                }
                // Find the first transition whose byte is greater than the
                // input (or the transition count if none is).
                let mut it = state.node.transitions().enumerate();
                let index = loop {
                    if let Some((index, t)) = it.next() {
                        if t.inp > input {
                            break index;
                        }
                    } else {
                        break state.node.len();
                    }
                };
                if index == 0 {
                    // No transition is below the input byte: equality would
                    // have been caught by `find_input`, so every branch here
                    // is greater — fall back to the previous byte's
                    // next-smaller branch.
                    return Outcome::Backtrack(state.backtrack);
                } else {
                    // Largest transition strictly below the input byte:
                    // everything beneath it is < key, so take its maximum.
                    let t = state.node.transition(index - 1);
                    let output = state.output.cat(t.out);
                    let next = raw.node(t.addr);
                    return Outcome::ChooseBiggest(ChooseBiggest { node: next, output });
                }
            }
            Some(index) => {
                // Exact byte match: remember the next-smaller sibling (if
                // any) so a later mismatch can fall back to it; otherwise
                // keep the backtrack point inherited from earlier bytes.
                let backtrack = if index == 0 {
                    state.backtrack
                } else {
                    Some((state.node, state.output, index - 1))
                };
                let t = state.node.transition(index);
                let output = state.output.cat(t.out);
                let next = raw.node(t.addr);
                return Outcome::CompareNext(CompareNext {
                    node: next,
                    output,
                    backtrack,
                    input: &state.input[1..],
                });
            }
        }
    }

    /// Returns the value exactly corresponding to the passed key, if it exists.
    pub fn get_exact(&self, key: u64) -> Option<u64> {
        let transition = key.to_be_bytes();
        self.map.get(transition)
    }

    /// Returns the value corresponding to the biggest key that is less or equal to the passed key.
    ///
    /// If no key is less or equal to the passed key, returns `None`.
    pub fn get_before(&self, key: u64) -> Option<u64> {
        let key = key.to_be_bytes();
        let raw = self.map.as_fst();
        let mut outcome = Outcome::CompareNext(CompareNext {
            node: raw.root(),
            output: Output::zero(),
            backtrack: None,
            input: &key,
        });
        // Drive the state machine until it aborts or reaches a final state.
        loop {
            outcome = match outcome {
                Outcome::CompareNext(state) => Self::compare_next(raw, state),
                Outcome::ChooseBiggest(state) => Self::choose_biggest(raw, state),
                Outcome::Backtrack(state) => Self::backtrack(raw, state),
                Outcome::Final(output) => return Some(output.value()),
                Outcome::Abort => return None,
            }
        }
    }
}

#[cfg(test)]
mod test {
    use super::Map;

    #[test]
    fn test_exact() {
        let map = Map::from_iter(vec![(0, 0), (1, 42), (100, 1000)].into_iter())
            .expect("could not build map");
        assert_eq!(map.get_exact(0), Some(0));
        assert_eq!(map.get_before(0), Some(0));
        assert_eq!(map.get_exact(1), Some(42));
        assert_eq!(map.get_before(1), Some(42));
        assert_eq!(map.get_exact(100), Some(1000));
        assert_eq!(map.get_before(100), Some(1000));
    }

    #[test]
    fn test_before() {
        let map =
            Map::from_iter(vec![(1, 42), (100, 1000)].into_iter()).expect("could not build map");
        assert_eq!(map.get_before(0), None);
        assert_eq!(map.get_before(2), Some(42));
        assert_eq!(map.get_before(110), Some(1000));
    }

    #[test]
    fn test_multiple_bytes() {
        // Keys chosen so their big-endian encodings differ at several byte
        // positions, exercising the backtracking path.
        let map =
            Map::from_iter(vec![(10, 42), (100, 1000), (4444, 28), (70_000, 2074)].into_iter())
                .expect("could not build map");
        assert_eq!(map.get_before(258), Some(1000));
        for i in 0..10 {
            assert_eq!(map.get_before(i), None);
        }
        for i in 10..100 {
            assert_eq!(map.get_before(i), Some(42));
        }
        for i in 100..4444 {
            assert_eq!(map.get_before(i), Some(1000));
        }
        for i in 4444..70_000 {
            assert_eq!(map.get_before(i), Some(28));
        }
        for i in 70_000..200_000 {
            assert_eq!(map.get_before(i), Some(2074));
        }
    }
}
// --- paritytech --- use cumulus_pallet_xcmp_queue::Config; use xcm_executor::XcmExecutor; // --- darwinia-network --- use crate::*; impl Config for Runtime { type Event = Event; type XcmExecutor = XcmExecutor<XcmConfig>; type ChannelInfo = ParachainSystem; type VersionWrapper = (); }
use std::collections::HashMap;
use std::io::Read;

/// Queries an IP-echo endpoint over HTTPS and prints the caller's public
/// IP address (with surrounding whitespace stripped) to stdout.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut ip = String::new();
    // Blocking GET; `?` propagates both the request and the read failure.
    reqwest::blocking::get("https://ip.nyaa.gay/ip")?.read_to_string(&mut ip)?;
    println!("{}", ip.trim());
    Ok(())
}
/*
chapter 4
syntax and semantics
scope and shadowing
*/

/// Demonstrates lexical scoping and shadowing of `let` bindings.
fn main() {
    let n: i32 = 8;
    {
        println!("{}", n); // prints "8" — the outer `n` is still visible here
        let n = 12; // shadows the outer `n` until the block ends
        println!("{}", n); // prints "12"
    }
    println!("{}", n); // prints "8" — the inner shadow went out of scope
    let n = 42; // shadowing in the same scope rebinds `n`
    // Was `print!`, which omitted the trailing newline that the expected
    // output below implies (and left stdout's line buffer unflushed).
    println!("{}", n); // prints "42"
}

// output should be:
/*
8
12
8
42
*/
use tonic::{transport::Server, Request, Response, Status};

use dapr::{
    appcallback::*,
    dapr::dapr::proto::runtime::v1::app_callback_server::{AppCallback, AppCallbackServer},
};

/// Stateless example service implementing Dapr's app-callback gRPC protocol.
#[derive(Default)]
pub struct AppCallbackService {}

#[tonic::async_trait]
impl AppCallback for AppCallbackService {
    /// Invokes service method with InvokeRequest.
    /// No-op in this example: always replies with an empty `InvokeResponse`.
    async fn on_invoke(
        &self,
        _request: Request<InvokeRequest>,
    ) -> Result<Response<InvokeResponse>, Status> {
        Ok(Response::new(InvokeResponse::default()))
    }

    /// Lists all topics subscribed by this app.
    ///
    /// NOTE: Dapr runtime will call this method to get
    /// the list of topics the app wants to subscribe to.
    /// In this example, the app is subscribing to topic `A`.
    async fn list_topic_subscriptions(
        &self,
        _request: Request<()>,
    ) -> Result<Response<ListTopicSubscriptionsResponse>, Status> {
        let topic = "A".to_string();
        let pubsub_name = "pubsub".to_string();

        let list_subscriptions = ListTopicSubscriptionsResponse::topic(pubsub_name, topic);

        Ok(Response::new(list_subscriptions))
    }

    /// Subscribes events from Pubsub.
    /// Logs each event's payload (lossily decoded as UTF-8) and its content
    /// type, then acknowledges with the default (success) response.
    async fn on_topic_event(
        &self,
        request: Request<TopicEventRequest>,
    ) -> Result<Response<TopicEventResponse>, Status> {
        let r = request.into_inner();
        let data = &r.data;
        let data_content_type = &r.data_content_type;

        let message = String::from_utf8_lossy(&data);
        println!("Message: {}", &message);
        println!("Content-Type: {}", &data_content_type);

        Ok(Response::new(TopicEventResponse::default()))
    }

    /// Lists all input bindings subscribed by this app.
    /// Empty response: this example registers no input bindings.
    async fn list_input_bindings(
        &self,
        _request: Request<()>,
    ) -> Result<Response<ListInputBindingsResponse>, Status> {
        Ok(Response::new(ListInputBindingsResponse::default()))
    }

    /// Listens events from the input bindings.
    /// No-op: acknowledges every binding event with the default response.
    async fn on_binding_event(
        &self,
        _request: Request<BindingEventRequest>,
    ) -> Result<Response<BindingEventResponse>, Status> {
        Ok(Response::new(BindingEventResponse::default()))
    }
}

/// Binds the callback service to port 50051 on all interfaces and serves
/// until the process is terminated.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // `unwrap` is safe: the address literal is well-formed.
    let addr = "[::]:50051".parse().unwrap();

    let callback_service = AppCallbackService::default();

    println!("AppCallback server listening on: {}", addr);
    // Create a gRPC server with the callback_service.
    Server::builder()
        .add_service(AppCallbackServer::new(callback_service))
        .serve(addr)
        .await?;

    Ok(())
}
use std::fmt::Display;

/// Marker trait for types that can be used as a textual response.
///
/// It adds no methods of its own; implementors must also implement
/// [`Display`], which supplies the actual formatting.
pub trait Response: Display {}
use yaml_rust::YamlLoader; use std::env; use std::path; use super::{Context, Module, RootModuleConfig}; use crate::configs::kubernetes::KubernetesConfig; use crate::formatter::StringFormatter; use crate::utils; fn get_kube_context(contents: &str) -> Option<(String, String)> { let yaml_docs = YamlLoader::load_from_str(&contents).ok()?; if yaml_docs.is_empty() { return None; } let conf = &yaml_docs[0]; let current_ctx = conf["current-context"].as_str()?; if current_ctx.is_empty() { return None; } let ns = conf["contexts"] .as_vec() .and_then(|contexts| { contexts .iter() .filter_map(|ctx| Some((ctx, ctx["name"].as_str()?))) .find(|(_, name)| *name == current_ctx) .and_then(|(ctx, _)| ctx["context"]["namespace"].as_str()) }) .unwrap_or(""); Some((current_ctx.to_string(), ns.to_string())) } fn parse_kubectl_file(filename: &path::PathBuf) -> Option<(String, String)> { let contents = utils::read_file(filename).ok()?; get_kube_context(&contents) } pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> { let kube_cfg = match context.get_env("KUBECONFIG") { Some(paths) => env::split_paths(&paths) .filter_map(|filename| parse_kubectl_file(&filename)) .next(), None => { let filename = dirs_next::home_dir()?.join(".kube").join("config"); parse_kubectl_file(&filename) } }; match kube_cfg { Some(kube_cfg) => { let (kube_ctx, kube_ns) = kube_cfg; let mut module = context.new_module("kubernetes"); let config: KubernetesConfig = KubernetesConfig::try_load(module.config); // As we default to disabled=true, we have to check here after loading our config module, // before it was only checking against whatever is in the config starship.toml if config.disabled { return None; }; let parsed = StringFormatter::new(config.format).and_then(|formatter| { formatter .map_meta(|variable, _| match variable { "symbol" => Some(config.symbol), _ => None, }) .map_style(|variable| match variable { "style" => Some(Ok(config.style)), _ => None, }) .map(|variable| match variable { "context" => 
match config.context_aliases.get(&kube_ctx) { None => Some(Ok(kube_ctx.as_str())), Some(&alias) => Some(Ok(alias)), }, _ => None, }) .map(|variable| match variable { "namespace" => { if kube_ns != "" { Some(Ok(kube_ns.as_str())) } else { None } } _ => None, }) .parse(None) }); module.set_segments(match parsed { Ok(segments) => segments, Err(error) => { log::warn!("Error in module `kubernetes`: \n{}", error); return None; } }); Some(module) } None => None, } } #[cfg(test)] mod tests { use super::*; #[test] fn parse_empty_config() { let input = ""; let result = get_kube_context(&input); let expected = None; assert_eq!(result, expected); } #[test] fn parse_no_config() { let input = r#" apiVersion: v1 clusters: [] contexts: [] current-context: "" kind: Config preferences: {} users: [] "#; let result = get_kube_context(&input); let expected = None; assert_eq!(result, expected); } #[test] fn parse_only_context() { let input = r#" apiVersion: v1 clusters: [] contexts: - context: cluster: test_cluster user: test_user name: test_context current-context: test_context kind: Config preferences: {} users: [] "#; let result = get_kube_context(&input); let expected = Some(("test_context".to_string(), "".to_string())); assert_eq!(result, expected); } #[test] fn parse_context_and_ns() { let input = r#" apiVersion: v1 clusters: [] contexts: - context: cluster: test_cluster user: test_user namespace: test_namespace name: test_context current-context: test_context kind: Config preferences: {} users: [] "#; let result = get_kube_context(&input); let expected = Some(("test_context".to_string(), "test_namespace".to_string())); assert_eq!(result, expected); } #[test] fn parse_multiple_contexts() { let input = r#" apiVersion: v1 clusters: [] contexts: - context: cluster: another_cluster user: another_user namespace: another_namespace name: another_context - context: cluster: test_cluster user: test_user namespace: test_namespace name: test_context current-context: test_context kind: Config 
preferences: {} users: [] "#; let result = get_kube_context(&input); let expected = Some(("test_context".to_string(), "test_namespace".to_string())); assert_eq!(result, expected); } #[test] fn parse_broken_config() { let input = r#" --- dummy_string "#; let result = get_kube_context(&input); let expected = None; assert_eq!(result, expected); } }
/// Kinds of pointer (mouse) events a client can observe or select.
///
/// NOTE(review): the variant names mirror the X11 core-protocol event-mask
/// constants (`ButtonPress`, `PointerMotionHint`, `KeymapState`, ...);
/// confirm the intended backend before relying on exact X11 semantics.
pub enum PointerEvent {
    ButtonPress,
    ButtonRelease,
    EnterWindow,
    LeaveWindow,
    PointerMotion,
    PointerMotionHint,
    // Motion reported while the given button is held down.
    Button1Motion,
    Button2Motion,
    Button3Motion,
    Button4Motion,
    Button5Motion,
    // Motion with any button held down.
    ButtonMotion,
    KeymapState,
}
use regex::Regex;

const INPUT: &str = include_str!("../input.txt");

/// Parses each input line `"<min>-<max> <letter>: <password>"` into a
/// `(min, max, letter, password)` tuple.
///
/// Panics on malformed lines, which is acceptable for a fixed puzzle input.
fn spec_iter() -> impl Iterator<Item = (usize, usize, char, String)> {
    // Compiled once here and moved into the closure, not once per line.
    let re = Regex::new(
        r"(?x) (?P<min>\d+) - (?P<max>\d+) \s (?P<letter>[[:alpha:]]+) :\s (?P<pw>[[:alpha:]]+)",
    )
    .unwrap();

    // `lines()` is already an iterator; the former `.into_iter()` was redundant.
    INPUT.lines().map(move |spec| {
        let cap = re.captures(spec).unwrap();
        let min: usize = cap["min"].parse().unwrap();
        let max: usize = cap["max"].parse().unwrap();
        let letter = cap["letter"].chars().next().unwrap();
        let pw = cap["pw"].to_string();
        (min, max, letter, pw)
    })
}

/// Part 1: a password is valid when `letter` occurs between `min` and `max`
/// times (inclusive).
fn part1() -> usize {
    spec_iter()
        .filter(|(min, max, letter, pw)| {
            let char_count = pw.chars().filter(|c| c == letter).count();
            (*min..=*max).contains(&char_count)
        })
        .count()
}

/// Part 2: exactly one of the (1-based) positions `min` / `max` must hold
/// `letter`.
fn part2() -> usize {
    spec_iter()
        .filter(|(min, max, letter, pw)| {
            // Convert the 1-based puzzle positions to 0-based indices.
            let min = min - 1;
            let max = max - 1;
            let mut pw_iter = pw.chars();
            let first_matches = pw_iter.nth(min).unwrap() == *letter;
            // `nth(min)` consumed min + 1 chars, so the second index is
            // relative to the remainder of the iterator.
            let second_matches = pw_iter.nth(max - min - 1).unwrap() == *letter;
            // "exactly one matches" is XOR, i.e. inequality of the two bools.
            first_matches != second_matches
        })
        .count()
}

fn main() {
    println!("part 1: {}", part1());
    println!("part 2: {}", part2());
}

#[cfg(test)]
mod tests {
    use super::{part1, part2};

    #[test]
    fn test_part1() {
        assert_eq!(part1(), 582);
    }

    #[test]
    fn test_part2() {
        assert_eq!(part2(), 729);
    }
}
// Machine-generated peripheral register map (svd2rust output — see the
// docs.rs/svd2rust links in the doc attributes below). Field order and the
// `_reservedN` padding arrays encode the hardware memory layout of this
// `#[repr(C)]` block; regenerate from the device SVD rather than editing
// by hand.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Peripheral group structure"]
    pub gr0: GR,
    _reserved1: [u8; 24usize],
    #[doc = "0x40 - Peripheral group structure"]
    pub gr1: GR,
    _reserved2: [u8; 24usize],
    #[doc = "0x80 - Peripheral group structure"]
    pub gr2: GR,
    _reserved3: [u8; 24usize],
    #[doc = "0xc0 - Peripheral group structure"]
    pub gr3: GR,
    _reserved4: [u8; 24usize],
    #[doc = "0x100 - Peripheral group structure"]
    pub gr4: GR,
    _reserved5: [u8; 24usize],
    #[doc = "0x140 - Peripheral group structure"]
    pub gr5: GR,
    _reserved6: [u8; 24usize],
    #[doc = "0x180 - Peripheral group structure"]
    pub gr6: GR,
    _reserved7: [u8; 24usize],
    #[doc = "0x1c0 - Peripheral group structure"]
    pub gr7: GR,
    _reserved8: [u8; 24usize],
    #[doc = "0x200 - Peripheral group structure"]
    pub gr8: GR,
    _reserved9: [u8; 24usize],
    #[doc = "0x240 - Peripheral group structure"]
    pub gr9: GR,
    _reserved10: [u8; 24usize],
    #[doc = "0x280 - Peripheral group structure"]
    pub gr10: GR,
    _reserved11: [u8; 344usize],
    #[doc = "0x400 - Divider command register"]
    pub div_cmd: DIV_CMD,
    _reserved12: [u8; 1020usize],
    #[doc = "0x800 - Divider control register (for 8.0 divider)"]
    pub div_8_ctl: [DIV_8_CTL; 64],
    #[doc = "0x900 - Divider control register (for 16.0 divider)"]
    pub div_16_ctl: [DIV_16_CTL; 64],
    #[doc = "0xa00 - Divider control register (for 16.5 divider)"]
    pub div_16_5_ctl: [DIV_16_5_CTL; 64],
    #[doc = "0xb00 - Divider control register (for 24.5 divider)"]
    pub div_24_5_ctl: [DIV_24_5_CTL; 63],
    _reserved16: [u8; 4usize],
    #[doc = "0xc00 - Clock control register"]
    pub clock_ctl: [CLOCK_CTL; 128],
    _reserved17: [u8; 512usize],
    #[doc = "0x1000 - Trigger command register"]
    pub tr_cmd: TR_CMD,
    _reserved18: [u8; 4092usize],
    #[doc = "0x2000 - Trigger group"]
    pub tr_gr: [TR_GR; 15],
    _reserved19: [u8; 512usize],
    #[doc = "0x4000 - PPU structure with programmable address"]
    pub ppu_pr0: PPU_PR,
    _reserved20: [u8; 24usize],
    #[doc = "0x4040 - PPU structure with programmable address"]
    pub ppu_pr1: PPU_PR,
    _reserved21: [u8; 24usize],
    #[doc = "0x4080 - PPU structure with programmable address"]
    pub ppu_pr2: PPU_PR,
    _reserved22: [u8; 24usize],
    #[doc = "0x40c0 - PPU structure with programmable address"]
    pub ppu_pr3: PPU_PR,
    _reserved23: [u8; 24usize],
    #[doc = "0x4100 - PPU structure with programmable address"]
    pub ppu_pr4: PPU_PR,
    _reserved24: [u8; 24usize],
    #[doc = "0x4140 - PPU structure with programmable address"]
    pub ppu_pr5: PPU_PR,
    _reserved25: [u8; 24usize],
    #[doc = "0x4180 - PPU structure with programmable address"]
    pub ppu_pr6: PPU_PR,
    _reserved26: [u8; 24usize],
    #[doc = "0x41c0 - PPU structure with programmable address"]
    pub ppu_pr7: PPU_PR,
    _reserved27: [u8; 24usize],
    #[doc = "0x4200 - PPU structure with programmable address"]
    pub ppu_pr8: PPU_PR,
    _reserved28: [u8; 24usize],
    #[doc = "0x4240 - PPU structure with programmable address"]
    pub ppu_pr9: PPU_PR,
    _reserved29: [u8; 24usize],
    #[doc = "0x4280 - PPU structure with programmable address"]
    pub ppu_pr10: PPU_PR,
    _reserved30: [u8; 24usize],
    #[doc = "0x42c0 - PPU structure with programmable address"]
    pub ppu_pr11: PPU_PR,
    _reserved31: [u8; 24usize],
    #[doc = "0x4300 - PPU structure with programmable address"]
    pub ppu_pr12: PPU_PR,
    _reserved32: [u8; 24usize],
    #[doc = "0x4340 - PPU structure with programmable address"]
    pub ppu_pr13: PPU_PR,
    _reserved33: [u8; 24usize],
    #[doc = "0x4380 - PPU structure with programmable address"]
    pub ppu_pr14: PPU_PR,
    _reserved34: [u8; 24usize],
    #[doc = "0x43c0 - PPU structure with programmable address"]
    pub ppu_pr15: PPU_PR,
    _reserved35: [u8; 3096usize],
    #[doc = "0x5000 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr0: PPU_GR,
    _reserved36: [u8; 24usize],
    #[doc = "0x5040 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr1: PPU_GR,
    _reserved37: [u8; 24usize],
    #[doc = "0x5080 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr2: PPU_GR,
    _reserved38: [u8; 24usize],
    #[doc = "0x50c0 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr3: PPU_GR,
    _reserved39: [u8; 24usize],
    #[doc = "0x5100 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr4: PPU_GR,
    _reserved40: [u8; 24usize],
    #[doc = "0x5140 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr5: PPU_GR,
    _reserved41: [u8; 24usize],
    #[doc = "0x5180 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr6: PPU_GR,
    _reserved42: [u8; 24usize],
    #[doc = "0x51c0 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr7: PPU_GR,
    _reserved43: [u8; 24usize],
    #[doc = "0x5200 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr8: PPU_GR,
    _reserved44: [u8; 24usize],
    #[doc = "0x5240 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr9: PPU_GR,
    _reserved45: [u8; 24usize],
    #[doc = "0x5280 - PPU structure with fixed/constant address for a peripheral group"]
    pub ppu_gr10: PPU_GR,
}
#[doc = r"Register block"]
#[repr(C)]
pub struct GR {
    #[doc = "0x00 - Clock control"]
    pub clock_ctl: self::gr::CLOCK_CTL,
    _reserved1: [u8; 28usize],
    #[doc = "0x20 - Slave control"]
    pub sl_ctl: self::gr::SL_CTL,
    #[doc = "0x24 - Timeout control"]
    pub timeout_ctl: self::gr::TIMEOUT_CTL,
}
#[doc = r"Register block"]
#[doc = "Peripheral group structure"]
pub mod gr;
#[doc = r"Register block"]
#[repr(C)]
pub struct TR_GR {
    #[doc = "0x00 - Trigger control register"]
    pub tr_out_ctl: [self::tr_gr::TR_OUT_CTL; 128],
}
#[doc = r"Register block"]
#[doc = "Trigger group"]
pub mod tr_gr;
#[doc = r"Register block"]
#[repr(C)]
pub struct PPU_PR {
    #[doc = "0x00 - PPU region address 0 (slave structure)"]
    pub addr0: self::ppu_pr::ADDR0,
    #[doc = "0x04 - PPU region attributes 0 (slave structure)"]
    pub att0: self::ppu_pr::ATT0,
    _reserved2: [u8; 24usize],
    #[doc = "0x20 - PPU region address 1 (master structure)"]
    pub addr1: self::ppu_pr::ADDR1,
    #[doc = "0x24 - PPU region attributes 1 (master structure)"]
    pub att1: self::ppu_pr::ATT1,
}
#[doc = r"Register block"]
#[doc = "PPU structure with programmable address"]
pub mod ppu_pr;
#[doc = r"Register block"]
#[repr(C)]
pub struct PPU_GR {
    #[doc = "0x00 - PPU region address 0 (slave structure)"]
    pub addr0: self::ppu_gr::ADDR0,
    #[doc = "0x04 - PPU region attributes 0 (slave structure)"]
    pub att0: self::ppu_gr::ATT0,
    _reserved2: [u8; 24usize],
    #[doc = "0x20 - PPU region address 1 (master structure)"]
    pub addr1: self::ppu_gr::ADDR1,
    #[doc = "0x24 - PPU region attributes 1 (master structure)"]
    pub att1: self::ppu_gr::ATT1,
}
#[doc = r"Register block"]
#[doc = "PPU structure with fixed/constant address for a peripheral group"]
pub mod ppu_gr;
#[doc = "Divider command register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [div_cmd](div_cmd) module"]
pub type DIV_CMD = crate::Reg<u32, _DIV_CMD>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DIV_CMD;
#[doc = "`read()` method returns [div_cmd::R](div_cmd::R) reader structure"]
impl crate::Readable for DIV_CMD {}
#[doc = "`write(|w| ..)` method takes [div_cmd::W](div_cmd::W) writer structure"]
impl crate::Writable for DIV_CMD {}
#[doc = "Divider command register"]
pub mod div_cmd;
#[doc = "Divider control register (for 8.0 divider)\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [div_8_ctl](div_8_ctl) module"]
pub type DIV_8_CTL = crate::Reg<u32, _DIV_8_CTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DIV_8_CTL;
#[doc = "`read()` method returns [div_8_ctl::R](div_8_ctl::R) reader structure"]
impl crate::Readable for DIV_8_CTL {}
#[doc = "`write(|w| ..)` method takes [div_8_ctl::W](div_8_ctl::W) writer structure"]
impl crate::Writable for DIV_8_CTL {}
#[doc = "Divider control register (for 8.0 divider)"]
pub mod div_8_ctl;
#[doc = "Divider control register (for 16.0 divider)\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [div_16_ctl](div_16_ctl) module"]
pub type DIV_16_CTL = crate::Reg<u32, _DIV_16_CTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DIV_16_CTL;
#[doc = "`read()` method returns [div_16_ctl::R](div_16_ctl::R) reader structure"]
impl crate::Readable for DIV_16_CTL {}
#[doc = "`write(|w| ..)` method takes [div_16_ctl::W](div_16_ctl::W) writer structure"]
impl crate::Writable for DIV_16_CTL {}
#[doc = "Divider control register (for 16.0 divider)"]
pub mod div_16_ctl;
#[doc = "Divider control register (for 16.5 divider)\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [div_16_5_ctl](div_16_5_ctl) module"]
pub type DIV_16_5_CTL = crate::Reg<u32, _DIV_16_5_CTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DIV_16_5_CTL;
#[doc = "`read()` method returns [div_16_5_ctl::R](div_16_5_ctl::R) reader structure"]
impl crate::Readable for DIV_16_5_CTL {}
#[doc = "`write(|w| ..)` method takes [div_16_5_ctl::W](div_16_5_ctl::W) writer structure"]
impl crate::Writable for DIV_16_5_CTL {}
#[doc = "Divider control register (for 16.5 divider)"]
pub mod div_16_5_ctl;
#[doc = "Divider control register (for 24.5 divider)\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [div_24_5_ctl](div_24_5_ctl) module"]
pub type DIV_24_5_CTL = crate::Reg<u32, _DIV_24_5_CTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DIV_24_5_CTL;
#[doc = "`read()` method returns [div_24_5_ctl::R](div_24_5_ctl::R) reader structure"]
impl crate::Readable for DIV_24_5_CTL {}
#[doc = "`write(|w| ..)` method takes [div_24_5_ctl::W](div_24_5_ctl::W) writer structure"]
impl crate::Writable for DIV_24_5_CTL {}
#[doc = "Divider control register (for 24.5 divider)"]
pub mod div_24_5_ctl;
#[doc = "Clock control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [clock_ctl](clock_ctl) module"]
pub type CLOCK_CTL = crate::Reg<u32, _CLOCK_CTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CLOCK_CTL;
#[doc = "`read()` method returns [clock_ctl::R](clock_ctl::R) reader structure"]
impl crate::Readable for CLOCK_CTL {}
#[doc = "`write(|w| ..)` method takes [clock_ctl::W](clock_ctl::W) writer structure"]
impl crate::Writable for CLOCK_CTL {}
#[doc = "Clock control register"]
pub mod clock_ctl;
#[doc = "Trigger command register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [tr_cmd](tr_cmd) module"]
pub type TR_CMD = crate::Reg<u32, _TR_CMD>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _TR_CMD;
#[doc = "`read()` method returns [tr_cmd::R](tr_cmd::R) reader structure"]
impl crate::Readable for TR_CMD {}
#[doc = "`write(|w| ..)` method takes [tr_cmd::W](tr_cmd::W) writer structure"]
impl crate::Writable for TR_CMD {}
#[doc = "Trigger command register"]
pub mod tr_cmd;
#![allow(non_snake_case)]
use cgmath::{vec3, SquareMatrix, InnerSpace};
use crate::mesh::Vertex;
use crate::camera::Camera;
use crate::types::*;
use crate::{SCR_WIDTH, SCR_HEIGHT, DRAW_DISTANCE};

// Unit cube centred on the origin (corners at +/-0.5 on every axis).
// Normals and texture coordinates are zeroed: the box is only used as a
// wireframe overlay, not for shading.
pub static BOUNDING_BOX: [Vertex; 8] = [
    Vertex { Position: Vector3 { x: -0.5, y: -0.5, z: -0.5 }, Normal: Vector3 { x: 0.0, y: 0.0, z: 0.0 }, TexCoords: Vector2 { x: 0.0, y: 0.0 } },
    Vertex { Position: Vector3 { x: 0.5, y: -0.5, z: -0.5 }, Normal: Vector3 { x: 0.0, y: 0.0, z: 0.0 }, TexCoords: Vector2 { x: 0.0, y: 0.0 } },
    Vertex { Position: Vector3 { x: 0.5, y: 0.5, z: -0.5 }, Normal: Vector3 { x: 0.0, y: 0.0, z: 0.0 }, TexCoords: Vector2 { x: 0.0, y: 0.0 } },
    Vertex { Position: Vector3 { x: -0.5, y: 0.5, z: -0.5 }, Normal: Vector3 { x: 0.0, y: 0.0, z: 0.0 }, TexCoords: Vector2 { x: 0.0, y: 0.0 } },
    Vertex { Position: Vector3 { x: -0.5, y: -0.5, z: 0.5 }, Normal: Vector3 { x: 0.0, y: 0.0, z: 0.0 }, TexCoords: Vector2 { x: 0.0, y: 0.0 } },
    Vertex { Position: Vector3 { x: 0.5, y: -0.5, z: 0.5 }, Normal: Vector3 { x: 0.0, y: 0.0, z: 0.0 }, TexCoords: Vector2 { x: 0.0, y: 0.0 } },
    Vertex { Position: Vector3 { x: 0.5, y: 0.5, z: 0.5 }, Normal: Vector3 { x: 0.0, y: 0.0, z: 0.0 }, TexCoords: Vector2 { x: 0.0, y: 0.0 } },
    Vertex { Position: Vector3 { x: -0.5, y: 0.5, z: 0.5 }, Normal: Vector3 { x: 0.0, y: 0.0, z: 0.0 }, TexCoords: Vector2 { x: 0.0, y: 0.0 } }
];

// Indices into BOUNDING_BOX: first the four z = -0.5 corners, then the four
// z = 0.5 corners, then pairs joining matching corners of the two faces.
// NOTE(review): the exact interpretation depends on the primitive mode of
// the draw call (line loop vs. line list) — confirm at the call site.
pub static BOUNDING_BOX_INDICES: [u32; 16] = [
    0, 1, 2, 3,
    4, 5, 6, 7,
    0, 4,
    1, 5,
    2, 6,
    3, 7
];

// Maps window-space pixel coordinates to normalised device coordinates in
// [-1, 1], flipping y so that +y points up.
fn getNormalisedDeviceCoords(mouseX: f32, mouseY: f32) -> Vector2 {
    Vector2 { x: (mouseX*2.0 / SCR_WIDTH as f32) - 1.0, y: 1.0 - (mouseY*2.0 / SCR_HEIGHT as f32) }
}

// Un-projects clip-space coordinates into eye space. z is forced to -1.0
// (pointing down the camera's forward axis) and w to 0.0 so the result is a
// direction rather than a point. Panics if the projection matrix is singular.
fn toEyeCoords(clipCoords: Vector4, projectionMatrix: Matrix4) -> Vector4 {
    let invProjection = projectionMatrix.invert().unwrap();
    let transformedV = invProjection * clipCoords;
    Vector4{ x: transformedV.x, y: transformedV.y, z: -1.0, w: 0.0 }
}

// Transforms an eye-space direction into a normalised world-space direction
// via the inverse view matrix. Panics if the view matrix is singular.
fn toWorldCoords(eyeCoords: Vector4, viewMatrix: Matrix4) -> Vector3 {
    let invView = viewMatrix.invert().unwrap();
    let transformedV = invView * eyeCoords;
    let result = Vector3{ x: transformedV.x, y: transformedV.y, z: transformedV.z };
    result.normalize()
}

// Builds a mouse-picking ray from a cursor position: converts through
// NDC -> eye -> world space, then returns the segment from the camera
// position to DRAW_DISTANCE units along the resulting direction.
pub fn translateCoords(xpos: f32, ypos: f32, projectionMatrix: &Matrix4, cam: &Camera) -> (Vector3, Vector3) {
    let normalisedCoords = getNormalisedDeviceCoords(xpos, ypos);
    let clipCoords = Vector4 { x: normalisedCoords.x, y: normalisedCoords.y, z: -1.0, w: 1.0 };
    let eyeCoords = toEyeCoords(clipCoords, *projectionMatrix);
    let worldCoords = toWorldCoords(eyeCoords, cam.getViewMatrix());
    let scaledWorld = worldCoords * DRAW_DISTANCE;
    let start = Vector3 { x: cam.position.x, y: cam.position.y, z: cam.position.z };
    let end = Vector3 { x: cam.position.x + scaledWorld.x, y: cam.position.y + scaledWorld.y, z: cam.position.z + scaledWorld.z };
    (start, end)
}

// Axis-aligned bounding box (min corner, max corner; w fixed at 1.0) over a
// vertex slice.
// NOTE(review): an empty slice yields the degenerate (f32::MAX, f32::MIN)
// box — callers appear to always pass non-empty meshes; confirm.
pub fn computeBoundingBox(vertices: &[Vertex]) -> (Vector4, Vector4) {
    let mut min = Vector4 { x: f32::MAX, y: f32::MAX, z: f32::MAX, w: 1.0 };
    let mut max = Vector4 { x: f32::MIN, y: f32::MIN, z: f32::MIN, w: 1.0 };
    for v in vertices {
        if v.Position.x < min.x { min.x = v.Position.x };
        if v.Position.y < min.y { min.y = v.Position.y };
        if v.Position.z < min.z { min.z = v.Position.z };
        if v.Position.x > max.x { max.x = v.Position.x };
        if v.Position.y > max.y { max.y = v.Position.y };
        if v.Position.z > max.z { max.z = v.Position.z };
    }
    (min, max)
}

// Model matrix mapping the unit BOUNDING_BOX cube onto the given AABB:
// scale to the box's extents first, then translate to its centre.
pub fn computeBoundingBoxTransform(min: Vector4, max: Vector4) -> Matrix4 {
    let size = vec3(max.x-min.x, max.y-min.y, max.z-min.z);
    let center = vec3((min.x+max.x)/2.0, (min.y+max.y)/2.0, (min.z+max.z)/2.0);
    Matrix4::from_translation(center) * Matrix4::from_nonuniform_scale(size[0], size[1], size[2])
}
/*
 * YNAB API Endpoints
 *
 * Our API uses a REST based design, leverages the JSON data format, and relies upon HTTPS for transport. We respond with meaningful HTTP response codes and if an error occurs, we include error details in the response body. API Documentation is at https://api.youneedabudget.com
 *
 * The version of the OpenAPI document: 1.0.0
 *
 * Generated by: https://openapi-generator.tech
 */

// NOTE(review): machine-generated model (openapi-generator) — prefer
// regenerating from the OpenAPI spec over hand edits.

#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Account {
    /// Account id.
    #[serde(rename = "id")]
    pub id: String,
    /// Account name.
    #[serde(rename = "name")]
    pub name: String,
    /// The type of account. Note: payPal, merchantAccount, investmentAccount, and mortgage types have been deprecated and will be removed in the future.
    #[serde(rename = "type")]
    pub _type: Type,
    /// Whether this account is on budget or not
    #[serde(rename = "on_budget")]
    pub on_budget: bool,
    /// Whether this account is closed or not
    #[serde(rename = "closed")]
    pub closed: bool,
    /// Optional free-form note; omitted from JSON when absent.
    #[serde(rename = "note", skip_serializing_if = "Option::is_none")]
    pub note: Option<String>,
    /// The current balance of the account in milliunits format
    #[serde(rename = "balance")]
    pub balance: i64,
    /// The current cleared balance of the account in milliunits format
    #[serde(rename = "cleared_balance")]
    pub cleared_balance: i64,
    /// The current uncleared balance of the account in milliunits format
    #[serde(rename = "uncleared_balance")]
    pub uncleared_balance: i64,
    /// The payee id which should be used when transferring to this account
    #[serde(rename = "transfer_payee_id")]
    pub transfer_payee_id: String,
    /// Whether or not the account has been deleted. Deleted accounts will only be included in delta requests.
    #[serde(rename = "deleted")]
    pub deleted: bool,
}

impl Account {
    /// Constructs an `Account` with `note` unset (`None`); all other fields
    /// are required.
    pub fn new(id: String, name: String, _type: Type, on_budget: bool, closed: bool, balance: i64, cleared_balance: i64, uncleared_balance: i64, transfer_payee_id: String, deleted: bool) -> Account {
        Account {
            id,
            name,
            _type,
            on_budget,
            closed,
            note: None,
            balance,
            cleared_balance,
            uncleared_balance,
            transfer_payee_id,
            deleted,
        }
    }
}

/// The type of account. Note: payPal, merchantAccount, investmentAccount, and mortgage types have been deprecated and will be removed in the future.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub enum Type {
    #[serde(rename = "checking")]
    Checking,
    #[serde(rename = "savings")]
    Savings,
    #[serde(rename = "cash")]
    Cash,
    #[serde(rename = "creditCard")]
    CreditCard,
    #[serde(rename = "lineOfCredit")]
    LineOfCredit,
    #[serde(rename = "otherAsset")]
    OtherAsset,
    #[serde(rename = "otherLiability")]
    OtherLiability,
    // The remaining variants are deprecated upstream but kept so existing
    // data still deserializes.
    #[serde(rename = "payPal")]
    PayPal,
    #[serde(rename = "merchantAccount")]
    MerchantAccount,
    #[serde(rename = "investmentAccount")]
    InvestmentAccount,
    #[serde(rename = "mortgage")]
    Mortgage,
}
use std::sync::{atomic::AtomicBool, Arc};

use lavalink_rs::LavalinkClient;
use poise::serenity_prelude::TypeMapKey;
use songbird::Songbird;
use sqlx::PgPool;
use tokio::time::Instant;

use crate::types::{IdleHashMap, LastMessageHashMap};

/// TypeMap key for the shared Postgres connection pool.
pub struct PgPoolContainer;
impl TypeMapKey for PgPoolContainer {
    type Value = PgPool;
}

/// TypeMap key recording when the process started, for uptime reporting.
pub struct Uptime;
impl TypeMapKey for Uptime {
    type Value = Instant;
}

/// TypeMap key for the per-guild last-message map.
pub struct LastMessageMap;
impl TypeMapKey for LastMessageMap {
    type Value = LastMessageHashMap;
}

/// TypeMap key for the idle-guild tracking map.
pub struct IdleGuildMap;
impl TypeMapKey for IdleGuildMap {
    type Value = IdleHashMap;
}

/// Shared state handed to every command invocation.
pub struct Data {
    pub songbird: Arc<Songbird>,
    pub lavalink: LavalinkClient,
    // Set once the background services have been started.
    pub is_services_running: AtomicBool,
}

impl Data {
    /// Creates the shared state; `is_services_running` starts as `false`
    /// (the `AtomicBool` default).
    pub fn new(songbird: Arc<Songbird>, lavalink: LavalinkClient) -> Self {
        Self {
            songbird,
            lavalink,
            is_services_running: Default::default(),
        }
    }
}
use conrod_core::{self, widget, Colorable, Labelable, Positionable, Widget, Sizeable, color};
use custom_widget::item_history;
use conrod_keypad::custom_widget::text_edit::TextEdit;
use conrod_keypad::english;
use custom_widget::Message;
use std::sync::mpsc;

/// The type upon which we'll implement the `Widget` trait.
#[derive(WidgetCommon)]
pub struct ChatView<'a> {
    /// An object that handles some of the dirty work of rendering a GUI. We don't
    /// really have to worry about it.
    #[conrod(common_builder)]
    common: widget::CommonBuilder,
    // Message history rendered in the scrollable list.
    pub lists: &'a mut Vec<Message>,
    // Backing buffer for the text-entry box; cleared after a message is sent.
    pub text_edit: &'a mut String,
    pub master_id: widget::Id,
    // Key layouts for the on-screen keypad (two key sets plus a meta key).
    pub english_tuple: &'a (Vec<english::KeyButton>, Vec<english::KeyButton>, english::KeyButton),
    /// See the Style struct below.
    style: Style,
    // Channel on which composed messages are sent out of the widget.
    pub action_tx: mpsc::Sender<String>,
    pub image_id: Option<conrod_core::image::Id>,
    pub name: &'a String,
    // Formats (name, text) into the string pushed through `action_tx`.
    pub closure: Box<fn(&str, &str) -> String>,
    /// Whether the button is currently enabled, i.e. whether it responds to
    /// user input.
    enabled: bool,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, WidgetStyle)]
pub struct Style {
    #[conrod(default="[200.0,30.0]")]
    pub item_rect: Option<[f64; 2]>, //w,h, pad bottom
}

widget_ids! {
    pub struct Ids {
        chat_canvas,
        message_panel,
        history_list,
        text_edit_body,
        text_edit_panel,
        text_edit_panel_scrollbar,
        text_edit,
        text_edit_button_panel,
        text_edit_button,
    }
}

/// Represents the unique, cached state for our ChatView widget.
pub struct State {
    pub ids: Ids,
}

impl<'a> ChatView<'a> {
    /// Create a button context to be built upon.
    pub fn new(lists: &'a mut Vec<Message>,
               te: &'a mut String,
               master_id: widget::Id,
               english_tuple: &'a (Vec<english::KeyButton>, Vec<english::KeyButton>, english::KeyButton),
               image_id: Option<conrod_core::image::Id>,
               name: &'a String,
               action_tx: mpsc::Sender<String>,
               closure: Box<fn(&str, &str) -> String>)
               -> Self {
        ChatView {
            lists: lists,
            common: widget::CommonBuilder::default(),
            text_edit: te,
            style: Style::default(),
            master_id: master_id,
            english_tuple: english_tuple,
            image_id: image_id,
            name: name,
            action_tx: action_tx,
            closure: closure,
            enabled: true,
        }
    }

    /// If true, will allow user inputs. If false, will disallow user inputs. Like
    /// other Conrod configs, this returns self for chainability. Allow dead code
    /// because we never call this in the example.
    #[allow(dead_code)]
    pub fn enabled(mut self, flag: bool) -> Self {
        self.enabled = flag;
        self
    }
}

/// A custom Conrod widget must implement the Widget trait. See the **Widget** trait
/// documentation for more details.
impl<'a> Widget for ChatView<'a> {
    /// The State struct that we defined above.
    type State = State;
    /// The Style struct that we defined using the `WidgetStyle` derive.
    type Style = Style;
    /// The event produced by instantiating the widget.
    ///
    /// The keypad boolean reported by the embedded `TextEdit`.
    type Event = bool;

    fn init_state(&self, id_gen: widget::id::Generator) -> Self::State {
        State { ids: Ids::new(id_gen) }
    }

    fn style(&self) -> Self::Style {
        self.style.clone()
    }

    /// Update the state of the button by handling any input that has occurred since the last
    /// update.
    fn update(self, args: widget::UpdateArgs<Self>) -> bool {
        let widget::UpdateArgs { id, state, ui, style, .. } = args;
        // Finally, we'll describe how we want our widget drawn by simply instantiating the
        // necessary primitive graphics widgets.
        //
        // Layout: a message panel on top, and a text-entry strip (20% of the
        // height) below it, split 70/30 between the edit area and the
        // send-button panel.
        let can = ui.rect_of(id).unwrap();
        let w_can = can.w();
        let h_can = can.h();
        widget::Canvas::new()
            .flow_down(&[(state.ids.message_panel,
                          widget::Canvas::new().color(color::GREEN).pad_bottom(20.0)),
                         (state.ids.text_edit_body,
                          widget::Canvas::new()
                              .length(h_can * 0.2)
                              .flow_right(&[(state.ids.text_edit_panel,
                                             widget::Canvas::new()
                                                 .scroll_kids_vertically()
                                                 .color(color::DARK_CHARCOAL)
                                                 .length(w_can * 0.7)),
                                            (state.ids.text_edit_button_panel,
                                             widget::Canvas::new()
                                                 .color(color::DARK_CHARCOAL))]))])
            .middle_of(id)
            .set(state.ids.chat_canvas, ui);
        let k = self.text_edit;
        // Keypad-aware text editor; any edits it produces are written back
        // into the shared text buffer.
        let (editz, keypad_bool) = TextEdit::new(k, self.master_id, self.english_tuple)
            .padded_w_of(state.ids.text_edit_panel, 20.0)
            .mid_top_of(state.ids.text_edit_panel)
            .center_justify()
            .line_spacing(2.5)
            .restrict_to_height(false) // Let the height grow infinitely and scroll.
            .set(state.ids.text_edit, ui);
        for edit in editz {
            *k = edit;
        }
        let button_panel = ui.rect_of(state.ids.text_edit_button_panel).unwrap();
        let w_button_panel = button_panel.w();
        let h_button_panel = button_panel.h();
        // "Enter" button: on click, format the message via `closure`, send it
        // down the channel, and clear the input buffer.
        if widget::Button::new()
               .color(color::GREY)
               .padded_w_of(state.ids.text_edit_button_panel, 0.2 * w_button_panel)
               .padded_h_of(state.ids.text_edit_button_panel, 0.2 * h_button_panel)
               .label("Enter")
               .middle_of(state.ids.text_edit_button_panel)
               .set(state.ids.text_edit_button, ui)
               .was_clicked() {
            let g = (*self.closure)(self.name, k);
            self.action_tx.send(g).unwrap();
            *k = "".to_owned();
        };
        widget::Scrollbar::y_axis(state.ids.text_edit_panel)
            .auto_hide(true)
            .set(state.ids.text_edit_panel_scrollbar, ui);
        // Render the message history as a scrollable list, one
        // `ItemHistory` entry per message.
        let num = self.lists.len();
        let (mut items, scrollbar) = widget::List::flow_down(num)
            .scrollbar_on_top()
            .middle_of(state.ids.message_panel)
            .wh_of(state.ids.message_panel)
            .set(state.ids.history_list, ui);
        if let Some(s) = scrollbar {
            s.set(ui)
        }
        let mut it_j = self.lists.iter();
        while let (Some(a), Some(item)) = (it_j.next(), items.next(ui)) {
            let cb = item_history::ItemHistory::new(&a)
                .w_h(style.item_rect(&ui.theme)[0], style.item_rect(&ui.theme)[1]);
            item.set(cb, ui);
        }
        keypad_bool
    }
}
// NOTE: svd2rust-generated register API for the temperature-sensor `OR`
// (option) register — 32 general-purpose option bits TS_OP0..TS_OP31.
// Do not hand-edit; regenerate from the device SVD instead.
#[doc = "Register `OR` reader"]
pub type R = crate::R<OR_SPEC>;
#[doc = "Register `OR` writer"]
pub type W = crate::W<OR_SPEC>;
#[doc = "Field `TS_OP0` reader - general purpose option bits"]
pub type TS_OP0_R = crate::BitReader;
#[doc = "Field `TS_OP0` writer - general purpose option bits"]
pub type TS_OP0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP1` reader - general purpose option bits"]
pub type TS_OP1_R = crate::BitReader;
#[doc = "Field `TS_OP1` writer - general purpose option bits"]
pub type TS_OP1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP2` reader - general purpose option bits"]
pub type TS_OP2_R = crate::BitReader;
#[doc = "Field `TS_OP2` writer - general purpose option bits"]
pub type TS_OP2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP3` reader - general purpose option bits"]
pub type TS_OP3_R = crate::BitReader;
#[doc = "Field `TS_OP3` writer - general purpose option bits"]
pub type TS_OP3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP4` reader - general purpose option bits"]
pub type TS_OP4_R = crate::BitReader;
#[doc = "Field `TS_OP4` writer - general purpose option bits"]
pub type TS_OP4_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP5` reader - general purpose option bits"]
pub type TS_OP5_R = crate::BitReader;
#[doc = "Field `TS_OP5` writer - general purpose option bits"]
pub type TS_OP5_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP6` reader - general purpose option bits"]
pub type TS_OP6_R = crate::BitReader;
#[doc = "Field `TS_OP6` writer - general purpose option bits"]
pub type TS_OP6_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP7` reader - general purpose option bits"]
pub type TS_OP7_R = crate::BitReader;
#[doc = "Field `TS_OP7` writer - general purpose option bits"]
pub type TS_OP7_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP8` reader - general purpose option bits"]
pub type TS_OP8_R = crate::BitReader;
#[doc = "Field `TS_OP8` writer - general purpose option bits"]
pub type TS_OP8_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP9` reader - general purpose option bits"]
pub type TS_OP9_R = crate::BitReader;
#[doc = "Field `TS_OP9` writer - general purpose option bits"]
pub type TS_OP9_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP10` reader - general purpose option bits"]
pub type TS_OP10_R = crate::BitReader;
#[doc = "Field `TS_OP10` writer - general purpose option bits"]
pub type TS_OP10_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP11` reader - general purpose option bits"]
pub type TS_OP11_R = crate::BitReader;
#[doc = "Field `TS_OP11` writer - general purpose option bits"]
pub type TS_OP11_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP12` reader - general purpose option bits"]
pub type TS_OP12_R = crate::BitReader;
#[doc = "Field `TS_OP12` writer - general purpose option bits"]
pub type TS_OP12_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP13` reader - general purpose option bits"]
pub type TS_OP13_R = crate::BitReader;
#[doc = "Field `TS_OP13` writer - general purpose option bits"]
pub type TS_OP13_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP14` reader - general purpose option bits"]
pub type TS_OP14_R = crate::BitReader;
#[doc = "Field `TS_OP14` writer - general purpose option bits"]
pub type TS_OP14_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP15` reader - general purpose option bits"]
pub type TS_OP15_R = crate::BitReader;
#[doc = "Field `TS_OP15` writer - general purpose option bits"]
pub type TS_OP15_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP16` reader - general purpose option bits"]
pub type TS_OP16_R = crate::BitReader;
#[doc = "Field `TS_OP16` writer - general purpose option bits"]
pub type TS_OP16_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP17` reader - general purpose option bits"]
pub type TS_OP17_R = crate::BitReader;
#[doc = "Field `TS_OP17` writer - general purpose option bits"]
pub type TS_OP17_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP18` reader - general purpose option bits"]
pub type TS_OP18_R = crate::BitReader;
#[doc = "Field `TS_OP18` writer - general purpose option bits"]
pub type TS_OP18_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP19` reader - general purpose option bits"]
pub type TS_OP19_R = crate::BitReader;
#[doc = "Field `TS_OP19` writer - general purpose option bits"]
pub type TS_OP19_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP20` reader - general purpose option bits"]
pub type TS_OP20_R = crate::BitReader;
#[doc = "Field `TS_OP20` writer - general purpose option bits"]
pub type TS_OP20_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP21` reader - general purpose option bits"]
pub type TS_OP21_R = crate::BitReader;
#[doc = "Field `TS_OP21` writer - general purpose option bits"]
pub type TS_OP21_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP22` reader - general purpose option bits"]
pub type TS_OP22_R = crate::BitReader;
#[doc = "Field `TS_OP22` writer - general purpose option bits"]
pub type TS_OP22_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP23` reader - general purpose option bits"]
pub type TS_OP23_R = crate::BitReader;
#[doc = "Field `TS_OP23` writer - general purpose option bits"]
pub type TS_OP23_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP24` reader - general purpose option bits"]
pub type TS_OP24_R = crate::BitReader;
#[doc = "Field `TS_OP24` writer - general purpose option bits"]
pub type TS_OP24_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP25` reader - general purpose option bits"]
pub type TS_OP25_R = crate::BitReader;
#[doc = "Field `TS_OP25` writer - general purpose option bits"]
pub type TS_OP25_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP26` reader - general purpose option bits"]
pub type TS_OP26_R = crate::BitReader;
#[doc = "Field `TS_OP26` writer - general purpose option bits"]
pub type TS_OP26_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP27` reader - general purpose option bits"]
pub type TS_OP27_R = crate::BitReader;
#[doc = "Field `TS_OP27` writer - general purpose option bits"]
pub type TS_OP27_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP28` reader - general purpose option bits"]
pub type TS_OP28_R = crate::BitReader;
#[doc = "Field `TS_OP28` writer - general purpose option bits"]
pub type TS_OP28_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP29` reader - general purpose option bits"]
pub type TS_OP29_R = crate::BitReader;
#[doc = "Field `TS_OP29` writer - general purpose option bits"]
pub type TS_OP29_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP30` reader - general purpose option bits"]
pub type TS_OP30_R = crate::BitReader;
#[doc = "Field `TS_OP30` writer - general purpose option bits"]
pub type TS_OP30_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TS_OP31` reader - general purpose option bits"]
pub type TS_OP31_R = crate::BitReader;
#[doc = "Field `TS_OP31` writer - general purpose option bits"]
pub type TS_OP31_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: one getter per option bit, each extracting a single bit
// of the cached register value.
impl R {
    #[doc = "Bit 0 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op0(&self) -> TS_OP0_R {
        TS_OP0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op1(&self) -> TS_OP1_R {
        TS_OP1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op2(&self) -> TS_OP2_R {
        TS_OP2_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op3(&self) -> TS_OP3_R {
        TS_OP3_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op4(&self) -> TS_OP4_R {
        TS_OP4_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op5(&self) -> TS_OP5_R {
        TS_OP5_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op6(&self) -> TS_OP6_R {
        TS_OP6_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op7(&self) -> TS_OP7_R {
        TS_OP7_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op8(&self) -> TS_OP8_R {
        TS_OP8_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op9(&self) -> TS_OP9_R {
        TS_OP9_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op10(&self) -> TS_OP10_R {
        TS_OP10_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op11(&self) -> TS_OP11_R {
        TS_OP11_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op12(&self) -> TS_OP12_R {
        TS_OP12_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op13(&self) -> TS_OP13_R {
        TS_OP13_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op14(&self) -> TS_OP14_R {
        TS_OP14_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op15(&self) -> TS_OP15_R {
        TS_OP15_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op16(&self) -> TS_OP16_R {
        TS_OP16_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op17(&self) -> TS_OP17_R {
        TS_OP17_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op18(&self) -> TS_OP18_R {
        TS_OP18_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op19(&self) -> TS_OP19_R {
        TS_OP19_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op20(&self) -> TS_OP20_R {
        TS_OP20_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op21(&self) -> TS_OP21_R {
        TS_OP21_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op22(&self) -> TS_OP22_R {
        TS_OP22_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op23(&self) -> TS_OP23_R {
        TS_OP23_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op24(&self) -> TS_OP24_R {
        TS_OP24_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op25(&self) -> TS_OP25_R {
        TS_OP25_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op26(&self) -> TS_OP26_R {
        TS_OP26_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op27(&self) -> TS_OP27_R {
        TS_OP27_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op28(&self) -> TS_OP28_R {
        TS_OP28_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op29(&self) -> TS_OP29_R {
        TS_OP29_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op30(&self) -> TS_OP30_R {
        TS_OP30_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - general purpose option bits"]
    #[inline(always)]
    pub fn ts_op31(&self) -> TS_OP31_R {
        TS_OP31_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Write accessors: each returns a bit-writer proxy positioned at the
// field's bit offset (the const generic parameter).
impl W {
    #[doc = "Bit 0 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op0(&mut self) -> TS_OP0_W<OR_SPEC, 0> {
        TS_OP0_W::new(self)
    }
    #[doc = "Bit 1 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op1(&mut self) -> TS_OP1_W<OR_SPEC, 1> {
        TS_OP1_W::new(self)
    }
    #[doc = "Bit 2 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op2(&mut self) -> TS_OP2_W<OR_SPEC, 2> {
        TS_OP2_W::new(self)
    }
    #[doc = "Bit 3 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op3(&mut self) -> TS_OP3_W<OR_SPEC, 3> {
        TS_OP3_W::new(self)
    }
    #[doc = "Bit 4 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op4(&mut self) -> TS_OP4_W<OR_SPEC, 4> {
        TS_OP4_W::new(self)
    }
    #[doc = "Bit 5 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op5(&mut self) -> TS_OP5_W<OR_SPEC, 5> {
        TS_OP5_W::new(self)
    }
    #[doc = "Bit 6 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op6(&mut self) -> TS_OP6_W<OR_SPEC, 6> {
        TS_OP6_W::new(self)
    }
    #[doc = "Bit 7 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op7(&mut self) -> TS_OP7_W<OR_SPEC, 7> {
        TS_OP7_W::new(self)
    }
    #[doc = "Bit 8 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op8(&mut self) -> TS_OP8_W<OR_SPEC, 8> {
        TS_OP8_W::new(self)
    }
    #[doc = "Bit 9 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op9(&mut self) -> TS_OP9_W<OR_SPEC, 9> {
        TS_OP9_W::new(self)
    }
    #[doc = "Bit 10 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op10(&mut self) -> TS_OP10_W<OR_SPEC, 10> {
        TS_OP10_W::new(self)
    }
    #[doc = "Bit 11 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op11(&mut self) -> TS_OP11_W<OR_SPEC, 11> {
        TS_OP11_W::new(self)
    }
    #[doc = "Bit 12 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op12(&mut self) -> TS_OP12_W<OR_SPEC, 12> {
        TS_OP12_W::new(self)
    }
    #[doc = "Bit 13 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op13(&mut self) -> TS_OP13_W<OR_SPEC, 13> {
        TS_OP13_W::new(self)
    }
    #[doc = "Bit 14 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op14(&mut self) -> TS_OP14_W<OR_SPEC, 14> {
        TS_OP14_W::new(self)
    }
    #[doc = "Bit 15 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op15(&mut self) -> TS_OP15_W<OR_SPEC, 15> {
        TS_OP15_W::new(self)
    }
    #[doc = "Bit 16 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op16(&mut self) -> TS_OP16_W<OR_SPEC, 16> {
        TS_OP16_W::new(self)
    }
    #[doc = "Bit 17 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op17(&mut self) -> TS_OP17_W<OR_SPEC, 17> {
        TS_OP17_W::new(self)
    }
    #[doc = "Bit 18 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op18(&mut self) -> TS_OP18_W<OR_SPEC, 18> {
        TS_OP18_W::new(self)
    }
    #[doc = "Bit 19 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op19(&mut self) -> TS_OP19_W<OR_SPEC, 19> {
        TS_OP19_W::new(self)
    }
    #[doc = "Bit 20 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op20(&mut self) -> TS_OP20_W<OR_SPEC, 20> {
        TS_OP20_W::new(self)
    }
    #[doc = "Bit 21 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op21(&mut self) -> TS_OP21_W<OR_SPEC, 21> {
        TS_OP21_W::new(self)
    }
    #[doc = "Bit 22 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op22(&mut self) -> TS_OP22_W<OR_SPEC, 22> {
        TS_OP22_W::new(self)
    }
    #[doc = "Bit 23 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op23(&mut self) -> TS_OP23_W<OR_SPEC, 23> {
        TS_OP23_W::new(self)
    }
    #[doc = "Bit 24 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op24(&mut self) -> TS_OP24_W<OR_SPEC, 24> {
        TS_OP24_W::new(self)
    }
    #[doc = "Bit 25 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op25(&mut self) -> TS_OP25_W<OR_SPEC, 25> {
        TS_OP25_W::new(self)
    }
    #[doc = "Bit 26 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op26(&mut self) -> TS_OP26_W<OR_SPEC, 26> {
        TS_OP26_W::new(self)
    }
    #[doc = "Bit 27 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op27(&mut self) -> TS_OP27_W<OR_SPEC, 27> {
        TS_OP27_W::new(self)
    }
    #[doc = "Bit 28 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op28(&mut self) -> TS_OP28_W<OR_SPEC, 28> {
        TS_OP28_W::new(self)
    }
    #[doc = "Bit 29 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op29(&mut self) -> TS_OP29_W<OR_SPEC, 29> {
        TS_OP29_W::new(self)
    }
    #[doc = "Bit 30 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op30(&mut self) -> TS_OP30_W<OR_SPEC, 30> {
        TS_OP30_W::new(self)
    }
    #[doc = "Bit 31 - general purpose option bits"]
    #[inline(always)]
    #[must_use]
    pub fn ts_op31(&mut self) -> TS_OP31_W<OR_SPEC, 31> {
        TS_OP31_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Temperature sensor option register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`or::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`or::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct OR_SPEC;
impl crate::RegisterSpec for OR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`or::R`](R) reader structure"]
impl crate::Readable for OR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`or::W`](W) writer structure"]
impl crate::Writable for OR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets OR to value 0"]
impl crate::Resettable for OR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
//! Link: https://adventofcode.com/2019/day/16 //! Day 16: Flawed Frequency Transmission //! //! You're 3/4ths of the way through the gas giants. //! Not only do roundtrip signals to Earth take five hours, //! but the signal quality is quite bad as well. //! You can clean up the signal with the Flawed Frequency Transmission algorithm, or FFT. //! //! As input, FFT takes a list of numbers. //! In the signal you received (your puzzle input), //! each number is a single digit: //! data like 15243 represents the sequence 1, 5, 2, 4, 3. //! //! FFT operates in repeated phases. //! In each phase, a new list is constructed with the same length as the input list. //! This new list is also used as the input for the next phase. #[aoc_generator(day16)] fn input_generator(s: &str) -> Vec<isize> { s.trim() .chars() .map(|c| c.to_digit(10).unwrap() as isize) .collect() } // Each element in the new list is built by multiplying every value in // the input list by a value in a repeating pattern and then adding up the results. // // While each element in the output array uses all of the same input array elements, // the actual repeating pattern to use depends on which output element is being calculated. // The base pattern is 0, 1, 0, -1. // Then, repeat each value in the pattern a number of times equal to the position // in the output list being considered. // Repeat once for the first element, twice for the second element, // three times for the third element, and so on. // // When applying the pattern, skip the very first value exactly once. // // After using this process to calculate each element of the output list, // the phase is complete, and the output list of this phase is used as // the new input list for the next phase, if any. // // After 100 phases of FFT, what are the first eight digits in the final output list? // // Your puzzle answer was 94935919. 
#[aoc(day16, part1, Base)] fn solve_part1_base(v: &[isize]) -> String { let mut v = v.to_vec(); for _ in 0..100 { let mut nv = Vec::new(); for n in 0..v.len() { let sum = v.iter() .zip([0, 1, 0, -1].iter() .flat_map(|x| std::iter::once(x).cycle().take(n + 1)) .cycle() .skip(1)) .map(|(a, b)| a * b) .sum::<isize>(); nv.push((sum % 10).abs()); } v = nv; } v[0..8].iter().flat_map(|i| vec![(*i+48) as u8 as char]).collect::<String>() } // Now that your FFT is working, you can decode the real signal. // // The real signal is your puzzle input repeated 10000 times. // Treat this new signal as a single input list. // Patterns are still calculated as before, and 100 phases of FFT are still applied. // // The first seven digits of your initial input signal also represent the message offset. // The message offset is the location of the eight-digit message in the final output list. // Specifically, the message offset indicates the number of digits to skip // before reading the eight-digit message. // // After repeating your input signal 10000 times and running 100 phases of FFT, // what is the eight-digit message embedded in the final output list? // // Your puzzle answer was 24158285. #[aoc(day16, part2, Base)] fn solve_part2_base(v: &[isize]) -> String { let mut v = v.to_vec(); let index = v.iter() .take(7) .fold(0, |acc, &x| acc * 10 + x as usize); let len = v.len(); v = v.into_iter() .cycle() .take(len * 10_000) .skip(index) .collect(); for _ in 0..100 { let mut nv = Vec::new(); let mut sum = v.iter().sum::<isize>(); for n in v.iter() { nv.push(sum % 10); sum -= n; } v = nv; } v[0..8].iter().flat_map(|i| vec![(*i+48) as u8 as char]).collect::<String>() }
use super::*; use std::path::Path; #[derive(Debug, Deserialize)] pub struct KkConf { groups: Vec<Group>, participants: Vec<Participants>, } impl KkConf { pub fn build<P: AsRef<Path>>(path: P) -> KkConf { toml::from_str(&file_utils::read_from_file(path)).unwrap() } pub fn new(participants: Vec<Participants>) -> Result<KkConf, String> { let first_group = participants[0].group; if participants.iter().all(|participant| participant.group == first_group) { Err("All participants part of the same group".to_owned()) } else { Ok(KkConf { groups: vec![], participants }) } } /// Returns the participants read from the configuration pub fn get_participants(&self) -> Vec<Participants> { self.participants.clone() } /// Returns the groups read from the configuration pub fn get_groups(&self) -> Vec<Group> { self.groups.clone() } } #[derive(Debug, Deserialize, Clone)] pub struct Participants { name: String, group: u32, } impl Participants { pub fn new(name: String, group: u32) -> Participants { Participants { name: name, group: group, } } pub fn get_name(&self) -> String { self.name.clone() } pub fn get_group(&self) -> u32 { self.group } } #[derive(Debug, Clone, Deserialize)] pub struct Group { pub id: u32, pub email: Option<String>, } impl Group { pub fn get_id(&self) -> u32 { self.id } pub fn get_email(&self) -> Option<String> { self.email.clone() } } #[cfg(test)] mod tests { #[test] fn test_build() { let result = super::conf::KkConf::build("tests/resources/full.toml"); let groups = result.get_groups(); assert_eq!(1, groups[0].get_id()); assert!(groups[0].get_email().unwrap().eq("test1@hotmail.com")); assert_eq!(2, groups[1].get_id()); assert!(groups[1].get_email().unwrap().eq("test2@gmail.com")); assert_eq!(3, groups[2].get_id()); assert!(groups[2].get_email().unwrap().eq("test3@yahoo.com")); assert_eq!(4, groups[3].get_id()); assert!(groups[3].get_email().unwrap().eq("test4@outlook.com")); let participants = result.get_participants(); assert_eq!(1, participants[0].get_group()); 
assert!(participants[0].get_name().eq("Dylan")); assert_eq!(1, participants[1].get_group()); assert!(participants[1].get_name().eq("Jordan")); assert_eq!(1, participants[2].get_group()); assert!(participants[2].get_name().eq("Luke")); assert_eq!(2, participants[3].get_group()); assert!(participants[3].get_name().eq("Olivia")); assert_eq!(2, participants[4].get_group()); assert!(participants[4].get_name().eq("Alec")); assert_eq!(2, participants[5].get_group()); assert!(participants[5].get_name().eq("Dean")); assert_eq!(3, participants[6].get_group()); assert!(participants[6].get_name().eq("Alessia")); assert_eq!(3, participants[7].get_group()); assert!(participants[7].get_name().eq("Sienna")); assert_eq!(4, participants[8].get_group()); assert!(participants[8].get_name().eq("Isabella")); assert_eq!(4, participants[9].get_group()); assert!(participants[9].get_name().eq("Max")); assert_eq!(4, participants[10].get_group()); assert!(participants[10].get_name().eq("Luca")); } }
// NOTE: legacy pre-1.0 Rust (circa 0.8): `extern mod`, owned `~str`/`~[T]`
// types, `do`-closure syntax, `move_iter`, and the `bytes!` macro. This will
// not compile on any modern rustc; kept verbatim as historical test code.
extern mod rfc4648;

use std::vec;

use rfc4648::base16;
use rfc4648::base32;
use rfc4648::base64;

// Helper: converts both string vectors to byte vectors, applies `cb` to each
// source, and asserts the results equal the expected byte vectors.
fn t(source: ~[~str], expect: ~[~str], cb: &fn(&[u8]) -> ~[u8]) {
    let mut source_b = vec::with_capacity(source.len());
    let mut expect_b = vec::with_capacity(expect.len());
    for s in source.move_iter() {
        source_b.push(s.into_bytes());
    }
    for e in expect.move_iter() {
        expect_b.push(e.into_bytes());
    }
    let actual_b = source_b.map(|e| cb(*e));
    assert_eq!(expect_b, actual_b);
}

// RFC 4648 section 10 test vectors ("", "f", "fo", ... "foobar").
#[test]
fn test_base64_standard_encode() {
    let source = ~[~"", ~"f", ~"fo", ~"foo", ~"foob", ~"fooba", ~"foobar"];
    let expect = ~[~"", ~"Zg==", ~"Zm8=", ~"Zm9v", ~"Zm9vYg==", ~"Zm9vYmE=", ~"Zm9vYmFy"];
    do t(source, expect) |src| { base64::Standard.encode(src) }
}

// URL-safe alphabet substitutes '-' and '_' for '+' and '/'.
#[test]
fn test_base64_urlsafe_encode() {
    let source = ~[~"", ~"f", ~"fo", ~"fo>", ~"foob", ~"fooba", ~"fo?ba?"];
    let expect = ~[~"", ~"Zg==", ~"Zm8=", ~"Zm8-", ~"Zm9vYg==", ~"Zm9vYmE=", ~"Zm8_YmE_"];
    do t(source, expect) |src| { base64::UrlSafe.encode(src) }
}

#[test]
fn test_base64_standard_decode() {
    let source = ~[~"", ~"Zg==", ~"Zm8=", ~"Zm8+", ~"Zm9vYg==", ~"Zm9vYmE=", ~"Zm8/YmE/"];
    let expect = ~[~"", ~"f", ~"fo", ~"fo>", ~"foob", ~"fooba", ~"fo?ba?"];
    do t(source, expect) |src| { base64::Standard.decode(src) }
}

#[test]
fn test_base64_urlsafe_decode() {
    let source = ~[~"", ~"Zg==", ~"Zm8=", ~"Zm8-", ~"Zm9vYg==", ~"Zm9vYmE=", ~"Zm8_YmE_"];
    let expect = ~[~"", ~"f", ~"fo", ~"fo>", ~"foob", ~"fooba", ~"fo?ba?"];
    do t(source, expect) |src| { base64::UrlSafe.decode(src) }
}

#[test]
fn test_base32_standard_encode() {
    let source = ~[~"", ~"f", ~"fo", ~"foo", ~"foob", ~"fooba", ~"foobar"];
    let expect = ~[~"", ~"MY======", ~"MZXQ====", ~"MZXW6===", ~"MZXW6YQ=",
                   ~"MZXW6YTB", ~"MZXW6YTBOI======"];
    do t(source, expect) |src| { base32::Standard.encode(src) }
}

#[test]
fn test_base32_hex_encode() {
    let source = ~[~"", ~"f", ~"fo", ~"foo", ~"foob", ~"fooba", ~"foobar"];
    let expect = ~[~"", ~"CO======", ~"CPNG====", ~"CPNMU===", ~"CPNMUOG=",
                   ~"CPNMUOJ1", ~"CPNMUOJ1E8======"];
    do t(source, expect) |src| { base32::Hex.encode(src) }
}

#[test]
fn test_base32_standard_decode() {
    let source = ~[~"", ~"MY======", ~"MZXQ====", ~"MZXW6===", ~"MZXW6YQ=",
                   ~"MZXW6YTB", ~"MZXW6YTBOI======"];
    let expect = ~[~"", ~"f", ~"fo", ~"foo", ~"foob", ~"fooba", ~"foobar"];
    do t(source, expect) |src| { base32::Standard.decode(src) }
}

#[test]
fn test_base32_hex_decode() {
    let source = ~[~"", ~"CO======", ~"CPNG====", ~"CPNMU===", ~"CPNMUOG=",
                   ~"CPNMUOJ1", ~"CPNMUOJ1E8======"];
    let expect = ~[~"", ~"f", ~"fo", ~"foo", ~"foob", ~"fooba", ~"foobar"];
    do t(source, expect) |src| { base32::Hex.decode(src) }
}

// Base16 encodes to upper-case hex but accepts lower-case on decode.
#[test]
fn test_base16_encode() {
    let source = bytes!("foo");
    let expect = bytes!("666F6F");
    let actual = base16::encode(source);
    assert_eq!(expect, actual.as_slice());
}

#[test]
fn test_base16_decode() {
    let source = bytes!("666f6f");
    let expect = bytes!("foo");
    let actual = base16::decode(source);
    assert_eq!(expect, actual.as_slice());
}
//! Crate root: links the external `dirac` crate and exposes the `checks` module.
extern crate dirac;

pub mod checks;
/// A bare name (variable or function identifier) in the toy boolean language.
#[derive(Debug, Clone, PartialEq)]
struct Identifier(String);

/// A named boolean function: `name(parameters...) = expression`.
#[derive(Debug)]
struct Function {
    name: Identifier,
    parameters: Vec<Identifier>,
    expression: Expr,
}

/// AST node for a boolean expression.
///
/// `Debug` is derived on all AST types so trees can be inspected while
/// developing the parser/evaluator.
#[derive(Debug)]
enum Expr {
    /// Literal `true`/`false`.
    Const(bool),
    /// Reference to a variable.
    Var(Identifier),
    /// Logical negation: `!a`.
    Not(Box<Expr>),
    /// Function call: `func(a, b, ...)`.
    Func { ident: Identifier, param: Vec<Expr> },
    /// Conjunction: `a & b`.
    And(Box<Expr>, Box<Expr>),
    /// Disjunction: `a | b`.
    Or(Box<Expr>, Box<Expr>),
}

// a | b
// a & b
// !a
// func(a)

fn main() {
    println!("Hello, world!");
}
use {data::semantics::Semantics, proc_macro2::TokenStream, quote::quote};

impl Semantics {
	/// Emits (as a token stream to be spliced into generated runtime code)
	/// helper functions that walk a `Group` tree and merge each class's
	/// elements, listeners, and properties into a flat selector->Group map.
	///
	/// NOTE(review): `register_variable` is a byte-for-byte duplicate of
	/// `register_classes` — same body, and it even recurses into
	/// `register_classes` rather than itself. It looks like a copy-paste
	/// stub awaiting a real implementation; confirm before deduplicating,
	/// since the generated names may be referenced elsewhere.
	pub fn runtime_register_functions() -> TokenStream {
		quote! {
			fn register_classes(source: &Group, classes: &mut HashMap<&'static str, Group>) {
				for source in &source.classes {
					// Depth-first: register nested classes before merging this one.
					for class in &source.classes {
						register_classes(class, classes);
					}
					let mut target = classes.entry(source.selector).or_insert(Group::default());
					// A non-empty element list wholly replaces the target's elements.
					if source.elements.len() > 0 {
						target.elements = Vec::new();
						for element in &source.elements {
							target.elements.push(element.clone());
						}
					}
					// Listeners accumulate; properties overwrite per key.
					for listener in &source.listeners {
						target.listeners.push(listener.clone());
					}
					for (property, value) in source.properties.clone() {
						target.properties.insert(property, value.clone());
					}
				}
			}
			fn register_variable(source: &Group, classes: &mut HashMap<&'static str, Group>) {
				for source in &source.classes {
					for class in &source.classes {
						register_classes(class, classes);
					}
					let mut target = classes.entry(source.selector).or_insert(Group::default());
					if source.elements.len() > 0 {
						target.elements = Vec::new();
						for element in &source.elements {
							target.elements.push(element.clone());
						}
					}
					for listener in &source.listeners {
						target.listeners.push(listener.clone());
					}
					for (property, value) in source.properties.clone() {
						target.properties.insert(property, value.clone());
					}
				}
			}
		}
	}
}
use crate::extensions::NodeExt as _;
use crate::hud;
use crate::mob;
use crate::player;
use gdnative::api::{Area2D, AudioStreamPlayer, PathFollow2D, Position2D, RigidBody2D};
use gdnative::prelude::*;
use rand::*;
use std::f64::consts::PI;

/// Root node script for the game: owns the mob scene to spawn and the
/// running score, and wires the HUD, timers, player and mobs together.
#[derive(NativeClass)]
#[inherit(Node)]
#[user_data(user_data::LocalCellData<GameHandler>)]
pub struct GameHandler {
    /// Scene instanced for every spawned mob (set from the editor via the
    /// exported property).
    #[property]
    mob: Ref<PackedScene>,
    /// Current score; incremented once per ScoreTimer tick.
    score: u16,
}

#[methods]
impl GameHandler {
    /// Create a game handler with an empty mob scene and a zero score.
    pub fn new(_owner: &Node) -> Self {
        GameHandler { mob: PackedScene::new().into_shared(), score: 0 }
    }

    #[export]
    /// Prepare main scene: start the background music.
    pub fn _ready(&self, owner: &Node) {
        // SAFETY(review): `get_typed_node` is a project helper (see
        // `crate::extensions`); it appears to assume the named child exists
        // and is alive for the duration of the call — confirm its contract.
        unsafe { owner.get_typed_node::<AudioStreamPlayer, _>("Music") }.play(0.0);
    }

    #[export]
    /// Run the game-over sequence: stop spawning/scoring, show the HUD
    /// message, and swap the music for the death sound.
    pub fn game_over(&self, owner: &Node) {
        // Stop mob and score timers
        unsafe { owner.get_typed_node::<Timer, _>("MobTimer") }.stop();
        unsafe { owner.get_typed_node::<Timer, _>("ScoreTimer") }.stop();
        // Get HUD node and show the game over message
        let hud_node = unsafe { owner.get_typed_node::<CanvasLayer, _>("HUD") };
        hud_node.cast_instance::<hud::HUD>()
            .and_then(|hud| hud.map(|x, o| x.show_game_over(&*o)).ok())
            .unwrap_or_else(|| godot_print!("Unable to get hud"));
        // Run death sound
        unsafe { owner.get_typed_node::<AudioStreamPlayer, _>("Music") }.stop();
        unsafe { owner.get_typed_node::<AudioStreamPlayer, _>("Death") }
            .play(0.0);
    }

    #[export]
    /// Prepare the game for a new game: reset the player, show the "Get
    /// Ready" message, and kick off the start timer and music.
    // NOTE(review): `self.score` is never reset here despite `&mut self`;
    // the HUD is updated with the *previous* run's score. Confirm whether
    // `self.score = 0;` is missing (the upstream tutorial resets it).
    fn new_game(&mut self, owner: &Node) {
        // Get default start position
        let start_position = unsafe { owner.get_typed_node::<Position2D, _>("StartPosition") };
        // Get the player
        let player = unsafe { owner.get_typed_node::<Area2D, _>("Player") };
        // Reset the player position
        player.cast_instance::<player::Player>()
            .and_then(|player| {
                player.map_mut(|x, o| x.start(&*o, start_position.position())).ok()
            }).unwrap_or_else(|| godot_print!("Unable to get player"));
        // Get HUD
        let hud_node = unsafe { owner.get_typed_node::<CanvasLayer, _>("HUD") };
        // Print 'Get Ready' message
        hud_node.cast_instance::<hud::HUD>()
            .and_then(|hud| {
                hud.map(|x, o| {
                    x.update_score(&*o, self.score);
                    x.show_message(&*o, "Get Ready".into() )}).ok()
            }).unwrap_or_else(|| godot_print!("Unable to get hud"));
        // Start the start timer
        unsafe { owner.get_typed_node::<Timer, _>("StartTimer") }.start(0.0);
        // Run main music
        unsafe { owner.get_typed_node::<AudioStreamPlayer, _>("Music") }.play(0.0);
        unsafe { owner.get_typed_node::<AudioStreamPlayer, _>("Death") }.stop();
    }

    #[export]
    /// Reset the mob and score timers once the start delay elapses, and
    /// unfreeze the player.
    fn on_start_timer_timeout(&self, owner: &Node) {
        unsafe { owner.get_typed_node::<Timer, _>("MobTimer") }.start(0.0);
        unsafe { owner.get_typed_node::<Timer, _>("ScoreTimer") }.start(0.0);
        // Get the player
        let player = unsafe { owner.get_typed_node::<Area2D, _>("Player") };
        // Reset the player position
        player.cast_instance::<player::Player>()
            .and_then(|player| {
                player.map_mut(|x, o| {
                    x.allow_move(&*o)
                }).ok()
            }).unwrap_or_else(|| godot_print!("Unable to get player"));
    }

    #[export]
    /// Update the player score (one point per ScoreTimer tick) and push it
    /// to the HUD.
    fn on_score_timer_timeout(&mut self, owner: &Node) {
        // Increase the player score
        self.score += 1;
        // Get HUD
        let hud_node = unsafe { owner.get_typed_node::<CanvasLayer, _>("HUD") };
        // Update score
        hud_node.cast_instance::<hud::HUD>()
            .and_then(|hud| hud.map(|x, o| x.update_score(&*o, self.score)).ok())
            .unwrap_or_else(|| godot_print!("Unable to get hud"));
    }

    #[export]
    /// Spawn one mob at a random point on the spawn path, aim it roughly
    /// inward with a random ±45° jitter, and give it a random speed.
    fn on_mob_timer_timeout(&self, owner: &Node) {
        // Get mob spawn location
        let mob_spawn_location =
            unsafe { owner.get_typed_node::<PathFollow2D, _>("MobPath/MobSpawnLocation") };
        // Create a new mob
        let mob: Ref<RigidBody2D, _> = instance_scene(&self.mob);
        // Spawn mobs randomly: a full-range u32 offset wraps around the path,
        // picking a uniformly random point on it (rand 0.x two-arg gen_range).
        let mut rng = rand::thread_rng();
        let offset = rng.gen_range(std::u32::MIN, std::u32::MAX);
        mob_spawn_location.set_offset(offset.into());
        // Set mob direction: perpendicular to the path (pointing inward)
        let mut direction = mob_spawn_location.rotation() + PI / 2.0;
        // Set mob position
        mob.set_position(mob_spawn_location.position());
        // Jitter direction by up to a quarter turn either way
        direction += rng.gen_range(-PI / 4.0, PI / 4.0);
        mob.set_rotation(direction);
        let d = direction as f32;
        // Add mob in the game (hand ownership to the scene tree)
        let mob = unsafe { mob.into_shared().assume_safe() };
        owner.add_child(mob, false);
        // Convert the scene into mob struct
        let mob = mob.cast_instance::<mob::Mob>().unwrap();
        // For each mob (x) and mob_owner
        mob.map(|x, mob_owner| {
            // Get mob min and max speed
            let (min, max) = (x.min_speed(&*mob_owner), x.max_speed(&*mob_owner));
            // Set a random mob velocity, then rotate it to the travel direction
            mob_owner.set_linear_velocity(Vector2::new(rng.gen_range(min, max), 0.0));
            mob_owner.set_linear_velocity(mob_owner.linear_velocity()
                .rotated(Angle { radians: d }));
            // Get HUD
            unsafe { owner.get_typed_node::<CanvasLayer, _>("HUD") }
                .cast_instance::<hud::HUD>().unwrap()
                // Connect the `start_game` signal to delete old mobs
                .map(|_, o| {
                    o.connect("start_game", mob_owner, "on_start_game",
                        VariantArray::new_shared(), 0
                    ).unwrap();
                }).unwrap();
        }).unwrap();
    }
}

/// `Root` here needs to be the same type (or a parent type) of the node that
/// you put in the child scene as the root. For instance `Spatial` is used for
/// this example.
fn instance_scene<Root>(scene: &Ref<PackedScene, Shared>) -> Ref<Root, Unique>
where
    Root: gdnative::GodotObject<RefKind = ManuallyManaged> + SubClass<Node>,
{
    // Get the scene
    // SAFETY(review): assumes the shared `PackedScene` is not freed while this
    // function runs — holds on the main thread during a timer callback.
    let scene = unsafe { scene.assume_safe() };
    // Create a new instance of the scene
    let instance = scene.instance(PackedScene::GEN_EDIT_STATE_DISABLED)
        .expect("should be able to instance scene");
    let instance = unsafe { instance.assume_unique() };
    // Return the instance of the scene, downcast to the requested root type
    instance.try_cast::<Root>().expect("root node type should be correct")
}
extern crate hacspec_lib;
extern crate proc_macro;
extern crate proc_macro2;
extern crate quote;
extern crate syn;

use proc_macro2::TokenStream;
use quote::{quote, quote_spanned};
use syn::spanned::Spanned;
use syn::{parse_macro_input, Data, DeriveInput, Fields, Ident, Index};

/// Shape of the per-field expression generated by `make_impl_body`. Each
/// variant carries pre-built token fragments that are spliced around the
/// field access (`self.<field>` for named structs, `self.<index>` for tuple
/// structs).
enum Expression {
    /// `<e1>.f <op> <e2>.f` — e.g. `self.x + rhs.x`
    Binop(TokenStream, TokenStream, TokenStream),
    /// `<e1>.f <op> <e2>` — the shift amount is shared, not taken per field
    Shift(TokenStream, TokenStream, TokenStream),
    /// `<op> <e>.f` — e.g. `!self.x`
    Unop(TokenStream, TokenStream),
    /// `self.f.<func>(<e1>.f, <e2>.f)`
    TwoArgsMethod(TokenStream, TokenStream, TokenStream),
    /// `self.f.<func>(<e1>.f)`
    OneArgsMethod(TokenStream, TokenStream),
    /// `self.f.<func>(<e1>)` — the argument is passed through unchanged
    /// (it is a base-type value such as a `u32` exponent, not a field)
    OneArgsMethodWithBaseTypeArg(TokenStream, TokenStream),
    /// `self.f.<func>()`
    ZeroArgsMethod(TokenStream),
}

/// Build the body of one generated method by mapping `inner_expression`
/// field-wise over the struct described by `data`, reconstructing a value of
/// type `name`. Panics (compile error) for enums and unions.
fn make_impl_body(name: &Ident, data: &Data, inner_expression: Expression) -> TokenStream {
    match *data {
        Data::Struct(ref data) => match data.fields {
            // `struct S { a: T, b: U }` — build `S { a: <expr>, b: <expr> }`
            Fields::Named(ref fields) => {
                let recurse = fields.named.iter().map(|f| {
                    // Shadows the outer `name` (the struct ident) with the field ident.
                    let name = &f.ident;
                    match &inner_expression {
                        Expression::Binop(e1, op, e2) => quote_spanned! {f.span() =>
                            #name: #e1.#name #op #e2.#name
                        },
                        Expression::Shift(e1, op, e2) => quote_spanned! {f.span() =>
                            #name: #e1.#name #op #e2
                        },
                        Expression::Unop(op, e) => quote_spanned! {f.span() =>
                            #name: #op #e.#name
                        },
                        Expression::TwoArgsMethod(func, e1, e2) => quote_spanned! {f.span() =>
                            #name: self.#name.#func(#e1.#name, #e2.#name)
                        },
                        Expression::OneArgsMethod(func, e1) => quote_spanned! {f.span() =>
                            #name: self.#name.#func(#e1.#name)
                        },
                        Expression::OneArgsMethodWithBaseTypeArg(func, e1) => {
                            quote_spanned! {f.span() =>
                                #name: self.#name.#func(#e1)
                            }
                        }
                        Expression::ZeroArgsMethod(func) => quote_spanned! {f.span() =>
                            #name: self.#name.#func()
                        },
                    }
                });
                let expanded = quote! {
                    #name {
                        #(#recurse),*
                    }
                };
                expanded
            }
            // `struct S(T, U)` — build `S(<expr>, <expr>)` using tuple indices.
            Fields::Unnamed(ref fields) => {
                let recurse = fields.unnamed.iter().enumerate().map(|(i, f)| {
                    let index = Index::from(i);
                    match &inner_expression {
                        Expression::Binop(e1, op, e2) => quote_spanned! {f.span() =>
                            #e1.#index #op #e2.#index
                        },
                        Expression::Shift(e1, op, e2) => quote_spanned! {f.span() =>
                            #e1.#index #op #e2
                        },
                        Expression::Unop(op, e) => quote_spanned! {f.span() =>
                            #op #e.#index
                        },
                        Expression::TwoArgsMethod(func, e1, e2) => quote_spanned! {f.span() =>
                            self.#index.#func(#e1.#index, #e2.#index)
                        },
                        Expression::OneArgsMethod(func, e1) => quote_spanned! {f.span() =>
                            self.#index.#func(#e1.#index)
                        },
                        Expression::OneArgsMethodWithBaseTypeArg(func, e1) => {
                            quote_spanned! {f.span() =>
                                self.#index.#func(#e1)
                            }
                        }
                        Expression::ZeroArgsMethod(func) => quote_spanned! {f.span() =>
                            self.#index.#func()
                        },
                    }
                });
                quote! {
                    #name (
                        #(#recurse),*
                    )
                }
            }
            Fields::Unit => quote! { #name {} },
        },
        Data::Enum(_) | Data::Union(_) => {
            panic!("Deriving the Numeric trait is impossible for enums or unions")
        }
    }
}

/// Derive `Numeric` (plus the arithmetic/bit operator traits and
/// `ModNumeric`) for a struct by forwarding every operation field-wise.
/// Comparison methods are emitted as panicking stubs — they cannot be
/// auto-derived field-wise.
#[proc_macro_derive(Numeric)]
pub fn derive_numeric_impl(input_struct: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input_ast = parse_macro_input!(input_struct as DeriveInput);
    // Used in the quasi-quotation below as `#name`.
    let name = input_ast.ident;
    let generics = input_ast.generics;
    let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
    // One pre-built body per generated method.
    let sum = make_impl_body(
        &name,
        &input_ast.data,
        Expression::Binop(quote! { self }, quote! { + }, quote! { rhs }),
    );
    let difference = make_impl_body(
        &name,
        &input_ast.data,
        Expression::Binop(quote! { self }, quote! { - }, quote! { rhs }),
    );
    let mul = make_impl_body(
        &name,
        &input_ast.data,
        Expression::Binop(quote! { self }, quote! { * }, quote! { rhs }),
    );
    let xor = make_impl_body(
        &name,
        &input_ast.data,
        Expression::Binop(quote! { self }, quote! { ^ }, quote! { rhs }),
    );
    let or = make_impl_body(
        &name,
        &input_ast.data,
        Expression::Binop(quote! { self }, quote! { | }, quote! { rhs }),
    );
    let and = make_impl_body(
        &name,
        &input_ast.data,
        Expression::Binop(quote! { self }, quote! { & }, quote! { rhs }),
    );
    let shl = make_impl_body(
        &name,
        &input_ast.data,
        Expression::Shift(quote! { self }, quote! { << }, quote! { v }),
    );
    let shr = make_impl_body(
        &name,
        &input_ast.data,
        Expression::Shift(quote! { self }, quote! { >> }, quote! { v }),
    );
    let not = make_impl_body(
        &name,
        &input_ast.data,
        Expression::Unop(quote! { ! }, quote! { self }),
    );
    let sub_mod = make_impl_body(
        &name,
        &input_ast.data,
        Expression::TwoArgsMethod(quote! { sub_mod }, quote! { rhs }, quote! { n }),
    );
    let add_mod = make_impl_body(
        &name,
        &input_ast.data,
        Expression::TwoArgsMethod(quote! { add_mod }, quote! { rhs }, quote! { n }),
    );
    let mul_mod = make_impl_body(
        &name,
        &input_ast.data,
        Expression::TwoArgsMethod(quote! { mul_mod }, quote! { rhs }, quote! { n }),
    );
    let pow_mod = make_impl_body(
        &name,
        &input_ast.data,
        Expression::TwoArgsMethod(quote! { pow_mod }, quote! { exp }, quote! { n }),
    );
    let modulo = make_impl_body(
        &name,
        &input_ast.data,
        Expression::OneArgsMethod(quote! { modulo }, quote! { n }),
    );
    let signed_modulo = make_impl_body(
        &name,
        &input_ast.data,
        Expression::OneArgsMethod(quote! { signed_modulo }, quote! { n }),
    );
    let wrap_add = make_impl_body(
        &name,
        &input_ast.data,
        Expression::OneArgsMethod(quote! { wrap_add }, quote! { rhs }),
    );
    let wrap_sub = make_impl_body(
        &name,
        &input_ast.data,
        Expression::OneArgsMethod(quote! { wrap_sub }, quote! { rhs }),
    );
    let wrap_mul = make_impl_body(
        &name,
        &input_ast.data,
        Expression::OneArgsMethod(quote! { wrap_mul }, quote! { rhs }),
    );
    let wrap_div = make_impl_body(
        &name,
        &input_ast.data,
        Expression::OneArgsMethod(quote! { wrap_div }, quote! { rhs }),
    );
    let pow_self = make_impl_body(
        &name,
        &input_ast.data,
        Expression::OneArgsMethod(quote! { pow_self }, quote! { exp }),
    );
    let divide = make_impl_body(
        &name,
        &input_ast.data,
        Expression::OneArgsMethod(quote! { divide }, quote! { rhs }),
    );
    let inv = make_impl_body(
        &name,
        &input_ast.data,
        Expression::OneArgsMethod(quote! { inv }, quote! { n }),
    );
    let absolute = make_impl_body(
        &name,
        &input_ast.data,
        Expression::ZeroArgsMethod(quote! { absolute }),
    );
    // `exp` takes a plain `u32`, hence the base-type-arg variant.
    let exp = make_impl_body(
        &name,
        &input_ast.data,
        Expression::OneArgsMethodWithBaseTypeArg(quote! { exp }, quote! { exp }),
    );
    let expanded = quote! {
        impl #impl_generics Add for #name #ty_generics #where_clause {
            type Output = Self;
            fn add(self, rhs: Self) -> Self {
                #sum
            }
        }
        impl #impl_generics Sub for #name #ty_generics #where_clause {
            type Output = Self;
            fn sub(self, rhs: Self) -> Self {
                #difference
            }
        }
        impl #impl_generics Mul for #name #ty_generics #where_clause {
            type Output = Self;
            fn mul(self, rhs: Self) -> Self {
                #mul
            }
        }
        impl #impl_generics BitXor for #name #ty_generics #where_clause {
            type Output = Self;
            fn bitxor(self, rhs: Self) -> Self {
                #xor
            }
        }
        impl #impl_generics BitOr for #name #ty_generics #where_clause {
            type Output = Self;
            fn bitor(self, rhs: Self) -> Self {
                #or
            }
        }
        impl #impl_generics BitAnd for #name #ty_generics #where_clause {
            type Output = Self;
            fn bitand(self, rhs: Self) -> Self {
                #and
            }
        }
        impl #impl_generics Shl<usize> for #name #ty_generics #where_clause {
            type Output = Self;
            fn shl(self, v: usize) -> Self {
                #shl
            }
        }
        impl #impl_generics Shr<usize> for #name #ty_generics #where_clause {
            type Output = Self;
            fn shr(self, v: usize) -> Self {
                #shr
            }
        }
        impl #impl_generics Not for #name #ty_generics #where_clause {
            type Output = Self;
            fn not(self) -> Self {
                #not
            }
        }
        impl #impl_generics ModNumeric for #name #ty_generics #where_clause {
            /// (self - rhs) % n.
            fn sub_mod(self, rhs: Self, n: Self) -> Self {
                #sub_mod
            }
            /// `(self + rhs) % n`
            fn add_mod(self, rhs: Self, n: Self) -> Self {
                #add_mod
            }
            /// `(self * rhs) % n`
            fn mul_mod(self, rhs: Self, n: Self) -> Self {
                #mul_mod
            }
            /// `(self ^ exp) % n`
            fn pow_mod(self, exp: Self, n: Self) -> Self {
                #pow_mod
            }
            /// `self % n`
            fn modulo(self, n: Self) -> Self {
                #modulo
            }
            /// `self % n` that always returns a positive integer
            fn signed_modulo(self, n: Self) -> Self {
                #signed_modulo
            }
            /// `|self|`
            fn absolute(self) -> Self {
                #absolute
            }
        }
        impl #impl_generics Numeric for #name #ty_generics #where_clause {
            /// Return largest value that can be represented.
            fn max_val() -> Self {
                panic!("Function not implemented by auto-deriving...")
            }
            fn wrap_add(self, rhs: Self) -> Self {
                #wrap_add
            }
            fn wrap_sub(self, rhs: Self) -> Self {
                #wrap_sub
            }
            fn wrap_mul(self, rhs: Self) -> Self {
                #wrap_mul
            }
            fn wrap_div(self, rhs: Self) -> Self {
                #wrap_div
            }
            /// `self ^ exp` where `exp` is a `u32`.
            fn exp(self, exp: u32) -> Self {
                #exp
            }
            /// `self ^ exp` where `exp` is a `Self`.
            fn pow_self(self, exp: Self) -> Self {
                #pow_self
            }
            /// Division.
            fn divide(self, rhs: Self) -> Self {
                #divide
            }
            /// Invert self modulo n.
            fn inv(self, n: Self) -> Self {
                #inv
            }
            // Comparison functions returning bool.
            fn equal(self, other: Self) -> bool {
                panic!("Function not implemented by auto-deriving...")
            }
            fn greater_than(self, other: Self) -> bool {
                panic!("Function not implemented by auto-deriving...")
            }
            fn greater_than_or_equal(self, other: Self) -> bool {
                panic!("Function not implemented by auto-deriving...")
            }
            fn less_than(self, other: Self) -> bool {
                panic!("Function not implemented by auto-deriving...")
            }
            fn less_than_or_equal(self, other: Self) -> bool {
                panic!("Function not implemented by auto-deriving...")
            }
            // Comparison functions returning a bit mask (0x0..0 or 0xF..F).
            fn not_equal_bm(self, other: Self) -> Self {
                panic!("Function not implemented by auto-deriving...")
            }
            fn equal_bm(self, other: Self) -> Self {
                panic!("Function not implemented by auto-deriving...")
            }
            fn greater_than_bm(self, other: Self) -> Self {
                panic!("Function not implemented by auto-deriving...")
            }
            fn greater_than_or_equal_bm(self, other: Self) -> Self {
                panic!("Function not implemented by auto-deriving...")
            }
            fn less_than_bm(self, other: Self) -> Self {
                panic!("Function not implemented by auto-deriving...")
            }
            fn less_than_or_equal_bm(self, other: Self) -> Self {
                panic!("Function not implemented by auto-deriving...")
            }
        };
    };
    proc_macro::TokenStream::from(expanded)
}
use crate::ast_parser; use crate::options::ParseOptions; pub fn parse( src: String, parse_options: ParseOptions, ) -> Result<swc_ecma_ast::Module, anyhow::Error> { let p = ast_parser::AstParser::new(); p.parse_module("root.ts", &src, parse_options, |parse_result| { let module = parse_result?; Ok(module) }) }
use crate::BASE_ADDR; /// All possible errors in this crate #[derive(Debug)] pub enum Error<E> { /// I²C communication error I2C(E), /// Invalid input data provided InvalidInputData, } /// Measurement result #[derive(Debug, Clone, Copy, PartialEq)] pub struct Measurement { /// Temperature (°C) pub temperature: f32, /// Relative Humidity (%RH) /// /// Optionally read depending on the measurement configuration pub humidity: Option<f32>, /// Last status pub status: Status, } /// Status #[derive(Debug, Clone, Copy, PartialEq)] pub struct Status { /// Whether data is ready pub data_ready: bool, /// Whether the temperature high threshold was exceeded pub high_temp_threshold_exceeded: bool, /// Whether the temperature low threshold was exceeded pub low_temp_threshold_exceeded: bool, /// Whether the humidity high threshold was exceeded pub high_humidity_threshold_exceeded: bool, /// Whether the humidity low threshold was exceeded pub low_humidity_threshold_exceeded: bool, } /// Measurement mode #[derive(Debug, Clone, Copy, PartialEq)] pub enum MeasurementMode { /// Temperature and humidity (default) TemperatureAndHumidity, /// Temperature only TemperatureOnly, } impl Default for MeasurementMode { fn default() -> Self { MeasurementMode::TemperatureAndHumidity } } /// Possible slave addresses #[derive(Debug, Clone, Copy, PartialEq)] pub enum SlaveAddr { /// Default slave address Default, /// Alternative slave address providing bit value for the SDO pin Alternative(bool), } impl Default for SlaveAddr { /// Default slave address fn default() -> Self { SlaveAddr::Default } } impl SlaveAddr { pub(crate) fn addr(self) -> u8 { match self { SlaveAddr::Default => BASE_ADDR, SlaveAddr::Alternative(false) => BASE_ADDR, SlaveAddr::Alternative(true) => BASE_ADDR | 1, } } } #[cfg(test)] mod tests { use super::BASE_ADDR as ADDR; use super::{MeasurementMode, SlaveAddr}; #[test] fn can_get_default_address() { let addr = SlaveAddr::default(); assert_eq!(ADDR, addr.addr()); } #[test] fn 
can_generate_alternative_addresses() { assert_eq!(ADDR, SlaveAddr::Alternative(false).addr()); assert_eq!(ADDR | 1, SlaveAddr::Alternative(true).addr()); } #[test] fn can_get_default_measurement_mode() { assert_eq!( MeasurementMode::TemperatureAndHumidity, MeasurementMode::default() ); } }
//! Provides an 'EventLoop' class for managing the gui worker. use std::time::{ Instant }; /// In most of the examples the `glutin` crate is used for providing the window context and /// events while the `glium` crate is used for displaying `conrod::render::Primitives` to the /// screen. /// /// This `Iterator`-like type simplifies some of the boilerplate involved in setting up a /// glutin+glium event loop that works efficiently with conrod. pub struct EventLoop { ui_needs_update: bool, last_update: Instant, } #[cfg(feature="glium")] mod _glium; #[cfg(feature="piston")] mod _piston; #[cfg(not(any(feature="glium", feature="piston")))] impl EventLoop { pub fn new() -> Self { panic!("Either 'glium' or 'piston' feature required.") } } #[cfg(test)] mod tests;
extern crate rider_config;

use std::process::Command;

/// Launch the rider toolchain: run the generator binary to completion, then
/// the editor binary. Panics if either binary cannot be resolved or spawned.
#[cfg_attr(tarpaulin, skip)]
fn main() {
    let generator = rider_config::directories::get_binary_path("rider-generator").unwrap();
    println!("generator will be {:?}", generator);
    Command::new(generator).status().unwrap();

    let editor = rider_config::directories::get_binary_path("rider-editor").unwrap();
    println!("editor will be {:?}", editor);
    Command::new(editor).status().unwrap();
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::env::set_var;
    use std::fs::create_dir_all;
    use std::path::Path;
    use uuid::Uuid;

    /// True when `dir/sub` exists on disk.
    /// Takes `&str` instead of `&String` (clippy `ptr_arg`); `&String`
    /// arguments still coerce at the call sites.
    #[cfg(test)]
    fn exists(dir: &str, sub: &str) -> bool {
        Path::new(&join(dir, sub)).exists()
    }

    /// Join two path segments with a `/` separator.
    #[cfg(test)]
    fn join(a: &str, b: &str) -> String {
        format!("{}/{}", a, b)
    }

    #[test]
    fn assert_main() {
        // Run against a unique sandbox so repeated test runs don't collide.
        let uniq = Uuid::new_v4();
        let test_path = join("/tmp/rider-tests", &uniq.to_string());
        create_dir_all(&test_path).unwrap();
        set_var("XDG_CONFIG_HOME", test_path.as_str());
        set_var("XDG_RUNTIME_DIR", test_path.as_str());
        let rider_dir = join(&test_path, "rider");
        // Nothing exists before `main` runs…
        assert_eq!(exists(&rider_dir, "themes"), false);
        assert_eq!(exists(&rider_dir, "log"), false);
        assert_eq!(exists(&test_path, ".rider"), false);
        assert_eq!(exists(&rider_dir, "themes/default.json"), false);
        assert_eq!(exists(&rider_dir, "themes/railscasts.json"), false);
        main();
        // …and the generator is expected to have created all of it after.
        assert_eq!(exists(&rider_dir, "fonts"), true);
        assert_eq!(exists(&rider_dir, "log"), true);
        assert_eq!(exists(&rider_dir, "themes"), true);
        assert_eq!(exists(&test_path, ".rider"), true);
        assert_eq!(exists(&rider_dir, "themes/default.json"), true);
        assert_eq!(exists(&rider_dir, "themes/railscasts.json"), true);
    }
}
use super::webgl::WebGlRenderingContext;
use crate::arena::{resource, BlockRef};
use crate::libs::random_id::U128Id;
use std::cell::Cell;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::rc::Rc;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;

/// Key identifying a cached texture by its origin.
#[derive(Hash, PartialEq, Eq, Clone)]
enum TextureId {
    /// Texture backed by an image resource in the arena.
    ResourceId(U128Id),
    /// Caller-managed texture; the table only tracks its unit index.
    Custom(U128Id),
    /// Rendered nameplate text, keyed by (main line, sub line).
    Nameplate(String, String),
}

/// Wraps a cached value with a usage-driven lifetime counter:
/// `life_expectancy` grows by one on every tick in which the value was
/// dereferenced, and shrinks by one otherwise; the value expires when it
/// reaches an unused tick with a count of zero.
struct Lifespan<V> {
    value: V,
    life_expectancy: usize,
    // Set by Deref/DerefMut; consumed (reset) by `aging`. Cell allows the
    // flag to be flipped through a shared reference.
    is_used: Cell<bool>,
}

impl<V> Lifespan<V> {
    /// Start tracking `value`; it counts as used for the first tick.
    pub fn new(value: V) -> Self {
        Self {
            value,
            life_expectancy: 0,
            is_used: Cell::new(true),
        }
    }

    /// Advance one tick. Returns `true` when the value has expired and
    /// should be dropped by the caller.
    pub fn aging(&mut self) -> bool {
        if self.is_used.get() {
            self.life_expectancy += 1;
            self.is_used.set(false);
            false
        } else if self.life_expectancy == 0 {
            true
        } else {
            self.life_expectancy -= 1;
            false
        }
    }
}

impl<V> std::ops::Deref for Lifespan<V> {
    type Target = V;
    fn deref(&self) -> &Self::Target {
        // Any read marks the value as used for this tick.
        self.is_used.set(true);
        &self.value
    }
}

impl<V> std::ops::DerefMut for Lifespan<V> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.is_used.set(true);
        &mut self.value
    }
}

/// Allocates WebGL texture units to textures on demand, evicting the
/// least-recently-allocated unit when all are taken, and caches the GPU
/// textures for arena image resources and rendered nameplate labels.
pub struct TexTable {
    /// Number of texture units available on this context (capped at 32).
    max_tex_num: i32,
    /// Unit indices never handed out (or returned — note: nothing currently
    /// pushes back into this queue after `new`).
    unused_tex_idx: VecDeque<i32>,
    /// Units in use, oldest first — the front is the eviction victim.
    used_tex_idx: VecDeque<(i32, TextureId)>,
    /// Cached nameplate textures with their pixel size, aged each `update`.
    nameplate_tex_table: HashMap<(String, String), Lifespan<(Rc<web_sys::WebGlTexture>, [f64; 2])>>,
    /// Cached GPU textures for arena image resources (never aged here).
    resource_tex_table: HashMap<U128Id, Rc<web_sys::WebGlTexture>>,
    /// Which unit a given texture currently occupies.
    tex_idx: HashMap<TextureId, i32>,
    /// Scratch canvas reused for rasterizing nameplate text.
    string_canvas: web_sys::HtmlCanvasElement,
}

impl TexTable {
    /// Build a table sized from the context's `MAX_TEXTURE_IMAGE_UNITS`
    /// (clamped to 32, matching the `tex_flag` lookup below).
    pub fn new(gl: &WebGlRenderingContext) -> Self {
        let max_tex_num = (gl
            .get_parameter(web_sys::WebGlRenderingContext::MAX_TEXTURE_IMAGE_UNITS)
            .unwrap()
            .as_f64()
            .unwrap() as i32)
            .min(32);
        let mut unused_tex_idx = VecDeque::new();
        for i in 0..max_tex_num {
            unused_tex_idx.push_back(i);
        }
        let string_canvas = crate::libs::element::html_canvas_element();
        Self {
            max_tex_num,
            unused_tex_idx,
            used_tex_idx: VecDeque::new(),
            nameplate_tex_table: HashMap::new(),
            resource_tex_table: HashMap::new(),
            tex_idx: HashMap::new(),
            string_canvas,
        }
    }

    /// Age every cached nameplate texture one tick and delete the expired
    /// ones from the GPU.
    // NOTE(review): expired nameplates are removed from
    // `nameplate_tex_table` but their entries in `tex_idx`/`used_tex_idx`
    // (if any) are not cleaned up here — confirm stale unit bookkeeping is
    // harmless (it appears to be reclaimed later via `use_idx` eviction).
    pub fn update(&mut self, gl: &WebGlRenderingContext) {
        let mut deleted = vec![];
        for (key_text, tex) in &mut self.nameplate_tex_table {
            if tex.aging() {
                deleted.push(key_text.clone());
            }
        }
        for key_text in &deleted {
            if let Some(tex) = self.nameplate_tex_table.remove(key_text) {
                gl.delete_texture(Some(&tex.0));
            }
        }
    }

    /// Bind a block texture's underlying image resource to a texture unit;
    /// returns the unit index, or `None` if the block is gone.
    pub fn use_blocktexture(
        &mut self,
        gl: &WebGlRenderingContext,
        texture: BlockRef<resource::BlockTexture>,
    ) -> Option<i32> {
        let resource_id = texture.id();
        texture.map(|texture| self.use_resource(gl, &resource_id, texture.data()))
    }

    /// Ensure the image resource is uploaded and bound to a texture unit;
    /// returns the unit index. Three paths: already bound → reuse; uploaded
    /// but unbound → rebind; otherwise upload then bind.
    pub fn use_resource(
        &mut self,
        gl: &WebGlRenderingContext,
        resource_id: &U128Id,
        data: &resource::ImageData,
    ) -> i32 {
        let tex_id = TextureId::ResourceId(U128Id::clone(resource_id));
        if let Some(tex_idx) = self.tex_idx.get(&tex_id) {
            // Already bound to a unit.
            *tex_idx
        } else if let Some(tex_buf) = self
            .resource_tex_table
            .get(&resource_id)
            .map(|tex_buf| Rc::clone(&tex_buf))
        {
            // GPU texture exists; just (re)bind it to a fresh unit.
            let tex_idx = self.use_idx();
            gl.active_texture(Self::tex_flag(tex_idx));
            gl.bind_texture(web_sys::WebGlRenderingContext::TEXTURE_2D, Some(&tex_buf));
            self.tex_idx.insert(TextureId::clone(&tex_id), tex_idx);
            self.used_tex_idx.push_back((tex_idx, tex_id));
            tex_idx
        } else {
            // First use: create, configure, and upload the texture.
            let tex_idx = self.use_idx();
            let tex_buf = gl.create_texture().unwrap();
            gl.active_texture(Self::tex_flag(tex_idx));
            gl.bind_texture(web_sys::WebGlRenderingContext::TEXTURE_2D, Some(&tex_buf));
            // NOTE(review): PACK_ALIGNMENT affects readPixels, not uploads;
            // UNPACK_ALIGNMENT is the parameter that influences
            // texImage2D — confirm which was intended.
            gl.pixel_storei(web_sys::WebGlRenderingContext::PACK_ALIGNMENT, 1);
            gl.tex_parameteri(
                web_sys::WebGlRenderingContext::TEXTURE_2D,
                web_sys::WebGlRenderingContext::TEXTURE_MIN_FILTER,
                web_sys::WebGlRenderingContext::LINEAR as i32,
            );
            gl.tex_parameteri(
                web_sys::WebGlRenderingContext::TEXTURE_2D,
                web_sys::WebGlRenderingContext::TEXTURE_MAG_FILTER,
                web_sys::WebGlRenderingContext::LINEAR as i32,
            );
            // Clamp both axes: required for non-power-of-two sources in WebGL1.
            gl.tex_parameteri(
                web_sys::WebGlRenderingContext::TEXTURE_2D,
                web_sys::WebGlRenderingContext::TEXTURE_WRAP_S,
                web_sys::WebGlRenderingContext::CLAMP_TO_EDGE as i32,
            );
            gl.tex_parameteri(
                web_sys::WebGlRenderingContext::TEXTURE_2D,
                web_sys::WebGlRenderingContext::TEXTURE_WRAP_T,
                web_sys::WebGlRenderingContext::CLAMP_TO_EDGE as i32,
            );
            let _ = gl.tex_image_2d_with_u32_and_u32_and_image(
                web_sys::WebGlRenderingContext::TEXTURE_2D,
                0,
                web_sys::WebGlRenderingContext::RGBA as i32,
                web_sys::WebGlRenderingContext::RGBA,
                web_sys::WebGlRenderingContext::UNSIGNED_BYTE,
                &data.element(),
            );
            self.resource_tex_table
                .insert(U128Id::clone(&resource_id), Rc::new(tex_buf));
            self.tex_idx.insert(TextureId::clone(&tex_id), tex_idx);
            self.used_tex_idx.push_back((tex_idx, tex_id));
            tex_idx
        }
    }

    /// Bind (rendering first, if needed) a nameplate label texture for
    /// `text = (main line, sub line)`. Returns the unit index and the
    /// rendered pixel size, used by the caller to scale the quad.
    pub fn use_nameplate(
        &mut self,
        gl: &WebGlRenderingContext,
        text: &(String, String),
    ) -> Option<(i32, [f64; 2])> {
        let tex_id = TextureId::Nameplate(text.0.clone(), text.1.clone());
        if let Some((tex_buf, size)) = self
            .nameplate_tex_table
            .get(text)
            .map(|tex| (Rc::clone(&tex.0), tex.1.clone()))
        {
            // Cached rasterization exists.
            if let Some(tex_idx) = self.tex_idx.get(&tex_id) {
                Some((*tex_idx, size))
            } else {
                // Rebind the cached texture to a fresh unit.
                let tex_idx = self.use_idx();
                gl.active_texture(Self::tex_flag(tex_idx));
                gl.bind_texture(web_sys::WebGlRenderingContext::TEXTURE_2D, Some(&tex_buf));
                self.tex_idx.insert(TextureId::clone(&tex_id), tex_idx);
                self.used_tex_idx.push_back((tex_idx, tex_id));
                Some((tex_idx, size))
            }
        } else {
            // Rasterize the label onto the scratch canvas.
            let canvas = &self.string_canvas;
            let ctx = canvas
                .get_context("2d")
                .unwrap()
                .unwrap()
                .dyn_into::<web_sys::CanvasRenderingContext2d>()
                .unwrap();
            let font_height = 64.0;
            ctx.set_font(&format!("{}px sans-serif", font_height));
            let metrix = ctx.measure_text(&text.0).unwrap();
            let radius = 0.0;
            let padding = font_height / 4.0;
            // Size the canvas to fit the main line, plus the sub line when present.
            let mut height = font_height + 2.0 * padding;
            let mut width = metrix.width() + 2.0 * padding;
            let mut sub_font_height = 0.0;
            let mut sub_line_margin = 0.0;
            if !text.1.is_empty() {
                sub_font_height = font_height * 5.0 / 8.0;
                sub_line_margin = sub_font_height / 2.0;
                ctx.set_font(&format!("{}px sans-serif", sub_font_height));
                let metrix = ctx.measure_text(&text.1).unwrap();
                height += sub_font_height + sub_line_margin;
                width = width.max(metrix.width() + 2.0 * padding);
            }
            // Freeze the layout values before drawing.
            let height = height;
            let width = width;
            let sub_font_height = sub_font_height;
            let sub_line_margin = sub_line_margin;
            canvas.set_width(width as u32);
            canvas.set_height(height as u32);
            // Resizing resets canvas state; fetch the context again.
            let ctx = canvas
                .get_context("2d")
                .unwrap()
                .unwrap()
                .dyn_into::<web_sys::CanvasRenderingContext2d>()
                .unwrap();
            ctx.clear_rect(0.0, 0.0, width, height);
            // Black rounded-rect background with a white border
            // (radius is currently 0, so effectively a plain rectangle).
            ctx.set_fill_style(&JsValue::from("#000000"));
            ctx.set_stroke_style(&JsValue::from("#FFFFFF"));
            ctx.set_line_width(padding * 0.5);
            let x = 0.0;
            let y = 0.0;
            ctx.begin_path();
            ctx.move_to(x + radius, y);
            let _ = ctx.arc_to(x + width, y, x + width, y + height, radius);
            let _ = ctx.arc_to(x + width, y + height, x, y + height, radius);
            let _ = ctx.arc_to(x, y + height, x, y, radius);
            let _ = ctx.arc_to(x, y, x + width, y, radius);
            ctx.close_path();
            ctx.fill();
            ctx.stroke();
            // Main line, vertically centered in the remaining space.
            ctx.set_font(&format!("{}px sans-serif", font_height));
            ctx.set_fill_style(&JsValue::from("#FFFFFF"));
            ctx.set_text_baseline("middle");
            let _ = ctx.fill_text(
                &text.0,
                padding,
                sub_font_height
                    + sub_line_margin
                    + padding
                    + (height - padding * 2.0 - sub_font_height - sub_line_margin) / 2.0,
            );
            // Sub line above, in the smaller font.
            if !text.1.is_empty() {
                ctx.set_font(&format!("{}px sans-serif", sub_font_height));
                let _ = ctx.fill_text(&text.1, padding, padding + sub_font_height / 2.0);
            }
            // Upload the canvas as a new texture (same setup as `use_resource`).
            let tex_idx = self.use_idx();
            let tex_buf = gl.create_texture().unwrap();
            gl.active_texture(Self::tex_flag(tex_idx));
            gl.bind_texture(web_sys::WebGlRenderingContext::TEXTURE_2D, Some(&tex_buf));
            // NOTE(review): see `use_resource` — PACK vs UNPACK alignment.
            gl.pixel_storei(web_sys::WebGlRenderingContext::PACK_ALIGNMENT, 1);
            gl.tex_parameteri(
                web_sys::WebGlRenderingContext::TEXTURE_2D,
                web_sys::WebGlRenderingContext::TEXTURE_MIN_FILTER,
                web_sys::WebGlRenderingContext::LINEAR as i32,
            );
            gl.tex_parameteri(
                web_sys::WebGlRenderingContext::TEXTURE_2D,
                web_sys::WebGlRenderingContext::TEXTURE_MAG_FILTER,
                web_sys::WebGlRenderingContext::LINEAR as i32,
            );
            gl.tex_parameteri(
                web_sys::WebGlRenderingContext::TEXTURE_2D,
                web_sys::WebGlRenderingContext::TEXTURE_WRAP_S,
                web_sys::WebGlRenderingContext::CLAMP_TO_EDGE as i32,
            );
            gl.tex_parameteri(
                web_sys::WebGlRenderingContext::TEXTURE_2D,
                web_sys::WebGlRenderingContext::TEXTURE_WRAP_T,
                web_sys::WebGlRenderingContext::CLAMP_TO_EDGE as i32,
            );
            let _ = gl.tex_image_2d_with_u32_and_u32_and_canvas(
                web_sys::WebGlRenderingContext::TEXTURE_2D,
                0,
                web_sys::WebGlRenderingContext::RGBA as i32,
                web_sys::WebGlRenderingContext::RGBA,
                web_sys::WebGlRenderingContext::UNSIGNED_BYTE,
                &self.string_canvas,
            );
            self.nameplate_tex_table.insert(
                text.clone(),
                Lifespan::new((Rc::new(tex_buf), [width, height])),
            );
            self.tex_idx.insert(TextureId::clone(&tex_id), tex_idx);
            self.used_tex_idx.push_back((tex_idx, tex_id));
            Some((tex_idx, [width, height]))
        }
    }

    /// Reserve (or look up) a unit for a caller-managed texture; returns
    /// the unit index and the matching `TEXTUREn` enum for `active_texture`.
    pub fn use_custom(&mut self, id: &U128Id) -> (i32, u32) {
        let tex_id = TextureId::Custom(U128Id::clone(id));
        if let Some(tex_idx) = self.tex_idx.get(&tex_id) {
            (*tex_idx, Self::tex_flag(*tex_idx))
        } else {
            let tex_idx = self.use_idx();
            self.used_tex_idx
                .push_back((tex_idx, TextureId::clone(&tex_id)));
            self.tex_idx.insert(tex_id, tex_idx);
            (tex_idx, Self::tex_flag(tex_idx))
        }
    }

    /// Non-allocating lookup variant of `use_custom`.
    pub fn try_use_custom(&self, id: &U128Id) -> Option<(i32, u32)> {
        let tex_id = TextureId::Custom(U128Id::clone(id));
        if let Some(tex_idx) = self.tex_idx.get(&tex_id) {
            Some((*tex_idx, Self::tex_flag(*tex_idx)))
        } else {
            None
        }
    }

    /// Hand out a texture-unit index: a fresh one while any remain,
    /// otherwise evict the oldest allocation (front of `used_tex_idx`).
    fn use_idx(&mut self) -> i32 {
        if let Some(tex_idx) = self.unused_tex_idx.pop_front() {
            tex_idx
        } else {
            let (tex_idx, tex_id) = self.used_tex_idx.pop_front().unwrap();
            self.tex_idx.remove(&tex_id);
            tex_idx
        }
    }

    /// Map a unit index 0..=31 to the corresponding `TEXTUREn` constant.
    /// Indices outside that range are impossible because `max_tex_num`
    /// is clamped to 32 in `new`.
    fn tex_flag(idx: i32) -> u32 {
        match idx {
            0 => web_sys::WebGlRenderingContext::TEXTURE0,
            1 => web_sys::WebGlRenderingContext::TEXTURE1,
            2 => web_sys::WebGlRenderingContext::TEXTURE2,
            3 => web_sys::WebGlRenderingContext::TEXTURE3,
            4 => web_sys::WebGlRenderingContext::TEXTURE4,
            5 => web_sys::WebGlRenderingContext::TEXTURE5,
            6 => web_sys::WebGlRenderingContext::TEXTURE6,
            7 => web_sys::WebGlRenderingContext::TEXTURE7,
            8 => web_sys::WebGlRenderingContext::TEXTURE8,
            9 => web_sys::WebGlRenderingContext::TEXTURE9,
            10 => web_sys::WebGlRenderingContext::TEXTURE10,
            11 => web_sys::WebGlRenderingContext::TEXTURE11,
            12 => web_sys::WebGlRenderingContext::TEXTURE12,
            13 => web_sys::WebGlRenderingContext::TEXTURE13,
            14 => web_sys::WebGlRenderingContext::TEXTURE14,
            15 => web_sys::WebGlRenderingContext::TEXTURE15,
            16 => web_sys::WebGlRenderingContext::TEXTURE16,
            17 => web_sys::WebGlRenderingContext::TEXTURE17,
            18 => web_sys::WebGlRenderingContext::TEXTURE18,
            19 => web_sys::WebGlRenderingContext::TEXTURE19,
            20 => web_sys::WebGlRenderingContext::TEXTURE20,
            21 => web_sys::WebGlRenderingContext::TEXTURE21,
            22 => web_sys::WebGlRenderingContext::TEXTURE22,
            23 => web_sys::WebGlRenderingContext::TEXTURE23,
            24 => web_sys::WebGlRenderingContext::TEXTURE24,
            25 => web_sys::WebGlRenderingContext::TEXTURE25,
            26 => web_sys::WebGlRenderingContext::TEXTURE26,
            27 => web_sys::WebGlRenderingContext::TEXTURE27,
            28 => web_sys::WebGlRenderingContext::TEXTURE28,
            29 => web_sys::WebGlRenderingContext::TEXTURE29,
            30 => web_sys::WebGlRenderingContext::TEXTURE30,
            31 => web_sys::WebGlRenderingContext::TEXTURE31,
            _ => unreachable!(),
        }
    }
}
// NOTE(review): this file appears to be auto-generated API-client code
// (typestate builder pattern); prefer regenerating over hand-editing.

/// GitBlobResponse represents a git blob
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct GitBlobResponse {
    pub content: Option<String>,
    pub encoding: Option<String>,
    pub sha: Option<String>,
    pub size: Option<i64>,
    pub url: Option<String>,
}

impl GitBlobResponse {
    /// Create a builder for this object.
    #[inline]
    pub fn builder() -> GitBlobResponseBuilder {
        GitBlobResponseBuilder {
            body: Default::default(),
        }
    }

    /// Start a `GET` request builder; the `Missing*` type parameters force
    /// `owner`, `repo` and `sha` to be set before the request can be sent.
    #[inline]
    pub fn get_blob() -> GitBlobResponseGetBuilder<crate::generics::MissingOwner, crate::generics::MissingRepo, crate::generics::MissingSha> {
        GitBlobResponseGetBuilder {
            inner: Default::default(),
            _param_owner: core::marker::PhantomData,
            _param_repo: core::marker::PhantomData,
            _param_sha: core::marker::PhantomData,
        }
    }
}

impl Into<GitBlobResponse> for GitBlobResponseBuilder {
    fn into(self) -> GitBlobResponse {
        self.body
    }
}

/// Builder for [`GitBlobResponse`](./struct.GitBlobResponse.html) object.
#[derive(Debug, Clone)]
pub struct GitBlobResponseBuilder {
    body: self::GitBlobResponse,
}

impl GitBlobResponseBuilder {
    #[inline]
    pub fn content(mut self, value: impl Into<String>) -> Self {
        self.body.content = Some(value.into());
        self
    }

    #[inline]
    pub fn encoding(mut self, value: impl Into<String>) -> Self {
        self.body.encoding = Some(value.into());
        self
    }

    #[inline]
    pub fn sha(mut self, value: impl Into<String>) -> Self {
        self.body.sha = Some(value.into());
        self
    }

    #[inline]
    pub fn size(mut self, value: impl Into<i64>) -> Self {
        self.body.size = Some(value.into());
        self
    }

    #[inline]
    pub fn url(mut self, value: impl Into<String>) -> Self {
        self.body.url = Some(value.into());
        self
    }
}

/// Builder created by [`GitBlobResponse::get_blob`](./struct.GitBlobResponse.html#method.get_blob) method for a `GET` operation associated with `GitBlobResponse`.
#[repr(transparent)]
#[derive(Debug, Clone)]
pub struct GitBlobResponseGetBuilder<Owner, Repo, Sha> {
    inner: GitBlobResponseGetBuilderContainer,
    // Zero-sized typestate markers tracking which parameters are set.
    _param_owner: core::marker::PhantomData<Owner>,
    _param_repo: core::marker::PhantomData<Repo>,
    _param_sha: core::marker::PhantomData<Sha>,
}

#[derive(Debug, Default, Clone)]
struct GitBlobResponseGetBuilderContainer {
    param_owner: Option<String>,
    param_repo: Option<String>,
    param_sha: Option<String>,
}

impl<Owner, Repo, Sha> GitBlobResponseGetBuilder<Owner, Repo, Sha> {
    /// owner of the repo
    #[inline]
    pub fn owner(mut self, value: impl Into<String>) -> GitBlobResponseGetBuilder<crate::generics::OwnerExists, Repo, Sha> {
        self.inner.param_owner = Some(value.into());
        // SAFETY: the struct is #[repr(transparent)] over `inner`; only the
        // zero-sized PhantomData typestate parameters change, so source and
        // target types have identical layout.
        unsafe { std::mem::transmute(self) }
    }

    /// name of the repo
    #[inline]
    pub fn repo(mut self, value: impl Into<String>) -> GitBlobResponseGetBuilder<Owner, crate::generics::RepoExists, Sha> {
        self.inner.param_repo = Some(value.into());
        // SAFETY: layout-identical typestate change; see `owner` above.
        unsafe { std::mem::transmute(self) }
    }

    /// sha of the commit
    #[inline]
    pub fn sha(mut self, value: impl Into<String>) -> GitBlobResponseGetBuilder<Owner, Repo, crate::generics::ShaExists> {
        self.inner.param_sha = Some(value.into());
        // SAFETY: layout-identical typestate change; see `owner` above.
        unsafe { std::mem::transmute(self) }
    }
}

// Sendable only once all three path parameters are present, enforced by the
// `*Exists` typestate markers.
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for GitBlobResponseGetBuilder<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::ShaExists> {
    type Output = GitBlobResponse;

    const METHOD: http::Method = http::Method::GET;

    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        format!("/repos/{owner}/{repo}/git/blobs/{sha}", owner=self.inner.param_owner.as_ref().expect("missing parameter owner?"), repo=self.inner.param_repo.as_ref().expect("missing parameter repo?"), sha=self.inner.param_sha.as_ref().expect("missing parameter sha?")).into()
    }
}

// Typed accessors for response headers of interest.
impl crate::client::ResponseWrapper<GitBlobResponse, GitBlobResponseGetBuilder<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::ShaExists>> {
    #[inline]
    pub fn message(&self) -> Option<String> {
        self.headers.get("message").and_then(|v| String::from_utf8_lossy(v.as_ref()).parse().ok())
    }

    #[inline]
    pub fn url(&self) -> Option<String> {
        self.headers.get("url").and_then(|v| String::from_utf8_lossy(v.as_ref()).parse().ok())
    }
}
use core::marker::PhantomData;
use futures::{Poll, Async, AsyncSink, StartSend};
use futures::sink::Sink;

/// A `Sink` adapter that hands every item straight to a synchronous closure.
///
/// The closure runs immediately inside `start_send`, so the sink never
/// buffers, never applies backpressure, and never fails.
pub struct SyncSink<Item, F> {
    _item: PhantomData<Item>,
    callback: F,
}

impl<Item, F> SyncSink<Item, F>
where
    F: FnMut(Item),
{
    /// Wraps `f` in a sink; `f` is invoked once per item sent.
    pub fn new(f: F) -> Self {
        SyncSink {
            _item: PhantomData,
            callback: f,
        }
    }
}

impl<Item, F> Sink for SyncSink<Item, F>
where
    F: FnMut(Item),
{
    type SinkItem = Item;
    type SinkError = ();

    /// Consumes `item` right away and reports the sink as always ready.
    fn start_send(&mut self, item: Self::SinkItem) -> StartSend<Self::SinkItem, Self::SinkError> {
        (self.callback)(item);
        Ok(AsyncSink::Ready)
    }

    /// Nothing is buffered, so flushing completes immediately.
    fn poll_complete(&mut self) -> Poll<(), Self::SinkError> {
        Ok(Async::Ready(()))
    }

    /// No resources to release; closing completes immediately.
    fn close(&mut self) -> Poll<(), Self::SinkError> {
        Ok(Async::Ready(()))
    }
}
use std::iter::Peekable;
use std::cmp::Ordering;
use regex::Regex::*;
use regex::Regex;

// NOTE(review): this file uses pre-1.0 nightly features (`box` syntax,
// `Vec::map_in_place`, `push_all`) and only builds on a matching toolchain.

/// The Brzozowski derivative of a regex-like value `R`, partitioned by input
/// character: `d` maps each character class (a sorted set of chars) to its
/// derivative, and `rest` is the derivative for every character not listed.
#[derive(Debug,Clone)]
pub struct Derivatives<R> {
    pub d: Vec<(Vec<char>, R)>,
    pub rest: R,
}

impl<R> Derivatives<R> {
    /// Applies `f` to every derivative (including `rest`), keeping the
    /// character classes unchanged.
    pub fn map<F: FnMut(R) -> R>(self, mut f: F) -> Derivatives<R> {
        Derivatives {
            d: self.d.map_in_place(|(x, r)| (x, f(r))),
            rest: f(self.rest),
        }
    }
}

/// Types whose Brzozowski derivative can be computed.
pub trait Differentiable {
    fn derivative(&self) -> Derivatives<Self>;
}

// Merging iterator over two sorted char streams: set union.
struct Union<T: Ord, It1: Iterator<Item=T>, It2: Iterator<Item=T>> {
    a : Peekable<It1>,
    b : Peekable<It2>,
}

fn union<T: Ord, It1: Iterator<Item=T>, It2: Iterator<Item=T>>(a: It1, b: It2) -> Union<T, It1, It2> {
    Union { a: a.peekable(), b: b.peekable() }
}

impl<T: Ord, It1: Iterator<Item=T>, It2: Iterator<Item=T>> Iterator for Union<T, It1, It2> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        // Classic sorted-merge: emit the smaller head; on a tie consume both
        // but emit only one (set semantics, no duplicates).
        match match self.a.peek() {
            Some(av) => match self.b.peek() {
                Some(bv) => av.cmp(bv),
                None => Ordering::Less,
            },
            None => Ordering::Greater,
        } {
            Ordering::Less => { self.a.next() }
            Ordering::Greater => { self.b.next() }
            Ordering::Equal => {
                self.a.next();
                self.b.next()
            }
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound is loose (duplicates shrink the union) but valid.
        let (a1, a2) = self.a.size_hint();
        let (b1, b2) = self.b.size_hint();
        (a1 + b1, if let (Some(a2), Some(b2)) = (a2, b2) { Some(a2 + b2) } else { None })
    }
}

// Merging iterator over two sorted streams: set intersection.
struct Inter<T: Ord, It1: Iterator<Item=T>, It2: Iterator<Item=T>> {
    a : Peekable<It1>,
    b : Peekable<It2>,
}

fn inter<T: Ord, It1: Iterator<Item=T>, It2: Iterator<Item=T>>(a: It1, b: It2) -> Inter<T, It1, It2> {
    Inter { a: a.peekable(), b: b.peekable() }
}

impl<T: Ord, It1: Iterator<Item=T>, It2: Iterator<Item=T>> Iterator for Inter<T, It1, It2> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        // Advance whichever side is behind; emit only on equality. Either
        // stream running dry ends the intersection.
        loop {
            match if let (Some(av), Some(bv)) = (self.a.peek(), self.b.peek()) {
                av.cmp(bv)
            } else {
                return None
            } {
                Ordering::Less => { self.a.next(); }
                Ordering::Greater => { self.b.next(); }
                Ordering::Equal => {
                    self.a.next();
                    return self.b.next();
                }
            }
        }
    }
}

// Merging iterator over two sorted streams: set difference `a \ b`.
struct Subtract<T: Ord, It1: Iterator<Item=T>, It2: Iterator<Item=T>> {
    a : Peekable<It1>,
    b : Peekable<It2>,
}

fn subtract<T: Ord, It1: Iterator<Item=T>, It2: Iterator<Item=T>>(a: It1, b: It2) -> Subtract<T, It1, It2> {
    Subtract { a: a.peekable(), b: b.peekable() }
}

impl<T: Ord, It1: Iterator<Item=T>, It2: Iterator<Item=T>> Iterator for Subtract<T, It1, It2> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        // Emit elements of `a` that are smaller than `b`'s head; skip (and
        // consume from both) on equality; `b` exhausted means the rest of
        // `a` passes through.
        loop {
            match match (self.a.peek(), self.b.peek()) {
                (Some(av), Some(bv)) => av.cmp(bv),
                (_, None) => Ordering::Less,
                (None, _) => return None,
            } {
                Ordering::Less => { return self.a.next(); }
                Ordering::Greater => { self.b.next(); }
                Ordering::Equal => {
                    self.a.next();
                    self.b.next();
                }
            }
        }
    }
}

// A (possibly complemented) character set, represented as a sorted Vec.
// `Just(v)` is exactly the chars in `v`; `Not(v)` is every char except `v`.
enum CharSet {
    Just(Vec<char>),
    Not(Vec<char>),
}

impl CharSet {
    // Intersection with a plain (non-complemented) sorted char list `b`.
    // The result is always finite, hence always `Just`.
    fn inter(&self, b: &[char]) -> CharSet {
        use self::CharSet::{Just, Not};
        match *self {
            Just(ref a) => { Just(inter(a.iter().map(|x| *x), b.iter().map(|x| *x)).collect()) }
            // (complement of a) ∩ b == b \ a
            Not(ref a) => { Just(subtract(b.iter().map(|x| *x), a.iter().map(|x| *x)).collect()) }
        }
    }
    // Difference: remove the chars in `b` from this set.
    fn subtract(&self, b: &[char]) -> CharSet {
        use self::CharSet::{Just, Not};
        match *self {
            Just(ref a) => { Just(subtract(a.iter().map(|x| *x), b.iter().map(|x| *x)).collect()) }
            // (complement of a) \ b == complement of (a ∪ b)
            Not(ref a) => { Not(union(a.iter().map(|x| *x), b.iter().map(|x| *x)).collect()) }
        }
    }
}

/// Combines the derivatives of several sub-expressions into the derivative
/// of their combination: for every character class, `f` receives the slice
/// of per-subexpression derivatives for that class and builds the combined
/// result (e.g. wraps them back in `Alt`/`And`/`Cat`).
///
/// Works by recursively refining a partition of the alphabet: `go` walks the
/// list of `Derivatives`, intersecting the current class `what` with each
/// sub-expression's classes, and accumulates one output entry per non-empty
/// refined class. Exactly one all-remaining-chars (`Not`) branch survives,
/// which becomes `rest`.
fn combine<R, S, F: FnMut(&[&R]) -> S>(v: &[Derivatives<R>], mut f: F) -> Derivatives<S> {
    fn go<'a, R, S, F: FnMut(&[&R]) -> S>(
        v: &'a [Derivatives<R>],
        f: &mut F,
        what: CharSet,
        res: &mut Vec<&'a R>,
        out: &mut (Vec<(Vec<char>, S)>, Option<S>)
    ) {
        // An empty positive class can never match a character: prune.
        if let CharSet::Just(ref v) = what {
            if v.len() == 0 {
                // prune
                return;
            }
        }
        // All sub-expressions consumed: emit one combined derivative for the
        // refined class collected in `res`.
        if v.len() == 0 {
            let reg = f(&res);
            match what {
                CharSet::Just(c) => out.0.push((c, reg)),
                CharSet::Not(_) => {
                    // Only one "everything else" class can exist per walk.
                    assert!(out.1.is_none());
                    out.1 = Some(reg);
                }
            }
            return;
        }
        let (first, rest) = v.split_at(1);
        let first = &first[0];
        // Union of all classes of `first` seen so far, so the leftover
        // (`rest` of `first`) can be handled below.
        let mut all_chars = Vec::new();
        for &(ref chars, ref reg) in first.d.iter() {
            all_chars = union(all_chars.into_iter(), chars.iter().map(|x| *x)).collect();
            let inter = what.inter(&chars);
            res.push(reg);
            go(rest, f, inter, res, out);
            res.pop();
        }
        // Characters not covered by any explicit class of `first` take its
        // `rest` derivative.
        let inter = what.subtract(&all_chars);
        res.push(&first.rest);
        go(rest, f, inter, res, out);
        res.pop();
    }
    let mut result = (Vec::new(), None);
    let mut regexes = Vec::new();
    // Start from the full alphabet (complement of the empty set).
    go(v, &mut f, CharSet::Not(Vec::new()), &mut regexes, &mut result);
    Derivatives {
        d: result.0,
        // The initial Not([]) class is never pruned, so this is always Some.
        rest: result.1.unwrap(),
    }
}

impl Differentiable for Regex {
    // Standard Brzozowski derivative rules, one arm per constructor.
    fn derivative(&self) -> Derivatives<Regex> {
        match *self {
            // Neither the empty language nor the empty string can consume a
            // character.
            Null => Derivatives { d: Vec::new(), rest: Null },
            Empty => Derivatives { d: Vec::new(), rest: Null },
            // A negated character class: listed chars fail, all others match.
            Except(ref cs) => {
                if cs.len() == 0 {
                    Derivatives { d: Vec::new(), rest: Empty }
                } else {
                    Derivatives { d: vec![(cs.clone(), Null)], rest: Empty }
                }
            }
            // Alternation: derivative of the char-class part plus derivatives
            // of the sub-expressions, re-combined into an Alt.
            Alt(ref cs, ref xs) => {
                let mut ds = Vec::with_capacity(if cs.len() > 0 { 1 } else { 0 } + xs.len());
                if cs.len() > 0 {
                    ds.push(Derivatives { d: vec![(cs.clone(), Empty)], rest: Null });
                }
                ds.extend(xs.iter().map(Differentiable::derivative));
                combine(&ds, |regexes| Alt(Vec::new(), regexes.iter().map(|r| (*r).clone()).collect()))
            }
            // Conjunction distributes over the derivative.
            And(ref xs) => {
                let ds: Vec<_> = xs.iter().map(Differentiable::derivative).collect();
                combine(&ds, |regexes| And(regexes.iter().map(|r| (*r).clone()).collect()))
            }
            // Negation commutes with the derivative.
            Not(box ref x) => x.derivative().map(|r| Not(box r)),
            // Concatenation: d(x0 x1 ...) = d(x0) x1... plus, for every
            // nullable prefix, the derivative of the next factor.
            Cat(ref xs) => {
                let mut ds = Vec::new();
                for i in 0..xs.len() {
                    ds.push(xs[i].derivative().map(|r| {
                        let mut v = vec![r];
                        v.push_all(&xs[i+1..]);
                        Cat(v)
                    }));
                    if !xs[i].nullable() { break; }
                }
                combine(&ds, |regexes| Alt(Vec::new(), regexes.iter().map(|r| (*r).clone()).collect()))
            }
            // d(x*) = d(x) x*
            Kleene(box ref x) => x.derivative().map(|r| Cat(vec![r, Kleene(box x.clone())])),
        }
    }
}

// Derivatives of "regular vectors", as described in "Regular-expression derivatives reexamined" by Owens et al.
/// A vector of differentiable values differentiates component-wise: take the
/// derivative of every element, then `combine` them so each character class
/// maps to the vector of per-element derivatives for that class.
impl<R: Differentiable + Clone> Differentiable for Vec<R> {
    fn derivative(&self) -> Derivatives<Vec<R>> {
        let per_element: Vec<Derivatives<R>> =
            self.iter().map(|r| r.derivative()).collect();
        combine(&per_element[..], |ds: &[&R]| {
            ds.iter().map(|&d| d.clone()).collect()
        })
    }
}
use std::cell::UnsafeCell;
use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};

use crossbeam_utils::{Backoff, CachePadded};

// Intrusive singly-linked node. `value` is `None` only for the node that is
// currently acting as the consumer-side stub/sentinel.
struct Node<T> {
    next: AtomicPtr<Node<T>>,
    value: Option<T>,
}

impl<T> Node<T> {
    // Heap-allocates a node and leaks it as a raw pointer; the queue owns
    // the allocation and frees it in `pop`/`Drop`.
    unsafe fn new(v: Option<T>) -> *mut Node<T> {
        Box::into_raw(Box::new(Node {
            next: AtomicPtr::new(ptr::null_mut()),
            value: v,
        }))
    }
}

/// The multi-producer single-consumer structure. This is not cloneable, but it
/// may be safely shared so long as it is guaranteed that there is only one
/// popper at a time (many pushers are allowed).
pub struct Queue<T> {
    // Producers CAS-free swap new nodes in here; cache-padded so producer
    // traffic does not false-share with the consumer-owned `tail`.
    head: CachePadded<AtomicPtr<Node<T>>>,
    // Touched only by the single consumer, hence UnsafeCell rather than an
    // atomic.
    tail: UnsafeCell<*mut Node<T>>,
}

// SAFETY: the push path is fully atomic and the single-consumer contract
// (documented on the type) protects `tail`.
unsafe impl<T: Send> Send for Queue<T> {}
unsafe impl<T: Send> Sync for Queue<T> {}

impl<T> Queue<T> {
    /// Creates a new queue that is safe to share among multiple producers and
    /// one consumer.
    pub fn new() -> Queue<T> {
        // Both ends start at a shared stub node, so head == tail <=> empty.
        let stub = unsafe { Node::new(None) };
        Queue {
            head: AtomicPtr::new(stub).into(),
            tail: UnsafeCell::new(stub),
        }
    }

    /// Pushes an element; may be called from many threads concurrently.
    pub fn push(&self, t: T) {
        unsafe {
            let node = Node::new(Some(t));
            // Publish the node as the new head first, then link the previous
            // head to it. Between the two stores the chain is momentarily
            // broken; `pop` spins until the link becomes visible.
            let prev = self.head.swap(node, Ordering::AcqRel);
            (*prev).next.store(node, Ordering::Release);
        }
    }

    /// if the queue is empty
    #[inline]
    pub fn is_empty(&self) -> bool {
        let tail = unsafe { *self.tail.get() };
        // the list is empty exactly when head still equals the tail stub
        self.head.load(Ordering::Acquire) == tail
    }

    /// Pops some data from this queue.
    ///
    /// Must only be called by the single consumer; returns `None` when the
    /// queue is empty.
    pub fn pop(&self) -> Option<T> {
        unsafe {
            let tail = *self.tail.get();

            // the list is empty
            if self.head.load(Ordering::Acquire) == tail {
                return None;
            }

            // spin until tail.next becomes non-null — a producer has swapped
            // head but not yet stored the link (see `push`)
            let mut next;
            let backoff = Backoff::new();
            loop {
                next = (*tail).next.load(Ordering::Acquire);
                if !next.is_null() {
                    break;
                }
                backoff.snooze();
            }

            // reading `value` is not an atomic operation, it may read out an
            // old shadow value for the tail stub, so only assert on `next`
            // assert!((*tail).value.is_none());
            assert!((*next).value.is_some());
            // we take the next value out; this is why an Option hosts the
            // value — `next` then becomes the new (empty) stub
            let ret = (*next).value.take().unwrap();
            // free the old stub node
            let _: Box<Node<T>> = Box::from_raw(tail);

            // move the tail to next
            *self.tail.get() = next;
            Some(ret)
        }
    }
}

impl<T> Default for Queue<T> {
    fn default() -> Self {
        Queue::new()
    }
}

impl<T> Drop for Queue<T> {
    fn drop(&mut self) {
        // Drain remaining elements (freeing their nodes as we go)...
        while self.pop().is_some() {}
        // ...then release the stub
        let _: Box<Node<T>> = unsafe { Box::from_raw(*self.tail.get()) };
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::mpsc::channel;
    use std::sync::Arc;
    use std::thread;

    // Basic FIFO behaviour on a single thread.
    #[test]
    fn test_queue() {
        let q: Queue<usize> = Queue::new();
        assert_eq!(q.pop(), None);
        q.push(1);
        q.push(2);
        assert_eq!(q.pop(), Some(1));
        assert_eq!(q.pop(), Some(2));
        assert!(q.is_empty());
    }

    // Multi-producer stress test: all pushed messages are eventually popped.
    #[test]
    fn test() {
        let nthreads = 8;
        let nmsgs = 1000;
        let q = Queue::new();
        match q.pop() {
            None => {}
            Some(..) => panic!(),
        }
        let (tx, rx) = channel();
        let q = Arc::new(q);

        for _ in 0..nthreads {
            let tx = tx.clone();
            let q = q.clone();
            thread::spawn(move || {
                for i in 0..nmsgs {
                    q.push(i);
                }
                tx.send(()).unwrap();
            });
        }

        let mut i = 0;
        while i < nthreads * nmsgs {
            match q.pop() {
                None => {}
                Some(_) => i += 1,
            }
        }
        drop(tx);
        for _ in 0..nthreads {
            rx.recv().unwrap();
        }
    }
}

#[cfg(all(nightly, test))]
mod bench {
    extern crate test;
    use self::test::Bencher;

    use super::*;
    use std::sync::Arc;
    use std::thread;

    use crate::test_queue::ScBlockPop;

    // Blocking pop via exponential backoff, for benchmark consumers.
    impl<T: Send> ScBlockPop<T> for super::Queue<T> {
        fn block_pop(&self) -> T {
            let backoff = Backoff::new();
            loop {
                match self.pop() {
                    Some(v) => return v,
                    None => backoff.snooze(),
                }
            }
        }
    }

    #[test]
    fn queue_sanity() {
        let q = Queue::<usize>::new();
        assert!(q.is_empty());
        for i in 0..100 {
            q.push(i);
        }
        // assert_eq!(q.len(), 100);
        // println!("{q:?}");
        for i in 0..100 {
            assert_eq!(q.pop(), Some(i));
        }
        assert_eq!(q.pop(), None);
        assert!(q.is_empty());
    }

    #[bench]
    fn single_thread_test(b: &mut Bencher) {
        let q = Queue::new();
        let mut i = 0;
        b.iter(|| {
            q.push(i);
            assert_eq!(q.pop(), Some(i));
            i += 1;
        });
    }

    #[bench]
    fn multi_1p1c_test(b: &mut Bencher) {
        b.iter(|| {
            let q = Arc::new(Queue::new());
            let total_work: usize = 1_000_000;
            // create worker threads that generate mono increasing index
            let _q = q.clone();
            // in other thread the value should be still 100
            thread::spawn(move || {
                for i in 0..total_work {
                    _q.push(i);
                }
            });
            for i in 0..total_work {
                let v = q.block_pop();
                assert_eq!(i, v);
            }
        });
    }

    #[bench]
    fn multi_2p1c_test(b: &mut Bencher) {
        b.iter(|| {
            let q = Arc::new(Queue::new());
            let total_work: usize = 1_000_000;
            // create worker threads that generate mono increasing index
            // in other thread the value should be still 100
            let mut total = 0;
            thread::scope(|s| {
                let threads = 20;
                for i in 0..threads {
                    let q = q.clone();
                    s.spawn(move || {
                        let len = total_work / threads;
                        let start = i * len;
                        for v in start..start + len {
                            q.push(v);
                        }
                    });
                }
                s.spawn(|| {
                    for _ in 0..total_work {
                        total += q.block_pop();
                    }
                });
            });
            assert!(q.is_empty());
            assert_eq!(total, (0..total_work).sum::<usize>());
        });
    }

    // #[bench]
    // fn bulk_1p2c_test(b: &mut Bencher) {
    //     b.iter(|| {
    //         let q = Arc::new(Queue::new());
    //         let total_work: usize = 1_000_000;
    //         // create worker threads that generate mono increasing index
    //         // in other thread the value should be still 100
    //         for i in 0..total_work {
    //             q.push(i);
    //         }
    //         let total = Arc::new(AtomicUsize::new(0));
    //         thread::scope(|s| {
    //             let threads = 20;
    //             for _ in 0..threads {
    //                 let q = q.clone();
    //                 let total = total.clone();
    //                 s.spawn(move || {
    //                     while !q.is_empty() {
    //                         if let Some(v) = q.bulk_pop() {
    //                             total.fetch_add(v.len(), Ordering::AcqRel);
    //                         }
    //                     }
    //                 });
    //             }
    //         });
    //         assert!(q.is_empty());
    //         assert_eq!(total.load(Ordering::Acquire), total_work);
    //     });
    // }
}
/// Entry point: demonstrates calling a helper with a literal argument.
fn main() {
    let value = 3;
    f2(value);
}

/// Prints the given integer on its own line.
fn f2(xx: i32) {
    let shown = xx;
    println!("{}", shown);
}
use std::{collections::HashMap, sync::Arc, time::Duration};

use serde::Deserialize;

use crate::{
    bson::{doc, DateTime},
    hello::{HelloCommandResponse, HelloReply, LastWrite},
    options::ServerAddress,
    sdam::{
        description::topology::{test::f64_ms_as_duration, TopologyType},
        ServerDescription, ServerType, TopologyDescription,
    },
    selection_criteria::{SelectionCriteria, TagSet},
};

mod in_window;
mod logic;

// Deserialized shape of a server-selection spec-test topology.
#[derive(Debug, Deserialize)]
struct TestTopologyDescription {
    #[serde(rename = "type")]
    topology_type: TopologyType,
    servers: Vec<TestServerDescription>,
}

impl TestTopologyDescription {
    // Converts the test fixture into a real `TopologyDescription`; servers
    // that cannot be converted (e.g. PossiblePrimary or an unparsable
    // address) are silently dropped.
    fn into_topology_description(
        self,
        heartbeat_frequency: Option<Duration>,
    ) -> TopologyDescription {
        let servers: HashMap<ServerAddress, ServerDescription> = self
            .servers
            .into_iter()
            .filter_map(|sd| {
                sd.into_server_description()
                    .map(|sd| (sd.address.clone(), sd))
            })
            .collect();

        TopologyDescription {
            single_seed: servers.len() == 1,
            topology_type: self.topology_type,
            set_name: None,
            max_set_version: None,
            max_election_id: None,
            compatibility_error: None,
            logical_session_timeout: None,
            transaction_support_status: Default::default(),
            cluster_time: None,
            local_threshold: None,
            heartbeat_freq: heartbeat_frequency,
            servers,
        }
    }
}

// Deserialized shape of a single server entry in a spec test.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct TestServerDescription {
    address: String,
    #[serde(rename = "avg_rtt_ms")]
    avg_rtt_ms: Option<f64>,
    #[serde(rename = "type")]
    server_type: TestServerType,
    tags: Option<TagSet>,
    last_update_time: Option<i32>,
    last_write: Option<LastWriteDate>,
    // We don't need to use this field, but it needs to be included during deserialization so that
    // we can use the deny_unknown_fields tag.
    _max_wire_version: Option<i32>,
}

impl TestServerDescription {
    // Builds a `ServerDescription` by synthesizing the hello reply a server
    // of this type would have produced. Returns `None` when the type has no
    // real counterpart (PossiblePrimary) or the address fails to parse.
    fn into_server_description(self) -> Option<ServerDescription> {
        let server_type = match self.server_type.into_server_type() {
            Some(server_type) => server_type,
            None => return None,
        };

        let server_address = ServerAddress::parse(self.address).ok()?;
        let tags = self.tags;
        let last_write = self.last_write;
        let avg_rtt_ms = self.avg_rtt_ms;

        let reply = hello_response_from_server_type(server_type).map(|mut command_response| {
            command_response.tags = tags;
            command_response.last_write = last_write.map(|last_write| LastWrite {
                last_write_date: DateTime::from_millis(last_write.last_write_date),
            });

            HelloReply {
                server_address: server_address.clone(),
                command_response,
                cluster_time: None,
                raw_command_response: Default::default(),
            }
        });

        let mut server_desc = match reply {
            Some(reply) => ServerDescription::new_from_hello_reply(
                server_address,
                reply,
                // NOTE(review): spec fixtures for known server types always
                // carry avg_rtt_ms, so this unwrap is expected to hold.
                avg_rtt_ms.map(f64_ms_as_duration).unwrap(),
            ),
            None => ServerDescription::new(server_address),
        };
        server_desc.last_update_time = self
            .last_update_time
            .map(|i| DateTime::from_millis(i.into()));

        Some(server_desc)
    }
}

// Wrapper matching the spec tests' `lastWrite: { lastWriteDate: ... }` shape.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct LastWriteDate {
    last_write_date: i64,
}

// Server types as spelled in the spec-test JSON files.
#[derive(Clone, Copy, Debug, Deserialize)]
enum TestServerType {
    Standalone,
    Mongos,
    #[serde(rename = "RSPrimary")]
    RsPrimary,
    #[serde(rename = "RSSecondary")]
    RsSecondary,
    #[serde(rename = "RSArbiter")]
    RsArbiter,
    #[serde(rename = "RSOther")]
    RsOther,
    #[serde(rename = "RSGhost")]
    RsGhost,
    LoadBalancer,
    Unknown,
    PossiblePrimary,
}

impl TestServerType {
    // Maps to the driver's `ServerType`; PossiblePrimary has no driver-side
    // equivalent and yields `None`.
    fn into_server_type(self) -> Option<ServerType> {
        match self {
            TestServerType::Standalone => Some(ServerType::Standalone),
            TestServerType::Mongos => Some(ServerType::Mongos),
            TestServerType::RsPrimary => Some(ServerType::RsPrimary),
            TestServerType::RsSecondary => Some(ServerType::RsSecondary),
            TestServerType::RsArbiter => Some(ServerType::RsArbiter),
            TestServerType::RsOther => Some(ServerType::RsOther),
            TestServerType::RsGhost => Some(ServerType::RsGhost),
            TestServerType::LoadBalancer => Some(ServerType::LoadBalancer),
            TestServerType::Unknown => Some(ServerType::Unknown),
            TestServerType::PossiblePrimary => None,
        }
    }
}

// Fabricates the minimal hello response a server of the given type would
// send, so spec fixtures can be turned into realistic descriptions.
fn hello_response_from_server_type(server_type: ServerType) -> Option<HelloCommandResponse> {
    let mut response = HelloCommandResponse::default();

    match server_type {
        ServerType::Unknown => {
            return None;
        }
        ServerType::Mongos => {
            response.msg = Some("isdbgrid".into());
        }
        ServerType::RsPrimary => {
            response.set_name = Some("foo".into());
            response.is_writable_primary = Some(true);
        }
        ServerType::RsOther => {
            response.set_name = Some("foo".into());
            response.hidden = Some(true);
        }
        ServerType::RsSecondary => {
            response.set_name = Some("foo".into());
            response.secondary = Some(true);
        }
        ServerType::RsArbiter => {
            response.set_name = Some("foo".into());
            response.arbiter_only = Some(true);
        }
        ServerType::RsGhost => {
            response.is_replica_set = Some(true);
        }
        ServerType::Standalone | ServerType::LoadBalancer => {}
    };

    Some(response)
}

// A predicate that rejects the only selectable server type present must
// yield an empty candidate list (unavailable types are filtered first).
#[test]
fn predicate_omits_unavailable() {
    let criteria = SelectionCriteria::Predicate(Arc::new(|si| {
        !matches!(si.server_type(), ServerType::RsPrimary)
    }));

    let desc = TestTopologyDescription {
        topology_type: TopologyType::ReplicaSetWithPrimary,
        servers: vec![
            TestServerDescription {
                address: "localhost:27017".to_string(),
                avg_rtt_ms: Some(12.0),
                server_type: TestServerType::RsPrimary,
                tags: None,
                last_update_time: None,
                last_write: None,
                _max_wire_version: None,
            },
            TestServerDescription {
                address: "localhost:27018".to_string(),
                avg_rtt_ms: Some(12.0),
                server_type: TestServerType::Unknown,
                tags: None,
                last_update_time: None,
                last_write: None,
                _max_wire_version: None,
            },
            TestServerDescription {
                address: "localhost:27019".to_string(),
                avg_rtt_ms: Some(12.0),
                server_type: TestServerType::RsArbiter,
                tags: None,
                last_update_time: None,
                last_write: None,
                _max_wire_version: None,
            },
            TestServerDescription {
                address: "localhost:27020".to_string(),
                avg_rtt_ms: Some(12.0),
                server_type: TestServerType::RsGhost,
                tags: None,
                last_update_time: None,
                last_write: None,
                _max_wire_version: None,
            },
            TestServerDescription {
                address: "localhost:27021".to_string(),
                avg_rtt_ms: Some(12.0),
                server_type: TestServerType::RsOther,
                tags: None,
                last_update_time: None,
                last_write: None,
                _max_wire_version: None,
            },
        ],
    }
    .into_topology_description(None);

    pretty_assertions::assert_eq!(
        desc.suitable_servers_in_latency_window(&criteria).unwrap(),
        Vec::<&ServerDescription>::new()
    );
}
// Rust-basics walkthrough: constants, shadowing, arithmetic, tuples, arrays,
// functions, and all the control-flow constructs. Output strings are part of
// the expected behaviour and are left untouched.
fn main() {
    // Constants are like macros in C, i.e., are resolved at compile time
    const MAX_VALUE: u64 = 15;
    println!(
        "The constants MAX_VALUE ({}) it's like an MACRO in C",
        MAX_VALUE
    );

    // Immutable variables are like constants in C
    let x: i64 = 3;
    // Shadowing: a new `x` built from the old one
    let x: i64 = x * 6;
    println!("X value is {} ", x);

    // An immutable variable can be "edited" in code, changing its type
    // (creating a new one with this name at this point)
    let x: f64 = 15.0;
    println!("This value is {}", x);

    // You can only reassign a variable using the let keyword.
    // You can create a mutable element and not change it, but it'll be a warning:
    // let mut spaces = " ";
    // let mut spaces = spaces.len();
    // println!("Spaces: {}", spaces);

    // addition
    let sum = 5 + 10;
    // subtraction
    let difference = 95.5 - 4.3;
    // multiplication
    let product = 4 * 30;
    // division
    let quotient = 56.7 / 32.2;
    // remainder
    let remainder = 43 % 5;
    println!(
        "{}\t{}\t{}\t{}\t{}",
        sum, difference, product, quotient, remainder
    );

    // `char` is a 4-byte Unicode scalar value, emoji included
    let heart_eyed_cat: char = '😻';
    println!("The cat is {}", heart_eyed_cat);

    // Tuples can mix types; destructuring picks fields out
    let tup: (i32, f64, u8) = (500, 6.4, 1);
    let (_, y, _) = tup;
    println!("The value of y is: {}", y);

    // An array using i32 with 5 elements
    let a: [i32; 5] = [1, 2, 3, 4, 5];
    // An array for this
    // let months = [
    //     "January",
    //     "February",
    //     "March",
    //     "April",
    //     "May",
    //     "June",
    //     "July",
    //     "August",
    //     "September",
    //     "October",
    //     "November",
    //     "December",
    // ];
    // An array initialized with 3
    // let a = [3; 5];
    for i in a.iter() {
        println!("The value is {}", i);
    }

    another_function(3, 6);
    println!("The value after execute is {}", x);

    // A block is an expression: its last line (no semicolon) is its value
    let y = {
        let x = 3;
        // The line below is an expression (it's like a return)
        x + 1
    };
    println!("The value of y is: {}", y);
    println!("Five {}", five());

    let x: i32 = plus_one(5);
    println!("The value of x is: {}", x);

    let number = 6;
    if number % 4 == 0 {
        println!("number is divisible by 4");
    } else if number % 3 == 0 {
        println!("number is divisible by 3");
    } else if number % 2 == 0 {
        println!("number is divisible by 2");
    } else {
        println!("number is not divisible by 4, 3, or 2");
    }

    // Ternary-style conditional expression
    let condition = true;
    let number = if condition { 5 } else { 6 };
    println!("The value of number is: {}", number);

    // Loop with some output
    let mut counter = 0;
    let result = loop {
        counter += 1;
        if counter == 10 {
            // We are passing counter*2 as the break output
            break counter * 2;
        }
    };
    println!("The result is {}", result);

    // Loop with a breakpoint (exit condition) defined
    let mut number = 3;
    while number != 0 {
        println!("{}!", number);
        number -= 1;
    }
    println!("LIFTOFF!!!");

    // For over a reversed numeric range
    for number in (1..4).rev() {
        println!("{}!", number);
    }
    println!("LIFTOFF!!!");
}

// Prints both arguments; demonstrates typed parameters.
fn another_function(x: i32, y: i32) {
    println!("The value of x is: {}", x);
    println!("The value of y is: {}", y);
}

// Returns a constant via an implicit (expression) return.
fn five() -> i32 {
    5
}

fn plus_one(x: i32) -> i32 {
    // You can use the "statements form" with the return keyword:
    // return x + 1;
    // Or you can just use the expression form (without the semicolon)
    x + 1
}
//! Module for actions setting flags.
//!
//! This contains helper functions to set flags whenever a signal happens. The flags are atomic
//! bools or numbers and the library manipulates them with the `SeqCst` ordering, in case someone
//! cares about relative order to some *other* atomic variables. If you don't care about the
//! relative order, you are free to use `Ordering::Relaxed` when reading and resetting the flags.
//!
//! # When to use
//!
//! The flags in this module allow for polling if a signal arrived since the previous poll. They do
//! not allow blocking until something arrives.
//!
//! Therefore, the natural way to use them is in applications that have some kind of iterative work
//! with both some upper and lower time limit on one iteration. If one iteration could block for
//! arbitrary time, the handling of the signal would be postponed for a long time. If the iteration
//! didn't block at all, the checking for the signal would turn into a busy-loop.
//!
//! If what you need is blocking until a signal comes, you might find better tools in the
//! [`pipe`][crate::low_level::pipe] and [`iterator`][crate::iterator] modules.
//!
//! # Examples
//!
//! Doing something until terminated. This also knows by which signal it was terminated. In case
//! multiple termination signals arrive before it is handled, it recognizes the last one.
//!
//! ```rust
//! use std::io::Error;
//! use std::sync::Arc;
//! use std::sync::atomic::{AtomicUsize, Ordering};
//!
//! use signal_hook::consts::signal::*;
//! use signal_hook::flag as signal_flag;
//!
//! fn main() -> Result<(), Error> {
//!     let term = Arc::new(AtomicUsize::new(0));
//!     const SIGTERM_U: usize = SIGTERM as usize;
//!     const SIGINT_U: usize = SIGINT as usize;
//!     # #[cfg(not(windows))]
//!     const SIGQUIT_U: usize = SIGQUIT as usize;
//!     signal_flag::register_usize(SIGTERM, Arc::clone(&term), SIGTERM_U)?;
//!     signal_flag::register_usize(SIGINT, Arc::clone(&term), SIGINT_U)?;
//!     # #[cfg(not(windows))]
//!     signal_flag::register_usize(SIGQUIT, Arc::clone(&term), SIGQUIT_U)?;
//!
//!     # // Hack to terminate the example when run as a doc-test.
//!     # term.store(SIGTERM_U, Ordering::Relaxed);
//!     loop {
//!         match term.load(Ordering::Relaxed) {
//!             0 => {
//!                 // Do some useful stuff here
//!             }
//!             SIGTERM_U => {
//!                 eprintln!("Terminating on the TERM signal");
//!                 break;
//!             }
//!             SIGINT_U => {
//!                 eprintln!("Terminating on the INT signal");
//!                 break;
//!             }
//!             # #[cfg(not(windows))]
//!             SIGQUIT_U => {
//!                 eprintln!("Terminating on the QUIT signal");
//!                 break;
//!             }
//!             _ => unreachable!(),
//!         }
//!     }
//!
//!     Ok(())
//! }
//! ```
//!
//! Sending a signal to self and seeing it arrived (not of a practical usage on itself):
//!
//! ```rust
//! use std::io::Error;
//! use std::sync::Arc;
//! use std::sync::atomic::{AtomicBool, Ordering};
//! use std::thread;
//! use std::time::Duration;
//!
//! use signal_hook::consts::signal::*;
//! use signal_hook::low_level::raise;
//!
//! fn main() -> Result<(), Error> {
//!     let got = Arc::new(AtomicBool::new(false));
//!     # #[cfg(not(windows))]
//!     signal_hook::flag::register(SIGUSR1, Arc::clone(&got))?;
//!     # #[cfg(windows)]
//!     # signal_hook::flag::register(SIGTERM, Arc::clone(&got))?;
//!     # #[cfg(not(windows))]
//!     raise(SIGUSR1).unwrap();
//!     # #[cfg(windows)]
//!     # raise(SIGTERM).unwrap();
//!     // A sleep here, because it could run the signal handler in another thread and we may not
//!     // see the flag right away. This is still a hack and not guaranteed to work, it is just an
//!     // example!
//!     thread::sleep(Duration::from_secs(1));
//!     assert!(got.load(Ordering::Relaxed));
//!     Ok(())
//! }
//! ```
//!
//! Reloading a configuration on `SIGHUP` (which is a common behaviour of many UNIX daemons,
//! together with reopening the log file).
//!
//! ```rust
//! use std::io::Error;
//! use std::sync::Arc;
//! use std::sync::atomic::{AtomicBool, Ordering};
//!
//! use signal_hook::consts::signal::*;
//! use signal_hook::flag as signal_flag;
//!
//! fn main() -> Result<(), Error> {
//!     // We start with true, to load the configuration in the very first iteration too.
//!     let reload = Arc::new(AtomicBool::new(true));
//!     let term = Arc::new(AtomicBool::new(false));
//!     # #[cfg(not(windows))]
//!     signal_flag::register(SIGHUP, Arc::clone(&reload))?;
//!     signal_flag::register(SIGINT, Arc::clone(&term))?;
//!     signal_flag::register(SIGTERM, Arc::clone(&term))?;
//!     # #[cfg(not(windows))]
//!     signal_flag::register(SIGQUIT, Arc::clone(&term))?;
//!     while !term.load(Ordering::Relaxed) {
//!         // Using swap here, not load, to reset it back to false once it is reloaded.
//!         if reload.swap(false, Ordering::Relaxed) {
//!             // Reload the config here
//!             #
//!             # // Hidden hack to make the example terminate when run as doc-test. Not part of the
//!             # // real code.
//!             # term.store(true, Ordering::Relaxed);
//!         }
//!         // Serve one request
//!     }
//!     Ok(())
//! }
//! ```

use std::io::Error;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;

use libc::{c_int, EINVAL};

use crate::{low_level, SigId};

/// Registers an action to set the flag to `true` whenever the given signal arrives.
///
/// # Panics
///
/// If the signal is one of the forbidden.
pub fn register(signal: c_int, flag: Arc<AtomicBool>) -> Result<SigId, Error> {
    // We use SeqCst for two reasons:
    // * Signals should not come very often, so the performance does not really matter.
    // * We promise the order of actions, but setting different atomics with Relaxed or similar
    //   would not guarantee the effective order.
    unsafe { low_level::register(signal, move || flag.store(true, Ordering::SeqCst)) }
}

/// Registers an action to set the flag to the given value whenever the signal arrives.
pub fn register_usize(signal: c_int, flag: Arc<AtomicUsize>, value: usize) -> Result<SigId, Error> { unsafe { low_level::register(signal, move || flag.store(value, Ordering::SeqCst)) } } /// Terminate the application on a signal if the given condition is true. /// /// This can be used for different use cases. One of them (with the condition being always true) is /// just unconditionally terminate on the given signal. /// /// Another is being able to turn on and off the behaviour by the shared flag. /// /// The last one is handling double CTRL+C ‒ if the user presses CTRL+C, we would like to start a /// graceful shutdown. But if anything ever gets stuck in the shutdown, second CTRL+C (or other /// such termination signal) should terminate the application without further delay. /// /// To do that, one can combine this with [`register`]. On the first run, the flag is `false` and /// this doesn't terminate. But then the flag is set to true during the first run and „arms“ the /// shutdown on the second run. Note that it matters in which order the actions are registered (the /// shutdown must go first). And yes, this also allows asking the user „Do you want to terminate“ /// and disarming the abrupt shutdown if the user answers „No“. /// /// # Panics /// /// If the signal is one of the forbidden. pub fn register_conditional_shutdown( signal: c_int, status: c_int, condition: Arc<AtomicBool>, ) -> Result<SigId, Error> { let action = move || { if condition.load(Ordering::SeqCst) { low_level::exit(status); } }; unsafe { low_level::register(signal, action) } } /// Conditionally runs an emulation of the default action on the given signal. /// /// If the provided condition is true at the time of invoking the signal handler, the equivalent of /// the default action of the given signal is run. It is a bit similar to /// [`register_conditional_shutdown`], except that it doesn't terminate for non-termination /// signals, it runs their default handler. 
///
/// # Panics
///
/// If the signal is one of the forbidden
///
/// # Errors
///
/// Similarly to the [`emulate_default_handler`][low_level::emulate_default_handler] function, this
/// one looks the signal up in a table. If it is unknown, an error is returned.
///
/// Additionally to that, any errors that can be caused by a registration of a handler can happen
/// too.
pub fn register_conditional_default(
    signal: c_int,
    condition: Arc<AtomicBool>,
) -> Result<SigId, Error> {
    // Verify we know about this particular signal.
    low_level::signal_name(signal).ok_or_else(|| Error::from_raw_os_error(EINVAL))?;
    // Only emulate the default action when the shared flag is set at delivery time;
    // the emulation's own result is deliberately ignored (best effort).
    let action = move || {
        if condition.load(Ordering::SeqCst) {
            let _ = low_level::emulate_default_handler(signal);
        }
    };
    unsafe { low_level::register(signal, action) }
}

#[cfg(test)]
mod tests {
    use std::sync::atomic;
    use std::time::{Duration, Instant};

    use super::*;
    use crate::consts::signal::*;

    // Sends a signal to the current process; the signal differs per platform because
    // SIGUSR1 does not exist on Windows.
    fn self_signal() {
        #[cfg(not(windows))]
        const SIG: c_int = SIGUSR1;
        #[cfg(windows)]
        const SIG: c_int = SIGTERM;
        crate::low_level::raise(SIG).unwrap();
    }

    // Spin-waits (up to ~1s) until the flag becomes true; returns false on timeout.
    fn wait_flag(flag: &AtomicBool) -> bool {
        let start = Instant::now();
        while !flag.load(Ordering::Relaxed) {
            // Replaced by hint::spin_loop, but we want to support older compiler
            #[allow(deprecated)]
            atomic::spin_loop_hint();
            if Instant::now() - start > Duration::from_secs(1) {
                // We reached a timeout and nothing happened yet.
                // In theory, using timeouts for thread-synchronization tests is wrong, but a
                // second should be enough in practice.
                return false;
            }
        }
        true
    }

    #[test]
    fn register_unregister() {
        // When we register the action, it is active.
        let flag = Arc::new(AtomicBool::new(false));
        #[cfg(not(windows))]
        let signal = register(SIGUSR1, Arc::clone(&flag)).unwrap();
        #[cfg(windows)]
        let signal = register(crate::SIGTERM, Arc::clone(&flag)).unwrap();
        self_signal();
        assert!(wait_flag(&flag));

        // But stops working after it is unregistered.
        assert!(crate::low_level::unregister(signal));
        flag.store(false, Ordering::Relaxed);
        self_signal();
        assert!(!wait_flag(&flag));

        // And the unregistration actually dropped its copy of the Arc
        assert_eq!(1, Arc::strong_count(&flag));
    }

    // The shutdown is tested in tests/shutdown.rs
}
// Auto-generated (svd2rust-style) reader/writer accessors for the ADC CFGR2
// (configuration register 2). Do not edit by hand; regenerate from the SVD instead.
#[doc = "Register `CFGR2` reader"]
pub type R = crate::R<CFGR2_SPEC>;
#[doc = "Register `CFGR2` writer"]
pub type W = crate::W<CFGR2_SPEC>;
#[doc = "Field `OVSE` reader - Oversampler Enable This bit is set and cleared by software. Note: Software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type OVSE_R = crate::BitReader;
#[doc = "Field `OVSE` writer - Oversampler Enable This bit is set and cleared by software. Note: Software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type OVSE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `OVSR` reader - Oversampling ratio This bit filed defines the number of oversampling ratio. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type OVSR_R = crate::FieldReader;
#[doc = "Field `OVSR` writer - Oversampling ratio This bit filed defines the number of oversampling ratio. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type OVSR_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `OVSS` reader - Oversampling shift This bit is set and cleared by software. Others: Reserved Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type OVSS_R = crate::FieldReader;
#[doc = "Field `OVSS` writer - Oversampling shift This bit is set and cleared by software. Others: Reserved Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type OVSS_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `TOVS` reader - Triggered Oversampling This bit is set and cleared by software. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type TOVS_R = crate::BitReader;
#[doc = "Field `TOVS` writer - Triggered Oversampling This bit is set and cleared by software. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
pub type TOVS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `LFTRIG` reader - Low frequency trigger mode enable This bit is set and cleared by software. Note: The software is allowed to write this bit only when ADSTART bit is cleared to 0 (this ensures that no conversion is ongoing)."]
pub type LFTRIG_R = crate::BitReader;
#[doc = "Field `LFTRIG` writer - Low frequency trigger mode enable This bit is set and cleared by software. Note: The software is allowed to write this bit only when ADSTART bit is cleared to 0 (this ensures that no conversion is ongoing)."]
pub type LFTRIG_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CKMODE` reader - ADC clock mode These bits are set and cleared by software to define how the analog ADC is clocked: In all synchronous clock modes, there is no jitter in the delay from a timer trigger to the start of a conversion. Note: The software is allowed to write these bits only when the ADC is disabled (ADCAL = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0)."]
pub type CKMODE_R = crate::FieldReader;
#[doc = "Field `CKMODE` writer - ADC clock mode These bits are set and cleared by software to define how the analog ADC is clocked: In all synchronous clock modes, there is no jitter in the delay from a timer trigger to the start of a conversion. Note: The software is allowed to write these bits only when the ADC is disabled (ADCAL = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0)."]
pub type CKMODE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
// Read proxy: each getter extracts its field from the cached register value
// (bit positions: OVSE=0, OVSR=2:4, OVSS=5:8, TOVS=9, LFTRIG=29, CKMODE=30:31).
impl R {
    #[doc = "Bit 0 - Oversampler Enable This bit is set and cleared by software. Note: Software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
    #[inline(always)]
    pub fn ovse(&self) -> OVSE_R {
        OVSE_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bits 2:4 - Oversampling ratio This bit filed defines the number of oversampling ratio. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
    #[inline(always)]
    pub fn ovsr(&self) -> OVSR_R {
        OVSR_R::new(((self.bits >> 2) & 7) as u8)
    }
    #[doc = "Bits 5:8 - Oversampling shift This bit is set and cleared by software. Others: Reserved Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
    #[inline(always)]
    pub fn ovss(&self) -> OVSS_R {
        OVSS_R::new(((self.bits >> 5) & 0x0f) as u8)
    }
    #[doc = "Bit 9 - Triggered Oversampling This bit is set and cleared by software. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
    #[inline(always)]
    pub fn tovs(&self) -> TOVS_R {
        TOVS_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 29 - Low frequency trigger mode enable This bit is set and cleared by software. Note: The software is allowed to write this bit only when ADSTART bit is cleared to 0 (this ensures that no conversion is ongoing)."]
    #[inline(always)]
    pub fn lftrig(&self) -> LFTRIG_R {
        LFTRIG_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bits 30:31 - ADC clock mode These bits are set and cleared by software to define how the analog ADC is clocked: In all synchronous clock modes, there is no jitter in the delay from a timer trigger to the start of a conversion. Note: The software is allowed to write these bits only when the ADC is disabled (ADCAL = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0)."]
    #[inline(always)]
    pub fn ckmode(&self) -> CKMODE_R {
        CKMODE_R::new(((self.bits >> 30) & 3) as u8)
    }
}
// Write proxy: each method returns a typed writer positioned at the field's offset.
impl W {
    #[doc = "Bit 0 - Oversampler Enable This bit is set and cleared by software. Note: Software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
    #[inline(always)]
    #[must_use]
    pub fn ovse(&mut self) -> OVSE_W<CFGR2_SPEC, 0> {
        OVSE_W::new(self)
    }
    #[doc = "Bits 2:4 - Oversampling ratio This bit filed defines the number of oversampling ratio. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
    #[inline(always)]
    #[must_use]
    pub fn ovsr(&mut self) -> OVSR_W<CFGR2_SPEC, 2> {
        OVSR_W::new(self)
    }
    #[doc = "Bits 5:8 - Oversampling shift This bit is set and cleared by software. Others: Reserved Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
    #[inline(always)]
    #[must_use]
    pub fn ovss(&mut self) -> OVSS_W<CFGR2_SPEC, 5> {
        OVSS_W::new(self)
    }
    #[doc = "Bit 9 - Triggered Oversampling This bit is set and cleared by software. Note: The software is allowed to write this bit only when ADSTART = 0 (which ensures that no conversion is ongoing)."]
    #[inline(always)]
    #[must_use]
    pub fn tovs(&mut self) -> TOVS_W<CFGR2_SPEC, 9> {
        TOVS_W::new(self)
    }
    #[doc = "Bit 29 - Low frequency trigger mode enable This bit is set and cleared by software. Note: The software is allowed to write this bit only when ADSTART bit is cleared to 0 (this ensures that no conversion is ongoing)."]
    #[inline(always)]
    #[must_use]
    pub fn lftrig(&mut self) -> LFTRIG_W<CFGR2_SPEC, 29> {
        LFTRIG_W::new(self)
    }
    #[doc = "Bits 30:31 - ADC clock mode These bits are set and cleared by software to define how the analog ADC is clocked: In all synchronous clock modes, there is no jitter in the delay from a timer trigger to the start of a conversion. Note: The software is allowed to write these bits only when the ADC is disabled (ADCAL = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0)."]
    #[inline(always)]
    #[must_use]
    pub fn ckmode(&mut self) -> CKMODE_W<CFGR2_SPEC, 30> {
        CKMODE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "ADC configuration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cfgr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cfgr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CFGR2_SPEC;
impl crate::RegisterSpec for CFGR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cfgr2::R`](R) reader structure"]
impl crate::Readable for CFGR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cfgr2::W`](W) writer structure"]
impl crate::Writable for CFGR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CFGR2 to value 0"]
impl crate::Resettable for CFGR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
use std::{fs::File, time::Duration}; use resol_vbus::{DataSet, RecordingWriter}; use tokio::prelude::*; use tokio_serial::{DataBits, FlowControl, Parity, Serial, SerialPortSettings, StopBits}; use tokio_resol_vbus::LiveDataStream; fn main() { // Create an recording file and hand it to a `RecordingWriter` let file = File::create("test.vbus").expect("Unable to create output file"); let mut rw = RecordingWriter::new(file); let path = "/dev/tty.usbmodem"; let settings = SerialPortSettings { baud_rate: 9600, data_bits: DataBits::Eight, flow_control: FlowControl::None, parity: Parity::None, stop_bits: StopBits::One, timeout: Duration::from_millis(500), }; let serial = Serial::from_path(&path, &settings).expect("Unable to open serial port"); let (reader, writer) = serial.split(); let stream = LiveDataStream::new(reader, writer, 0, 0x0020); // Read VBus `Data` values from the `LiveDataStream` let handler = stream .for_each(move |data| { println!("{}", data.id_string()); // Add `Data` value into `DataSet` to be stored let mut data_set = DataSet::new(); data_set.timestamp = data.as_ref().timestamp; data_set.add_data(data); // Write the `DataSet` into the `RecordingWriter` for permanent storage rw.write_data_set(&data_set) .expect("Unable to write data set"); Ok(()) }) .map_err(|err| { eprintln!("{}", err); }); tokio::run(handler); }
#[macro_use]
extern crate serde_json;
extern crate conrod_core;
extern crate conrod_glium;
#[cfg(target_os="android")]
extern crate rusttype;
#[cfg(target_os="android")]
extern crate android_glue;
#[cfg(not(target_os="android"))]
extern crate find_folder;
extern crate conrod_chat;
extern crate futures;
extern crate hardback_codec;
extern crate toa_ping;
extern crate websocket;
extern crate image;
extern crate glium;
// run with --features "keypad"
use conrod_core::{widget, color, Colorable, Widget, Positionable, Sizeable};
use glium::Surface;
use conrod_chat::custom_widget::chatview_futures;
use conrod_chat::chat;
use conrod_chat::chat::ConrodMessage;
use hardback_codec::codec;
use websocket::message::OwnedMessage;
use std::sync::mpsc::{Sender, Receiver};
use futures::sync::mpsc;
use std::sync::{Arc, Mutex};
// Address of the game's websocket server.
const CONNECTION: &'static str = "ws://0.0.0.0:8080";
pub struct GameApp {}
widget_ids! {
    pub struct Ids {
        master,
        rect,
        chat_canvas,
        chat,
        keypad,
        keypad_canvas
    }
}
pub mod support;
pub mod backend;
use backend::websocket::client;
// Initial window size in pixels.
const WIDTH: u32 = 600;
const HEIGHT: u32 = 420;
impl GameApp {
    // Runs the whole application: spawns a reconnecting websocket thread, then
    // drives the glium/conrod render loop until the window is closed (Escape or
    // close button). Returns Ok(()) on normal exit.
    pub fn new() -> Result<(), String> {
        // proxy_tx/proxy_rx: server -> UI messages; proxy_action_tx/rx: UI -> server.
        let (proxy_tx, proxy_rx) = std::sync::mpsc::channel();
        let (proxy_action_tx, proxy_action_rx) = mpsc::channel(2);
        let s_tx = Arc::new(Mutex::new(proxy_action_tx));
        let s_rx = Arc::new(Mutex::new(proxy_action_rx));
        let (ss_tx, ss_rx) = (s_tx.clone(), s_rx.clone());
        // Background thread: keeps trying to reach the server (ping first), and
        // (re)establishes the websocket client connection, throttled to one attempt
        // per 20 seconds after the first try.
        std::thread::spawn(move || {
            let mut connected = false;
            let mut last_update = std::time::Instant::now();
            let mut c = 0;
            while !connected {
                println!("connected {:?}", connected);
                // NOTE(review): despite the name, this is 20 seconds, not 16 ms.
                let sixteen_ms = std::time::Duration::new(20, 0);
                let now = std::time::Instant::now();
                let duration_since_last_update = now.duration_since(last_update);
                if (duration_since_last_update < sixteen_ms) & (c > 0) {
                    std::thread::sleep(sixteen_ms - duration_since_last_update);
                }
                match toa_ping::run("www.google.com") {
                    Ok(_) => {
                        // Network looks reachable: swap in a fresh action channel and
                        // run the websocket client (blocks until it disconnects).
                        let (tx, rx) = mpsc::channel(3);
                        let mut ss_tx = ss_tx.lock().unwrap();
                        *ss_tx = tx;
                        drop(ss_tx);
                        match client::run_owned_message(CONNECTION.to_owned(), proxy_tx.clone(), rx) {
                            Ok(_) => connected = true,
                            Err(err) => {
                                println!("reconnecting");
                                connected = false;
                            }
                        }
                        // NOTE(review): this unconditionally overrides the Err branch
                        // above, so the loop exits after the first client run either way.
                        connected = true;
                    }
                    _ => {
                        connected = false;
                    }
                }
                last_update = std::time::Instant::now();
                c += 1;
            }
        });
        // Window, OpenGL context and conrod renderer setup.
        let mut events_loop = glium::glutin::EventsLoop::new();
        let window = glium::glutin::WindowBuilder::new()
            .with_title("Hello Conrod!")
            .with_dimensions((WIDTH, HEIGHT).into());
        let context = glium::glutin::ContextBuilder::new()
            .with_vsync(true)
            .with_multisampling(4);
        let display = glium::Display::new(window, context, &events_loop).unwrap();
        let display = support::glium_wrapper::GliumDisplayWinitWrapper(display);
        let mut renderer = conrod_glium::Renderer::new(&display.0).unwrap();
        // construct our `Ui`.
        let mut ui = conrod_core::UiBuilder::new([WIDTH as f64, HEIGHT as f64]).build();
        println!("conrod ..ui");
        ui.fonts.insert(support::assets::load_font("fonts/NotoSans/NotoSans-Regular.ttf"));
        // Load the logo texture and register it in the image map; the second binding
        // shadows the texture with its image-map id.
        let rust_logo = load_image(&display.0, "images/rust.png");
        let mut image_map: conrod_core::image::Map<glium::texture::Texture2d> =
            conrod_core::image::Map::new();
        let rust_logo = image_map.insert(rust_logo);
        let events_loop_proxy = events_loop.create_proxy();
        //<logic::game::ConrodMessage<OwnedMessage>>
        let mut last_update = std::time::Instant::now();
        let mut last_update_sys = std::time::SystemTime::now();
        // Placeholder text for the chat input widget.
        let mut demo_text_edit = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. \
            Mauris aliquet porttitor tellus vel euismod. Integer lobortis volutpat bibendum. Nulla \
            finibus odio nec elit condimentum, rhoncus fermentum purus lacinia. Interdum et malesuada \
            fames ac ante ipsum primis in faucibus. Cras rhoncus nisi nec dolor bibendum pellentesque. \
            Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. \
            Quisque commodo nibh hendrerit nunc sollicitudin sodales. Cras vitae tempus ipsum. Nam \
            magna est, efficitur suscipit dolor eu, consectetur consectetur urna.".to_owned();
        //let mut demo_text_edit = "".to_owned();
        let mut lobby_history = vec![];
        let mut c = 0;
        let mut ids = Ids::new(ui.widget_id_generator());
        let name = "alan".to_owned();
        // Frame pacing for the idle branch of the render loop (~10 FPS).
        let sixteen_ms = std::time::Duration::from_millis(100);
        let mut captured_event: Option<ConrodMessage> = None;
        let mut keypad_bool = false;
        'render: loop {
            let mut to_break = false;
            let mut to_continue = false;
            // Clone the current UI->server sender so widgets can emit actions.
            let ss_tx = s_tx.lock().unwrap();
            let proxy_action_tx = ss_tx.clone();
            events_loop.poll_events(|event| {
                match event.clone() {
                    glium::glutin::Event::WindowEvent { event, .. } => {
                        match event {
                            // Close on window-close or Escape.
                            glium::glutin::WindowEvent::CloseRequested |
                            glium::glutin::WindowEvent::KeyboardInput {
                                input: glium::glutin::KeyboardInput {
                                    virtual_keycode: Some(glium::glutin::VirtualKeyCode::Escape),
                                    ..
                                },
                                ..
                            } => { to_break = true; }
                            _ => (),
                        }
                    }
                    _ => {}
                }
                // NOTE(review): both match arms evaluate to `()`, so `input` is unit and
                // unused — the converted event is stored into `captured_event` instead.
                let input = match conrod_winit::convert_event(event, &display) {
                    None => {
                        to_continue = true;
                    }
                    Some(input) => {
                        let d = std::time::Instant::now();
                        captured_event = Some(ConrodMessage::Event(d, input));
                    }
                };
            });
            if to_break {
                break 'render;
            }
            if to_continue {
                continue;
            }
            match captured_event {
                // A real user event: feed it to the Ui, then rebuild the widgets.
                Some(ConrodMessage::Event(_, ref input)) => {
                    ui.handle_event(input.clone());
                    let mut ui = ui.set_widgets();
                    set_widgets(&mut ui,
                                [WIDTH as f64, HEIGHT as f64],
                                &mut lobby_history,
                                &mut demo_text_edit,
                                &mut keypad_bool,
                                &name,
                                Some(rust_logo),
                                proxy_action_tx.clone(),
                                &mut ids);
                }
                // A timer tick: just rebuild the widgets.
                Some(ConrodMessage::Thread(t)) => {
                    let mut ui = ui.set_widgets();
                    set_widgets(&mut ui,
                                [WIDTH as f64, HEIGHT as f64],
                                &mut lobby_history,
                                &mut demo_text_edit,
                                &mut keypad_bool,
                                &name,
                                Some(rust_logo),
                                proxy_action_tx.clone(),
                                &mut ids);
                }
                // Nothing captured yet: sleep up to the frame budget, then emit a tick.
                None => {
                    let now = std::time::Instant::now();
                    let duration_since_last_update = now.duration_since(last_update);
                    if duration_since_last_update < sixteen_ms {
                        std::thread::sleep(sixteen_ms - duration_since_last_update);
                    }
                    let t = std::time::Instant::now();
                    captured_event = Some(ConrodMessage::Thread(t));
                }
            }
            // Drain server messages; chat messages addressed to the lobby are appended
            // to the visible history.
            while let Ok(msg) = proxy_rx.try_recv() {
                //update_state
                if let OwnedMessage::Text(z) = OwnedMessage::from(msg) {
                    if let Ok(codec::ClientReceivedMsg { type_name, location, sender, message, .. }) =
                        codec::ClientReceivedMsg::deserialize_receive(&z) {
                        println!("location {:?},type_name {:?}, sender {:?}, message:{:?}",
                                 location,
                                 type_name,
                                 sender,
                                 message);
                        if let (Some(Some(_type_name)),
                                Some(Some(_location)),
                                Some(Some(_sender)),
                                Some(Some(_message))) = (type_name.clone(), location, sender, message) {
                            println!("_type_name {:?}", _type_name);
                            if _type_name == "chat" {
                                if _location == "lobby" {
                                    lobby_history.push(chat::message::Message {
                                                           image_id: Some(rust_logo),
                                                           name: _sender,
                                                           text: _message,
                                                       });
                                }
                            }
                        }
                    }
                }
            }
            // Draw the frame.
            let primitives = ui.draw();
            renderer.fill(&display.0, primitives, &image_map);
            let mut target = display.0.draw();
            target.clear_color(0.0, 0.0, 0.0, 1.0);
            renderer.draw(&display.0, &mut target, &image_map).unwrap();
            target.finish().unwrap();
            c += 1;
        }
        Ok(())
    }
}
fn main() {
    println!("conrod ..main");
    match GameApp::new() {
        Err(why) => println!("Error while running Hardback:\n{}", why),
        Ok(_) => (),
    }
}
// Loads the bundled Rust logo as a glium texture.
// NOTE(review): appears unused — `load_image` below covers this case; candidate for removal.
fn load_rust_logo(display: &glium::Display) -> glium::texture::Texture2d {
    let rgba_image = support::assets::load_image("images/rust.png").to_rgba();
    let image_dimensions = rgba_image.dimensions();
    let raw_image =
        glium::texture::RawImage2d::from_raw_rgba_reversed(&rgba_image.into_raw(), image_dimensions);
    let texture = glium::texture::Texture2d::new(display, raw_image).unwrap();
    texture
}
// Wraps an outgoing lobby chat line into the websocket JSON envelope.
// NOTE(review): `name` is accepted but not used; the server presumably derives the
// sender from the connection — confirm against the server protocol.
fn process(name: &String, text: &String) -> OwnedMessage {
    let g = json!({
        "type":"chat",
        "message": text,
        "location":"lobby"
    });
    OwnedMessage::Text(g.to_string())
}
// Loads an arbitrary image asset as a glium texture (rows flipped for OpenGL).
fn load_image(display: &glium::Display, path: &str) -> glium::texture::Texture2d {
    let rgba_image = support::assets::load_image(path).to_rgba();
    let image_dimensions = rgba_image.dimensions();
    let raw_image = glium::texture::RawImage2d::from_raw_rgba_reversed(&rgba_image.into_raw(),
                                                                       image_dimensions);
    let texture = glium::texture::Texture2d::new(display, raw_image).unwrap();
    texture
}
// Builds the frame's widget tree (keypad build): a master canvas split into the chat
// area and a keypad strip whose height depends on `keypad_bool`.
#[cfg(feature="keypad")]
fn set_widgets(ui: &mut conrod_core::UiCell,
               dimension: [f64; 2],
               lobby_history: &mut Vec<chat::message::Message>,
               text_edit: &mut String,
               keypad_bool: &mut bool,
               name: &String,
               rust_logo: Option<conrod_core::image::Id>,
               action_tx: mpsc::Sender<OwnedMessage>,
               ids: &mut Ids) {
    use conrod_chat::chat::{ english};
    let english_tuple = english::populate();
    // Keypad takes 37.5% of the window height when visible, otherwise collapses to 0.
    let (keypad_length, _) = if *keypad_bool {
        (dimension[1] * 0.375, 400.0)
    } else {
        (0.0, 700.0)
    };
    widget::Canvas::new()
        .flow_down(&[(ids.chat_canvas, widget::Canvas::new().color(color::LIGHT_BLUE)),
                     (ids.keypad_canvas,
                      widget::Canvas::new().length(keypad_length).color(color::LIGHT_BLUE))])
        .set(ids.master, ui);
    let keypad_bool_ = chatview_futures::ChatView::new(lobby_history,
                                                       text_edit,
                                                       ids.master,
                                                       &english_tuple,
                                                       rust_logo,
                                                       name,
                                                       action_tx,
                                                       Box::new(process))
        .middle_of(ids.chat_canvas)
        .padded_wh_of(ids.chat_canvas, 0.0)
        .set(ids.chat, ui);
    *keypad_bool = keypad_bool_;
}
// Builds the frame's widget tree (no-keypad build); same layout, but the chat view
// neither shows a keypad nor reports a keypad toggle.
#[cfg(not(feature="keypad"))]
fn set_widgets(ui: &mut conrod_core::UiCell,
               dimension: [f64; 2],
               lobby_history: &mut Vec<chat::message::Message>,
               text_edit: &mut String,
               keypad_bool: &mut bool,
               name: &String,
               rust_logo: Option<conrod_core::image::Id>,
               _keypad:conrod_core::image::Id,
               action_tx: mpsc::Sender<OwnedMessage>,
               ids: &mut Ids) {
    let (keypad_length, _) = if *keypad_bool {
        (dimension[1] * 0.375, 400.0)
    } else {
        (0.0, 700.0)
    };
    widget::Canvas::new()
        .flow_down(&[(ids.chat_canvas, widget::Canvas::new().color(color::LIGHT_BLUE)),
                     (ids.keypad_canvas,
                      widget::Canvas::new().length(keypad_length).color(color::LIGHT_BLUE))])
        .set(ids.master, ui);
    chatview_futures::ChatView::new(lobby_history, text_edit, rust_logo, name, action_tx,
                                    Box::new(process))
        .middle_of(ids.chat_canvas)
        .padded_wh_of(ids.chat_canvas, 0.0)
        .set(ids.chat, ui);
}
// Demonstrates generic structs and how the compiler infers their type
// parameters from the constructor expressions.

/// A point whose two coordinates must share a single type `T`.
struct GenericPoint<T> {
    x: T,
    y: T,
}

/// A point whose coordinates may have two independent types `T` and `U`.
struct ParametricGenericPoint<T, U> {
    x: T,
    y: U,
}

fn main() {
    // enum Option<T>: the parameter can also be spelled out explicitly.
    let _x: Option<i32> = Some(4);
    let _y: Option<f64> = Some(4.0);

    // Both coordinates of GenericPoint must share one type...
    let _ip = GenericPoint { x: 0, y: 0 };
    let _fp = GenericPoint { x: 0.0, y: 0.0 };
    // ...so mixing types is a compile error:
    // let _ifp = GenericPoint { x: 0, y: 0.0 };

    // ParametricGenericPoint has one parameter per field, so mixing is fine.
    let _ifp = ParametricGenericPoint { x: 0, y: 0.0 };
}
use crossbeam::thread;

use crate::account::Account;
use csv;
use std::collections::BTreeMap;
use std::io;
use structopt::StructOpt;
use transaction::Transaction;

mod account;
mod transaction;

/// Map of client id -> account state; a BTreeMap keeps the final CSV output
/// sorted by client id.
type AccountsType = BTreeMap<u16, Account>;

/// This is the struct we use to parse command line
/// arguments and display usage / help to the user.
#[derive(Debug, StructOpt)]
struct Cli {
    #[structopt(parse(from_os_str))]
    path: std::path::PathBuf,
}

/// reads the CSV file and creates a mapping of account
/// id - Account objects.
///
/// If the reader fails to parse the CSV file for any reason
/// the application aborts. Invalid data in the CSV file will
/// ignore those rows and print the error to STDERR.
fn populate_accounts(path: String, accounts: &mut AccountsType) {
    let mut reader = match csv::ReaderBuilder::new()
        .flexible(true)
        .trim(csv::Trim::All)
        .from_path(path)
    {
        Ok(reader) => reader,
        Err(err) => {
            // Cannot even open/parse the file: nothing sensible to do, abort.
            panic!("{:?}", err);
        }
    };

    for result in reader.deserialize() {
        match result {
            Ok(result) => {
                let transaction: Transaction = result;
                // `or_insert_with` builds the fallback Account lazily, only the
                // first time a client id is seen (`or_insert` would construct one
                // for every row).
                let account = accounts
                    .entry(transaction.client_id)
                    .or_insert_with(|| Account::new(transaction.client_id));
                account.add_transaction(transaction);
            }
            Err(err) => {
                // Skip invalid rows; the error goes to STDERR so STDOUT stays a
                // clean CSV.
                eprintln!("{:?}", err)
            }
        }
    }
}

/// processes each accounts transactions concurrently.
///
/// Exclusive Borrow that is mutable so that we can calculate
/// the transactions without copying values in memory.
///
/// Note: there is no `.await` inside — the work is done on scoped OS threads;
/// the `async` qualifier is kept only so `main`'s `.await` call keeps compiling.
///
/// See more explanation here:
/// https://docs.rs/crossbeam/0.8.1/crossbeam/thread/index.html#why-scoped-threads
async fn process_transactions(accounts: &mut AccountsType) {
    thread::scope(|s| {
        let mut handles = Vec::new();
        // One scoped thread per account; scoped threads may borrow `accounts`
        // without requiring 'static lifetimes or Arc.
        for account in accounts.values_mut() {
            let handle = s.spawn(move |_| {
                account.process_transactions();
            });
            handles.push(handle);
        }
        // Propagate panics from worker threads via `unwrap`.
        for handle in handles {
            let _ = handle.join().unwrap();
        }
    })
    .unwrap();
}

/// Writes the account statuses to STDOUT using
/// the serde + csv crates.
fn write_account_summary(accounts: &AccountsType) { let mut writer = csv::Writer::from_writer(io::stdout()); for account in (accounts).values() { match writer.serialize(account) { Ok(_item) => {} Err(err) => { eprintln!("{:?}", err); } } } writer.flush().unwrap(); } /// Toy Payment Engine /// /// This CLI program processes a list of transactions /// given a filepath to a CSV like: /// ```csv /// type, client, tx, amount /// deposit, 1, 1, 1.0 /// ``` /// /// The ouput is a CSV of account statuses. /// This is directed to STDOUT and looks like: /// ```csv /// client,available,held,total,locked /// 1,1.5,0.0,1.5,false /// ``` /// ```shell /// usage: cargo run -- transactions.csv > accounts.csv /// ``` #[tokio::main] pub async fn main() { let opt = Cli::from_args(); let filepath = opt.path.as_path().display().to_string(); let mut accounts: AccountsType = BTreeMap::new(); populate_accounts(filepath, &mut accounts); process_transactions(&mut accounts).await; write_account_summary(&accounts); }
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use lru::LruCache;
use once_cell::sync::Lazy;
use parity_wasm::builder;
use parity_wasm::elements;
use parity_wasm::elements::{External, MemorySection, Type};
use parking_lot::Mutex;
use wasmer_runtime_core::wasmparser;
use wasmer_runtime_core::Module;

use primitives::Hash;

use crate::errors::{PreCompileError, VMResult};
use crate::{VMCodeProvider, VMConfig};

// Maximum number of compiled modules kept in the process-wide cache.
const CACHE_SIZE: usize = 1024;

// Global LRU cache keyed by code hash, so identical contract code is only
// compiled once per process.
static MODULE_CACHE: Lazy<Mutex<LruCache<Hash, Module>>> =
	Lazy::new(|| Mutex::new(LruCache::new(CACHE_SIZE)));

// Compiles the provided wasm code into a wasmer `Module`, consulting the
// module cache first. The cache lock is held across the whole compilation,
// which also serializes concurrent compiles of the same code.
pub fn compile(code: &dyn VMCodeProvider, config: &VMConfig) -> VMResult<Module> {
	let code_hash = code.provide_code_hash()?;
	let mut guard = MODULE_CACHE.lock();
	if let Some(module) = guard.get(code_hash) {
		return Ok(module.clone());
	}

	let code = &*code.provide_code()?;
	let code = pre_compile(code, config)?;

	let module = wasmer_runtime::compile(&code)?;
	guard.put(code_hash.clone(), module.clone());

	Ok(module)
}

// Validates and rewrites the raw wasm bytes before handing them to wasmer:
// 1. wasmparser validation, 2. force the memory to be imported with configured
// limits, 3. reject internally declared memories, 4. inject the stack-height
// limiter, 5. restrict imports to the "env" module. Returns the re-serialized
// module bytes.
fn pre_compile(code: &[u8], config: &VMConfig) -> VMResult<Vec<u8>> {
	wasmparser::validate(code, None).map_err(|e| PreCompileError::ValidationError {
		msg: format!("{}", e),
	})?;

	let module: elements::Module =
		elements::deserialize_buffer(code).map_err(|e| PreCompileError::ValidationError {
			msg: format!("{}", e),
		})?;

	let module = import_memory(module, config)?;
	let module = validate_memory(module)?;
	let module = inject_stack_height_metering(module, config)?;
	let module = validate_imports(module, config)?;

	let code = elements::serialize(module).map_err(|_| PreCompileError::Serialize)?;

	Ok(code)
}

// Drops any internally declared memory entry and replaces it with an imported
// "env"."memory" whose initial/max page counts come from the VM config.
fn import_memory(mut module: elements::Module, config: &VMConfig) -> VMResult<elements::Module> {
	// `tmp` only exists so the pop below is a no-op when there is no memory section.
	let mut tmp = MemorySection::default();
	module
		.memory_section_mut()
		.unwrap_or(&mut tmp)
		.entries_mut()
		.pop();
	let entry =
		elements::MemoryType::new(config.initial_memory_pages, Some(config.max_memory_pages));
	let mut builder = builder::from_module(module);
	builder.push_import(elements::ImportEntry::new(
		"env".to_string(),
		"memory".to_string(),
		elements::External::Memory(entry),
	));
	let module = builder.build();
	Ok(module)
}

// Rejects modules that still declare their own (non-imported) memory after the
// rewrite in `import_memory`.
fn validate_memory(module: elements::Module) -> VMResult<elements::Module> {
	if module
		.memory_section()
		.map_or(false, |ms| !ms.entries().is_empty())
	{
		Err(PreCompileError::InternalMemoryDeclared.into())
	} else {
		Ok(module)
	}
}

// Instruments the module with pwasm-utils' stack height limiter so runaway
// recursion traps instead of exhausting the host stack.
fn inject_stack_height_metering(
	module: elements::Module,
	config: &VMConfig,
) -> VMResult<elements::Module> {
	let module = pwasm_utils::stack_height::inject_limiter(module, config.max_stack_height)
		.map_err(|_| PreCompileError::StackHeightMetering)?;
	Ok(module)
}

// Checks that every import comes from the "env" module, that each function
// import refers to a valid type entry, and that the imported memory (which
// `import_memory` guarantees to exist) carries exactly the configured limits.
fn validate_imports(module: elements::Module, config: &VMConfig) -> VMResult<elements::Module> {
	let types = module
		.type_section()
		.map(elements::TypeSection::types)
		.unwrap_or(&[]);
	let import_entries = module
		.import_section()
		.map(elements::ImportSection::entries)
		.unwrap_or(&[]);

	let mut imported_memory_type = None;

	for import in import_entries {
		if import.module() != "env" {
			return Err(PreCompileError::Imports.into());
		}

		let type_idx = match *import.external() {
			External::Function(ref type_idx) => type_idx,
			External::Memory(ref memory_type) => {
				imported_memory_type = Some(memory_type);
				continue;
			}
			_ => continue,
		};

		// `Type` has a single `Function` variant, so this `let` pattern is
		// irrefutable; the binding only verifies the type index is in range.
		let Type::Function(ref _func_ty) = types
			.get(*type_idx as usize)
			.ok_or(PreCompileError::Imports)?;
	}
	if let Some(memory_type) = imported_memory_type {
		let limits = memory_type.limits();
		if limits.initial() != config.initial_memory_pages
			|| limits.maximum() != Some(config.max_memory_pages)
		{
			return Err(PreCompileError::Imports.into());
		}
	} else {
		return Err(PreCompileError::Imports.into());
	};
	Ok(module)
}