text
stringlengths
8
4.13M
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::num::ParseIntError;
use std::str::FromStr;

/// One shuffle instruction (Advent of Code 2019, day 22).
enum Action {
    /// "cut N" — rotate the deck by N (N may be negative).
    Cut(i64),
    /// "deal with increment N" — redistribute with stride N.
    Deal(u64),
    /// "deal into new stack" — reverse the deck.
    NewStack,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct ParseActionError;

impl From<ParseIntError> for ParseActionError {
    fn from(_err: ParseIntError) -> Self {
        Self
    }
}

impl FromStr for Action {
    type Err = ParseActionError;

    // Recognized forms:
    //   cut 123                  (count may be negative)
    //   deal with increment 345  (count is a non-negative integer)
    //   deal into new stack
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut words = s.split(' ');
        let verb = words.next().ok_or(ParseActionError)?;
        let last = words.last().ok_or(ParseActionError)?;
        let action = match verb {
            "cut" => Action::Cut(last.parse::<i64>()?),
            // Dispatch on the literal last word instead of falling back to
            // NewStack whenever the count fails to parse. Previously any
            // malformed "deal with increment <garbage>" (or a negative
            // increment) silently became "deal into new stack".
            "deal" if last == "stack" => Action::NewStack,
            "deal" => Action::Deal(last.parse::<u64>()?),
            _ => return Err(ParseActionError),
        };
        Ok(action)
    }
}

impl Action {
    /// Each action is a linear index mapping x -> m*x + a (mod num_cards);
    /// returns the pair (m, a) with both components reduced mod num_cards.
    fn linearize(&self, num_cards: u64) -> (u64, u64) {
        match self {
            Action::Cut(n) => {
                let m = 1;
                // A cut is a pure shift; normalize negative cuts into the
                // equivalent positive offset.
                let a = if *n > 0 {
                    num_cards - *n as u64
                } else {
                    -(*n) as u64
                };
                (m, a)
            }
            Action::Deal(n) => (*n, 0),
            // Reversal: x -> (num_cards - 1) - x == (num_cards - 1) * x + (num_cards - 1).
            Action::NewStack => (num_cards - 1, num_cards - 1),
        }
    }
}

/// A concrete deck of cards, used for the (small) part-one simulation.
struct Deck {
    cards: Vec<u32>,
}

impl Deck {
    fn new(cards: Vec<u32>) -> Self {
        Self { cards }
    }

    /// Applies a single shuffle action in place.
    fn apply(&mut self, action: &Action) {
        match *action {
            Action::NewStack => self.cards.reverse(),
            Action::Cut(n) => {
                if n < 0 {
                    self.cards.rotate_right(-n as usize)
                } else {
                    self.cards.rotate_left(n as usize)
                }
            }
            Action::Deal(n) => {
                // Place card i at position (i * n) mod len.
                let len = self.cards.len();
                let mut cards = vec![0; len];
                let position = (0usize..).step_by(n as usize);
                for (c, p) in self.cards.iter().zip(position) {
                    cards[p % len] = *c;
                }
                self.cards = cards;
            }
        }
    }
}

/// Entry point: solves both parts of day 22 from `data/day22.txt`.
pub(crate) fn day22() {
    let input = File::open("data/day22.txt").expect("Failed to open input");
    let buffered = BufReader::new(input);
    let lines = buffered.lines().map(|line| line.unwrap());
    let actions: Vec<Action> = lines
        .map(|line| line.parse().expect("Parsing failed"))
        .collect();

    // Part one: simulate the shuffle on a real 10007-card deck.
    let cards = (0u32..10007).collect();
    let mut deck = Deck::new(cards);
    for action in &actions {
        deck.apply(action);
    }
    let answer = deck.cards.iter().position(|&n| n == 2019).unwrap();
    println!("Part one answer is: {}", answer);

    // Part two: the deck is far too large to simulate, so track the combined
    // effect of all actions as a single linear map x -> mx + a (mod num_cards).
    let num_cards = 119_315_717_514_047;
    let (mult, add) = actions.iter().fold((1, 0), |acc, action| {
        let pair = action.linearize(num_cards);
        compose(num_cards, acc, pair)
    });

    // Invert the map: if y = mx + a then x = m^-1 * (y - a).
    let imult = modular_inverse(num_cards, mult);
    let iadd = modular_multiplication(num_cards, num_cards - add, imult);

    // Apply the inverse map `repetitions` times via fast exponentiation.
    let repetitions = 101_741_582_076_661;
    let (m, a) = repeatedly(num_cards, (imult, iadd), repetitions);

    // Finally we can read out the card that ends up at position 2020.
    let answer = (modular_multiplication(num_cards, m, 2020) + a) % num_cards;
    println!("Part two answer is: {}", answer);
}

/// Composes two linear maps applied in order: first (m1, a1), then (m2, a2).
/// m2 * (m1 x + a1) + a2 == (m2 * m1) x + (m2 * a1 + a2), all mod num_cards.
fn compose(num_cards: u64, (m1, a1): (u64, u64), (m2, a2): (u64, u64)) -> (u64, u64) {
    let m = modular_multiplication(num_cards, m2, m1);
    let a = (modular_multiplication(num_cards, m2, a1) + a2) % num_cards;
    (m, a)
}

/// Composes the map (mult, add) with itself `count` times, by binary
/// exponentiation (square-and-multiply on the composition operator).
fn repeatedly(num_cards: u64, (mult, add): (u64, u64), count: u64) -> (u64, u64) {
    // Accumulated result, starting from the identity map x -> 1*x + 0.
    let mut m = 1;
    let mut a = 0;
    // Current power-of-two repetition of the input map.
    let mut xm = mult;
    let mut xa = add;
    let mut iterations = count;
    while iterations != 0 {
        if (iterations % 2) == 1 {
            let (m1, a1) = compose(num_cards, (m, a), (xm, xa));
            m = m1;
            a = a1;
        }
        let (m1, a1) = compose(num_cards, (xm, xa), (xm, xa));
        xm = m1;
        xa = a1;
        iterations /= 2;
    }
    (m, a)
}

/// Computes (x * y) mod modulus without overflowing u64, by Russian-peasant
/// doubling; requires modulus < 2^63 so the doubling step cannot overflow.
fn modular_multiplication(modulus: u64, x: u64, y: u64) -> u64 {
    let mut result = 0;
    let mut a = x;
    let mut b = y;
    while b != 0 {
        if (b % 2) == 1 {
            result = (result + a) % modulus;
        };
        a = (a * 2) % modulus;
        b /= 2;
    }
    result
}

/// Computes the multiplicative inverse of n mod modulus via the extended
/// Euclidean algorithm. Panics if gcd(modulus, n) != 1 (no inverse exists).
fn modular_inverse(modulus: u64, n: u64) -> u64 {
    let mut a = modulus;
    let mut b = n;
    let mut x0 = 0;
    let mut x1 = 1;
    // Extended Euclidean algorithm for GCD, tracking only the coefficient
    // of n (kept non-negative by adding modulus - quotient instead of
    // subtracting quotient).
    while b != 0 {
        let quotient = a / b;
        let remainder = a % b;
        a = b;
        b = remainder;
        let temp = x1;
        x1 = (x0 + modular_multiplication(modulus, modulus - quotient, x1)) % modulus;
        x0 = temp;
    }
    assert_eq!(a, 1);
    x0
}
// Copyright (c) The diem-devtools Contributors
// SPDX-License-Identifier: MIT OR Apache-2.0

use crate::{
    partition::{Partitioner, PartitionerBuilder},
    test_list::TestBinary,
};
use aho_corasick::AhoCorasick;
use anyhow::bail;
use serde::{Deserialize, Serialize};
use std::{fmt, str::FromStr};

/// Whether to run ignored tests.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum RunIgnored {
    /// Only run tests that aren't ignored.
    ///
    /// This is the default.
    Default,
    /// Only run tests that are ignored.
    IgnoredOnly,
    /// Run both ignored and non-ignored tests.
    All,
}

impl RunIgnored {
    /// The accepted string forms, in the same order as the enum variants;
    /// these must stay in sync with `Display` and `FromStr` below.
    pub fn variants() -> [&'static str; 3] {
        ["default", "ignored-only", "all"]
    }
}

impl Default for RunIgnored {
    fn default() -> Self {
        RunIgnored::Default
    }
}

impl fmt::Display for RunIgnored {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            RunIgnored::Default => write!(f, "default"),
            RunIgnored::IgnoredOnly => write!(f, "ignored-only"),
            RunIgnored::All => write!(f, "all"),
        }
    }
}

impl FromStr for RunIgnored {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let val = match s {
            "default" => RunIgnored::Default,
            "ignored-only" => RunIgnored::IgnoredOnly,
            "all" => RunIgnored::All,
            other => bail!("unrecognized value for run-ignored: {}", other),
        };
        Ok(val)
    }
}

/// A builder for `TestFilter` instances.
#[derive(Clone, Debug)]
pub struct TestFilterBuilder {
    run_ignored: RunIgnored,
    partitioner_builder: Option<PartitionerBuilder>,
    name_match: NameMatch,
}

/// How test names are matched: either everything, or any substring out of a
/// pre-built Aho-Corasick automaton (boxed, since the automaton is large).
#[derive(Clone, Debug)]
enum NameMatch {
    MatchAll,
    MatchSet(Box<AhoCorasick>),
}

impl TestFilterBuilder {
    /// Creates a new `TestFilterBuilder` from the given patterns.
    ///
    /// If an empty slice is passed, the test filter matches all possible test names.
    pub fn new(
        run_ignored: RunIgnored,
        partitioner_builder: Option<PartitionerBuilder>,
        patterns: &[impl AsRef<[u8]>],
    ) -> Self {
        let name_match = if patterns.is_empty() {
            NameMatch::MatchAll
        } else {
            NameMatch::MatchSet(Box::new(AhoCorasick::new_auto_configured(patterns)))
        };
        Self {
            run_ignored,
            partitioner_builder,
            name_match,
        }
    }

    /// Creates a new `TestFilterBuilder` that matches any pattern by name.
    pub fn any(run_ignored: RunIgnored) -> Self {
        Self {
            run_ignored,
            partitioner_builder: None,
            name_match: NameMatch::MatchAll,
        }
    }

    /// Creates a new test filter scoped to a single binary.
    ///
    /// This test filter may be stateful.
    pub fn build(&self, test_binary: &TestBinary) -> TestFilter<'_> {
        let partitioner = self
            .partitioner_builder
            .as_ref()
            .map(|partitioner_builder| partitioner_builder.build(test_binary));
        TestFilter {
            builder: self,
            partitioner,
        }
    }
}

/// Test filter, scoped to a single binary.
#[derive(Debug)]
pub struct TestFilter<'builder> {
    builder: &'builder TestFilterBuilder,
    partitioner: Option<Box<dyn Partitioner>>,
}

impl<'filter> TestFilter<'filter> {
    /// Returns an enum describing the match status of this filter.
    ///
    /// Checks are applied in order — ignored-status, then name patterns,
    /// then partition — and the first failing check's reason is reported.
    pub fn filter_match(&mut self, test_name: &str, ignored: bool) -> FilterMatch {
        match self.builder.run_ignored {
            RunIgnored::IgnoredOnly => {
                if !ignored {
                    return FilterMatch::Mismatch {
                        reason: MismatchReason::Ignored,
                    };
                }
            }
            RunIgnored::Default => {
                if ignored {
                    return FilterMatch::Mismatch {
                        reason: MismatchReason::Ignored,
                    };
                }
            }
            // RunIgnored::All accepts both ignored and non-ignored tests.
            _ => {}
        };
        let string_match = match &self.builder.name_match {
            NameMatch::MatchAll => true,
            NameMatch::MatchSet(set) => set.is_match(test_name),
        };
        if !string_match {
            return FilterMatch::Mismatch {
                reason: MismatchReason::String,
            };
        }
        // The partitioner is stateful, hence the &mut borrow.
        let partition_match = match &mut self.partitioner {
            Some(partitioner) => partitioner.test_matches(test_name),
            None => true,
        };
        if !partition_match {
            return FilterMatch::Mismatch {
                reason: MismatchReason::Partition,
            };
        }
        FilterMatch::Matches
    }
}

/// An enum describing whether a test matches a filter.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case", tag = "status")]
pub enum FilterMatch {
    /// This test matches this filter.
    Matches,
    /// This test does not match this filter.
    ///
    /// The `MismatchReason` inside describes the reason this filter isn't matched.
    Mismatch { reason: MismatchReason },
}

impl FilterMatch {
    /// Returns true if the filter matches.
    // NOTE(review): previous doc comment said "doesn't match", which
    // contradicted the implementation below.
    pub fn is_match(&self) -> bool {
        matches!(self, FilterMatch::Matches)
    }
}

/// The reason for why a test doesn't match a filter.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "kebab-case")]
pub enum MismatchReason {
    /// This test does not match the run-ignored option in the filter.
    Ignored,
    /// This test does not match the provided string filters.
    String,
    /// This test is in a different partition.
    Partition,
}

impl fmt::Display for MismatchReason {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            MismatchReason::Ignored => write!(f, "does not match the run-ignored option"),
            MismatchReason::String => write!(f, "does not match the provided string filters"),
            MismatchReason::Partition => write!(f, "is in a different partition"),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use proptest::{collection::vec, prelude::*};

    proptest! {
        // An empty pattern set matches every test name.
        #[test]
        fn proptest_empty(test_names in vec(any::<String>(), 0..16)) {
            let patterns: &[String] = &[];
            let test_filter = TestFilterBuilder::new(RunIgnored::Default, None, patterns);
            let mut single_filter = test_filter.build(&make_test_binary());
            for test_name in test_names {
                prop_assert!(single_filter.filter_match(&test_name, false).is_match());
            }
        }

        // Test that exact names match.
        #[test]
        fn proptest_exact(test_names in vec(any::<String>(), 0..16)) {
            let test_filter = TestFilterBuilder::new(RunIgnored::Default, None, &test_names);
            let mut single_filter = test_filter.build(&make_test_binary());
            for test_name in test_names {
                prop_assert!(single_filter.filter_match(&test_name, false).is_match());
            }
        }

        // Test that substrings match.
        #[test]
        fn proptest_substring(
            substring_prefix_suffixes in vec([any::<String>(); 3], 0..16),
        ) {
            let mut patterns = Vec::with_capacity(substring_prefix_suffixes.len());
            let mut test_names = Vec::with_capacity(substring_prefix_suffixes.len());
            for [substring, prefix, suffix] in substring_prefix_suffixes {
                test_names.push(prefix + &substring + &suffix);
                patterns.push(substring);
            }
            let test_filter = TestFilterBuilder::new(RunIgnored::Default, None, &patterns);
            let mut single_filter = test_filter.build(&make_test_binary());
            for test_name in test_names {
                prop_assert!(single_filter.filter_match(&test_name, false).is_match());
            }
        }

        // Test that dropping a character from a string doesn't match.
        #[test]
        fn proptest_no_match(
            substring in any::<String>(),
            prefix in any::<String>(),
            suffix in any::<String>(),
        ) {
            prop_assume!(!substring.is_empty() && !(prefix.is_empty() && suffix.is_empty()));
            let pattern = prefix + &substring + &suffix;
            let test_filter = TestFilterBuilder::new(RunIgnored::Default, None, &[&pattern]);
            let mut single_filter = test_filter.build(&make_test_binary());
            prop_assert!(!single_filter.filter_match(&substring, false).is_match());
        }
    }

    /// Creates a fake test binary instance.
    fn make_test_binary() -> TestBinary {
        TestBinary {
            binary: "/fake/path".into(),
            binary_id: "fake-id".to_owned(),
            cwd: None,
        }
    }
}
use anyhow::{Context, Result}; use sightglass_data::{Format, Measurement}; use sightglass_upload::{upload, upload_package, MeasurementPackage}; use std::{ fs::File, io::{self, BufReader, Read}, }; use structopt::StructOpt; /// Upload benchmark output to an ElasticSearch server; accepts raw benchmark /// results in `stdin` (i.e., from `sightglass-cli benchmark ...`). #[derive(Debug, StructOpt)] #[structopt(name = "upload-elastic")] pub struct UploadCommand { /// The format of the input data. Either 'json' or 'csv'. #[structopt(short = "i", long = "input-format", default_value = "json")] input_format: Format, /// Path to the file that will be read from, or none to indicate stdin /// (default). #[structopt(short = "f", long = "input-file")] input_file: Option<String>, /// The URL of a server receiving results; this command only understands how /// to upload results to an ElasticSearch server; e.g., /// `http://localhost:9200`. #[structopt(index = 1, default_value = "http://localhost:9200", value_name = "URL")] server: String, /// Setting this flag will prevent any uploading to the server. Instead, /// the command will emit a JSON "package" to stdout that can be used to /// upload at a later time, see `--from-package`. #[structopt(short = "d", long = "dry-run")] dry_run: bool, /// Path to a file containing a package of measurements and fingerprint data /// to be uploaded. If this is set, `--input-file` and `--input-format` are /// ignored. #[structopt(short = "p", long = "from-package")] from_package: Option<String>, /// The number of measurements to upload together; this can speed up the /// upload. Defaults to `2000`. 
#[structopt(short = "b", long = "batch-size", default_value = "2000")] batch_size: usize, } impl UploadCommand { pub fn execute(&self) -> Result<()> { if let Some(file) = &self.from_package { let reader = BufReader::new(File::open(file).context("unable to open --from-package path")?); let package: MeasurementPackage = serde_json::from_reader(reader).context("unable to parse --from-package JSON")?; upload_package(&self.server, self.batch_size, self.dry_run, package) } else { let file: Box<dyn Read> = if let Some(file) = self.input_file.as_ref() { Box::new(BufReader::new( File::open(file).context("unable to open --input-file")?, )) } else { Box::new(io::stdin()) }; let measurements: Vec<Measurement> = self.input_format.read(file)?; upload(&self.server, self.batch_size, self.dry_run, measurements) } } }
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

// NOTE: this file is generated (AutoRust); edits here may be overwritten on
// regeneration — prefer changing the OpenAPI spec / generator instead.

/// Error envelope returned by the linked-storage endpoints.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponseLinkedStorage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<error_response_linked_storage::Error>,
}
pub mod error_response_linked_storage {
    use super::*;
    /// Error detail: service-defined code plus human-readable message.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Error {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub code: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub message: Option<String>,
    }
}

/// A component's linked storage accounts resource (full ARM resource view).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ComponentLinkedStorageAccounts {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<LinkedStorageAccountsProperties>,
}

/// PATCH body for updating a component's linked storage accounts.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ComponentLinkedStorageAccountsPatch {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<LinkedStorageAccountsProperties>,
}

/// Properties payload: presumably the linked storage account's resource id —
/// TODO(review): confirm against the service spec.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LinkedStorageAccountsProperties {
    #[serde(rename = "linkedStorageAccount", default, skip_serializing_if = "Option::is_none")]
    pub linked_storage_account: Option<String>,
}

/// ARM proxy resource: a resource with id/name/type but no location/tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
    #[serde(flatten)]
    pub resource: Resource,
}

/// Common ARM resource fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
// thread 'rustc' panicked at 'no entry found for key' // prusti-interface/src/environment/polonius_info.rs:1169:9 #[derive(Clone)] struct Foo<'a> { x: &'a i32, } fn bar(foo: &Foo) { let _ = Foo { ..foo.clone() }; } fn main() {} /* thread 'rustc' panicked at 'no entry found for key', prusti-interface/src/environment/polonius_info.rs:1169:9 stack backtrace: 0: rust_begin_unwind at /rustc/8007b506ac5da629f223b755f5a5391edd5f6d01/library/std/src/panicking.rs:517:5 1: core::panicking::panic_fmt at /rustc/8007b506ac5da629f223b755f5a5391edd5f6d01/library/core/src/panicking.rs:93:14 2: core::option::expect_failed at /rustc/8007b506ac5da629f223b755f5a5391edd5f6d01/library/core/src/option.rs:1618:5 3: prusti_interface::environment::polonius_info::PoloniusInfo::get_loan_at_location 4: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode_assign_operand 5: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode_assign 6: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode_statement_at 7: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode_blocks_group 8: prusti_viper::encoder::procedure_encoder::ProcedureEncoder::encode 9: prusti_viper::encoder::encoder::Encoder::encode_procedure 10: prusti_viper::encoder::encoder::Encoder::process_encoding_queue 11: prusti_viper::verifier::Verifier::verify 12: prusti_driver::verifier::verify 13: <prusti_driver::callbacks::PrustiCompilerCalls as rustc_driver::Callbacks>::after_analysis 14: rustc_interface::queries::<impl rustc_interface::interface::Compiler>::enter 15: rustc_span::with_source_map 16: rustc_interface::interface::create_compiler_and_run 17: scoped_tls::ScopedKey<T>::set */
/*
 * Datadog API V1 Collection
 *
 * Collection of all Datadog Public endpoints.
 *
 * The version of the OpenAPI document: 1.0
 * Contact: support@datadoghq.com
 * Generated by: https://openapi-generator.tech
 */

// NOTE: generated code — edits may be clobbered when the client is
// regenerated from the OpenAPI spec.

/// EventQueryDefinition : The event query.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EventQueryDefinition {
    /// The query being made on the event.
    #[serde(rename = "search")]
    pub search: String,
    /// The execution method for multi-value filters. Can be either and or or.
    // NOTE(review): looks like the API accepts only "and"/"or" here — a
    // stringly-typed field by spec; confirm before tightening to an enum.
    #[serde(rename = "tags_execution")]
    pub tags_execution: String,
}

impl EventQueryDefinition {
    /// The event query.
    ///
    /// Builds a definition from the search string and the multi-value filter
    /// execution method ("and"/"or").
    pub fn new(search: String, tags_execution: String) -> EventQueryDefinition {
        EventQueryDefinition {
            search,
            tags_execution,
        }
    }
}
use std::ffi::CString;

use libc::c_void;

use raw;
use gs::effect;

/// Safe, high-level trait an OBS input source implements. `register` wires the
/// implementation up to the C side via the extern shims in `RawInputSource`.
pub trait InputSource: Sized {
    /// Stable identifier string passed to OBS; must contain no NUL bytes.
    fn id() -> &'static str;
    fn new(/* settings, obs_source */) -> Self;
    fn width(&self) -> u32;
    fn height(&self) -> u32;
    /// Renders the source. `effect` is `None` when the C side passes a null
    /// effect pointer.
    fn render(&self, effect: Option<&mut effect::Effect>);

    fn register() {
        unsafe {
            let id = CString::new(Self::id()).expect("str contains null");
            // NOTE(review): `id.into_raw()` transfers ownership of the C
            // string to the callee — presumably OBS keeps it alive for the
            // plugin's lifetime; confirm `rust_obs_register_input_source`
            // never frees it (that would require CString::from_raw).
            raw::rust_obs_register_input_source(
                id.into_raw(),
                Self::raw_new,
                Self::raw_width,
                Self::raw_height,
                Self::raw_render,
                Self::raw_destroy,
            )
        }
    }
}

/// C-ABI shims bridging OBS callbacks to the safe `InputSource` methods.
/// The opaque `*mut c_void` handle is a leaked `Box<Self>` created in
/// `raw_new` and reclaimed in `raw_destroy`.
trait RawInputSource: InputSource {
    // `settings` and `source` are currently unused — `InputSource::new` takes
    // no arguments yet (see its commented-out parameter list).
    extern fn raw_new(settings: *mut raw::ObsData, source: *mut raw::ObsSource) -> *mut c_void {
        let this = Self::new();
        let this = Box::new(this);
        // Ownership moves to the C side; balanced by Box::from_raw in raw_destroy.
        Box::into_raw(this) as _
    }

    // SAFETY (callee contract): `this` must be the pointer produced by
    // raw_new and not yet passed to raw_destroy.
    unsafe extern fn raw_width(this: *mut c_void) -> u32 {
        let this: &Self = &*(this as *mut Self);
        this.width()
    }

    unsafe extern fn raw_height(this: *mut c_void) -> u32 {
        let this: &Self = &*(this as *mut Self);
        this.height()
    }

    unsafe extern fn raw_render(this: *mut c_void, effect: *mut raw::Effect) {
        let this: &Self = &*(this as *mut Self);
        // Wrap the raw effect pointer; a null pointer becomes None.
        let mut effect = effect::RefMut::from_raw(effect);
        let effect = effect.as_mut().map(|e| &mut **e);
        this.render(effect);
    }

    unsafe extern fn raw_destroy(this: *mut c_void) {
        println!("Rust: Destroying!");
        // Reclaim the Box leaked in raw_new; dropping it runs Self's Drop.
        Box::from_raw(this as *mut Self);
    }
}

// Blanket impl: every InputSource automatically gets the raw shims.
impl<T: InputSource> RawInputSource for T {}
use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign};

use serde::{Serialize, Deserialize};

#[derive(Serialize, Deserialize)]
/// A 2-dimensional vector, holding 2 values of the same type.
#[derive(Debug, Copy, Clone, PartialEq, Default)]
pub struct Vec2<T> {
    pub x: T,
    pub y: T,
}

impl<T> Vec2<T> {
    /// Returns a vector_t value made from the given arguments.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let mut v = Vec2::new("element1", "element2");
    /// ```
    pub const fn new(x: T, y: T) -> Self {
        Self { x, y }
    }

    /// Maps the components of the vector into another type.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let v1 = Vec2::new(1, 2);
    /// let v2 = v1.map(|i| i + 2);
    /// assert_eq!(v2, Vec2::new(3, 4));
    /// ```
    pub fn map<B>(self, f: impl Fn(T) -> B) -> Vec2<B> {
        Vec2::new(f(self.x), f(self.y))
    }
}

impl<T> From<Vec2<T>> for (T, T) {
    /// Unzip the vector into a tuple.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let v: (i32, i32) = Vec2::new(1, 2).into();
    /// assert_eq!(v, (1, 2));
    /// ```
    fn from(v: Vec2<T>) -> Self {
        (v.x, v.y)
    }
}

impl<T> From<(T, T)> for Vec2<T> {
    /// Zip the tuple into a vector.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let v: Vec2<i32> = (1,2).into();
    /// assert_eq!(v, Vec2::new(1, 2));
    /// ```
    fn from(t: (T, T)) -> Self {
        Vec2::new(t.0, t.1)
    }
}

impl<T: Add<Output = T>> Add for Vec2<T> {
    type Output = Self;

    /// Adds two vectors.
    /// Performs the usual componentwise vector sum.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let v1 = Vec2::new(1, 2);
    /// let v2 = Vec2::new(3, 4);
    /// let v = v1 + v2;
    /// assert_eq!(v, Vec2::new(4,6));
    /// ```
    fn add(self, rhs: Self) -> Self::Output {
        Self::new(self.x + rhs.x, self.y + rhs.y)
    }
}

impl<T: AddAssign> AddAssign for Vec2<T> {
    /// Adds two vectors.
    /// Performs the usual componentwise vector sum.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let mut v1 = Vec2::new(1, 2);
    /// let v2 = Vec2::new(3, 4);
    /// v1 += v2;
    /// assert_eq!(v1, Vec2::new(4,6));
    /// ```
    fn add_assign(&mut self, rhs: Self) {
        self.x += rhs.x;
        self.y += rhs.y;
    }
}

impl<T: Sub<Output = T>> Sub for Vec2<T> {
    type Output = Self;

    /// Subtracts two vectors.
    /// Performs the usual componentwise vector difference.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let v1 = Vec2::new(1, 2);
    /// let v2 = Vec2::new(3, 4);
    /// let v = v1 - v2;
    /// assert_eq!(v, Vec2::new(-2,-2));
    /// ```
    fn sub(self, rhs: Self) -> Self::Output {
        Self::new(self.x - rhs.x, self.y - rhs.y)
    }
}

impl<T: SubAssign> SubAssign for Vec2<T> {
    /// Subtracts two vectors.
    /// Performs the usual componentwise vector difference.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let mut v1 = Vec2::new(1, 2);
    /// let v2 = Vec2::new(3, 4);
    /// v1 -= v2;
    /// assert_eq!(v1, Vec2::new(-2,-2));
    /// ```
    fn sub_assign(&mut self, rhs: Self) {
        self.x -= rhs.x;
        self.y -= rhs.y;
    }
}

impl<T: Mul<Output = T> + Copy> Mul<T> for Vec2<T> {
    type Output = Self;

    /// Multiplies a vector by a scalar.
    /// Performs the usual componentwise product.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let v1 = Vec2::new(1, 2);
    /// let v = v1 * 3;
    /// assert_eq!(v, Vec2::new(3, 6));
    /// ```
    fn mul(self, rhs: T) -> Self::Output {
        self.map(|n| n * rhs)
    }
}

impl<T: MulAssign + Copy> MulAssign<T> for Vec2<T> {
    /// Multiplies a vector by a scalar.
    /// Performs the usual componentwise product.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let mut v = Vec2::new(1, 2);
    /// v *= 3;
    /// assert_eq!(v, Vec2::new(3, 6));
    /// ```
    fn mul_assign(&mut self, rhs: T) {
        self.x *= rhs;
        self.y *= rhs;
    }
}

impl<T: Neg<Output = T>> Neg for Vec2<T> {
    type Output = Self;

    /// Computes the additive inverse of a vector.
    /// This is done by using componentwise negation.
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let v = Vec2::new(1, 2);
    /// assert_eq!(-v, Vec2::new(-1, -2));
    /// ```
    fn neg(self) -> Self::Output {
        self.map(|n| -n)
    }
}

impl<T: Neg<Output = T>> Vec2<T> {
    /// Returns a perpendicular vector to the input vector.
    /// (Specifically the clockwise perpendicular: (y, -x).)
    ///
    /// # Example
    /// ```
    /// use cs3_physics::vector::Vec2;
    /// let v = Vec2::new(1.0, 2.0);
    /// assert_eq!(v.dot(v.perp()), 0.0);
    /// ```
    pub fn perp(self) -> Vec2<T> {
        Self::new(self.y, -self.x)
    }
}

/// A real-valued 2-dimensional vector.
///
/// Positive x is towards the right; positive y is towards the top.
pub type Vec2f = Vec2<f64>;

impl Mul<Vec2f> for f64 {
    type Output = Vec2f;

    /// Multiplies a vector by a scalar.
    /// Performs the usual componentwise product.
    fn mul(self, rhs: Vec2f) -> Self::Output {
        Vec2::new(self * rhs.x, self * rhs.y)
    }
}

impl Div<f64> for Vec2f {
    type Output = Self;

    /// Divides a vector by a scalar.
    /// Performs the usual componentwise quotient.
    fn div(self, rhs: f64) -> Self::Output {
        self.map(|n| n / rhs)
    }
}

impl DivAssign<f64> for Vec2f {
    /// Divides a vector by a scalar.
    /// Performs the usual componentwise quotient.
    fn div_assign(&mut self, rhs: f64) {
        self.x /= rhs;
        self.y /= rhs;
    }
}

impl Vec2f {
    /// The zero vector, i.e. (0, 0).
    pub const ZERO: Self = Self::new(0.0, 0.0);

    /// Rotates a vector by an angle around (0, 0).
    /// The angle is given in radians.
    /// Positive angles are counterclockwise, according to the right hand rule.
    ///
    /// See https://en.wikipedia.org/wiki/Rotation_matrix.
    /// (You can derive this matrix by noticing that rotation by a fixed angle
    /// is linear and then computing what it does to (1, 0) and (0, 1).)
    pub fn rotate(self, angle: f64) -> Self {
        Self::new(
            self.x * angle.cos() - self.y * angle.sin(),
            self.x * angle.sin() + self.y * angle.cos(),
        )
    }

    /// Rotates a vector by an angle around an arbitrary center point.
    pub fn rotate_relative(self, angle: f64, center: Self) -> Self {
        return (self - center).rotate(angle) + center;
    }

    /// Returns a velocity vector after a given acceleration has been applied.
    /// This is essentially the formula v(t) = v_0 + at.
    pub fn tick(self, acc: Self, dt: f64) -> Self {
        return self + dt * acc;
    }

    /// Computes the dot product of two vectors.
    /// See https://en.wikipedia.org/wiki/Dot_product#Algebraic_definition.
    pub fn dot(self, rhs: Self) -> f64 {
        self.x * rhs.x + self.y * rhs.y
    }

    /// Computes the cross product of two vectors, which lies along the z-axis.
    ///
    /// See https://en.wikipedia.org/wiki/Cross_product#Computing_the_cross_product.
    pub fn cross(self, rhs: Self) -> f64 {
        self.x * rhs.y - rhs.x * self.y
    }

    /// Returns the magnitude of the vector.
    pub fn norm(self) -> f64 {
        (self.x.powi(2) + self.y.powi(2)).sqrt()
    }

    /// Returns a normalized vector (same direction, but magnitude is 1).
    ///
    /// Note: for the zero vector this divides by a zero norm, producing NaN
    /// components — callers must not normalize `Vec2f::ZERO`.
    pub fn normalize(self) -> Self {
        self / self.norm()
    }
}
use std::{error, fmt};

/// Errors reported by the Bela audio C API wrappers; each variant names the
/// underlying Bela call that failed.
#[derive(Copy, Clone, Debug)]
pub enum Error {
    Init,
    Start,
    Stop,
    Cleanup,
    Task,
}

impl Error {
    /// The name of the failing Bela C call for each variant.
    ///
    /// These strings were previously only reachable through the deprecated
    /// `std::error::Error::description`; they now feed `Display` instead.
    fn describe(self) -> &'static str {
        match self {
            Error::Init => "Bela_initAudio error",
            Error::Start => "Bela_startAudio error",
            Error::Stop => "Bela_stopAudio error",
            Error::Cleanup => "Bela_cleanupAudio error",
            Error::Task => "Bela_scheduleAuxiliaryTask error",
        }
    }
}

impl fmt::Display for Error {
    /// Formats as before ("Error: <Variant>") but now also surfaces the
    /// descriptive message, since `description()` is deprecated in favor of
    /// `Display`.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "Error: {:?} ({}).", self, self.describe())
    }
}

// The deprecated `description()` override is gone; the default implementation
// defers to `Display`, which now carries the message.
impl error::Error for Error {}
use super::event::{new_data_event, new_fin_event, new_window_update_event, Event}; use super::message::ConnectRequest; use bytes::BytesMut; use std::pin::Pin; use std::sync::atomic::{AtomicBool, AtomicI32, AtomicU32, Ordering}; use std::sync::Arc; use std::sync::Mutex; use std::task::{Context, Poll, Waker}; use std::time::Instant; use tokio::io::AsyncRead; use tokio::io::AsyncWrite; use tokio::sync::mpsc; use crate::channel::ChannelStream; use crate::utils::{clear_unbounded_channel, fill_read_buf, make_io_error}; const MIN_REPORT_RECV_SIZE: i32 = 32 * 1024; const RELAY_BUF_FACTOR: usize = 4; // static READER_COUNT: AtomicU32 = AtomicU32::new(0); // static WRITER_COUNT: AtomicU32 = AtomicU32::new(0); // static STREAM_COUNT: AtomicU32 = AtomicU32::new(0); pub struct MuxStreamState { pub channel: String, pub session_id: u32, pub stream_id: u32, pub send_buf_window: AtomicI32, pub recv_buf_size: AtomicI32, pub closed: AtomicBool, pub total_recv_bytes: AtomicU32, pub total_send_bytes: AtomicU32, pub born_time: Instant, relay_buf_size: usize, } struct SharedIOState { waker: Option<Waker>, data_tx: Option<mpsc::UnboundedSender<Vec<u8>>>, data_rx: Option<mpsc::UnboundedReceiver<Vec<u8>>>, } impl SharedIOState { fn try_close(&mut self) { if let Some(tx) = &mut self.data_tx { let empty = Vec::new(); //let _ = tx.clone().try_send(empty); let _ = tx.clone().send(empty); } if let Some(waker) = self.waker.take() { waker.wake() } } } impl MuxStreamState { fn close(&self) { self.closed.store(true, Ordering::SeqCst); } } struct MuxStreamReader { tx: mpsc::Sender<Event>, rx: mpsc::UnboundedReceiver<Vec<u8>>, recv_buf: BytesMut, state: Arc<MuxStreamState>, } impl MuxStreamReader {} fn inc_recv_buf_window( tx: &mut mpsc::Sender<Event>, state: &MuxStreamState, inc: usize, cx: &mut Context<'_>, ) -> Poll<std::io::Result<()>> { state.recv_buf_size.fetch_add(inc as i32, Ordering::SeqCst); state .total_recv_bytes .fetch_add(inc as u32, Ordering::SeqCst); let current_recv_buf_size = 
state.recv_buf_size.load(Ordering::SeqCst); let mut min_report_window: i32 = (state.relay_buf_size * RELAY_BUF_FACTOR) as i32; if min_report_window > MIN_REPORT_RECV_SIZE { min_report_window = MIN_REPORT_RECV_SIZE; } if current_recv_buf_size >= min_report_window { let ev = new_window_update_event(state.stream_id, current_recv_buf_size as u32, false); match tx.poll_ready(cx) { Poll::Ready(Ok(())) => {} Poll::Pending => return Poll::Pending, Poll::Ready(Err(e)) => { return Poll::Ready(Err(make_io_error(&e.to_string()))); } } match tx.try_send(ev) { Err(e) => Poll::Ready(Err(make_io_error(&e.to_string()))), Ok(()) => { state .recv_buf_size .fetch_sub(current_recv_buf_size, Ordering::SeqCst); Poll::Ready(Ok(())) } } } else { Poll::Ready(Ok(())) } } impl Drop for MuxStreamReader { fn drop(&mut self) { clear_unbounded_channel(&mut self.rx); // READER_COUNT.fetch_sub(1, Ordering::SeqCst); // info!( // "Drop reader, READER:{}, WRITER:{}", // READER_COUNT.load(Ordering::SeqCst), // WRITER_COUNT.load(Ordering::SeqCst), // ); } } impl AsyncRead for MuxStreamReader { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut [u8], ) -> Poll<std::io::Result<usize>> { let Self { tx, rx, recv_buf, state, } = &mut *self; if state.closed.load(Ordering::SeqCst) { clear_unbounded_channel(rx); return Poll::Ready(Err(make_io_error("closed"))); } let n = if !recv_buf.is_empty() { fill_read_buf(recv_buf, buf) } else { 0 }; match inc_recv_buf_window(tx, &state, n, cx) { Poll::Pending => return Poll::Pending, Poll::Ready(Ok(())) => { if n > 0 { return Poll::Ready(Ok(n)); } } Poll::Ready(Err(e)) => { return Poll::Ready(Err(e)); } } recv_buf.clear(); match rx.poll_recv(cx) { Poll::Ready(Some(b)) => { let mut copy_n = b.len(); if 0 == copy_n { //close //error!("[{}]####2 Close", state.stream_id); state.close(); //rx.close(); clear_unbounded_channel(rx); return Poll::Ready(Ok(0)); } if copy_n > buf.len() { copy_n = buf.len(); } match inc_recv_buf_window(tx, &state, copy_n, cx) { 
Poll::Pending => return Poll::Pending, Poll::Ready(Ok(())) => {} Poll::Ready(Err(e)) => { return Poll::Ready(Err(e)); } } //info!("[{}]tx ready {} ", state.stream_id, copy_n); buf[0..copy_n].copy_from_slice(&b[0..copy_n]); if copy_n < b.len() { recv_buf.extend_from_slice(&b[copy_n..]); } Poll::Ready(Ok(copy_n)) } Poll::Ready(None) => { //error!("[{}]####3 Close", state.stream_id); state.close(); clear_unbounded_channel(rx); Poll::Ready(Ok(0)) } Poll::Pending => Poll::Pending, } } } struct MuxStreamWriter { tx: mpsc::Sender<Event>, state: Arc<MuxStreamState>, io_state: Arc<Mutex<SharedIOState>>, } impl Drop for MuxStreamWriter { fn drop(&mut self) { // WRITER_COUNT.fetch_sub(1, Ordering::SeqCst); // info!( // "Drop writer, READER:{}, WRITER:{}", // READER_COUNT.load(Ordering::SeqCst), // WRITER_COUNT.load(Ordering::SeqCst), // ); } } impl AsyncWrite for MuxStreamWriter { fn poll_write( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &[u8], ) -> Poll<Result<usize, std::io::Error>> { let Self { tx, state, io_state, } = &mut *self; if state.closed.load(Ordering::SeqCst) { io_state.lock().unwrap().try_close(); return Poll::Ready(Err(make_io_error("closed"))); } if state.send_buf_window.load(Ordering::SeqCst) < 0 { io_state.lock().unwrap().waker = Some(cx.waker().clone()); return Poll::Pending; } let ev = new_data_event(state.stream_id, buf, false); // let future = tx.send(ev); // pin_mut!(future); // match future.as_mut().poll(cx) { // Poll::Pending => Poll::Pending, // Poll::Ready(Err(e)) => Poll::Ready(Err(make_io_error(e.description()))), // Poll::Ready(Ok(())) => { // state // .send_buf_window // .fetch_sub(buf.len() as i32, Ordering::SeqCst); // state // .total_send_bytes // .fetch_add(buf.len() as u32, Ordering::SeqCst); // Poll::Ready(Ok(buf.len())) // } // } match tx.poll_ready(cx) { Poll::Pending => return Poll::Pending, Poll::Ready(Err(e)) => { io_state.lock().unwrap().try_close(); return Poll::Ready(Err(make_io_error(&e.to_string()))); } 
Poll::Ready(Ok(())) => {}
        }
        match tx.try_send(ev) {
            Err(e) => {
                io_state.lock().unwrap().try_close();
                Poll::Ready(Err(make_io_error(&e.to_string())))
            }
            Ok(()) => {
                // Event queued: charge the bytes against the send window and
                // record them in the running total.
                state
                    .send_buf_window
                    .fetch_sub(buf.len() as i32, Ordering::SeqCst);
                state
                    .total_send_bytes
                    .fetch_add(buf.len() as u32, Ordering::SeqCst);
                Poll::Ready(Ok(buf.len()))
            }
        }
    }

    fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Result<(), std::io::Error>> {
        // Writes are forwarded immediately as events; there is nothing to flush.
        Poll::Ready(Ok(()))
    }

    fn poll_shutdown(
        self: Pin<&mut Self>,
        _cx: &mut Context<'_>,
    ) -> Poll<Result<(), std::io::Error>> {
        self.state.closed.store(true, Ordering::SeqCst);
        self.io_state.lock().unwrap().try_close();
        Poll::Ready(Ok(()))
    }
}

// One logical stream multiplexed over a shared event channel.
pub struct MuxStream {
    pub target: ConnectRequest,
    event_tx: mpsc::Sender<Event>,
    pub data_tx: Option<mpsc::UnboundedSender<Vec<u8>>>,
    pub state: Arc<MuxStreamState>,
    io_state: Arc<Mutex<SharedIOState>>,
}

impl Drop for MuxStream {
    fn drop(&mut self) {
        // STREAM_COUNT.fetch_sub(1, Ordering::SeqCst);
        // info!("Drop stream {}", STREAM_COUNT.load(Ordering::SeqCst),);
    }
}

impl MuxStream {
    /// Creates a new stream for `(session id0, stream id1)` on channel `name`,
    /// with an unbounded internal data channel held in the shared IO state
    /// until the stream is split / first used.
    pub fn new(
        name: &str,
        id0: u32,
        id1: u32,
        evtx: mpsc::Sender<Event>,
        target: ConnectRequest,
        relay_buf_size: usize,
    ) -> Self {
        let state = MuxStreamState {
            channel: String::from(name),
            session_id: id0,
            stream_id: id1,
            // Initial send window mirrors the receive-side reporting threshold.
            send_buf_window: AtomicI32::new((relay_buf_size * RELAY_BUF_FACTOR) as i32),
            recv_buf_size: AtomicI32::new(0),
            closed: AtomicBool::new(false),
            total_recv_bytes: AtomicU32::new(0),
            total_send_bytes: AtomicU32::new(0),
            born_time: Instant::now(),
            relay_buf_size,
        };
        let (dtx, drx) = mpsc::unbounded_channel();
        let io_state = SharedIOState {
            waker: None,
            data_tx: Some(dtx),
            data_rx: Some(drx),
        };
        // STREAM_COUNT.fetch_add(1, Ordering::SeqCst);
        // info!("New stream {}", STREAM_COUNT.load(Ordering::SeqCst),);
        Self {
            target,
            event_tx: evtx,
            data_tx: None,
            state: Arc::new(state),
            io_state: Arc::new(Mutex::new(io_state)),
        }
    }

    pub fn id(&self) -> u32 {
        self.state.stream_id
    }

    pub fn relay_buf_size(&self) -> usize {
self.state.relay_buf_size
    }

    /// Lazily claims the data sender out of the shared IO state on first use.
    fn check_data_tx(&mut self) {
        if self.data_tx.is_some() {
            return;
        }
        if let Some(tx) = self.io_state.lock().unwrap().data_tx.take() {
            self.data_tx = Some(tx);
        }
    }

    /// Credits `inc` bytes back to the send window (peer acknowledged them)
    /// and wakes any writer parked in `poll_write`.
    pub fn update_send_window(&self, inc: u32) {
        self.state
            .send_buf_window
            .fetch_add(inc as i32, Ordering::SeqCst);
        if self.state.send_buf_window.load(Ordering::SeqCst) > 0 {
            if let Some(waker) = self.io_state.lock().unwrap().waker.take() {
                waker.wake()
            }
        }
    }

    /// Delivers an inbound payload to the reader half. Silently drops the data
    /// if the stream is already closed or the reader channel is unavailable.
    pub async fn offer_data(&mut self, data: Vec<u8>) {
        self.check_data_tx();
        if self.state.closed.load(Ordering::SeqCst) {
            error!(
                "[{}]Already closed for data len:{}.",
                self.state.stream_id,
                data.len()
            );
            return;
        }
        //error!("[{}]off data len:{}.", self.state.stream_id, data.len());
        // Empty payloads are reserved as the close signal (see close()); callers
        // must never offer one here.
        assert!(!data.is_empty());
        if let Some(tx) = &mut self.data_tx {
            //let _ = tx.send(data).await;
            let _ = tx.send(data);
        } else {
            //error!("[{}]Non recv rx for data.", self.state.stream_id);
        }
    }

    // NOTE(review): inherent `clone` shadows any derived Clone; the clone shares
    // state/io_state but starts with data_tx re-derived from self.
    pub fn clone(&self) -> Self {
        let mut v = Self {
            target: self.target.clone(),
            event_tx: self.event_tx.clone(),
            data_tx: None,
            state: self.state.clone(),
            io_state: self.io_state.clone(),
        };
        // STREAM_COUNT.fetch_add(1, Ordering::SeqCst);
        // info!("Clone stream {}", STREAM_COUNT.load(Ordering::SeqCst),);
        if let Some(tx) = &self.data_tx {
            v.data_tx = Some(tx.clone());
        }
        v
    }
}

impl ChannelStream for MuxStream {
    /// Splits the stream into boxed read/write halves.
    fn split(
        &mut self,
    ) -> (
        Box<dyn AsyncRead + Send + Unpin + '_>,
        Box<dyn AsyncWrite + Send + Unpin + '_>,
    ) {
        //let (dtx, drx) = mpsc::channel(16);
        // NOTE(review): data_rx is take()n here, so split() may only be called
        // once per stream — a second call would panic on the unwrap.
        let r = MuxStreamReader {
            tx: self.event_tx.clone(),
            rx: self.io_state.lock().unwrap().data_rx.take().unwrap(),
            recv_buf: BytesMut::new(),
            state: self.state.clone(),
        };
        let w = MuxStreamWriter {
            tx: self.event_tx.clone(),
            state: self.state.clone(),
            io_state: self.io_state.clone(),
        };
        // READER_COUNT.fetch_add(1, Ordering::SeqCst);
        // WRITER_COUNT.fetch_add(1, Ordering::SeqCst);
        // info!(
        //     "Create READER:{}, WRITER:{}",
        //     READER_COUNT.load(Ordering::SeqCst),
        //     WRITER_COUNT.load(Ordering::SeqCst),
        // );
//error!("[{}]split.", self.state.stream_id); (Box::new(r), Box::new(w)) } fn close(&mut self) -> std::io::Result<()> { //error!("[{}]####1 Close", self.state.stream_id); self.state.close(); if let Some(tx) = &self.data_tx { let empty = Vec::new(); //let _ = tx.clone().try_send(empty); let _ = tx.clone().send(empty); } self.io_state.lock().unwrap().try_close(); let fin = new_fin_event(self.state.stream_id, false); let _ = self.event_tx.try_send(fin); Ok(()) } }
use crate::error::RPCError;
use ckb_dao::DaoCalculator;
use ckb_fee_estimator::MAX_CONFIRM_BLOCKS;
use ckb_jsonrpc_types::{
    Capacity, DryRunResult, EstimateResult, OutPoint, Script, Transaction, Uint64,
};
use ckb_logger::error;
use ckb_shared::{shared::Shared, Snapshot};
use ckb_store::ChainStore;
use ckb_types::{
    core::cell::{resolve_transaction, CellProvider, CellStatus, HeaderChecker},
    packed,
    prelude::*,
    H256,
};
use ckb_verification::ScriptVerifier;
use jsonrpc_core::{Error, Result};
use jsonrpc_derive::rpc;
use std::collections::HashSet;

/// Experimental JSON-RPC endpoints: hash helpers, transaction dry-run,
/// DAO withdraw calculation, and fee estimation. Names starting with `_`
/// are internal/unstable conveniences.
#[rpc(server)]
pub trait ExperimentRpc {
    #[rpc(name = "_compute_transaction_hash")]
    fn compute_transaction_hash(&self, tx: Transaction) -> Result<H256>;

    #[rpc(name = "_compute_script_hash")]
    fn compute_script_hash(&self, script: Script) -> Result<H256>;

    /// Resolves and runs the scripts of a transaction without submitting it,
    /// returning the consumed cycles.
    #[rpc(name = "dry_run_transaction")]
    fn dry_run_transaction(&self, _tx: Transaction) -> Result<DryRunResult>;

    // Calculate the maximum withdraw one can get, given a referenced DAO cell,
    // and a withdraw block hash
    #[rpc(name = "calculate_dao_maximum_withdraw")]
    fn calculate_dao_maximum_withdraw(&self, _out_point: OutPoint, _hash: H256) -> Result<Capacity>;

    // Estimate fee
    #[rpc(name = "estimate_fee_rate")]
    fn estimate_fee_rate(&self, expect_confirm_blocks: Uint64) -> Result<EstimateResult>;
}

pub(crate) struct ExperimentRpcImpl {
    pub shared: Shared,
}

impl ExperimentRpc for ExperimentRpcImpl {
    fn compute_transaction_hash(&self, tx: Transaction) -> Result<H256> {
        let tx: packed::Transaction = tx.into();
        Ok(tx.calc_tx_hash().unpack())
    }

    fn compute_script_hash(&self, script: Script) -> Result<H256> {
        let script: packed::Script = script.into();
        Ok(script.calc_script_hash().unpack())
    }

    fn dry_run_transaction(&self, tx: Transaction) -> Result<DryRunResult> {
        let tx: packed::Transaction = tx.into();
        DryRunner::new(&self.shared).run(tx)
    }

    fn calculate_dao_maximum_withdraw(&self, out_point: OutPoint, hash: H256) -> Result<Capacity> {
        // Pin one consistent chain snapshot for the whole calculation.
        let snapshot: &Snapshot = &self.shared.snapshot();
let consensus = snapshot.consensus();
        let calculator = DaoCalculator::new(consensus, snapshot);
        match calculator.maximum_withdraw(&out_point.into(), &hash.pack()) {
            Ok(capacity) => Ok(capacity.into()),
            Err(err) => Err(RPCError::custom(RPCError::Invalid, format!("{:#}", err))),
        }
    }

    /// Asks the tx-pool's fee estimator for a rate that should confirm within
    /// `expect_confirm_blocks`. Rejects out-of-range requests and reports a
    /// zero estimate (not enough samples) as an Invalid error.
    fn estimate_fee_rate(&self, expect_confirm_blocks: Uint64) -> Result<EstimateResult> {
        let expect_confirm_blocks = expect_confirm_blocks.value() as usize;
        // A tx need 1 block to propose, then 2 block to get confirmed
        // so at least confirm blocks is 3 blocks.
        if expect_confirm_blocks < 3 || expect_confirm_blocks > MAX_CONFIRM_BLOCKS {
            return Err(RPCError::custom(
                RPCError::Invalid,
                format!(
                    "expect_confirm_blocks should between 3 and {}, got {}",
                    MAX_CONFIRM_BLOCKS, expect_confirm_blocks
                ),
            ));
        }
        let tx_pool = self.shared.tx_pool_controller();
        let fee_rate = tx_pool.estimate_fee_rate(expect_confirm_blocks);
        if let Err(e) = fee_rate {
            // Controller failure is a node-side problem, not a bad request.
            error!("send estimate_fee_rate request error {}", e);
            return Err(Error::internal_error());
        };
        let fee_rate = fee_rate.unwrap();

        // A zero rate means the estimator has not collected enough samples yet.
        if fee_rate.as_u64() == 0 {
            return Err(RPCError::custom(
                RPCError::Invalid,
                "collected samples is not enough, please make sure node has peers and try later"
                    .into(),
            ));
        }

        Ok(EstimateResult {
            fee_rate: fee_rate.as_u64().into(),
        })
    }
}

// DryRunner dry-runs a given transaction and returns the result, including
// the execution cycles consumed by its scripts.
pub(crate) struct DryRunner<'a> {
    shared: &'a Shared,
}

impl<'a> CellProvider for DryRunner<'a> {
    /// Looks up a cell by out-point in the current snapshot, optionally
    /// loading its data. Any cell found in the store is reported as live.
    fn cell(&self, out_point: &packed::OutPoint, with_data: bool) -> CellStatus {
        let snapshot = self.shared.snapshot();
        snapshot
            .get_cell_meta(&out_point.tx_hash(), out_point.index().unpack())
            .map(|mut cell_meta| {
                if with_data {
                    cell_meta.mem_cell_data = snapshot
                        .get_cell_data(&out_point.tx_hash(), out_point.index().unpack());
                }
                CellStatus::live_cell(cell_meta)
            })  // treat as live cell, regardless of live or dead
            .unwrap_or(CellStatus::Unknown)
    }
}

impl<'a> HeaderChecker for DryRunner<'a> {
    /// Delegates header validity checks to the shared snapshot.
    fn check_valid(
        &self,
        block_hash: &packed::Byte32,
    ) -> std::result::Result<(), ckb_error::Error> {
        self.shared.snapshot().check_valid(block_hash)
    }
}

impl<'a> DryRunner<'a> {
    pub(crate) fn new(shared: &'a Shared) -> Self {
        Self { shared }
    }

    /// Resolves the transaction's inputs/deps against the snapshot and runs
    /// its scripts under the consensus cycle limit, returning the cycles used.
    pub(crate) fn run(&self, tx: packed::Transaction) -> Result<DryRunResult> {
        let snapshot: &Snapshot = &self.shared.snapshot();
        match resolve_transaction(tx.into_view(), &mut HashSet::new(), self, self) {
            Ok(resolved) => {
                let consensus = snapshot.consensus();
                let max_cycles = consensus.max_block_cycles;
                match ScriptVerifier::new(&resolved, snapshot).verify(max_cycles) {
                    Ok(cycles) => Ok(DryRunResult {
                        cycles: cycles.into(),
                    }),
                    Err(err) => Err(RPCError::custom(RPCError::Invalid, format!("{:?}", err))),
                }
            }
            Err(err) => Err(RPCError::custom(RPCError::Invalid, format!("{:?}", err))),
        }
    }
}
use std::fmt;

/// A wrapper around a password string that hides its contents from `Debug`
/// output, so secrets do not leak into logs or error messages.
#[derive(Clone)]
pub struct Password(String);

impl fmt::Debug for Password {
    /// Always renders the fixed placeholder `Password(..)`, never the secret.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("Password(..)")
    }
}

impl Password {
    /// Wraps a plaintext password.
    pub fn new(inner: String) -> Self {
        Self(inner)
    }

    /// Consumes the wrapper and returns the raw password.
    ///
    /// Marked `unsafe` by the author to force call sites to explicitly
    /// acknowledge they are handling the plaintext secret; there is no
    /// memory unsafety involved.
    pub unsafe fn take(self) -> String {
        let Self(inner) = self;
        inner
    }
}
//! Handles the multiboot2 information structure. mod boot_loader_name; mod framebuffer_info; pub use self::boot_loader_name::get_bootloader_name; pub use self::framebuffer_info::get_vga_info; /// Represents a tag in the information structure. #[repr(C)] struct BasicTag { tag_type: u32, size: u32 } /// Represents an iterator for the tags. struct BasicTagIterator { current_address: usize } impl BasicTagIterator { /// Returns a new iterator for the tags. fn new() -> BasicTagIterator { unsafe { BasicTagIterator { current_address: STRUCT_BASE_ADDRESS + 8 } } } } /// The base address for the information structure. // this will only be valid after init was called and will never be changed // afterwards static mut STRUCT_BASE_ADDRESS: usize = 0; impl Iterator for BasicTagIterator { type Item = *const BasicTag; fn next(&mut self) -> Option<*const BasicTag> { let current_tag = unsafe { &*(self.current_address as *const BasicTag) }; if current_tag.tag_type == 0 && current_tag.size == 8 { None } else { let last_address = self.current_address; self.current_address += current_tag.size as usize; self.current_address += if self.current_address % 8 == 0 { 0 } else { 8 - (self.current_address % 8) }; Some(last_address as *const BasicTag) } } } /// Initializes the multiboot2 module. pub fn init(information_structure_address: usize) { assert_has_not_been_called!("The multiboot2 module should only be initialized once."); assert!(check_validity(information_structure_address)); unsafe { STRUCT_BASE_ADDRESS = information_structure_address }; } /// Checks if the passed information structure is valid. 
fn check_validity(information_structure_address: usize) -> bool {
    // The first u32 of the structure is its total size; the final 8 bytes must
    // be the end tag (type 0, size 8) for the structure to be well-formed.
    let total_size: u32 = unsafe { *(information_structure_address as *const u32) };
    let end_tag_type: u32 =
        unsafe { *((information_structure_address + total_size as usize - 8) as *const u32) };
    let end_tag_size: u32 =
        unsafe { *((information_structure_address + total_size as usize - 4) as *const u32) };
    end_tag_type == 0 && end_tag_size == 8
}

/// Returns the tag that corresponds to the given number.
fn get_tag(tag_type: u32) -> Option<*const BasicTag> {
    unsafe { BasicTagIterator::new().find(|tag| (**tag).tag_type == tag_type) }
}
//! `synmap` provides utilities for parsing multi-file crates into `syn` AST //! nodes, and resolving the spans attached to those nodes into raw source text, //! and line/column information. //! //! The primary entry point for the crate is the `SourceMap` type, which stores //! mappings from byte offsets to file information, along with cached file //! information. extern crate cpp_syn as syn; extern crate memchr; use std::fmt::{self, Write}; use std::error; use std::fs::{self, File}; use std::io::prelude::*; use std::io::{self, Error, ErrorKind}; use std::path::{Path, PathBuf}; use syn::{Attribute, Crate, Ident, Item, ItemKind, Lit, LitKind, MetaItem, Span}; use syn::fold::{self, Folder}; use std::mem; /// This constant controls the amount of padding which is created between /// consecutive files' span ranges. It is non-zero to ensure that the low byte /// offset of one file is not equal to the high byte offset of the previous /// file. const FILE_PADDING_BYTES: usize = 1; /// Information regarding the on-disk location of a span of code. /// This type is produced by `SourceMap::locinfo`. #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] pub struct LocInfo<'a> { pub path: &'a Path, pub line: usize, pub col: usize, } impl<'a> fmt::Display for LocInfo<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}:{}:{}", self.path.display(), self.line, self.col) } } #[derive(Debug)] struct FileInfo { span: Span, path: PathBuf, src: String, lines: Vec<usize>, } /// NOTE: This produces line and column. 
Line is 1-indexed, column is 0-indexed
fn offset_line_col(lines: &Vec<usize>, off: usize) -> (usize, usize) {
    // `lines` holds the byte offset of every line start, so the binary search
    // either hits a line start exactly (column 0) or lands just past the line
    // the offset belongs to.
    match lines.binary_search(&off) {
        Ok(found) => (found + 1, 0),
        Err(idx) => (idx, off - lines[idx - 1]),
    }
}

/// Computes the byte offset of every line start in `s` (always including 0).
fn lines_offsets(s: &[u8]) -> Vec<usize> {
    let mut lines = vec![0];
    let mut prev = 0;
    while let Some(len) = memchr::memchr(b'\n', &s[prev..]) {
        prev += len + 1;
        lines.push(prev);
    }
    lines
}

/// The `SourceMap` is the primary entry point for `synmap`. It maintains a
/// mapping between `Span` objects and the original source files they were
/// parsed from.
#[derive(Debug)]
pub struct SourceMap {
    files: Vec<FileInfo>,
    offset: usize,
}

impl SourceMap {
    /// Create a new `SourceMap` object with no files inside of it.
    pub fn new() -> SourceMap {
        SourceMap {
            files: Vec::new(),
            offset: 0,
        }
    }

    /// Read and parse the passed-in file path as a crate root, recursively
    /// parsing each of the submodules. Produces a syn `Crate` object with
    /// all submodules inlined.
    ///
    /// `Span` objects inside the resulting crate object are `SourceMap`
    /// relative, and should be interpreted by passing to the other methods on
    /// this type, such as `locinfo`, `source_text`, or `filename`.
    pub fn add_crate_root<P: AsRef<Path>>(&mut self, path: P) -> io::Result<Crate> {
        self.parse_canonical_file(fs::canonicalize(path)?)
    }

    /// This is an internal method which requires a canonical pathbuf as
    /// returned from a method like `fs::canonicalize`.
    fn parse_canonical_file(&mut self, path: PathBuf) -> io::Result<Crate> {
        // Parse the crate with syn
        let mut source = String::new();
        File::open(&path)?.read_to_string(&mut source)?;
        let krate = syn::parse_crate(&source).map_err(|e| Error::new(ErrorKind::InvalidData, e))?;
        let parent = path.parent()
            .ok_or(Error::new(
                ErrorKind::InvalidInput,
                "cannot parse file without parent directory",
            ))?
.to_path_buf();

        // Register the read-in file in the SourceMap
        let offset = self.offset;
        // Reserve this file's byte range, plus padding so adjacent ranges
        // never share a boundary offset.
        self.offset += source.len() + FILE_PADDING_BYTES;
        self.files.push(FileInfo {
            span: Span {
                lo: offset,
                hi: offset + source.len(),
            },
            path: path,
            lines: lines_offsets(source.as_bytes()),
            src: source,
        });

        // Walk the parsed Crate object, recursively filling in the bodies of
        // `mod` statements, and rewriting spans to be SourceMap-relative
        // instead of file-relative.
        let idx = self.files.len() - 1;
        let mut walker = Walker {
            idx: idx,
            error: None,
            sm: self,
            parent: parent,
        };
        let krate = walker.fold_crate(krate);
        if let Some(err) = walker.error {
            return Err(err);
        }
        Ok(krate)
    }

    /// Resolves a SourceMap-relative span to the file containing it, returning
    /// that file's info together with the span rebased to file-relative offsets.
    fn local_fileinfo(&self, mut span: Span) -> io::Result<(&FileInfo, Span)> {
        if span.lo > span.hi {
            return Err(Error::new(
                ErrorKind::InvalidInput,
                "Invalid span object with negative length",
            ));
        }
        // A span must fall entirely inside a single file's range to resolve.
        for fi in &self.files {
            if span.lo >= fi.span.lo && span.lo <= fi.span.hi && span.hi >= fi.span.lo
                && span.hi <= fi.span.hi
            {
                // Remove the offset
                span.lo -= fi.span.lo;
                span.hi -= fi.span.lo;
                // Set the path
                return Ok((fi, span));
            }
        }
        Err(Error::new(
            ErrorKind::InvalidInput,
            "Span is not part of any input file",
        ))
    }

    /// Get the filename which contains the given span.
    ///
    /// Fails if the span is invalid or spans multiple source files.
    pub fn filename(&self, span: Span) -> io::Result<&Path> {
        Ok(&self.local_fileinfo(span)?.0.path)
    }

    /// Get the source text for the passed-in span.
    ///
    /// Fails if the span is invalid or spans multiple source files.
    pub fn source_text(&self, span: Span) -> io::Result<&str> {
        let (fi, span) = self.local_fileinfo(span)?;
        Ok(&fi.src[span.lo..span.hi])
    }

    /// Get a LocInfo object for the passed-in span, containing line, column,
    /// and file name information for the beginning and end of the span. The
    /// `path` field in the returned LocInfo struct will be a reference to a
    /// canonical path.
    ///
    /// Fails if the span is invalid or spans multiple source files.
pub fn locinfo(&self, span: Span) -> io::Result<LocInfo> {
        let (fi, span) = self.local_fileinfo(span)?;
        let (line, col) = offset_line_col(&fi.lines, span.lo);
        Ok(LocInfo {
            path: &fi.path,
            line: line,
            col: col,
        })
    }
}

/// AST folder that inlines `mod foo;` items by parsing the referenced files
/// and rewrites spans to be SourceMap-relative as it goes. The first I/O or
/// parse error is stashed in `error` because `Folder` methods cannot return
/// a `Result`.
struct Walker<'a> {
    idx: usize,
    error: Option<Error>,
    sm: &'a mut SourceMap,
    parent: PathBuf,
}

impl<'a> Walker<'a> {
    /// Parses a submodule file; shebangs are only legal in the crate root.
    fn read_submodule(&mut self, path: PathBuf) -> io::Result<Crate> {
        let faux_crate = self.sm.parse_canonical_file(path)?;
        if faux_crate.shebang.is_some() {
            return Err(Error::new(
                ErrorKind::InvalidData,
                "Only the root file of a crate may contain shebangs",
            ));
        }
        Ok(faux_crate)
    }

    /// Locates and parses the file backing `mod <ident>;`, honoring an
    /// explicit `#[path = "..."]` attribute first, then `<ident>/mod.rs`,
    /// then the adjacent `<ident>.rs`.
    fn get_attrs_items(&mut self, attrs: &[Attribute], ident: &Ident) -> io::Result<Crate> {
        // Determine the path of the inner module's file
        for attr in attrs {
            match attr.value {
                MetaItem::NameValue(
                    ref id,
                    Lit {
                        node: LitKind::Str(ref s, _), ..
                    },
                ) => if id.as_ref() == "path" {
                    let explicit = self.parent.join(&s[..]);
                    return self.read_submodule(explicit);
                },
                _ => {}
            }
        }

        let mut subdir = self.parent.join(ident.as_ref());
        subdir.push("mod.rs");
        if subdir.is_file() {
            return self.read_submodule(subdir);
        }

        let adjacent = self.parent.join(&format!("{}.rs", ident));
        if adjacent.is_file() {
            return self.read_submodule(adjacent);
        }

        Err(Error::new(
            ErrorKind::NotFound,
            format!("No file with module definition for `mod {}`", ident),
        ))
    }
}

impl<'a> Folder for Walker<'a> {
    /// Inlines external `mod` declarations and recurses through inline ones,
    /// keeping `parent` pointing at the directory of the module being folded.
    fn fold_item(&mut self, mut item: Item) -> Item {
        if self.error.is_some() {
            return item; // Early return to avoid extra work when erroring
        }
        match item.node {
            ItemKind::Mod(None) => {
                let (attrs, items) = match self.get_attrs_items(&item.attrs, &item.ident) {
                    Ok(Crate { attrs, items, .. }) => (attrs, items),
                    Err(e) => {
                        // Get the file, line, and column information for the
                        // mod statement we're looking at.
let span = self.fold_span(item.span);
                        let loc = match self.sm.locinfo(span) {
                            Ok(li) => li.to_string(),
                            Err(_) => "unknown location".to_owned(),
                        };
                        let e = Error::new(
                            ErrorKind::Other,
                            ModParseErr {
                                err: e,
                                msg: format!(
                                    "Error while parsing `mod {}` \
                                     statement at {}",
                                    item.ident,
                                    loc
                                ),
                            },
                        );
                        self.error = Some(e);
                        return item;
                    }
                };
                item.attrs.extend_from_slice(&attrs);
                item.node = ItemKind::Mod(Some(items));
                item
            }
            ItemKind::Mod(Some(items)) => {
                // Inline module: descend with `parent` pushed one level down,
                // then restore it on the way back out.
                let mut parent = self.parent.join(item.ident.as_ref());
                mem::swap(&mut self.parent, &mut parent);
                let items = items.into_iter().map(|item| self.fold_item(item)).collect();
                mem::swap(&mut self.parent, &mut parent);
                item.node = ItemKind::Mod(Some(items));
                item
            }
            _ => fold::noop_fold_item(self, item),
        }
    }

    /// Rebases a file-relative span to SourceMap-relative coordinates using
    /// the current file's base offset.
    fn fold_span(&mut self, span: Span) -> Span {
        let offset = self.sm.files[self.idx].span.lo;
        Span {
            lo: span.lo + offset,
            hi: span.hi + offset,
        }
    }
}

/// This is an internal error which is used to build errors when parsing an
/// inner module fails.
#[derive(Debug)]
struct ModParseErr {
    err: Error,
    msg: String,
}

impl error::Error for ModParseErr {
    fn description(&self) -> &str {
        &self.msg
    }

    fn cause(&self) -> Option<&error::Error> {
        Some(&self.err)
    }
}

impl fmt::Display for ModParseErr {
    /// Prints the context message followed by the underlying error.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.msg, f)?;
        f.write_char('\n')?;
        fmt::Display::fmt(&self.err, f)
    }
}
// https://adventofcode.com/2017/day/13
use std::io::{BufRead, BufReader};
use std::fs::File;

/// Reads the firewall layers from input.txt, then prints the severity of an
/// immediate trip (part 1) and the shortest safe launch delay (part 2).
fn main() {
    // Read input to a dense vector indexed by layer number; layers with no
    // scanner get depth 0.
    let f = BufReader::new(File::open("input.txt").expect("Opening input.txt failed"));
    let mut layers = Vec::new();
    for line in f.lines() {
        let raw_line = line.expect("Reading line failed");
        let split: Vec<&str> = raw_line.split(": ").collect();
        let layer_num = split[0].parse::<usize>().unwrap();
        let layer_depth = split[1].parse::<usize>().unwrap();
        // Fill any gap up to this layer with scanner-less (depth 0) layers.
        if layers.len() < layer_num {
            layers.resize(layer_num, 0);
        }
        layers.push(layer_depth);
    }
    let layers = layers;

    let severity = check_severity(0, &layers);
    // Assert to facilitate further tweaks
    assert_eq!(1960, severity);
    println!("Severity of the straight trip is {}", severity);

    // Part 2: smallest delay for which no layer catches the packet.
    let delay = (0..).find(|&d| check_success(d, &layers)).unwrap();
    // Assert to facilitate further tweaks
    assert_eq!(3903378, delay);
    println!("Shortest delay to get through unscathed is {} ps", delay);
}

/// Returns true when the scanner of a layer of `depth` is at position 0 at the
/// moment the packet (launched after `delay` ps) enters layer `i`.
///
/// A scanner sweeps a depth-d column with period 2*(d-1); layers with
/// depth 0 have no scanner and never catch. (Depth-1 layers would divide by
/// zero, but the puzzle input's minimum scanner depth is 2.)
fn caught(i: usize, depth: usize, delay: usize) -> bool {
    depth > 0 && (i + delay) % (2 * depth - 2) == 0
}

/// Sum of `layer * depth` over every layer that catches the packet when it is
/// launched after `delay` picoseconds.
fn check_severity(delay: usize, layers: &[usize]) -> usize {
    layers
        .iter()
        .enumerate()
        .filter(|&(i, &depth)| caught(i, depth, delay))
        .map(|(i, &depth)| i * depth)
        .sum()
}

/// True if a packet launched after `delay` picoseconds crosses every layer
/// without being caught. (Note: being caught at layer 0 adds 0 severity but
/// still counts as caught, which is why part 2 cannot reuse severity == 0.)
fn check_success(delay: usize, layers: &[usize]) -> bool {
    !layers
        .iter()
        .enumerate()
        .any(|(i, &depth)| caught(i, depth, delay))
}
// By listing the first six prime numbers: 2, 3, 5, 7, 11, and 13, we can see that the 6th prime is 13.
//
// What is the 10 001st prime number?

/// Prints the 10,001st prime number (Project Euler problem 7).
fn main() {
    let limit = 10001;
    let mut values: Vec<i64> = Vec::new();
    // Start the search at 2: 1 is not prime. (The original started at 1 and
    // counted it as prime, compensating with an off-by-one loop bound.)
    let mut i = 2;
    // Collect primes until we have exactly `limit` of them.
    while values.len() < limit {
        if is_prime(&i) {
            values.push(i);
        }
        i += 1;
    }
    println!("Solution: {}", values.pop().unwrap());
}

/// Trial-division primality test.
///
/// Fixes the original, which returned true for 1 (and every value below 2)
/// because its `2..val` loop was empty. Divisors are only checked up to
/// sqrt(val): any factor above the square root pairs with one below it, so
/// this is sufficient and much faster than scanning to `val`.
fn is_prime(val: &i64) -> bool {
    if *val < 2 {
        return false;
    }
    let mut i = 2;
    while i * i <= *val {
        if val % i == 0 {
            return false;
        }
        i += 1;
    }
    true
}
// This file is part of Substrate. // Copyright (C) 2018-2021 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Added as part the code review and testing // by ChainSafe Systems Aug 2021 use crate as xx_economics; use crate::*; use frame_support::{ parameter_types, ord_parameter_types, traits::{ OnUnbalanced, }, weights::constants::RocksDbWeight, }; use frame_system::{EnsureSignedBy}; use pallet_staking::{EraIndex}; use sp_runtime::{ testing::{Header, TestXt, H256}, traits::{IdentityLookup, Zero}, Perbill, }; /// The AccountId alias in this test module. pub(crate) type AccountId = u64; pub(crate) type AccountIndex = u64; pub(crate) type BlockNumber = u64; pub(crate) type Balance = u128; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>; type Block = frame_system::mocking::MockBlock<Test>; frame_support::construct_runtime!( pub enum Test where Block = Block, NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Pallet, Call, Config, Storage, Event<T>}, Balances: pallet_balances::{Pallet, Call, Storage, Config<T>, Event<T>}, XXEconomics: xx_economics::{Pallet, Call, Storage, Event<T>, Config<T>}, } ); parameter_types! 
{
    pub const BlockHashCount: u64 = 250;
    pub BlockWeights: frame_system::limits::BlockWeights =
        frame_system::limits::BlockWeights::simple_max(
            frame_support::weights::constants::WEIGHT_PER_SECOND * 2
        );
    pub const MaxLocks: u32 = 1024;
    // `static` parameters can be mutated per-test via their generated handles.
    pub static ExistentialDeposit: Balance = 1;
    pub static SlashDeferDuration: EraIndex = 0;
    pub static Period: BlockNumber = 5;
    pub static Offset: BlockNumber = 0;
}

// Minimal frame_system configuration for the mock runtime; most associated
// types are unit (disabled) and accounts are plain u64 ids.
impl frame_system::Config for Test {
    type BaseCallFilter = frame_support::traits::AllowAll;
    type BlockWeights = ();
    type BlockLength = ();
    type DbWeight = RocksDbWeight;
    type Origin = Origin;
    type Index = AccountIndex;
    type BlockNumber = BlockNumber;
    type Call = Call;
    type Hash = H256;
    type Hashing = ::sp_runtime::traits::BlakeTwo256;
    type AccountId = AccountId;
    type Lookup = IdentityLookup<Self::AccountId>;
    type Header = Header;
    type Event = Event;
    type BlockHashCount = BlockHashCount;
    type Version = ();
    type PalletInfo = PalletInfo;
    type AccountData = pallet_balances::AccountData<Balance>;
    type OnNewAccount = ();
    type OnKilledAccount = ();
    type SystemWeightInfo = ();
    type SS58Prefix = ();
    type OnSetCode = ();
}

impl pallet_balances::Config for Test {
    type MaxLocks = MaxLocks;
    type Balance = Balance;
    type Event = Event;
    type DustRemoval = ();
    type ExistentialDeposit = ExistentialDeposit;
    type AccountStore = System;
    type WeightInfo = ();
    type MaxReserves = ();
    type ReserveIdentifier = [u8; 8];
}

parameter_types!
{
    pub const UncleGenerations: u64 = 0;
    pub const DisabledValidatorsThreshold: Perbill = Perbill::from_percent(25);
}

// Custodian stub: no account is a custody account and total custody is zero.
pub struct MockCustodianHandler;
impl pallet_staking::CustodianHandler<AccountId, Balance> for MockCustodianHandler {
    fn is_custody_account(_: &AccountId) -> bool { false }
    fn total_custody() -> Balance { Balance::zero() }
}

// Fixed account id that stands in for the treasury in tests.
pub const MOCK_TREASURY: &AccountId = &1337;

// 365.25 days, expressed in milliseconds.
pub const MILLISECONDS_PER_YEAR: u64 = 1000 * 3600 * 24 * 36525 / 100;

// allows funds to be deposited in a mock treasury account
pub struct MockTreasury<Test>(sp_std::marker::PhantomData<Test>);
impl OnUnbalanced<NegativeImbalanceOf<Test>> for MockTreasury<Test> {
    fn on_nonzero_unbalanced(amount: NegativeImbalanceOf<Test>) {
        // add balance to mock treasury account
        <Test as Config>::Currency::resolve_creating(&MOCK_TREASURY, amount);
    }
}

parameter_types! {
    pub const RewardsPoolId: PalletId = PalletId(*b"xx/rwrds");
    pub const EraDuration: BlockNumber = 10; // 10 blocks per era
}

ord_parameter_types! {
    pub const AdminAccount: AccountId = 99;
}
// Admin origin: only signed calls from account 99 pass.
pub type TestAdminOrigin = EnsureSignedBy<AdminAccount, AccountId>;

impl xx_economics::Config for Test {
    type Event = Event;
    type Currency = Balances;
    type CustodianHandler = MockCustodianHandler;
    type RewardsPoolId = RewardsPoolId;
    type RewardRemainder = MockTreasury<Test>;
    type EraDuration = EraDuration;
    type AdminOrigin = TestAdminOrigin;
    type WeightInfo = weights::SubstrateWeight<Self>;
}

pub type Extrinsic = TestXt<Call, ()>;

impl<LocalCall> frame_system::offchain::SendTransactionTypes<LocalCall> for Test
where
    Call: From<LocalCall>,
{
    type OverarchingCall = Call;
    type Extrinsic = Extrinsic;
}

/// Test-externalities builder with configurable genesis values for the
/// rewards pool balance and ideal-interest curve points.
pub struct ExtBuilder {
    rewards_balance: BalanceOf<Test>,
    interest_points: Vec<inflation::IdealInterestPoint<BlockNumber>>,
}

impl Default for ExtBuilder {
    fn default() -> Self {
        Self {
            rewards_balance: Default::default(),
            interest_points: Default::default(),
        }
    }
}

impl ExtBuilder {
    pub fn with_rewards_balance(mut self, rewards_balance: BalanceOf<Test>)
-> Self {
        self.rewards_balance = rewards_balance;
        self
    }

    pub fn with_interest_points(mut self, points: Vec<inflation::IdealInterestPoint<BlockNumber>>) -> Self {
        self.interest_points = points;
        self
    }

    /// Builds externalities with the configured genesis storage assimilated.
    pub fn build(self) -> sp_io::TestExternalities {
        sp_tracing::try_init_simple();
        let mut storage = frame_system::GenesisConfig::default()
            .build_storage::<Test>()
            .unwrap();
        xx_economics::GenesisConfig::<Test> {
            balance: self.rewards_balance,
            interest_points: self.interest_points,
            ..Default::default()
        }
        .assimilate_storage(&mut storage)
        .unwrap();
        let ext = sp_io::TestExternalities::from(storage);
        ext
    }

    /// Builds externalities, sets the block number to 1, then runs `test`
    /// inside them.
    pub fn build_and_execute(self, test: impl FnOnce() -> ()) {
        let mut ext = self.build();
        ext.execute_with(|| {
            System::set_block_number(1);
        });
        ext.execute_with(test);
    }
}

/// Advances the mock chain's block number up to `n`. Note this only sets the
/// block number; no per-block pallet hooks are invoked here.
pub(crate) fn run_to_block(n: BlockNumber) {
    for b in (System::block_number() + 1)..=n {
        System::set_block_number(b);
    }
}

/// Collects every xx-economics event emitted so far, discarding other pallets'.
pub(crate) fn xx_economics_events() -> Vec<xx_economics::Event<Test>> {
    System::events()
        .into_iter()
        .map(|r| r.event)
        .filter_map(|e| {
            if let Event::XXEconomics(inner) = e {
                Some(inner)
            } else {
                None
            }
        })
        .collect()
}
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
extern crate rusoto_core;
extern crate rusoto_route53;
extern crate rusoto_sts;
extern crate rusoto_s3;

use std::collections::HashMap;

// Sub-modules for parsing tinydns and interacting with AWS
pub mod tinydns;
pub mod r53;
pub mod resource;
pub mod s3;
pub mod state;
pub mod compare;

/// Top-level configuration metadata: provider settings, the state backend,
/// and the list of DNS zones to manage.
#[derive(Deserialize, Debug)]
pub struct MacrotisConfig {
    pub provider: MacrotisProviderConfig,
    pub statefile: MacrotisStateConfig,
    pub zones: Vec<Zone>
}

/// Provider configuration metadata.
/// If `assume_role` is true, `role_arn` needs to be populated;
/// `region` is optional as well.
#[derive(Serialize, Deserialize, Debug)]
pub struct MacrotisProviderConfig {
    pub name: String,
    pub region: Option<String>,
    pub assume_role: bool,
    pub role_arn: Option<String>,
    pub session_name: Option<String>
}

/// State configuration metadata.
/// If `backend` = "local", only `filename` need be populated; if
/// `backend` = "s3", everything else should be populated.
#[derive(Serialize, Deserialize, Debug)]
pub struct MacrotisStateConfig {
    pub backend: String,
    pub filename: Option<String>,
    pub bucket: Option<String>,
    pub key: Option<String>,
    pub region: Option<String>,
    pub role_arn: Option<String>,
    pub tags: Option<HashMap<String, String>>,
    pub session_name: Option<String>,
}

/// Helper struct for zone data: display name, DNS domain, and hosted zone id.
#[derive(Deserialize, Debug)]
pub struct Zone {
    pub name: String,
    pub domain: String,
    pub id: String
}
use babylon::prelude::*;
#[macro_use]
extern crate lazy_static;
use std::sync::Mutex;

// Global game state; Mutex-wrapped because the render callback needs shared
// mutable access from wasm callbacks.
lazy_static! {
    static ref GAME: Mutex<Game> = Mutex::new(Game::new());
}

struct Game {
    // Milliseconds accumulated since the last sphere was spawned.
    time: f64,
    scene: Scene,
    shape: Vec<Sphere>,
}

impl Game {
    fn new() -> Self {
        Game {
            scene: Scene::create_from_basic_engine("#renderCanvas"),
            shape: vec![],
            time: 0.0,
        }
    }
}

#[no_mangle]
pub fn main() {
    let game = GAME.lock().unwrap();
    game.scene.add_before_render_observable(|| {
        // NOTE(review): this closure re-locks GAME while main still holds the
        // outer lock; this is only deadlock-free if the observable cannot fire
        // before main returns — confirm against the babylon bindings.
        let mut game = GAME.lock().unwrap();
        let delta_time = game.scene.get_delta_time();
        game.time += delta_time;
        if game.time > 1000.0 {
            game.time -= 1000.0;
            // add sphere every second
            let mut sphere = Sphere::new(&game.scene, babylon::js::random());
            sphere.set_position(Vector::new(
                babylon::js::random() - 0.5,
                babylon::js::random() - 0.5,
                babylon::js::random() - 0.5,
            ));
            game.shape.push(sphere);
        }
    })
}
use chrono::{DateTime, Utc};
use super::resource::{ResourceKind, CResourceUpdate, SResourceUpdate};
use super::criterion::Criterion;

/// Client -> server messages, deserialize only
// NOTE(review): Serialize is derived despite the "deserialize only" doc —
// presumably for tests or logging; confirm intent.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[serde(tag = "type")]
#[serde(deny_unknown_fields)]
pub enum CMessage {
    // Standard messages
    GetResources { serial: u64, ids: Vec<String> },
    Subscribe { serial: u64, ids: Vec<String> },
    Unsubscribe { serial: u64, ids: Vec<String> },
    UpdateResource {
        serial: u64,
        resource: CResourceUpdate,
    },
    RemoveResource { serial: u64, id: String },
    FilterSubscribe {
        serial: u64,
        // Falls back to ResourceKind::default() when absent on the wire.
        #[serde(default)]
        kind: ResourceKind,
        criteria: Vec<Criterion>,
    },
    FilterUnsubscribe { serial: u64, filter_serial: u64 },
    // Special messages
    UploadTorrent {
        serial: u64,
        size: u64,
        path: Option<String>,
    },
    UploadMagnet {
        serial: u64,
        uri: String,
        path: Option<String>,
    },
    UploadFiles {
        serial: u64,
        size: u64,
        path: String,
    },
    DownloadFile { serial: u64, id: String },
}

/// Server -> client message, serialize only
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[serde(tag = "type")]
#[serde(deny_unknown_fields)]
pub enum SMessage<'a> {
    // Standard messages
    // Borrowed variant used when serializing; `OResourcesExtant` below is the
    // owned, deserialize-side mirror of the same RESOURCES_EXTANT wire tag.
    #[serde(skip_deserializing)]
    ResourcesExtant { serial: u64, ids: Vec<&'a str> },
    #[serde(skip_serializing)]
    #[serde(rename = "RESOURCES_EXTANT")]
    OResourcesExtant { serial: u64, ids: Vec<String> },
    ResourcesRemoved { serial: u64, ids: Vec<String> },
    UpdateResources { resources: Vec<SResourceUpdate<'a>> },
    // Special messages
    TransferOffer {
        serial: u64,
        expires: DateTime<Utc>,
        token: String,
        size: u64,
    },
    // Error messages
    UnknownResource(Error),
    InvalidResource(Error),
    // InvalidMessage(Error),
    InvalidSchema(Error),
    // InvalidRequest(Error),
    PermissionDenied(Error),
    TransferFailed(Error),
    // ServerError(Error),
}

/// Error payload carried by the `SMessage` error variants.
#[derive(Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Error {
    // Serial of the client message that triggered the error, if any.
    pub serial: Option<u64>,
    pub reason: String,
}

#[cfg(test)]
mod tests {
    extern crate serde_json;
    use super::*;
    use super::super::{resource, criterion};

    // Deserializes a FILTER_SUBSCRIBE message from its JSON wire form and
    // checks the tagged-enum representation and criterion parsing.
    #[test]
    fn test_json_repr() {
        let data = r#" { "type": "FILTER_SUBSCRIBE", "serial": 0, "criteria": [ { "field": "id", "op": "in", "value": [1,2,3] } ] } "#;
        let m = serde_json::from_str(data).unwrap();
        // `kind` is absent from the JSON, so the pattern below also verifies
        // that the serde default is Torrent.
        if let CMessage::FilterSubscribe {
            kind: resource::ResourceKind::Torrent,
            serial: 0,
            criteria: c,
        } = m
        {
            assert_eq!(c[0].field, "id");
            assert_eq!(c[0].op, criterion::Operation::In);
            assert_eq!(c[0].value, criterion::Value::AN(vec![1, 2, 3]));
        } else {
            unreachable!();
        }
    }
}
use rps::moves::Move;
use ::game_state::GameState;

/// List of commands that clients send to server
#[derive(Debug, Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable)]
pub enum ClientCommand {
    Ping,
    JoinNewGame,
    // NOTE(review): tuple fields presumably (game id, move) — confirm
    // against the server-side handler.
    MakeMove(u32, Move),
}

/// List of commands that server sends to clients
#[derive(Debug, Clone, Copy, RustcEncodable, RustcDecodable)]
pub enum ServerCommand {
    NewGameStarted,
    GameState(GameState),
    // Mirrors the shape of ClientCommand::MakeMove.
    InvalidMove(u32, Move),
}
use core::marker::PhantomData;
use core::ptr::NonNull;
use core::{mem, ptr};

pub mod raw;
mod run_queue;
pub(crate) mod timer;
mod timer_queue;
mod util;
mod waker;

use crate::interrupt::{Interrupt, InterruptExt};
use crate::time::Alarm;

#[must_use = "Calling a task function does nothing on its own. You must pass the returned SpawnToken to Executor::spawn()"]
pub struct SpawnToken<F> {
    // None encodes "no task slot available" (see Spawner::spawn).
    raw_task: Option<NonNull<raw::TaskHeader>>,
    // Ties the token to the task future's type without storing it;
    // *mut F also makes the token !Send/!Sync.
    phantom: PhantomData<*mut F>,
}

impl<F> Drop for SpawnToken<F> {
    fn drop(&mut self) {
        // TODO deallocate the task instead.
        panic!("SpawnToken instances may not be dropped. You must pass them to Executor::spawn()")
    }
}

#[derive(Copy, Clone, Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum SpawnError {
    Busy,
}

/// Handle to spawn tasks into an executor.
///
/// This Spawner can spawn any task (Send and non-Send ones), but it can
/// only be used in the executor thread (it is not Send itself).
///
/// If you want to spawn tasks from another thread, use [SendSpawner].
#[derive(Copy, Clone)]
pub struct Spawner {
    executor: &'static raw::Executor,
    not_send: PhantomData<*mut ()>,
}

impl Spawner {
    /// Spawns the task held by `token`, or returns SpawnError::Busy when the
    /// token carries no task slot.
    pub fn spawn<F>(&self, token: SpawnToken<F>) -> Result<(), SpawnError> {
        let task = token.raw_task;
        // Consume the token without running its Drop impl (which panics).
        mem::forget(token);
        match task {
            Some(task) => {
                unsafe { self.executor.spawn(task) };
                Ok(())
            }
            None => Err(SpawnError::Busy),
        }
    }

    /// Convert this Spawner to a SendSpawner. This allows you to send the
    /// spawner to other threads, but the spawner loses the ability to spawn
    /// non-Send tasks.
    pub fn make_send(&self) -> SendSpawner {
        SendSpawner {
            executor: self.executor,
            not_send: PhantomData,
        }
    }
}

/// Handle to spawn tasks into an executor from any thread.
///
/// Unlike [Spawner], this handle implements Send and Sync and so may be used
/// from any thread, but in exchange it can only spawn tasks that are
/// themselves Send (see [SendSpawner::spawn]).
#[derive(Copy, Clone)]
pub struct SendSpawner {
    executor: &'static raw::Executor,
    not_send: PhantomData<*mut ()>,
}

// SAFETY: the only operation exposed is `spawn`, which requires `F: Send`,
// and the executor reference is 'static, so sharing/sending this handle is
// intended to be sound despite the PhantomData<*mut ()> field.
unsafe impl Send for SendSpawner {}
unsafe impl Sync for SendSpawner {}

/// Handle to spawn tasks to an executor.
///
/// Unlike [Spawner], this handle may be used from any thread, but it can
/// only spawn tasks that are Send (enforced by the `F: Send` bound on
/// [SendSpawner::spawn]).
impl SendSpawner {
    /// Spawns the task held by `token`, or returns SpawnError::Busy when the
    /// token carries no task slot.
    pub fn spawn<F: Send>(&self, token: SpawnToken<F>) -> Result<(), SpawnError> {
        let header = token.raw_task;
        // Consume the token without running its Drop impl (which panics).
        mem::forget(token);
        match header {
            Some(header) => {
                unsafe { self.executor.spawn(header) };
                Ok(())
            }
            None => Err(SpawnError::Busy),
        }
    }
}

/// Thread-mode executor: runs queued tasks, sleeping via WFE between polls.
pub struct Executor {
    inner: raw::Executor,
    not_send: PhantomData<*mut ()>,
}

impl Executor {
    pub const fn new() -> Self {
        Self {
            // SEV wakes the WFE in `run` whenever work is signalled.
            inner: raw::Executor::new(|_| cortex_m::asm::sev(), ptr::null_mut()),
            not_send: PhantomData,
        }
    }

    pub fn set_alarm(&mut self, alarm: &'static dyn Alarm) {
        self.inner.set_alarm(alarm);
    }

    /// Runs the executor.
    ///
    /// This function never returns.
    pub fn run(&'static mut self, init: impl FnOnce(Spawner)) -> ! {
        init(unsafe { self.inner.spawner() });
        loop {
            unsafe { self.inner.run_queued() };
            cortex_m::asm::wfe();
        }
    }
}

// Pends the NVIC interrupt with the given raw number.
fn pend_by_number(n: u16) {
    #[derive(Clone, Copy)]
    struct N(u16);
    // SAFETY-relevant: `number` must return a valid interrupt number; here it
    // echoes `n`, which originates from `Interrupt::number()` in
    // `InterruptExecutor::new` below.
    unsafe impl cortex_m::interrupt::InterruptNumber for N {
        fn number(self) -> u16 {
            self.0
        }
    }
    cortex_m::peripheral::NVIC::pend(N(n))
}

/// Executor driven from an interrupt handler instead of a thread loop.
pub struct InterruptExecutor<I: Interrupt> {
    irq: I,
    inner: raw::Executor,
    not_send: PhantomData<*mut ()>,
}

impl<I: Interrupt> InterruptExecutor<I> {
    pub fn new(irq: I) -> Self {
        // The IRQ number is smuggled through the signal-fn context pointer so
        // the wake callback can pend this executor's own interrupt.
        let ctx = irq.number() as *mut ();
        Self {
            irq,
            inner: raw::Executor::new(|ctx| pend_by_number(ctx as u16), ctx),
            not_send: PhantomData,
        }
    }

    pub fn set_alarm(&mut self, alarm: &'static dyn Alarm) {
        self.inner.set_alarm(alarm);
    }

    /// Start the executor.
    ///
    /// `init` is called in the interrupt context, then the interrupt is
    /// configured to run the executor.
    pub fn start(&'static mut self, init: impl FnOnce(Spawner) + Send) {
        // Keep the IRQ masked while wiring things up so the handler cannot
        // fire against a half-initialized executor.
        self.irq.disable();
        init(unsafe { self.inner.spawner() });
        // The handler context is a pointer to the inner raw executor;
        // the context is installed before the IRQ is re-enabled below.
        self.irq.set_handler(|ctx| unsafe {
            let executor = &*(ctx as *const raw::Executor);
            executor.run_queued();
        });
        self.irq.set_handler_context(&self.inner as *const _ as _);
        self.irq.enable();
    }
}
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};

// Machine-generated Azure Support API models. Optional fields mirror the wire
// schema and are omitted during serialization when None/empty.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}

pub mod operation {
    use super::*;

    // Localized display metadata for an operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
    }
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LookUpResourceIdRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identifier: Option<String>,
    // `type` is a Rust keyword, hence the `type_` field name + rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<look_up_resource_id_request::Type>,
}

pub mod look_up_resource_id_request {
    use super::*;

    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        #[serde(rename = "Microsoft.Support/supportTickets")]
        MicrosoftSupportSupportTickets,
    }
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LookUpResourceIdResponse {
    #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
    pub resource_id: Option<String>,
}

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExceptionResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ServiceError>,
}
// Error body returned by the service: code/message/target plus optional
// per-item details.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ServiceErrorDetail>,
}

// One entry of `ServiceError::details`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceErrorDetail {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
}
struct Solution();

impl Solution {
    /// Boyer–Moore majority vote: returns the element occurring more than
    /// n/2 times (callers guarantee such a majority exists, per the
    /// LeetCode 169 contract). Runs in O(n) time and O(1) space — unlike
    /// the obvious HashMap-counting approach, no extra allocation is needed.
    pub fn majority_element(nums: Vec<i32>) -> i32 {
        let mut candidate = nums[0];
        let mut count = 1;
        // Pair off each mismatching element against the current candidate;
        // whenever the tally hits zero, adopt the current element instead.
        for &n in &nums[1..] {
            count += if n == candidate { 1 } else { -1 };
            if count == 0 {
                candidate = n;
                count = 1;
            }
        }
        candidate
    }
}

fn main() {
    println!("{}", Solution::majority_element(vec![3,2,3]));
    println!("{}", Solution::majority_element(vec![2,2,1,1,1,2,2]));
    println!("{}", Solution::majority_element(vec![6,5,5]));
}
/* * cd C:\Users\むずでょ\source\repos\practice-rust\tokio * cargo check --example ep4-read * cargo build --example ep4-read * cargo run --example ep4-read * * [Reading and Writing Data](https://tokio.rs/docs/io/reading_writing_data/) */ use futures::executor::block_on; use std::fs::File; use std::io; use std::io::prelude::*; use std::net::SocketAddr; use std::str; use tokio::net::TcpStream; use tokio::prelude::*; async fn connect() { // v4 か v6 かはサーバー側と合わせること。 let addr: SocketAddr = "127.0.0.1:3000".parse().unwrap(); println!("Host | {}", &addr); // クライアント側は リスナーではなく、ストリームを取得する。 // https://docs.rs/tokio-tcp/0.1.2/src/tokio_tcp/stream.rs.html#49-58 match TcpStream::connect(&addr).await { Ok(mut stream) => { match stream.set_nodelay(true) { Ok(_x) => {} Err(e) => panic!("{}", e), } println!("Connected from | {:?}", stream.peer_addr().unwrap()); loop { // 改行を付けないと、受信側が 受信完了しません。 match stream.write_all("hot dog!\n".as_bytes()).await { Ok(_x) => { stream.flush(); println!("Info | Writed."); } Err(e) => panic!("{}", e), } println!("Info | Waiting for read."); // Read. // https://docs.rs/tokio/0.1.12/tokio/prelude/trait.Read.html#tymethod.read let mut buffer = [0; 2048]; // 末尾の改行をもって受信完了。 match stream.read(&mut buffer[..]).await { Ok(size) => { println!( "Read | {} | {:?}", size, str::from_utf8(&buffer[..size]).unwrap() ); } Err(e) => panic!(e), } // Sleep 3 seconds. println!("Info | Please wait 3 seconds."); std::thread::sleep(std::time::Duration::from_secs(3)); } } Err(e) => println!("{:?}", e), }; } #[tokio::main] async fn main() { println!("Info | Please wait 1 seconds."); std::thread::sleep(std::time::Duration::from_secs(1)); // syncronized. block_on(connect()); // asyncronized. // tokio::spawn(connect()); // Sleep 9 seconds. println!("Info | Please wait 10 seconds."); std::thread::sleep(std::time::Duration::from_secs(10)); println!("Info | Finished."); }
//! WMATA-defined codes for each MetroRail station.
use crate::{
    error::Error,
    rail::{client::responses, line::Line, traits::NeedsStation},
    requests::Fetch,
};
use serde::{
    de::{Deserializer, Error as SerdeError},
    Deserialize,
};
use std::{error, fmt, str::FromStr};

/// Every MetroRail station code as defined by WMATA.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum Station {
    A01, A02, A03, A04, A05, A06, A07, A08, A09, A10, A11, A12, A13, A14, A15,
    B01, B02, B03, B04, B05, B06, B07, B08, B09, B10, B11, B35,
    C01, C02, C03, C04, C05, C06, C07, C08, C09, C10, C12, C13, C14, C15,
    D01, D02, D03, D04, D05, D06, D07, D08, D09, D10, D11, D12, D13,
    E01, E02, E03, E04, E05, E06, E07, E08, E09, E10,
    F01, F02, F03, F04, F05, F06, F07, F08, F09, F10, F11,
    G01, G02, G03, G04, G05,
    J02, J03,
    K01, K02, K03, K04, K05, K06, K07, K08,
    N01, N02, N03, N04, N06,
}

impl Fetch for Station {}
impl NeedsStation for Station {}

impl Station {
    /// Distance, fare information, and estimated travel time between this and another optional station, including those on different lines.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/5476364f031f590f38092507/operations/5476364f031f5909e4fe3313?)
    ///
    /// # Example
    /// ```
    /// use wmata::Station;
    /// use tokio_test::block_on;
    ///
    /// let path = block_on(async { Station::A01.to_station(Some(Station::A02), "9e38c3eab34c4e6c990828002828f5ed").await });
    /// assert!(path.is_ok());
    ///
    /// ```
    pub async fn to_station(
        self,
        destination_station: Option<Station>,
        api_key: &str,
    ) -> Result<responses::StationToStationInfos, Error> {
        self.station_to_station(Some(self), destination_station, api_key)
            .await
    }

    /// List of reported elevator and escalator outages at this station.
    /// [WMATA Documentation](https://developer.wmata.com/docs/services/54763641281d83086473f232/operations/54763641281d830c946a3d76?)
    ///
    /// # Examples
    /// ```
    /// use wmata::Station;
    /// use tokio_test::block_on;
    ///
    /// let incidents = block_on(async { Station::A01.elevator_and_escalator_incidents("9e38c3eab34c4e6c990828002828f5ed").await });
    /// assert!(incidents.is_ok());
    /// ```
    pub async fn elevator_and_escalator_incidents(
        self,
        api_key: &str,
    ) -> Result<responses::ElevatorAndEscalatorIncidents, Error> {
        self.elevator_and_escalator_incidents_at(Some(self), api_key)
            .await
    }

    /// Reported rail incidents (significant disruptions and delays to normal service) at this station
    ///
    /// # Examples
    /// ```
    /// use wmata::Station;
    /// use tokio_test::block_on;
    ///
    /// let incidents = block_on(async { Station::A01.incidents("9e38c3eab34c4e6c990828002828f5ed").await });
    /// assert!(incidents.is_ok());
    /// ```
    pub async fn incidents(self, api_key: &str) -> Result<responses::RailIncidents, Error> {
        self.incidents_at(Some(self), api_key).await
    }

    /// Next train arrivals for this station
    ///
    /// # Examples
    /// ```
    /// use wmata::Station;
    /// use tokio_test::block_on;
    ///
    /// let next_trains = block_on(async { Station::A01.next_trains("9e38c3eab34c4e6c990828002828f5ed").await });
    /// assert!(next_trains.is_ok());
    /// ```
    pub async fn next_trains(self, api_key: &str) -> Result<responses::RailPredictions, Error> {
        // Fully-qualified call disambiguates the trait method from this
        // inherent method of the same name.
        <Self as NeedsStation>::next_trains(&self, self, api_key).await
    }

    /// Location and address information at this station
    ///
    /// # Examples
    /// ```
    /// use wmata::Station;
    /// use tokio_test::block_on;
    ///
    /// let information = block_on(async { Station::A01.information("9e38c3eab34c4e6c990828002828f5ed").await });
    /// assert!(information.is_ok());
    /// ```
    pub async fn information(self, api_key: &str) -> Result<responses::StationInformation, Error> {
        self.station_information(self, api_key).await
    }

    /// Parking information for this station
    ///
    /// # Examples
    /// ```
    /// use wmata::Station;
    /// use tokio_test::block_on;
    ///
    /// let parking_information = block_on(async { Station::A01.parking_information("9e38c3eab34c4e6c990828002828f5ed").await });
    /// assert!(parking_information.is_ok());
    /// ```
    pub async fn parking_information(
        self,
        api_key: &str,
    ) -> Result<responses::StationsParking, Error> {
        <Self as NeedsStation>::parking_information(&self, self, api_key).await
    }

    /// Set of ordered stations and distances between this station and another on the **same line**.
    ///
    /// # Examples
    /// ```
    /// use wmata::Station;
    /// use tokio_test::block_on;
    ///
    /// let path = block_on(async { Station::A01.path_to(Station::A02, "9e38c3eab34c4e6c990828002828f5ed").await });
    /// assert!(path.is_ok());
    /// ```
    pub async fn path_to(
        self,
        destination_station: Station,
        api_key: &str,
    ) -> Result<responses::PathBetweenStations, Error> {
        self.path_from(self, destination_station, api_key).await
    }

    /// Opening and scheduled first/last train times for this station.
    ///
    /// # Examples
    /// ```
    /// use wmata::Station;
    /// use tokio_test::block_on;
    ///
    /// let timings = block_on(async { Station::A01.timings("9e38c3eab34c4e6c990828002828f5ed").await });
    /// assert!(timings.is_ok());
    /// ```
    pub async fn timings(self, api_key: &str) -> Result<responses::StationTimings, Error> {
        <Self as NeedsStation>::timings(&self, self, api_key).await
    }
}

// Deserializes a Station from its string code (e.g. "A01") via FromStr.
impl<'de> Deserialize<'de> for Station {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let station = String::deserialize(deserializer)?;
        Station::from_str(&station)
            .map_err(|_| SerdeError::custom("String provided is not a Station code"))
    }
}

impl Station {
    /// Full human-readable station name for this code.
    pub fn name(self) -> String {
        match self {
            Station::A01 => "Metro Center".to_string(),
            Station::A02 => "Farragut North".to_string(),
            Station::A03 => "Dupont Circle".to_string(),
            Station::A04 => "Woodley Park-Zoo/Adams Morgan".to_string(),
            Station::A05 => "Cleveland Park".to_string(),
            Station::A06 => "Van Ness-UDC".to_string(),
            Station::A07 => "Tenleytown-AU".to_string(),
            Station::A08 => "Friendship Heights".to_string(),
            Station::A09 => "Bethesda".to_string(),
            Station::A10 => "Medical Center".to_string(),
            Station::A11 => "Grosvenor-Strathmore".to_string(),
            Station::A12 => "White Flint".to_string(),
            Station::A13 => "Twinbrook".to_string(),
            Station::A14 => "Rockville".to_string(),
            Station::A15 => "Shady Grove".to_string(),
            Station::B01 => "Gallery Pl-Chinatown".to_string(),
            Station::B02 => "Judiciary Square".to_string(),
            Station::B03 => "Union Station".to_string(),
            Station::B04 => "Rhode Island Ave-Brentwood".to_string(),
            Station::B05 => "Brookland-CUA".to_string(),
            Station::B06 => "Fort Totten".to_string(),
            Station::B07 => "Takoma".to_string(),
            Station::B08 => "Silver Spring".to_string(),
            Station::B09 => "Forest Glen".to_string(),
            Station::B10 => "Wheaton".to_string(),
            Station::B11 => "Glenmont".to_string(),
            Station::B35 => "NoMa-Gallaudet U".to_string(),
            Station::C01 => "Metro Center".to_string(),
            Station::C02 => "McPherson Square".to_string(),
            Station::C03 => "Farragut West".to_string(),
            Station::C04 => "Foggy Bottom-GWU".to_string(),
            Station::C05 => "Rosslyn".to_string(),
            Station::C06 => "Arlington Cemetery".to_string(),
            Station::C07 => "Pentagon".to_string(),
            Station::C08 => "Pentagon City".to_string(),
            Station::C09 => "Crystal City".to_string(),
            Station::C10 => "Ronald Reagan Washington National Airport".to_string(),
            Station::C12 => "Braddock Road".to_string(),
            Station::C13 => "King St-Old Town".to_string(),
            Station::C14 => "Eisenhower Avenue".to_string(),
            Station::C15 => "Huntington".to_string(),
            Station::D01 => "Federal Triangle".to_string(),
            Station::D02 => "Smithsonian".to_string(),
            Station::D03 => "L'Enfant Plaza".to_string(),
            Station::D04 => "Federal Center SW".to_string(),
            Station::D05 => "Capitol South".to_string(),
            Station::D06 => "Eastern Market".to_string(),
            Station::D07 => "Potomac Ave".to_string(),
            Station::D08 => "Stadium-Armory".to_string(),
            Station::D09 => "Minnesota Ave".to_string(),
            Station::D10 => "Deanwood".to_string(),
            Station::D11 => "Cheverly".to_string(),
            Station::D12 => "Landover".to_string(),
            Station::D13 => "New Carrollton".to_string(),
            Station::E01 => "Mt Vernon Sq 7th St-Convention Center".to_string(),
            Station::E02 => "Shaw-Howard U".to_string(),
            Station::E03 => "U Street/African-Amer Civil War Memorial/Cardozo".to_string(),
            Station::E04 => "Columbia Heights".to_string(),
            Station::E05 => "Georgia Ave-Petworth".to_string(),
            Station::E06 => "Fort Totten".to_string(),
            Station::E07 => "West Hyattsville".to_string(),
            Station::E08 => "Prince George's Plaza".to_string(),
            Station::E09 => "College Park-U of Md".to_string(),
            Station::E10 => "Greenbelt".to_string(),
            Station::F01 => "Gallery Pl-Chinatown".to_string(),
            Station::F02 => "Archives-Navy Memorial-Penn Quarter".to_string(),
            Station::F03 => "L'Enfant Plaza".to_string(),
            Station::F04 => "Waterfront".to_string(),
            Station::F05 => "Navy Yard-Ballpark".to_string(),
            Station::F06 => "Anacostia".to_string(),
            Station::F07 => "Congress Heights".to_string(),
            Station::F08 => "Southern Avenue".to_string(),
            Station::F09 => "Naylor Road".to_string(),
            Station::F10 => "Suitland".to_string(),
            Station::F11 => "Branch Ave".to_string(),
            Station::G01 => "Benning Road".to_string(),
            Station::G02 => "Capitol Heights".to_string(),
            Station::G03 => "Addison Road-Seat Pleasant".to_string(),
            Station::G04 => "Morgan Boulevard".to_string(),
            Station::G05 => "Largo Town Center".to_string(),
            Station::J02 => "Van Dorn Street".to_string(),
            Station::J03 => "Franconia-Springfield".to_string(),
            Station::K01 => "Court House".to_string(),
            Station::K02 => "Clarendon".to_string(),
            Station::K03 => "Virginia Square-GMU".to_string(),
            Station::K04 => "Ballston-MU".to_string(),
            Station::K05 => "East Falls Church".to_string(),
            Station::K06 => "West Falls Church-VT/UVA".to_string(),
            Station::K07 => "Dunn Loring-Merrifield".to_string(),
            Station::K08 => "Vienna/Fairfax-GMU".to_string(),
            Station::N01 => "McLean".to_string(),
            Station::N02 => "Tysons Corner".to_string(),
            Station::N03 => "Greensboro".to_string(),
            Station::N04 => "Spring Hill".to_string(),
            Station::N06 => "Wiehle-Reston East".to_string(),
        }
    }

    /// Every line that serves this station (transfer stations list several).
    pub fn lines(&self) -> &[Line] {
        match self {
            Station::A01 | Station::C01 => &[Line::Blue, Line::Orange, Line::Silver, Line::Red],
            Station::A02 | Station::A03 | Station::A04 | Station::A05 | Station::A06
            | Station::A07 | Station::A08 | Station::A09 | Station::A10 | Station::A11
            | Station::A12 | Station::A13 | Station::A14 | Station::A15 | Station::B02
            | Station::B03 | Station::B04 | Station::B05 | Station::B07 | Station::B08
            | Station::B09 | Station::B10 | Station::B11 | Station::B35 => &[Line::Red],
            Station::B01 | Station::B06 | Station::E06 | Station::F01 => {
                &[Line::Red, Line::Yellow, Line::Green]
            }
            Station::C02 | Station::C03 | Station::C04 | Station::C05 | Station::D01
            | Station::D02 | Station::D04 | Station::D05 | Station::D06 | Station::D07
            | Station::D08 => &[Line::Blue, Line::Orange, Line::Silver],
            Station::C06 | Station::J02 | Station::J03 => &[Line::Blue],
            Station::C07 | Station::C08 | Station::C09 | Station::C10 | Station::C12
            | Station::C13 => &[Line::Blue, Line::Yellow],
            Station::C14 | Station::C15 => &[Line::Yellow],
            Station::D03 | Station::F03 => &[
                Line::Green,
                Line::Yellow,
                Line::Blue,
                Line::Orange,
                Line::Silver,
            ],
            Station::D09 | Station::D10 | Station::D11 | Station::D12 | Station::D13
            | Station::K06 | Station::K07 | Station::K08 => &[Line::Orange],
            Station::E01 | Station::E02 | Station::E03 | Station::E04 | Station::E05
            | Station::E07 | Station::E08 | Station::E09 | Station::E10 | Station::F02 =>
                &[Line::Green, Line::Yellow],
            Station::F04 | Station::F05 | Station::F06 | Station::F07 | Station::F08
            | Station::F09 | Station::F10 | Station::F11 => &[Line::Green],
            Station::G01 | Station::G02 | Station::G03 | Station::G04 | Station::G05 => {
                &[Line::Blue, Line::Silver]
            }
            Station::K01 | Station::K02 | Station::K03 | Station::K04 | Station::K05 => {
                &[Line::Orange, Line::Silver]
            }
            Station::N01 | Station::N02 | Station::N03 |
Station::N04 | Station::N06 => {
                &[Line::Silver]
            }
        }
    }
}

// NOTE(review): implementing ToString directly (instead of Display, which
// provides ToString for free) is unidiomatic but kept as-is to avoid an
// interface change.
impl ToString for Station {
    /// Returns the WMATA station code (e.g. "A01") for this variant —
    /// the inverse of `FromStr` below.
    fn to_string(&self) -> String {
        match self {
            Station::A01 => "A01".to_string(), Station::A02 => "A02".to_string(),
            Station::A03 => "A03".to_string(), Station::A04 => "A04".to_string(),
            Station::A05 => "A05".to_string(), Station::A06 => "A06".to_string(),
            Station::A07 => "A07".to_string(), Station::A08 => "A08".to_string(),
            Station::A09 => "A09".to_string(), Station::A10 => "A10".to_string(),
            Station::A11 => "A11".to_string(), Station::A12 => "A12".to_string(),
            Station::A13 => "A13".to_string(), Station::A14 => "A14".to_string(),
            Station::A15 => "A15".to_string(), Station::B01 => "B01".to_string(),
            Station::B02 => "B02".to_string(), Station::B03 => "B03".to_string(),
            Station::B04 => "B04".to_string(), Station::B05 => "B05".to_string(),
            Station::B06 => "B06".to_string(), Station::B07 => "B07".to_string(),
            Station::B08 => "B08".to_string(), Station::B09 => "B09".to_string(),
            Station::B10 => "B10".to_string(), Station::B11 => "B11".to_string(),
            Station::B35 => "B35".to_string(), Station::C01 => "C01".to_string(),
            Station::C02 => "C02".to_string(), Station::C03 => "C03".to_string(),
            Station::C04 => "C04".to_string(), Station::C05 => "C05".to_string(),
            Station::C06 => "C06".to_string(), Station::C07 => "C07".to_string(),
            Station::C08 => "C08".to_string(), Station::C09 => "C09".to_string(),
            Station::C10 => "C10".to_string(), Station::C12 => "C12".to_string(),
            Station::C13 => "C13".to_string(), Station::C14 => "C14".to_string(),
            Station::C15 => "C15".to_string(), Station::D01 => "D01".to_string(),
            Station::D02 => "D02".to_string(), Station::D03 => "D03".to_string(),
            Station::D04 => "D04".to_string(), Station::D05 => "D05".to_string(),
            Station::D06 => "D06".to_string(), Station::D07 => "D07".to_string(),
            Station::D08 => "D08".to_string(), Station::D09 => "D09".to_string(),
            Station::D10 => "D10".to_string(), Station::D11 => "D11".to_string(),
            Station::D12 => "D12".to_string(), Station::D13 => "D13".to_string(),
            Station::E01 => "E01".to_string(), Station::E02 => "E02".to_string(),
            Station::E03 => "E03".to_string(), Station::E04 => "E04".to_string(),
            Station::E05 => "E05".to_string(), Station::E06 => "E06".to_string(),
            Station::E07 => "E07".to_string(), Station::E08 => "E08".to_string(),
            Station::E09 => "E09".to_string(), Station::E10 => "E10".to_string(),
            Station::F01 => "F01".to_string(), Station::F02 => "F02".to_string(),
            Station::F03 => "F03".to_string(), Station::F04 => "F04".to_string(),
            Station::F05 => "F05".to_string(), Station::F06 => "F06".to_string(),
            Station::F07 => "F07".to_string(), Station::F08 => "F08".to_string(),
            Station::F09 => "F09".to_string(), Station::F10 => "F10".to_string(),
            Station::F11 => "F11".to_string(), Station::G01 => "G01".to_string(),
            Station::G02 => "G02".to_string(), Station::G03 => "G03".to_string(),
            Station::G04 => "G04".to_string(), Station::G05 => "G05".to_string(),
            Station::J02 => "J02".to_string(), Station::J03 => "J03".to_string(),
            Station::K01 => "K01".to_string(), Station::K02 => "K02".to_string(),
            Station::K03 => "K03".to_string(), Station::K04 => "K04".to_string(),
            Station::K05 => "K05".to_string(), Station::K06 => "K06".to_string(),
            Station::K07 => "K07".to_string(), Station::K08 => "K08".to_string(),
            Station::N01 => "N01".to_string(), Station::N02 => "N02".to_string(),
            Station::N03 => "N03".to_string(), Station::N04 => "N04".to_string(),
            Station::N06 => "N06".to_string(),
        }
    }
}

impl FromStr for Station {
    type Err = StringIsNotStationError;

    /// Converts a string to a [`Station`].
    ///
    /// # Examples
    /// ```
    /// use wmata::Station;
    ///
    /// let station_code: Station = "A01".parse().unwrap();
    ///
    /// assert_eq!(Station::A01, station_code);
    /// ```
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "A01" => Ok(Station::A01), "A02" => Ok(Station::A02), "A03" => Ok(Station::A03),
            "A04" => Ok(Station::A04), "A05" => Ok(Station::A05), "A06" => Ok(Station::A06),
            "A07" => Ok(Station::A07), "A08" => Ok(Station::A08), "A09" => Ok(Station::A09),
            "A10" => Ok(Station::A10), "A11" => Ok(Station::A11), "A12" => Ok(Station::A12),
            "A13" => Ok(Station::A13), "A14" => Ok(Station::A14), "A15" => Ok(Station::A15),
            "B01" => Ok(Station::B01), "B02" => Ok(Station::B02), "B03" => Ok(Station::B03),
            "B04" => Ok(Station::B04), "B05" => Ok(Station::B05), "B06" => Ok(Station::B06),
            "B07" => Ok(Station::B07), "B08" => Ok(Station::B08), "B09" => Ok(Station::B09),
            "B10" => Ok(Station::B10), "B11" => Ok(Station::B11), "B35" => Ok(Station::B35),
            "C01" => Ok(Station::C01), "C02" => Ok(Station::C02), "C03" => Ok(Station::C03),
            "C04" => Ok(Station::C04), "C05" => Ok(Station::C05), "C06" => Ok(Station::C06),
            "C07" => Ok(Station::C07), "C08" => Ok(Station::C08), "C09" => Ok(Station::C09),
            "C10" => Ok(Station::C10), "C12" => Ok(Station::C12), "C13" => Ok(Station::C13),
            "C14" => Ok(Station::C14), "C15" => Ok(Station::C15), "D01" => Ok(Station::D01),
            "D02" => Ok(Station::D02), "D03" => Ok(Station::D03), "D04" => Ok(Station::D04),
            "D05" => Ok(Station::D05), "D06" => Ok(Station::D06), "D07" => Ok(Station::D07),
            "D08" => Ok(Station::D08), "D09" => Ok(Station::D09), "D10" => Ok(Station::D10),
            "D11" => Ok(Station::D11), "D12" => Ok(Station::D12), "D13" => Ok(Station::D13),
            "E01" => Ok(Station::E01), "E02" => Ok(Station::E02), "E03" => Ok(Station::E03),
            "E04" => Ok(Station::E04), "E05" => Ok(Station::E05), "E06" => Ok(Station::E06),
            "E07" => Ok(Station::E07), "E08" => Ok(Station::E08), "E09" => Ok(Station::E09),
            "E10" => Ok(Station::E10), "F01" => Ok(Station::F01), "F02" => Ok(Station::F02),
            "F03" => Ok(Station::F03), "F04" => Ok(Station::F04), "F05" => Ok(Station::F05),
            "F06" => Ok(Station::F06), "F07" => Ok(Station::F07), "F08" => Ok(Station::F08),
            "F09" => Ok(Station::F09), "F10" => Ok(Station::F10), "F11" => Ok(Station::F11),
            "G01" => Ok(Station::G01), "G02" => Ok(Station::G02), "G03" => Ok(Station::G03),
            "G04" => Ok(Station::G04), "G05" => Ok(Station::G05), "J02" => Ok(Station::J02),
            "J03" => Ok(Station::J03), "K01" => Ok(Station::K01), "K02" => Ok(Station::K02),
            "K03" => Ok(Station::K03), "K04" => Ok(Station::K04), "K05" => Ok(Station::K05),
            "K06" => Ok(Station::K06), "K07" => Ok(Station::K07), "K08" => Ok(Station::K08),
            "N01" => Ok(Station::N01), "N02" => Ok(Station::N02), "N03" => Ok(Station::N03),
            "N04" => Ok(Station::N04), "N06" => Ok(Station::N06),
            _ => Err(StringIsNotStationError),
        }
    }
}

// Serde helper: maps any non-station string (including empty) to None rather
// than failing deserialization.
pub fn empty_or_station<'de, D>(deserializer: D) -> Result<Option<Station>, D::Error>
where
    D: Deserializer<'de>,
{
    match Station::deserialize(deserializer) {
        Ok(station) => Ok(Some(station)),
        Err(_) => Ok(None),
    }
}

/// An error indicating that the provided string is not a WMATA Station Code.
#[derive(Debug, Clone)]
pub struct StringIsNotStationError;

impl fmt::Display for StringIsNotStationError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Provided string is not a valid station code.")
    }
}

impl error::Error for StringIsNotStationError {
    fn source(&self) -> Option<&(dyn error::Error + 'static)> {
        None
    }
}
use std::io fn main(){ println!("H"); println!("e"); println!("l"); println!("l"); println!("o"); println!(" "); println!("H"); println!("a"); println!("c"); println!("k"); println!("t"); println!("o"); println!("b"); println!("e"); println!("r"); println!("fest!"); }
// Benchmark harness module: environment-configurable settings, the `Benchmark`
// lifecycle trait, and drivers shared by all benchmark submodules below.
pub mod bson_decode;
pub mod bson_encode;
pub mod find_many;
pub mod find_one;
pub mod gridfs_download;
pub mod gridfs_multi_download;
pub mod gridfs_multi_upload;
pub mod gridfs_upload;
pub mod insert_many;
pub mod insert_one;
pub mod json_multi_export;
pub mod json_multi_import;
pub mod run_command;

use std::{
    convert::TryInto,
    sync::Arc,
    time::{Duration, Instant},
};

use anyhow::{bail, Result};
use futures::stream::TryStreamExt;
use indicatif::{ProgressBar, ProgressStyle};
use lazy_static::lazy_static;
use mongodb::{
    bson::{doc, Bson, Document},
    options::{Acknowledgment, ClientOptions, SelectionCriteria, WriteConcern},
    Client,
};
use serde_json::Value;

use crate::fs::{BufReader, File};

lazy_static! {
    // All of these are compile-time env overrides (option_env!), with defaults.
    static ref DATABASE_NAME: String = option_env!("DATABASE_NAME")
        .unwrap_or("perftest")
        .to_string();
    static ref COLL_NAME: String = option_env!("COLL_NAME").unwrap_or("corpus").to_string();
    // Hard wall-clock cap in seconds: stop even if the target count isn't reached.
    static ref MAX_EXECUTION_TIME: u64 = option_env!("MAX_EXECUTION_TIME")
        .unwrap_or("300")
        .parse::<u64>()
        .expect("invalid MAX_EXECUTION_TIME");
    // Minimum seconds to keep iterating even after the target count is reached.
    static ref MIN_EXECUTION_TIME: u64 = option_env!("MIN_EXECUTION_TIME")
        .unwrap_or("60")
        .parse::<u64>()
        .expect("invalid MIN_EXECUTION_TIME");
    pub static ref TARGET_ITERATION_COUNT: usize = option_env!("TARGET_ITERATION_COUNT")
        .unwrap_or("100")
        .parse::<usize>()
        .expect("invalid TARGET_ITERATION_COUNT");
}

#[async_trait::async_trait]
pub trait Benchmark: Sized {
    type Options;

    /// execute once before benchmarking
    async fn setup(options: Self::Options) -> Result<Self>;

    /// execute at the beginning of every iteration
    async fn before_task(&mut self) -> Result<()> {
        Ok(())
    }

    /// The timed body of one iteration; only this call is measured.
    async fn do_task(&self) -> Result<()>;

    /// execute at the end of every iteration
    async fn after_task(&self) -> Result<()> {
        Ok(())
    }

    /// execute once after benchmarking
    async fn teardown(&self) -> Result<()> {
        Ok(())
    }
}

/// Reads a file containing one JSON object per line and parses each line into
/// a BSON `Document`. Fails on the first line that is not a JSON document.
pub(crate) async fn parse_json_file_to_documents(file: File) -> Result<Vec<Document>> {
    let mut docs: Vec<Document> = Vec::new();

    let mut lines = BufReader::new(file).lines();
    while let Some(line) = lines.try_next().await? {
        let json: Value = serde_json::from_str(&line)?;

        docs.push(match json.try_into()? {
            Bson::Document(doc) => doc,
            // A top-level JSON array/scalar is not a valid corpus entry.
            _ => bail!("invalid json document"),
        });
    }

    Ok(docs)
}

/// Stop condition: either the hard time cap is hit, or we have done the target
/// number of iterations AND run for at least the minimum time.
fn finished(duration: Duration, iter: usize) -> bool {
    let elapsed = duration.as_secs();
    elapsed >= *MAX_EXECUTION_TIME
        || (iter >= *TARGET_ITERATION_COUNT && elapsed > *MIN_EXECUTION_TIME)
}

/// Runs the full benchmark lifecycle for `B` and returns the sorted list of
/// per-iteration `do_task` durations (setup/teardown and hooks are untimed).
pub async fn run_benchmark<B: Benchmark + Send + Sync>(
    options: B::Options,
) -> Result<Vec<Duration>> {
    let mut test = B::setup(options).await?;
    let mut test_durations = Vec::new();

    // NOTE(review): the bar length is TARGET_ITERATION_COUNT, but the loop may
    // run more iterations than that (until MIN_EXECUTION_TIME elapses) — the
    // bar can overshoot its length; presumably acceptable for a progress hint.
    let progress_bar = ProgressBar::new(*TARGET_ITERATION_COUNT as u64);
    progress_bar.set_style(
        ProgressStyle::default_bar()
            .template(
                "{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos:>2}/{len:2} \
                 ({eta})",
            )
            .progress_chars("#>-"),
    );

    let benchmark_timer = Instant::now();
    let mut iter = 0;
    while !finished(benchmark_timer.elapsed(), iter) {
        progress_bar.inc(1);
        test.before_task().await?;
        // Only do_task is timed; hooks run outside the stopwatch.
        let timer = Instant::now();
        test.do_task().await?;
        test_durations.push(timer.elapsed());
        test.after_task().await?;
        iter += 1;
    }
    test.teardown().await?;
    progress_bar.finish();

    test_durations.sort();
    Ok(test_durations)
}

/// Drops `database`, using majority write concern; on sharded clusters drops a
/// second time and flushes the router config on every mongos host.
pub async fn drop_database(uri: &str, database: &str) -> Result<()> {
    let mut options = ClientOptions::parse(uri).await?;
    options.write_concern = Some(WriteConcern::builder().w(Acknowledgment::Majority).build());
    let client = Client::with_options(options.clone())?;

    // The "hello" reply tells us whether we are talking to a mongos router.
    let hello = client
        .database("admin")
        .run_command(doc! { "hello": true }, None)
        .await?;

    client.database(&database).drop(None).await?;

    // in sharded clusters, take additional steps to ensure database is dropped completely.
    // see: https://www.mongodb.com/docs/manual/reference/method/db.dropDatabase/#replica-set-and-sharded-clusters
    let is_sharded = hello.get_str("msg").ok() == Some("isdbgrid");
    if is_sharded {
        client.database(&database).drop(None).await?;
        for host in options.hosts {
            // Target each mongos individually via a selection predicate.
            client
                .database("admin")
                .run_command(
                    doc! { "flushRouterConfig": 1 },
                    SelectionCriteria::Predicate(Arc::new(move |s| s.address() == &host)),
                )
                .await?;
        }
    }

    Ok(())
}
// Bounding-volume-hierarchy node for ray tracing: a binary tree of `Object`s,
// each node wrapping its children's combined AABB.
use crate::HitRecord;
use crate::HittableList;
use crate::Object;
use crate::Ray;
use crate::AABB;
use std::sync::Arc;
extern crate rand;
use rand::Rng;

pub struct BvhNode {
    // AABB enclosing both children.
    pub box_: AABB,
    pub left: Arc<dyn Object>,
    pub right: Arc<dyn Object>,
}

impl BvhNode {
    /// Raw constructor from a precomputed box and two children.
    pub fn new_(b: AABB, l: Arc<dyn Object>, r: Arc<dyn Object>) -> Self {
        Self {
            box_: b,
            left: l,
            right: r,
        }
    }

    /// Builds a BVH over an entire scene. `t0`/`t1` are forwarded to `build`
    /// (time interval for moving objects — assumed; TODO confirm against AABB impl).
    pub fn new(world: HittableList, t0: f64, t1: f64) -> Self {
        BvhNode::build(world.objects, world.num as u32, t0, t1)
    }

    /// Uniform random integer in the inclusive range [min, max]
    /// (the `+ 1` converts gen_range's half-open upper bound).
    /// NOTE(review): two-argument `gen_range(low, high)` is the rand 0.7-era
    /// API; rand >= 0.8 requires `gen_range(min..=max)`.
    pub fn random_int(min: u32, max: u32) -> u32 {
        let mut rng = rand::thread_rng();
        rng.gen_range(min, max + 1) as u32
    }

    /// Recursively partitions `objects` into a BVH:
    /// - picks a random axis,
    /// - sorts by bounding-box minimum on that axis,
    /// - splits in half and recurses (1- and 2-element cases handled directly).
    /// Panics (unwrap) if any object lacks a bounding box.
    pub fn build(mut objects: Vec<Arc<dyn Object>>, object_span: u32, t0: f64, t1: f64) -> Self {
        let axis = BvhNode::random_int(0, 2);
        let left: Arc<dyn Object>;
        let right: Arc<dyn Object>;
        if object_span == 1 as u32 {
            // Single object: duplicate it so both children are populated.
            left = objects.remove(0);
            right = left.clone();
        } else if object_span == 2 as u32 {
            objects.sort_by(|a, b| {
                let bo_a = a.bounding_box().unwrap().min.get(axis);
                let bo_b = b.bounding_box().unwrap().min.get(axis);
                bo_a.partial_cmp(&bo_b).unwrap()
            });
            // Remove index 1 first so index 0 is still valid afterwards.
            right = objects.remove(1);
            left = objects.remove(0);
        } else {
            objects.sort_by(|a, b| {
                let bo_a = a.bounding_box().unwrap().min.get(axis);
                let bo_b = b.bounding_box().unwrap().min.get(axis);
                bo_a.partial_cmp(&bo_b).unwrap()
            });
            let mid = object_span / 2;
            let (object1, object2) = objects.split_at_mut(mid as usize);
            // to_vec() clones the Arc handles, not the underlying objects.
            left = Arc::new(BvhNode::build(object1.to_vec(), mid, t0, t1));
            right = Arc::new(BvhNode::build(object2.to_vec(), object_span - mid, t0, t1));
        }
        let box_left = left.bounding_box();
        let box_right = right.bounding_box();
        BvhNode::new_(
            AABB::surrounding_box(box_left.unwrap(), box_right.unwrap()),
            left,
            right,
        )
    }
}

impl Object for BvhNode {
    /// Standard BVH traversal: reject via the node box, then query children,
    /// shrinking the right child's t_max to the left child's hit so the
    /// closer intersection wins.
    fn hit(&self, r: Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let tmp = self.box_.hit(r, t_min, t_max);
        if !tmp {
            Option::None
        } else {
            let hit_left = self.left.hit(r, t_min, t_max);
            match hit_left {
                None => {
                    let hit_right = self.right.hit(r, t_min, t_max);
                    if let Some(z) = hit_right {
                        Option::Some(z)
                    } else {
                        Option::None
                    }
                }
                Some(y) => {
                    // Right child only needs to beat the left hit at y.t.
                    let hit_right = self.right.hit(r, t_min, y.t);
                    if let Some(z) = hit_right {
                        if z.t < y.t {
                            Option::Some(z)
                        } else {
                            Option::Some(y)
                        }
                    } else {
                        Option::Some(y)
                    }
                }
            }
        }
    }

    fn bounding_box(&self) -> Option<AABB> {
        Option::Some(self.box_)
    }
}
// Tile map generation and rendering for an 80x50 roguelike grid (rltk tutorial style).
use rltk::{ RGB, Rltk, RandomNumberGenerator };
use std::cmp::{ min, max };
use super::Rect;

#[derive(PartialEq, Copy, Clone)]
pub enum TileType {
    Wall, Floor
}

/// Makes a map with solid boundaries and 400 randomly placed walls. It won't be pretty.
pub fn new_map_test() -> Vec<TileType> {
    use TileType::*; // save myself some typing here...
    let mut map = vec![Floor; 80 * 50];

    // make boundaries the walls
    for x in 0..80 {
        map[xy_idx(x, 0)] = Wall;
        map[xy_idx(x, 49)] = Wall;
    }
    for y in 0..50 {
        map[xy_idx(0, y)] = Wall;
        map[xy_idx(79, y)] = Wall;
    }

    // Now randomly place a ton of walls
    // obtain RNG:
    let mut rng = rltk::RandomNumberGenerator::new();

    for _i in 0..400 {
        let x = rng.roll_dice(1, 79);
        let y = rng.roll_dice(1, 49);
        let idx = xy_idx(x, y);
        // (40, 25) is kept clear — presumably the player spawn; verify against caller.
        if idx != xy_idx(40, 25) {
            map[idx] = Wall;
        }
    }

    map
}

/// Renders the whole map: grey '.' for floors, green '#' for walls,
/// walking the slice in row-major order (80 tiles per row).
pub fn draw_map(map: &[TileType], ctx: &mut Rltk) {
    let mut x = 0;
    let mut y = 0;
    for tile in map.iter() {
        // render tile
        match tile {
            TileType::Floor => {
                ctx.set(x, y, RGB::from_f32(0.5, 0.5, 0.5), RGB::from_f32(0., 0., 0.), rltk::to_cp437('.'));
            }
            TileType::Wall => {
                ctx.set(x, y, RGB::from_f32(0., 1., 0.), RGB::from_f32(0., 0., 0.), rltk::to_cp437('#'));
            }
        }
        // advance to the next cell; wrap to the next row after column 79
        x += 1;
        if x > 79 {
            x = 0;
            y += 1;
        }
    }
}

// takes an (x, y) point and changes into a single usize for array purposes
pub fn xy_idx(x: i32, y: i32) -> usize {
    (y as usize * 80) + x as usize
}

/// Generates up to MAX_ROOMS non-overlapping rectangular rooms joined by
/// L-shaped corridors; returns the rooms and the finished tile map.
pub fn new_map_rooms_and_corridors() -> (Vec<Rect>, Vec<TileType>) {
    let mut map = vec![TileType::Wall; 80 * 50];

    let mut rooms: Vec<Rect> = Vec::new();
    const MAX_ROOMS: i32 = 30;
    const MIN_SIZE: i32 = 6;
    const MAX_SIZE: i32 = 10;

    let mut rng = RandomNumberGenerator::new();

    // place random rooms
    for _ in 0..MAX_ROOMS {
        let w = rng.range(MIN_SIZE, MAX_SIZE);
        let h = rng.range(MIN_SIZE, MAX_SIZE);
        let x = rng.roll_dice(1, 80 - w - 1) - 1;
        let y = rng.roll_dice(1, 50 - h - 1) - 1;
        let new_room = Rect::new(x, y, w, h);
        let mut ok = true;
        // ensure the new room won't intersect with other rooms
        for other_room in rooms.iter() {
            if new_room.intersect(other_room) {
                ok = false;
            }
        }
        if ok {
            apply_room_to_map(&new_room, &mut map);

            // if we're not the first room, join to other rooms!
            if !rooms.is_empty() {
                let (new_x, new_y) = new_room.center();
                let (prev_x, prev_y) = rooms[rooms.len() - 1].center();
                // Randomly pick horizontal-then-vertical or vertical-then-horizontal.
                if rng.range(0, 2) == 1 {
                    apply_horizontal_tunnel(&mut map, prev_x, new_x, prev_y);
                    apply_vertical_tunnel(&mut map, prev_y, new_y, new_x);
                } else {
                    apply_vertical_tunnel(&mut map, prev_y, new_y, prev_x);
                    apply_horizontal_tunnel(&mut map, prev_x, new_x, new_y);
                }
            }

            rooms.push(new_room);
        }
    }

    (rooms, map)
}

/// Adds a room of the specified size to the map
fn apply_room_to_map(room: &Rect, map: &mut [TileType]) {
    // +1 on the start keeps a wall border around each room's interior.
    for y in room.y1+1 ..= room.y2 {
        for x in room.x1+1 ..= room.x2 {
            map[xy_idx(x, y)] = TileType::Floor;
        }
    }
}

/// Carves a floor corridor along row `y` between x1 and x2 (either order).
fn apply_horizontal_tunnel(map: &mut [TileType], x1: i32, x2: i32, y: i32) {
    for x in min(x1, x2) ..= max(x1, x2) {
        let idx = xy_idx(x, y);
        // NOTE(review): `idx > 0` looks like an off-by-one — index 0 is a valid
        // tile that can never be carved; `idx < 80*50` alone would suffice.
        if idx > 0 && idx < 80*50 {
            map[idx as usize] = TileType::Floor;
        }
    }
}

/// Carves a floor corridor along column `x` between y1 and y2 (either order).
fn apply_vertical_tunnel(map: &mut [TileType], y1: i32, y2: i32, x: i32) {
    for y in min(y1, y2) ..= max(y1, y2) {
        let idx = xy_idx(x, y);
        // NOTE(review): same `idx > 0` quirk as the horizontal variant.
        if idx > 0 && idx < 80*50 {
            map[idx as usize] = TileType::Floor;
        }
    }
}
extern crate openssl;

use std::str;

use openssl::hash::{hash, MessageDigest};

/// Hashes a fixed message with SHA-256 and prints the message plus its digest.
fn main() {
    let data: &[u8] = b"Hello, world";

    // Bug fix: `hash` returns a Result; the original Debug-printed the
    // unhandled `Ok(...)` wrapper instead of the digest itself.
    let digest = hash(MessageDigest::sha256(), data).expect("SHA-256 hashing failed");

    println!("{}", str::from_utf8(data).expect("input is valid UTF-8"));

    // Render the digest bytes as lowercase hex for readable output.
    let hex: String = digest.iter().map(|b| format!("{:02x}", b)).collect();
    println!("hash: {}", hex);
}
// q0017_letter_combinations_of_a_phone_number
struct Solution;

impl Solution {
    /// Returns every letter string that `digits` can spell on a phone keypad.
    ///
    /// Matches the original behavior exactly: processing stops at the first
    /// digit outside '2'..='9', returning whatever combinations were built so
    /// far (empty input therefore yields an empty vector).
    pub fn letter_combinations(digits: String) -> Vec<String> {
        // Keypad table indexed by digit - '2'.
        const KEYS: [&str; 8] = ["abc", "def", "ghi", "jkl", "mno", "pqrs", "tuv", "wxyz"];

        let mut acc: Vec<String> = Vec::new();
        for &d in digits.as_bytes() {
            // Early return on any non-keypad digit, keeping results so far.
            if !(b'2'..=b'9').contains(&d) {
                return acc;
            }
            let letters = KEYS[(d - b'2') as usize];

            if acc.is_empty() {
                // Seed with single-letter strings for the first digit.
                acc = letters.chars().map(|c| c.to_string()).collect();
            } else {
                // Cross every existing prefix with this digit's letters.
                let expanded: Vec<String> = acc
                    .iter()
                    .flat_map(|prefix| {
                        letters.chars().map(move |c| format!("{}{}", prefix, c))
                    })
                    .collect();
                acc = expanded;
            }
        }
        acc
    }
}

#[cfg(test)]
mod tests {
    use super::Solution;

    #[test]
    fn it_works() {
        assert_eq!(
            vec!["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"],
            Solution::letter_combinations("23".to_string())
        );
    }
}
// NOTE(review): svd2rust-style auto-generated reader/writer proxies for the
// SDIO CMDCTL register. Field offsets/masks below come from the device SVD;
// do not hand-edit the bit arithmetic.
#[doc = "Reader of register CMDCTL"]
pub type R = crate::R<u32, super::CMDCTL>;
#[doc = "Writer for register CMDCTL"]
pub type W = crate::W<u32, super::CMDCTL>;
#[doc = "Register CMDCTL `reset()`'s with value 0"]
impl crate::ResetValue for super::CMDCTL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `CMDIDX`"]
pub type CMDIDX_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CMDIDX`"]
pub struct CMDIDX_W<'a> {
    w: &'a mut W,
}
impl<'a> CMDIDX_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x3f) | ((value as u32) & 0x3f);
        self.w
    }
}
#[doc = "Reader of field `CMDRESP`"]
pub type CMDRESP_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `CMDRESP`"]
pub struct CMDRESP_W<'a> {
    w: &'a mut W,
}
impl<'a> CMDRESP_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6);
        self.w
    }
}
#[doc = "Reader of field `INTWAIT`"]
pub type INTWAIT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INTWAIT`"]
pub struct INTWAIT_W<'a> {
    w: &'a mut W,
}
impl<'a> INTWAIT_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `WAITDEND`"]
pub type WAITDEND_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `WAITDEND`"]
pub struct WAITDEND_W<'a> {
    w: &'a mut W,
}
impl<'a> WAITDEND_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
#[doc = "Reader of field `CSMEN`"]
pub type CSMEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CSMEN`"]
pub struct CSMEN_W<'a> {
    w: &'a mut W,
}
impl<'a> CSMEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
#[doc = "Reader of field `SUSPEND`"]
pub type SUSPEND_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SUSPEND`"]
pub struct SUSPEND_W<'a> {
    w: &'a mut W,
}
impl<'a> SUSPEND_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
#[doc = "Reader of field `ENCMDC`"]
pub type ENCMDC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ENCMDC`"]
pub struct ENCMDC_W<'a> {
    w: &'a mut W,
}
impl<'a> ENCMDC_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
#[doc = "Reader of field `NINTEN`"]
pub type NINTEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `NINTEN`"]
pub struct NINTEN_W<'a> {
    w: &'a mut W,
}
impl<'a> NINTEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
        self.w
    }
}
#[doc = "Reader of field `ATAEN`"]
pub type ATAEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ATAEN`"]
pub struct ATAEN_W<'a> {
    w: &'a mut W,
}
impl<'a> ATAEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:5 - Command index"]
    #[inline(always)]
    pub fn cmdidx(&self) -> CMDIDX_R {
        CMDIDX_R::new((self.bits & 0x3f) as u8)
    }
    #[doc = "Bits 6:7 - Command response type bits"]
    #[inline(always)]
    pub fn cmdresp(&self) -> CMDRESP_R {
        CMDRESP_R::new(((self.bits >> 6) & 0x03) as u8)
    }
    #[doc = "Bit 8 - Interrupt wait instead of timeout"]
    #[inline(always)]
    pub fn intwait(&self) -> INTWAIT_R {
        INTWAIT_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - Waits for ends of data transfer"]
    #[inline(always)]
    pub fn waitdend(&self) -> WAITDEND_R {
        WAITDEND_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10 - Command state machine (CSM) enable bit"]
    #[inline(always)]
    pub fn csmen(&self) -> CSMEN_R {
        CSMEN_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11 - SD I/O suspend command(SD I/O only)"]
    #[inline(always)]
    pub fn suspend(&self) -> SUSPEND_R {
        SUSPEND_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 12 - CMD completion signal enabled (CE-ATA only)"]
    #[inline(always)]
    pub fn encmdc(&self) -> ENCMDC_R {
        ENCMDC_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 13 - No CE-ATA Interrupt (CE-ATA only)"]
    #[inline(always)]
    pub fn ninten(&self) -> NINTEN_R {
        NINTEN_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bit 14 - CE-ATA command enable(CE-ATA only)"]
    #[inline(always)]
    pub fn ataen(&self) -> ATAEN_R {
        ATAEN_R::new(((self.bits >> 14) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bits 0:5 - Command index"]
    #[inline(always)]
    pub fn cmdidx(&mut self) -> CMDIDX_W {
        CMDIDX_W { w: self }
    }
    #[doc = "Bits 6:7 - Command response type bits"]
    #[inline(always)]
    pub fn cmdresp(&mut self) -> CMDRESP_W {
        CMDRESP_W { w: self }
    }
    #[doc = "Bit 8 - Interrupt wait instead of timeout"]
    #[inline(always)]
    pub fn intwait(&mut self) -> INTWAIT_W {
        INTWAIT_W { w: self }
    }
    #[doc = "Bit 9 - Waits for ends of data transfer"]
    #[inline(always)]
    pub fn waitdend(&mut self) -> WAITDEND_W {
        WAITDEND_W { w: self }
    }
    #[doc = "Bit 10 - Command state machine (CSM) enable bit"]
    #[inline(always)]
    pub fn csmen(&mut self) -> CSMEN_W {
        CSMEN_W { w: self }
    }
    #[doc = "Bit 11 - SD I/O suspend command(SD I/O only)"]
    #[inline(always)]
    pub fn suspend(&mut self) -> SUSPEND_W {
        SUSPEND_W { w: self }
    }
    #[doc = "Bit 12 - CMD completion signal enabled (CE-ATA only)"]
    #[inline(always)]
    pub fn encmdc(&mut self) -> ENCMDC_W {
        ENCMDC_W { w: self }
    }
    #[doc = "Bit 13 - No CE-ATA Interrupt (CE-ATA only)"]
    #[inline(always)]
    pub fn ninten(&mut self) -> NINTEN_W {
        NINTEN_W { w: self }
    }
    #[doc = "Bit 14 - CE-ATA command enable(CE-ATA only)"]
    #[inline(always)]
    pub fn ataen(&mut self) -> ATAEN_W {
        ATAEN_W { w: self }
    }
}
// Windows Multimedia (WinMM) backend for MIDI input/output (midir-style).
// Wraps the raw midiIn*/midiOut* Win32 API behind safe connection types.
use std::alloc::{alloc, dealloc, Layout};
use std::ffi::OsString;
use std::io::{stderr, Write};
use std::mem::MaybeUninit;
use std::os::windows::ffi::OsStringExt;
use std::ptr::null_mut;
use std::sync::Mutex;
use std::thread::sleep;
use std::time::Duration;
use std::{mem, ptr, slice};

use windows::core::PSTR;
use windows::Win32::Media::Audio::{
    midiInAddBuffer, midiInClose, midiInGetDevCapsW, midiInGetNumDevs, midiInMessage, midiInOpen,
    midiInPrepareHeader, midiInReset, midiInStart, midiInStop, midiInUnprepareHeader, midiOutClose,
    midiOutGetDevCapsW, midiOutGetNumDevs, midiOutLongMsg, midiOutMessage, midiOutOpen,
    midiOutPrepareHeader, midiOutReset, midiOutShortMsg, midiOutUnprepareHeader, CALLBACK_FUNCTION,
    CALLBACK_NULL, HMIDIIN, HMIDIOUT, MIDIERR_NOTREADY, MIDIERR_STILLPLAYING, MIDIHDR, MIDIINCAPSW,
    MIDIOUTCAPSW,
};
use windows::Win32::Media::Multimedia::{DRV_QUERYDEVICEINTERFACE, DRV_QUERYDEVICEINTERFACESIZE};
use windows::Win32::Media::{MMSYSERR_ALLOCATED, MMSYSERR_BADDEVICEID, MMSYSERR_NOERROR};

// Win32 type aliases, kept in their native spelling for readability against the API docs.
#[allow(non_camel_case_types)]
type ULONG = u32;
#[allow(non_camel_case_types)]
type UINT = u32;
#[allow(non_camel_case_types)]
type DWORD = u32;
#[allow(non_camel_case_types)]
type DWORD_PTR = usize;

use errors::*;
use {Ignore, MidiMessage};

mod handler;

// Size/count of the preallocated sysex receive buffers handed to the driver.
const MIDIR_SYSEX_BUFFER_SIZE: usize = 1024;
const MIDIR_SYSEX_BUFFER_COUNT: usize = 4;

// helper for string conversion: reads a NUL-terminated UTF-16 buffer
// (at most max_len code units) into an OsString.
fn from_wide_ptr(ptr: *const u16, max_len: usize) -> OsString {
    unsafe {
        assert!(!ptr.is_null());
        // Panics (unwrap) if no NUL terminator is found within max_len.
        let len = (0..max_len as isize)
            .position(|i| *ptr.offset(i) == 0)
            .unwrap();
        let slice = slice::from_raw_parts(ptr, len);
        OsString::from_wide(slice)
    }
}

#[derive(Debug)]
pub struct MidiInput {
    ignore_flags: Ignore,
}

/// A MIDI input port, identified by display name plus driver interface id
/// (equality is by interface id only — see PartialEq below).
#[derive(Clone)]
pub struct MidiInputPort {
    name: String,
    interface_id: Box<[u16]>,
}

impl PartialEq for MidiInputPort {
    fn eq(&self, other: &Self) -> bool {
        self.interface_id == other.interface_id
    }
}

pub struct MidiInputConnection<T> {
    handler_data: Box<HandlerData<T>>,
}

impl MidiInputPort {
    fn count() -> UINT {
        unsafe { midiInGetNumDevs() }
    }

    /// Queries the driver for the device-interface id (a wide string) of the
    /// given port: first its byte size, then the id itself.
    fn interface_id(port_number: UINT) -> Result<Box<[u16]>, PortInfoError> {
        let mut buffer_size: ULONG = 0;
        let result = unsafe {
            midiInMessage(
                HMIDIIN(port_number as isize),
                DRV_QUERYDEVICEINTERFACESIZE,
                &mut buffer_size as *mut _ as DWORD_PTR,
                0,
            )
        };
        if result == MMSYSERR_BADDEVICEID {
            return Err(PortInfoError::PortNumberOutOfRange);
        } else if result != MMSYSERR_NOERROR {
            return Err(PortInfoError::CannotRetrievePortName);
        }
        // buffer_size is in bytes; the buffer holds u16 code units.
        let mut buffer = Vec::<u16>::with_capacity(buffer_size as usize / 2);
        unsafe {
            let result = midiInMessage(
                HMIDIIN(port_number as isize),
                DRV_QUERYDEVICEINTERFACE,
                buffer.as_mut_ptr() as usize,
                buffer_size as DWORD_PTR,
            );
            if result == MMSYSERR_BADDEVICEID {
                return Err(PortInfoError::PortNumberOutOfRange);
            } else if result != MMSYSERR_NOERROR {
                return Err(PortInfoError::CannotRetrievePortName);
            }
            // The driver has filled the buffer; expose the written elements.
            buffer.set_len(buffer_size as usize / 2);
        }
        //println!("{}", from_wide_ptr(buffer.as_ptr(), buffer.len()).to_string_lossy().into_owned());
        Ok(buffer.into_boxed_slice())
    }

    /// Reads the port's display name from MIDIINCAPSW::szPname (max 32 wchars).
    fn name(port_number: UINT) -> Result<String, PortInfoError> {
        let mut device_caps: MaybeUninit<MIDIINCAPSW> = MaybeUninit::uninit();
        let result = unsafe {
            midiInGetDevCapsW(
                port_number as usize,
                device_caps.as_mut_ptr(),
                mem::size_of::<MIDIINCAPSW>() as u32,
            )
        };
        if result == MMSYSERR_BADDEVICEID {
            return Err(PortInfoError::PortNumberOutOfRange);
        } else if result != MMSYSERR_NOERROR {
            return Err(PortInfoError::CannotRetrievePortName);
        }
        let device_caps = unsafe { device_caps.assume_init() };
        let pname_ptr: *const [u16; 32] = std::ptr::addr_of!(device_caps.szPname);
        let output = from_wide_ptr(pname_ptr as *const _, 32)
            .to_string_lossy()
            .into_owned();
        Ok(output)
    }

    fn from_port_number(port_number: UINT) -> Result<Self, PortInfoError> {
        Ok(MidiInputPort {
            name: Self::name(port_number)?,
            interface_id: Self::interface_id(port_number)?,
        })
    }

    /// Re-resolves this port to its current numeric index (port numbers can
    /// shift when devices are plugged/unplugged); matches name AND interface id.
    fn current_port_number(&self) -> Option<UINT> {
        for i in 0..Self::count() {
            if let Ok(name) = Self::name(i) {
                if name != self.name {
                    continue;
                }
                if let Ok(id) = Self::interface_id(i) {
                    if id == self.interface_id {
                        return Some(i);
                    }
                }
            }
        }
        None
    }
}

// Raw MIDIHDR pointers for the sysex receive buffers; Send is asserted because
// ownership is transferred to/from the driver under controlled conditions.
struct SysexBuffer([*mut MIDIHDR; MIDIR_SYSEX_BUFFER_COUNT]);
unsafe impl Send for SysexBuffer {}

struct MidiInHandle(Mutex<HMIDIIN>);
unsafe impl Send for MidiInHandle {}

/// This is all the data that is stored on the heap as long as a connection
/// is opened and passed to the callback handler.
///
/// It is important that `user_data` is the last field to not influence
/// offsets after monomorphization.
struct HandlerData<T> {
    message: MidiMessage,
    sysex_buffer: SysexBuffer,
    in_handle: Option<MidiInHandle>,
    ignore_flags: Ignore,
    callback: Box<dyn FnMut(u64, &[u8], &mut T) + Send + 'static>,
    user_data: Option<T>,
}

impl MidiInput {
    pub fn new(_client_name: &str) -> Result<Self, InitError> {
        Ok(MidiInput {
            ignore_flags: Ignore::None,
        })
    }

    pub fn ignore(&mut self, flags: Ignore) {
        self.ignore_flags = flags;
    }

    /// Enumerates all currently visible input ports; ports that fail to
    /// report name/interface id are silently skipped.
    pub(crate) fn ports_internal(&self) -> Vec<::common::MidiInputPort> {
        let count = MidiInputPort::count();
        let mut result = Vec::with_capacity(count as usize);
        for i in 0..count {
            let port = match MidiInputPort::from_port_number(i) {
                Ok(p) => p,
                Err(_) => continue,
            };
            result.push(::common::MidiInputPort { imp: port });
        }
        result
    }

    pub fn port_count(&self) -> usize {
        MidiInputPort::count() as usize
    }

    pub fn port_name(&self, port: &MidiInputPort) -> Result<String, PortInfoError> {
        Ok(port.name.clone())
    }

    /// Opens `port`, installs `callback` as the WinMM callback (invoked with a
    /// timestamp, the message bytes, and `data`), registers the sysex buffers
    /// and starts input. Consumes `self`; it is returned inside the error on failure.
    pub fn connect<F, T: Send>(
        self,
        port: &MidiInputPort,
        _port_name: &str,
        callback: F,
        data: T,
    ) -> Result<MidiInputConnection<T>, ConnectError<MidiInput>>
    where
        F: FnMut(u64, &[u8], &mut T) + Send + 'static,
    {
        let port_number = match port.current_port_number() {
            Some(p) => p,
            None => return Err(ConnectError::new(ConnectErrorKind::InvalidPort, self)),
        };
        // Boxed so its address is stable; the raw pointer below is handed to
        // the driver as the callback instance parameter.
        let mut handler_data = Box::new(HandlerData {
            message: MidiMessage::new(),
            sysex_buffer: SysexBuffer([null_mut(); MIDIR_SYSEX_BUFFER_COUNT]),
            in_handle: None,
            ignore_flags: self.ignore_flags,
            callback: Box::new(callback),
            user_data: Some(data),
        });
        let mut in_handle: MaybeUninit<HMIDIIN> = MaybeUninit::uninit();
        let handler_data_ptr: *mut HandlerData<T> = &mut *handler_data;
        let result = unsafe {
            midiInOpen(
                in_handle.as_mut_ptr(),
                port_number as UINT,
                handler::handle_input::<T> as DWORD_PTR,
                handler_data_ptr as DWORD_PTR,
                CALLBACK_FUNCTION,
            )
        };
        if result == MMSYSERR_ALLOCATED {
            return Err(ConnectError::other(
                "could not create Windows MM MIDI input port (MMSYSERR_ALLOCATED)",
                self,
            ));
        } else if result != MMSYSERR_NOERROR {
            return Err(ConnectError::other(
                "could not create Windows MM MIDI input port",
                self,
            ));
        }
        let in_handle = unsafe { in_handle.assume_init() };

        // Allocate and init the sysex buffers.
        for i in 0..MIDIR_SYSEX_BUFFER_COUNT {
            handler_data.sysex_buffer.0[i] = Box::into_raw(Box::new(MIDIHDR {
                lpData: PSTR(unsafe {
                    alloc(Layout::from_size_align_unchecked(
                        MIDIR_SYSEX_BUFFER_SIZE,
                        1,
                    ))
                }),
                dwBufferLength: MIDIR_SYSEX_BUFFER_SIZE as u32,
                dwBytesRecorded: 0,
                dwUser: i as DWORD_PTR, // We use the dwUser parameter as buffer indicator
                dwFlags: 0,
                lpNext: ptr::null_mut(),
                reserved: 0,
                dwOffset: 0,
                dwReserved: unsafe { mem::zeroed() },
            }));
            // TODO: are those buffers ever freed if an error occurs here (although these calls probably only fail with out-of-memory)?
            // TODO: close port in case of error?
            let result = unsafe {
                midiInPrepareHeader(
                    in_handle,
                    handler_data.sysex_buffer.0[i],
                    mem::size_of::<MIDIHDR>() as u32,
                )
            };
            if result != MMSYSERR_NOERROR {
                return Err(ConnectError::other(
                    "could not initialize Windows MM MIDI input port (PrepareHeader)",
                    self,
                ));
            }
            // Register the buffer.
            let result = unsafe {
                midiInAddBuffer(
                    in_handle,
                    handler_data.sysex_buffer.0[i],
                    mem::size_of::<MIDIHDR>() as u32,
                )
            };
            if result != MMSYSERR_NOERROR {
                return Err(ConnectError::other(
                    "could not initialize Windows MM MIDI input port (AddBuffer)",
                    self,
                ));
            }
        }
        handler_data.in_handle = Some(MidiInHandle(Mutex::new(in_handle)));

        // We can safely access (a copy of) `in_handle` here, although
        // it has been copied into the Mutex already, because the callback
        // has not been called yet.
        let result = unsafe { midiInStart(in_handle) };
        if result != MMSYSERR_NOERROR {
            unsafe { midiInClose(in_handle) };
            return Err(ConnectError::other(
                "could not start Windows MM MIDI input port",
                self,
            ));
        }

        Ok(MidiInputConnection {
            handler_data: handler_data,
        })
    }
}

impl<T> MidiInputConnection<T> {
    /// Closes the connection, returning a fresh `MidiInput` (ignore flags
    /// preserved) and the user data back to the caller.
    pub fn close(mut self) -> (MidiInput, T) {
        self.close_internal();
        (
            MidiInput {
                ignore_flags: self.handler_data.ignore_flags,
            },
            self.handler_data.user_data.take().unwrap(),
        )
    }

    /// Stops input, unprepares and frees the sysex buffers, and closes the
    /// device handle. Holds the handle lock throughout so the WinMM callback
    /// cannot race with shutdown.
    fn close_internal(&mut self) {
        // for information about this lock, see https://groups.google.com/forum/#!topic/mididev/6OUjHutMpEo
        let in_handle_lock = self
            .handler_data
            .in_handle
            .as_ref()
            .unwrap()
            .0
            .lock()
            .unwrap();

        // TODO: Call both reset and stop here? The difference seems to be that
        // reset "returns all pending input buffers to the callback function"
        unsafe {
            midiInReset(*in_handle_lock);
            midiInStop(*in_handle_lock);
        }

        for i in 0..MIDIR_SYSEX_BUFFER_COUNT {
            let result;
            unsafe {
                result = midiInUnprepareHeader(
                    *in_handle_lock,
                    self.handler_data.sysex_buffer.0[i],
                    mem::size_of::<MIDIHDR>() as u32,
                );
                // Free the data buffer allocated in connect()...
                dealloc(
                    (*self.handler_data.sysex_buffer.0[i]).lpData.0 as *mut _,
                    Layout::from_size_align_unchecked(MIDIR_SYSEX_BUFFER_SIZE, 1),
                );
                // recreate the Box so that it will be dropped/deallocated at the end of this scope
                let _ = Box::from_raw(self.handler_data.sysex_buffer.0[i]);
            }

            if result != MMSYSERR_NOERROR {
                let _ = writeln!(stderr(), "Warning: Ignoring error shutting down Windows MM input port (UnprepareHeader).");
            }
        }

        unsafe { midiInClose(*in_handle_lock) };
    }
}

impl<T> Drop for MidiInputConnection<T> {
    fn drop(&mut self) {
        // If user_data has been emptied, we know that we already have closed the connection
        if self.handler_data.user_data.is_some() {
            self.close_internal()
        }
    }
}

#[derive(Debug)]
pub struct MidiOutput;

/// A MIDI output port; like `MidiInputPort`, equality is by interface id.
#[derive(Clone)]
pub struct MidiOutputPort {
    name: String,
    interface_id: Box<[u16]>,
}

impl PartialEq for MidiOutputPort {
    fn eq(&self, other: &Self) -> bool {
        self.interface_id == other.interface_id
    }
}

pub struct MidiOutputConnection {
    out_handle: HMIDIOUT,
}

unsafe impl Send for MidiOutputConnection {}

impl MidiOutputPort {
    fn count() -> UINT {
        unsafe { midiOutGetNumDevs() }
    }

    /// Output-side twin of `MidiInputPort::interface_id` (see that fn for details).
    fn interface_id(port_number: UINT) -> Result<Box<[u16]>, PortInfoError> {
        let mut buffer_size: ULONG = 0;
        let result = unsafe {
            midiOutMessage(
                HMIDIOUT(port_number as isize),
                DRV_QUERYDEVICEINTERFACESIZE,
                &mut buffer_size as *mut _ as DWORD_PTR,
                0,
            )
        };
        if result == MMSYSERR_BADDEVICEID {
            return Err(PortInfoError::PortNumberOutOfRange);
        } else if result != MMSYSERR_NOERROR {
            return Err(PortInfoError::CannotRetrievePortName);
        }
        let mut buffer = Vec::<u16>::with_capacity(buffer_size as usize / 2);
        unsafe {
            let result = midiOutMessage(
                HMIDIOUT(port_number as isize),
                DRV_QUERYDEVICEINTERFACE,
                buffer.as_mut_ptr() as DWORD_PTR,
                buffer_size as DWORD_PTR,
            );
            if result == MMSYSERR_BADDEVICEID {
                return Err(PortInfoError::PortNumberOutOfRange);
            } else if result != MMSYSERR_NOERROR {
                return Err(PortInfoError::CannotRetrievePortName);
            }
            buffer.set_len(buffer_size as usize / 2);
        }
        //println!("{}", from_wide_ptr(buffer.as_ptr(), buffer.len()).to_string_lossy().into_owned());
        Ok(buffer.into_boxed_slice())
    }

    /// Reads the port's display name from MIDIOUTCAPSW::szPname (max 32 wchars).
    fn name(port_number: UINT) -> Result<String, PortInfoError> {
        let mut device_caps: MaybeUninit<MIDIOUTCAPSW> = MaybeUninit::uninit();
        let result = unsafe {
            midiOutGetDevCapsW(
                port_number as usize,
                device_caps.as_mut_ptr(),
                mem::size_of::<MIDIOUTCAPSW>() as u32,
            )
        };
        if result == MMSYSERR_BADDEVICEID {
            return Err(PortInfoError::PortNumberOutOfRange);
        } else if result != MMSYSERR_NOERROR {
            return Err(PortInfoError::CannotRetrievePortName);
        }
        let device_caps = unsafe { device_caps.assume_init() };
        let pname_ptr: *const [u16; 32] = std::ptr::addr_of!(device_caps.szPname);
        let output = from_wide_ptr(pname_ptr as *const _, 32)
            .to_string_lossy()
            .into_owned();
        Ok(output)
    }

    fn from_port_number(port_number: UINT) -> Result<Self, PortInfoError> {
        Ok(MidiOutputPort {
            name: Self::name(port_number)?,
            interface_id: Self::interface_id(port_number)?,
        })
    }

    /// Re-resolves this port to its current numeric index (see input variant).
    fn current_port_number(&self) -> Option<UINT> {
        for i in 0..Self::count() {
            if let Ok(name) = Self::name(i) {
                if name != self.name {
                    continue;
                }
                if let Ok(id) = Self::interface_id(i) {
                    if id == self.interface_id {
                        return Some(i);
                    }
                }
            }
        }
        None
    }
}

impl MidiOutput {
    pub fn new(_client_name: &str) -> Result<Self, InitError> {
        Ok(MidiOutput)
    }

    /// Enumerates all currently visible output ports; failing ports are skipped.
    pub(crate) fn ports_internal(&self) -> Vec<::common::MidiOutputPort> {
        let count = MidiOutputPort::count();
        let mut result = Vec::with_capacity(count as usize);
        for i in 0..count {
            let port = match MidiOutputPort::from_port_number(i) {
                Ok(p) => p,
                Err(_) => continue,
            };
            result.push(::common::MidiOutputPort { imp: port });
        }
        result
    }

    pub fn port_count(&self) -> usize {
        MidiOutputPort::count() as usize
    }

    pub fn port_name(&self, port: &MidiOutputPort) -> Result<String, PortInfoError> {
        Ok(port.name.clone())
    }

    /// Opens `port` for output (no callback). Consumes `self`; returned
    /// inside the error on failure.
    pub fn connect(
        self,
        port: &MidiOutputPort,
        _port_name: &str,
    ) -> Result<MidiOutputConnection, ConnectError<MidiOutput>> {
        let port_number = match port.current_port_number() {
            Some(p) => p,
            None => return Err(ConnectError::new(ConnectErrorKind::InvalidPort, self)),
        };
        let mut out_handle: MaybeUninit<HMIDIOUT> = MaybeUninit::uninit();
        let result = unsafe {
            midiOutOpen(
                out_handle.as_mut_ptr(),
                port_number as UINT,
                0,
                0,
                CALLBACK_NULL,
            )
        };
        if result == MMSYSERR_ALLOCATED {
            return Err(ConnectError::other(
                "could not create Windows MM MIDI output port (MMSYSERR_ALLOCATED)",
                self,
            ));
        } else if result != MMSYSERR_NOERROR {
            return Err(ConnectError::other(
                "could not create Windows MM MIDI output port",
                self,
            ));
        }
        Ok(MidiOutputConnection {
            out_handle: unsafe { out_handle.assume_init() },
        })
    }
}

impl MidiOutputConnection {
    pub fn close(self) -> MidiOutput {
        // The actual closing is done by the implementation of Drop
        MidiOutput // In this API this is a noop
    }

    /// Sends one MIDI message. 0xF0-initiated messages go out as sysex via
    /// midiOutLongMsg (buffered, with busy-wait retries on NOTREADY /
    /// STILLPLAYING); everything else is packed into a DWORD for midiOutShortMsg.
    pub fn send(&mut self, message: &[u8]) -> Result<(), SendError> {
        let nbytes = message.len();
        if nbytes == 0 {
            return Err(SendError::InvalidData(
                "message to be sent must not be empty",
            ));
        }

        if message[0] == 0xF0 {
            // Sysex message
            // Allocate buffer for sysex data and copy message
            let mut buffer = message.to_vec();

            // Create and prepare MIDIHDR structure.
            let mut sysex = MIDIHDR {
                lpData: PSTR(buffer.as_mut_ptr()),
                dwBufferLength: nbytes as u32,
                dwBytesRecorded: 0,
                dwUser: 0,
                dwFlags: 0,
                lpNext: ptr::null_mut(),
                reserved: 0,
                dwOffset: 0,
                dwReserved: unsafe { mem::zeroed() },
            };
            let result = unsafe {
                midiOutPrepareHeader(
                    self.out_handle,
                    &mut sysex,
                    mem::size_of::<MIDIHDR>() as u32,
                )
            };
            if result != MMSYSERR_NOERROR {
                return Err(SendError::Other(
                    "preparation for sending sysex message failed (OutPrepareHeader)",
                ));
            }

            // Send the message.
            loop {
                let result = unsafe {
                    midiOutLongMsg(
                        self.out_handle,
                        &mut sysex,
                        mem::size_of::<MIDIHDR>() as u32,
                    )
                };
                if result == MIDIERR_NOTREADY {
                    // Device busy: back off briefly and retry.
                    sleep(Duration::from_millis(1));
                    continue;
                } else {
                    if result != MMSYSERR_NOERROR {
                        return Err(SendError::Other("sending sysex message failed"));
                    }
                    break;
                }
            }

            // Wait until the driver has released the buffer before it goes out of scope.
            loop {
                let result = unsafe {
                    midiOutUnprepareHeader(
                        self.out_handle,
                        &mut sysex,
                        mem::size_of::<MIDIHDR>() as u32,
                    )
                };
                if result == MIDIERR_STILLPLAYING {
                    sleep(Duration::from_millis(1));
                    continue;
                } else {
                    break;
                }
            }
        } else {
            // Channel or system message.
            // Make sure the message size isn't too big.
            if nbytes > 3 {
                return Err(SendError::InvalidData(
                    "non-sysex message must not be longer than 3 bytes",
                ));
            }

            // Pack MIDI bytes into double word.
            // NOTE(review): writing through a *mut u8 derived from `&packet`
            // (a shared reference) is UB in Rust's memory model; this should
            // build the u32 from shifts or use a `mut` local — confirm and fix.
            let packet: u32 = 0;
            let ptr = &packet as *const u32 as *mut u8;
            for i in 0..nbytes {
                unsafe { *ptr.offset(i as isize) = message[i] };
            }

            // Send the message immediately.
            loop {
                let result = unsafe { midiOutShortMsg(self.out_handle, packet) };
                if result == MIDIERR_NOTREADY {
                    sleep(Duration::from_millis(1));
                    continue;
                } else {
                    if result != MMSYSERR_NOERROR {
                        return Err(SendError::Other("sending non-sysex message failed"));
                    }
                    break;
                }
            }
        }
        Ok(())
    }
}

impl Drop for MidiOutputConnection {
    fn drop(&mut self) {
        unsafe {
            midiOutReset(self.out_handle);
            midiOutClose(self.out_handle);
        }
    }
}
/// Demonstrates mutable borrowing of a `String`.
///
/// Prints the string, appends to it through a mutable reference,
/// then prints the modified value again.
fn main() {
    {
        // The binding must be mutable so a `&mut` can be taken below.
        let mut greeting = String::from("hello");
        println!("{:?}", greeting);

        // Taking a mutable reference works just like assigning to a variable.
        let editor = &mut greeting;
        editor.push_str(" god");

        // The mutable borrow is no longer used, so reading `greeting` is fine.
        println!("{:?}", greeting);
    }
}
use actix_web::{delete, get, post, put, web, HttpResponse, Responder};
use serde_json::json;

use crate::error_handler::CustomError;
use crate::dogs::{Dog, Dogs};

/// GET /dogs — list every dog.
#[get("/dogs")]
async fn find_all() -> Result<HttpResponse, CustomError> {
    Ok(HttpResponse::Ok().json(Dogs::find_all()?))
}

/// GET /dogs/{id} — fetch a single dog by id.
#[get("/dogs/{id}")]
async fn find(id: web::Path<i32>) -> Result<HttpResponse, CustomError> {
    let found = Dogs::find(id.into_inner())?;
    Ok(HttpResponse::Ok().json(found))
}

/// POST /dogs — create a new dog from the JSON body.
#[post("/dogs")]
async fn create(dog: web::Json<Dog>) -> Result<HttpResponse, CustomError> {
    let created = Dogs::create(dog.into_inner())?;
    Ok(HttpResponse::Ok().json(created))
}

/// PUT /dogs/{id} — replace the dog at `id` with the JSON body.
#[put("/dogs/{id}")]
async fn update(
    id: web::Path<i32>,
    dog: web::Json<Dog>,
) -> Result<HttpResponse, CustomError> {
    let updated = Dogs::update(id.into_inner(), dog.into_inner())?;
    Ok(HttpResponse::Ok().json(updated))
}

/// DELETE /dogs/{id} — remove a dog and echo what was deleted.
#[delete("/dogs/{id}")]
async fn delete(id: web::Path<i32>) -> Result<HttpResponse, CustomError> {
    let deleted_dog = Dogs::delete(id.into_inner())?;
    Ok(HttpResponse::Ok().json(json!({ "deleted": deleted_dog })))
}

/// Register every /dogs route on the service configuration.
pub fn init_routes(config: &mut web::ServiceConfig) {
    config
        .service(find_all)
        .service(find)
        .service(create)
        .service(update)
        .service(delete);
}
use rand::rngs::ThreadRng;
use rand::Rng;
use std::rc::Rc;

use crate::material::{dielectric::Dielectric, lambertian::Lambertian, metal::Metal, Material};
use crate::shape::{moving_sphere::MovingSphere, shape_list::ShapeList, sphere::Sphere};
use crate::texture::checkers::Checkers;
use crate::vec3::{Color, Point3, Vec3};

/// Build the randomized demo scene: a checker-textured ground sphere, a grid
/// of small random spheres (diffuse, metal or glass), and three large feature
/// spheres (glass, diffuse, metal).
///
/// `rng` drives every random choice, so the resulting scene depends on the
/// generator's state at call time — the exact order of `rng` calls below is
/// part of the scene's reproducibility.
pub fn build(rng: &mut ThreadRng) -> ShapeList {
    let mut world = ShapeList::default();
    // Huge sphere acting as the ground, with a two-color checker texture.
    let ground: Rc<dyn Material> = Rc::new(Lambertian::new(Rc::new(Checkers::from_colors(
        Color::new(0.2, 0.3, 0.1),
        Color::new(0.9, 0.9, 0.9),
    ))));
    world.add(Rc::new(Sphere::new(
        Point3::new(0.0, -1000.0, 0.0),
        1000.0,
        ground,
    )));
    // Scatter small spheres over a 22x22 grid in x/z, jittered by up to 0.9.
    for a in -11..11 {
        for b in -11..11 {
            let center = Point3::new(
                a as f64 + 0.9 * rng.gen::<f64>(),
                0.2,
                b as f64 + 0.9 * rng.gen::<f64>(),
            );
            // Skip spheres too close to (4, 0.2, 0) where a big sphere sits
            // (assumes `norm()` is the Euclidean length — TODO confirm).
            if (center - Point3::new(4.0, 0.2, 0.0)).norm() > 0.9 {
                let choose_mat = rng.gen::<f64>();
                if choose_mat < 0.8 {
                    // 80%: diffuse sphere that also moves upward over the
                    // shutter interval [0, 1] (motion blur).
                    let color = Color::rand(rng) * Color::rand(rng);
                    let material: Rc<dyn Material> = Rc::new(Lambertian::from_color(color));
                    let center2 = center + Vec3::new(0.0, 0.5 * rng.gen::<f64>(), 0.0);
                    world.add(Rc::new(MovingSphere::new(
                        center, center2, 0.0, 1.0, 0.2, material,
                    )));
                } else if choose_mat < 0.95 {
                    // 15%: metal sphere with random tint and fuzz in [0, 0.5).
                    let color = Color::rand_between(0.5, 1.0, rng);
                    let fuzz = 0.5 * rng.gen::<f64>();
                    let material: Rc<dyn Material> = Rc::new(Metal::new(color, fuzz));
                    world.add(Rc::new(Sphere::new(center, 0.2, material)));
                } else {
                    // 5%: glass sphere (refraction index 1.5).
                    let material: Rc<dyn Material> = Rc::new(Dielectric::new(1.5));
                    world.add(Rc::new(Sphere::new(center, 0.2, material)));
                }
            }
        }
    }
    // Three large feature spheres: glass at the origin column, diffuse to the
    // left, polished metal to the right.
    let material: Rc<dyn Material> = Rc::new(Dielectric::new(1.5));
    world.add(Rc::new(Sphere::new(
        Point3::new(0.0, 1.0, 0.0),
        1.0,
        material,
    )));
    let material: Rc<dyn Material> = Rc::new(Lambertian::from_color(Color::new(0.4, 0.2, 1.0)));
    world.add(Rc::new(Sphere::new(
        Point3::new(-4.0, 1.0, 0.0),
        1.0,
        material,
    )));
    let material: Rc<dyn Material> = Rc::new(Metal::new(Color::new(0.7, 0.6, 0.5), 0.0));
    world.add(Rc::new(Sphere::new(
        Point3::new(4.0, 1.0, 0.0),
        1.0,
        material,
    )));
    world
}
//! //! This module gives a handler for the page fault exception. //! use super::InterruptStackFrame; bitflags! { flags PageFaultErrorCode: usize { const PROTECTION_VIOLATION = 1 << 0, const WRITE = 1 << 1, const USER_MODE = 1 << 2, const MALFORMED_TABLE = 1 << 3, const INSTRUCTION_FETCH = 1 << 4, } } #[allow(unused_variables)] pub extern "C" fn page_fault_handler(esf: *const InterruptStackFrame, e: usize) -> ! { unsafe { use ::arch::common::control_registers::cr2; panic_args!("Page fault (Error code: 0b{:b}) \ \n\t\tWhile accessing 0x{:x}\ \n\t\tOperation: {:#?}", e, cr2(), PageFaultErrorCode::from_bits(e).unwrap()); } }
mod input;
mod log;

pub use log::Level;

use crossterm::cursor::MoveTo;
use crossterm::event::{self, Event as TermEvent, EventStream, KeyCode, KeyEvent, KeyModifiers};
use crossterm::style::{Color, Print, PrintStyledContent, ResetColor, SetForegroundColor, Stylize};
use crossterm::terminal::{
    self, Clear, ClearType, DisableLineWrap, EnterAlternateScreen, LeaveAlternateScreen,
};
use futures::stream::StreamExt;
use input::Input;
use log::Log;
use std::collections::VecDeque;
use std::io::{self, Error, Stdout};

/// Terminal UI made of a log area and an input line, driven by crossterm
/// events read asynchronously from an `EventStream`.
pub struct Screen {
    // Handle the widgets render into.
    stdout: Stdout,
    // Async source of terminal events (keys, mouse, resize).
    stream: EventStream,
    // Current terminal height, updated on resize events.
    height: u16,
    // A pending event consumed before polling the stream; seeded with a
    // synthetic Resize at startup (see `new`).
    event: Option<TermEvent>,
    log: Log,
    input: Input,
}

impl Screen {
    /// Switch the terminal to the alternate screen in raw mode and build the
    /// widgets. Fails if any terminal call fails.
    pub fn new() -> Result<Self, Error> {
        // Enter alternate screen so that whatever state the users shell was in
        // will not be trashed. This is what vim does, for example.
        let mut stdout = io::stdout();
        crossterm::execute!(stdout, EnterAlternateScreen)?;
        crossterm::execute!(stdout, DisableLineWrap)?;
        let (width, height) = terminal::size()?;
        terminal::enable_raw_mode()?;
        Ok(Self {
            stdout,
            stream: EventStream::new(),
            height,
            // Synthetic resize so the first `process` call sees the current
            // size — presumably to trigger an initial layout; TODO confirm.
            event: Some(TermEvent::Resize(width, height)),
            log: Log::new(),
            input: Input::new(),
        })
    }

    /// Append a line to the log widget.
    pub fn log(&mut self, level: Level, contents: String) {
        self.log.log(level, contents);
    }

    /// Wait for the next terminal event and translate it into an app-level
    /// `Event`.
    ///
    /// Returns `Ok(Some(Event::Quit))` on Ctrl-C or when the terminal shrinks
    /// to a degenerate size (width or height <= 1), `Ok(Some(Event::Input(..)))`
    /// when Enter commits the input line, and `Ok(None)` for events handled
    /// internally (editing keys, history navigation, normal resizes).
    pub async fn process(&mut self) -> Result<Option<Event>, Error> {
        // Prefer the pending event (the startup resize), then the stream.
        let event = match self.event.take() {
            Some(event) => event,
            // NOTE(review): `unwrap` assumes the event stream never ends —
            // confirm crossterm's EventStream guarantees this.
            None => self.stream.next().await.unwrap()?,
        };
        let event = match event {
            TermEvent::Key(key) => match key.code {
                KeyCode::Char('c' | 'C') if key.modifiers.contains(KeyModifiers::CONTROL) => {
                    Some(Event::Quit)
                }
                KeyCode::Char(c) => {
                    self.input.input(c);
                    None
                }
                KeyCode::Backspace => {
                    self.input.erase();
                    None
                }
                KeyCode::End => {
                    self.input.last_char();
                    None
                }
                KeyCode::Home => {
                    self.input.first_char();
                    None
                }
                KeyCode::Enter => Some(Event::Input(self.input.enter())),
                KeyCode::Left => {
                    self.input.prev_char();
                    None
                }
                KeyCode::Right => {
                    self.input.next_char();
                    None
                }
                KeyCode::Up => {
                    self.input.prev_history();
                    None
                }
                KeyCode::Down => {
                    self.input.next_history();
                    None
                }
                _ => None,
            },
            TermEvent::Mouse(_) => None,
            // Too small to render anything useful: treat as quit.
            TermEvent::Resize(0..=1, _) | TermEvent::Resize(_, 0..=1) => Some(Event::Quit),
            TermEvent::Resize(_, height) => {
                self.height = height;
                None
            }
        };
        Ok(event)
    }

    /// Draw both widgets, then flush the queued commands in one `execute!`.
    pub fn render(&mut self) -> Result<(), Error> {
        self.log.render(&mut self.stdout, self.height)?;
        self.input.render(&mut self.stdout, self.height)?;
        // No commands: this just flushes everything queued by the widgets.
        crossterm::execute!(&mut self.stdout)?;
        Ok(())
    }

    /// Restore the terminal: leave raw mode and the alternate screen.
    pub fn close(&mut self) -> Result<(), Error> {
        terminal::disable_raw_mode()?;
        crossterm::execute!(self.stdout, LeaveAlternateScreen)?;
        Ok(())
    }
}

/// Application-level events produced by `Screen::process`.
pub enum Event {
    /// A line of input committed with Enter.
    Input(String),
    /// The user asked to quit (Ctrl-C) or the terminal became unusable.
    Quit,
}
//! A [Latin square](https://en.wikipedia.org/wiki/Latin_square) is a
//! n × n array filled with n different symbols, each occurring exactly once in
//! each row and exactly once in each column.

use crate::ExactCover;
use std::collections::HashSet;

/// Instance of a Latin square puzzle.
#[derive(Debug)]
pub struct LatinSquare {
    /// The list of possible positions + values that could solve the Latin
    /// square puzzle.
    pub possibilities: Vec<Possibility>,
    /// The list of constraints that must be satisfied for this Latin square
    /// puzzle.
    pub constraints: Vec<Constraint>,
}

impl LatinSquare {
    /// Create a new Latin square puzzle.
    ///
    /// The puzzle has dimensions `side_length` × `side_length` and the given
    /// list of filled values.
    pub fn new(side_length: usize, filled_values: impl IntoIterator<Item = Possibility>) -> Self {
        // Collect the pre-filled boxes, checking (debug builds only) that
        // each value lies in the legal symbol range.
        let filled_values: Vec<_> = filled_values
            .into_iter()
            .inspect(|poss| {
                debug_assert!(
                    0 < poss.value && poss.value <= side_length,
                    "Symbol values should be in range (1..=side_length)"
                )
            })
            .collect();

        // Constraints already satisfied by the pre-filled boxes; they are
        // excluded from the puzzle below.
        let satisfied: HashSet<_> = filled_values
            .iter()
            .copied()
            .flat_map(Possibility::satisfied_constraints)
            .collect();

        // Coordinates already occupied; no further possibility may use them.
        let filled_coordinates: HashSet<_> = filled_values
            .iter()
            .map(|poss| (poss.row, poss.column))
            .collect();

        let possibilities: Vec<_> = Possibility::all(side_length)
            .filter(|poss| !filled_coordinates.contains(&(poss.row, poss.column)))
            .collect();

        let constraints = Constraint::all(side_length)
            .filter(|cons| !satisfied.contains(cons))
            .collect();

        Self {
            possibilities,
            constraints,
        }
    }
}

impl ExactCover for LatinSquare {
    type Constraint = Constraint;
    type Possibility = Possibility;

    fn satisfies(&self, poss: &Self::Possibility, cons: &Self::Constraint) -> bool {
        poss.satisfies(cons)
    }

    // Every Latin-square constraint is mandatory: each must be covered
    // exactly once.
    fn is_optional(&self, _cons: &Self::Constraint) -> bool {
        false
    }

    fn possibilities(&self) -> &[Self::Possibility] {
        &self.possibilities
    }

    fn constraints(&self) -> &[Self::Constraint] {
        &self.constraints
    }
}

/// A position and value for a box inside of a Latin square puzzle.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Possibility {
    /// The row position of the box.
    ///
    /// The values ranges from 0 to `side_length - 1`.
    pub row: usize,
    /// The column position of the box.
    ///
    /// The values ranges from 0 to `side_length - 1`.
    pub column: usize,
    /// The value present inside of the box.
    ///
    /// The values ranges from 1 to `side_length`.
    pub value: usize,
}

impl Possibility {
    /// Return an iterator over all possible `Possibility`s for the given
    /// `side_length`.
    pub fn all(side_length: usize) -> impl Iterator<Item = Self> {
        // Note the iteration order: column varies slowest, then row, then
        // value (values start at 1, hence the `side_length + 1` bound).
        crate::util::three_combination_iter([side_length, side_length, side_length + 1], [0, 0, 1])
            .map(|[column, row, value]| Possibility { row, column, value })
    }

    /// Return an iterator over all `Constraint`s that are satisfied by this
    /// `Possibility`.
    pub fn satisfied_constraints(self) -> impl Iterator<Item = Constraint> {
        [
            Constraint::RowNumber {
                row: self.row,
                value: self.value,
            },
            Constraint::ColumnNumber {
                column: self.column,
                value: self.value,
            },
            Constraint::RowColumn {
                row: self.row,
                column: self.column,
            },
        ]
        .into_iter()
    }

    /// Return true if this `Possibility` satisfies the given `Constraint`.
    pub fn satisfies(&self, constraint: &Constraint) -> bool {
        use Constraint::*;

        match constraint {
            RowNumber { row, value } => self.row == *row && self.value == *value,
            ColumnNumber { column, value } => self.column == *column && self.value == *value,
            RowColumn { row, column } => self.row == *row && self.column == *column,
        }
    }
}

/// A condition which must be satisfied in order to solve a Latin square puzzle.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum Constraint {
    /// A condition that each row should only have a single instance of a
    /// numeric value.
    RowNumber {
        /// The row index
        row: usize,
        /// The unique numeric value
        value: usize,
    },
    /// A condition that each column should only have a single instance of a
    /// numeric value.
    ColumnNumber {
        /// The column index
        column: usize,
        /// The unique numeric value
        value: usize,
    },
    /// A condition that each row, column pair should exist exactly once.
    RowColumn {
        /// The row index
        row: usize,
        /// The column index
        column: usize,
    },
}

impl Constraint {
    /// Return an iterator over all possible `Constraint`s for the given
    /// `side_length`.
    pub fn all(side_length: usize) -> impl Iterator<Item = Constraint> {
        let row_number_it =
            crate::util::two_combination_iter([side_length, side_length + 1], [0, 1])
                .map(|[row, value]| Constraint::RowNumber { row, value });
        let column_number_it =
            crate::util::two_combination_iter([side_length, side_length + 1], [0, 1])
                .map(|[column, value]| Constraint::ColumnNumber { column, value });
        let row_column_it = crate::util::two_combination_iter([side_length, side_length], [0, 0])
            .map(|[row, column]| Constraint::RowColumn { row, column });

        row_number_it.chain(column_number_it).chain(row_column_it)
    }
}

#[cfg(test)]
pub(crate) mod tests {
    use super::*;

    // Shorthand constructors to keep the expected values readable.
    pub(crate) fn p(row: usize, column: usize, value: usize) -> Possibility {
        Possibility { row, column, value }
    }

    fn c_row(row: usize, value: usize) -> Constraint {
        Constraint::RowNumber { row, value }
    }

    fn c_col(column: usize, value: usize) -> Constraint {
        Constraint::ColumnNumber { column, value }
    }

    fn c_row_col(row: usize, column: usize) -> Constraint {
        Constraint::RowColumn { row, column }
    }

    #[test]
    fn check_all_possibilities() {
        let some_possibilities: Vec<_> = Possibility::all(2).collect();

        assert_eq!(
            &some_possibilities,
            &[
                p(0, 0, 1),
                p(0, 0, 2),
                p(1, 0, 1),
                p(1, 0, 2),
                p(0, 1, 1),
                p(0, 1, 2),
                p(1, 1, 1),
                p(1, 1, 2),
            ]
        );
    }

    #[test]
    fn check_generated_possibilities_constraints() {
        let mut square = LatinSquare::new(2, vec![p(0, 0, 1), p(0, 1, 2)]);
        square.possibilities.sort();

        assert_eq!(
            square.possibilities,
            vec![p(1, 0, 1), p(1, 0, 2), p(1, 1, 1), p(1, 1, 2)]
        );

        square.constraints.sort();
        assert_eq!(
            square.constraints,
            vec![
                c_row(1, 1),
                c_row(1, 2),
                c_col(0, 2),
                c_col(1, 1),
                c_row_col(1, 0),
                c_row_col(1, 1)
            ]
        );
    }

    #[test]
    fn solve_small_latin_square() {
        let square = LatinSquare::new(2, vec![p(0, 0, 1), p(0, 1, 2)]);
        let mut solver = square.solver();
        let solutions = solver.all_solutions();

        assert_eq!(solutions.len(), 1);
        assert_eq!(solutions[0], vec![&p(1, 0, 2), &p(1, 1, 1)]);
    }

    #[test]
    fn solve_multi_solution_latin_square() {
        let square = LatinSquare::new(2, vec![]);
        let mut solver = square.solver();
        let solutions = solver.all_solutions();

        assert_eq!(solutions.len(), 2);
        assert_eq!(
            solutions[0],
            vec![&p(0, 0, 1), &p(0, 1, 2), &p(1, 1, 1), &p(1, 0, 2)]
        );
        assert_eq!(
            solutions[1],
            vec![&p(0, 1, 1), &p(0, 0, 2), &p(1, 0, 1), &p(1, 1, 2)]
        );
    }

    #[test]
    fn solve_impossible_latin_square() {
        // Two identical values in row 0 leave a column constraint uncoverable.
        let square = LatinSquare::new(2, vec![p(0, 0, 1), p(0, 1, 1)]);
        let mut solver = square.solver();
        let solutions = solver.all_solutions();

        assert_eq!(solutions.len(), 0);
    }
}
use super::{Addr, MemoryArena};
use common::is_power_of_2;
use std::mem;

const MAX_BLOCK_LEN: u32 = 1u32 << 15;
const FIRST_BLOCK: u32 = 4u32;

/// Returns `Some(next_block_len)` when the element at position `len`
/// (1-based) is the first element of a new block, i.e. when a jump to a new
/// block is needed; `None` otherwise.
///
/// Blocks double in size (powers of two from 4 up to `MAX_BLOCK_LEN`), then
/// stay at `MAX_BLOCK_LEN`. Uses the pre-2018 inclusive range syntax `...`.
#[inline]
pub fn jump_needed(len: u32) -> Option<usize> {
    match len {
        0...3 => None,
        4...MAX_BLOCK_LEN => {
            if is_power_of_2(len as usize) {
                Some(len as usize)
            } else {
                None
            }
        }
        n => {
            if n % MAX_BLOCK_LEN == 0 {
                Some(MAX_BLOCK_LEN as usize)
            } else {
                None
            }
        }
    }
}

/// An exponential unrolled link.
///
/// The use case is as follows. Tantivy's indexer conceptually acts like a
/// `HashMap<Term, Vec<u32>>`. As we come accross a given term in document
/// `D`, we lookup the term in the map and append the document id to its vector.
///
/// The vector is then only read when it is serialized.
///
/// The `ExpUnrolledLinkedList` offers a more efficient solution to this
/// problem.
///
/// It combines the idea of the unrolled linked list and tries to address the
/// problem of selecting an adequate block size using a strategy similar to
/// that of the `Vec` amortized resize strategy.
///
/// Data is stored in a linked list of blocks. The first block has a size of `4`
/// and each block has a length of twice that of the previous block up to
/// `MAX_BLOCK_LEN = 32768`.
///
/// This strategy is a good trade off to handle numerous very rare terms
/// and avoid wasting half of the memory for very frequent terms.
#[derive(Debug, Clone, Copy)]
pub struct ExpUnrolledLinkedList {
    // Number of `u32` values pushed so far.
    len: u32,
    // Arena address of the first block.
    head: Addr,
    // Arena address of the next free `u32` slot.
    tail: Addr,
}

impl ExpUnrolledLinkedList {
    /// Allocates the first (4-element) block in the arena and returns an
    /// empty list whose head and tail both point at it.
    pub fn new(heap: &mut MemoryArena) -> ExpUnrolledLinkedList {
        let addr = heap.allocate_space((FIRST_BLOCK as usize) * mem::size_of::<u32>());
        ExpUnrolledLinkedList {
            len: 0u32,
            head: addr,
            tail: addr,
        }
    }

    /// Returns an iterator over the pushed values, in insertion order.
    /// `heap` must be the same arena the values were pushed into.
    pub fn iter<'a>(&self, heap: &'a MemoryArena) -> ExpUnrolledLinkedListIterator<'a> {
        ExpUnrolledLinkedListIterator {
            heap,
            addr: self.head,
            len: self.len,
            consumed: 0,
        }
    }

    /// Appends a new element to the current stack.
    ///
    /// If the current block end is reached, a new block is allocated.
    pub fn push(&mut self, val: u32, heap: &mut MemoryArena) {
        self.len += 1;
        if let Some(new_block_len) = jump_needed(self.len) {
            // We need to allocate another block.
            // We also allocate an extra `u32` to store the pointer
            // to the future next block.
            let new_block_size: usize = (new_block_len + 1) * mem::size_of::<u32>();
            let new_block_addr: Addr = heap.allocate_space(new_block_size);
            unsafe {
                // SAFETY(review): presumably sound because `tail` points at
                // the reserved extra slot of the old block, which now stores
                // the link to the new block — confirm against `MemoryArena`.
                heap.write(self.tail, new_block_addr)
            };
            self.tail = new_block_addr;
        }
        unsafe {
            // SAFETY(review): `tail` points at the next free `u32` slot of
            // the current block (allocated above or in a previous push).
            heap.write(self.tail, val);
            self.tail = self.tail.offset(mem::size_of::<u32>() as u32);
        }
    }
}

/// Iterator over an `ExpUnrolledLinkedList`; follows the inter-block links
/// stored in the arena as it goes.
pub struct ExpUnrolledLinkedListIterator<'a> {
    heap: &'a MemoryArena,
    // Address of the next value to read.
    addr: Addr,
    // Total number of values in the list.
    len: u32,
    // Number of values already yielded.
    consumed: u32,
}

impl<'a> Iterator for ExpUnrolledLinkedListIterator<'a> {
    type Item = u32;

    fn next(&mut self) -> Option<u32> {
        if self.consumed == self.len {
            None
        } else {
            self.consumed += 1;
            // When the (1-based) position starts a new block, the current
            // slot holds the address of that block rather than a value:
            // dereference the link first.
            let addr: Addr = if jump_needed(self.consumed).is_some() {
                unsafe {
                    // SAFETY(review): this slot was written with the next
                    // block's address by `push` — confirm against the arena.
                    self.heap.read(self.addr)
                }
            } else {
                self.addr
            };
            self.addr = addr.offset(mem::size_of::<u32>() as u32);
            Some(unsafe {
                // SAFETY(review): `addr` points at a `u32` written by `push`.
                self.heap.read(addr)
            })
        }
    }
}

#[cfg(test)]
mod tests {
    use super::super::MemoryArena;
    use super::jump_needed;
    use super::*;

    #[test]
    fn test_stack() {
        let mut heap = MemoryArena::new();
        let mut stack = ExpUnrolledLinkedList::new(&mut heap);
        stack.push(1u32, &mut heap);
        stack.push(2u32, &mut heap);
        stack.push(4u32, &mut heap);
        stack.push(8u32, &mut heap);
        {
            let mut it = stack.iter(&heap);
            assert_eq!(it.next().unwrap(), 1u32);
            assert_eq!(it.next().unwrap(), 2u32);
            assert_eq!(it.next().unwrap(), 4u32);
            assert_eq!(it.next().unwrap(), 8u32);
            assert!(it.next().is_none());
        }
    }

    #[test]
    fn test_jump_if_needed() {
        // Walk the block-boundary sequence and verify a jump happens exactly
        // at each boundary, and nowhere around it.
        let mut block_len = 4u32;
        let mut i = 0;
        while i < 10_000_000 {
            assert!(jump_needed(i + block_len - 1).is_none());
            assert!(jump_needed(i + block_len + 1).is_none());
            assert!(jump_needed(i + block_len).is_some());
            let new_block_len = jump_needed(i + block_len).unwrap();
            i += block_len;
            block_len = new_block_len as u32;
        }
    }
}

#[cfg(all(test, feature = "unstable"))]
mod bench {
    use super::ExpUnrolledLinkedList;
    use tantivy_memory_arena::MemoryArena;
    use test::Bencher;

    const NUM_STACK: usize = 10_000;
    const STACK_SIZE: u32 = 1000;

    // Baseline: the naive `Vec<Vec<u32>>` equivalent.
    #[bench]
    fn bench_push_vec(bench: &mut Bencher) {
        bench.iter(|| {
            let mut vecs = Vec::with_capacity(100);
            for _ in 0..NUM_STACK {
                vecs.push(Vec::new());
            }
            for s in 0..NUM_STACK {
                for i in 0u32..STACK_SIZE {
                    let t = s * 392017 % NUM_STACK;
                    vecs[t].push(i);
                }
            }
        });
    }

    // Same workload through the arena-backed unrolled lists.
    #[bench]
    fn bench_push_stack(bench: &mut Bencher) {
        let heap = MemoryArena::new();
        bench.iter(|| {
            let mut stacks = Vec::with_capacity(100);
            for _ in 0..NUM_STACK {
                let (_, stack) = heap.allocate_object::<ExpUnrolledLinkedList>();
                stacks.push(stack);
            }
            for s in 0..NUM_STACK {
                for i in 0u32..STACK_SIZE {
                    let t = s * 392017 % NUM_STACK;
                    stacks[t].push(i, &heap);
                }
            }
            heap.clear();
        });
    }
}
//! # Provider tests
//!
//! This is a dummy crate that, for now, runs (Wycheproof) test vectors against
//! the [RustCrypto traits](https://github.com/RustCrypto/traits) API for
//! different providers.
//!
//! Tested providers:
//! * hacspec
//! * evercrypt
use std::path::{Path};
use std::fs::{File, Metadata, canonicalize};
use std::io::{BufRead, Read, Write, Result};
use std::cmp::min;
use std::borrow::Cow;
use std::fmt;
use regex::Regex;
use crate::ast::*;
use crate::grammar::*;
use crate::parser::*;
use crate::io::*;

/// A handler turns a request into a response, delivering the response via a
/// callback rather than returning it (the response may borrow from handler
/// state).
pub trait HttpHandler {
    fn handle<F>(&mut self, request: &mut Request, fun: F) -> Result<()>
        where F: FnMut(&mut Response) -> Result<()> + Sized;
}

/// Serialize `self` onto a writer, returning the number of bytes written.
pub trait WriteTo {
    fn write_to(&mut self, write: &mut dyn Write) -> Result<usize>;
}

/// Serves files from a base directory, refusing paths that escape it.
pub struct FileHandler<T: AsRef<Path>> {
    base: T,
}

impl<T: AsRef<Path>> FileHandler<T> {
    pub fn new(base: T) -> FileHandler<T> {
        FileHandler {
            base,
        }
    }

    /// Resolve `path` (which starts with '/') under `base` and build a
    /// response streaming the file.
    ///
    /// Returns 401 if the canonicalized path escapes `base`, 404 if it is a
    /// directory; I/O errors (including nonexistent files, via
    /// `canonicalize`) propagate as `Err`.
    pub fn get(&self, path: &str) -> Result<Response> {
        // Strip the leading '/' so `join` treats the path as relative;
        // canonicalize resolves `..` before the containment check below.
        let full_path = canonicalize(self.base.as_ref().join(&path[1..]))?;
        if !full_path.starts_with(&self.base) {
            return Ok(Response::unauthorized().message("Not allowed outside of base"));
        }
        let file: File = File::open(&full_path)?;
        let metadata: Metadata = file.metadata()?;
        if metadata.is_dir() {
            return Ok(Response::not_found().message("Path denotes a directory"));
        }
        Ok(Response::ok().
            content_type("text/plain".to_string()).
            content_length(metadata.len()).
            entity(MessageBody::Reader(Box::new(file))))
    }
}

impl<T: AsRef<Path>> HttpHandler for FileHandler<T> {
    // Only GET is supported; any I/O failure is reported as 404.
    fn handle<F>(&mut self, request: &mut Request, mut fun: F) -> Result<()>
        where F: FnMut(&mut Response) -> Result<()> + Sized {
        fun(&mut match *request {
            Request { method: "GET", uri: Uri { path, .. }, .. } => {
                self.get(path).unwrap_or_else(|_|Response::not_found().message("Not Found"))
            }
            _ => {
                Response::method_not_allowed()
            }
        })
    }
}

/// Decorator that prints each request/response pair to stdout.
pub struct LogHandler<H> where H: HttpHandler {
    handler: H,
}

impl<H> LogHandler<H> where H: HttpHandler {
    pub fn new(handler: H) -> LogHandler<H> {
        LogHandler {
            handler,
        }
    }
}

impl<H> HttpHandler for LogHandler<H> where H: HttpHandler {
    fn handle<F>(&mut self, request: &mut Request, mut fun: F) -> Result<()>
        where F: FnMut(&mut Response) -> Result<()> + Sized {
        // Format the request up front: the closure below mutably borrows it.
        let r = format!("{}", request);
        self.handler.handle(request, |response| {
            println!("{}{}\n\n", r, response);
            fun(response)
        })
    }
}

/// Borrowed decomposition of a URI per RFC 3986's five components.
#[derive(PartialEq, Debug)]
pub struct Uri<'a> {
    pub scheme: Option<&'a str>,
    pub authority: Option<&'a str>,
    pub path: &'a str,
    pub query: Option<&'a str>,
    pub fragment: Option<&'a str>,
}

impl<'a> Uri<'a> {
    /// Split `value` with the RFC 3986 reference regex. The regex matches any
    /// input (all groups are optional except path, which may be empty), so
    /// the `unwrap`s cannot fail on the match itself.
    pub fn parse(value: &'a str) -> Uri<'a> {
        lazy_static! {
            static ref RFC3986: Regex = Regex::new("^(?:([^:/?\\#]+):)?(?://([^/?\\#]*))?([^?\\#]*)(?:\\?([^\\#]*))?(?:\\#(.*))?").unwrap();
        }
        let result = RFC3986.captures(value).unwrap();
        Uri {
            scheme: result.get(1).map(|s|s.as_str()),
            authority: result.get(2).map(|s|s.as_str()),
            path: result.get(3).unwrap().as_str(),
            query: result.get(4).map(|s|s.as_str()),
            fragment: result.get(5).map(|s|s.as_str()),
        }
    }
}

impl<'a> fmt::Display for Uri<'a> {
    // Recomposition: inverse of `parse` (see `is_reverse_able` test).
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        if let Some(scheme) = self.scheme {
            write!(format, "{}:", scheme)?;
        }
        if let Some(authority) = self.authority {
            write!(format, "//{}", authority)?;
        }
        format.write_str(self.path)?;
        if let Some(query) = self.query {
            write!(format, "?{}", query)?;
        }
        if let Some(fragment) = self.fragment {
            write!(format, "#{}", fragment)?;
        }
        Ok(())
    }
}

/// Either side of an HTTP exchange.
#[derive(PartialEq, Debug)]
pub enum Message<'a> {
    Request(Request<'a>),
    Response(Response<'a>),
}

impl<'a> Message<'a> {
    /// Parse a full message from `slice`, returning it with the unparsed tail.
    pub fn parse(slice: &'a [u8]) -> Result<(Message<'a>, &'a [u8])> {
        result(http_message(slice)).map(|(message, remainder)| (Message::from(message), remainder))
    }

    /// Parse the message head from `slice` and read the body through
    /// `reader`; returns the message and the total number of bytes consumed.
    pub fn read<R>(slice: &'a [u8], reader: &'a mut R) -> Result<(Message<'a>, usize)> where R: Read {
        result(message_head(slice)).map(move |(head, remainder)| {
            let head_length = slice.len() - remainder.len();
            let headers = head.headers;
            // Body framing (length/chunking) is decided from the headers.
            let (body, body_read) = MessageBody::read(&headers, remainder, reader);
            (match head.start_line {
                StartLine::RequestLine(line) => Message::Request(Request::new(line.method, line.request_target, headers, body)),
                StartLine::StatusLine(line) => Message::Response(Response::new(line.code, line.description, headers, body)),
            }, head_length + body_read)
        })
    }
}

impl<'a> From<HttpMessage<'a>> for Message<'a> {
    fn from(message: HttpMessage<'a>) -> Message<'a> {
        match message.start_line {
            StartLine::RequestLine(line) => Message::Request(Request::new(line.method, line.request_target, message.headers, message.body)),
            StartLine::StatusLine(line) => Message::Response(Response::new(line.code, line.description, message.headers, message.body)),
        }
    }
}

impl<'a> WriteTo for Message<'a> {
    fn write_to(&mut self, write: &mut dyn Write) -> Result<usize> {
        match *self {
            Message::Request(ref mut request) => request.write_to(write),
            Message::Response(ref mut response) => response.write_to(write),
        }
    }
}

/// An HTTP request borrowing from the buffer it was parsed out of.
#[derive(PartialEq, Debug)]
pub struct Request<'a> {
    pub method: &'a str,
    pub uri: Uri<'a>,
    pub headers: Headers<'a>,
    pub entity: MessageBody<'a>,
}

impl<'a> Request<'a> {
    pub fn new(method: &'a str, url: &'a str, headers: Headers<'a>, entity: MessageBody<'a>) -> Request<'a> {
        Request { method, uri: Uri::parse(url), headers, entity }
    }

    /// Bare request with the given method/url, no headers and no body.
    pub fn request(method: &'a str, url: &'a str) -> Request<'a> {
        Request::new(method, url, Headers::new(), MessageBody::None)
    }

    /// Parse a request from `slice`, returning it with the unparsed tail.
    pub fn parse(slice: &'a [u8]) -> Result<(Request<'a>, &'a [u8])> {
        result(http_message(slice)).map(|(request, remainder)| (Request::from(request), remainder))
    }

    // Convenience constructors for the common methods.
    pub fn get(url: &'a str) -> Request<'a> {
        Request::request("GET", url)
    }

    pub fn post(url: &'a str) -> Request<'a> {
        Request::request("POST", url)
    }

    pub fn put(url: &'a str) -> Request<'a> {
        Request::request("PUT", url)
    }

    pub fn delete(url: &'a str) -> Request<'a> {
        Request::request("DELETE", url)
    }

    pub fn option(url: &'a str) -> Request<'a> {
        Request::request("OPTION", url)
    }

    // Builder-style setters: each consumes and returns `self`.
    pub fn method(mut self, method: &'a str) -> Self {
        self.method = method;
        self
    }

    pub fn header<V>(mut self, name: &'a str, value: V) -> Self where V: Into<Cow<'a, str>> {
        self.headers.replace(name, value);
        self
    }

    pub fn get_header(&self, name: &str) -> Option<&str> {
        self.headers.get(name)
    }

    pub fn remove_header(mut self, name: &str) -> Self {
        self.headers.remove(name);
        self
    }
}

impl<'a> From<HttpMessage<'a>> for Request<'a> {
    /// Panics if the message is a response; use `Message::from` when the
    /// kind is not known in advance.
    fn from(message: HttpMessage<'a>) -> Request<'a> {
        if let StartLine::RequestLine(line) = message.start_line {
            return Request::new(line.method, line.request_target, message.headers, message.body);
        }
        panic!("Can not convert HttpMessage that is a Response into a Request")
    }
}

impl<'a> fmt::Display for Request<'a> {
    // Renders as an HTTP/1.1 request: request line, headers, CRLF, body.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        write!(format, "{}{}\r\n{}", RequestLine { method: self.method, request_target: self.uri.to_string().as_str(), version: HttpVersion { major: 1, minor: 1 } }, self.headers, self.entity)
    }
}

impl<'a> WriteTo for Request<'a> {
    fn write_to(&mut self, write: &mut dyn Write) -> Result<usize> {
        let text = format!("{}{}\r\n", RequestLine { method: self.method, request_target: self.uri.to_string().as_str(), version: HttpVersion { major: 1, minor: 1 } }, self.headers);
        let head = write.write(text.as_bytes())?;
        let body = self.entity.write_to(write)?;
        Ok(head + body)
    }
}

/// An HTTP response borrowing from the buffer it was parsed out of.
#[derive(PartialEq, Debug)]
pub struct Response<'a> {
    pub code: u16,
    pub description: &'a str,
    pub headers: Headers<'a>,
    pub entity: MessageBody<'a>,
}

impl<'a> Response<'a> {
    /// Builds the response and runs `build()` to set Content-Length when the
    /// body size is statically known.
    pub fn new(code: u16, description: &'a str, headers: Headers<'a>, entity: MessageBody<'a>) -> Response<'a> {
        Response { code, description, headers, entity }.build()
    }

    pub fn response(code: u16, description: &'a str) -> Response<'a> {
        Response::new(code, description, Headers::new(), MessageBody::None)
    }

    // Convenience constructors for the common status codes.
    pub fn ok() -> Response<'a> {
        Response::response(200, "OK")
    }

    pub fn bad_request() -> Response<'a> {
        Response::response(400, "Bad Request")
    }

    pub fn unauthorized() -> Response<'a> {
        Response::response(401, "Unauthorized")
    }

    pub fn not_found() -> Response<'a> {
        Response::response(404, "Not Found")
    }

    pub fn method_not_allowed() -> Response<'a> {
        Response::response(405, "Method Not Allowed")
    }

    // Builder-style setters: each consumes and returns `self`.
    pub fn code(mut self, code: u16) -> Response<'a> {
        self.code = code;
        self
    }

    pub fn description(mut self, description: &'a str) -> Response<'a> {
        self.description = description;
        self
    }

    /// Sets `message` as both the status description and a text/plain body.
    pub fn message(self, message: &'a str) -> Response<'a> {
        let bytes = message.as_bytes();
        self.description(message).
            content_type("text/plain".to_string()).
            entity(MessageBody::Slice(bytes))
    }

    pub fn header(mut self, name: &'a str, value: String) -> Response<'a> {
        self.headers.replace(name, value);
        self
    }

    pub fn get_header(&self, name: &str) -> Option<&str> {
        self.headers.get(name)
    }

    pub fn remove_header(mut self, name: &str) -> Response<'a> {
        self.headers.remove(name);
        self
    }

    pub fn entity(mut self, entity: MessageBody<'a>) -> Response<'a> {
        self.entity = entity;
        self.build()
    }

    pub fn content_type(self, media_type: String) -> Response<'a> {
        self.header("Content-Type", media_type)
    }

    pub fn content_length(self, length: u64) -> Response<'a> {
        self.header("Content-Length", format!("{}", length))
    }

    // Body length when statically known; None for streamed bodies.
    fn calculate_length(&self) -> Option<u64> {
        match self.entity {
            MessageBody::None => {
                Some(0)
            }
            MessageBody::Slice(slice) => {
                Some(slice.len() as u64)
            }
            _ => None
        }
    }

    // Keep Content-Length in sync with the entity whenever it is known.
    fn build(self) -> Response<'a> {
        if let Some(length) = self.calculate_length() {
            return self.content_length(length)
        }
        self
    }
}

impl<'a> From<HttpMessage<'a>> for Response<'a> {
    /// Panics if the message is a request; use `Message::from` when the kind
    /// is not known in advance.
    fn from(message: HttpMessage<'a>) -> Response<'a> {
        if let StartLine::StatusLine(line) = message.start_line {
            return Response::new(line.code, line.description, message.headers, message.body);
        }
        panic!("Can not convert HttpMessage that is a Request into a Response")
    }
}

impl<'a> fmt::Display for Response<'a> {
    // Renders as an HTTP/1.1 response: status line, headers, CRLF, body.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        write!(format, "{}{}\r\n{}", StatusLine { code: self.code, description: self.description, version: HttpVersion { major: 1, minor: 1 } }, self.headers, self.entity)
    }
}

impl<'a> WriteTo for Response<'a> {
    fn write_to(&mut self, write: &mut dyn Write) -> Result<usize> {
        let text = format!("{}{}\r\n", StatusLine { code: self.code, description: self.description, version: HttpVersion { major: 1, minor: 1 } }, self.headers);
        let head = write.write(text.as_bytes())?;
        let body = self.entity.write_to(write)?;
        Ok(head + body)
    }
}

/// Streams `Transfer-Encoding: chunked` data out of a buffered reader,
/// consuming exactly the bytes belonging to the chunked body and leaving the
/// rest (e.g. a pipelined request) in the underlying reader.
pub struct ChunkStream<R> where R: BufRead + Sized {
    pub read: R,
    pub state: ChunkStreamState,
}

/// Progress of a `ChunkStream` through the chunked body.
#[derive(PartialEq, Debug)]
pub enum ChunkStreamState {
    NotStarted,
    /// A regular chunk was parsed; its bytes still need consuming.
    Consumed(usize),
    /// The terminal chunk was parsed; its bytes still need consuming.
    Last(usize),
    Finished,
}

impl<R> ChunkStream<R> where R: BufRead + Sized {
    pub fn new(read: R) -> ChunkStream<R> {
        ChunkStream { read, state: ChunkStreamState::NotStarted }
    }

    /// Consume from the reader the bytes of the chunk parsed on the previous
    /// `next` call (deferred because that chunk borrowed the buffer).
    pub fn update_state(&mut self) {
        match self.state {
            ChunkStreamState::Last(consumed) => {
                self.read.consume(consumed);
                self.state = ChunkStreamState::Finished;
            },
            ChunkStreamState::Consumed(consumed) => {
                self.read.consume(consumed)
            },
            _ => {}
        }
    }
}

impl<'a, R> Drop for ChunkStream<R> where R: BufRead + Sized {
    // Drain to the end of the chunked body so the underlying reader is left
    // positioned at the first byte after it (see the
    // `chunked_stream_always_reads_to_end` test).
    fn drop(&mut self) {
        while let Some(Ok(_)) = self.next() {}
    }
}

impl<'a, R> Streamer<'a> for ChunkStream<R> where R: BufRead + Sized {
    type Item = Result<Chunk<'a>>;

    /// Yield the next parsed chunk, or `None` once the last chunk (or EOF)
    /// has been reached.
    fn next(&'a mut self) -> Option<Self::Item> {
        self.update_state();
        if self.state == ChunkStreamState::Finished {
            return None;
        }
        loop {
            // NOTE(review): `unwrap` turns reader errors into panics here.
            let buffer = self.read.fill_buf().unwrap();
            if buffer.len() == 0 {
                self.state = ChunkStreamState::Finished;
                return None;
            }
            return match Chunk::read(buffer) {
                Ok((last @ Chunk::Last(..), consumed)) => {
                    self.state = ChunkStreamState::Last(consumed);
                    Some(Ok(last))
                },
                Ok((chunk, consumed)) => {
                    self.state = ChunkStreamState::Consumed(consumed);
                    Some(Ok(chunk))
                },
                Err(e) => Some(Err(e))
            };
        }
    }
}

impl<'a, R> Read for ChunkStream<R> where R: BufRead + Sized {
    // Presents the de-chunked payload as a plain `Read`; each `read` call
    // delivers (at most) one chunk's data.
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        match self.next() {
            None => Ok(0),
            Some(Ok(Chunk::Slice(_, slice))) => {
                // TODO: handle when buf is too small
                let size = min(slice.len(), buf.len());
                buf[..size].copy_from_slice(slice);
                Ok(size)
            },
            Some(Ok(Chunk::Last(..))) => {
                Ok(0)
            },
            Some(Err(e)) => Err(e),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn can_parse_uri() {
        let uri = Uri::parse("http://authority/some/path?query=string#fragment");
        assert_eq!(uri.scheme, Some("http"));
        assert_eq!(uri.authority, Some("authority"));
        assert_eq!(uri.path, "/some/path");
        assert_eq!(uri.query, Some("query=string"));
        assert_eq!(uri.fragment, Some("fragment"));
    }

    #[test]
    fn supports_relative() {
        let uri = Uri::parse("some/path");
        assert_eq!(uri.scheme, None);
        assert_eq!(uri.authority, None);
        assert_eq!(uri.path, "some/path");
        assert_eq!(uri.query, None);
        assert_eq!(uri.fragment, None);
    }

    #[test]
    fn supports_urns() {
        let uri = Uri::parse("uuid:720f11db-1a29-4a68-a034-43f80b27659d");
        assert_eq!(uri.scheme, Some("uuid"));
        assert_eq!(uri.authority, None);
        assert_eq!(uri.path, "720f11db-1a29-4a68-a034-43f80b27659d");
        assert_eq!(uri.query, None);
        assert_eq!(uri.fragment, None);
    }

    #[test]
    fn is_reverse_able() {
        let original = "http://authority/some/path?query=string#fragment";
        assert_eq!(Uri::parse(original).to_string(), original.to_string());
        let another = "some/path";
        assert_eq!(Uri::parse(another).to_string(), another.to_string());
    }

    #[test]
    fn can_pattern_match_a_request() {
        let request = Request::get("/some/path").header("Content-Type", "text/plain");
        match request {
            Request { method: "GET", uri: Uri { path: "/some/path", .. }, ref headers, .. } if headers.get("Content-Type") == Some("text/plain") => {},
            _ => {
                panic!("Should have matched");
            }
        }
    }

    #[test]
    fn can_parse_chunk_stream() {
        use std::io::BufRead;
        use crate::io::{BufferedRead, Streamer};
        use crate::ast::{Chunk, ChunkExtensions, Headers};

        let data = &b"4\r\nWiki\r\n5\r\npedia\r\nE\r\n in\r\n\r\nchunks.\r\n0\r\n\r\nGET /new/request HTTP/1.1\r\n"[..];
        let buffered = BufferedRead::new(data);
        let mut stream = ChunkStream::new(buffered);
        if let Some(Ok(chunk)) = stream.next() {
            assert_eq!(chunk, Chunk::Slice(ChunkExtensions(vec![]), &b"Wiki"[..]));
        }
        if let Some(Ok(chunk)) = stream.next() {
            assert_eq!(chunk, Chunk::Slice(ChunkExtensions(vec![]), &b"pedia"[..]));
        }
        if let Some(Ok(chunk)) = stream.next() {
            assert_eq!(chunk, Chunk::Slice(ChunkExtensions(vec![]), &b" in\r\n\r\nchunks."[..]));
        }
        if let Some(Ok(chunk)) = stream.next() {
            assert_eq!(chunk, Chunk::Last(ChunkExtensions(vec![]), Headers::new()));
        }
        assert!(stream.next().is_none());
        // The bytes after the chunked body remain untouched in the reader.
        let remainder = stream.read.fill_buf().unwrap();
        assert_eq!(remainder, &b"GET /new/request HTTP/1.1\r\n"[..]);
    }

    #[test]
    fn can_read_chunked_stream() {
        use std::io::{BufRead, Read};
        use crate::io::{BufferedRead};

        let data = &b"4\r\nWiki\r\n5\r\npedia\r\nE\r\n in\r\n\r\nchunks.\r\n0\r\n\r\nGET /new/request HTTP/1.1\r\n"[..];
        let mut producer = BufferedRead::new(data);
        {
            let mut consumer = BufferedRead::new(ChunkStream::new(&mut producer));
            let mut result = String::new();
            consumer.read_to_string(&mut result).unwrap();
            assert_eq!(result, "Wikipedia in\r\n\r\nchunks.".to_owned());
        }
        {
            let remainder = producer.fill_buf().unwrap();
            assert_eq!(remainder, &b"GET /new/request HTTP/1.1\r\n"[..]);
        }
    }

    #[test]
    fn chunked_stream_always_reads_to_end() {
        use std::io::{BufRead};
        use crate::io::{BufferedRead};

        let data = &b"4\r\nWiki\r\n5\r\npedia\r\nE\r\n in\r\n\r\nchunks.\r\n0\r\n\r\nGET /new/request HTTP/1.1\r\n"[..];
        let mut producer = BufferedRead::new(data);
        {
            // Dropping the stream immediately must still drain the body.
            ChunkStream::new(&mut producer);
        }
        {
            let remainder = producer.fill_buf().unwrap();
            assert_eq!(remainder, &b"GET /new/request HTTP/1.1\r\n"[..]);
        }
    }
}
// This file is part of Substrate. // Copyright (C) 2018-2020 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::utils::{ extract_parameter_names_types_and_borrows, fold_fn_decl_for_client_side, generate_call_api_at_fn_name, generate_crate_access, generate_hidden_includes, generate_method_runtime_api_impl_name, generate_native_call_generator_fn_name, generate_runtime_mod_name_for_trait, prefix_function_with_trait, replace_wild_card_parameter_names, return_type_extract_type, AllowSelfRefInParameters, }; use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::{ fold::{self, Fold}, parse::{Error, Parse, ParseStream, Result}, parse_macro_input, parse_quote, spanned::Spanned, visit::{self, Visit}, Attribute, FnArg, GenericParam, Generics, Ident, ItemTrait, Lit, Meta, NestedMeta, ReturnType, TraitBound, TraitItem, TraitItemMethod, Type, }; use std::collections::HashMap; use blake2_rfc; /// The ident used for the block generic parameter. const BLOCK_GENERIC_IDENT: &str = "Block"; /// Unique identifier used to make the hidden includes unique for this macro. const HIDDEN_INCLUDES_ID: &str = "DECL_RUNTIME_APIS"; /// The `core_trait` attribute. const CORE_TRAIT_ATTRIBUTE: &str = "core_trait"; /// The `api_version` attribute. /// /// Is used to set the current version of the trait. const API_VERSION_ATTRIBUTE: &str = "api_version"; /// The `changed_in` attribute. 
/// /// Is used when the function signature changed between different versions of a trait. /// This attribute should be placed on the old signature of the function. const CHANGED_IN_ATTRIBUTE: &str = "changed_in"; /// The `renamed` attribute. /// /// Is used when a trait method was renamed. const RENAMED_ATTRIBUTE: &str = "renamed"; /// The `skip_initialize_block` attribute. /// /// Is used when a trait method does not require that the block is initialized /// before being called. const SKIP_INITIALIZE_BLOCK_ATTRIBUTE: &str = "skip_initialize_block"; /// The `initialize_block` attribute. /// /// A trait method tagged with this attribute, initializes the runtime at /// certain block. const INITIALIZE_BLOCK_ATTRIBUTE: &str = "initialize_block"; /// All attributes that we support in the declaration of a runtime api trait. const SUPPORTED_ATTRIBUTE_NAMES: &[&str] = &[ CORE_TRAIT_ATTRIBUTE, API_VERSION_ATTRIBUTE, CHANGED_IN_ATTRIBUTE, RENAMED_ATTRIBUTE, SKIP_INITIALIZE_BLOCK_ATTRIBUTE, INITIALIZE_BLOCK_ATTRIBUTE, ]; /// The structure used for parsing the runtime api declarations. struct RuntimeApiDecls { decls: Vec<ItemTrait>, } impl Parse for RuntimeApiDecls { fn parse(input: ParseStream) -> Result<Self> { let mut decls = Vec::new(); while !input.is_empty() { decls.push(ItemTrait::parse(input)?); } Ok(Self { decls }) } } /// Extend the given generics with `Block: BlockT` as first generic parameter. fn extend_generics_with_block(generics: &mut Generics) { let c = generate_crate_access(HIDDEN_INCLUDES_ID); generics.lt_token = Some(Default::default()); generics.params.insert(0, parse_quote!( Block: #c::BlockT )); generics.gt_token = Some(Default::default()); } /// Remove all attributes from the vector that are supported by us in the declaration of a runtime /// api trait. The returned hashmap contains all found attribute names as keys and the rest of the /// attribute body as `TokenStream`. 
fn remove_supported_attributes(attrs: &mut Vec<Attribute>) -> HashMap<&'static str, Attribute> { let mut result = HashMap::new(); attrs.retain(|v| match SUPPORTED_ATTRIBUTE_NAMES.iter().find(|a| v.path.is_ident(a)) { Some(attribute) => { result.insert(*attribute, v.clone()); false }, None => true, }); result } /// Visits the ast and checks if `Block` ident is used somewhere. struct IsUsingBlock { result: bool, } impl<'ast> Visit<'ast> for IsUsingBlock { fn visit_ident(&mut self, i: &'ast Ident) { if i == BLOCK_GENERIC_IDENT { self.result = true; } } } /// Visits the ast and checks if `Block` ident is used somewhere. fn type_is_using_block(ty: &Type) -> bool { let mut visitor = IsUsingBlock { result: false }; visitor.visit_type(ty); visitor.result } /// Visits the ast and checks if `Block` ident is used somewhere. fn return_type_is_using_block(ty: &ReturnType) -> bool { let mut visitor = IsUsingBlock { result: false }; visitor.visit_return_type(ty); visitor.result } /// Replace all occurrences of `Block` with `NodeBlock` struct ReplaceBlockWithNodeBlock {} impl Fold for ReplaceBlockWithNodeBlock { fn fold_ident(&mut self, input: Ident) -> Ident { if input == BLOCK_GENERIC_IDENT { Ident::new("NodeBlock", Span::call_site()) } else { input } } } /// Replace all occurrences of `Block` with `NodeBlock` fn fn_arg_replace_block_with_node_block(fn_arg: FnArg) -> FnArg { let mut replace = ReplaceBlockWithNodeBlock {}; fold::fold_fn_arg(&mut replace, fn_arg) } /// Replace all occurrences of `Block` with `NodeBlock` fn return_type_replace_block_with_node_block(return_type: ReturnType) -> ReturnType { let mut replace = ReplaceBlockWithNodeBlock {}; fold::fold_return_type(&mut replace, return_type) } /// Generate the functions that generate the native call closure for each trait method. 
fn generate_native_call_generators(decl: &ItemTrait) -> Result<TokenStream> { let fns = decl.items.iter().filter_map(|i| match i { TraitItem::Method(ref m) => Some(&m.sig), _ => None, }); let mut result = Vec::new(); let trait_ = &decl.ident; let crate_ = generate_crate_access(HIDDEN_INCLUDES_ID); // Auxiliary function that is used to convert between types that use different block types. // The function expects that both are convertible by encoding the one and decoding the other. result.push(quote!( #[cfg(any(feature = "std", test))] fn convert_between_block_types <I: #crate_::Encode, R: #crate_::Decode>( input: &I, error_desc: &'static str, ) -> std::result::Result<R, String> { <R as #crate_::DecodeLimit>::decode_with_depth_limit( #crate_::MAX_EXTRINSIC_DEPTH, &mut &#crate_::Encode::encode(input)[..], ).map_err(|e| format!("{} {}", error_desc, e.what())) } )); // Generate a native call generator for each function of the given trait. for fn_ in fns { let params = extract_parameter_names_types_and_borrows(&fn_, AllowSelfRefInParameters::No)?; let trait_fn_name = &fn_.ident; let fn_name = generate_native_call_generator_fn_name(&fn_.ident); let output = return_type_replace_block_with_node_block(fn_.output.clone()); let output_ty = return_type_extract_type(&output); let output = quote!( std::result::Result<#output_ty, String> ); // Every type that is using the `Block` generic parameter, we need to encode/decode, // to make it compatible between the runtime/node. let conversions = params.iter().filter(|v| type_is_using_block(&v.1)).map(|(n, t, _)| { let name_str = format!("Could not convert parameter `{}` between node and runtime:", quote!(#n)); quote!( let #n: #t = convert_between_block_types(&#n, #name_str)?; ) }); // Same as for the input types, we need to check if we also need to convert the output, // before returning it. 
let output_conversion = if return_type_is_using_block(&fn_.output) { quote!(convert_between_block_types( &res, "Could not convert return value from runtime to node!" )) } else { quote!(Ok(res)) }; let input_names = params.iter().map(|v| &v.0); // If the type is using the block generic type, we will encode/decode it to make it // compatible. To ensure that we forward it by ref/value, we use the value given by the // the user. Otherwise if it is not using the block, we don't need to add anything. let input_borrows = params.iter().map(|v| if type_is_using_block(&v.1) { v.2.clone() } else { None }); // Replace all `Block` with `NodeBlock`, add `'a` lifetime to references and collect // all the function inputs. let fn_inputs = fn_.inputs.iter().map(|v| fn_arg_replace_block_with_node_block(v.clone())).map(|v| { match v { FnArg::Typed(ref arg) => { let mut arg = arg.clone(); if let Type::Reference(ref mut r) = *arg.ty { r.lifetime = Some(parse_quote!( 'a )); } FnArg::Typed(arg) }, r => r, } }); let (impl_generics, ty_generics, where_clause) = decl.generics.split_for_impl(); // We need to parse them again, to get an easy access to the actual parameters. let impl_generics: Generics = parse_quote!( #impl_generics ); let impl_generics_params = impl_generics.params.iter().map(|p| { match p { GenericParam::Type(ref ty) => { let mut ty = ty.clone(); ty.bounds.push(parse_quote!( 'a )); GenericParam::Type(ty) }, // We should not see anything different than type params here. r => r.clone(), } }); // Generate the generator function result.push(quote!( #[cfg(any(feature = "std", test))] pub fn #fn_name< 'a, ApiImpl: #trait_ #ty_generics, NodeBlock: #crate_::BlockT #(, #impl_generics_params)* >( #( #fn_inputs ),* ) -> impl FnOnce() -> #output + 'a #where_clause { move || { #( #conversions )* let res = ApiImpl::#trait_fn_name(#( #input_borrows #input_names ),*); #output_conversion } } )); } Ok(quote!( #( #result )* )) } /// Try to parse the given `Attribute` as `renamed` attribute. 
fn parse_renamed_attribute(renamed: &Attribute) -> Result<(String, u32)> { let meta = renamed.parse_meta()?; let err = Err(Error::new( meta.span(), &format!( "Unexpected `{renamed}` attribute. The supported format is `{renamed}(\"old_name\", version_it_was_renamed)`", renamed = RENAMED_ATTRIBUTE, ) ) ); match meta { Meta::List(list) => if list.nested.len() > 2 && list.nested.is_empty() { err } else { let mut itr = list.nested.iter(); let old_name = match itr.next() { Some(NestedMeta::Lit(Lit::Str(i))) => i.value(), _ => return err, }; let version = match itr.next() { Some(NestedMeta::Lit(Lit::Int(i))) => i.base10_parse()?, _ => return err, }; Ok((old_name, version)) }, _ => err, } } /// Generate the functions that call the api at a given block for a given trait method. fn generate_call_api_at_calls(decl: &ItemTrait) -> Result<TokenStream> { let fns = decl.items.iter().filter_map(|i| match i { TraitItem::Method(ref m) => Some((&m.attrs, &m.sig)), _ => None, }); let mut result = Vec::new(); let crate_ = generate_crate_access(HIDDEN_INCLUDES_ID); // Generate a native call generator for each function of the given trait. for (attrs, fn_) in fns { let trait_name = &decl.ident; let trait_fn_name = prefix_function_with_trait(&trait_name, &fn_.ident); let fn_name = generate_call_api_at_fn_name(&fn_.ident); let attrs = remove_supported_attributes(&mut attrs.clone()); if attrs.contains_key(RENAMED_ATTRIBUTE) && attrs.contains_key(CHANGED_IN_ATTRIBUTE) { return Err(Error::new( fn_.span(), format!( "`{}` and `{}` are not supported at once.", RENAMED_ATTRIBUTE, CHANGED_IN_ATTRIBUTE ), )) } // We do not need to generate this function for a method that signature was changed. 
if attrs.contains_key(CHANGED_IN_ATTRIBUTE) { continue } let skip_initialize_block = attrs.contains_key(SKIP_INITIALIZE_BLOCK_ATTRIBUTE); let update_initialized_block = if attrs.contains_key(INITIALIZE_BLOCK_ATTRIBUTE) { quote!(|| *initialized_block.borrow_mut() = Some(*at)) } else { quote!(|| ()) }; // Parse the renamed attributes. let mut renames = Vec::new(); if let Some((_, a)) = attrs.iter().find(|a| a.0 == &RENAMED_ATTRIBUTE) { let (old_name, version) = parse_renamed_attribute(a)?; renames.push((version, prefix_function_with_trait(&trait_name, &old_name))); } renames.sort_by(|l, r| r.cmp(l)); let (versions, old_names) = renames.into_iter().fold( (Vec::new(), Vec::new()), |(mut versions, mut old_names), (version, old_name)| { versions.push(version); old_names.push(old_name); (versions, old_names) }, ); // Generate the generator function result.push(quote!( #[cfg(any(feature = "std", test))] pub fn #fn_name< R: #crate_::Encode + #crate_::Decode + PartialEq, NC: FnOnce() -> std::result::Result<R, String> + std::panic::UnwindSafe, Block: #crate_::BlockT, T: #crate_::CallApiAt<Block>, C: #crate_::Core<Block, Error = T::Error>, >( call_runtime_at: &T, core_api: &C, at: &#crate_::BlockId<Block>, args: Vec<u8>, changes: &std::cell::RefCell<#crate_::OverlayedChanges>, offchain_changes: &std::cell::RefCell<#crate_::OffchainOverlayedChanges>, storage_transaction_cache: &std::cell::RefCell< #crate_::StorageTransactionCache<Block, T::StateBackend> >, initialized_block: &std::cell::RefCell<Option<#crate_::BlockId<Block>>>, native_call: Option<NC>, context: #crate_::ExecutionContext, recorder: &Option<#crate_::ProofRecorder<Block>>, ) -> std::result::Result<#crate_::NativeOrEncoded<R>, T::Error> { let version = call_runtime_at.runtime_version_at(at)?; use #crate_::InitializeBlock; let initialize_block = if #skip_initialize_block { InitializeBlock::Skip } else { InitializeBlock::Do(&initialized_block) }; let update_initialized_block = #update_initialized_block; #( // Check 
if we need to call the function by an old name. if version.apis.iter().any(|(s, v)| { s == &ID && *v < #versions }) { let params = #crate_::CallApiAtParams::<_, _, fn() -> _, _> { core_api, at, function: #old_names, native_call: None, arguments: args, overlayed_changes: changes, offchain_changes, storage_transaction_cache, initialize_block, context, recorder, }; let ret = call_runtime_at.call_api_at(params)?; update_initialized_block(); return Ok(ret) } )* let params = #crate_::CallApiAtParams { core_api, at, function: #trait_fn_name, native_call, arguments: args, overlayed_changes: changes, offchain_changes, storage_transaction_cache, initialize_block, context, recorder, }; let ret = call_runtime_at.call_api_at(params)?; update_initialized_block(); Ok(ret) } )); } Ok(quote!( #( #result )* )) } /// Generate the declaration of the trait for the runtime. fn generate_runtime_decls(decls: &[ItemTrait]) -> Result<TokenStream> { let mut result = Vec::new(); for decl in decls { let mut decl = decl.clone(); extend_generics_with_block(&mut decl.generics); let mod_name = generate_runtime_mod_name_for_trait(&decl.ident); let found_attributes = remove_supported_attributes(&mut decl.attrs); let api_version = get_api_version(&found_attributes).map(|v| generate_runtime_api_version(v as u32))?; let id = generate_runtime_api_id(&decl.ident.to_string()); let call_api_at_calls = generate_call_api_at_calls(&decl)?; // Remove methods that have the `changed_in` attribute as they are not required for the // runtime anymore. decl.items = decl .items .iter_mut() .filter_map(|i| match i { TraitItem::Method(ref mut method) => { if remove_supported_attributes(&mut method.attrs) .contains_key(CHANGED_IN_ATTRIBUTE) { None } else { // Make sure we replace all the wild card parameter names. 
replace_wild_card_parameter_names(&mut method.sig); Some(TraitItem::Method(method.clone())) } }, r => Some(r.clone()), }) .collect(); let native_call_generators = generate_native_call_generators(&decl)?; result.push(quote!( #[doc(hidden)] #[allow(dead_code)] #[allow(deprecated)] pub mod #mod_name { use super::*; #decl pub #api_version pub #id #native_call_generators #call_api_at_calls } )); } Ok(quote!( #( #result )* )) } /// Modify the given runtime api declaration to be usable on the client side. struct ToClientSideDecl<'a> { block_id: &'a TokenStream, crate_: &'a TokenStream, found_attributes: &'a mut HashMap<&'static str, Attribute>, /// Any error that we found while converting this declaration. errors: &'a mut Vec<TokenStream>, trait_: &'a Ident, } impl<'a> ToClientSideDecl<'a> { fn fold_item_trait_items(&mut self, items: Vec<TraitItem>) -> Vec<TraitItem> { let mut result = Vec::new(); items.into_iter().for_each(|i| match i { TraitItem::Method(method) => { let (fn_decl, fn_impl, fn_decl_ctx) = self.fold_trait_item_method(method); result.push(fn_decl.into()); result.push(fn_decl_ctx.into()); if let Some(fn_impl) = fn_impl { result.push(fn_impl.into()); } }, r => result.push(r), }); result } fn fold_trait_item_method( &mut self, method: TraitItemMethod, ) -> (TraitItemMethod, Option<TraitItemMethod>, TraitItemMethod) { let crate_ = self.crate_; let context = quote!( #crate_::ExecutionContext::OffchainCall(None) ); let fn_impl = self.create_method_runtime_api_impl(method.clone()); let fn_decl = self.create_method_decl(method.clone(), context); let fn_decl_ctx = self.create_method_decl_with_context(method); (fn_decl, fn_impl, fn_decl_ctx) } fn create_method_decl_with_context(&mut self, method: TraitItemMethod) -> TraitItemMethod { let crate_ = self.crate_; let context_arg: syn::FnArg = parse_quote!( context: #crate_::ExecutionContext ); let mut fn_decl_ctx = self.create_method_decl(method, quote!(context)); fn_decl_ctx.sig.ident = 
Ident::new(&format!("{}_with_context", &fn_decl_ctx.sig.ident), Span::call_site()); fn_decl_ctx.sig.inputs.insert(2, context_arg); fn_decl_ctx } /// Takes the given method and creates a `method_runtime_api_impl` method that will be /// implemented in the runtime for the client side. fn create_method_runtime_api_impl( &mut self, mut method: TraitItemMethod, ) -> Option<TraitItemMethod> { if remove_supported_attributes(&mut method.attrs).contains_key(CHANGED_IN_ATTRIBUTE) { return None } let fn_sig = &method.sig; let ret_type = return_type_extract_type(&fn_sig.output); // Get types and if the value is borrowed from all parameters. // If there is an error, we push it as the block to the user. let param_types = match extract_parameter_names_types_and_borrows(fn_sig, AllowSelfRefInParameters::No) { Ok(res) => res .into_iter() .map(|v| { let ty = v.1; let borrow = v.2; quote!( #borrow #ty ) }) .collect::<Vec<_>>(), Err(e) => { self.errors.push(e.to_compile_error()); Vec::new() }, }; let name = generate_method_runtime_api_impl_name(&self.trait_, &method.sig.ident); let block_id = self.block_id; let crate_ = self.crate_; Some(parse_quote! { #[doc(hidden)] fn #name( &self, at: &#block_id, context: #crate_::ExecutionContext, params: Option<( #( #param_types ),* )>, params_encoded: Vec<u8>, ) -> std::result::Result<#crate_::NativeOrEncoded<#ret_type>, Self::Error>; }) } /// Takes the method declared by the user and creates the declaration we require for the runtime /// api client side. This method will call by default the `method_runtime_api_impl` for doing /// the actual call into the runtime. 
fn create_method_decl( &mut self, mut method: TraitItemMethod, context: TokenStream, ) -> TraitItemMethod { let params = match extract_parameter_names_types_and_borrows( &method.sig, AllowSelfRefInParameters::No, ) { Ok(res) => res.into_iter().map(|v| v.0).collect::<Vec<_>>(), Err(e) => { self.errors.push(e.to_compile_error()); Vec::new() }, }; let params2 = params.clone(); let ret_type = return_type_extract_type(&method.sig.output); fold_fn_decl_for_client_side(&mut method.sig, &self.block_id); let name_impl = generate_method_runtime_api_impl_name(&self.trait_, &method.sig.ident); let crate_ = self.crate_; let found_attributes = remove_supported_attributes(&mut method.attrs); // If the method has a `changed_in` attribute, we need to alter the method name to // `method_before_version_VERSION`. let (native_handling, param_tuple) = match get_changed_in(&found_attributes) { Ok(Some(version)) => { // Make sure that the `changed_in` version is at least the current `api_version`. if get_api_version(&self.found_attributes).ok() < Some(version) { self.errors.push( Error::new( method.span(), "`changed_in` version can not be greater than the `api_version`", ) .to_compile_error(), ); } let ident = Ident::new( &format!("{}_before_version_{}", method.sig.ident, version), method.sig.ident.span(), ); method.sig.ident = ident; method.attrs.push(parse_quote!( #[deprecated] )); let panic = format!("Calling `{}` should not return a native value!", method.sig.ident); (quote!(panic!(#panic)), quote!(None)) }, Ok(None) => (quote!(Ok(n)), quote!( Some(( #( #params2 ),* )) )), Err(e) => { self.errors.push(e.to_compile_error()); (quote!(unimplemented!()), quote!(None)) }, }; let function_name = method.sig.ident.to_string(); // Generate the default implementation that calls the `method_runtime_api_impl` method. method.default = Some(parse_quote! 
{ { let runtime_api_impl_params_encoded = #crate_::Encode::encode(&( #( &#params ),* )); self.#name_impl( __runtime_api_at_param__, #context, #param_tuple, runtime_api_impl_params_encoded, ).and_then(|r| match r { #crate_::NativeOrEncoded::Native(n) => { #native_handling }, #crate_::NativeOrEncoded::Encoded(r) => { <#ret_type as #crate_::Decode>::decode(&mut &r[..]) .map_err(|err| format!( "Failed to decode result of `{}`: {}", #function_name, err.what(), ).into() ) } } ) } }); method } } impl<'a> Fold for ToClientSideDecl<'a> { fn fold_item_trait(&mut self, mut input: ItemTrait) -> ItemTrait { extend_generics_with_block(&mut input.generics); *self.found_attributes = remove_supported_attributes(&mut input.attrs); // Check if this is the `Core` runtime api trait. let is_core_trait = self.found_attributes.contains_key(CORE_TRAIT_ATTRIBUTE); let block_ident = Ident::new(BLOCK_GENERIC_IDENT, Span::call_site()); if is_core_trait { // Add all the supertraits we want to have for `Core`. let crate_ = &self.crate_; input.supertraits = parse_quote!( 'static + Send + Sync + #crate_::ApiErrorExt ); } else { // Add the `Core` runtime api as super trait. let crate_ = &self.crate_; input.supertraits.push(parse_quote!( #crate_::Core<#block_ident> )); } // The client side trait is only required when compiling with the feature `std` or `test`. input.attrs.push(parse_quote!( #[cfg(any(feature = "std", test))] )); input.items = self.fold_item_trait_items(input.items); fold::fold_item_trait(self, input) } } /// Parse the given attribute as `API_VERSION_ATTRIBUTE`. fn parse_runtime_api_version(version: &Attribute) -> Result<u64> { let meta = version.parse_meta()?; let err = Err(Error::new( meta.span(), &format!( "Unexpected `{api_version}` attribute. 
The supported format is `{api_version}(1)`", api_version = API_VERSION_ATTRIBUTE ), )); match meta { Meta::List(list) => if list.nested.len() != 1 { err } else if let Some(NestedMeta::Lit(Lit::Int(i))) = list.nested.first() { i.base10_parse() } else { err }, _ => err, } } /// Generates the identifier as const variable for the given `trait_name` /// by hashing the `trait_name`. fn generate_runtime_api_id(trait_name: &str) -> TokenStream { let mut res = [0; 8]; res.copy_from_slice(blake2_rfc::blake2b::blake2b(8, &[], trait_name.as_bytes()).as_bytes()); quote!( const ID: [u8; 8] = [ #( #res ),* ]; ) } /// Generates the const variable that holds the runtime api version. fn generate_runtime_api_version(version: u32) -> TokenStream { quote!( const VERSION: u32 = #version; ) } /// Generates the implementation of `RuntimeApiInfo` for the given trait. fn generate_runtime_info_impl(trait_: &ItemTrait, version: u64) -> TokenStream { let trait_name = &trait_.ident; let crate_ = generate_crate_access(HIDDEN_INCLUDES_ID); let id = generate_runtime_api_id(&trait_name.to_string()); let version = generate_runtime_api_version(version as u32); let impl_generics = trait_ .generics .type_params() .map(|t| { let ident = &t.ident; let colon_token = &t.colon_token; let bounds = &t.bounds; quote! { #ident #colon_token #bounds } }) .chain(std::iter::once(quote! { __Sr_Api_Error__ })); let ty_generics = trait_ .generics .type_params() .map(|t| { let ident = &t.ident; quote! { #ident } }) .chain(std::iter::once(quote! { Error = __Sr_Api_Error__ })); quote!( #[cfg(any(feature = "std", test))] impl < #( #impl_generics, )* > #crate_::RuntimeApiInfo for #trait_name < #( #ty_generics, )* > { #id #version } ) } /// Get changed in version from the user given attribute or `Ok(None)`, if no attribute was given. 
fn get_changed_in(found_attributes: &HashMap<&'static str, Attribute>) -> Result<Option<u64>> { found_attributes .get(&CHANGED_IN_ATTRIBUTE) .map(|v| parse_runtime_api_version(v).map(Some)) .unwrap_or(Ok(None)) } /// Get the api version from the user given attribute or `Ok(1)`, if no attribute was given. fn get_api_version(found_attributes: &HashMap<&'static str, Attribute>) -> Result<u64> { found_attributes.get(&API_VERSION_ATTRIBUTE).map(parse_runtime_api_version).unwrap_or(Ok(1)) } /// Generate the declaration of the trait for the client side. fn generate_client_side_decls(decls: &[ItemTrait]) -> Result<TokenStream> { let mut result = Vec::new(); for decl in decls { let decl = decl.clone(); let crate_ = generate_crate_access(HIDDEN_INCLUDES_ID); let block_id = quote!( #crate_::BlockId<Block> ); let mut found_attributes = HashMap::new(); let mut errors = Vec::new(); let trait_ = decl.ident.clone(); let decl = { let mut to_client_side = ToClientSideDecl { crate_: &crate_, block_id: &block_id, found_attributes: &mut found_attributes, errors: &mut errors, trait_: &trait_, }; to_client_side.fold_item_trait(decl) }; let api_version = get_api_version(&found_attributes); let runtime_info = api_version.map(|v| generate_runtime_info_impl(&decl, v))?; result.push(quote!( #decl #runtime_info #( #errors )* )); } Ok(quote!( #( #result )* )) } /// Checks that a trait declaration is in the format we expect. struct CheckTraitDecl { errors: Vec<Error>, } impl CheckTraitDecl { /// Check the given trait. /// /// All errors will be collected in `self.errors`. fn check(&mut self, trait_: &ItemTrait) { self.check_method_declarations(trait_.items.iter().filter_map(|i| match i { TraitItem::Method(method) => Some(method), _ => None, })); visit::visit_item_trait(self, trait_); } /// Check that the given method declarations are correct. /// /// Any error is stored in `self.errors`. 
fn check_method_declarations<'a>( &mut self, methods: impl Iterator<Item = &'a TraitItemMethod>, ) { let mut method_to_signature_changed = HashMap::<Ident, Vec<Option<u64>>>::new(); methods.into_iter().for_each(|method| { let attributes = remove_supported_attributes(&mut method.attrs.clone()); let changed_in = match get_changed_in(&attributes) { Ok(r) => r, Err(e) => { self.errors.push(e); return }, }; method_to_signature_changed .entry(method.sig.ident.clone()) .or_default() .push(changed_in); }); method_to_signature_changed.into_iter().for_each(|(f, changed)| { // If `changed_in` is `None`, it means it is the current "default" method that calls // into the latest implementation. if changed.iter().filter(|c| c.is_none()).count() == 0 { self.errors.push(Error::new( f.span(), "There is no 'default' method with this name (without `changed_in` attribute).\n\ The 'default' method is used to call into the latest implementation.", )); } }); } } impl<'ast> Visit<'ast> for CheckTraitDecl { fn visit_fn_arg(&mut self, input: &'ast FnArg) { if let FnArg::Receiver(_) = input { self.errors.push(Error::new(input.span(), "`self` as argument not supported.")) } visit::visit_fn_arg(self, input); } fn visit_generic_param(&mut self, input: &'ast GenericParam) { match input { GenericParam::Type(ty) if ty.ident == BLOCK_GENERIC_IDENT => self.errors.push(Error::new( input.span(), "`Block: BlockT` generic parameter will be added automatically by the \ `decl_runtime_apis!` macro!", )), _ => {}, } visit::visit_generic_param(self, input); } fn visit_trait_bound(&mut self, input: &'ast TraitBound) { if let Some(last_ident) = input.path.segments.last().map(|v| &v.ident) { if last_ident == "BlockT" || last_ident == BLOCK_GENERIC_IDENT { self.errors.push(Error::new( input.span(), "`Block: BlockT` generic parameter will be added automatically by the \ `decl_runtime_apis!` macro! 
If you try to use a different trait than the \ substrate `Block` trait, please rename it locally.", )) } } visit::visit_trait_bound(self, input) } } /// Check that the trait declarations are in the format we expect. fn check_trait_decls(decls: &[ItemTrait]) -> Result<()> { let mut checker = CheckTraitDecl { errors: Vec::new() }; decls.iter().for_each(|decl| checker.check(decl)); if let Some(err) = checker.errors.pop() { Err(checker.errors.into_iter().fold(err, |mut err, other| { err.combine(other); err })) } else { Ok(()) } } /// The implementation of the `decl_runtime_apis!` macro. pub fn decl_runtime_apis_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStream { // Parse all trait declarations let RuntimeApiDecls { decls: api_decls } = parse_macro_input!(input as RuntimeApiDecls); decl_runtime_apis_impl_inner(&api_decls).unwrap_or_else(|e| e.to_compile_error()).into() } fn decl_runtime_apis_impl_inner(api_decls: &[ItemTrait]) -> Result<TokenStream> { check_trait_decls(&api_decls)?; let hidden_includes = generate_hidden_includes(HIDDEN_INCLUDES_ID); let runtime_decls = generate_runtime_decls(api_decls)?; let client_side_decls = generate_client_side_decls(api_decls)?; Ok(quote!( #hidden_includes #runtime_decls #client_side_decls )) }
//! Types which are needed to use the input system independent from the root `riddle` crate. mod platform_system; pub use platform_system::*;
use ::Asn1DerError; /// A wrapper around a DER value #[derive(Debug, Clone, Eq, PartialEq)] pub struct DerValue { /// The value pub data: Vec<u8> } impl DerValue { /// DER-deserializes the data from `source` pub fn deserialize<'a>(mut source: impl Iterator<Item = &'a u8>, len: impl Into<usize>) -> Result<Self, Asn1DerError> { // Create buffer and fill it with `len` bytes let mut data_buf = Vec::new(); for _ in 0..len.into() { data_buf.push(*source.next().ok_or(Asn1DerError::LengthMismatch)?); } Ok(data_buf.into()) } /// The length of the DER-serialized representation of `self` pub fn serialized_len(&self) -> usize { self.data.len() } /// DER-serializes `self` into `buf` and returns the amount of bytes written pub fn serialize<'a>(&self, mut buf: impl Iterator<Item = &'a mut u8>) -> Result<usize, Asn1DerError> { for b in self.data.iter() { *buf.next().ok_or(Asn1DerError::LengthMismatch)? = *b } Ok(self.data.len()) } /// Efficiently computes the length of the DER-serialized representation of `payload_len` bytes pub fn compute_serialized_len(payload_len: usize) -> usize { payload_len } } impl From<Vec<u8>> for DerValue { fn from(data: Vec<u8>) -> Self { DerValue { data } } } impl From<DerValue> for Vec<u8> { fn from(value: DerValue) -> Self { value.data } }
// no restriction on the order of function defs
fn main() {
    call_fn_later(100);
}

/// Returns `true` if `lhs` is evenly divisible by `rhs`.
///
/// Division by zero is defined as "not divisible" instead of panicking.
fn is_divisible_by(lhs: u32, rhs: u32) -> bool {
    if rhs == 0 {
        return false;
    }

    // expression so return is not needed
    lhs % rhs == 0
}

/// FizzBuzz-style printer: reports divisibility by 15, 3 or 5, otherwise
/// prints the number itself. The explicit `-> ()` return type is optional.
fn void_fn(n: u32) -> () {
    if is_divisible_by(n, 15) {
        println!("div by 15");
    } else if is_divisible_by(n, 3) {
        println!("div by 3");
    } else if is_divisible_by(n, 5) {
        println!("div by 5");
    } else {
        println!("{}", n);
    }
}

// void can also be used without `-> ()`
fn call_fn_later(n: u32) {
    // `1..=n` replaces the former `1..n + 1`, which overflows (and panics in
    // debug builds) when `n == u32::MAX`. Same values otherwise.
    for n in 1..=n {
        void_fn(n);
    }
}
#![feature(proc_macro_hygiene, decl_macro)]

extern crate postgres;
extern crate postgres_types;
#[macro_use]
extern crate rocket;
#[macro_use]
extern crate rocket_contrib;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;

use std::fs;

use postgres::{Client, Error, NoTls};

mod models;
mod requests;

/// Application entry point: loads `.env`, verifies database connectivity,
/// initializes the schema and launches the Rocket HTTP server.
///
/// Returns an `Err` if the database connection or schema initialization
/// fails; panics if the init SQL file cannot be read.
fn main() -> Result<(), Error> {
    dotenv::dotenv().ok();

    // `?` propagates a failed connection directly, replacing the former
    // `match … Err(e) => return Err(e)` boilerplate.
    let mut client = validate_db_connection()?;
    client.batch_execute(
        &fs::read_to_string("src/sql/init_tables.sql")
            .expect("Failed to read src/sql/init_tables.sql"),
    )?;

    rocket::ignite()
        .mount("/test", routes![requests::util::test_connection])
        .mount(
            "/books",
            routes![
                requests::book::all,
                requests::book::by_id,
                requests::book::by_published,
                requests::book::by_author_id,
                requests::book::add,
                requests::book::update,
                requests::book::delete,
                requests::book::delete_by_author
            ],
        )
        .mount(
            "/authors",
            routes![
                requests::author::all,
                requests::author::by_id,
                requests::author::add,
                requests::author::update,
                requests::author::delete
            ],
        )
        .launch();

    Ok(())
}

/// Builds a connection string from the `DATABASE_*` environment variables
/// and attempts to connect (without TLS). Panics if any of the variables is
/// missing.
fn validate_db_connection() -> Result<Client, Error> {
    Client::connect(
        &format!(
            "{}://{}:{}@{}/{}",
            dotenv::var("DATABASE_TYPE").unwrap(),
            dotenv::var("DATABASE_USERNAME").unwrap(),
            dotenv::var("DATABASE_PASSWORD").unwrap(),
            dotenv::var("DATABASE_HOST").unwrap(),
            dotenv::var("DATABASE_NAME").unwrap()
        ),
        NoTls,
    )
}
pub mod sqlitedb;
pub mod yamldb;

use crate::Target;
use std::error::Error;

/// Common interface implemented by every storage backend (see the
/// `sqlitedb` and `yamldb` submodules), parameterized over the backend's
/// concrete error type `T`.
pub trait Db<T: Error> {
    /// Prepares the backing store and returns a count on success.
    // NOTE(review): the meaning of the returned `usize` is backend-specific
    // — confirm against the implementations.
    fn init(&self) -> Result<usize, T>;
    /// Loads all stored `Target`s from the backing store.
    fn targets(&self) -> Result<Vec<Target>, T>;
}
use crate::MyApp;
use seed::{*, prelude::*};
// NOTE(review): `ActionComponent` appears unused in this file — verify
// before removing the import.
use crate::traits::component_trait::{ActionComponent, Component};
use crate::messages::{Msg, StateChangeMessage};
use super::todo_item_component::TodoItemComponent;
use super::add_item_component::AddItemComponent;

//The container for the todo items
pub struct ToDoContainerComponent;

impl Component<MyApp, Msg> for ToDoContainerComponent {
    /// Renders the todo list wrapper: one `TodoItemComponent` per todo,
    /// the add-item form, and — only while the add form is hidden
    /// (`!model.show_add`) — an extra "+" button that fires
    /// `Msg::StateChange(ShowAddTodo)` on click.
    fn view(model: &MyApp) -> Node<Msg> {
        if !model.show_add {
            div![
                id!("todo_wrapper"),
                // One node per todo item.
                div![
                    model.todos.iter().map(|todo| {
                        TodoItemComponent::view_with_props(&todo)
                    })
                ],
                AddItemComponent::view(model),
                // "+" button that reveals the add-item form.
                div![
                    simple_ev(Ev::Click, Msg::StateChange(StateChangeMessage::ShowAddTodo)),
                    id!("btn_wrapper"),
                    i![
                        class!("fas fa-plus-circle")
                    ]
                ]
            ]
        } else {
            // Add form is already visible: render the same wrapper without
            // the "+" button.
            div![
                id!("todo_wrapper"),
                div![
                    model.todos.iter().map(|todo| {
                        TodoItemComponent::view_with_props(&todo)
                    })
                ],
                AddItemComponent::view(model),
            ]
        }
    }
}
use std::{
    io,
    pin::Pin,
    task::{Context, Poll},
};

use futures_util::ready;

use crate::{
    io::Stream as InnerStream,
};

/// RAII guard that truncates `buf` back to `len` on drop.
///
/// While reading, `buf.len()` is temporarily inflated past the initialized
/// region; if we exit early (error, `Poll::Pending` via `ready!`, or a
/// panic), the drop restores a length covering only bytes actually written,
/// so callers never observe uninitialized spare capacity.
struct Guard<'a> {
    buf: &'a mut Vec<u8>,
    len: usize,
}

impl Drop for Guard<'_> {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `self.len` only grows when `poll_read` reports that
            // many bytes were written, so `..self.len` is initialized.
            self.buf.set_len(self.len);
        }
    }
}

/// Polls `rd` repeatedly, appending everything read into `buf`, until EOF
/// (`Ok(0)`) or an error. On `Ready`, yields the number of bytes appended
/// since this call started; on `Pending`, the guard trims `buf` back to
/// the initialized length before returning.
///
/// This mirrors the historical `std::io::read_to_end` buffering scheme.
/// NOTE(review): the spare capacity handed to `poll_read` is uninitialized;
/// this is sound only if the stream implementation never *reads* from the
/// provided buffer — confirm `InnerStream` upholds that contract.
pub(crate) fn read_to_end(
    mut rd: Pin<&mut InnerStream>,
    cx: &mut Context<'_>,
    buf: &mut Vec<u8>,
) -> Poll<io::Result<usize>> {
    let start_len = buf.len();
    let mut g = Guard {
        len: buf.len(),
        buf,
    };
    let ret;
    loop {
        // Buffer full: grow by at least 32 bytes and expose the new
        // (uninitialized) capacity to the reader.
        if g.len == g.buf.len() {
            unsafe {
                g.buf.reserve(32);
                let capacity = g.buf.capacity();
                g.buf.set_len(capacity);
            }
        }
        match ready!(rd.as_mut().poll_read(cx, &mut g.buf[g.len..])) {
            Ok(0) => {
                // EOF: report how many bytes this call appended.
                ret = Poll::Ready(Ok(g.len - start_len));
                break;
            }
            Ok(n) => g.len += n,
            Err(e) => {
                ret = Poll::Ready(Err(e));
                break;
            }
        }
    }
    ret
}
//! Borrows a slice of bytes of the input document.
//!
//! Choose this implementation if:
//!
//! 1. You already have the data loaded in-memory and it is properly aligned.
//!
//! ## Performance characteristics
//!
//! This type of input is the fastest to process for the engine,
//! since there is no additional overhead from loading anything to memory.
use log::debug;

use super::*;
use crate::{query::JsonString, result::InputRecorder};

/// Input wrapping a borrowed [`[u8]`] buffer.
pub struct BorrowedBytes<'a> {
    bytes: &'a [u8],
    // Padded copy of the final block, so iteration can always hand out
    // exactly-N-sized blocks even at the end of input.
    last_block: LastBlock,
}

/// Iterator over blocks of [`BorrowedBytes`] of size exactly `N`.
pub struct BorrowedBytesBlockIterator<'a, 'r, const N: usize, R> {
    input: &'a [u8],
    last_block: &'a LastBlock,
    // Byte offset of the next block to yield.
    idx: usize,
    recorder: &'r R,
}

impl<'a> BorrowedBytes<'a> {
    /// Create a new instance of [`BorrowedBytes`] wrapping the given buffer.
    ///
    /// # Safety
    /// The buffer must satisfy all invariants of [`BorrowedBytes`],
    /// since it is not copied or modified. It must:
    /// - have length divisible by [`MAX_BLOCK_SIZE`] (the function checks this);
    /// - be aligned to [`MAX_BLOCK_SIZE`].
    ///
    /// The latter condition cannot be reliably checked.
    /// Violating it may result in memory errors where the engine relies
    /// on proper alignment.
    ///
    /// # Panics
    ///
    /// If `bytes.len()` is not divisible by [`MAX_BLOCK_SIZE`].
    #[must_use]
    #[inline(always)]
    pub unsafe fn new(bytes: &'a [u8]) -> Self {
        assert_eq!(bytes.len() % MAX_BLOCK_SIZE, 0);
        let last_block = in_slice::pad_last_block(bytes);
        Self { bytes, last_block }
    }

    /// Get a reference to the bytes as a slice.
    #[must_use]
    #[inline(always)]
    pub fn as_slice(&self) -> &[u8] {
        self.bytes
    }

    /// Copy the bytes to an [`OwnedBytes`] instance.
    #[must_use]
    #[inline(always)]
    pub fn to_owned(&self) -> OwnedBytes {
        OwnedBytes::from(self)
    }
}

impl<'a> AsRef<[u8]> for BorrowedBytes<'a> {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.bytes
    }
}

impl<'a, 'r, const N: usize, R> BorrowedBytesBlockIterator<'a, 'r, N, R>
where
    R: InputRecorder<&'a [u8]>,
{
    // Starts iteration at offset 0 over `bytes`, using `last_block` for the
    // padded tail.
    #[must_use]
    #[inline(always)]
    pub(super) fn new(bytes: &'a [u8], last_block: &'a LastBlock, recorder: &'r R) -> Self {
        Self {
            input: bytes,
            idx: 0,
            last_block,
            recorder,
        }
    }
}

impl<'a> Input for BorrowedBytes<'a> {
    type BlockIterator<'b, 'r, const N: usize, R> = BorrowedBytesBlockIterator<'b, 'r, N, R>
    where Self: 'b, R: InputRecorder<&'b [u8]> + 'r;

    type Block<'b, const N: usize> = &'b [u8] where Self: 'b;

    #[inline(always)]
    fn iter_blocks<'b, 'r, R, const N: usize>(&'b self, recorder: &'r R) -> Self::BlockIterator<'b, 'r, N, R>
    where
        R: InputRecorder<&'b [u8]>,
    {
        Self::BlockIterator {
            input: self.bytes,
            idx: 0,
            last_block: &self.last_block,
            recorder,
        }
    }

    // The seek/match operations all delegate to the shared `in_slice`
    // helpers over the raw byte slice.
    #[inline]
    fn seek_backward(&self, from: usize, needle: u8) -> Option<usize> {
        in_slice::seek_backward(self.bytes, from, needle)
    }

    #[inline]
    fn seek_forward<const N: usize>(&self, from: usize, needles: [u8; N]) -> Result<Option<(usize, u8)>, InputError> {
        Ok(in_slice::seek_forward(self.as_slice(), from, needles))
    }

    #[inline]
    fn seek_non_whitespace_forward(&self, from: usize) -> Result<Option<(usize, u8)>, InputError> {
        Ok(in_slice::seek_non_whitespace_forward(self.bytes, from))
    }

    #[inline]
    fn seek_non_whitespace_backward(&self, from: usize) -> Option<(usize, u8)> {
        in_slice::seek_non_whitespace_backward(self.bytes, from)
    }

    #[inline]
    fn is_member_match(&self, from: usize, to: usize, member: &JsonString) -> bool {
        in_slice::is_member_match(self.bytes, from, to, member)
    }
}

impl<'a, 'r, const N: usize, R> FallibleIterator for BorrowedBytesBlockIterator<'a, 'r, N, R>
where
    R: InputRecorder<&'a [u8]>,
{
    type Item = &'a [u8];
    type Error = InputError;

    /// Yields the next `N`-sized block, or `None` past the end of input.
    /// Blocks overlapping the padded tail are served from `last_block`
    /// instead of the raw input; every yielded block is reported to the
    /// recorder first.
    #[inline]
    fn next(&mut self) -> Result<Option<Self::Item>, Self::Error> {
        debug!("next!");
        if self.idx >= self.input.len() {
            Ok(None)
        } else if self.idx >= self.last_block.absolute_start {
            // Translate the absolute offset into an offset within the
            // padded last block.
            let i = self.idx - self.last_block.absolute_start;
            self.idx += N;
            let block = &self.last_block.bytes[i..i + N];
            self.recorder.record_block_start(block);
            Ok(Some(block))
        } else {
            let block = &self.input[self.idx..self.idx + N];
            self.idx += N;
            self.recorder.record_block_start(block);
            Ok(Some(block))
        }
    }
}

impl<'a, 'r, const N: usize, R> InputBlockIterator<'a, N> for BorrowedBytesBlockIterator<'a, 'r, N, R>
where
    R: InputRecorder<&'a [u8]> + 'r,
{
    type Block = &'a [u8];

    /// Skip `count` whole blocks forward. Negative offsets are not supported.
    #[inline(always)]
    fn offset(&mut self, count: isize) {
        assert!(count >= 0);
        debug!("offsetting input iter by {count}");
        self.idx += count as usize * N;
    }

    #[inline(always)]
    fn get_offset(&self) -> usize {
        debug!("getting input iter {}", self.idx);
        self.idx
    }
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Player {
    First,
    Second,
}

impl Player {
    /// Advance to the other player in place.
    fn next_player(&mut self) {
        *self = match self {
            Player::First => Player::Second,
            Player::Second => Player::First,
        }
    }
}

/// A cell is either empty (`None`) or owned by a player.
pub type Cell = Option<Player>;
/// 3x3 board stored in row-major order (index = row * 3 + col).
pub type Board = [Cell; 9];

/// Tic-tac-toe game state: the board, whose turn it is, and the winner
/// (once one exists).
#[derive(Debug)]
pub struct GameState {
    board: Board,
    pub turn: Player,
    pub winner: Option<Player>,
}

// `Default` mirrors `new()` so the type works with `..Default::default()`
// and generic code (clippy: `new_without_default`).
impl Default for GameState {
    fn default() -> Self {
        Self::new()
    }
}

impl GameState {
    /// Fresh game: empty board, `First` to move, no winner.
    pub fn new() -> GameState {
        GameState {
            board: [None; 9],
            turn: Player::First,
            winner: None,
        }
    }

    /// Attempt to place the current player's mark at `idx` (row-major, 0..9).
    ///
    /// Returns `false` — changing nothing — if the cell is occupied or the
    /// game already has a winner. Otherwise places the mark, advances the
    /// turn, sets `winner` if the move completes a line, and returns `true`.
    ///
    /// # Panics
    /// Panics if `idx >= 9`.
    pub fn make_move(&mut self, idx: u32) -> bool {
        let idx = idx as usize;
        assert!(idx < 9);
        if self.board[idx].is_some() || self.winner.is_some() {
            return false;
        }
        self.board[idx] = Some(self.turn);
        self.turn.next_player();
        // All eight winning lines: three rows, three columns, two diagonals.
        // Hoisted to a `const` so the table is not rebuilt on every call.
        const LINES: [[usize; 3]; 8] = [
            [0, 1, 2],
            [3, 4, 5],
            [6, 7, 8],
            [0, 3, 6],
            [1, 4, 7],
            [2, 5, 8],
            [0, 4, 8],
            [2, 4, 6],
        ];
        for line in LINES.iter() {
            if let Some(player) = self.board[line[0]] {
                if line.iter().all(|&x| self.board[x] == Some(player)) {
                    self.winner = Some(player);
                    break;
                }
            }
        }
        true
    }

    /// Read-only view of the board.
    pub fn get_board(&self) -> &Board {
        &self.board
    }
}
/*
 * Slack Web API
 *
 * One way to interact with the Slack platform is its HTTP RPC-based Web API, a collection of methods requiring OAuth 2.0-based user, bot, or workspace tokens blessed with related OAuth scopes.
 *
 * The version of the OpenAPI document: 1.7.0
 *
 * Generated by: https://openapi-generator.tech
 */

// NOTE: generated code — prefer regenerating from the OpenAPI document over
// hand-editing this file.

/// Enterprise-grid user object as returned by the Slack Web API.
/// Field names map 1:1 onto the JSON keys via the `serde(rename)` attributes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ObjsEnterpriseUser {
    #[serde(rename = "enterprise_id")]
    pub enterprise_id: String,
    #[serde(rename = "enterprise_name")]
    pub enterprise_name: String,
    #[serde(rename = "id")]
    pub id: String,
    #[serde(rename = "is_admin")]
    pub is_admin: bool,
    #[serde(rename = "is_owner")]
    pub is_owner: bool,
    // IDs of the workspaces (teams) this user belongs to.
    #[serde(rename = "teams")]
    pub teams: Vec<String>,
}

impl ObjsEnterpriseUser {
    /// Field-for-field constructor.
    pub fn new(enterprise_id: String, enterprise_name: String, id: String, is_admin: bool, is_owner: bool, teams: Vec<String>) -> ObjsEnterpriseUser {
        ObjsEnterpriseUser {
            enterprise_id,
            enterprise_name,
            id,
            is_admin,
            is_owner,
            teams,
        }
    }
}
#[allow(non_snake_case)]
#[allow(dead_code)]
/// Demonstrates tuple construction and destructuring by printing a
/// hard-coded student record (name, age, roll number, class time).
pub fn Question5() {
    // Destructure the record tuple directly into named bindings.
    let (name, age, roll_number, class): (&str, i8, &str, &str) =
        ("Farhan Aziz", 24, "IOT051142", "6:45 to 9:45");

    println!("Student Name = {}", name);
    println!("Student Age = {}", age);
    println!("Student Roll Number = {}", roll_number);
    println!("Student Class = {}", class);
}
/*
 * Copyright Stalwart Labs Ltd. See the COPYING
 * file at the top-level directory of this distribution.
 *
 * Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 * https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 * <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
 * option. This file may not be copied, modified, or distributed
 * except according to those terms.
 */

// JMAP Session resource (RFC 8620 §2) plus per-capability metadata types and
// a small template parser for JMAP URL patterns like "{accountId}".

use crate::{
    email::{MailCapabilities, SubmissionCapabilities},
    URI,
};
use ahash::AHashMap;
use serde::{Deserialize, Serialize};

/// The JMAP Session object: server capabilities, the accounts the user can
/// access, and the endpoint URLs advertised by the server.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Session {
    #[serde(rename = "capabilities")]
    capabilities: AHashMap<String, Capabilities>,
    #[serde(rename = "accounts")]
    accounts: AHashMap<String, Account>,
    // Maps capability URI -> account id that is primary for it.
    #[serde(rename = "primaryAccounts")]
    primary_accounts: AHashMap<String, String>,
    #[serde(rename = "username")]
    username: String,
    #[serde(rename = "apiUrl")]
    api_url: String,
    #[serde(rename = "downloadUrl")]
    download_url: String,
    #[serde(rename = "uploadUrl")]
    upload_url: String,
    #[serde(rename = "eventSourceUrl")]
    event_source_url: String,
    // Opaque server state string; changes when session data changes.
    #[serde(rename = "state")]
    state: String,
}

/// A single account visible in the session.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Account {
    #[serde(rename = "name")]
    name: String,
    #[serde(rename = "isPersonal")]
    is_personal: bool,
    #[serde(rename = "isReadOnly")]
    is_read_only: bool,
    #[serde(rename = "accountCapabilities")]
    account_capabilities: AHashMap<String, Capabilities>,
}

/// Capability payload, deserialized untagged: serde tries each variant in
/// declaration order, so more specific shapes must come before the
/// catch-alls (`Empty`, `Other`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Capabilities {
    Core(CoreCapabilities),
    Mail(MailCapabilities),
    Submission(SubmissionCapabilities),
    WebSocket(WebSocketCapabilities),
    Sieve(SieveCapabilities),
    Empty(EmptyCapabilities),
    Other(serde_json::Value),
}

/// Limits advertised under the core JMAP capability URI.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CoreCapabilities {
    #[serde(rename = "maxSizeUpload")]
    max_size_upload: usize,
    #[serde(rename = "maxConcurrentUpload")]
    max_concurrent_upload: usize,
    #[serde(rename = "maxSizeRequest")]
    max_size_request: usize,
    #[serde(rename = "maxConcurrentRequests")]
    max_concurrent_requests: usize,
    #[serde(rename = "maxCallsInRequest")]
    max_calls_in_request: usize,
    #[serde(rename = "maxObjectsInGet")]
    max_objects_in_get: usize,
    #[serde(rename = "maxObjectsInSet")]
    max_objects_in_set: usize,
    #[serde(rename = "collationAlgorithms")]
    collation_algorithms: Vec<String>,
}

/// WebSocket push capability (RFC 8887).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WebSocketCapabilities {
    #[serde(rename = "url")]
    url: String,
    #[serde(rename = "supportsPush")]
    supports_push: bool,
}

/// JMAP-for-Sieve capability limits. All limits are optional; absent means
/// the server imposes no advertised limit.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SieveCapabilities {
    #[serde(rename = "implementation")]
    implementation: Option<String>,
    #[serde(rename = "maxSizeScriptName")]
    max_script_name: Option<usize>,
    #[serde(rename = "maxSizeScript")]
    max_script_size: Option<usize>,
    #[serde(rename = "maxNumberScripts")]
    max_scripts: Option<usize>,
    #[serde(rename = "maxNumberRedirects")]
    max_redirects: Option<usize>,
    #[serde(rename = "sieveExtensions")]
    extensions: Vec<String>,
    #[serde(rename = "notificationMethods")]
    notification_methods: Option<Vec<String>>,
    #[serde(rename = "externalLists")]
    ext_lists: Option<Vec<String>>,
}

/// Capability whose JSON payload is an empty object.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmptyCapabilities {}

impl Session {
    /// Iterate over the capability URIs advertised by the server.
    pub fn capabilities(&self) -> impl Iterator<Item = &String> {
        self.capabilities.keys()
    }

    /// Look up the payload for a capability URI.
    pub fn capability(&self, capability: impl AsRef<str>) -> Option<&Capabilities> {
        self.capabilities.get(capability.as_ref())
    }

    pub fn has_capability(&self, capability: impl AsRef<str>) -> bool {
        self.capabilities.contains_key(capability.as_ref())
    }

    // The typed accessors below return `None` both when the capability is
    // absent and when it deserialized into a different variant.
    pub fn websocket_capabilities(&self) -> Option<&WebSocketCapabilities> {
        self.capabilities
            .get(URI::WebSocket.as_ref())
            .and_then(|v| match v {
                Capabilities::WebSocket(capabilities) => Some(capabilities),
                _ => None,
            })
    }

    pub fn core_capabilities(&self) -> Option<&CoreCapabilities> {
        self.capabilities
            .get(URI::Core.as_ref())
            .and_then(|v| match v {
                Capabilities::Core(capabilities) => Some(capabilities),
                _ => None,
            })
    }

    pub fn mail_capabilities(&self) -> Option<&MailCapabilities> {
        self.capabilities
            .get(URI::Mail.as_ref())
            .and_then(|v| match v {
                Capabilities::Mail(capabilities) => Some(capabilities),
                _ => None,
            })
    }

    pub fn submission_capabilities(&self) -> Option<&SubmissionCapabilities> {
        self.capabilities
            .get(URI::Submission.as_ref())
            .and_then(|v| match v {
                Capabilities::Submission(capabilities) => Some(capabilities),
                _ => None,
            })
    }

    pub fn sieve_capabilities(&self) -> Option<&SieveCapabilities> {
        self.capabilities
            .get(URI::Sieve.as_ref())
            .and_then(|v| match v {
                Capabilities::Sieve(capabilities) => Some(capabilities),
                _ => None,
            })
    }

    /// Iterate over the account ids visible in this session.
    pub fn accounts(&self) -> impl Iterator<Item = &String> {
        self.accounts.keys()
    }

    pub fn account(&self, account: &str) -> Option<&Account> {
        self.accounts.get(account)
    }

    /// Iterate over (capability URI, primary account id) pairs.
    pub fn primary_accounts(&self) -> impl Iterator<Item = (&String, &String)> {
        self.primary_accounts.iter()
    }

    pub fn username(&self) -> &str {
        &self.username
    }

    pub fn api_url(&self) -> &str {
        &self.api_url
    }

    pub fn download_url(&self) -> &str {
        &self.download_url
    }

    pub fn upload_url(&self) -> &str {
        &self.upload_url
    }

    pub fn event_source_url(&self) -> &str {
        &self.event_source_url
    }

    pub fn state(&self) -> &str {
        &self.state
    }
}

impl Account {
    pub fn name(&self) -> &str {
        &self.name
    }

    pub fn is_personal(&self) -> bool {
        self.is_personal
    }

    pub fn is_read_only(&self) -> bool {
        self.is_read_only
    }

    /// Iterate over the capability URIs enabled for this account.
    pub fn capabilities(&self) -> impl Iterator<Item = &String> {
        self.account_capabilities.keys()
    }

    pub fn capability(&self, capability: &str) -> Option<&Capabilities> {
        self.account_capabilities.get(capability)
    }
}

impl CoreCapabilities {
    pub fn max_size_upload(&self) -> usize {
        self.max_size_upload
    }

    pub fn max_concurrent_upload(&self) -> usize {
        self.max_concurrent_upload
    }

    pub fn max_size_request(&self) -> usize {
        self.max_size_request
    }

    pub fn max_concurrent_requests(&self) -> usize {
        self.max_concurrent_requests
    }

    pub fn max_calls_in_request(&self) -> usize {
        self.max_calls_in_request
    }

    pub fn max_objects_in_get(&self) -> usize {
        self.max_objects_in_get
    }

    pub fn max_objects_in_set(&self) -> usize {
        self.max_objects_in_set
    }

    pub fn collation_algorithms(&self) -> &[String] {
        &self.collation_algorithms
    }
}

impl WebSocketCapabilities {
    pub fn url(&self) -> &str {
        &self.url
    }

    pub fn supports_push(&self) -> bool {
        self.supports_push
    }
}

impl SieveCapabilities {
    /// Maximum script-name size; defaults to 512 when the server does not
    /// advertise a limit.
    pub fn max_script_name_size(&self) -> usize {
        self.max_script_name.unwrap_or(512)
    }

    pub fn max_script_size(&self) -> Option<usize> {
        self.max_script_size
    }

    pub fn max_number_scripts(&self) -> Option<usize> {
        self.max_scripts
    }

    pub fn max_number_redirects(&self) -> Option<usize> {
        self.max_redirects
    }

    pub fn sieve_extensions(&self) -> &[String] {
        &self.extensions
    }

    pub fn notification_methods(&self) -> Option<&[String]> {
        self.notification_methods.as_deref()
    }

    pub fn external_lists(&self) -> Option<&[String]> {
        self.ext_lists.as_deref()
    }
}

/// Parses the value between `{` and `}` in a URL template into a typed
/// parameter.
pub trait URLParser: Sized {
    fn parse(value: &str) -> Option<Self>;
}

/// One segment of a parsed URL template: either literal text or a
/// `{parameter}` placeholder.
pub enum URLPart<T: URLParser> {
    Value(String),
    Parameter(T),
}

impl<T: URLParser> URLPart<T> {
    /// Split a URL template such as "/download/{accountId}/{blobId}" into
    /// literal and parameter parts.
    ///
    /// Errors on unbalanced braces, an empty `{}`, or a parameter value
    /// that `T::parse` rejects. Note: nested braces are not supported.
    pub fn parse(url: &str) -> crate::Result<Vec<URLPart<T>>> {
        let mut parts = Vec::new();
        let mut buf = String::with_capacity(url.len());
        let mut in_parameter = false;
        for ch in url.chars() {
            match ch {
                '{' => {
                    // Flush any literal text accumulated so far.
                    if !buf.is_empty() {
                        parts.push(URLPart::Value(buf.clone()));
                        buf.clear();
                    }
                    in_parameter = true;
                }
                '}' => {
                    if in_parameter && !buf.is_empty() {
                        parts.push(URLPart::Parameter(T::parse(&buf).ok_or_else(|| {
                            crate::Error::Internal(format!(
                                "Invalid parameter '{}' in URL: {}",
                                buf, url
                            ))
                        })?));
                        buf.clear();
                    } else {
                        // '}' without a matching '{', or empty "{}".
                        return Err(crate::Error::Internal(format!("Invalid URL: {}", url)));
                    }
                    in_parameter = false;
                }
                _ => {
                    buf.push(ch);
                }
            }
        }
        if !buf.is_empty() {
            if !in_parameter {
                parts.push(URLPart::Value(buf.clone()));
            } else {
                // Unterminated '{...' at end of template.
                return Err(crate::Error::Internal(format!("Invalid URL: {}", url)));
            }
        }
        Ok(parts)
    }
}
/// Advent of Code 2021 day 6 (lanternfish): reads the puzzle input, then
/// solves both parts with two equivalent counting strategies, timing each.
fn main() {
    let start = std::time::Instant::now();
    let data = std::fs::read_to_string("../2021-06/input.txt").expect("Unable to create String from input.txt");
    println!("Read file in {:?}", start.elapsed());

    let start = std::time::Instant::now();
    let timers = parse(&data);
    //println!("timer = {:#?}", timers);
    let duration = start.elapsed();
    println!("Parse file in {:?}", duration);

    let start = std::time::Instant::now();
    let answer = count_fish(timers, 80);
    let duration = start.elapsed();
    println!("2021 day 06 Part 1 = {} in {:?}", answer, duration);

    let start = std::time::Instant::now();
    let answer = count_fish(timers, 256);
    let duration = start.elapsed();
    println!("2021 day 06 Part 2 = {} in {:?}", answer, duration);

    let start = std::time::Instant::now();
    let answer = count_fish2(timers, 80);
    let duration = start.elapsed();
    println!("2021 day 06 Part 1+ = {} in {:?}", answer, duration);

    let start = std::time::Instant::now();
    let answer = count_fish2(timers, 256);
    let duration = start.elapsed();
    println!("2021 day 06 Part 2+ = {} in {:?}", answer, duration);
}

/// Parses comma-separated fish timers into a histogram: `timers[t]` is the
/// number of fish whose internal timer is `t` (valid timers are 0..=8).
fn parse(data: &str) -> [usize; 9] {
    let mut timers: [usize; 9] = [0; 9];
    for byte in data.bytes() {
        match byte {
            // Count every digit 0-8. The original arm list covered only
            // '1'..='6', so inputs containing '0', '7' or '8' were dropped
            // into the "Unexpected character" branch and miscounted.
            b'0'..=b'8' => timers[(byte - b'0') as usize] += 1,
            b',' | b'\n' => (),
            _ => println!("Unexpected character '{}'", byte),
        }
    }
    timers
}

/// Simulates `days` days by shifting the histogram: spawning fish (timer 0)
/// reset to 6 and each produces a new fish at timer 8.
fn count_fish(mut timers: [usize; 9], days: usize) -> usize {
    for _ in 0..days {
        let zeros = timers[0];
        // Shift every bucket down by one day.
        for i in 0..8 {
            timers[i] = timers[i + 1];
        }
        timers[6] += zeros;
        timers[8] = zeros;
    }
    timers.iter().sum()
}

fn count_fish2(mut timers: [usize; 9], days: usize) -> usize {
    // Everyday, the data in i+1 goes to i and 0 goes to n
    // If we think of the list as a circle, nothing changes
    // (except the spawning fish are added again),
    // the index of the spawning fish is the day number modulo 9
    // This reduces the number of operations per day from 30 to 9
    //
    // However:
    // reading the file takes ~594µs +/- 179µs
    // parsing the string to integer data takes about 4.6µs
    // counting the old way takes about 0.32µs and 0.28µs this way (for 80 days)
    // So we have improved efficiency by 12% in 0.05% of the problem
    // A great example of an unnecessary optimization.
    // This solution may be easier to read/understand
    for i in 0..days {
        let start = i % 9;
        // The fish that spawn today re-enter the cycle 7 slots ahead.
        let restart = (start + 7) % 9;
        timers[restart] += timers[start];
    }
    timers.iter().sum()
}

#[cfg(test)]
mod tests {
    use super::*;

    const TEST_INPUT: &str = "3,4,3,1,2
";

    #[test]
    pub fn test2106a() {
        assert_eq!(count_fish2(parse(TEST_INPUT), 18), 26);
    }

    #[test]
    pub fn test2106b() {
        assert_eq!(count_fish2(parse(TEST_INPUT), 80), 5934);
    }

    #[test]
    pub fn test2106c() {
        assert_eq!(count_fish2(parse(TEST_INPUT), 256), 26984457539);
    }
}
use jni::objects::{JClass, JObject};
use jni::JNIEnv;
use jni::sys::{jboolean, jlong, jbyte};
use parity_bn::Fq;

// JNI bindings exposing parity_bn's Fq (alt_bn128 base field) arithmetic to
// the Java class org.ethereum.crypto.altbn128.Fp. Field elements cross the
// JNI boundary as four jlongs (the raw 256-bit representation) and results
// are written into caller-supplied arrays via `ret`.

/// Parses a big-endian byte array into an Fq element.
/// Writes the element into `ret` and returns JNI_TRUE on success,
/// JNI_FALSE if the bytes are not a valid field element.
#[no_mangle]
#[allow(non_snake_case)]
pub extern "system" fn Java_org_ethereum_crypto_altbn128_Fp_newFq(
    env: JNIEnv,
    _class: JClass,
    bytes: JObject,
    ret: JObject,
) -> jboolean {
    let vec: Vec<u8> = env
        .convert_byte_array(bytes.into_inner())
        .expect("Unable to read byte array");
    // Left-pad to 32 bytes (input is big-endian).
    // NOTE(review): panics if the Java side passes more than 32 bytes —
    // confirm the caller guarantees len <= 32.
    let mut byte_array = [0; 32];
    byte_array[(32 - vec.len())..].copy_from_slice(&vec);
    if let Ok(fq) = Fq::from_slice(&byte_array) {
        fq_return(env, fq, ret);
        true as jboolean
    } else {
        false as jboolean
    }
}

/// ret = left + right (field addition).
#[no_mangle]
#[allow(non_snake_case)]
pub extern "system" fn Java_org_ethereum_crypto_altbn128_Fp_nadd(
    env: JNIEnv,
    _class: JClass,
    la: jlong,
    lb: jlong,
    lc: jlong,
    ld: jlong,
    ra: jlong,
    rb: jlong,
    rc: jlong,
    rd: jlong,
    ret: JObject,
) {
    let left = fq_from_jlongs([la, lb, lc, ld]);
    let right = fq_from_jlongs([ra, rb, rc, rd]);
    let fq = left + right;
    fq_return(env, fq, ret)
}

/// ret = left * right (field multiplication).
#[no_mangle]
#[allow(non_snake_case)]
pub extern "system" fn Java_org_ethereum_crypto_altbn128_Fp_nmul(
    env: JNIEnv,
    _class: JClass,
    la: jlong,
    lb: jlong,
    lc: jlong,
    ld: jlong,
    ra: jlong,
    rb: jlong,
    rc: jlong,
    rd: jlong,
    ret: JObject,
) {
    let left = fq_from_jlongs([la, lb, lc, ld]);
    let right = fq_from_jlongs([ra, rb, rc, rd]);
    let fq = left * right;
    fq_return(env, fq, ret)
}

/// ret = left - right (field subtraction).
#[no_mangle]
#[allow(non_snake_case)]
pub extern "system" fn Java_org_ethereum_crypto_altbn128_Fp_nsub(
    env: JNIEnv,
    _class: JClass,
    la: jlong,
    lb: jlong,
    lc: jlong,
    ld: jlong,
    ra: jlong,
    rb: jlong,
    rc: jlong,
    rd: jlong,
    ret: JObject,
) {
    let left = fq_from_jlongs([la, lb, lc, ld]);
    let right = fq_from_jlongs([ra, rb, rc, rd]);
    let fq = left - right;
    fq_return(env, fq, ret)
}

/// ret = a^2, computed as a * a.
#[no_mangle]
#[allow(non_snake_case)]
pub extern "system" fn Java_org_ethereum_crypto_altbn128_Fp_nsquared(
    env: JNIEnv,
    _class: JClass,
    a: jlong,
    b: jlong,
    c: jlong,
    d: jlong,
    ret: JObject,
) {
    let fq = fq_from_jlongs([a, b, c, d]);
    fq_return(env, fq * fq, ret)
}

/// ret = 2a, computed as a + a.
#[no_mangle]
#[allow(non_snake_case)]
pub extern "system" fn Java_org_ethereum_crypto_altbn128_Fp_ndbl(
    env: JNIEnv,
    _class: JClass,
    a: jlong,
    b: jlong,
    c: jlong,
    d: jlong,
    ret: JObject,
) {
    let fq = fq_from_jlongs([a, b, c, d]);
    fq_return(env, fq + fq, ret)
}

/// ret = a^-1 (multiplicative inverse).
/// NOTE(review): when the element has no inverse (zero), `ret` is silently
/// left untouched — verify the Java caller handles that case.
#[no_mangle]
#[allow(non_snake_case)]
pub extern "system" fn Java_org_ethereum_crypto_altbn128_Fp_ninverse(
    env: JNIEnv,
    _class: JClass,
    a: jlong,
    b: jlong,
    c: jlong,
    d: jlong,
    ret: JObject,
) {
    if let Some(fq) = fq_from_jlongs([a, b, c, d]).inverse() {
        fq_return(env, fq, ret)
    }
}

/// ret = -a (additive negation).
#[no_mangle]
#[allow(non_snake_case)]
pub extern "system" fn Java_org_ethereum_crypto_altbn128_Fp_nnegate(
    env: JNIEnv,
    _class: JClass,
    a: jlong,
    b: jlong,
    c: jlong,
    d: jlong,
    ret: JObject,
) {
    let fq = -fq_from_jlongs([a, b, c, d]);
    fq_return(env, fq, ret)
}

/// Serialises the element as 32 big-endian bytes into the Java byte array `ret`.
#[no_mangle]
#[allow(non_snake_case)]
pub extern "system" fn Java_org_ethereum_crypto_altbn128_Fp_nbytes(
    env: JNIEnv,
    _class: JClass,
    a: jlong,
    b: jlong,
    c: jlong,
    d: jlong,
    ret: JObject,
) {
    let fq = fq_from_jlongs([a, b, c, d]);
    let mut bytes: [u8; 32] = [0; 32];
    fq.to_big_endian(&mut bytes).unwrap();
    // SAFETY(review): reinterprets u8 as i8 (same size/layout), required by
    // the JNI jbyte signature.
    let fq_bytes: [jbyte; 32] = unsafe { std::mem::transmute(bytes) };
    env.set_byte_array_region(ret.into_inner(), 0, &fq_bytes)
        .expect("Unable to set return array for fq");
}

/// Reinterprets four jlongs as an Fq element.
/// NOTE(review): assumes `Fq`'s in-memory representation is exactly four
/// 64-bit limbs in this order — this is layout-dependent on the parity_bn
/// crate version; confirm when upgrading.
#[inline]
pub(crate) fn fq_from_jlongs(buf: [jlong; 4]) -> Fq {
    unsafe { std::ptr::read(buf.as_ptr() as *const _) }
}

/// Writes an Fq element into the Java long[4] array `ret` (inverse of
/// `fq_from_jlongs`; same layout assumption applies).
#[inline]
fn fq_return(env: JNIEnv, fq: Fq, ret: JObject) {
    let fq_bytes: [jlong; 4] = unsafe { std::mem::transmute(fq) };
    env.set_long_array_region(ret.into_inner(), 0, &fq_bytes)
        .expect("Unable to set return array for fq");
}
use super::{Animation, TimingAnimation};

/// Linear interpolation driver: maps elapsed time onto a value between
/// `start_value` and `end_value` and forwards the progress to the wrapped
/// [`TimingAnimation`].
pub struct LinearTiming<T: TimingAnimation> {
    animation: T,
    start_value: f64,
    end_value: f64,
    // Last value delivered to the inner animation.
    current_value: f64
}

impl<T: TimingAnimation> LinearTiming<T> {
    /// Wraps `animation`, interpolating from `start_value` to `end_value`.
    pub fn new(animation: T, start_value: f64, end_value: f64) -> Self {
        return LinearTiming { animation, start_value, end_value, current_value: 0. };
    }
}

impl<T: TimingAnimation> Animation for LinearTiming<T> {
    /// Per-frame callback: linearly interpolates by `current_time / total_time`
    /// and pushes the value to the inner animation.
    fn frame(&mut self, _current_frame: i32, _total_frames: i32, current_time: f64, total_time: f64) {
        self.current_value = current_time / total_time * (self.end_value - self.start_value) + self.start_value;
        self.animation.progress(self.current_value, current_time, total_time);
    }

    /// Final callback: snaps exactly to `end_value` so rounding in `frame`
    /// never leaves the animation short of its target.
    fn end(&mut self, _total_frames: i32, total_time: f64) {
        self.current_value = self.end_value;
        self.animation.progress(self.end_value, total_time, total_time);
    }
}

// Manual smoke test (prints progress values); not a #[cfg(test)] module.
pub mod test {
    use super::super::super::super::ctx::Ctx;
    use super::super::{AnimationObject, TimingAnimation};
    use super::{LinearTiming};

    pub struct TestAnimation();

    impl TimingAnimation for TestAnimation {
        fn progress(&mut self, _current_value: f64, _current_time: f64, _total_time: f64) {
            println!("Animation progress: {}", _current_value);
        }
    }

    /// Runs a 0..100 linear animation over 3000 time units and returns 0.
    pub fn test() -> i32 {
        let mut ani_obj = Ctx::new(AnimationObject::new(Ctx::new(LinearTiming::new(TestAnimation(), 0., 100.))));
        AnimationObject::exec(&mut ani_obj, 0, 3000.);
        return 0;
    }
}
// Register access API for the Cortex-M System Control Register (SCR).
// This file follows the svd2rust generated-code pattern (R/W proxies per
// bit field) — prefer regenerating from the SVD over hand-editing.
#[doc = "Reader of register SCR"]
pub type R = crate::R<u32, super::SCR>;
#[doc = "Writer for register SCR"]
pub type W = crate::W<u32, super::SCR>;
#[doc = "Register SCR `reset()`'s with value 0"]
impl crate::ResetValue for super::SCR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `SEVONPEND`"]
pub type SEVONPEND_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SEVONPEND`"]
pub struct SEVONPEND_W<'a> {
    w: &'a mut W,
}
impl<'a> SEVONPEND_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 4 only.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `SLEEPDEEP`"]
pub type SLEEPDEEP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SLEEPDEEP`"]
pub struct SLEEPDEEP_W<'a> {
    w: &'a mut W,
}
impl<'a> SLEEPDEEP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 2 only.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `SLEEPONEXIT`"]
pub type SLEEPONEXIT_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SLEEPONEXIT`"]
pub struct SLEEPONEXIT_W<'a> {
    w: &'a mut W,
}
impl<'a> SLEEPONEXIT_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 1 only.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
impl R {
    #[doc = "Bit 4 - Send Event on Pending bit:\\n 0 = Only enabled interrupts or events can wakeup the processor, disabled interrupts are excluded.\\n 1 = Enabled events and all interrupts, including disabled interrupts, can wakeup the processor.\\n When an event or interrupt becomes pending, the event signal wakes up the processor from WFE. If the\\n processor is not waiting for an event, the event is registered and affects the next WFE.\\n The processor also wakes up on execution of an SEV instruction or an external event."]
    #[inline(always)]
    pub fn sevonpend(&self) -> SEVONPEND_R {
        SEVONPEND_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Controls whether the processor uses sleep or deep sleep as its low power mode:\\n 0 = Sleep.\\n 1 = Deep sleep."]
    #[inline(always)]
    pub fn sleepdeep(&self) -> SLEEPDEEP_R {
        SLEEPDEEP_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 1 - Indicates sleep-on-exit when returning from Handler mode to Thread mode:\\n 0 = Do not sleep when returning to Thread mode.\\n 1 = Enter sleep, or deep sleep, on return from an ISR to Thread mode.\\n Setting this bit to 1 enables an interrupt driven application to avoid returning to an empty main application."]
    #[inline(always)]
    pub fn sleeponexit(&self) -> SLEEPONEXIT_R {
        SLEEPONEXIT_R::new(((self.bits >> 1) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 4 - Send Event on Pending bit:\\n 0 = Only enabled interrupts or events can wakeup the processor, disabled interrupts are excluded.\\n 1 = Enabled events and all interrupts, including disabled interrupts, can wakeup the processor.\\n When an event or interrupt becomes pending, the event signal wakes up the processor from WFE. If the\\n processor is not waiting for an event, the event is registered and affects the next WFE.\\n The processor also wakes up on execution of an SEV instruction or an external event."]
    #[inline(always)]
    pub fn sevonpend(&mut self) -> SEVONPEND_W {
        SEVONPEND_W { w: self }
    }
    #[doc = "Bit 2 - Controls whether the processor uses sleep or deep sleep as its low power mode:\\n 0 = Sleep.\\n 1 = Deep sleep."]
    #[inline(always)]
    pub fn sleepdeep(&mut self) -> SLEEPDEEP_W {
        SLEEPDEEP_W { w: self }
    }
    #[doc = "Bit 1 - Indicates sleep-on-exit when returning from Handler mode to Thread mode:\\n 0 = Do not sleep when returning to Thread mode.\\n 1 = Enter sleep, or deep sleep, on return from an ISR to Thread mode.\\n Setting this bit to 1 enables an interrupt driven application to avoid returning to an empty main application."]
    #[inline(always)]
    pub fn sleeponexit(&mut self) -> SLEEPONEXIT_W {
        SLEEPONEXIT_W { w: self }
    }
}
use certmaster::cert_issuer::CertIssuer;
use certmaster::certificate::{self, Certificate};
use certmaster::consts::labels::{CACHED, CERT_ISSUER, MANAGED_BY_KEY, MANAGED_BY_VALUE};
use certmaster::store::Store;
use futures::prelude::*;
use kube::{
    api::{Api, ListParams},
    Client,
};
use kube_runtime::watcher;
use tokio;
use tokio::task;
use tokio::task::JoinError;

/// Entry point: connects to the Kubernetes cluster and runs two watchers
/// concurrently — one for `CertIssuer` resources and one for managed
/// `Certificate` resources — until both tasks finish.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    env_logger::init();

    let client = Client::try_default().await?;
    let cert_issuer: Api<CertIssuer> = Api::all(client.clone());
    let store = Store::new(client.clone());
    let certs: Api<Certificate> = Api::all(client.clone());

    // Run both watchers to completion; their individual errors are
    // discarded here.
    let _ = tokio::join!(
        cert_issuer_watcher(cert_issuer, store.clone()),
        cert_watcher(certs, store)
    );

    Ok(())
}

/// Watches all `CertIssuer` resources cluster-wide and feeds every event to
/// `handle_cert_issuer_events`. Watch errors are swallowed (`let _`).
async fn cert_issuer_watcher(api: Api<CertIssuer>, store: Store) -> Result<(), JoinError> {
    task::spawn(async move {
        let watcher = watcher(api, ListParams::default());

        let _ = watcher
            .try_for_each(|event| handle_cert_issuer_events(event, store.clone()))
            .await;
    })
    .await
}

/// Watches `Certificate` resources managed by this controller that are not
/// yet cached (label selector: managed-by matches, has the cert-issuer
/// label, and `cached != true`).
async fn cert_watcher(api: Api<Certificate>, store: Store) -> Result<(), JoinError> {
    task::spawn(async move {
        let lp = ListParams::default().timeout(60).labels(&format!(
            "{managed_by_key}={managed_by_value},{cert_issuer},{cached}!=true",
            managed_by_key = MANAGED_BY_KEY,
            managed_by_value = MANAGED_BY_VALUE,
            cert_issuer = CERT_ISSUER,
            cached = CACHED
        ));

        let watcher = watcher(api, lp);

        let _ = watcher
            .try_for_each(|event| handle_cert_events(event, store.clone()))
            .await;
    })
    .await
}

/// On `Applied` (created/updated) CertIssuer events, caches the issuer and
/// creates certificates for the configured namespaces. Other event types
/// are ignored.
async fn handle_cert_issuer_events(
    event: watcher::Event<CertIssuer>,
    store: Store,
) -> Result<(), watcher::Error> {
    match event {
        watcher::Event::Applied(cert_issuer) => {
            let res = certificate::cache_and_create_for_namespaces(&store, cert_issuer).await;

            if let Ok(_certificates) = res {
                //TODO:
                // save certificates to store
            }
            ()
        }
        _ => (),
    }

    Ok(())
}

/// Placeholder handler for certificate events: currently just logs the
/// event to stdout.
async fn handle_cert_events(
    event: watcher::Event<Certificate>,
    _store: Store,
) -> Result<(), watcher::Error> {
    println!("CERT: {:?}", event);
    Ok(())
}
use csv;
use finox::roses;
use hound;
use std::error::Error;
// NOTE(review): `PI` appears unused in this file — verify before removing.
use std::f32::consts::PI;
use std::i16;

/// One realtime-trade record from the NASDAQ CSV: symbol, timestamp,
/// price (`x`) and volume (`v`).
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Row {
    pub symbol: String,
    pub t: String,
    pub x: f64,
    pub v: u64,
}

/// Converts the SPY trade feed into a WAV file. The ticker-list loop is
/// currently disabled; only "SPY" is processed.
fn main() -> Result<(), hound::Error> {
    //let strs = roses::read_tickers("../ref_data/tickers_stocks.txt");
    //for s in strs.iter() {
    do_one("SPY").unwrap();
    //}
    Ok(())
}

/// Reads `../data/nasdaq/realtime-trades/{s}.csv` and writes each trade as
/// one 16-bit mono sample (44.1 kHz) to `../data/sound/{s}.wav`.
fn do_one(s: &str) -> Result<(), Box<dyn Error>> {
    let spec = hound::WavSpec {
        channels: 1,
        sample_rate: 44100,
        bits_per_sample: 16,
        sample_format: hound::SampleFormat::Int,
    };

    let file_name = format!("../data/nasdaq/realtime-trades/{}.csv", s);
    let output_name = format!("../data/sound/{}.wav", s);
    //let mut rdr = csv::Reader::from_path().unwrap();
    //let mut iter = rdr.deserialize();
    let rows = roses::read_into::<Row>(file_name);
    let mut wtr = hound::WavWriter::create(output_name, spec).unwrap();
    for r in rows {
        // NOTE(review): `price * volume` is not normalized to [-1, 1], so
        // `sample * amplitude` can far exceed i16 range; the `as i16` cast
        // saturates (Rust >= 1.45), likely clipping most samples to i16::MAX.
        // Confirm whether a normalization step is missing.
        let sample = r.x * r.v as f64;
        let amplitude = i16::MAX as f32;
        wtr.write_sample((sample * amplitude) as i16).unwrap();
    }
    Ok(())
}
use lazy_static::lazy_static;

/// Wraps `body` in a minimal HTML5 document with the given `title`.
/// Returns the full page as an owned string.
fn gen_page(title: &str, body: &str) -> String {
    format!(
        r#"
<!DOCTYPE html>
<html lang = "en">
    <head>
        <meta charset = "utf8">
        <title>{}</title>
    </head>
    <body>
        {}
    </body>
</html>
"#,
        title, body,
    )
}

lazy_static! {
    // Landing page: on load, POSTs the browser's user agent (as JSON) to
    // /add-user-agent and shows the server's response text in #unique.
    pub static ref INDEX: String = gen_page(
        "Am You Unique",
        r#"
<p id = "unique"></p>
<script>
(async () => {
    let userAgentStr = navigator.userAgent;
    let userAgentJson = JSON.stringify({'user_agent': userAgentStr});
    console.log(userAgentStr);
    let response = await fetch("/add-user-agent", {
        method: "POST",
        headers: { 'Content-Type': 'application/json' },
        body: userAgentJson
    });
    let body = await response.text();
    console.log(body);
    document.getElementById("unique").innerHTML = body;
})();
</script>
"#
    );
}
//! Acquires a [`Read`] instance and reads it in on-demand in a buffer. //! All of the bytes read are kept in memory. //! //! Choose this implementation if: //! //! 1. You have a [`Read`] source that might contain relatively large amounts //! of data. //! 2. You want to run the JSONPath query on the input and then discard it. //! //! ## Performance characteristics //! //! This is the best choice for a relatively large read-once input that is not a file //! (or when memory maps are not supported). It is faster than first reading all of //! the contents and then passing them to [`BorrowedBytes`](`super::BorrowedBytes`). It is, however, //! slow compared to other choices. If you know the approximate length of input, //! use the [`with_capacity`](`BufferedInput::with_capacity`) function to avoid //! reallocating the internal buffers. use super::{ error::InputError, in_slice, repr_align_block_size, Input, InputBlock, InputBlockIterator, MAX_BLOCK_SIZE, }; use crate::{error::InternalRsonpathError, query::JsonString, result::InputRecorder, FallibleIterator}; use std::{cell::RefCell, io::Read, ops::Deref, slice}; const BUF_SIZE: usize = 64 * 1024; static_assertions::const_assert!(BUF_SIZE >= MAX_BLOCK_SIZE); static_assertions::const_assert!(BUF_SIZE % MAX_BLOCK_SIZE == 0); /// Input supporting a buffered read over a [`Read`] implementation. pub struct BufferedInput<R>(RefCell<InternalBuffer<R>>); struct InternalBuffer<R> { source: R, bytes: Vec<BufferedChunk>, chunk_idx: usize, eof: bool, } repr_align_block_size! { struct BufferedChunk([u8; BUF_SIZE]); } /// Iterator over a [`BufferedInput`]. pub struct BufferedInputBlockIterator<'a, 'r, R, IR, const N: usize> { input: &'a BufferedInput<R>, idx: usize, recorder: &'r IR, } /// Block returned from a [`BufferedInputBlockIterator`]. 
pub struct BufferedInputBlock<const N: usize>([u8; N]);

impl<R: Read> InternalBuffer<R> {
    // Views all filled chunks as one contiguous byte slice.
    fn as_slice(&self) -> &[u8] {
        let len = self.len();
        let ptr = self.bytes.as_slice().as_ptr().cast();

        // SAFETY: BufferedChunk has the same layout as an array of bytes due to repr(C).
        // `BUF_SIZE >= MAX_BLOCK_SIZE`, and `BUF_SIZE` is a multiple of `MAX_BLOCK_SIZE`
        // (static asserts at the top), so [BufferedChunk; N] has the same repr as [[u8; BUF_SIZE]; N],
        // which in turn is guaranteed to have the same repr as [u8; BUF_SIZE * N].
        // https://doc.rust-lang.org/reference/type-layout.html#array-layout
        unsafe { slice::from_raw_parts(ptr, len) }
    }

    // Length in bytes of the data considered available. Always a whole number
    // of chunks: a final, partially-filled chunk counts in full (its unread
    // tail stays zeroed — the input is effectively zero-padded).
    fn len(&self) -> usize {
        self.chunk_idx * BUF_SIZE
    }

    // Fills one more chunk from the source (allocating it if needed).
    // Returns Ok(true) if any bytes were read, Ok(false) at EOF.
    fn read_more(&mut self) -> Result<bool, InputError> {
        if self.eof {
            return Ok(false);
        }

        if self.chunk_idx == self.bytes.len() {
            self.bytes.push(BufferedChunk([0; BUF_SIZE]));
        }

        let buf = &mut self.bytes[self.chunk_idx].0;
        let mut total = 0;
        self.chunk_idx += 1;

        // Loop until the chunk is full or the source is exhausted; a single
        // read() may legally return fewer bytes than requested.
        while total < BUF_SIZE && !self.eof {
            let size = self.source.read(&mut buf[total..])?;

            if size == 0 {
                self.eof = true;
            }

            total += size;
        }

        Ok(total > 0)
    }
}

impl<R: Read> BufferedInput<R> {
    /// Create a new [`BufferedInput`] reading from the given `source`.
    #[inline]
    pub fn new(source: R) -> Self {
        Self(RefCell::new(InternalBuffer {
            source,
            bytes: vec![],
            eof: false,
            chunk_idx: 0,
        }))
    }

    /// Create a new [`BufferedInput`] reading from the given `source`,
    /// preallocating at least `capacity` bytes up front.
    #[inline]
    pub fn with_capacity(source: R, capacity: usize) -> Self {
        // NOTE(review): capacity is divided by MAX_BLOCK_SIZE although chunks
        // are BUF_SIZE bytes each, so this reserves more chunk slots than
        // `capacity` strictly needs — harmless over-reservation, but confirm.
        let blocks_needed = capacity / MAX_BLOCK_SIZE + 1;
        Self(RefCell::new(InternalBuffer {
            source,
            bytes: Vec::with_capacity(blocks_needed),
            eof: false,
            chunk_idx: 0,
        }))
    }
}

impl<R: Read> Input for BufferedInput<R> {
    type BlockIterator<'a, 'r, const N: usize, IR> = BufferedInputBlockIterator<'a, 'r, R, IR, N>
    where Self: 'a, IR: InputRecorder<BufferedInputBlock<N>> + 'r;

    type Block<'a, const N: usize> = BufferedInputBlock<N>
    where Self: 'a;

    // Starts block iteration from offset 0, reporting blocks to `recorder`.
    #[inline(always)]
    fn iter_blocks<'i, 'r, IR, const N: usize>(&'i self, recorder: &'r IR) -> Self::BlockIterator<'i, 'r, N, IR>
    where
        IR: InputRecorder<Self::Block<'i, N>>,
    {
        BufferedInputBlockIterator {
            input: self,
            idx: 0,
            recorder,
        }
    }

    // Backward seeks never trigger reads: only already-buffered data is searched.
    #[inline(always)]
    fn seek_backward(&self, from: usize, needle: u8) -> Option<usize> {
        let buf = self.0.borrow();
        let slice = buf.as_slice();
        in_slice::seek_backward(slice, from, needle)
    }

    // Searches forward for any of `needles`, lazily reading more chunks from
    // the source until a match is found or EOF is reached.
    #[inline]
    fn seek_forward<const N: usize>(&self, from: usize, needles: [u8; N]) -> Result<Option<(usize, u8)>, InputError> {
        let mut buf = self.0.borrow_mut();
        let mut moving_from = from;

        loop {
            let res = {
                let slice = buf.as_slice();
                in_slice::seek_forward(slice, moving_from, needles)
            };

            // Next iteration resumes at the end of the data searched so far.
            moving_from = buf.len();

            if res.is_some() {
                return Ok(res);
            } else if !buf.read_more()? {
                return Ok(None);
            }
        }
    }

    // Same lazy-read loop as `seek_forward`, but matching the first
    // non-whitespace byte.
    #[inline]
    fn seek_non_whitespace_forward(&self, from: usize) -> Result<Option<(usize, u8)>, InputError> {
        let mut buf = self.0.borrow_mut();
        let mut moving_from = from;

        loop {
            let res = {
                let slice = buf.as_slice();
                in_slice::seek_non_whitespace_forward(slice, moving_from)
            };

            moving_from = buf.len();

            if res.is_some() {
                return Ok(res);
            } else if !buf.read_more()? {
                return Ok(None);
            }
        }
    }

    #[inline(always)]
    fn seek_non_whitespace_backward(&self, from: usize) -> Option<(usize, u8)> {
        let buf = self.0.borrow();
        let slice = buf.as_slice();
        in_slice::seek_non_whitespace_backward(slice, from)
    }

    // Compares the buffered bytes in [from, to) against `member`; operates
    // only on already-read data.
    #[inline(always)]
    fn is_member_match(&self, from: usize, to: usize, member: &JsonString) -> bool {
        let buf = self.0.borrow();
        let slice = buf.as_slice();
        in_slice::is_member_match(slice, from, to, member)
    }
}

impl<'a, 'r, R: Read, IR, const N: usize> FallibleIterator for BufferedInputBlockIterator<'a, 'r, R, IR, N>
where
    IR: InputRecorder<BufferedInputBlock<N>>,
{
    type Item = BufferedInputBlock<N>;
    type Error = InputError;

    // Yields the next N-byte block, copying it out of the buffer; reads more
    // data (then retries via recursion) when the buffer is exhausted.
    #[inline]
    fn next(&mut self) -> Result<Option<Self::Item>, Self::Error> {
        let buf = self.input.0.borrow();

        // NOTE(review): this guard uses `<` rather than `<=`, so the final
        // block ending exactly at buf.len() is only reachable after another
        // successful read_more(); at EOF it appears to be skipped entirely.
        // Looks like an off-by-one — confirm against upstream/tests.
        if self.idx + N < buf.len() {
            let slice = &buf.as_slice()[self.idx..self.idx + N];
            let block: [u8; N] = slice
                .try_into()
                .map_err(|err| InternalRsonpathError::from_error(err, "slice of size N is not of size N"))?;
            self.idx += N;

            self.recorder.record_block_start(BufferedInputBlock(block));

            Ok(Some(BufferedInputBlock(block)))
        } else {
            // Must drop the shared borrow before borrowing mutably to read.
            drop(buf);
            let mut buf_mut = self.input.0.borrow_mut();

            if !buf_mut.read_more()? {
                Ok(None)
            } else {
                drop(buf_mut);
                self.next()
            }
        }
    }
}

impl<'a, 'r, R: Read, IR, const N: usize> InputBlockIterator<'a, N> for BufferedInputBlockIterator<'a, 'r, R, IR, N>
where
    IR: InputRecorder<BufferedInputBlock<N>>,
{
    type Block = BufferedInputBlock<N>;

    // Skips `count` whole blocks forward; negative offsets are not supported.
    #[inline(always)]
    fn offset(&mut self, count: isize) {
        assert!(count >= 0);
        self.idx += count as usize * N;
    }

    #[inline(always)]
    fn get_offset(&self) -> usize {
        self.idx
    }
}

impl<const N: usize> Deref for BufferedInputBlock<N> {
    type Target = [u8];

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<'a, const N: usize> InputBlock<'a, N> for BufferedInputBlock<N> {
    // Splits the block into two equal halves; N must be even.
    #[inline(always)]
    fn halves(&self) -> (&[u8], &[u8]) {
        assert_eq!(N % 2, 0);
        (&self[..N / 2], &self[N / 2..])
    }
}
#![warn(clippy::pedantic)]

use heroku_nodejs_utils::inv::{Inventory, BUCKET, REGION};
use heroku_nodejs_utils::nodebin_s3;

use std::convert::TryFrom;

const FAILED_EXIT_CODE: i32 = 1;

/// Prints, as TOML on stdout, the inventory of available versions for the
/// requested binary (`node` or `yarn`), built from the nodebin S3 listing.
/// Any failure prints a diagnostic to stderr and exits with code 1.
fn main() {
    let args: Vec<String> = std::env::args().collect();

    // Exactly one positional argument is required: the binary name.
    let name = match args.get(1) {
        Some(name) => name,
        None => {
            eprintln!("$ list_versions <node|yarn>");
            std::process::exit(FAILED_EXIT_CODE);
        }
    };

    let listing = nodebin_s3::list_objects(BUCKET, REGION, name).unwrap_or_else(|e| {
        eprintln!("Failed to fetch from S3: {e}");
        std::process::exit(FAILED_EXIT_CODE);
    });

    let inventory = Inventory::try_from(listing).unwrap_or_else(|e| {
        eprintln!("Failed to parse AWS S3 XML: {e}");
        std::process::exit(FAILED_EXIT_CODE);
    });

    let toml_string = toml::to_string(&inventory).unwrap_or_else(|e| {
        eprintln!("Failed to convert to toml: {e}");
        std::process::exit(FAILED_EXIT_CODE);
    });

    println!("{toml_string}");
}
#![feature(try_from)] extern crate telegram_bot; extern crate regex; extern crate chrono; #[macro_use] extern crate log; extern crate env_logger; extern crate typemap; mod manager; mod bot; mod message; mod error; mod send; mod handler; mod model; mod chat_room; use telegram_bot::*; fn main() { env_logger::init().unwrap(); let api = Api::from_env("TELEGRAM_BOT_TOKEN").unwrap(); info!("getMe: {:?}", api.get_me()); let res = listen(api); if let Err(e) = res { error!("An error occured: {}", e); } } fn listen(api: Api) -> Result<()> { let mut listener = api.listener(ListeningMethod::LongPoll(None)); // Fetch new updates via long poll method let mut bot = bot::Bot::new(&api); let res = listener.listen(|u| { if let Some(message) = u.message { debug!("Raw message: {:?}", &message); bot.process_message(message); } Ok(ListeningAction::Continue) }); return res; }
// 16.16 Sub Sort

/// Returns the inclusive bounds of the minimal subarray that, if sorted,
/// makes the whole array sorted. Returns (0, 0) if the array is already
/// sorted (this includes empty and single-element arrays).
///
/// O(n) time, O(1) extra space — the original's two auxiliary prefix-max /
/// suffix-min tables are replaced by running extrema:
///   * left→right with a running max: the LAST index whose value is below
///     the running max is the right edge of the unsorted region;
///   * right→left with a running min: the FIRST index whose value is above
///     the running min is the left edge.
fn sub_sort(array: &[i32]) -> (usize, usize) {
    if array.len() < 2 {
        return (0, 0);
    }

    // Right edge: last element smaller than everything before it allows.
    let mut right = None;
    let mut running_max = array[0];
    for (index, &value) in array.iter().enumerate().skip(1) {
        if value < running_max {
            right = Some(index);
        } else {
            running_max = value;
        }
    }

    // No out-of-order element means the array is already sorted.
    let right = match right {
        Some(r) => r,
        None => return (0, 0),
    };

    // Left edge: first element greater than everything after it allows.
    let mut left = 0;
    let mut running_min = *array.last().unwrap();
    for index in (0..array.len() - 1).rev() {
        if array[index] > running_min {
            left = index;
        } else {
            running_min = array[index];
        }
    }

    (left, right)
}

#[test]
fn test() {
    assert!(sub_sort(&[1, 2, 4, 7, 10, 11, 7, 12, 6, 7, 16, 18, 19]) == (3, 9));
    assert!(sub_sort(&[1, 2, 3]) == (0, 0));
    assert!(sub_sort(&[3, 2, 1]) == (0, 2));
    assert!(sub_sort(&[]) == (0, 0));
    println!("Ex 16.16 ok!");
}
use serde::ser::Serialize;
use std::collections::HashMap;

// Builder for the tera template contexts used by glam's code generator:
// each `new_*` constructor pre-populates the context for one output file.
struct ContextBuilder(tera::Context);

// SIMD backend a template is rendered for; exactly one flag is set in the
// context by `with_target`.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Target {
    Scalar,
    Sse2,
    Wasm32,
    CoreSimd,
}

impl ContextBuilder {
    pub fn new() -> Self {
        Self(tera::Context::new())
    }

    // Common context for the swizzle impls of an N-dimensional vector whose
    // type names share `prefix` (e.g. "D" for DVec2/DVec3/DVec4).
    fn new_tvecn_swizzle_impl(dim: u32, prefix: &str) -> Self {
        ContextBuilder::new()
            .with_template("swizzle_impl.rs.tera")
            .target_scalar()
            .with_key_val("vec2_t", &format!("{prefix}Vec2"))
            .with_key_val("vec3_t", &format!("{prefix}Vec3"))
            .with_key_val("vec4_t", &format!("{prefix}Vec4"))
            .with_self_t(&format!("{prefix}Vec{dim}"))
            .with_dimension(dim)
    }

    pub fn new_vec2_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(2, "")
    }

    pub fn new_vec3_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(3, "")
    }

    // Vec3A swizzles on the Vec3 context, overriding the 3D type to Vec3A.
    pub fn new_vec3a_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(3, "")
            .with_key_val("vec3_t", "Vec3A")
            .with_self_t("Vec3A")
    }

    pub fn new_vec4_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(4, "")
    }

    pub fn new_dvec2_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(2, "D")
    }

    pub fn new_dvec3_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(3, "D")
    }

    pub fn new_dvec4_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(4, "D")
    }

    pub fn new_ivec2_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(2, "I")
    }

    pub fn new_ivec3_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(3, "I")
    }

    pub fn new_ivec4_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(4, "I")
    }

    pub fn new_uvec2_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(2, "U")
    }

    pub fn new_uvec3_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(3, "U")
    }

    pub fn new_uvec4_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(4, "U")
    }

    pub fn new_i64vec2_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(2, "I64")
    }

    pub fn new_i64vec3_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(3, "I64")
    }

    pub fn new_i64vec4_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(4, "I64")
    }

    pub fn new_u64vec2_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(2, "U64")
    }

    pub fn new_u64vec3_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(3, "U64")
    }

    pub fn new_u64vec4_swizzle_impl() -> Self {
        Self::new_tvecn_swizzle_impl(4, "U64")
    }

    // Common context for AffineN types of the given scalar type.
    fn new_taffinen(dim: u32, scalar_t: &str) -> Self {
        ContextBuilder::new()
            .with_template("affine.rs.tera")
            .target_scalar()
            .with_scalar_t(scalar_t)
            .with_dimension(dim)
            .with_is_align(false)
    }

    pub fn new_affine2() -> Self {
        Self::new_taffinen(2, "f32")
    }

    pub fn new_affine3a() -> Self {
        Self::new_taffinen(3, "f32").with_is_align(true)
    }

    pub fn new_daffine2() -> Self {
        Self::new_taffinen(2, "f64")
    }

    pub fn new_daffine3() -> Self {
        Self::new_taffinen(3, "f64")
    }

    // Common context for boolean vector masks (BVecN / BVecNA).
    pub fn new_bvecn(dim: u32, scalar_t: &str) -> Self {
        ContextBuilder::new()
            .with_template("vec_mask.rs.tera")
            .with_scalar_t(scalar_t)
            .target_scalar()
            .with_dimension(dim)
    }

    pub fn new_bvec2() -> Self {
        Self::new_bvecn(2, "bool")
    }

    pub fn new_bvec3() -> Self {
        Self::new_bvecn(3, "bool")
    }

    pub fn new_bvec4() -> Self {
        Self::new_bvecn(4, "bool")
    }

    // Aligned masks store lanes as u32 rather than bool.
    pub fn new_bvec3a() -> Self {
        Self::new_bvecn(3, "u32")
    }

    pub fn new_bvec4a() -> Self {
        Self::new_bvecn(4, "u32")
    }

    // Common context for the main vector types; scalar type added by callers.
    pub fn new_vecn(dim: u32) -> Self {
        ContextBuilder::new()
            .with_template("vec.rs.tera")
            .target_scalar()
            .with_dimension(dim)
            .with_is_align(false)
    }

    pub fn new_vec2() -> Self {
        Self::new_vecn(2).with_scalar_t("f32")
    }

    pub fn new_vec3() -> Self {
        Self::new_vecn(3).with_scalar_t("f32")
    }

    pub fn new_vec3a() -> Self {
        Self::new_vecn(3).with_scalar_t("f32").with_is_align(true)
    }

    pub fn new_vec4() -> Self {
        Self::new_vecn(4).with_scalar_t("f32")
    }

    pub fn new_dvec2() -> Self {
        Self::new_vecn(2).with_scalar_t("f64")
    }

    pub fn new_dvec3() -> Self {
        Self::new_vecn(3).with_scalar_t("f64")
    }

    pub fn new_dvec4() -> Self {
        Self::new_vecn(4).with_scalar_t("f64")
    }

    pub fn new_ivec2() -> Self {
        Self::new_vecn(2).with_scalar_t("i32")
    }

    pub fn new_ivec3() -> Self {
        Self::new_vecn(3).with_scalar_t("i32")
    }

    pub fn new_ivec4() -> Self {
        Self::new_vecn(4).with_scalar_t("i32")
    }

    pub fn new_uvec2() -> Self {
        Self::new_vecn(2).with_scalar_t("u32")
    }

    pub fn new_uvec3() -> Self {
        Self::new_vecn(3).with_scalar_t("u32")
    }

    pub fn new_uvec4() -> Self {
        Self::new_vecn(4).with_scalar_t("u32")
    }

    pub fn new_i64vec2() -> Self {
        Self::new_vecn(2).with_scalar_t("i64")
    }

    pub fn new_i64vec3() -> Self {
        Self::new_vecn(3).with_scalar_t("i64")
    }

    pub fn new_i64vec4() -> Self {
        Self::new_vecn(4).with_scalar_t("i64")
    }

    pub fn new_u64vec2() -> Self {
        Self::new_vecn(2).with_scalar_t("u64")
    }

    pub fn new_u64vec3() -> Self {
        Self::new_vecn(3).with_scalar_t("u64")
    }

    pub fn new_u64vec4() -> Self {
        Self::new_vecn(4).with_scalar_t("u64")
    }

    pub fn new_quat() -> Self {
        ContextBuilder::new()
            .with_template("quat.rs.tera")
            .target_scalar()
            .with_scalar_t("f32")
    }

    pub fn new_dquat() -> Self {
        Self::new_quat().with_scalar_t("f64")
    }

    // Common context for square matrix types.
    fn new_tmatn(dim: u32, scalar_t: &str) -> Self {
        ContextBuilder::new()
            .with_template("mat.rs.tera")
            .target_scalar()
            .with_scalar_t(scalar_t)
            .with_dimension(dim)
    }

    pub fn new_mat2() -> Self {
        Self::new_tmatn(2, "f32")
    }

    pub fn new_dmat2() -> Self {
        Self::new_tmatn(2, "f64")
    }

    pub fn new_mat3() -> Self {
        Self::new_tmatn(3, "f32")
    }

    pub fn new_mat3a() -> Self {
        Self::new_tmatn(3, "f32").with_is_align(true)
    }

    pub fn new_dmat3() -> Self {
        Self::new_tmatn(3, "f64")
    }

    pub fn new_mat4() -> Self {
        Self::new_tmatn(4, "f32")
    }

    pub fn new_dmat4() -> Self {
        Self::new_tmatn(4, "f64")
    }

    pub fn with_template(mut self, template_path: &str) -> Self {
        self.0.insert("template_path", template_path);
        self
    }

    pub fn with_scalar_t(mut self, scalar_t: &str) -> Self {
        self.0.insert("scalar_t", scalar_t);
        self
    }

    // Sets all four backend flags; exactly one ends up true.
    pub fn with_target(mut self, target: Target) -> Self {
        self.0.insert("is_sse2", &(target == Target::Sse2));
        self.0.insert("is_coresimd", &(target == Target::CoreSimd));
        self.0.insert("is_wasm32", &(target == Target::Wasm32));
        self.0.insert("is_scalar", &(target == Target::Scalar));
        self
    }

    pub fn target_sse2(self) -> Self {
        self.with_target(Target::Sse2)
    }

    pub fn target_wasm32(self) -> Self {
        self.with_target(Target::Wasm32)
    }

    pub fn target_scalar(self) -> Self {
        self.with_target(Target::Scalar)
    }

    pub fn target_coresimd(self) -> Self {
        self.with_target(Target::CoreSimd)
    }

    fn with_self_t(mut self, self_t: &str) -> Self {
        self.0.insert("self_t", self_t);
        self
    }

    fn with_dimension(mut self, dim: u32) -> Self {
        self.0.insert("dim", &dim);
        self
    }

    fn with_is_align(mut self, is_align: bool) -> Self {
        self.0.insert("is_align", &is_align);
        self
    }

    // Generic escape hatch for arbitrary serializable context values.
    fn with_key_val<T: Serialize + ?Sized, S: Into<String>>(mut self, key: S, val: &T) -> Self {
        self.0.insert(key, val);
        self
    }

    pub fn build(self) -> tera::Context {
        self.0
    }
}

// Maps every generated output path to its fully-populated template context.
pub fn build_output_pairs() -> HashMap<&'static str, tera::Context> {
    HashMap::from([
        (
            "src/swizzles/vec_traits.rs",
            ContextBuilder::new()
                .with_template("swizzle_traits.rs.tera")
                .build(),
        ),
        // Swizzle impls per type; SIMD-capable types get one file per backend.
        (
            "src/swizzles/vec2_impl.rs",
            ContextBuilder::new_vec2_swizzle_impl().build(),
        ),
        (
            "src/swizzles/vec3_impl.rs",
            ContextBuilder::new_vec3_swizzle_impl().build(),
        ),
        (
            "src/swizzles/scalar/vec3a_impl.rs",
            ContextBuilder::new_vec3a_swizzle_impl().build(),
        ),
        (
            "src/swizzles/sse2/vec3a_impl.rs",
            ContextBuilder::new_vec3a_swizzle_impl()
                .target_sse2()
                .build(),
        ),
        (
            "src/swizzles/wasm32/vec3a_impl.rs",
            ContextBuilder::new_vec3a_swizzle_impl()
                .target_wasm32()
                .build(),
        ),
        (
            "src/swizzles/coresimd/vec3a_impl.rs",
            ContextBuilder::new_vec3a_swizzle_impl()
                .target_coresimd()
                .build(),
        ),
        (
            "src/swizzles/scalar/vec4_impl.rs",
            ContextBuilder::new_vec4_swizzle_impl().build(),
        ),
        (
            "src/swizzles/sse2/vec4_impl.rs",
            ContextBuilder::new_vec4_swizzle_impl()
                .target_sse2()
                .build(),
        ),
        (
            "src/swizzles/wasm32/vec4_impl.rs",
            ContextBuilder::new_vec4_swizzle_impl()
                .target_wasm32()
                .build(),
        ),
        (
            "src/swizzles/coresimd/vec4_impl.rs",
            ContextBuilder::new_vec4_swizzle_impl()
                .target_coresimd()
                .build(),
        ),
        (
            "src/swizzles/dvec2_impl.rs",
            ContextBuilder::new_dvec2_swizzle_impl().build(),
        ),
        (
            "src/swizzles/dvec3_impl.rs",
            ContextBuilder::new_dvec3_swizzle_impl().build(),
        ),
        (
            "src/swizzles/dvec4_impl.rs",
            ContextBuilder::new_dvec4_swizzle_impl().build(),
        ),
        (
            "src/swizzles/ivec2_impl.rs",
            ContextBuilder::new_ivec2_swizzle_impl().build(),
        ),
        (
            "src/swizzles/ivec3_impl.rs",
            ContextBuilder::new_ivec3_swizzle_impl().build(),
        ),
        (
            "src/swizzles/ivec4_impl.rs",
            ContextBuilder::new_ivec4_swizzle_impl().build(),
        ),
        (
            "src/swizzles/uvec2_impl.rs",
            ContextBuilder::new_uvec2_swizzle_impl().build(),
        ),
        (
            "src/swizzles/uvec3_impl.rs",
            ContextBuilder::new_uvec3_swizzle_impl().build(),
        ),
        (
            "src/swizzles/uvec4_impl.rs",
            ContextBuilder::new_uvec4_swizzle_impl().build(),
        ),
        (
            "src/swizzles/i64vec2_impl.rs",
            ContextBuilder::new_i64vec2_swizzle_impl().build(),
        ),
        (
            "src/swizzles/i64vec3_impl.rs",
            ContextBuilder::new_i64vec3_swizzle_impl().build(),
        ),
        (
            "src/swizzles/i64vec4_impl.rs",
            ContextBuilder::new_i64vec4_swizzle_impl().build(),
        ),
        (
            "src/swizzles/u64vec2_impl.rs",
            ContextBuilder::new_u64vec2_swizzle_impl().build(),
        ),
        (
            "src/swizzles/u64vec3_impl.rs",
            ContextBuilder::new_u64vec3_swizzle_impl().build(),
        ),
        (
            "src/swizzles/u64vec4_impl.rs",
            ContextBuilder::new_u64vec4_swizzle_impl().build(),
        ),
        // Affine transforms.
        ("src/f32/affine2.rs", ContextBuilder::new_affine2().build()),
        (
            "src/f32/affine3a.rs",
            ContextBuilder::new_affine3a().build(),
        ),
        (
            "src/f64/daffine2.rs",
            ContextBuilder::new_daffine2().build(),
        ),
        (
            "src/f64/daffine3.rs",
            ContextBuilder::new_daffine3().build(),
        ),
        // Boolean masks.
        ("src/bool/bvec2.rs", ContextBuilder::new_bvec2().build()),
        ("src/bool/bvec3.rs", ContextBuilder::new_bvec3().build()),
        ("src/bool/bvec4.rs", ContextBuilder::new_bvec4().build()),
        (
            "src/bool/scalar/bvec3a.rs",
            ContextBuilder::new_bvec3a().build(),
        ),
        (
            "src/bool/sse2/bvec3a.rs",
            ContextBuilder::new_bvec3a().target_sse2().build(),
        ),
        (
            "src/bool/wasm32/bvec3a.rs",
            ContextBuilder::new_bvec3a().target_wasm32().build(),
        ),
        (
            "src/bool/coresimd/bvec3a.rs",
            ContextBuilder::new_bvec3a().target_coresimd().build(),
        ),
        (
            "src/bool/scalar/bvec4a.rs",
            ContextBuilder::new_bvec4a().build(),
        ),
        (
            "src/bool/sse2/bvec4a.rs",
            ContextBuilder::new_bvec4a().target_sse2().build(),
        ),
        (
            "src/bool/wasm32/bvec4a.rs",
            ContextBuilder::new_bvec4a().target_wasm32().build(),
        ),
        (
            "src/bool/coresimd/bvec4a.rs",
            ContextBuilder::new_bvec4a().target_coresimd().build(),
        ),
        // Vector types.
        ("src/f32/vec2.rs", ContextBuilder::new_vec2().build()),
        ("src/f32/vec3.rs", ContextBuilder::new_vec3().build()),
        (
            "src/f32/scalar/vec3a.rs",
            ContextBuilder::new_vec3a().build(),
        ),
        (
            "src/f32/sse2/vec3a.rs",
            ContextBuilder::new_vec3a().target_sse2().build(),
        ),
        (
            "src/f32/wasm32/vec3a.rs",
            ContextBuilder::new_vec3a().target_wasm32().build(),
        ),
        (
            "src/f32/coresimd/vec3a.rs",
            ContextBuilder::new_vec3a().target_coresimd().build(),
        ),
        ("src/f32/scalar/vec4.rs", ContextBuilder::new_vec4().build()),
        (
            "src/f32/sse2/vec4.rs",
            ContextBuilder::new_vec4().target_sse2().build(),
        ),
        (
            "src/f32/wasm32/vec4.rs",
            ContextBuilder::new_vec4().target_wasm32().build(),
        ),
        (
            "src/f32/coresimd/vec4.rs",
            ContextBuilder::new_vec4().target_coresimd().build(),
        ),
        ("src/f64/dvec2.rs", ContextBuilder::new_dvec2().build()),
        ("src/f64/dvec3.rs", ContextBuilder::new_dvec3().build()),
        ("src/f64/dvec4.rs", ContextBuilder::new_dvec4().build()),
        ("src/i32/ivec2.rs", ContextBuilder::new_ivec2().build()),
        ("src/i32/ivec3.rs", ContextBuilder::new_ivec3().build()),
        ("src/i32/ivec4.rs", ContextBuilder::new_ivec4().build()),
        ("src/u32/uvec2.rs", ContextBuilder::new_uvec2().build()),
        ("src/u32/uvec3.rs", ContextBuilder::new_uvec3().build()),
        ("src/u32/uvec4.rs", ContextBuilder::new_uvec4().build()),
        ("src/i64/i64vec2.rs", ContextBuilder::new_i64vec2().build()),
        ("src/i64/i64vec3.rs", ContextBuilder::new_i64vec3().build()),
        ("src/i64/i64vec4.rs", ContextBuilder::new_i64vec4().build()),
        ("src/u64/u64vec2.rs", ContextBuilder::new_u64vec2().build()),
        ("src/u64/u64vec3.rs", ContextBuilder::new_u64vec3().build()),
        ("src/u64/u64vec4.rs", ContextBuilder::new_u64vec4().build()),
        // Quaternions.
        ("src/f32/scalar/quat.rs", ContextBuilder::new_quat().build()),
        (
            "src/f32/sse2/quat.rs",
            ContextBuilder::new_quat().target_sse2().build(),
        ),
        (
            "src/f32/wasm32/quat.rs",
            ContextBuilder::new_quat().target_wasm32().build(),
        ),
        (
            "src/f32/coresimd/quat.rs",
            ContextBuilder::new_quat().target_coresimd().build(),
        ),
        ("src/f64/dquat.rs", ContextBuilder::new_dquat().build()),
        // Matrix types.
        ("src/f32/scalar/mat2.rs", ContextBuilder::new_mat2().build()),
        (
            "src/f32/sse2/mat2.rs",
            ContextBuilder::new_mat2().target_sse2().build(),
        ),
        (
            "src/f32/wasm32/mat2.rs",
            ContextBuilder::new_mat2().target_wasm32().build(),
        ),
        (
            "src/f32/coresimd/mat2.rs",
            ContextBuilder::new_mat2().target_coresimd().build(),
        ),
        ("src/f64/dmat2.rs", ContextBuilder::new_dmat2().build()),
        ("src/f32/mat3.rs", ContextBuilder::new_mat3().build()),
        (
            "src/f32/scalar/mat3a.rs",
            ContextBuilder::new_mat3a().build(),
        ),
        (
            "src/f32/sse2/mat3a.rs",
            ContextBuilder::new_mat3a().target_sse2().build(),
        ),
        (
            "src/f32/wasm32/mat3a.rs",
            ContextBuilder::new_mat3a().target_wasm32().build(),
        ),
        (
            "src/f32/coresimd/mat3a.rs",
            ContextBuilder::new_mat3a().target_coresimd().build(),
        ),
        ("src/f32/scalar/mat4.rs", ContextBuilder::new_mat4().build()),
        (
            "src/f32/sse2/mat4.rs",
            ContextBuilder::new_mat4().target_sse2().build(),
        ),
        (
            "src/f32/wasm32/mat4.rs",
            ContextBuilder::new_mat4().target_wasm32().build(),
        ),
        (
            "src/f32/coresimd/mat4.rs",
            ContextBuilder::new_mat4().target_coresimd().build(),
        ),
        ("src/f64/dmat3.rs", ContextBuilder::new_dmat3().build()),
        ("src/f64/dmat4.rs", ContextBuilder::new_dmat4().build()),
    ])
}
/// Returns `true` when `a` and `b` are equal, ignoring ASCII case.
///
/// Note: despite the `cmp_` prefix this is an equality test (the original
/// returned whether no ordering difference was found); the name is kept for
/// existing callers.
pub fn cmp_ignore_case_ascii(a: &str, b: &str) -> bool {
    // The previous hand-rolled `zip_longest` over bytes was exactly the
    // standard library's `eq_ignore_ascii_case`: differing lengths or any
    // byte pair that differs after ASCII lowercasing makes the inputs
    // unequal. (The old `a == b' ' && b == b' '` arm was dead code — equal
    // bytes already compare Equal after lowercasing.) This also removes the
    // itertools dependency, whose imports were only used here.
    a.eq_ignore_ascii_case(b)
}
use super::IDT_SIZE;

// NOTE: this file uses pre-1.0 Rust syntax (`[T, ..N]` fixed-size arrays and
// the `uint` type) and only builds on the era's toolchain.
extern {
    // Table of raw IRQ handler entry addresses, one per IDT slot, defined and
    // populated by the assembly layer.
    static _asm_irq_handler_array: [u64, ..IDT_SIZE as uint];
}

/// Looks up the raw handler address for IRQ `num` in the assembly-provided
/// table. Indexing is bounds-checked by the array type, so `num` must be
/// below `IDT_SIZE`.
pub fn get_irq_handler(num: u16) -> u64 {
    _asm_irq_handler_array[num as uint]
}
/// Demonstrates that a named function and a closure express the same addition.
pub fn run() {
    println!("{}", sum(44, 52));

    // Same operation written as a closure.
    let add = |x: i32, y: i32| x + y;
    println!("Closure sum: {}", add(44, 52));
}

/// Adds two integers. The final expression carries no semicolon — in Rust
/// that makes it the function's return value.
fn sum(a: i32, b: i32) -> i32 {
    a + b
}
use handlebars::Handlebars;

pub mod helpers;

/// Builds the template engine with every custom helper from [`helpers`]
/// registered under its template-facing name ("eq" is an alias of "equal").
pub fn handlebars() -> Handlebars {
    let mut registry = Handlebars::new();

    // List and comparison helpers.
    registry.register_helper("comma-list", Box::new(helpers::comma_delimited_list_helper));
    registry.register_helper("equal", Box::new(helpers::equal_helper));
    registry.register_helper("eq", Box::new(helpers::equal_helper));
    registry.register_helper("or", Box::new(helpers::or_helper));

    // String and URL manipulation helpers.
    registry.register_helper("yaml-string", Box::new(helpers::yaml_string_helper));
    registry.register_helper("url-rm-slash", Box::new(helpers::url_rm_slash_helper));
    registry.register_helper("url-add-slash", Box::new(helpers::url_add_slash_helper));
    registry.register_helper("url-rm-path", Box::new(helpers::url_rm_path));
    registry.register_helper("lowercase", Box::new(helpers::lowercase_string_helper));

    registry
}
//!
//! # `Code Section`
//!
//! +----------------+----------------+-------------+--------------+--------------+----------+
//! |                |                |             |              |              |          |
//! |   Code Kind    |     Flags      |  Gas Mode   | SVM Version  | Code Length  |   Code   |
//! |   (2 bytes)    |   (8 bytes)    |  (8 bytes)  |  (4 bytes)   |  (4 bytes)   |  (Blob)  |
//! |                |                |             |              |              |          |
//! +----------------+----------------+-------------+--------------+--------------+----------+
//!
//!

use std::io::Cursor;

use svm_types::{CodeKind, CodeSection, GasMode};

use crate::section::{SectionDecoder, SectionEncoder};
use crate::{Field, ParseError, ReadExt, WriteExt};

// Wire value for `CodeKind::Wasm`.
pub const WASM: u16 = 0x00_01;

// Wire value for `GasMode::Fixed`.
pub const GAS_MODE_FIXED: u64 = 0x00_01;

impl SectionEncoder for CodeSection {
    // Serializes the section in the field order shown in the module diagram;
    // all integers are big-endian.
    fn encode(&self, w: &mut Vec<u8>) {
        // `Code Kind`
        encode_code_kind(self.kind(), w);

        // `Flags`
        encode_code_flags(self.flags(), w);

        // `Gas Mode`
        encode_gas_mode(self.gas_mode(), w);

        // `SVM Version`
        encode_svm_version(self.svm_version(), w);

        // `Code Length`
        let code = self.code();
        let length = code.len();

        // Length travels as a u32 on the wire; a larger blob is a caller bug.
        assert!(length < std::u32::MAX as usize);

        w.write_u32_be(length as u32);

        // `Code`
        w.write_bytes(code);
    }
}

impl SectionDecoder for CodeSection {
    // Decodes the fields in encode order; any truncated field maps to
    // `ParseError::NotEnoughBytes` with the field that failed.
    fn decode(cursor: &mut Cursor<&[u8]>) -> Result<Self, crate::ParseError> {
        // `Code Kind`
        let kind = decode_code_kind(cursor)?;

        // `Flags`
        let flags = decode_code_flags(cursor)?;

        // `Gas Mode`
        let gas_mode = decode_gas_mode(cursor)?;

        // `SVM Version`
        let svm_version = decode_svm_version(cursor)?;

        // `Code Length`
        match cursor.read_u32_be() {
            Err(..) => Err(ParseError::NotEnoughBytes(Field::Code)),
            Ok(length) => {
                // `Code`
                match cursor.read_bytes(length as usize) {
                    Ok(code) => {
                        let section = CodeSection::new(kind, code, flags, gas_mode, svm_version);

                        Ok(section)
                    }
                    Err(..) => Err(ParseError::NotEnoughBytes(Field::Code)),
                }
            }
        }
    }
}

fn encode_code_kind(kind: CodeKind, w: &mut Vec<u8>) {
    let raw = match kind {
        CodeKind::Wasm => WASM,
    };

    w.write_u16_be(raw);
}

fn decode_code_kind(cursor: &mut Cursor<&[u8]>) -> Result<CodeKind, ParseError> {
    let value = cursor.read_u16_be();

    if value.is_err() {
        return Err(ParseError::NotEnoughBytes(Field::CodeKind));
    }

    match value.unwrap() {
        WASM => Ok(CodeKind::Wasm),
        // NOTE(review): this decoder runs on wire data, so an unknown kind
        // value panics the process instead of returning a parse error.
        // Consider a dedicated `ParseError` variant — confirm intent.
        _ => unreachable!(),
    }
}

fn encode_code_flags(flags: u64, w: &mut Vec<u8>) {
    w.write_u64_be(flags);
}

fn decode_code_flags(cursor: &mut Cursor<&[u8]>) -> Result<u64, ParseError> {
    let value = cursor.read_u64_be();

    value.map_err(|_| ParseError::NotEnoughBytes(Field::CodeFlags))
}

fn encode_gas_mode(gas_mode: GasMode, w: &mut Vec<u8>) {
    match gas_mode {
        GasMode::Fixed => w.write_u64_be(GAS_MODE_FIXED),
        // Metering has no wire representation here; encoding it is a bug.
        GasMode::Metering => unreachable!(),
    }
}

fn encode_svm_version(svm_ver: u32, w: &mut Vec<u8>) {
    w.write_u32_be(svm_ver);
}

fn decode_gas_mode(cursor: &mut Cursor<&[u8]>) -> Result<GasMode, ParseError> {
    let value = cursor.read_u64_be();

    if value.is_err() {
        return Err(ParseError::NotEnoughBytes(Field::GasMode));
    }

    match value.unwrap() {
        GAS_MODE_FIXED => Ok(GasMode::Fixed),
        // NOTE(review): same panic-on-wire-data concern as decode_code_kind.
        _ => unreachable!(),
    }
}

fn decode_svm_version(cursor: &mut Cursor<&[u8]>) -> Result<u32, ParseError> {
    let value = cursor.read_u32_be();

    value.map_err(|_| ParseError::NotEnoughBytes(Field::SvmVersion))
}
use crate::Rule;

use serde::Deserialize;

/// Deserializable runtime configuration: a rule list plus an on/off switch.
#[derive(Clone, Deserialize)]
pub struct Config {
    pub rules: Vec<Rule>,
    pub enabled: bool,
}

impl Default for Config {
    /// The default configuration is disabled and carries no rules.
    fn default() -> Self {
        Config {
            rules: Vec::new(),
            enabled: false,
        }
    }
}

impl Config {
    /// Borrows the configured rules.
    pub fn get_rules(&self) -> &Vec<Rule> {
        &self.rules
    }

    /// Whether rule processing is switched on.
    pub fn enabled(&self) -> bool {
        self.enabled
    }
}
use types::{SharedMut, Stereo, Wrap};

use dsp::{
    ControllableLink, Filter, SignalFlow, SignalLink, SignalSink, SignalSource, SoftLimiter,
    VoiceManager,
};
use event::{ControlEvent, Controllable};
use rb::{Producer, RbProducer};

/// The audio signal chain: a voice source feeding a list of controllable
/// links (a filter and a soft limiter) and terminating in a ring-buffer sink.
pub struct Flow {
    source: VoiceManager,
    links: Vec<SharedMut<ControllableLink>>,
    sink: BufferSink,
}

impl Flow {
    /// Builds the default chain (filter tuned to `sample_rate`, then limiter).
    pub fn new(source: VoiceManager, sink: BufferSink, sample_rate: usize) -> Self {
        Flow {
            source,
            links: vec![
                SharedMut::wrap(Filter::new(sample_rate)),
                SharedMut::wrap(SoftLimiter {}),
            ],
            sink,
        }
    }
}

impl Controllable for Flow {
    /// Broadcasts a control event to the source and to every link in the chain.
    fn handle(&mut self, msg: &ControlEvent) {
        // The previous `match *msg { _ => … }` had a single wildcard arm —
        // every event is handled identically, so forward unconditionally.
        self.source.handle(msg);
        for link in &self.links {
            link.borrow_mut().handle(msg)
        }
    }
}

impl SignalFlow for Flow {
    /// Pulls one sample from the source, runs it through each link in order,
    /// and pushes the result into the sink.
    fn tick(&mut self) {
        let mut sample = self.source.tick();
        for link in &self.links {
            sample = link.borrow_mut().tick(sample);
        }
        self.sink.tick(sample);
    }
}

/// A pass-through link: output equals input.
pub struct IdentityLink {}

impl SignalLink for IdentityLink {
    fn tick(&mut self, input: Stereo) -> Stereo {
        input
    }
}

/// Sink that accumulates samples into a fixed-size chunk and flushes each
/// full chunk into a ring buffer (blocking until space is available).
pub struct BufferSink {
    position: usize,
    buffer: Vec<Stereo>,
    ring_buffer: Producer<Stereo>,
}

impl BufferSink {
    pub fn new(ring_buffer: Producer<Stereo>, chunk_size: usize) -> Self {
        BufferSink {
            position: 0,
            buffer: vec![Stereo::default(); chunk_size],
            ring_buffer,
        }
    }
}

impl SignalSink for BufferSink {
    fn tick(&mut self, input: Stereo) {
        self.buffer[self.position] = input;
        if self.position == self.buffer.len() - 1 {
            // Flush the completed chunk. NOTE(review): `unwrap` treats a
            // ring-buffer write failure as a bug — confirm the producer can
            // only fail when the consumer side is gone.
            self.ring_buffer.write_blocking(&self.buffer[..]).unwrap();
        }
        self.position = (self.position + 1) % self.buffer.len();
    }
}
#[doc = r"Register block"] #[repr(C)] pub struct RegisterBlock { #[doc = "0x00 - control register 1"] pub cr1: CR1, #[doc = "0x04 - control register 2"] pub cr2: CR2, #[doc = "0x08 - interrupt and status register"] pub isr: ISR, #[doc = "0x0c - interrupt and status clear register"] pub clrisr: CLRISR, _reserved4: [u8; 0x04], #[doc = "0x14 - injected channel group selection register"] pub jchgr: JCHGR, _reserved5: [u8; 0x08], #[doc = "0x20 - configuration 0 register"] pub conf0r: CONF0R, #[doc = "0x24 - configuration 1 register"] pub conf1r: CONF1R, #[doc = "0x28 - configuration 2 register"] pub conf2r: CONF2R, _reserved8: [u8; 0x14], #[doc = "0x40 - channel configuration register 1"] pub confchr1: CONFCHR1, #[doc = "0x44 - channel configuration register 2"] pub confchr2: CONFCHR2, _reserved10: [u8; 0x18], #[doc = "0x60 - data register for injected group"] pub jdatar: JDATAR, #[doc = "0x64 - data register for the regular channel"] pub rdatar: RDATAR, _reserved12: [u8; 0x08], #[doc = "0x70 - SDADC1 and SDADC2 injected data register"] pub jdata12r: JDATA12R, #[doc = "0x74 - SDADC1 and SDADC2 regular data register"] pub rdata12r: RDATA12R, #[doc = "0x78 - SDADC1 and SDADC3 injected data register"] pub jdata13r: JDATA13R, #[doc = "0x7c - SDADC1 and SDADC3 regular data register"] pub rdata13r: RDATA13R, } #[doc = "CR1 (rw) register accessor: control register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr1`] module"] pub type CR1 = crate::Reg<cr1::CR1_SPEC>; #[doc = "control register 1"] pub mod cr1; #[doc = "CR2 (rw) register accessor: control register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cr2`] module"] pub type CR2 = crate::Reg<cr2::CR2_SPEC>; #[doc = "control register 2"] pub mod cr2; #[doc = "ISR (r) register accessor: interrupt and status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`isr`] module"] pub type ISR = crate::Reg<isr::ISR_SPEC>; #[doc = "interrupt and status register"] pub mod isr; #[doc = "CLRISR (rw) register accessor: interrupt and status clear register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`clrisr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`clrisr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`clrisr`] module"] pub type CLRISR = crate::Reg<clrisr::CLRISR_SPEC>; #[doc = "interrupt and status clear register"] pub mod clrisr; #[doc = "JCHGR (rw) register accessor: injected channel group selection register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`jchgr::R`]. 
You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`jchgr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`jchgr`] module"] pub type JCHGR = crate::Reg<jchgr::JCHGR_SPEC>; #[doc = "injected channel group selection register"] pub mod jchgr; #[doc = "CONF0R (rw) register accessor: configuration 0 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`conf0r::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`conf0r::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`conf0r`] module"] pub type CONF0R = crate::Reg<conf0r::CONF0R_SPEC>; #[doc = "configuration 0 register"] pub mod conf0r; #[doc = "CONF1R (rw) register accessor: configuration 1 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`conf1r::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`conf1r::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`conf1r`] module"] pub type CONF1R = crate::Reg<conf1r::CONF1R_SPEC>; #[doc = "configuration 1 register"] pub mod conf1r; #[doc = "CONF2R (rw) register accessor: configuration 2 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`conf2r::R`]. 
You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`conf2r::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`conf2r`] module"] pub type CONF2R = crate::Reg<conf2r::CONF2R_SPEC>; #[doc = "configuration 2 register"] pub mod conf2r; #[doc = "CONFCHR1 (rw) register accessor: channel configuration register 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`confchr1::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`confchr1::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`confchr1`] module"] pub type CONFCHR1 = crate::Reg<confchr1::CONFCHR1_SPEC>; #[doc = "channel configuration register 1"] pub mod confchr1; #[doc = "CONFCHR2 (rw) register accessor: channel configuration register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`confchr2::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`confchr2::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`confchr2`] module"] pub type CONFCHR2 = crate::Reg<confchr2::CONFCHR2_SPEC>; #[doc = "channel configuration register 2"] pub mod confchr2; #[doc = "JDATAR (r) register accessor: data register for injected group\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`jdatar::R`]. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`jdatar`] module"] pub type JDATAR = crate::Reg<jdatar::JDATAR_SPEC>; #[doc = "data register for injected group"] pub mod jdatar; #[doc = "RDATAR (r) register accessor: data register for the regular channel\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdatar::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rdatar`] module"] pub type RDATAR = crate::Reg<rdatar::RDATAR_SPEC>; #[doc = "data register for the regular channel"] pub mod rdatar; #[doc = "JDATA12R (r) register accessor: SDADC1 and SDADC2 injected data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`jdata12r::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`jdata12r`] module"] pub type JDATA12R = crate::Reg<jdata12r::JDATA12R_SPEC>; #[doc = "SDADC1 and SDADC2 injected data register"] pub mod jdata12r; #[doc = "RDATA12R (r) register accessor: SDADC1 and SDADC2 regular data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdata12r::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rdata12r`] module"] pub type RDATA12R = crate::Reg<rdata12r::RDATA12R_SPEC>; #[doc = "SDADC1 and SDADC2 regular data register"] pub mod rdata12r; #[doc = "JDATA13R (r) register accessor: SDADC1 and SDADC3 injected data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`jdata13r::R`]. 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`jdata13r`] module"] pub type JDATA13R = crate::Reg<jdata13r::JDATA13R_SPEC>; #[doc = "SDADC1 and SDADC3 injected data register"] pub mod jdata13r; #[doc = "RDATA13R (r) register accessor: SDADC1 and SDADC3 regular data register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rdata13r::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rdata13r`] module"] pub type RDATA13R = crate::Reg<rdata13r::RDATA13R_SPEC>; #[doc = "SDADC1 and SDADC3 regular data register"] pub mod rdata13r;
use crate::types::*; /// Specification of a foreign key #[derive(Debug, Clone)] pub struct TableForeignKey { pub(crate) name: Option<String>, pub(crate) table: Option<DynIden>, pub(crate) ref_table: Option<DynIden>, pub(crate) columns: Vec<DynIden>, pub(crate) ref_columns: Vec<DynIden>, pub(crate) on_delete: Option<ForeignKeyAction>, pub(crate) on_update: Option<ForeignKeyAction>, } /// Foreign key on update & on delete actions #[derive(Debug, Clone)] pub enum ForeignKeyAction { Restrict, Cascade, SetNull, NoAction, SetDefault, } impl Default for TableForeignKey { fn default() -> Self { Self::new() } } impl TableForeignKey { /// Construct a new foreign key pub fn new() -> Self { Self { name: None, table: None, ref_table: None, columns: Vec::new(), ref_columns: Vec::new(), on_delete: None, on_update: None, } } /// Set foreign key name pub fn name(&mut self, name: &str) -> &mut Self { self.name = Some(name.into()); self } /// Set key table pub fn from_tbl<T>(&mut self, table: T) -> &mut Self where T: IntoIden, { self.table = Some(table.into_iden()); self } /// Set referencing table pub fn to_tbl<R>(&mut self, ref_table: R) -> &mut Self where R: IntoIden, { self.ref_table = Some(ref_table.into_iden()); self } /// Add key column pub fn from_col<T>(&mut self, column: T) -> &mut Self where T: IntoIden, { self.columns.push(column.into_iden()); self } /// Add referencing column pub fn to_col<R>(&mut self, ref_column: R) -> &mut Self where R: IntoIden, { self.ref_columns.push(ref_column.into_iden()); self } /// Set on delete action pub fn on_delete(&mut self, action: ForeignKeyAction) -> &mut Self { self.on_delete = Some(action); self } /// Set on update action pub fn on_update(&mut self, action: ForeignKeyAction) -> &mut Self { self.on_update = Some(action); self } pub fn get_ref_table(&self) -> Option<String> { self.ref_table.as_ref().map(|ref_tbl| ref_tbl.to_string()) } pub fn get_columns(&self) -> Vec<String> { self.columns.iter().map(|col| col.to_string()).collect() } 
pub fn get_ref_columns(&self) -> Vec<String> { self.ref_columns .iter() .map(|ref_col| ref_col.to_string()) .collect() } }
#![allow(dead_code)]
use crate::*;
use days::day05::{Data, Context};
use std::convert::{TryFrom};
use image::ImageBuffer;
use ndarray::Array2;
use crate::helper::dir::{Dir};
use geo::Point;

const DAY: usize = 11;

/// Panel color used by the painting robot.
#[derive(Clone, Copy)]
pub enum Color {
    Black,
    White,
}

impl TryFrom<u8> for Color {
    type Error = AocErr;

    /// Maps an intcode output (0/1) to a color; any other value is an error.
    fn try_from(value: u8) -> Result<Self, Self::Error> {
        Ok(match value {
            0 => Color::Black,
            1 => Color::White,
            i => return Err(custom_err(format!("Invalid color: {}", i))),
        })
    }
}

// Implement `From` rather than a hand-written `Into`: the blanket impl still
// provides `Color: Into<u8>`, so existing `.into()` call sites keep working.
impl From<Color> for u8 {
    fn from(color: Color) -> u8 {
        match color {
            Color::Black => 0,
            Color::White => 1,
        }
    }
}

/// Runs the painting robot over a grid starting on a `start`-colored panel.
///
/// Returns the number of panels painted at least once; when `save` is set,
/// also writes the final grid to `out11.png`.
fn run(data: Data, start: Color, save: bool) -> AocResult<usize> {
    const N: usize = 512;
    const CENTER: usize = N / 2;

    // Each cell holds (current color, number of times painted).
    let mut grid = Array2::from_elem((N, N), (start, 0));
    grid.swap_axes(1, 0);

    let mut ctx = Context::from_data_fill_up(data, &[]);
    let mut pos = Point::new(CENTER, CENTER);
    let mut dir = Dir::North;

    loop {
        // Feed the current panel color to the program; two resumes produce
        // the (paint color, turn direction) output pair.
        let color: u8 = grid[pos.x_y()].0.into();
        ctx.push_input(color as isize);
        ctx.resume()?;
        ctx.resume()?;
        if ctx.halted() {
            break;
        }
        // Outputs are popped in reverse order: turn first, then color.
        let turn = ctx.pop_output().unwrap();
        let color = ctx.pop_output().unwrap();
        let counter = grid[pos.x_y()].1;
        let color = Color::try_from(color as u8)?;
        grid[pos.x_y()] = (color, counter + 1);
        dir = match turn {
            0 => dir.left(),
            1 => dir.right(),
            _ => unreachable!(),
        };
        pos = dir.next_pos(pos);
    }

    if save {
        let img = ImageBuffer::from_fn(N as u32, N as u32, |x, y| {
            match grid[(x as usize, y as usize)].0 {
                Color::Black => image::Luma([0u8]),
                Color::White => image::Luma([255u8]),
            }
        });
        img.save("out11.png").unwrap();
    }

    // Panels painted at least once.
    Ok(grid.iter().filter(|g| g.1 > 0).count())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn part1() -> AocResult<()> {
        let data: Data = parse_file(FileType::Input, DAY, 1)?;
        assert_eq!(run(data, Color::Black, false)?, 1909);
        Ok(())
    }

    #[test]
    fn part2() -> AocResult<()> {
        let data: Data = parse_file(FileType::Input, DAY, 1)?;
        assert_eq!(run(data, Color::White, true)?, 249);
        Ok(())
    }
}
#[cfg(test)]
#[path = "../../../tests/unit/solver/population/rosomaxa_test.rs"]
mod rosomaxa_test;

use super::super::rand::prelude::SliceRandom;
use super::*;
use crate::algorithms::gsom::{get_network_state, Input, Network, NetworkConfig, NodeLink, Storage};
use crate::algorithms::nsga2::Objective;
use crate::algorithms::statistics::relative_distance;
use crate::construction::heuristics::*;
use crate::models::Problem;
use crate::utils::{as_mut, Environment, Random};
use std::convert::TryInto;
use std::fmt::Formatter;
use std::ops::{Deref, RangeBounds};
use std::sync::Arc;

/// Tuning knobs for the rosomaxa population algorithm.
pub struct RosomaxaConfig {
    /// Selection size.
    pub selection_size: usize,
    /// Elite population size.
    pub elite_size: usize,
    /// Node population size.
    pub node_size: usize,
    /// Spread factor of GSOM.
    pub spread_factor: f64,
    /// Distribution factor of GSOM.
    pub distribution_factor: f64,
    /// Learning rate of GSOM.
    pub learning_rate: f64,
    /// A node rebalance memory of GSOM.
    pub rebalance_memory: usize,
    /// A rebalance count.
    pub rebalance_count: usize,
    /// A ratio of exploration phase.
    pub exploration_ratio: f64,
}

impl RosomaxaConfig {
    /// Builds a configuration from the default parameter set, scaled only by
    /// the desired `selection_size` (which reflects data-parallelism settings).
    pub fn new_with_defaults(selection_size: usize) -> Self {
        let (elite_size, node_size) = (2, 2);
        Self {
            selection_size,
            elite_size,
            node_size,
            spread_factor: 0.25,
            distribution_factor: 0.25,
            learning_rate: 0.1,
            rebalance_memory: 100,
            rebalance_count: 2,
            exploration_ratio: 0.9,
        }
    }
}

/// Implements custom algorithm, code name Routing Optimizations with Self Organizing
/// MAps and eXtrAs (pronounced as "rosomaha", from russian "росомаха" - "wolverine").
pub struct Rosomaxa {
    problem: Arc<Problem>,
    environment: Arc<Environment>,
    config: RosomaxaConfig,
    // Small population of the overall best individuals found so far.
    elite: Elitism,
    // Current search phase; drives how `select` and `add*` behave.
    phase: RosomaxaPhases,
}

impl Population for Rosomaxa {
    /// Adds a batch of individuals: promising ones are deep-copied into the
    /// elite, all of them are fed into the current phase's storage.
    fn add_all(&mut self, individuals: Vec<Individual>) -> bool {
        // NOTE avoid extra deep copy
        let best_known = self.elite.ranked().map(|(i, _)| i).next();
        let elite = individuals
            .iter()
            .filter(|individual| self.is_comparable_with_best_known(individual, best_known))
            .map(|individual| individual.deep_copy())
            .collect::<Vec<_>>();
        let is_improved = self.elite.add_all(elite);

        match &mut self.phase {
            RosomaxaPhases::Initial { individuals: known_individuals } => {
                known_individuals.extend(individuals.into_iter())
            }
            RosomaxaPhases::Exploration { time, network, .. } => {
                network.store_batch(individuals, *time, IndividualInput::new);
            }
            RosomaxaPhases::Exploitation => {}
        }

        is_improved
    }

    /// Single-individual variant of `add_all`; returns whether the elite improved.
    fn add(&mut self, individual: Individual) -> bool {
        let best_known = self.elite.ranked().map(|(i, _)| i).next();
        let is_improved = if self.is_comparable_with_best_known(&individual, best_known) {
            self.elite.add(individual.deep_copy())
        } else {
            false
        };

        match &mut self.phase {
            RosomaxaPhases::Initial { individuals } => individuals.push(individual),
            RosomaxaPhases::Exploration { time, network, .. } => network.store(IndividualInput::new(individual), *time),
            RosomaxaPhases::Exploitation => {}
        }

        is_improved
    }

    fn on_generation(&mut self, statistics: &Statistics) {
        self.update_phase(statistics)
    }

    fn cmp(&self, a: &Individual, b: &Individual) -> Ordering {
        self.elite.cmp(a, b)
    }

    /// Yields candidates for the next generation: elites first, then (during
    /// exploration) samples drawn from the node populations, capped at
    /// `config.selection_size`.
    fn select<'a>(&'a self) -> Box<dyn Iterator<Item = &Individual> + 'a> {
        // Split the selection budget between elites and network nodes.
        let (elite_explore_size, node_explore_size) = match self.config.selection_size {
            value if value > 6 => {
                let elite_size = self.environment.random.uniform_int(2, 4) as usize;
                (elite_size, 2)
            }
            value if value > 4 => (2, 2),
            value if value > 2 => (2, 1),
            _ => (1, 1),
        };

        match &self.phase {
            RosomaxaPhases::Exploration { populations, .. } => Box::new(
                self.elite
                    .select()
                    .take(elite_explore_size)
                    .chain(populations.iter().flat_map(move |population| {
                        let explore_size = self.environment.random.uniform_int(1, node_explore_size) as usize;
                        population.0.select().take(explore_size)
                    }))
                    .take(self.config.selection_size),
            ),
            _ => Box::new(self.elite.select()),
        }
    }

    fn ranked<'a>(&'a self) -> Box<dyn Iterator<Item = (&Individual, usize)> + 'a> {
        self.elite.ranked()
    }

    fn size(&self) -> usize {
        self.elite.size()
    }

    fn selection_phase(&self) -> SelectionPhase {
        match &self.phase {
            RosomaxaPhases::Initial { .. } => SelectionPhase::Initial,
            RosomaxaPhases::Exploration { .. } => SelectionPhase::Exploration,
            RosomaxaPhases::Exploitation => SelectionPhase::Exploitation,
        }
    }
}

type IndividualNetwork = Network<IndividualInput, IndividualStorage>;

impl Rosomaxa {
    /// Creates a new instance of `Rosomaxa`.
    ///
    /// Errors when elite/node/selection sizes are below the minimum of 2.
    pub fn new(problem: Arc<Problem>, environment: Arc<Environment>, config: RosomaxaConfig) -> Result<Self, String> {
        if config.elite_size < 2 || config.node_size < 2 || config.selection_size < 2 {
            return Err("Rosomaxa algorithm requires some parameters to be above thresholds".to_string());
        }

        Ok(Self {
            problem: problem.clone(),
            environment: environment.clone(),
            elite: Elitism::new(problem, environment.random.clone(), config.elite_size, config.selection_size),
            phase: RosomaxaPhases::Initial { individuals: vec![] },
            config,
        })
    }

    /// Advances the phase machine: Initial -> Exploration (once 4 seed
    /// individuals exist) -> Exploitation (once `exploration_ratio` of the
    /// estimated run time has elapsed).
    fn update_phase(&mut self, statistics: &Statistics) {
        match &mut self.phase {
            RosomaxaPhases::Initial { individuals, .. } => {
                if individuals.len() >= 4 {
                    // First four individuals seed the network; the rest are
                    // stored into it afterwards.
                    let mut network = Self::create_network(
                        self.problem.clone(),
                        self.environment.clone(),
                        &self.config,
                        individuals.drain(0..4).collect(),
                    );
                    individuals.drain(0..).for_each(|individual| network.store(IndividualInput::new(individual), 0));

                    self.phase = RosomaxaPhases::Exploration { time: 0, network, populations: vec![] };
                }
            }
            RosomaxaPhases::Exploration { time, network, populations, .. } => {
                if statistics.termination_estimate < self.config.exploration_ratio {
                    *time = statistics.generation;

                    let best_individual = self.elite.select().next().expect("expected individuals in elite");
                    let best_fitness = best_individual.get_fitness_values().collect::<Vec<_>>();

                    Self::optimize_network(
                        network,
                        statistics,
                        best_fitness.as_slice(),
                        self.config.rebalance_memory,
                        self.config.rebalance_count,
                    );

                    Self::fill_populations(
                        network,
                        populations,
                        best_fitness.as_slice(),
                        statistics,
                        self.environment.random.as_ref(),
                    );
                } else {
                    self.phase = RosomaxaPhases::Exploitation
                }
            }
            RosomaxaPhases::Exploitation => {}
        }
    }

    // True when `individual` is not worse than the best known one (or when
    // there is no best known yet).
    fn is_comparable_with_best_known(&self, individual: &Individual, best_known: Option<&Individual>) -> bool {
        best_known
            .map_or(true, |best_known| self.problem.objective.total_order(&individual, best_known) != Ordering::Greater)
    }

    /// Rebuilds `populations` from the network nodes (paired with each
    /// node's distance to the best-known fitness), then orders them:
    /// partially sorted-then-shuffled, or fully shuffled (see
    /// `get_shuffle_amount`).
    fn fill_populations(
        network: &IndividualNetwork,
        populations: &mut Vec<(Arc<Elitism>, f64)>,
        best_fitness: &[f64],
        statistics: &Statistics,
        random: &(dyn Random + Send + Sync),
    ) {
        populations.clear();
        populations.extend(network.get_nodes().map(|node| node.read().unwrap().storage.population.clone()).filter_map(
            |population| {
                population.select().next().map(|individual| {
                    (
                        population.clone(),
                        relative_distance(best_fitness.iter().cloned(), individual.get_fitness_values()),
                    )
                })
            },
        ));

        let shuffle_amount = Self::get_shuffle_amount(statistics, populations.len());
        if shuffle_amount != populations.len() {
            // partially randomize order
            populations.sort_by(|(_, a), (_, b)| compare_floats(*a, *b));
            populations.partial_shuffle(&mut random.get_rng(), shuffle_amount);
        } else {
            populations.shuffle(&mut random.get_rng());
        }
    }

    // How many of `length` populations to shuffle: everything while
    // improvement is low, a progress-dependent fraction when it is high.
    fn get_shuffle_amount(statistics: &Statistics, length: usize) -> usize {
        let ratio = match statistics.improvement_1000_ratio {
            v if v > 0.5 => {
                // Sigmoid decaying with progress; see (constants here differ
                // slightly from the plotted 0.66):
                // https://www.wolframalpha.com/input/?i=plot+0.66+*+%281-+1%2F%281%2Be%5E%28-10+*%28x+-+0.5%29%29%29%29%2C+x%3D0+to+1
                let progress = statistics.termination_estimate;
                let ratio = 0.5 * (1. - 1. / (1. + std::f64::consts::E.powf(-10. * (progress - 0.5))));
                ratio.clamp(0.1, 0.5)
            }
            v if v > 0.2 => 0.5,
            _ => 1.,
        };

        (length as f64 * ratio).round() as usize
    }

    /// Shrinks the network by retraining away nodes whose best individual is
    /// closer than a percentile distance threshold, keeping roughly
    /// `keep_size` nodes (larger when improvement stalls).
    fn optimize_network(
        network: &mut IndividualNetwork,
        statistics: &Statistics,
        best_fitness: &[f64],
        rebalance_memory: usize,
        rebalance_count: usize,
    ) {
        let rebalance_memory = rebalance_memory as f64;
        let keep_size = match statistics.improvement_1000_ratio {
            v if v > 0.2 => {
                // https://www.wolframalpha.com/input/?i=plot+%281+-+1%2F%281%2Be%5E%28-10+*%28x+-+0.5%29%29%29%29%2C+x%3D0+to+1
                let x = statistics.termination_estimate.clamp(0., 1.);
                let ratio = 1. - 1. / (1. + std::f64::consts::E.powf(-10. * (x - 0.5)));
                rebalance_memory + rebalance_memory * ratio
            }
            v if v > 0.1 => 2. * rebalance_memory,
            v if v > 0.01 => 3. * rebalance_memory,
            _ => 4. * rebalance_memory,
        } as usize;

        if statistics.generation == 0 || network.size() <= keep_size {
            return;
        }

        // Distance of a node's best individual from the best-known fitness.
        let get_distance = |node: &NodeLink<IndividualInput, IndividualStorage>| {
            let node = node.read().unwrap();
            let individual = node.storage.population.select().next();
            individual
                .map(|individual| relative_distance(best_fitness.iter().cloned(), individual.get_fitness_values()))
        };

        // determine percentile value (distances sorted descending)
        let mut distances = network.get_nodes().filter_map(get_distance).collect::<Vec<_>>();
        distances.sort_by(|a, b| compare_floats(*b, *a));

        let percentile_idx = if distances.len() > keep_size {
            distances.len() - keep_size
        } else {
            const PERCENTILE_THRESHOLD: f64 = 0.1;
            (distances.len() as f64 * PERCENTILE_THRESHOLD) as usize
        };

        if let Some(distance_threshold) = distances.get(percentile_idx).cloned() {
            network.retrain(rebalance_count, &|node| {
                get_distance(node).map_or(false, |distance| distance < distance_threshold)
            });
        }
    }

    /// Builds the GSOM network from exactly four seed individuals.
    ///
    /// # Panics
    ///
    /// Panics when `individuals.len() != 4`.
    fn create_network(
        problem: Arc<Problem>,
        environment: Arc<Environment>,
        config: &RosomaxaConfig,
        individuals: Vec<Individual>,
    ) -> IndividualNetwork {
        let inputs_vec = individuals.into_iter().map(IndividualInput::new).collect::<Vec<_>>();

        let inputs_slice = inputs_vec.into_boxed_slice();
        let inputs_array: Box<[IndividualInput; 4]> = match inputs_slice.try_into() {
            Ok(ba) => ba,
            Err(o) => panic!("expected individuals of length {} but it was {}", 4, o.len()),
        };

        Network::new(
            *inputs_array,
            NetworkConfig {
                spread_factor: config.spread_factor,
                distribution_factor: config.distribution_factor,
                learning_rate: config.learning_rate,
                rebalance_memory: config.rebalance_memory,
            },
            // Factory for per-node storages, each with its own small elitism
            // population.
            Box::new({
                let node_size = config.node_size;
                let random = environment.random.clone();
                move || IndividualStorage {
                    population: Arc::new(Elitism::new(problem.clone(), random.clone(), node_size, node_size)),
                }
            }),
        )
    }
}

impl Display for Rosomaxa {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match &self.phase {
            RosomaxaPhases::Exploration { network, .. } => {
                let state = get_network_state(network);
                write!(f, "{}", state)
            }
            _ => write!(f, "{}", self.elite),
        }
    }
}

// Phase machine of the algorithm; see `Rosomaxa::update_phase`.
enum RosomaxaPhases {
    Initial { individuals: Vec<InsertionContext> },
    Exploration { time: usize, network: IndividualNetwork, populations: Vec<(Arc<Elitism>, f64)> },
    Exploitation,
}

// An individual paired with its precomputed feature vector used as GSOM input.
struct IndividualInput {
    weights: Vec<f64>,
    individual: InsertionContext,
}

impl IndividualInput {
    pub fn new(individual: InsertionContext) -> Self {
        let weights = IndividualInput::get_weights(&individual);
        Self { weights, individual }
    }

    // Feature vector: load variance, customer deviation, duration/distance/
    // waiting/gravity means, and the route count.
    fn get_weights(individual: &InsertionContext) -> Vec<f64> {
        vec![
            get_max_load_variance(individual),
            get_customers_deviation(individual),
            get_duration_mean(individual),
            get_distance_mean(individual),
            get_waiting_mean(individual),
            get_distance_gravity_mean(individual),
            individual.solution.routes.len() as f64,
        ]
    }
}

impl Input for IndividualInput {
    fn weights(&self) -> &[f64] {
        self.weights.as_slice()
    }
}

// Per-node storage: a shared elitism population.
struct IndividualStorage {
    population: Arc<Elitism>,
}

impl IndividualStorage {
    fn get_population_mut(&mut self) -> &mut Elitism {
        // NOTE use black magic here to avoid RefCell, should not break memory
        // safety guarantee.
        // SAFETY(review): relies on `as_mut` and on this `Arc` never being
        // mutated concurrently — confirm callers uphold exclusive access.
        unsafe { as_mut(self.population.deref()) }
    }
}

impl Storage for IndividualStorage {
    type Item = IndividualInput;

    fn add(&mut self, input: Self::Item) {
        self.get_population_mut().add(input.individual);
    }

    fn drain<R>(&mut self, range: R) -> Vec<Self::Item>
    where
        R: RangeBounds<usize>,
    {
        self.get_population_mut().drain(range).into_iter().map(IndividualInput::new).collect()
    }

    fn distance(&self, a: &[f64], b: &[f64]) -> f64 {
        relative_distance(a.iter().cloned(), b.iter().cloned())
    }

    fn size(&self) -> usize {
        self.population.size()
    }
}

impl Display for IndividualStorage {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.population.as_ref())
    }
}
pub mod command { extern crate inflector; use crate::builders::{ csharp::CSharpBuilder, go::GoBuilder, java::JavaBuilder, node::NodeBuilder, python::PythonBuilder, rust::RustBuilder, scala::ScalaBuilder, Application, ProjectBuilder, }; use crate::{check_command, get_templates, get_user_dir, Emojis}; use clap::ArgMatches; use inflector::Inflector; use linked_hash_map::LinkedHashMap; use std::fs; use std::path::Path; use std::process::Command; use std::io::{self, Write}; use std::fs::File; const PROXY_PORT: &str = "9000"; const FUNCTION_PORT: &str = "8080"; const CLOUD_STATE_NAMESPACE: &str = "cloudstate"; const CLOUDSTATE_PROXY_DEV_MODE: &str = "cloudstateio/cloudstate-proxy-native-dev-mode:latest"; const CLOUD_STATE_OPERATOR_DEPLOYMENT: &str = "https://raw.githubusercontent.com/cloudstateio/cloudstate/master/operator/cloudstate.yaml"; pub fn upgrade() { let status = self_update::backends::github::Update::configure() .repo_owner("sleipnir") .repo_name("cloudstate-cli") .bin_name("cloudstate") .show_download_progress(true) .current_version(env!("CARGO_PKG_VERSION")) .build() .unwrap() .update() .unwrap(); println!("Update status: `{}`!", status.version()); } pub fn scale(args: &ArgMatches) { //kubectl scale --replicas=3 deployment/shopping-cart } pub fn logs(args: &ArgMatches) { let application = args.value_of("name").unwrap(); let space = match args.value_of("namespace") { Some(namespace) => args.value_of("namespace").unwrap_or(CLOUD_STATE_NAMESPACE), _ => CLOUD_STATE_NAMESPACE, }; log_container( application, space, args.is_present("tail"), args.is_present("all"), args.is_present("since"), args.value_of("since").unwrap_or("1m"), ); } pub fn run(app: Application, args: &ArgMatches) { if args.is_present("only-proxy") { run_proxy(&args); } else { run_all(&app, &args); } } pub fn check() { let mut commands = LinkedHashMap::new(); commands.insert( "docker", format!( "{} Docker not found in system path", Emojis::default().bomb() ), ); commands.insert( "kubectl", format!( 
"{} Kubectl not found in system path", Emojis::default().bomb() ), ); commands.insert( "minikube", format!( "{} Minikube not found in system path", Emojis::default().bomb() ), ); commands.insert("dotnet", "Dependency .NET not found in system path. If you use csharp please proceed to install it.".parse().unwrap()); commands.insert( "go", "Dependency GO not found in system path. If you use GO please proceed to install it." .parse() .unwrap(), ); commands.insert("java", "Dependency Java not found in system path. If you use Java please proceed to install it.".parse().unwrap()); commands.insert("mvn", "Dependency Java not found in system path. If you use Java please proceed to install it.".parse().unwrap()); commands.insert("npm", "Dependency NPM not found in system path. If you use NodeJS please proceed to install it.".parse().unwrap()); commands.insert("python", "Dependency Python not found in system path. If you use Python please proceed to install it.".parse().unwrap()); commands.insert("cargo", "Dependency Rust not found in system path. If you use Rust please proceed to install it.".parse().unwrap()); commands.insert("sbt", "Dependency Sbt not found in system path. 
If you use Scala please proceed to install it.".parse().unwrap()); for (command, expect) in &commands { let result = check_command(&command); if result.unwrap() == 0 { println!( "{0: <1} Dependency {1: <10} OK!", Emojis::default().ok(), command.to_title_case() ) } else { println!("{} {}", Emojis::default().nok(), expect); } } } pub fn init() { // First download templates let home_dir = get_user_dir(); if Path::new(home_dir.as_str()).exists() { fs::remove_dir_all(home_dir.clone()); } get_templates(home_dir); if let Ok(()) = create_namespace(CLOUD_STATE_NAMESPACE.parse().unwrap()) { init_operator(CLOUD_STATE_NAMESPACE.parse().unwrap()); } } pub fn destroy() { //kubectl delete all --all -n {namespace} println!( "{} Destroying CloudState resources", Emojis::default().fire() ); let result = Command::new("kubectl") .arg("delete") .arg("all") .arg("--all") .arg("-n") .arg(CLOUD_STATE_NAMESPACE) .status(); if result.is_ok() { println!("{} Deleted all resources", Emojis::default().crying()); let destroy_result = Command::new("kubectl") .arg("delete") .arg("namespace") .arg(CLOUD_STATE_NAMESPACE) .status(); if destroy_result.is_ok() { println!("{} CloudState dead", Emojis::default().broken_heart()); } else { println!("{} CloudState survivor", Emojis::default().stuck_out()); } } else { println!("{} CloudState survivor", Emojis::default().stuck_out()); } } pub fn build(app: Application) { // Retrive project configuration match app.profile.as_str() { "java" => JavaBuilder {}.build(app), "kotlin" => JavaBuilder {}.build(app), "node" => NodeBuilder {}.build(app), "go" => GoBuilder {}.build(app), "csharp" => CSharpBuilder {}.build(app), "rust" => RustBuilder {}.build(app), "python" => PythonBuilder {}.build(app), "scala" => ScalaBuilder {}.build(app), _ => println!("Invalid profile option"), } } pub fn push(app: Application) { // Retrive project configuration match app.profile.as_str() { "java" => JavaBuilder {}.push(app), "kotlin" => JavaBuilder {}.push(app), "node" => 
NodeBuilder {}.push(app), "go" => GoBuilder {}.push(app), "csharp" => CSharpBuilder {}.push(app), "rust" => RustBuilder {}.push(app), "python" => PythonBuilder {}.push(app), "scala" => ScalaBuilder {}.push(app), _ => println!("Invalid profile option"), } } pub fn deploy(app: Application) { // Retrive project configuration match app.profile.as_str() { "java" => JavaBuilder {}.deploy(app), "kotlin" => JavaBuilder {}.deploy(app), "node" => NodeBuilder {}.deploy(app), "go" => GoBuilder {}.deploy(app), "csharp" => CSharpBuilder {}.deploy(app), "rust" => RustBuilder {}.deploy(app), "python" => PythonBuilder {}.deploy(app), "scala" => ScalaBuilder {}.deploy(app), _ => println!("Invalid profile option"), } } pub fn create_project(app: Application) { let home_dir = get_user_dir(); if !(Path::new(home_dir.as_str()).exists()) { println!("You must first boot CloudState with cloudstate --init. See cloudstate --help for help"); } else { match app.profile.as_str() { "java" => JavaBuilder {}.create(app), "kotlin" => JavaBuilder {}.create(app), "node" => NodeBuilder {}.create(app), "go" => GoBuilder {}.create(app), "csharp" => CSharpBuilder {}.create(app), "rust" => RustBuilder {}.create(app), "python" => PythonBuilder {}.create(app), "scala" => ScalaBuilder {}.create(app), _ => println!("Invalid profile option"), } } } pub fn list_profiles() { let mut profiles = LinkedHashMap::new(); profiles.insert("csharp", "dotnet"); profiles.insert("go", "go"); profiles.insert("java", "java, [maven | sbt]"); profiles.insert("kotlin", "kotlin, [maven | gradle]"); profiles.insert("node", "node"); profiles.insert("python", "python, virtualenv"); profiles.insert("rust", "rust, cargo"); profiles.insert("scala", "java, scala, sbt"); println!( "{0: <10} | {1: <20} | {2: <10} | {3: <12} |", "Profile", "Dependencies", "Resolved", "Maturity Level" ); for (profile, dependencies) in &profiles { println!( "{0: <10} | {1: <20} | {2: <10} | {3: <13} |", profile, dependencies, resolve_dependencies(profile), 
maturity_level(profile.clone()) ); } println!(); println!("Subtitle:"); println!("{} Stable for production usage", Emojis::default().stable()); println!("{} Unstable but usable", Emojis::default().unstable()); println!("{} Work in progress", Emojis::default().work_in_progress()); println!("{} Unknown", Emojis::default().unknown()); } fn run_all(app: &Application, args: &ArgMatches) { let proxy_port = args.value_of("proxy-port").unwrap_or(PROXY_PORT); let function_port = args.value_of("function-port").unwrap_or(FUNCTION_PORT); let proxy_image = args.value_of("proxy-image").unwrap_or(CLOUDSTATE_PROXY_DEV_MODE); let deployment_path = Path::new(&app.work_dir).join("stack.yml"); let path = format!("{}{}", &app.home_dir, "/templates/deployments/stack.yml"); let stack_path = Path::new(&path); let stack_template_content = fs::read_to_string(stack_path.clone()).unwrap(); let image_name = &app.registry; let proxy_name = stack_template_content.replace("{proxy-image}", proxy_image.as_ref()); let deployment_proxy_port = proxy_name.replace("{expose-port}", proxy_port.as_ref()); let deployment_name = deployment_proxy_port.replace("{application-name}", app.name.as_ref()); let deployment_image = deployment_name.replace("{user-func-imagename}", image_name.as_str()); let deployment_user_port = deployment_image.replace("{user-port}", function_port.as_ref()); let deployment_content = deployment_user_port.replace("{tag}", app.tag.as_ref()); let mut stack_file = File::create(deployment_path.clone()).unwrap(); stack_file.write_all(deployment_content.as_ref()); println!("Running all containers in Swarm mode... 
"); let output = Command::new("docker") .arg("stack") .arg("deploy") .arg("--compose-file") .arg(&deployment_path) .arg("cloudstatestack") .output() .expect("Failed to deploy Stack in Swarm mode"); println!("status: {}", output.status); io::stdout().write_all(&output.stdout).unwrap(); io::stderr().write_all(&output.stderr).unwrap(); } fn run_proxy(args: &ArgMatches) { let proxy_port = args.value_of("proxy-port").unwrap_or(PROXY_PORT); let function_port = args.value_of("function-port").unwrap_or(FUNCTION_PORT); let proxy_image = args.value_of("proxy-image").unwrap_or(CLOUDSTATE_PROXY_DEV_MODE); println!("Running only proxy container"); if args.is_present("show") { println!( "Command: docker run --rm --net=host --name proxy --env HTTP_PORT:{} --env USER_FUNCTION_PORT:{} {}", proxy_port, function_port, proxy_image ); } println!("For stop press ctrl+c"); let output = Command::new("docker") .arg("run") .arg("--rm") .arg("--net=host") .arg("--name=proxy") .arg("--env") .arg(format!("HTTP_PORT={}", proxy_port)) .arg("--env") .arg(format!("USER_FUNCTION_PORT={}", function_port)) .arg(proxy_image) .output() .expect("Failed to execute proxy container"); println!("status: {}", output.status); io::stdout().write_all(&output.stdout).unwrap(); io::stderr().write_all(&output.stderr).unwrap(); } fn log_container( application: &str, namespace: &str, tail: bool, all_containers: bool, have_since: bool, since: &str, ) { let mut log = Command::new("kubectl"); log.arg("logs"); log.arg("-n"); log.arg(namespace); log.arg("-l"); log.arg(format!("user-container={}", application)); if tail { log.arg("-f"); } if all_containers { println!( "{} Get logs for {} and Sidecar containers", Emojis::default().magnifying_glass(), application ); log.arg("--all-containers"); } else { println!( "{} Get logs for {} container", Emojis::default().magnifying_glass(), application ); log.arg("-c").arg("user-container"); } if have_since { log.arg("--since").arg(since); } log.status(); } fn 
maturity_level(profile: &str) -> char { match profile { "java" => Emojis::default().stable(), "kotlin" => Emojis::default().stable(), "node" => Emojis::default().stable(), "scala" => Emojis::default().work_in_progress(), "go" => Emojis::default().unstable(), "csharp" => Emojis::default().work_in_progress(), "rust" => Emojis::default().work_in_progress(), "python" => Emojis::default().work_in_progress(), _ => Emojis::default().unknown(), } } fn resolve_dependencies(profile: &str) -> bool { match profile { "java" => JavaBuilder {}.is_dependencies_ok(), "kotlin" => JavaBuilder {}.is_dependencies_ok(), "node" => NodeBuilder {}.is_dependencies_ok(), "scala" => ScalaBuilder {}.is_dependencies_ok(), "go" => GoBuilder {}.is_dependencies_ok(), "csharp" => CSharpBuilder {}.is_dependencies_ok(), "rust" => RustBuilder {}.is_dependencies_ok(), "python" => PythonBuilder {}.is_dependencies_ok(), _ => false, } } fn create_namespace(namespace: String) -> Result<(), String> { println!( "{} Creating CloudState namespace...", Emojis::default().winking() ); if let result = Command::new("kubectl") .arg("create") .arg("namespace") .arg(namespace) .status() .is_ok() { println!( "{} Success on create CloudState namespace", Emojis::default().smiling() ); return Ok(()); }; println!( "{} Failure on create CloudState namespace", Emojis::default().screaming() ); return Err(String::from("Failure on create CloudState namespace")); } fn init_operator(namespace: String) -> Result<(), String> { println!( "{} Initializing CloudState operator...", Emojis::default().rocket() ); if let result = Command::new("kubectl") .arg("apply") .arg("-n") .arg(namespace) .arg("-f") .arg(CLOUD_STATE_OPERATOR_DEPLOYMENT) .status() .is_ok() { println!( "{} Success on installing CloudState operator", Emojis::default().success() ); return Ok(()); }; println!( "{} Failure on installing CloudState operator", Emojis::default().crying() ); return Err(String::from("Failure on installing CloudState operator")); } }
use common::{BitSet, TinySet}; use docset::{DocSet, SkipResult}; use std::cmp::Ordering; use DocId; /// A `BitSetDocSet` makes it possible to iterate through a bitset as if it was a `DocSet`. /// /// # Implementation detail /// /// Skipping is relatively fast here as we can directly point to the /// right tiny bitset bucket. /// /// TODO: Consider implementing a `BitTreeSet` in order to advance faster /// when the bitset is sparse pub struct BitSetDocSet { docs: BitSet, cursor_bucket: u32, //< index associated to the current tiny bitset cursor_tinybitset: TinySet, doc: u32, } impl BitSetDocSet { fn go_to_bucket(&mut self, bucket_addr: u32) { self.cursor_bucket = bucket_addr; self.cursor_tinybitset = self.docs.tinyset(bucket_addr); } } impl From<BitSet> for BitSetDocSet { fn from(docs: BitSet) -> BitSetDocSet { let first_tiny_bitset = if docs.max_value() == 0 { TinySet::empty() } else { docs.tinyset(0) }; BitSetDocSet { docs, cursor_bucket: 0, cursor_tinybitset: first_tiny_bitset, doc: 0u32, } } } impl DocSet for BitSetDocSet { fn advance(&mut self) -> bool { if let Some(lower) = self.cursor_tinybitset.pop_lowest() { self.doc = (self.cursor_bucket as u32 * 64u32) | lower; return true; } if let Some(cursor_bucket) = self.docs.first_non_empty_bucket(self.cursor_bucket + 1) { self.go_to_bucket(cursor_bucket); let lower = self.cursor_tinybitset.pop_lowest().unwrap(); self.doc = (cursor_bucket * 64u32) | lower; true } else { false } } fn skip_next(&mut self, target: DocId) -> SkipResult { // skip is required to advance. if !self.advance() { return SkipResult::End; } let target_bucket = target / 64u32; // Mask for all of the bits greater or equal // to our target document. 
match target_bucket.cmp(&self.cursor_bucket) { Ordering::Greater => { self.go_to_bucket(target_bucket); let greater_filter: TinySet = TinySet::range_greater_or_equal(target); self.cursor_tinybitset = self.cursor_tinybitset.intersect(greater_filter); if !self.advance() { SkipResult::End } else if self.doc() == target { SkipResult::Reached } else { debug_assert!(self.doc() > target); SkipResult::OverStep } } Ordering::Equal => loop { match self.doc().cmp(&target) { Ordering::Less => { if !self.advance() { return SkipResult::End; } } Ordering::Equal => { return SkipResult::Reached; } Ordering::Greater => { debug_assert!(self.doc() > target); return SkipResult::OverStep; } } }, Ordering::Less => { debug_assert!(self.doc() > target); SkipResult::OverStep } } } /// Returns the current document fn doc(&self) -> DocId { self.doc } /// Returns half of the `max_doc` /// This is quite a terrible heuristic, /// but we don't have access to any better /// value. fn size_hint(&self) -> u32 { self.docs.len() as u32 } } #[cfg(test)] mod tests { use super::BitSetDocSet; use common::BitSet; use docset::{DocSet, SkipResult}; use DocId; fn create_docbitset(docs: &[DocId], max_doc: DocId) -> BitSetDocSet { let mut docset = BitSet::with_max_value(max_doc); for &doc in docs { docset.insert(doc); } BitSetDocSet::from(docset) } fn test_go_through_sequential(docs: &[DocId]) { let mut docset = create_docbitset(docs, 1_000u32); for &doc in docs { assert!(docset.advance()); assert_eq!(doc, docset.doc()); } assert!(!docset.advance()); assert!(!docset.advance()); } #[test] fn test_docbitset_sequential() { test_go_through_sequential(&[]); test_go_through_sequential(&[1, 2, 3]); test_go_through_sequential(&[1, 2, 3, 4, 5, 63, 64, 65]); test_go_through_sequential(&[63, 64, 65]); test_go_through_sequential(&[1, 2, 3, 4, 95, 96, 97, 98, 99]); } #[test] fn test_docbitset_skip() { { let mut docset = create_docbitset(&[1, 5, 6, 7, 5112], 10_000); assert_eq!(docset.skip_next(7), SkipResult::Reached); 
assert_eq!(docset.doc(), 7); assert!(docset.advance(), 7); assert_eq!(docset.doc(), 5112); assert!(!docset.advance()); } { let mut docset = create_docbitset(&[1, 5, 6, 7, 5112], 10_000); assert_eq!(docset.skip_next(3), SkipResult::OverStep); assert_eq!(docset.doc(), 5); assert!(docset.advance()); } { let mut docset = create_docbitset(&[5112], 10_000); assert_eq!(docset.skip_next(5112), SkipResult::Reached); assert_eq!(docset.doc(), 5112); assert!(!docset.advance()); } { let mut docset = create_docbitset(&[5112], 10_000); assert_eq!(docset.skip_next(5113), SkipResult::End); assert!(!docset.advance()); } { let mut docset = create_docbitset(&[5112], 10_000); assert_eq!(docset.skip_next(5111), SkipResult::OverStep); assert_eq!(docset.doc(), 5112); assert!(!docset.advance()); } { let mut docset = create_docbitset(&[1, 5, 6, 7, 5112, 5500, 6666], 10_000); assert_eq!(docset.skip_next(5112), SkipResult::Reached); assert_eq!(docset.doc(), 5112); assert!(docset.advance()); assert_eq!(docset.doc(), 5500); assert!(docset.advance()); assert_eq!(docset.doc(), 6666); assert!(!docset.advance()); } { let mut docset = create_docbitset(&[1, 5, 6, 7, 5112, 5500, 6666], 10_000); assert_eq!(docset.skip_next(5111), SkipResult::OverStep); assert_eq!(docset.doc(), 5112); assert!(docset.advance()); assert_eq!(docset.doc(), 5500); assert!(docset.advance()); assert_eq!(docset.doc(), 6666); assert!(!docset.advance()); } { let mut docset = create_docbitset(&[1, 5, 6, 7, 5112, 5513, 6666], 10_000); assert_eq!(docset.skip_next(5111), SkipResult::OverStep); assert_eq!(docset.doc(), 5112); assert!(docset.advance()); assert_eq!(docset.doc(), 5513); assert!(docset.advance()); assert_eq!(docset.doc(), 6666); assert!(!docset.advance()); } } } #[cfg(all(test, feature = "unstable"))] mod bench { use super::BitSet; use super::BitSetDocSet; use test; use tests; use DocSet; #[bench] fn bench_bitset_1pct_insert(b: &mut test::Bencher) { use tests; let els = tests::generate_nonunique_unsorted(1_000_000u32, 
10_000); b.iter(|| { let mut bitset = BitSet::with_max_value(1_000_000); for el in els.iter().cloned() { bitset.insert(el); } }); } #[bench] fn bench_bitset_1pct_clone(b: &mut test::Bencher) { use tests; let els = tests::generate_nonunique_unsorted(1_000_000u32, 10_000); let mut bitset = BitSet::with_max_value(1_000_000); for el in els { bitset.insert(el); } b.iter(|| bitset.clone()); } #[bench] fn bench_bitset_1pct_clone_iterate(b: &mut test::Bencher) { let els = tests::sample(1_000_000u32, 0.01); let mut bitset = BitSet::with_max_value(1_000_000); for el in els { bitset.insert(el); } b.iter(|| { let mut docset = BitSetDocSet::from(bitset.clone()); while docset.advance() {} }); } }
use std::cell::{Cell, RefCell};

use silkenweb_reactive::signal::{ReadSignal, Signal};
use wasm_bindgen::{prelude::Closure, JsCast, JsValue};

use crate::window;

/// Queue a closure to run on the next render (scheduling an animation frame
/// if one is not already pending).
pub fn queue_update(x: impl 'static + FnOnce()) {
    RENDER.with(|r| r.queue_update(x));
}

/// Run a closure after the next render.
pub fn after_render(x: impl 'static + FnOnce()) {
    RENDER.with(|r| r.after_render(x));
}

/// Read signal carrying the most recent animation-frame timestamp
/// (the `f64` passed by the browser to the RAF callback — presumably
/// milliseconds; confirm against `requestAnimationFrame` semantics).
pub fn animation_timestamp() -> ReadSignal<f64> {
    RENDER.with(Render::animation_timestamp)
}

/// Render any pending updates.
///
/// This is mostly useful for testing.
pub fn render_updates() {
    RENDER.with(Render::render_updates);
}

/// Request that pending updates be flushed on the next animation frame.
pub fn request_render_updates() {
    RENDER.with(Render::request_render_updates);
}

/// Per-thread render scheduler: batches queued updates and runs them once
/// per `requestAnimationFrame` tick, then runs after-render effects.
struct Render {
    // True while an animation frame has been requested but not yet fired.
    raf_pending: Cell<bool>,
    // Closures to run during the next render.
    pending_updates: RefCell<Vec<Box<dyn FnOnce()>>>,
    // Closures to run after the updates of the next render.
    pending_effects: RefCell<Vec<Box<dyn FnOnce()>>>,
    // Single reusable JS callback handed to `request_animation_frame`.
    on_animation_frame: Closure<dyn FnMut(JsValue)>,
    // Backing signal for `animation_timestamp()`.
    animation_timestamp_millis: Signal<f64>,
}

impl Render {
    pub fn new() -> Self {
        Self {
            raf_pending: Cell::new(false),
            pending_updates: RefCell::new(Vec::new()),
            pending_effects: RefCell::new(Vec::new()),
            on_animation_frame: Closure::wrap(Box::new(move |time_stamp: JsValue| {
                RENDER.with(|render| {
                    // Clear the flag *before* running updates so that any
                    // update queued from inside a callback schedules a new
                    // frame rather than being silently dropped.
                    render.raf_pending.set(false);
                    render.update_animations(time_stamp.as_f64().unwrap());
                    render_updates();
                });
            })),
            animation_timestamp_millis: Signal::new(0.0),
        }
    }

    /// Queue `x` and make sure a frame is scheduled to run it.
    fn queue_update(&self, x: impl 'static + FnOnce()) {
        self.pending_updates.borrow_mut().push(Box::new(x));
        self.request_render_updates();
    }

    /// Store an effect; it only runs at the end of `render_updates`, and
    /// note that this does not itself schedule a frame.
    fn after_render(&self, x: impl 'static + FnOnce()) {
        self.pending_effects.borrow_mut().push(Box::new(x));
    }

    fn animation_timestamp(&self) -> ReadSignal<f64> {
        self.animation_timestamp_millis.read()
    }

    /// Run all queued updates, then all queued effects.
    pub fn render_updates(&self) {
        // `take()` moves each queue out of its RefCell, so callbacks may
        // queue new work without causing a re-entrant double borrow.
        for update in self.pending_updates.take() {
            update();
        }

        for effect in self.pending_effects.take() {
            effect();
        }
    }

    /// Request an animation frame unless one is already pending.
    fn request_render_updates(&self) {
        if !self.raf_pending.get() {
            self.raf_pending.set(true);
            window()
                .request_animation_frame(self.on_animation_frame.as_ref().unchecked_ref())
                .unwrap();
        }
    }

    /// Publish the latest RAF timestamp to the signal.
    fn update_animations(&self, timestamp: f64) {
        self.animation_timestamp_millis.write().set(timestamp);
    }
}

thread_local!(
    static RENDER: Render = Render::new();
);
/// Accent colours encoded as `0xRRGGBB` integers.
///
/// NOTE(review): from this file alone the consumer is not visible — the
/// values look like embed/log accent colours; confirm against the caller.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Colour {
    /// Blue, used for informational messages.
    Info = 0x4287db,
    /// Orange, used for warnings.
    Warn = 0xeb8934,
    /// Red, used for errors.
    Error = 0xcf130c,
}
#![allow(dead_code)]

mod composite;
pub use self::composite::{CompositeEncoder, CompositeDecoder};

// Block compression backends: SIMD implementation when the
// `simdcompression` feature is enabled, portable fallback otherwise.
// Both export the same `BlockEncoder` / `BlockDecoder` pair.
#[cfg(feature="simdcompression")]
mod compression_simd;
#[cfg(feature="simdcompression")]
pub use self::compression_simd::{BlockEncoder, BlockDecoder};

#[cfg(not(feature="simdcompression"))]
mod compression_nosimd;
#[cfg(not(feature="simdcompression"))]
pub use self::compression_nosimd::{BlockEncoder, BlockDecoder};

/// Variable-length integer ("vint") encoding.
///
/// NOTE(review): the byte format is LEB128-like but with the flag bit
/// inverted relative to the usual convention: the high bit is set on the
/// *last* byte of each value (see the `next_byte | 128u8` on the
/// terminating branch of the encoders), not on continuation bytes.
pub trait VIntEncoder {
    /// Delta-encodes a sorted slice; the first delta is taken against `offset`.
    fn compress_vint_sorted(&mut self, input: &[u32], offset: u32) -> &[u8];
    /// Encodes each value as-is (no delta).
    fn compress_vint_unsorted(&mut self, input: &[u32]) -> &[u8];
}

/// Inverse of [`VIntEncoder`]: both methods decode `num_els` values into the
/// decoder's output buffer and return the unread tail of `compressed_data`.
pub trait VIntDecoder {
    fn uncompress_vint_sorted<'a>(&mut self, compressed_data: &'a [u8], offset: u32, num_els: usize) -> &'a [u8];
    fn uncompress_vint_unsorted<'a>(&mut self, compressed_data: &'a [u8], num_els: usize) -> &'a [u8];
}

impl VIntEncoder for BlockEncoder{
    fn compress_vint_sorted(&mut self, input: &[u32], mut offset: u32) -> &[u8] {
        let mut byte_written = 0;
        for &v in input {
            // Delta against the previous value (or the initial `offset`).
            // Assumes `input` is sorted and `input[0] >= offset`; otherwise
            // `v - offset` underflows in debug builds.
            let mut to_encode: u32 = v - offset;
            offset = v;
            loop {
                // Emit 7 bits per byte, least-significant group first.
                let next_byte: u8 = (to_encode % 128u32) as u8;
                to_encode /= 128u32;
                if to_encode == 0u32 {
                    // Last byte of this value: mark it with the high bit.
                    self.output[byte_written] = next_byte | 128u8;
                    byte_written += 1;
                    break;
                } else {
                    self.output[byte_written] = next_byte;
                    byte_written += 1;
                }
            }
        }
        &self.output[..byte_written]
    }

    fn compress_vint_unsorted(&mut self, input: &[u32]) -> &[u8] {
        let mut byte_written = 0;
        for &v in input {
            // Same varint byte format as above, but no delta encoding.
            let mut to_encode: u32 = v;
            loop {
                let next_byte: u8 = (to_encode % 128u32) as u8;
                to_encode /= 128u32;
                if to_encode == 0u32 {
                    self.output[byte_written] = next_byte | 128u8;
                    byte_written += 1;
                    break;
                } else {
                    self.output[byte_written] = next_byte;
                    byte_written += 1;
                }
            }
        }
        &self.output[..byte_written]
    }
}

impl VIntDecoder for BlockDecoder {
    fn uncompress_vint_sorted<'a>(
            &mut self,
            compressed_data: &'a [u8],
            offset: u32,
            num_els: usize) -> &'a [u8] {
        let mut read_byte = 0;
        // `result` carries the running value across elements: each decoded
        // varint is a delta added on top of the previous element, starting
        // from `offset` — the inverse of `compress_vint_sorted`.
        let mut result = offset;
        for i in 0..num_els {
            let mut shift = 0u32;
            loop {
                let cur_byte = compressed_data[read_byte];
                read_byte += 1;
                result += ((cur_byte % 128u8) as u32) << shift;
                // High bit set marks the last byte of this value.
                if cur_byte & 128u8 != 0u8 {
                    break;
                }
                shift += 7;
            }
            self.output[i] = result;
        }
        self.output_len = num_els;
        &compressed_data[read_byte..]
    }

    fn uncompress_vint_unsorted<'a>(
            &mut self,
            compressed_data: &'a [u8],
            num_els: usize) -> &'a [u8] {
        let mut read_byte = 0;
        for i in 0..num_els {
            // No delta here: each value is decoded independently.
            let mut result = 0u32;
            let mut shift = 0u32;
            loop {
                let cur_byte = compressed_data[read_byte];
                read_byte += 1;
                result += ((cur_byte % 128u8) as u32) << shift;
                if cur_byte & 128u8 != 0u8 {
                    break;
                }
                shift += 7;
            }
            self.output[i] = result;
        }
        self.output_len = num_els;
        &compressed_data[read_byte..]
    }
}

pub const NUM_DOCS_PER_BLOCK: usize = 128; //< should be a power of 2 to let the compiler optimize.

#[cfg(test)]
pub mod tests {

    use rand::Rng;
    use rand::SeedableRng;
    use rand::XorShiftRng;
    use super::*;
    use test::Bencher;

    // Deterministically samples ~`ratio` of u32s until `n` values are drawn.
    fn generate_array_with_seed(n: usize, ratio: f32, seed_val: u32) -> Vec<u32> {
        let seed: &[u32; 4] = &[1, 2, 3, seed_val];
        let mut rng: XorShiftRng = XorShiftRng::from_seed(*seed);
        (0..u32::max_value())
            .filter(|_| rng.next_f32() < ratio)
            .take(n)
            .collect()
    }

    pub fn generate_array(n: usize, ratio: f32) -> Vec<u32> {
        generate_array_with_seed(n, ratio, 4)
    }

    #[test]
    fn test_encode_sorted_block() {
        let vals: Vec<u32> = (0u32..128u32).map(|i| i * 7).collect();
        let mut encoder = BlockEncoder::new();
        let compressed_data = encoder.compress_block_sorted(&vals, 0);
        let mut decoder = BlockDecoder::new();
        {
            let remaining_data = decoder.uncompress_block_sorted(compressed_data, 0);
            assert_eq!(remaining_data.len(), 0);
        }
        for i in 0..128 {
            assert_eq!(vals[i], decoder.output(i));
        }
    }

    #[test]
    fn test_encode_sorted_block_with_offset() {
        let vals: Vec<u32> = (0u32..128u32).map(|i| 11 + i * 7).collect();
        let mut encoder = BlockEncoder::new();
        let compressed_data = encoder.compress_block_sorted(&vals, 10);
        let mut decoder = BlockDecoder::new();
        {
            let remaining_data = decoder.uncompress_block_sorted(compressed_data, 10);
            assert_eq!(remaining_data.len(), 0);
        }
        for i in 0..128 {
            assert_eq!(vals[i], decoder.output(i));
        }
    }

    #[test]
    fn test_encode_sorted_block_with_junk() {
        // A trailing junk byte must be left unread and returned to the caller.
        let mut compressed: Vec<u8> = Vec::new();
        let n = 128;
        let vals: Vec<u32> = (0..n).map(|i| 11u32 + (i as u32) * 7u32).collect();
        let mut encoder = BlockEncoder::new();
        let compressed_data = encoder.compress_block_sorted(&vals, 10);
        compressed.extend_from_slice(compressed_data);
        compressed.push(173u8);
        let mut decoder = BlockDecoder::new();
        {
            let remaining_data = decoder.uncompress_block_sorted(&compressed, 10);
            assert_eq!(remaining_data.len(), 1);
            assert_eq!(remaining_data[0], 173u8);
        }
        for i in 0..n {
            assert_eq!(vals[i], decoder.output(i));
        }
    }

    #[test]
    fn test_encode_unsorted_block_with_junk() {
        let mut compressed: Vec<u8> = Vec::new();
        let n = 128;
        let vals: Vec<u32> = (0..n).map(|i| 11u32 + (i as u32) * 7u32 % 12).collect();
        let mut encoder = BlockEncoder::new();
        let compressed_data = encoder.compress_block_unsorted(&vals);
        compressed.extend_from_slice(compressed_data);
        compressed.push(173u8);
        let mut decoder = BlockDecoder::new();
        {
            let remaining_data = decoder.uncompress_block_unsorted(&compressed);
            assert_eq!(remaining_data.len(), 1);
            assert_eq!(remaining_data[0], 173u8);
        }
        for i in 0..n {
            assert_eq!(vals[i], decoder.output(i));
        }
    }

    #[test]
    fn test_encode_vint() {
        {
            let expected_length = 123;
            let mut encoder = BlockEncoder::new();
            let input: Vec<u32> = (0u32..123u32)
                .map(|i| 4 + i * 7 / 2)
                .into_iter()
                .collect();
            for offset in &[0u32, 1u32, 2u32] {
                let encoded_data = encoder.compress_vint_sorted(&input, *offset);
                assert_eq!(encoded_data.len(), expected_length);
                let mut decoder = BlockDecoder::new();
                let remaining_data = decoder.uncompress_vint_sorted(&encoded_data, *offset, input.len());
                assert_eq!(0, remaining_data.len());
                assert_eq!(input, decoder.output_array());
            }
        }
        {
            // Byte-level check of the delta + last-byte-flag format:
            // deltas are 3, 14, 170; 170 needs two bytes (42, then 1|128).
            let mut encoder = BlockEncoder::new();
            let input = vec!(3u32, 17u32, 187u32);
            let encoded_data = encoder.compress_vint_sorted(&input, 0);
            assert_eq!(encoded_data.len(), 4);
            assert_eq!(encoded_data[0], 3u8 + 128u8);
            assert_eq!(encoded_data[1], (17u8 - 3u8) + 128u8);
            assert_eq!(encoded_data[2], (187u8 - 17u8 - 128u8));
            assert_eq!(encoded_data[3], (1u8 + 128u8));
        }
    }

    #[bench]
    fn bench_compress(b: &mut Bencher) {
        let mut encoder = BlockEncoder::new();
        let data = generate_array(NUM_DOCS_PER_BLOCK, 0.1);
        b.iter(|| {
            encoder.compress_block_sorted(&data, 0u32);
        });
    }

    #[bench]
    fn bench_uncompress(b: &mut Bencher) {
        let mut encoder = BlockEncoder::new();
        let data = generate_array(NUM_DOCS_PER_BLOCK, 0.1);
        let compressed = encoder.compress_block_sorted(&data, 0u32);
        let mut decoder = BlockDecoder::new();
        b.iter(|| {
            decoder.uncompress_block_sorted(compressed, 0u32);
        });
    }
}
// NOTE(review): svd2rust-generated register accessors for SR2 (FIFO status
// and interrupt register 2). Manual edits here are normally lost on
// regeneration — change the SVD description instead.
#[doc = "Register `SR2` reader"]
pub type R = crate::R<SR2_SPEC>;
#[doc = "Register `SR2` writer"]
pub type W = crate::W<SR2_SPEC>;
#[doc = "Field `IRS` reader - IRS"]
pub type IRS_R = crate::BitReader;
#[doc = "Field `IRS` writer - IRS"]
pub type IRS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ILS` reader - ILS"]
pub type ILS_R = crate::BitReader;
#[doc = "Field `ILS` writer - ILS"]
pub type ILS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IFS` reader - IFS"]
pub type IFS_R = crate::BitReader;
#[doc = "Field `IFS` writer - IFS"]
pub type IFS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IREN` reader - IREN"]
pub type IREN_R = crate::BitReader;
#[doc = "Field `IREN` writer - IREN"]
pub type IREN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ILEN` reader - ILEN"]
pub type ILEN_R = crate::BitReader;
#[doc = "Field `ILEN` writer - ILEN"]
pub type ILEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IFEN` reader - IFEN"]
pub type IFEN_R = crate::BitReader;
#[doc = "Field `IFEN` writer - IFEN"]
pub type IFEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// FEMPT is read-only: no `FEMPT_W` writer type is generated for it.
#[doc = "Field `FEMPT` reader - FEMPT"]
pub type FEMPT_R = crate::BitReader;
impl R {
    #[doc = "Bit 0 - IRS"]
    #[inline(always)]
    pub fn irs(&self) -> IRS_R {
        IRS_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - ILS"]
    #[inline(always)]
    pub fn ils(&self) -> ILS_R {
        ILS_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - IFS"]
    #[inline(always)]
    pub fn ifs(&self) -> IFS_R {
        IFS_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - IREN"]
    #[inline(always)]
    pub fn iren(&self) -> IREN_R {
        IREN_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - ILEN"]
    #[inline(always)]
    pub fn ilen(&self) -> ILEN_R {
        ILEN_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - IFEN"]
    #[inline(always)]
    pub fn ifen(&self) -> IFEN_R {
        IFEN_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - FEMPT"]
    #[inline(always)]
    pub fn fempt(&self) -> FEMPT_R {
        FEMPT_R::new(((self.bits >> 6) & 1) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - IRS"]
    #[inline(always)]
    #[must_use]
    pub fn irs(&mut self) -> IRS_W<SR2_SPEC, 0> {
        IRS_W::new(self)
    }
    #[doc = "Bit 1 - ILS"]
    #[inline(always)]
    #[must_use]
    pub fn ils(&mut self) -> ILS_W<SR2_SPEC, 1> {
        ILS_W::new(self)
    }
    #[doc = "Bit 2 - IFS"]
    #[inline(always)]
    #[must_use]
    pub fn ifs(&mut self) -> IFS_W<SR2_SPEC, 2> {
        IFS_W::new(self)
    }
    #[doc = "Bit 3 - IREN"]
    #[inline(always)]
    #[must_use]
    pub fn iren(&mut self) -> IREN_W<SR2_SPEC, 3> {
        IREN_W::new(self)
    }
    #[doc = "Bit 4 - ILEN"]
    #[inline(always)]
    #[must_use]
    pub fn ilen(&mut self) -> ILEN_W<SR2_SPEC, 4> {
        ILEN_W::new(self)
    }
    #[doc = "Bit 5 - IFEN"]
    #[inline(always)]
    #[must_use]
    pub fn ifen(&mut self) -> IFEN_W<SR2_SPEC, 5> {
        IFEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "FIFO status and interrupt register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SR2_SPEC;
impl crate::RegisterSpec for SR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`sr2::R`](R) reader structure"]
impl crate::Readable for SR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`sr2::W`](W) writer structure"]
impl crate::Writable for SR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
// Reset value 0x40 means only FEMPT (bit 6, "FIFO empty") is set after reset.
#[doc = "`reset()` method sets SR2 to value 0x40"]
impl crate::Resettable for SR2_SPEC {
    const RESET_VALUE: Self::Ux = 0x40;
}
use common::console_utils::Timer; use wasm_bindgen::prelude::*; #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; /// Calculate the sum of the fuel requirements for all of the modules on the spacecraft. /// See: https://adventofcode.com/2019/day/1 #[wasm_bindgen] pub fn part1(input: &str) -> Result<i32, JsValue> { Timer::new("rust::part1"); let sum = parse_input(input)? .iter() .map(|&mass| fuel_required(mass)) .sum(); Ok(sum) } /// Calculate the sum of the fuel requirements for all of the modules on the spacecraft, also /// taking into account the mass of the added fuel. /// See: https://adventofcode.com/2019/day/1#part2 #[wasm_bindgen] pub fn part2(input: &str) -> Result<i32, JsValue> { Timer::new("rust::part2"); let sum = parse_input(input)? .iter() .map(|&mass| total_fuel_required(mass)) .sum(); Ok(sum) } /// Parse the input (a list of modules their masses) as a list of integers. fn parse_input(input: &str) -> Result<Vec<i32>, &'static str> { input .lines() .map(|l| { l.parse::<i32>() .map_err(|_| "could not parse input as integers") }) .collect() } /// Fuel required to carry the given mass. fn fuel_required(mass: i32) -> i32 { // integer dision already rounds down (mass / 3) - 2 } /// Total fuel required to carry the given mass, including the mass of the fuel itself. 
fn total_fuel_required(mass: i32) -> i32 { let fuel = fuel_required(mass); if fuel <= 0 { return 0; } fuel + total_fuel_required(fuel) } #[cfg(test)] mod tests { #[test] fn parse_input() { assert_eq!(crate::parse_input("123\n-5\n0\n"), Ok(vec![123, -5, 0])); assert!(crate::parse_input("123\na\n0\n").is_err()); } #[test] fn fuel_required() { assert_eq!(crate::fuel_required(12), 2); assert_eq!(crate::fuel_required(14), 2); assert_eq!(crate::fuel_required(1969), 654); assert_eq!(crate::fuel_required(100_756), 33_583); } #[test] fn total_fuel_required() { assert_eq!(crate::total_fuel_required(14), 2); assert_eq!(crate::total_fuel_required(1969), 966); assert_eq!(crate::total_fuel_required(100_756), 50_346); } }
use papergrid::{
    config::Entity,
    records::{ExactRecords, PeekableRecords},
};

use crate::{
    grid::records::{Records, RecordsMut},
    settings::{CellOption, TableOption},
};

/// A structure to handle special chars.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct Charset;

impl Charset {
    /// Returns [`CleanCharset`] which removes all `\t` and `\r` occurrences.
    ///
    /// Notice that a tab is just removed rather than being replaced with spaces.
    /// You might be better off calling [`TabSize`] first if you do not expect such behavior.
    ///
    /// # Example
    ///
    /// ```
    /// use tabled::{Table, settings::formatting::Charset};
    ///
    /// let text = "Some\ttext\t\twith \\tabs";
    ///
    /// let mut table = Table::new([text]);
    /// table.with(Charset::clean());
    ///
    /// assert_eq!(
    ///     table.to_string(),
    ///     "+--------------------+\n\
    ///      | &str               |\n\
    ///      +--------------------+\n\
    ///      | Sometextwith \\tabs |\n\
    ///      +--------------------+"
    /// )
    /// ```
    ///
    /// [`TabSize`]: crate::settings::formatting::TabSize
    pub fn clean() -> CleanCharset {
        CleanCharset
    }
}

/// [`CleanCharset`] removes all `\t` and `\r` occurrences.
///
/// # Example
///
/// ```
/// use tabled::{Table, settings::formatting::Charset};
///
/// let text = "Some text which was created on windows \r\n yes they use this \\r\\n";
///
/// let mut builder = Table::builder([text]);
/// builder.set_header(["win. text"]);
///
/// let mut table = builder.build();
/// table.with(Charset::clean());
///
/// assert_eq!(
///     table.to_string(),
///     "+-----------------------------------------+\n\
///      | win. text                               |\n\
///      +-----------------------------------------+\n\
///      | Some text which was created on windows  |\n\
///      |  yes they use this \\r\\n                 |\n\
///      +-----------------------------------------+"
/// )
/// ```
#[derive(Debug, Default, Clone)]
pub struct CleanCharset;

impl<R, D, C> TableOption<R, D, C> for CleanCharset
where
    for<'a> &'a R: Records,
    R: RecordsMut<String>,
{
    // Strips `\t` and `\r` from every cell of the table.
    fn change(self, records: &mut R, _: &mut C, _: &mut D) {
        // Two passes: collect the cleaned texts while borrowing `records`
        // immutably, then apply them via the mutable `set` calls.
        let mut list = vec![];
        for (row, cells) in records.iter_rows().into_iter().enumerate() {
            for (col, text) in cells.into_iter().enumerate() {
                let text = text.as_ref().replace(['\t', '\r'], "");
                list.push(((row, col), text));
            }
        }

        for (pos, text) in list {
            records.set(pos, text);
        }
    }
}

impl<R, C> CellOption<R, C> for CleanCharset
where
    R: Records + ExactRecords + PeekableRecords + RecordsMut<String>,
{
    // Strips `\t` and `\r` from the cells selected by `entity` only.
    fn change(self, records: &mut R, _: &mut C, entity: Entity) {
        let count_rows = records.count_rows();
        let count_cols = records.count_columns();

        for pos in entity.iter(count_rows, count_cols) {
            let text = records.get_text(pos);
            let text = text.replace(['\t', '\r'], "");
            records.set(pos, text);
        }
    }
}
//! Currently we use only 64Mb physical memory
//! Physical memory layout looks like:
//! [1] [0x0, 0x600000): Initially mapped space, Kernel starts at 0x100000
//! [2] [0x600000, 0x800000): Contiguous kernel memory, contains page table
//!
//! Virtual memory layout looks like:
//! [1] [0xFFFFFFFF80000000, 0xFFFFFFFF80600000)
//! [2] [0xFFFFFFFF80600000, 0xFFFFFFFF80800000)
//!
//! We don't really need to modify PDPT entries
//! after initialization, so we just create the instance
//! of several page tables and two page directories

use core::convert::{From, Into};
use core::ops::Drop;
use core::sync::atomic;
use rlibc::memset;

/// Physical base of the page-table pool (region [2] in the module docs).
const PAGETABLE_PHYS: u64 = 0x600000;
/// Virtual base of the page-table pool (PAGETABLE_PHYS + KERNEL_BASE).
const PAGETABLE_VIRT: u64 = 0xFFFFFFFF80600000;
/// log2 of the page size: shift between an address and its frame number.
const PAGE_SHIFT: u32 = 12;
/// Number of 4K frames assumed mapped at boot.
/// NOTE(review): 3072 frames is 12Mb, which does not match the 6Mb/8Mb
/// regions described in the module docs above — confirm the intended value.
const INITIAL_MAPPED: u64 = 3072;
/// Total number of managed 4K frames (16384 * 4K = 64Mb).
const MAX_MAPPED: u64 = 16384; // 64Mb

// Page-table structures provided by the early boot code / linker script.
extern "C" {
    static mut pml4: [u64; 512];
    static mut user_pdpt: PageTable;
    static mut kernel_pdpt: PageTable;
    static mut user_pd: PageTable;
    static mut kernel_pd: PageTable;
}

/// Structure representing a page table structure (PML4, PDPT, PD, PT)
///
/// All four levels of the x86-64 paging hierarchy share this layout:
/// 512 eight-byte entries.
#[repr(C, packed)]
pub struct PageTable {
    v: [u64; 512],
}

impl PageTable {
    /// Generic mapping function
    ///
    /// Builds an entry for `paddr` at slot `idx` with the requested flag
    /// bits: 0x1 present, 0x2 writable (`rw`), 0x4 user-accessible (`user`),
    /// 0x80 large page (`ps`). Returns `false` (and changes nothing) if the
    /// slot is already present.
    pub fn map(
        &mut self,
        idx: usize,
        paddr: PhysicalAddress,
        rw: bool,
        user: bool,
        ps: bool,
    ) -> bool {
        // Don't override existing mapping
        if self.v[idx] & 1 != 0 {
            return false;
        }
        let mut entry: u64 = 0x1;
        if ps {
            entry |= 0x80;
        }
        if rw {
            entry |= 0x2;
        }
        if user {
            entry |= 0x4;
        }
        // Only the page-aligned part of the address goes into the entry.
        entry |= paddr.mask(12);
        self.v[idx] = entry;
        true
    }

    /// Unmap an entry
    ///
    /// Returns `false` if the entry was not present to begin with.
    pub fn unmap(&mut self, idx: usize) -> bool {
        if self.v[idx] & 1 != 0 {
            self.v[idx] = 0;
            true
        } else {
            false
        }
    }

    /// Get next level page table
    pub fn next(&self, idx: usize) -> Result<*mut PageTable, ::common::error::Error> {
        // Only when the entry is present and PS flag is not set
        // it's a valid entry for next level
        if self.v[idx] & 1 == 0 || self.v[idx] & 0x80 != 0 {
            return Err(err!(EFAULT));
        }
        // Page table resides in kernel space
        // and it's identically mapped
        let virtaddr = VirtualAddress((self.v[idx] & !((1 << 12) - 1)) + KERNEL_BASE);
        Ok(virtaddr.as_ptr())
    }

    /// Map kernel space
    ///
    /// Installs `kernel_pdpt` into the last PML4 slot (511), which covers
    /// the canonical-high kernel half starting at KERNEL_BASE.
    pub fn map_kernel(&mut self) -> bool {
        // Convert the linked symbol's virtual address back to physical.
        let paddr = unsafe { (&kernel_pdpt as *const PageTable) as u64 - KERNEL_BASE };
        self.map(511, paddr.into(), true, false, false)
    }

    /// Check if entry present
    pub fn present(&self, idx: usize) -> bool {
        self.v[idx] & 1 != 0
    }

    /// Get an entry directly
    pub fn get(&self, idx: usize) -> u64 {
        self.v[idx]
    }
}

/// Virtual address
#[derive(Clone, Copy, Debug)]
pub struct VirtualAddress(u64);

/// Physical address
#[derive(Clone, Copy, Debug)]
pub struct PhysicalAddress(u64);

impl VirtualAddress {
    pub fn new(v: u64) -> Self {
        VirtualAddress(v)
    }

    /// Create a VirtualAddress from pointer
    pub fn from_pointer<T>(ptr: *const T) -> Self {
        let value = ptr as u64;
        VirtualAddress(value)
    }

    /// Return page frame number
    #[inline]
    pub fn frame(&self) -> u64 {
        self.0 >> PAGE_SHIFT
    }

    /// Return page table index (bits 12..21)
    #[inline]
    pub fn table_index(&self) -> usize {
        ((self.0 >> PAGE_SHIFT) & 0x1ff) as usize
    }

    /// Return page directory index (bits 21..30)
    #[inline]
    pub fn dir_index(&self) -> usize {
        ((self.0 >> 21) & 0x1ff) as usize
    }

    /// Return page directory pointer index (bits 30..39)
    #[inline]
    pub fn dptr_index(&self) -> usize {
        ((self.0 >> 30) & 0x1ff) as usize
    }

    /// Return PML4 index (bits 39..48)
    #[inline]
    pub fn pml4_index(&self) -> usize {
        ((self.0 >> 39) & 0x1ff) as usize
    }

    /// Mask low bits
    #[inline]
    pub fn mask(&self, lowbits: u64) -> u64 {
        self.0 & !((1 << lowbits) - 1)
    }

    /// Return if the paddr is an usermode address
    ///
    /// User space is the PML4 slot 0 region by this kernel's convention.
    pub fn usermode(&self) -> bool {
        self.pml4_index() == 0
    }

    /// Returns as a pointer
    pub fn as_ptr<T: Sized>(&self) -> *mut T {
        self.0 as *mut T
    }

    /// Returns as a reference
    ///
    /// Unsafe: caller must guarantee the address is mapped, aligned, and
    /// not aliased mutably elsewhere. Note this hands out `&mut T` from
    /// `&self` via the raw pointer.
    pub unsafe fn as_ref<T: Sized>(&self) -> Option<&mut T> {
        (self.as_ptr() as *mut T).as_mut()
    }

    /// Subtract and return the result
    pub fn sub(&self, v: u64) -> u64 {
        self.0 - v
    }

    /// Add and return the result
    pub fn add(&self, v: u64) -> u64 {
        self.0 + v
    }
}

impl PhysicalAddress {
    pub fn new(v: u64) -> Self {
        PhysicalAddress(v)
    }

    /// Mask low bits
    #[inline]
    pub fn mask(&self, lowbits: u64) -> u64 {
        self.0 & !((1 << lowbits) - 1)
    }

    /// From Page Frame Number
    #[inline]
    pub fn from_pfn(frame: u64) -> Self {
        PhysicalAddress(frame * PAGE_SIZE)
    }

    /// To PFN
    #[inline]
    pub fn pfn(&self) -> u64 {
        self.0 >> PAGE_SHIFT
    }

    /// Subtract and return the result
    pub fn sub(&self, v: u64) -> u64 {
        self.0 - v
    }

    /// Add and return the result
    pub fn add(&self, v: u64) -> u64 {
        self.0 + v
    }
}

impl From<u64> for PhysicalAddress {
    fn from(v: u64) -> Self {
        PhysicalAddress(v)
    }
}

impl From<u64> for VirtualAddress {
    fn from(v: u64) -> Self {
        VirtualAddress(v)
    }
}

impl Into<u64> for PhysicalAddress {
    fn into(self) -> u64 {
        self.0
    }
}

impl Into<u64> for VirtualAddress {
    fn into(self) -> u64 {
        self.0
    }
}

/// Mapped page size
pub const PAGE_SIZE: u64 = 0x1000;
pub const KERNEL_BASE: u64 = 0xFFFFFFFF80000000;
pub const HEAP_VIRT: u64 = 0xFFFFFFFF80700000;
pub const HEAP_SIZE: u64 = 0x100000;

/// Physical page usage
/// (1024 * 64 = 65536 bits, enough for MAX_MAPPED = 16384 frames.)
static mut PHYSPAGE_BITMAP: [u64; 1024] = [0; 1024];
/// Page table usage
static mut PAGETABLE_INUSE: [bool; 1024] = [false; 1024];

/// Marks a 4K page as present
fn mark_page(frame: u64) {
    let idx = frame as usize / 64;
    let bitoff = frame % 64;
    unsafe {
        PHYSPAGE_BITMAP[idx] |= 1 << bitoff;
    }
}

/// Marks a 4K page as free
fn clear_page(frame: u64) {
    let idx = frame as usize / 64;
    let bitoff = frame % 64;
    unsafe {
        PHYSPAGE_BITMAP[idx] &= !(1 << bitoff);
    }
}

/// Checks whether a page is present
fn page_marked(frame: u64) -> bool {
    let idx = frame as usize / 64;
    let bitoff = frame % 64;
    unsafe { (PHYSPAGE_BITMAP[idx] & 1 << bitoff) != 0 }
}

/// Get cr3 (AT&T syntax: source operand first)
#[inline]
pub unsafe fn cr3() -> u64 {
    let result: u64;
    asm!("mov %cr3, $0" : "=r"(result) : : );
    result
}

/// Set cr3
#[inline]
pub unsafe fn set_cr3(cr3: u64) {
    asm!("mov $0, %cr3" : : "r"(cr3) : : );
}

// Global spin lock serializing access through the `MMU` handle below.
static MMU_LOCK: atomic::AtomicBool = atomic::ATOMIC_BOOL_INIT;

/// An instance of MMU
///
/// The inner bool is only a marker; holding the value means holding
/// MMU_LOCK (released on drop).
pub struct MMU(bool);
impl MMU { /// Get an instance of MMU pub fn get() -> Self { while !MMU_LOCK.compare_and_swap(false, true, atomic::Ordering::Relaxed) { // Do nothing } atomic::fence(atomic::Ordering::Acquire); MMU(true) } /// Virtual address to physical address pub fn vtop(addr: VirtualAddress) -> Result<PhysicalAddress, ::common::error::Error> { unimplemented!(); } /// Flush entire TLB pub unsafe fn flush(&self) { asm!( r#" mov %rax, %cr3 mov %cr3, %rax "# ); } /// Allocate one physical page pub fn alloc_phys(&self) -> Result<PhysicalAddress, ::common::error::Error> { for frame in INITIAL_MAPPED..MAX_MAPPED { if !page_marked(frame) { mark_page(frame); return Ok(PhysicalAddress::from_pfn(frame)); } } Err(err!(ENOMEM)) } /// Free one physical page pub fn free_phys(&self, addr: PhysicalAddress) -> Result<(), ::common::error::Error> { let frame = addr.pfn(); match page_marked(frame) { false => Err(err!(EFAULT)), true => { clear_page(frame); Ok(()) } } } /// Allocate one page pub fn alloc_page(&self) -> Result<VirtualAddress, ::common::error::Error> { // Page table region: 0x600000 ~ 0x700000 for i in 0..1024 { unsafe { if !PAGETABLE_INUSE[i] { PAGETABLE_INUSE[i] = true; let address: u64 = (i as u64) * PAGE_SIZE + PAGETABLE_VIRT; memset(address as *mut u8, 0, PAGE_SIZE as usize); return Ok(address.into()); } } } Err(err!(ENOMEM)) } /// Free one page pub fn free_page(&self, virt: VirtualAddress) -> Result<(), ::common::error::Error> { let address: u64 = (Into::<u64>::into(virt) - PAGETABLE_VIRT) / PAGE_SIZE; unsafe { if PAGETABLE_INUSE[address as usize] { PAGETABLE_INUSE[address as usize] = false; return Ok(()); } } Err(err!(EFAULT)) } /// Allocate contiguous pages pub fn alloc_contiguous(&self, count: usize) -> Result<VirtualAddress, ::common::error::Error> { for i in 0..1024 { // Look for contiguous pages linearly let mut flag = true; for j in i..i + count { unsafe { if j >= 1024 || PAGETABLE_INUSE[j] { flag = false; break; } } } // Returns if ok if flag { for j in i..i + count { 
unsafe { PAGETABLE_INUSE[j] = true; } } let addr = (i as u64) * PAGE_SIZE + PAGETABLE_VIRT; return Ok(addr.into()); } } Err(err!(ENOMEM)) } /// Free contiguous pages pub fn free_contiguous( &self, addr: VirtualAddress, count: usize, ) -> Result<(), ::common::error::Error> { for i in 0..count { let address = addr.mask(12) + (i as u64) * PAGE_SIZE; try!(self.free_page(address.into())); } Ok(()) } /// Return current PML4 Virtual address pub fn pml4(&self) -> *mut PageTable { unsafe { let virtaddr = VirtualAddress(cr3() + KERNEL_BASE); virtaddr.as_ptr() } } /// Create a initialized page table for a new context pub fn new_pml4(&self) -> Result<*mut PageTable, ::common::error::Error> { unimplemented!(); } /// Free a created PML4 pub fn free_pml4(&self) { unimplemented!(); } } impl Drop for MMU { fn drop(&mut self) { MMU_LOCK.store(false, atomic::Ordering::Release); } } pub fn init() { // These pages are mapped initially for i in 0..INITIAL_MAPPED { mark_page(i); } unsafe { // Replace the first pml4 entry let ptr = ((&mut pml4 as *mut [u64; 512]) as u64 + KERNEL_BASE) as *mut u64; let val = (&mut user_pdpt as *mut PageTable) as u64 - KERNEL_BASE; *ptr = val; // Map contiguous kernel memory let addr = 0x600000; assert!(kernel_pd.map(3, addr.into(), true, false, true)); } }
/*
 * Copyright 2019-2023 Didier Plaindoux
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#[cfg(test)]
mod tests_and {
    use celma_core::parser::parser::Parse;
    use celma_core::parser::response::Response::{Reject, Success};
    use celma_core::stream::char_stream::CharStream;
    use celma_lang::meta::parser::celma_parsec_rules;
    use celma_lang::meta::syntax::ASTParsec::{PChar, PChoice, PCode};
    use celma_lang::meta::syntax::ASTParsecRule;

    // A single rule whose body is a character literal.
    #[test]
    fn it_parse_one_char_rule() {
        let response = celma_parsec_rules().parse(CharStream::new("let a:{char} = 'a'"));

        match response {
            Success(ast, _, _) => assert_eq!(
                ast,
                vec!(ASTParsecRule {
                    name: String::from("a"),
                    input: String::from("char"),
                    returns: String::from("char"),
                    body: Box::new(PChar('a')),
                })
            ),
            // `panic!` instead of `assert_eq!(true, false)`: same failure,
            // clearer intent and message.
            _ => panic!("expected the rule to parse successfully"),
        };
    }

    // Two rules whose bodies are inlined rust code.
    #[test]
    fn it_parse_two_char_rules() {
        let response = celma_parsec_rules().parse(CharStream::new(
            "let a:{char} = {char('a')} let b:{char} = {char('b')}",
        ));

        match response {
            Success(ast, _, _) => assert_eq!(
                ast,
                vec!(
                    ASTParsecRule {
                        name: String::from("a"),
                        input: String::from("char"),
                        returns: String::from("char"),
                        body: Box::new(PCode(String::from("char(\'a\')"))),
                    },
                    ASTParsecRule {
                        name: String::from("b"),
                        input: String::from("char"),
                        returns: String::from("char"),
                        body: Box::new(PCode(String::from("char(\'b\')"))),
                    }
                )
            ),
            _ => panic!("expected the rules to parse successfully"),
        };
    }

    // A choice (`|`) between a literal and inlined code.
    #[test]
    fn it_parse_two_complexe_rules() {
        let response = celma_parsec_rules().parse(CharStream::new(
            "let a:{char} = 'a'|{char('b')} let b:{char} = {char('c')}",
        ));

        match response {
            Success(ast, _, _) => assert_eq!(
                ast,
                vec!(
                    ASTParsecRule {
                        name: String::from("a"),
                        input: String::from("char"),
                        returns: String::from("char"),
                        body: Box::new(PChoice(
                            Box::new(PChar('a')),
                            Box::new(PCode(String::from("char(\'b\')"))),
                        )),
                    },
                    ASTParsecRule {
                        name: String::from("b"),
                        input: String::from("char"),
                        returns: String::from("char"),
                        body: Box::new(PCode(String::from("char(\'c\')"))),
                    }
                )
            ),
            _ => panic!("expected the rules to parse successfully"),
        };
    }

    // Smoke test: the grammar of celma itself parses; only success is
    // asserted, not the resulting AST.
    #[test]
    fn it_parse_celma_rules() {
        let response = celma_parsec_rules().parse(CharStream::new(
            r#"
            let parsec_rules:{Vec<ASTParserRule>} = _=parsec_rule+
            let parsec_rule:{ASTParserRule} = "let" n=ident ':' '{' t=rust_code '}' "=" p=parsec -> { ASTParserRule(n,c,p) }
            let parsec:{ASTParser} = binding? atom occurrence? additional? transform?
            let binding:{String} = _=ident '='
            let occurrence:{char} = ('*' | '+' | '?')
            let additional:{(bool,ASTParser)} = (c=("|"?) -> { c.is_empty() }) _=parser
            let transform:{String} = "->" '{' _=rust_code '}'
            let atom:{ASTParser} = ('(' _=parser ')') | CHAR | STRING | ident | ('{' _=rust_code '}')
            "#
        ));

        match response {
            Success(_, _, _) => (),
            Reject(_, _) => panic!("celma grammar should parse successfully"),
        };
    }

    // Smoke test: a JSON grammar written in celma parses.
    #[test]
    fn it_parse_json() {
        let response = celma_parsec_rules().parse(CharStream::new(
            r#"
            let json:{JSon} = number|string|null|boolean|array|object|attribute
            let string:{JSon} = s={STRING} -> { TKString(s) }
            let number:{JSon} = n={NUMBER} -> { TKNumber(n) }
            let null:{JSon} = "null" -> { TKNull }
            let boolean:{JSon} = b=("true"|"false") -> { TKBool(b) }
            let array:{JSon} = '[' s=json* ']' -> { TkArray(s) }
            let object:{JSon} = '{' s=(_=STRING ":" _=json)* '}' -> { TkObject(s) }
            "#,
        ));

        match response {
            Success(_, _, _) => (),
            Reject(_, _) => panic!("JSON grammar should parse successfully"),
        };
    }
}
extern crate protobuf; extern crate serde_yaml; #[macro_use] extern crate serde_derive; mod xchain; pub mod encoder; pub mod errors; pub mod ocall; pub mod sgx_ocall; pub mod protos;
pub mod widgets;

use glium::index::PrimitiveType::TriangleStrip;
use glium::{DrawParameters, VertexBuffer, IndexBuffer, Surface, Frame};
use universe::game::Game;
use render::Window;
use render::Vertex;

/// Top-level GUI state: the widgets drawn on top of the scene each frame.
pub struct Gui{
    pub buttons: Vec<widgets::Button>
}

impl Gui{
    /// Draws every button as a textured unit quad using the "solid" shader.
    ///
    /// `target` is the frame being rendered, `window` supplies the draw
    /// context (display, camera, shader cache) and `params` the GL state.
    pub fn draw_gui(&self, target: &mut Frame, window: &Window, params: &DrawParameters){
        let disp = &window.draw_context;

        // Unit quad in the z = -1 plane, zero normals, full texture coords.
        // NOTE(review): both buffers are rebuilt on every call even though
        // their contents are constant — they could be created once and
        // cached on the Gui/context.
        let vert_buf = VertexBuffer::new(&disp.display,
            &[
                Vertex { position: [ 0.0, 0.0, -1.0 ], normal: [ 0.0, 0.0, 0.0 ], tex_coords: [0.0, 0.0]},
                Vertex { position: [ 1.0, 0.0, -1.0 ], normal: [ 0.0, 0.0, 0.0 ], tex_coords: [1.0, 0.0]},
                Vertex { position: [ 1.0, 1.0, -1.0 ], normal: [ 0.0, 0.0, 0.0 ], tex_coords: [1.0, 1.0]},
                Vertex { position: [ 0.0, 1.0, -1.0 ], normal: [ 0.0, 0.0, 0.0 ], tex_coords: [0.0, 1.0]},
            ]
        ).unwrap();

        // Strip order 1,2,0,3 produces the quad's two triangles.
        let index_buffer = IndexBuffer::new(&disp.display, TriangleStrip, &[1 as u16, 2, 0, 3]).unwrap();

        let perspective = disp.camera.perspective.to_homogeneous().as_ref().to_owned();
        let view: [[f32; 4]; 4] = disp.camera.rot_view().into();

        for x in &self.buttons{
            // Model matrix: translate the quad to (-0.5, 0, -0.5).
            // NOTE(review): this matrix is identical for every button and
            // ignores the button's own position/size — only `x.base.texture`
            // varies per button; confirm whether per-button placement is
            // still to be implemented.
            let matrix: [[f32; 4]; 4] = [
                [1.0, 0.0, 0.0, 0.0],
                [0.0, 1.0, 0.0, 0.0],
                [0.0, 0.0, 1.0, 0.0],
                [ -0.5 , 0.0, -0.5, 1.0f32],
            ];

            target.draw(
                &vert_buf,
                &index_buffer,
                disp.render_buffer.shaders.get("solid").unwrap(),
                &uniform! { matrix: matrix, perspective: perspective, view: view, tex: &x.base.texture, wrap: [0.0 as f32, 0.0 as f32]},
                &params
            ).unwrap();
        }
    }
}
use std::{io, process, fmt, fs, mem};
use std::fs::File;
use std::path::{Path, PathBuf};
use std::sync::{Arc, RwLock};
use std::fmt::Write;
use std::str::FromStr;

use bitcoin::{PublicKey, Script};
use bitcoin::hashes::hex::FromHex;
use liquid_rpc as rpc;

use error::Error;
use utils::{self, RegexUtils};
use runner::{DaemonRunner, RunnerHelper, RuntimeData};

pub const CONFIG_FILENAME: &str = "elements.conf";

/// Older liquidd nodes were released as 2.x.x and 3.x.x versions.
pub const OLD_LIQUID_VERSION: u64 = 2_00_00_00;
/// The dynafed activation version.
pub const DYNAFED_VERSION: u64 = 18_01_00;
pub const DEFAULT_VERSION: u64 = 18_01_00;

/// Configuration for an elementsd/liquidd node; serialized into
/// `elements.conf` by [Config::write_into].
#[derive(Debug, Clone, Deserialize, Default)]
pub struct Config {
    /// This field is not present in the config but is necessary to
    /// know the config file format that needs to be written.
    /// Two digits per section, 4 sections: 0.18.1.0 => 18_01_00
    pub version: u64,

    pub datadir: PathBuf,
    pub debug: bool,
    pub printtoconsole: bool,
    pub daemon: bool,
    pub listen: bool,
    pub port: Option<u16>,
    pub txindex: bool,
    pub connect: Vec<String>,
    pub rpccookie: Option<String>,
    pub rpcport: Option<u16>,
    pub rpcuser: Option<String>,
    pub rpcpass: Option<String>,
    //TODO(stevenroose) enum?
    pub addresstype: Option<String>,
    pub blockmintxfee: Option<f64>,
    pub minrelaytxfee: Option<f64>,

    // Elements stuff:
    pub chain: String,
    pub validatepegin: bool,
    pub signblockscript: Option<Script>,
    pub con_max_block_sig_size: Option<usize>,
    pub fedpegscript: Option<Script>,
    #[serde(default)]
    pub pak_pubkeys: Vec<(PublicKey, PublicKey)>,
    pub con_dyna_deploy_start: Option<u32>,
    pub con_nminerconfirmationwindow: Option<u32>,
    pub con_nrulechangeactivationthreshold: Option<u32>,
    pub mainchain_rpchost: Option<String>,
    pub mainchain_rpcport: Option<u16>,
    pub mainchain_rpcuser: Option<String>,
    pub mainchain_rpcpass: Option<String>,
}

impl Config {
    /// Serialize this configuration in elements.conf format into `w`.
    ///
    /// Panics if `chain` is empty. The `version` field decides several
    /// format differences (chain section header, pak entry format).
    pub fn write_into<W: io::Write>(&self, mut w: W) -> Result<(), io::Error> {
        //TODO(stevenroose) error?
        assert!(!self.chain.is_empty());

        // A zero version means "unset"; fall back to the default.
        let version = if self.version > 0 {
            self.version
        } else {
            DEFAULT_VERSION
        };

        writeln!(w, "datadir={}", self.datadir.as_path().to_str().unwrap_or("<INVALID>"))?;

        writeln!(w, "chain={}", self.chain)?;
        // 0.17+ (but not the old 2.x/3.x liquidd line) uses per-chain
        // config sections; everything below falls under this section.
        if version >= 17_00_00 && version < OLD_LIQUID_VERSION {
            writeln!(w, "[{}]", self.chain)?;
        }

        writeln!(w, "debug={}", self.debug as u8)?;
        writeln!(w, "printtoconsole={}", self.printtoconsole as u8)?;
        writeln!(w, "daemon={}", self.daemon as u8)?;
        writeln!(w, "listen={}", self.listen as u8)?;
        if let Some(p) = self.port {
            writeln!(w, "port={}", p)?;
        }
        writeln!(w, "txindex={}", self.txindex as u8)?;

        if let Some(ref v) = self.signblockscript {
            writeln!(w, "signblockscript={:x}", v)?;
        }
        if let Some(v) = self.con_max_block_sig_size {
            writeln!(w, "con_max_block_sig_size={}", v)?;
        }
        if let Some(ref v) = self.fedpegscript {
            writeln!(w, "fedpegscript={:x}", v)?;
        }
        for pair in &self.pak_pubkeys {
            if version >= DYNAFED_VERSION && version < OLD_LIQUID_VERSION {
                // NOTE(review): post-dynafed the two keys are written
                // concatenated without a separator — confirm this matches
                // the pak entry format those nodes expect.
                writeln!(w, "pak={}{}", pair.0, pair.1)?;
            } else {
                writeln!(w, "pak={}:{}", pair.0, pair.1)?;
            }
        }
        if let Some(v) = self.con_dyna_deploy_start {
            writeln!(w, "con_dyna_deploy_start={}", v)?;
        }
        if let Some(v) = self.con_nminerconfirmationwindow {
            writeln!(w, "con_nminerconfirmationwindow={}", v)?;
        }
        if let Some(v) = self.con_nrulechangeactivationthreshold {
            writeln!(w, "con_nrulechangeactivationthreshold={}", v)?;
        }

        for connect in &self.connect {
            writeln!(w, "connect={}", connect)?;
        }

        // RPC details: enable the RPC server whenever credentials are set.
        if self.rpccookie.is_some() || self.rpcuser.is_some() {
            writeln!(w, "server=1")?;
        }
        if let Some(ref cf) = self.rpccookie {
            writeln!(w, "rpccookiefile={}", cf)?;
        }
        if let Some(p) = self.rpcport {
            writeln!(w, "rpcport={}", p)?;
        }
        if let Some(ref u) = self.rpcuser {
            writeln!(w, "rpcuser={}", u)?;
        }
        if let Some(ref p) = self.rpcpass {
            writeln!(w, "rpcpassword={}", p)?;
        }

        writeln!(w, "validatepegin={}", self.validatepegin as u8)?;
        // Mainchain RPC settings are only relevant when validating peg-ins.
        if self.validatepegin {
            if let Some(ref v) = self.mainchain_rpchost {
                writeln!(w, "mainchainrpchost={}", v)?;
            }
            if let Some(ref v) = self.mainchain_rpcport {
                writeln!(w, "mainchainrpcport={}", v)?;
            }
            if let Some(ref v) = self.mainchain_rpcuser {
                writeln!(w, "mainchainrpcuser={}", v)?;
            }
            if let Some(ref v) = self.mainchain_rpcpass {
                writeln!(w, "mainchainrpcpassword={}", v)?;
            }
        }

        if let Some(ref v) = self.addresstype {
            writeln!(w, "addresstype={}", v)?;
        }
        if let Some(v) = self.blockmintxfee {
            writeln!(w, "blockmintxfee={}", v)?;
        }
        if let Some(v) = self.minrelaytxfee {
            writeln!(w, "minrelaytxfee={}", v)?;
        }

        Ok(())
    }
}

/// Mutable per-run state of the daemon, updated from its output streams.
#[derive(Default)]
pub struct State {
    // Last (height, hash) seen in an UpdateTip log line.
    pub last_update_tip: Option<(u32, bitcoin::BlockHash)>,
    // Accumulated stderr output; drained by [Daemon::take_stderr].
    pub stderr: String,
}

/// Handle to a (to-be-)running elementsd process.
pub struct Daemon {
    name: String,
    executable: PathBuf,
    config: Config,
    /// The path of the written config file.
    /// [None] before it has been written.
    config_file: Option<PathBuf>,
    runtime_data: Option<Arc<RwLock<RuntimeData<State>>>>,
}

// Captures the block hash (group 1) and height (group 2) from an
// elementsd "UpdateTip" log line.
const UPDATE_TIP_REGEX: &str = r".*UpdateTip: new best=([0-9a-f]+) height=([0-9]+) version=.*$";

/// Parse an UpdateTip log line into (height, blockhash); `None` when the
/// line does not match. Panics on a matching line with malformed fields.
pub fn parse_update_tip(msg: &str) -> Option<(u32, bitcoin::BlockHash)> {
    UPDATE_TIP_REGEX.rx_n(2, msg).map(|m| {
        let blockhash = bitcoin::BlockHash::from_hex(
            m[1].expect("blockhash missing in UpdateTip")
        ).expect("invalid blockhash in UpdateTip");
        let height = u32::from_str(
            m[2].expect("height missing in UpdateTip")
        ).expect("invalid height in UpdateTip");
        (height, blockhash)
    })
}

impl Daemon {
    /// Create an unnamed daemon handle; see [Daemon::named].
    pub fn new<P: Into<PathBuf>>(executable: P, config: Config) -> Result<Daemon, Error> {
        Daemon::named("".into(), executable, config)
    }

    /// Create a named daemon handle.
    ///
    /// Errors if `config.datadir` is not an absolute path.
    pub fn named<P: Into<PathBuf>>(name: String, executable: P, config: Config) -> Result<Daemon, Error> {
        if !config.datadir.is_absolute() {
            return Err(Error::Config("datadir should be an absolute path"));
        }

        Ok(Daemon {
            name: name,
            executable: executable.into(),
            config: config,
            config_file: None,
            runtime_data: None,
        })
    }

    pub fn datadir(&self) -> &Path {
        self.config.datadir.as_path()
    }

    /// Last chain tip reported on stdout, if the daemon has started and
    /// produced an UpdateTip line.
    pub fn last_update_tip(&self) -> Option<(u32, bitcoin::BlockHash)> {
        self.runtime_data.as_ref().and_then(|rt|
            rt.read().unwrap().state.last_update_tip
        )
    }

    /// Get the RPC info.
    ///
    /// Don't call this method before calling [start].
    pub fn rpc_info(&self) -> Option<(String, rpc::Auth)> {
        let url = format!("http://127.0.0.1:{}", self.config.rpcport?);
        // Cookie auth takes precedence over user/pass.
        let auth = if let Some(ref c) = self.config.rpccookie {
            rpc::Auth::CookieFile(c.clone().into())
        } else if let Some(ref u) = self.config.rpcuser {
            let pass = self.config.rpcpass.as_ref()?.clone();
            rpc::Auth::UserPass(u.clone(), pass)
        } else {
            return None;
        };
        Some((url, auth))
    }

    /// Build an RPC client from [rpc_info]; `None` when RPC is not configured.
    pub fn rpc_client(&self) -> Option<Result<rpc::Client, rpc::Error>> {
        let (url, auth) = self.rpc_info()?;
        Some(rpc::Client::new(url, auth))
    }

    /// Drain and return the stderr output buffered so far.
    pub fn take_stderr(&self) -> String {
        self.runtime_data.as_ref().map(|rt|
            mem::replace(&mut rt.write().unwrap().state.stderr, String::new())
        ).unwrap_or_default()
    }
}

impl RunnerHelper for Daemon {
    type State = State;

    // Idempotent: only writes the config file the first time.
    fn _prepare(&mut self) -> Result<(), Error> {
        if self.config_file.is_some() {
            return Ok(());
        }

        // Make sure the datadir exists.
        fs::create_dir_all(&self.config.datadir)?;

        // Write the config file once and store the path.
        let mut path: PathBuf = self.config.datadir.clone().into();
        path.push(CONFIG_FILENAME);
        let mut file = File::create(&path)?;
        self.config.write_into(&mut file)?;
        self.config_file = Some(path);
        Ok(())
    }

    // Must only run after `_prepare` (unwraps `config_file`).
    fn _command(&self) -> process::Command {
        let mut cmd = process::Command::new(self.executable.clone());
        cmd.args(&[
            &format!("-conf={}", self.config_file.as_ref().unwrap().as_path().display()),
            // Force console logging so `_process_stdout` sees UpdateTip lines.
            "-printtoconsole=1",
        ]);
        cmd
    }

    fn _init_state(&self) -> Self::State {
        State {
            last_update_tip: None,
            stderr: String::new(),
        }
    }

    /// Notify that the daemon has started.
    fn _notif_started(&mut self, runtime_data: Arc<RwLock<RuntimeData<Self::State>>>) {
        self.runtime_data.replace(runtime_data);
    }

    /// Get the current runtime data.
    fn _get_runtime(&self) -> Option<Arc<RwLock<RuntimeData<Self::State>>>> {
        self.runtime_data.clone()
    }

    fn _process_stdout(state: &mut Self::State, line: &str) {
        if let Some(tip) = parse_update_tip(&line) {
            trace!("Setting new elementsd tip: {:?}", tip);
            state.last_update_tip = Some(tip);
        }
    }

    fn _process_stderr(state: &mut Self::State, line: &str) {
        trace!("stderr line of elementsd: {}", line);
        writeln!(&mut state.stderr, "{}", line).unwrap();
    }
}

impl DaemonRunner for Daemon {}

impl fmt::Debug for Daemon {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.name.is_empty() {
            write!(f, "<unnamed> elementsd")
        } else {
            write!(f, "elementsd \"{}\"", self.name)
        }
    }
}
use std::os::raw::c_char;

// FFI binding to the C library's `strlen`: counts the bytes before the
// terminating NUL of `s`.
extern {
    pub fn strlen(s: *const c_char) -> usize;
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn strlen_test() {
        use std::ffi::CString;

        // CString appends the NUL terminator that strlen relies on.
        let phrase = CString::new("I'll be back").unwrap();
        let measured = unsafe { strlen(phrase.as_ptr()) };
        // "I'll be back" is 12 bytes long, the same as its byte slice.
        assert_eq!(measured, phrase.as_bytes().len());
        assert_eq!(measured, 12);
    }
}
use super::{path_offset, socket_addr};
use crate::sys::unix::net::new_socket;
use crate::sys::unix::UnixStream;
use crate::unix::SourceFd;
use crate::{event, Interest, Registry, Token};

use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};
use std::os::unix::net;
use std::path::Path;
use std::{ascii, fmt, io, mem};

/// Non-blocking Unix domain listener wrapping the std listener.
#[derive(Debug)]
pub struct UnixListener {
    inner: net::UnixListener,
}

/// An address associated with a `mio` specific Unix socket.
///
/// This is implemented instead of imported from [`net::SocketAddr`] because
/// there is no way to create a [`net::SocketAddr`]. One must be returned by
/// [`accept`], so this is returned instead.
///
/// [`net::SocketAddr`]: std::os::unix::net::SocketAddr
/// [`accept`]: #method.accept
pub struct SocketAddr {
    sockaddr: libc::sockaddr_un,
    socklen: libc::socklen_t,
}

// The three flavors a sockaddr_un can decode to (see `SocketAddr::address`).
enum AddressKind<'a> {
    Unnamed,
    Pathname(&'a Path),
    Abstract(&'a [u8]),
}

impl UnixListener {
    fn new(inner: net::UnixListener) -> UnixListener {
        UnixListener { inner }
    }

    // Accept one connection, returning a non-blocking, close-on-exec stream
    // and the peer's address.
    pub(crate) fn accept(&self) -> io::Result<(UnixStream, SocketAddr)> {
        let sockaddr = mem::MaybeUninit::<libc::sockaddr_un>::zeroed();

        // This is safe to assume because a `libc::sockaddr_un` filled with `0`
        // bytes is properly initialized.
        //
        // `0` is a valid value for `sockaddr_un::sun_family`; it is
        // `libc::AF_UNSPEC`.
        //
        // `[0; 108]` is a valid value for `sockaddr_un::sun_path`; it begins an
        // abstract path.
        let mut sockaddr = unsafe { sockaddr.assume_init() };

        sockaddr.sun_family = libc::AF_UNIX as libc::sa_family_t;
        let mut socklen = mem::size_of_val(&sockaddr) as libc::socklen_t;

        // Platforms with accept4(2) can set the flags atomically with the
        // accept itself; the rest need the fcntl fallback below.
        #[cfg(not(any(
            target_os = "ios",
            target_os = "macos",
            target_os = "netbsd",
            target_os = "solaris"
        )))]
        let socket = {
            let flags = libc::SOCK_NONBLOCK | libc::SOCK_CLOEXEC;
            syscall!(accept4(
                self.inner.as_raw_fd(),
                &mut sockaddr as *mut libc::sockaddr_un as *mut libc::sockaddr,
                &mut socklen,
                flags
            ))
            .map(|socket| unsafe { UnixStream::from_raw_fd(socket) })
        };

        #[cfg(any(
            target_os = "ios",
            target_os = "macos",
            target_os = "netbsd",
            target_os = "solaris"
        ))]
        let socket = syscall!(accept(
            self.inner.as_raw_fd(),
            &mut sockaddr as *mut libc::sockaddr_un as *mut libc::sockaddr,
            &mut socklen,
        ))
        .and_then(|socket| {
            // Ensure the socket is closed if either of the `fcntl` calls
            // error below.
            let s = unsafe { UnixStream::from_raw_fd(socket) };
            syscall!(fcntl(socket, libc::F_SETFL, libc::O_NONBLOCK))
                .and_then(|_| syscall!(fcntl(socket, libc::F_SETFD, libc::FD_CLOEXEC)).map(|_| s))
        });

        socket.and_then(|s| Ok((s, SocketAddr::from_parts(sockaddr, socklen))))
    }

    // Bind a new listening socket to `path` with a backlog of 1024.
    pub(crate) fn bind(path: &Path) -> io::Result<UnixListener> {
        let socket = new_socket(libc::AF_UNIX, libc::SOCK_STREAM)?;
        let (sockaddr, socklen) = socket_addr(path)?;
        let sockaddr = &sockaddr as *const libc::sockaddr_un as *const libc::sockaddr;

        syscall!(bind(socket, sockaddr, socklen))
            .and_then(|_| syscall!(listen(socket, 1024)))
            .map_err(|err| {
                // Close the socket if we hit an error, ignoring the error from
                // closing since we can't pass back two errors.
                let _ = unsafe { libc::close(socket) };
                err
            })
            .map(|_| unsafe { UnixListener::from_raw_fd(socket) })
    }

    pub fn from_std(inner: net::UnixListener) -> UnixListener {
        UnixListener { inner }
    }

    pub(crate) fn try_clone(&self) -> io::Result<UnixListener> {
        let inner = self.inner.try_clone()?;
        Ok(UnixListener::new(inner))
    }

    pub(crate) fn local_addr(&self) -> io::Result<SocketAddr> {
        SocketAddr::new(|sockaddr, socklen| {
            syscall!(getsockname(self.inner.as_raw_fd(), sockaddr, socklen))
        })
    }

    /// Returns the value of the `SO_ERROR` option.
    pub(crate) fn take_error(&self) -> io::Result<Option<io::Error>> {
        self.inner.take_error()
    }
}

// Event registration is delegated to the raw fd via `SourceFd`.
impl event::Source for UnixListener {
    fn register(
        &mut self,
        registry: &Registry,
        token: Token,
        interests: Interest,
    ) -> io::Result<()> {
        SourceFd(&self.as_raw_fd()).register(registry, token, interests)
    }

    fn reregister(
        &mut self,
        registry: &Registry,
        token: Token,
        interests: Interest,
    ) -> io::Result<()> {
        SourceFd(&self.as_raw_fd()).reregister(registry, token, interests)
    }

    fn deregister(&mut self, registry: &Registry) -> io::Result<()> {
        SourceFd(&self.as_raw_fd()).deregister(registry)
    }
}

impl AsRawFd for UnixListener {
    fn as_raw_fd(&self) -> RawFd {
        self.inner.as_raw_fd()
    }
}

impl IntoRawFd for UnixListener {
    fn into_raw_fd(self) -> RawFd {
        self.inner.into_raw_fd()
    }
}

impl FromRawFd for UnixListener {
    unsafe fn from_raw_fd(fd: RawFd) -> UnixListener {
        UnixListener::new(net::UnixListener::from_raw_fd(fd))
    }
}

impl SocketAddr {
    // Build a SocketAddr by letting `f` (e.g. getsockname) fill in a zeroed
    // sockaddr_un and its length.
    pub(crate) fn new<F>(f: F) -> io::Result<SocketAddr>
    where
        F: FnOnce(*mut libc::sockaddr, &mut libc::socklen_t) -> io::Result<libc::c_int>,
    {
        let mut sockaddr = {
            let sockaddr = mem::MaybeUninit::<libc::sockaddr_un>::zeroed();
            // All-zero bytes are a valid sockaddr_un (see `accept` above).
            unsafe { sockaddr.assume_init() }
        };

        let raw_sockaddr = &mut sockaddr as *mut libc::sockaddr_un as *mut libc::sockaddr;
        let mut socklen = mem::size_of_val(&sockaddr) as libc::socklen_t;

        f(raw_sockaddr, &mut socklen)?;
        Ok(SocketAddr::from_parts(sockaddr, socklen))
    }

    pub(crate) fn from_parts(sockaddr: libc::sockaddr_un, socklen: libc::socklen_t) -> SocketAddr {
        SocketAddr { sockaddr, socklen }
    }

    /// Returns `true` if the address is unnamed.
    ///
    /// Documentation reflected in [`SocketAddr`]
    ///
    /// [`SocketAddr`]: std::os::unix::net::SocketAddr
    pub fn is_unnamed(&self) -> bool {
        if let AddressKind::Unnamed = self.address() {
            true
        } else {
            false
        }
    }

    /// Returns the contents of this address if it is a `pathname` address.
    ///
    /// Documentation reflected in [`SocketAddr`]
    ///
    /// [`SocketAddr`]: std::os::unix::net::SocketAddr
    pub fn as_pathname(&self) -> Option<&Path> {
        if let AddressKind::Pathname(path) = self.address() {
            Some(path)
        } else {
            None
        }
    }

    // Classify the stored sockaddr: unnamed, abstract (leading NUL, Linux),
    // or a filesystem pathname (NUL-terminated).
    fn address(&self) -> AddressKind<'_> {
        let offset = path_offset(&self.sockaddr);
        // `len` is how many of sun_path's bytes are actually used.
        let len = self.socklen as usize - offset;
        let path = unsafe { &*(&self.sockaddr.sun_path as *const [libc::c_char] as *const [u8]) };

        // macOS seems to return a len of 16 and a zeroed sun_path for unnamed addresses
        if len == 0
            || (cfg!(not(any(target_os = "linux", target_os = "android")))
                && self.sockaddr.sun_path[0] == 0)
        {
            AddressKind::Unnamed
        } else if self.sockaddr.sun_path[0] == 0 {
            // Abstract namespace: the leading NUL is not part of the name.
            AddressKind::Abstract(&path[1..len])
        } else {
            // Pathname: drop the trailing NUL terminator.
            AddressKind::Pathname(OsStr::from_bytes(&path[..len - 1]).as_ref())
        }
    }
}

impl fmt::Debug for SocketAddr {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.address() {
            AddressKind::Unnamed => write!(fmt, "(unnamed)"),
            AddressKind::Abstract(name) => write!(fmt, "{} (abstract)", AsciiEscaped(name)),
            AddressKind::Pathname(path) => write!(fmt, "{:?} (pathname)", path),
        }
    }
}

// Helper that prints abstract-namespace bytes with non-printables escaped.
struct AsciiEscaped<'a>(&'a [u8]);

impl<'a> fmt::Display for AsciiEscaped<'a> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "\"")?;
        for byte in self.0.iter().cloned().flat_map(ascii::escape_default) {
            write!(fmt, "{}", byte as char)?;
        }
        write!(fmt, "\"")
    }
}
mod length_prefixed_vec; mod local_context; mod message_utils; mod raft_utils; mod response_types; mod utils; pub use length_prefixed_vec::LengthPrefixedVec; pub use local_context::LocalContext; pub use message_utils::{ access_type, accessed_inode, distribution_requirement, raft_group, request_locks, AccessType, DistributionRequirement, }; pub use raft_utils::node_contains_raft_group; pub use response_types::{FlatBufferResponse, FlatBufferWithResponse, ResultResponse}; pub use utils::{ check_access, empty_response, finalize_request, finalize_request_without_prefix, finalize_response, finalize_response_without_prefix, node_id_from_address, response_or_error, };
use crate::instruction::Instruction;
use crate::state::State;
use std::io::{BufRead, BufReader, BufWriter, Write};
use std::net::TcpListener;

/// Interactive machine debugger. Accepts a single TCP client on port 6379 and
/// alternates between reading text commands from it and single-stepping the
/// machine `State`.
pub struct Debugger {
    // Set by the `continue` command: leave the command loop and run freely.
    debug_continue: bool,
    // Pending breakpoint; cleared once it is hit (one-shot).
    break_address: Option<u16>,
}

/// A parsed debugger command. `Unknown` carries the raw input so it can be
/// echoed back; `Error` carries an I/O-level failure message.
#[derive(PartialEq, Debug)]
enum Command {
    Continue,
    Registers,
    Flags,
    Disassemble,
    Read(u16),
    BreakAddress(u16),
    Info,
    Help,
    Exit,
    Unknown(String),
    Error(String),
}

impl Debugger {
    /// Creates a debugger with no breakpoint set, stopped at every step.
    pub fn new() -> Self {
        Debugger {
            debug_continue: false,
            break_address: None,
        }
    }

    /// Runs `state` under debugger control. Blocks until a debug client
    /// connects, then services commands between machine steps.
    pub fn step(&mut self, mut state: State) {
        let listener = TcpListener::bind("127.0.0.1:6379").expect("unable to bind to port 6379");
        eprintln!("Waiting for connection...");
        match listener.accept() {
            Ok((stream, address)) => {
                eprintln!("Debug client connected: {:?}", address);
                // Create the buffered reader and writer ONCE for the session.
                // Building a fresh BufReader per command (as the previous
                // version did) discards any bytes the old reader had already
                // buffered past the first newline, silently dropping commands
                // the client sent in quick succession.
                let mut reader = BufReader::new(&stream);
                let mut writer = BufWriter::new(&stream);
                let mut should_break = true;
                while state.running {
                    // `debug_continue` is necessarily false on entry here, so
                    // no reset is needed inside the loop.
                    while state.running && !self.debug_continue && should_break {
                        let mut line = String::new();
                        let command = match reader.read_line(&mut line) {
                            // Zero bytes read means the client disconnected;
                            // treat it as an exit instead of spinning forever
                            // on empty input.
                            Ok(0) => Command::Exit,
                            Ok(_) => parse(line.trim()),
                            Err(_) => Command::Error("Unable to read line".to_string()),
                        };
                        let response = self.handle_command(&mut state, command);
                        writer
                            .write_all(format!("{}\n", response).as_bytes())
                            .expect("unable to write to socket");
                        // BufWriter only flushes on drop (and swallows errors
                        // there); flush explicitly so the client sees the
                        // reply before we block on the next read.
                        writer.flush().expect("unable to flush socket");
                    }
                    self.debug_continue = false;
                    state = state.step();
                    should_break = self.should_break(state.pc);
                }
            }
            Err(e) => eprintln!("Couldn't get client: {:?}", e),
        }
    }

    /// Returns whether execution should pause at `pc`. With no breakpoint set
    /// we are single-stepping and always pause; a hit breakpoint is cleared so
    /// the next `continue` runs freely.
    fn should_break(&mut self, pc: u16) -> bool {
        match self.break_address {
            Some(break_address) => {
                if break_address == pc {
                    self.break_address = None;
                    true
                } else {
                    false
                }
            }
            None => true,
        }
    }

    /// Executes one command against `state` and returns the text reply that is
    /// sent back to the debug client.
    fn handle_command(&mut self, state: &mut State, command: Command) -> String {
        match command {
            Command::Continue => {
                self.debug_continue = true;
                format!("PC {:#04x}", state.pc)
            }
            Command::Flags => format!("{:?}", state.condition),
            Command::Registers => state
                .registers()
                .iter()
                .enumerate()
                .map(|(i, register)| format!("R{}: {:#04x}", i, register))
                .collect::<Vec<String>>()
                .join("\n"),
            Command::Disassemble => {
                // Show the decoded instruction at PC plus its raw bit pattern,
                // split into high and low bytes for readability.
                let instruction = state.memory.read(state.pc);
                format!(
                    "{:?}, {:08b}_{:08b}",
                    Instruction::decode(instruction),
                    (instruction >> 8) & 0xff,
                    instruction & 0xff
                )
            }
            Command::Read(address) => {
                let value = state.memory.read(address);
                format!("{:#04x}, {:#016b}", value, value)
            }
            Command::BreakAddress(address) => {
                self.break_address = Some(address);
                format!("Break address set to {:#04x}", address)
            }
            Command::Info => {
                // One-line summary: PC, decoded instruction, flags, registers,
                // and the pending breakpoint (if any).
                let instruction = Instruction::decode(state.memory.read(state.pc));
                let registers = state
                    .registers()
                    .iter()
                    .enumerate()
                    .map(|(i, register)| format!("R{}: {:#04x}", i, register))
                    .collect::<Vec<String>>()
                    .join(", ");
                let break_address = if let Some(a) = self.break_address {
                    format!(", break-address {:#04x}", a)
                } else {
                    String::new()
                };
                format!(
                    "{:#04x}: {:?}, Flags: {:?}, [{}]{}",
                    state.pc, instruction, state.condition, registers, break_address
                )
            }
            Command::Help => [
                "c, continue Continue execution.",
                "r, registers Print registers.",
                "f, flags Print flags.",
                "d, disassemble Disassemble current instruction.",
                " read <addr> Read and display memory address. e.g. read 0x3000",
                " break-address <addr> Break at address. e.g. break-address 0x3000",
            ]
            .join("\n"),
            Command::Exit => {
                state.running = false;
                "Exiting...".to_string()
            }
            Command::Unknown(line) => format!("Unknown command {:?}", line),
            Command::Error(message) => message,
        }
    }
}

/// Maps one trimmed input line to a `Command`. Commands with an address
/// argument are handled by `parse_hex_after_pattern`.
fn parse(line: &str) -> Command {
    match line {
        "c" | "continue" => Command::Continue,
        "f" | "flags" => Command::Flags,
        "r" | "registers" => Command::Registers,
        "d" | "disassemble" => Command::Disassemble,
        "i" | "info" => Command::Info,
        "h" | "help" => Command::Help,
        "exit" => Command::Exit,
        line => {
            if let Some(address) = parse_hex_after_pattern("read 0x", line) {
                return Command::Read(address);
            }
            if let Some(address) = parse_hex_after_pattern("break-address 0x", line) {
                return Command::BreakAddress(address);
            }
            Command::Unknown(line.trim().to_string())
        }
    }
}

/// Parses the hex digits following `pattern` in `line` as a `u16`.
/// Returns `None` unless the line starts with `pattern` and is followed by
/// 1-4 hex digits and nothing else.
fn parse_hex_after_pattern(pattern: &str, line: &str) -> Option<u16> {
    if line.starts_with(pattern) {
        let (_, address) = line.split_at(pattern.len());
        if !address.is_empty()
            && address.len() <= 4
            && address.bytes().all(|b| b.is_ascii_hexdigit())
        {
            // The guard above makes this parse infallible, but `.ok()` keeps
            // the function panic-free regardless.
            return u16::from_str_radix(address, 16).ok();
        }
    }
    None
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_hex_after_pattern() {
        for command in ["read", "read 0x", "read 0x12345", "read 0x1z", "a read 0x1"] {
            assert_eq!(parse_hex_after_pattern("read 0x", command), None);
        }
        assert_eq!(parse_hex_after_pattern("read 0x", "read 0x1"), Some(1));
        assert_eq!(
            parse_hex_after_pattern("read 0x", "read 0x1234"),
            Some(4660)
        );
    }
}
extern crate winapi;

use winapi::um::objbase::*;
use winapi::um::mmdeviceapi::*;
use winapi::um::endpointvolume::*;
use winapi::shared::*;
use winapi::Interface;

/// Creates the COM multimedia device enumerator via `CoCreateInstance`.
///
/// NOTE(review): the HRESULT from `CoCreateInstance` is discarded, so on
/// failure this returns whatever the zeroed out-pointer holds (null) without
/// any diagnostic — callers should verify; the returned interface is also
/// never `Release()`d anywhere in this file.
fn get_device_enumerator() -> *mut IMMDeviceEnumerator {
    let cls_mm_device_enum : guiddef::GUID = CLSID_MMDeviceEnumerator;
    let iid_imm_device_enumerator = IMMDeviceEnumerator::uuidof();
    // Out-parameter for CoCreateInstance; zeroed (null) until the call fills it.
    let mut device_enumerator : *mut IMMDeviceEnumerator = unsafe {
        std::mem::zeroed()
    };
    unsafe {
        winapi::um::combaseapi::CoCreateInstance(&cls_mm_device_enum,
            std::ptr::null_mut(),
            wtypesbase::CLSCTX_INPROC_SERVER,
            &iid_imm_device_enumerator,
            &mut device_enumerator as *mut *mut IMMDeviceEnumerator
                as *mut *mut winapi::ctypes::c_void);
    }
    return device_enumerator;
}

/// Asks the enumerator for the default capture endpoint with the
/// `eCommunications` role.
///
/// NOTE(review): `device_enumerator` is dereferenced without a null check and
/// the HRESULT from `GetDefaultAudioEndpoint` is ignored — TODO confirm the
/// caller guarantees a valid enumerator.
fn get_imm_device(device_enumerator : *mut IMMDeviceEnumerator) -> *mut IMMDevice {
    let mut pp_device : *mut winapi::um::mmdeviceapi::IMMDevice = unsafe {
        std::mem::zeroed()
    };
    unsafe {
        (*device_enumerator).GetDefaultAudioEndpoint(
            winapi::um::mmdeviceapi::eCapture,
            winapi::um::mmdeviceapi::eCommunications,
            &mut pp_device
        );
    }
    return pp_device;
}

/// Activates the `IAudioMeterInformation` interface on the given device so
/// its peak levels can be queried.
///
/// NOTE(review): same pattern as above — no null check on `pp_device`, and
/// the HRESULT from `Activate` is discarded.
fn get_iaudio_meter_information(pp_device : *mut IMMDevice) -> *mut IAudioMeterInformation {
    let cls_iaudio_meter_information = IAudioMeterInformation::uuidof();
    let mut input_device : *mut IAudioMeterInformation = unsafe {
        std::mem::zeroed()
    };
    unsafe {
        (*pp_device).Activate(
            &cls_iaudio_meter_information,
            wtypesbase::CLSCTX_INPROC_SERVER,
            std::ptr::null_mut(),
            &mut input_device as *mut *mut winapi::um::endpointvolume::IAudioMeterInformation
                as *mut *mut winapi::ctypes::c_void);
    }
    return input_device;
}

/// Convenience entry point: initializes COM on this thread and walks the
/// chain enumerator -> default capture device -> audio meter interface.
///
/// NOTE(review): the `CoInitialize` result is ignored, and none of the
/// intermediate COM pointers are released — acceptable for a
/// process-lifetime singleton, but worth confirming.
pub fn get_audio_meter_information() -> *mut IAudioMeterInformation {
    unsafe { CoInitialize(std::ptr::null_mut()) };
    let device_enumerator : *mut IMMDeviceEnumerator = get_device_enumerator();
    let pp_device : *mut IMMDevice = get_imm_device(device_enumerator);
    return get_iaudio_meter_information(pp_device);
}
// NOTE(review): this file is written for a pre-1.0 Rust toolchain — `uint`,
// `fail!`, `Vec::from_slice`, `std::io::fs`, and the `#![license]` attribute
// were all removed before Rust 1.0. It will not build on any modern compiler
// and is documented here as-is.
#![crate_name = "tar"]
#![crate_type = "lib"]
#![license = "MIT"]

use std::io::fs::File;
use std::collections::hashmap::HashMap;

/// Handle to a tar archive on disk, plus lookup tables describing the
/// classic tar header layout (field name -> ordinal, ordinal -> byte width).
pub struct Tar {
    filepath: &'static str,
    pub fields: HashMap<&'static str, uint>,
    pub field_size: HashMap<uint, uint>
}

/// Builds a `Tar` for `filepath`, populating the header-field tables.
/// The byte widths (path 100, mode 8, ..., linkpath 100) match the classic
/// POSIX/ustar header layout.
pub fn new(filepath: &'static str) -> Tar {
    let mut fields = HashMap::<&str, uint>::new();
    let mut field_size = HashMap::<uint, uint>::new();

    // Header fields in on-disk order.
    fields.insert("path", 1);
    fields.insert("mode", 2);
    fields.insert("uid", 3);
    fields.insert("gid", 4);
    fields.insert("size", 5);
    fields.insert("mtime", 6);
    fields.insert("cksum", 7);
    fields.insert("type", 8);
    fields.insert("linkpath", 9);

    // Fixed byte width of each field within the 512-byte header block.
    field_size.insert(*fields.get(&"path"), 100);
    field_size.insert(*fields.get(&"mode"), 8);
    field_size.insert(*fields.get(&"uid"), 8);
    field_size.insert(*fields.get(&"gid"), 8);
    field_size.insert(*fields.get(&"size"), 12);
    field_size.insert(*fields.get(&"mtime"), 12);
    field_size.insert(*fields.get(&"cksum"), 8);
    field_size.insert(*fields.get(&"type"), 1);
    field_size.insert(*fields.get(&"linkpath"), 100);

    Tar {
        filepath: filepath,
        fields: fields,
        field_size: field_size
    }
}

impl Tar {
    /// Reads the whole archive into memory and splits it into 512-byte blocks.
    ///
    /// NOTE(review): `data.slice(cur_block, cur_block + BLOCK_SIZE)` fails at
    /// runtime if the file length is not an exact multiple of 512 — tar files
    /// are padded to block size, but a truncated file would abort here.
    pub fn read(&self) -> Vec<Vec<u8>> {
        static BLOCK_SIZE: uint = 512;
        let mut split_data = vec!();
        let mut cur_block = 0;
        let mut handle = File::open(&Path::new(self.filepath));
        let data = handle.read_to_end().unwrap();
        while cur_block < data.len() {
            split_data.push(Vec::from_slice(data.slice(cur_block, cur_block + BLOCK_SIZE)));
            cur_block += BLOCK_SIZE;
        }
        split_data
    }

    /// Unimplemented: extraction always aborts.
    pub fn extract(&self) {
        fail!();
    }
}

/// Placeholder for header encode/decode logic; every method is a stub.
struct TarHeader;

impl TarHeader {
    fn decode() {
        fail!();
    }

    fn encode() {
        fail!();
    }

    fn calc_sum() {
        fail!();
    }

    fn check_sum() {
        fail!();
    }
}
/// A raw machine instruction: a thin newtype over the encoded instruction
/// byte. `Copy` because it is a single `u8`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Instruction(pub u8);

// No inherent methods yet; decode/encode helpers would live here.
impl Instruction {
}
pub mod combinators;
pub mod primitives;

/// Failure produced by a parser: the remaining input, a description of what
/// was expected, and whether the failure is fatal (should not be retried by
/// alternative combinators).
#[derive(Debug, Clone)]
pub struct ParseError {
    pub input: String,
    pub expected: String,
    pub fatal: bool,
}

/// Successful parse: the produced value plus the unconsumed input.
#[derive(Debug, Clone)]
pub struct ParseSuccess<A> {
    pub value: A,
    pub next: String,
}

pub type ParseResult<A> = Result<ParseSuccess<A>, ParseError>;

/// A parser is a boxed function from input text to a parse result.
pub type Parser<A> = Box<dyn Fn(&str) -> ParseResult<A>>;

/// Runs `p` on `input` and returns the parsed value, discarding any
/// unconsumed remainder.
///
/// # Panics
/// Panics with the parser's `expected` description if parsing fails.
pub fn parse<A: Clone>(p: Parser<A>, input: &str) -> A {
    match p(input) {
        Ok(success) => success.value,
        // `panic!(format!(..))` is deprecated and a hard error in edition
        // 2021 (the panic message must be a string literal); pass the format
        // string and arguments to `panic!` directly.
        Err(e) => panic!("Failed to parse input, expected {}", e.expected),
    }
}

/// Runs `p` on `input` and returns the full result, including the
/// unconsumed remainder.
///
/// # Panics
/// Panics with the parser's `expected` description if parsing fails, so the
/// returned `ParseResult` is always `Ok`.
pub fn parse_with_next<A: Clone>(p: Parser<A>, input: &str) -> ParseResult<A> {
    // Explicit match instead of `if let Err(..) = res { panic! } res`, which
    // only compiled because the moved-out branch diverges.
    match p(input) {
        Ok(success) => Ok(success),
        Err(e) => panic!("Failed to parse input, expected {}", e.expected),
    }
}