text
stringlengths
8
4.13M
// Experiment: SIMD-accelerated structural scan of a JSON-like byte stream, modeled on
// "stage 1" of simdjson (Langdale & Lemire). Each 64-byte chunk is loaded as two
// 32-byte AVX2 vectors; per-chunk results are 64-bit masks, one bit per input byte.
use std::arch::x86_64::*; use core::mem::transmute;

fn main() {
    let input = include_bytes!("explore.in");
    // Two 32-byte lanes holding the current 64-byte chunk ([0] = low 32 bytes).
    let mut input_vec: [__m256i; 2] = unsafe { core::mem::zeroed() };
    // Carry from the previous chunk: 1 iff that chunk ended in a backslash run
    // whose carry addition overflowed (run continues into this chunk).
    let mut prev_ov = 0;
    let mut whitespace = 0;
    let mut structurals = 0;
    let mut ptr = input.as_ptr();
    // NOTE(review): exactly 64 iterations scan 4096 bytes with no bounds check on
    // `ptr`; assumes "explore.in" is at least 4096 bytes long — confirm.
    for _ in 0..64 {
        // SAFETY: unaligned 32-byte loads; in-bounds only if the file is big enough.
        input_vec[0] = unsafe { _mm256_loadu_si256(ptr as *const _) };
        input_vec[1] = unsafe { _mm256_loadu_si256(ptr.add(32) as *const _) };
        unsafe { ptr = ptr.add(64) };
        let od = odd_backslash_sequences(input_vec, &mut prev_ov);
        find_whitespace_and_structurals(input_vec, &mut whitespace, &mut structurals);
        // Debug dump per chunk: carry bit, odd-ended-backslash mask, whitespace mask.
        // NOTE(review): `structurals` is computed but never printed here.
        print!("{} ", prev_ov);
        print!("{:016X} ", od);
        println!("{:016X} ", whitespace);
    }
}

/// Returns a 64-bit mask with a 1 at every position that is the character
/// *following* a backslash run of odd length (i.e. an escaped character),
/// carrying run state across chunk boundaries via `prev_ov`.
///
/// This is the carry-propagation trick from simdjson: adding `backslashes` to the
/// run-start bits makes the carry ripple to the bit just past the run's end.
#[inline(always)]
fn odd_backslash_sequences(input: [__m256i; 2], prev_ov: &mut u64) -> u64 {
    const EVEN_BITS: u64 = 0x5555_5555_5555_5555;
    const ODD_BITS: u64 = 0xAAAA_AAAA_AAAA_AAAA;
    let mask = unsafe { _mm256_set1_epi8(b'\\' as i8) };
    // Bitmask of all backslash positions in the 64-byte chunk.
    let backslashes = unsafe {
        // SAFETY: pure AVX2 compares/movemasks on initialized vectors;
        // transmute is i32 -> u32 of the movemask result.
        let hi32: u32 = transmute(_mm256_movemask_epi8(_mm256_cmpeq_epi8(input[1], mask)));
        let lo32: u32 = transmute(_mm256_movemask_epi8(_mm256_cmpeq_epi8(input[0], mask)));
        lo32 as u64 | ((hi32 as u64) << 32)
    };
    // Bits where a backslash run begins (backslash not preceded by one).
    let starts = backslashes & (!(backslashes << 1));
    // If the previous chunk carried over, even/odd phase flips for this chunk.
    let even_start_mask = EVEN_BITS ^ *prev_ov;
    let even_starts = starts & even_start_mask;
    let odd_starts = starts & (!even_start_mask);
    // Adding run bits to the start bit carries a 1 to the position after the run.
    let even_carries = u64::wrapping_add(backslashes, even_starts);
    let (odd_carries, ov) = u64::overflowing_add(backslashes, odd_starts);
    // Inject last chunk's carry-out as this chunk's bit-0 carry-in.
    let odd_carries = odd_carries | *prev_ov;
    *prev_ov = if ov { 1 } else { 0 };
    // Keep only the carry bits that landed past the end of a run.
    let even_carry_ends = even_carries & (!backslashes);
    let odd_carry_ends = odd_carries & (!backslashes);
    // An odd-length run: even start & odd end, or odd start & even end.
    let even_start_odd_end = even_carry_ends & ODD_BITS;
    let odd_start_even_end = odd_carry_ends & EVEN_BITS;
    even_start_odd_end | odd_start_even_end
}

/// Classifies every byte of the chunk via the simdjson two-nibble PSHUFB table
/// lookup, writing one bitmask of whitespace bytes and one of structural bytes
/// (the category bits: 0x07 = structural classes, 0x18 = whitespace classes).
#[inline(always)]
fn find_whitespace_and_structurals(input: [__m256i; 2], whitespace: &mut u64, structurals: &mut u64) {
    // Category contribution of each byte's LOW nibble (repeated for both lanes).
    let low_nibble_mask = unsafe {
        _mm256_setr_epi8(
            16, 0, 0, 0, 0, 0, 0, 0, 0, 8, 10, 4, 1, 12, 0, 0,
            16, 0, 0, 0, 0, 0, 0, 0, 0, 8, 10, 4, 1, 12, 0, 0,
        )
    };
    // Category contribution of each byte's HIGH nibble.
    let high_nibble_mask = unsafe {
        _mm256_setr_epi8(
            8, 0, 17, 2, 0, 4, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0,
            8, 0, 17, 2, 0, 4, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0,
        )
    };
    let structural_mask = unsafe { _mm256_set1_epi8(0x07) };
    let whitespace_mask = unsafe { _mm256_set1_epi8(0x18) };
    let zero = unsafe { _mm256_set1_epi8(0) };
    // A byte is in a category iff BOTH nibble lookups agree (AND of the tables).
    let category_hi32 = unsafe {
        let lo_nibble = _mm256_shuffle_epi8(low_nibble_mask, input[1]);
        // srli_epi64 shifts across byte lanes, so mask to 0x7f to keep PSHUFB's
        // index semantics (high bit set would force the result byte to zero).
        let hi_nibble = _mm256_shuffle_epi8(high_nibble_mask, _mm256_and_si256(_mm256_srli_epi64(input[1], 4), _mm256_set1_epi8(0x7f)) );
        _mm256_and_si256(lo_nibble, hi_nibble)
    };
    let category_lo32 = unsafe {
        let lo_nibble = _mm256_shuffle_epi8(low_nibble_mask, input[0]);
        let hi_nibble = _mm256_shuffle_epi8(high_nibble_mask, _mm256_and_si256(_mm256_srli_epi64(input[0], 4), _mm256_set1_epi8(0x7f)) );
        _mm256_and_si256(lo_nibble, hi_nibble)
    };
    // Reduce "any structural category bit set" to one bit per byte, then to a mask.
    let structural_hi32: u32 = unsafe {
        let category_structural = _mm256_and_si256(category_hi32, structural_mask);
        let ans = _mm256_cmpgt_epi8(category_structural, zero);
        transmute(_mm256_movemask_epi8(ans))
    };
    let structural_lo32: u32 = unsafe {
        let category_structural = _mm256_and_si256(category_lo32, structural_mask);
        let ans = _mm256_cmpgt_epi8(category_structural, zero);
        transmute(_mm256_movemask_epi8(ans))
    };
    *structurals = structural_lo32 as u64 | ((structural_hi32 as u64) << 32);
    // Same reduction for the whitespace category bits.
    let whitespace_hi32: u32 = unsafe {
        let category_whitespace = _mm256_and_si256(category_hi32, whitespace_mask);
        let ans = _mm256_cmpgt_epi8(category_whitespace, zero);
        transmute(_mm256_movemask_epi8(ans))
    };
    let whitespace_lo32: u32 = unsafe {
        let category_whitespace = _mm256_and_si256(category_lo32, whitespace_mask);
        let ans = _mm256_cmpgt_epi8(category_whitespace, zero);
        transmute(_mm256_movemask_epi8(ans))
    };
    *whitespace = whitespace_lo32 as u64 | ((whitespace_hi32 as u64) << 32);
}

// Unfinished scaffolding kept from the original author:
// #[inline(always)]
// fn validate_utf8(input: [__m256i; 2], prev_error: &mut __m256) {
// }
// fn print_m256(input: __m256i) {
//     let arr = [0u64; 4];
//     unsafe { _mm256_storeu_si256(&arr as *const _ as *mut __m256i, input) }
//     print!("{:016X} {:016X} {:016X} {:016X}", arr[3], arr[2], arr[1], arr[0]);
//     println!()
// }
use std::cell::RefCell;
use std::rc::Rc;

/// Outcome of parsing a single flag/argument.
pub enum ParseResult {
    /// The token was consumed successfully.
    Parsed,
    /// Help was requested; the caller should print usage and stop.
    Help,
    /// Parsing should stop without error (e.g. a version flag).
    Exit,
    /// Parsing failed; the payload is the error message.
    Error(String),
}

/// A type-erased parsing action, distinguishing how many argument values it
/// consumes: none (`Flag`), exactly one (`Single`), or a list (`Push`/`Many`).
///
/// Fix over the original: trait objects now use the `dyn` keyword
/// (`Box<IFlagAction + 'a>` is bare-trait-object syntax, rejected in the
/// 2018+ editions); the types themselves are unchanged.
pub enum Action<'a> {
    Flag(Box<dyn IFlagAction + 'a>),
    Single(Box<dyn IArgAction + 'a>),
    Push(Box<dyn IArgsAction + 'a>),
    Many(Box<dyn IArgsAction + 'a>),
}

/// Binds a typed destination cell to a concrete [`Action`].
pub trait TypedAction<T> {
    /// Creates an action that writes parsed values into `cell`.
    /// (The parameter is now named: anonymous trait-method parameters were
    /// removed in the 2018 edition.)
    fn bind<'x>(&self, cell: Rc<RefCell<&'x mut T>>) -> Action<'x>;
}

/// An action triggered by a bare flag (consumes no value).
pub trait IFlagAction {
    fn parse_flag(&self) -> ParseResult;
}

/// An action that consumes exactly one argument value.
pub trait IArgAction {
    fn parse_arg(&self, arg: &str) -> ParseResult;
}

/// An action that consumes a slice of argument values.
pub trait IArgsAction {
    fn parse_args(&self, args: &[&str]) -> ParseResult;
}
use std::collections::HashMap;

use handlebars::Handlebars;
use serde::Serialize;

/// Indicates whether the user agent should send or receive user credentials
/// (cookies, basic http auth, etc.) from the other domain in the case of
/// cross-origin requests.
#[derive(Serialize)]
#[serde(rename_all = "kebab-case")]
pub enum Credentials {
    /// Send user credentials if the URL is on the same origin as the calling
    /// script. This is the default value.
    SameOrigin,
    /// Always send user credentials, even for cross-origin calls.
    Include,
    /// Never send or receive user credentials.
    Omit,
}

impl Default for Credentials {
    fn default() -> Self {
        Credentials::SameOrigin
    }
}

/// A builder for constructing a GraphiQL (v2) HTML page.
///
/// # Example
///
/// ```rust
/// use async_graphql::http::*;
///
/// GraphiQLSource::build()
///     .endpoint("/")
///     .subscription_endpoint("/ws")
///     .header("Authorization", "Bearer [token]")
///     .credentials(Credentials::Include)
///     .finish();
/// ```
// The struct itself is the Handlebars template context: each field is serialized
// and consumed by ./graphiql_v2_source.hbs in `finish`.
#[derive(Default, Serialize)]
pub struct GraphiQLSource<'a> {
    // GraphQL endpoint URL the IDE queries.
    endpoint: &'a str,
    // Optional websocket endpoint for subscriptions (ws/wss derived client-side).
    subscription_endpoint: Option<&'a str>,
    // Extra headers sent with every request issued from the IDE.
    headers: Option<HashMap<&'a str, &'a str>>,
    // Optional <title> override; the template falls back to a default otherwise.
    title: Option<&'a str>,
    // Fetch credentials mode; serialized kebab-case ("same-origin", "include", "omit").
    credentials: Credentials,
}

impl<'a> GraphiQLSource<'a> {
    /// Creates a builder for constructing a GraphiQL (v2) HTML page.
    pub fn build() -> GraphiQLSource<'a> {
        Default::default()
    }

    /// Sets the endpoint of the server GraphiQL will connect to.
    // NOTE(review): only this builder method carries #[must_use]; the sibling
    // methods have the same by-value-builder shape and could carry it too.
    #[must_use]
    pub fn endpoint(self, endpoint: &'a str) -> GraphiQLSource<'a> {
        GraphiQLSource { endpoint, ..self }
    }

    /// Sets the subscription endpoint of the server GraphiQL will connect to.
    pub fn subscription_endpoint(self, endpoint: &'a str) -> GraphiQLSource<'a> {
        GraphiQLSource {
            subscription_endpoint: Some(endpoint),
            ..self
        }
    }

    /// Sets a header to be sent with requests GraphiQL will send.
    // Lazily creates the map on first use; repeated calls accumulate headers,
    // and inserting an existing name overwrites its value (HashMap semantics).
    pub fn header(self, name: &'a str, value: &'a str) -> GraphiQLSource<'a> {
        let mut headers = match self.headers {
            Some(headers) => headers,
            None => HashMap::new(),
        };
        headers.insert(name, value);
        GraphiQLSource {
            headers: Some(headers),
            ..self
        }
    }

    /// Sets the html document title.
    pub fn title(self, title: &'a str) -> GraphiQLSource<'a> {
        GraphiQLSource { title: Some(title), ..self }
    }

    /// Sets credentials option for the fetch requests.
    pub fn credentials(self, credentials: Credentials) -> GraphiQLSource<'a> {
        GraphiQLSource { credentials, ..self }
    }

    /// Returns a GraphiQL (v2) HTML page.
    // Registers the bundled template and renders it with `self` as context.
    // Both steps `expect`: failure means the compiled-in template is broken,
    // which is a build-time bug, not a runtime condition.
    pub fn finish(self) -> String {
        let mut handlebars = Handlebars::new();
        handlebars
            .register_template_string(
                "graphiql_v2_source",
                include_str!("./graphiql_v2_source.hbs"),
            )
            .expect("Failed to register template");
        handlebars
            .render("graphiql_v2_source", &self)
            .expect("Failed to render template")
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Golden-output tests: each asserts the full rendered HTML page, so any
    // template change must be reflected in these fixtures.
    #[test]
    fn test_with_only_url() {
        let graphiql_source = GraphiQLSource::build().endpoint("/").finish();
        assert_eq!(
            graphiql_source,
            r#"<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="robots" content="noindex"> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="referrer" content="origin"> <title>GraphiQL IDE</title> <style> body { height: 100%; margin: 0; width: 100%; overflow: hidden; } #graphiql { height: 100vh; } </style> <script crossorigin src="https://unpkg.com/react@17/umd/react.development.js" ></script> <script crossorigin src="https://unpkg.com/react-dom@17/umd/react-dom.development.js" ></script> <link rel="icon" href="https://graphql.org/favicon.ico"> <link rel="stylesheet" href="https://unpkg.com/graphiql/graphiql.min.css" /> </head> <body> <div id="graphiql">Loading...</div> <script src="https://unpkg.com/graphiql/graphiql.min.js" type="application/javascript" ></script> <script> customFetch = (url, opts = {}) => { return fetch(url, {...opts, credentials: 'same-origin'}) } createUrl = (endpoint, subscription = false) => { const url = new URL(endpoint, window.location.origin); if (subscription) { url.protocol = url.protocol === 'https:' ? 'wss:' : 'ws:'; } return url.toString(); } ReactDOM.render( React.createElement(GraphiQL, { fetcher: GraphiQL.createFetcher({ url: createUrl('/'), fetch: customFetch, }), defaultEditorToolsVisibility: true, }), document.getElementById("graphiql") ); </script> </body> </html>"#
        )
    }

    #[test]
    fn test_with_both_urls() {
        let graphiql_source = GraphiQLSource::build()
            .endpoint("/")
            .subscription_endpoint("/ws")
            .finish();
        assert_eq!(
            graphiql_source,
            r#"<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="robots" content="noindex"> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="referrer" content="origin"> <title>GraphiQL IDE</title> <style> body { height: 100%; margin: 0; width: 100%; overflow: hidden; } #graphiql { height: 100vh; } </style> <script crossorigin src="https://unpkg.com/react@17/umd/react.development.js" ></script> <script crossorigin src="https://unpkg.com/react-dom@17/umd/react-dom.development.js" ></script> <link rel="icon" href="https://graphql.org/favicon.ico"> <link rel="stylesheet" href="https://unpkg.com/graphiql/graphiql.min.css" /> </head> <body> <div id="graphiql">Loading...</div> <script src="https://unpkg.com/graphiql/graphiql.min.js" type="application/javascript" ></script> <script> customFetch = (url, opts = {}) => { return fetch(url, {...opts, credentials: 'same-origin'}) } createUrl = (endpoint, subscription = false) => { const url = new URL(endpoint, window.location.origin); if (subscription) { url.protocol = url.protocol === 'https:' ? 'wss:' : 'ws:'; } return url.toString(); } ReactDOM.render( React.createElement(GraphiQL, { fetcher: GraphiQL.createFetcher({ url: createUrl('/'), fetch: customFetch, subscriptionUrl: createUrl('/ws', true), }), defaultEditorToolsVisibility: true, }), document.getElementById("graphiql") ); </script> </body> </html>"#
        )
    }

    #[test]
    fn test_with_all_options() {
        let graphiql_source = GraphiQLSource::build()
            .endpoint("/")
            .subscription_endpoint("/ws")
            .header("Authorization", "Bearer [token]")
            .title("Awesome GraphiQL IDE Test")
            .credentials(Credentials::Include)
            .finish();
        assert_eq!(
            graphiql_source,
            r#"<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="robots" content="noindex"> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="referrer" content="origin"> <title>Awesome GraphiQL IDE Test</title> <style> body { height: 100%; margin: 0; width: 100%; overflow: hidden; } #graphiql { height: 100vh; } </style> <script crossorigin src="https://unpkg.com/react@17/umd/react.development.js" ></script> <script crossorigin src="https://unpkg.com/react-dom@17/umd/react-dom.development.js" ></script> <link rel="icon" href="https://graphql.org/favicon.ico"> <link rel="stylesheet" href="https://unpkg.com/graphiql/graphiql.min.css" /> </head> <body> <div id="graphiql">Loading...</div> <script src="https://unpkg.com/graphiql/graphiql.min.js" type="application/javascript" ></script> <script> customFetch = (url, opts = {}) => { return fetch(url, {...opts, credentials: 'include'}) } createUrl = (endpoint, subscription = false) => { const url = new URL(endpoint, window.location.origin); if (subscription) { url.protocol = url.protocol === 'https:' ? 'wss:' : 'ws:'; } return url.toString(); } ReactDOM.render( React.createElement(GraphiQL, { fetcher: GraphiQL.createFetcher({ url: createUrl('/'), fetch: customFetch, subscriptionUrl: createUrl('/ws', true), headers: { 'Authorization': 'Bearer [token]', }, }), defaultEditorToolsVisibility: true, }), document.getElementById("graphiql") ); </script> </body> </html>"#
        )
    }
}
// Fuzzing runner: executes one candidate function with one generated input on the
// candy VM, tracking instruction count and coverage, and classifying the outcome.
use super::input::Input;
use crate::coverage::Coverage;
use candy_frontend::hir::Id;
use candy_vm::{
    self,
    channel::Packet,
    execution_controller::{CountingExecutionController, ExecutionController},
    fiber::{EndedReason, Fiber, FiberId, InstructionPointer, Panic, VmEnded},
    heap::{
        DisplayWithSymbolTable, Function, Heap, HirId, InlineObjectSliceCloneToHeap, SymbolTable,
    },
    lir::Lir,
    tracer::{
        stack_trace::{FiberStackTracer, StackTracer},
        FiberTracer,
    },
    vm::{self, Vm},
};
use rustc_hash::FxHashMap;
use std::borrow::Borrow;

// Budget per run; exceeding it classifies the run as `RunResult::Timeout`.
const MAX_INSTRUCTIONS: usize = 1000000;

pub struct Runner<L: Borrow<Lir>> {
    pub lir: L,
    pub vm: Option<Vm<L, StackTracer>>, // Is consumed when the runner is finished.
    pub input: Input,
    pub tracer: StackTracer,
    // Instructions executed so far, accumulated across `run` calls.
    pub num_instructions: usize,
    pub coverage: Coverage,
    // Set once the run reaches a terminal state; `None` while still runnable.
    pub result: Option<RunResult>,
}

pub enum RunResult {
    /// Executing the function with the input took more than `MAX_INSTRUCTIONS`.
    Timeout,
    /// The execution finished successfully with a value.
    Done(Packet),
    /// The execution panicked and the caller of the function (aka the fuzzer)
    /// is at fault.
    NeedsUnfulfilled { reason: String },
    /// The execution panicked with an internal panic. This indicates an error
    /// in the code that should be shown to the user.
    Panicked(Panic),
}

impl RunResult {
    /// Renders a human-readable one-line summary; `call` is the textual form of
    /// the invocation (function plus arguments) being described.
    pub fn to_string(&self, symbol_table: &SymbolTable, call: &str) -> String {
        match self {
            RunResult::Timeout => format!("{call} timed out."),
            RunResult::Done(return_value) => format!(
                "{call} returned {}.",
                DisplayWithSymbolTable::to_string(&return_value.object, symbol_table),
            ),
            RunResult::NeedsUnfulfilled { reason } => {
                format!("{call} panicked and it's our fault: {reason}")
            }
            RunResult::Panicked(panic) => {
                format!("{call} panicked internally: {}", panic.reason)
            }
        }
    }
}

impl<L: Borrow<Lir> + Clone> Runner<L> {
    /// Prepares a VM that will call `function` with `input.arguments`.
    // Function and arguments are deep-copied into a fresh heap (sharing `mapping`
    // so objects referenced by both are cloned only once); the fuzzer itself is
    // registered as the "responsible" HIR id so fuzzer-caused panics are
    // distinguishable later in `run`.
    pub fn new(lir: L, function: Function, input: Input) -> Self {
        let mut heap = Heap::default();
        let num_instructions = lir.borrow().instructions.len();
        let mut mapping = FxHashMap::default();
        let function = function
            .clone_to_heap_with_mapping(&mut heap, &mut mapping)
            .try_into()
            .unwrap();
        let arguments = input
            .arguments
            .clone_to_heap_with_mapping(&mut heap, &mut mapping);
        let responsible = HirId::create(&mut heap, true, Id::fuzzer());
        let mut tracer = StackTracer::default();
        let vm = Vm::for_function(
            lir.clone(),
            heap,
            function,
            &arguments,
            responsible,
            &mut tracer,
        );
        Runner {
            lir,
            vm: Some(vm),
            input,
            tracer,
            num_instructions: 0,
            // `Coverage::none` is sized by the total LIR instruction count.
            coverage: Coverage::none(num_instructions),
            result: None,
        }
    }

    /// Advances the VM under the caller's `execution_controller`, layering on
    /// coverage tracking and instruction counting, then classifies the VM status
    /// into `self.result` (left `None` if the VM can still run within budget).
    // Panics (via the asserts) if called after the run already finished.
    pub fn run(&mut self, execution_controller: &mut impl ExecutionController<FiberStackTracer>) {
        assert!(self.vm.is_some());
        assert!(self.result.is_none());
        let mut coverage_tracker = CoverageTrackingExecutionController {
            coverage: &mut self.coverage,
        };
        let mut instruction_counter = CountingExecutionController::default();
        // Tuple of controllers: all three observe execution simultaneously.
        let mut execution_controller = (
            execution_controller,
            &mut coverage_tracker,
            &mut instruction_counter,
        );
        self.vm
            .as_mut()
            .unwrap()
            .run(&mut execution_controller, &mut self.tracer);
        self.num_instructions += instruction_counter.num_instructions;
        self.result = match self.vm.as_ref().unwrap().status() {
            vm::Status::CanRun => {
                if self.num_instructions > MAX_INSTRUCTIONS {
                    Some(RunResult::Timeout)
                } else {
                    None
                }
            }
            // Because the fuzzer never sends channels as inputs, the function
            // waits on some internal concurrency operations that will never be
            // completed. This most likely indicates an error in the code, but
            // it's of course valid to have a function that never returns. Thus,
            // this should be treated just like a regular timeout.
            vm::Status::WaitingForOperations => Some(RunResult::Timeout),
            vm::Status::Done => {
                // `take` consumes the VM (see the field comment on `vm`);
                // tear_down yields the final heap plus the return value.
                let VmEnded { heap, reason, .. } =
                    self.vm.take().unwrap().tear_down(&mut self.tracer);
                let EndedReason::Finished(return_value) = reason else {
                    unreachable!();
                };
                Some(RunResult::Done(Packet {
                    heap,
                    object: return_value,
                }))
            }
            vm::Status::Panicked(panic) => Some(if panic.responsible == Id::fuzzer() {
                // The fuzzer supplied an input the function's needs reject:
                // our fault, not a bug in the fuzzed code.
                RunResult::NeedsUnfulfilled {
                    reason: panic.reason,
                }
            } else {
                self.vm.take().unwrap().tear_down(&mut self.tracer);
                RunResult::Panicked(panic)
            }),
        };
    }
}

/// Execution controller that records every executed instruction pointer into a
/// `Coverage` map; it never requests the VM to stop.
pub struct CoverageTrackingExecutionController<'a> {
    coverage: &'a mut Coverage,
}

impl<'a, T: FiberTracer> ExecutionController<T> for CoverageTrackingExecutionController<'a> {
    fn should_continue_running(&self) -> bool {
        true
    }

    fn instruction_executed(
        &mut self,
        _fiber_id: FiberId,
        _fiber: &Fiber<T>,
        ip: InstructionPointer,
    ) {
        self.coverage.add(ip);
    }
}
use std::fmt; use std::iter::FromIterator; use nom::branch::Alt; use nom::error::{ContextError, ErrorKind, FromExternalError, ParseError}; use nom::sequence::Tuple; use nom::{Err, IResult, Parser}; pub struct DynamicAlt<P>(Vec<P>); impl<P> From<Vec<P>> for DynamicAlt<P> { fn from(v: Vec<P>) -> Self { Self(v) } } impl<P> FromIterator<P> for DynamicAlt<P> { fn from_iter<T: IntoIterator<Item = P>>(iter: T) -> Self { Self(iter.into_iter().collect()) } } impl<I: Clone, O, E, P> Alt<I, O, E> for DynamicAlt<P> where P: Parser<I, O, E>, { fn choice(&mut self, input: I) -> IResult<I, O, E> { let length = self.0.len(); for alt in &mut self.0[..length - 1] { if let Ok(o) = alt.parse(input.clone()) { return Ok(o); }; } self.0 .last_mut() .expect("DynamicAlt must include at least one alternative") .parse(input) } } impl<I, O, E, P> Tuple<I, Vec<O>, E> for DynamicAlt<P> where P: Parser<I, O, E>, { fn parse(&mut self, mut input: I) -> IResult<I, Vec<O>, E> { let mut results = Vec::with_capacity(self.0.len()); for parser in &mut self.0 { let (next, res) = parser.parse(input)?; input = next; results.push(res); } Ok((input, results)) } } #[derive(Default, Debug, Clone, PartialEq)] pub struct VerboseError<I> { errors: Vec<(I, VerboseErrorKind)>, } #[derive(Clone, Debug, PartialEq)] /// Error context for `VerboseError` pub enum VerboseErrorKind { /// Static string added by the `context` function Context(&'static str), /// Dynamic string added by the `context` function OwnedContext(String), /// Indicates which character was expected by the `char` function Char(char), /// Error kind given by various nom parsers Nom(ErrorKind), } impl<I> ParseError<I> for VerboseError<I> { fn from_error_kind(input: I, kind: ErrorKind) -> Self { Self { errors: vec![(input, VerboseErrorKind::Nom(kind))], } } fn append(input: I, kind: ErrorKind, mut other: Self) -> Self { other.errors.push((input, VerboseErrorKind::Nom(kind))); other } fn from_char(input: I, c: char) -> Self { VerboseError { errors: 
vec![(input, VerboseErrorKind::Char(c))], } } } impl<I, E> FromExternalError<I, E> for VerboseError<I> { fn from_external_error(input: I, kind: ErrorKind, _e: E) -> Self { Self::from_error_kind(input, kind) } } impl<I: fmt::Display> fmt::Display for VerboseError<I> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(f, "Parse error:")?; for (input, error) in &self.errors { match error { VerboseErrorKind::Nom(e) => writeln!(f, "{:?} at: {}", e, input)?, VerboseErrorKind::Char(c) => writeln!(f, "expected '{}' at: {}", c, input)?, VerboseErrorKind::Context(s) => writeln!(f, "in section '{}', at: {}", s, input)?, VerboseErrorKind::OwnedContext(s) => { writeln!(f, "in section '{}', at: {}", s, input)? } } } Ok(()) } } impl<I> ContextError<I> for VerboseError<I> { fn add_context(input: I, ctx: &'static str, mut other: Self) -> Self { other.errors.push((input, VerboseErrorKind::Context(ctx))); other } } impl<I> VerboseError<I> { pub fn add_owned_context(input: I, ctx: String, mut other: Self) -> Self { other .errors .push((input, VerboseErrorKind::OwnedContext(ctx))); other } } pub fn owned_context<I: Clone, F, O>( context: String, mut f: F, ) -> impl FnMut(I) -> IResult<I, O, VerboseError<I>> where F: Parser<I, O, VerboseError<I>>, { move |i: I| match f.parse(i.clone()) { Ok(o) => Ok(o), Err(Err::Incomplete(i)) => Err(Err::Incomplete(i)), Err(Err::Error(e)) => Err(Err::Error(VerboseError::add_owned_context( i, context.clone(), e, ))), Err(Err::Failure(e)) => Err(Err::Failure(VerboseError::add_owned_context( i, context.clone(), e, ))), } }
extern crate serde; #[macro_use] extern crate serde_derive; use std::io::prelude::*; use std::io::BufReader; use std::path::Path; use std::fs::File; use std::vec::Vec; use std::error::Error; use rand::thread_rng;; use rand::seq::SliceRandom; use rusty_machine; use rusty_machine::linalg::Matrix; use rusty_machine::linalg::Vector; use ml_utils::datasets::get_boston_records_from_file; pub fn run() -> Result<(), Box<dyn Error>> { let fl= "data/boston.csv"; let mut data = get_boston_records_from_file(&fl); data.shuffle(&mut thread_rng()); // separate out to train and test let test_size: f64 = 0.2; let test_size: f64 = data.len() as f64 * test_size; let test_size = test_size.round() as usize; let (test_data, train_data) = data.split_at(test_size); let train_size = train_data.len(); let test_size = test_data.len(); // differentiate the predictors and the targets. let boston_x_train: Vec<f64> = train_data.iter() .flat_map(|r| r.into_feature_vector()) .collect(); let boston_y_train: Vec<f64> = train_data.iter() .map(|r| r.into_targets()).collect(); let boston_x_test: Vec<f64> = test_data.iter() .flat_map(|r| r.into_feature_vector()).collect(); let boston_y_test: Vec<f64> = test_data.iter() .map(|r| r.into_targets()).collect(); let boston_x_train = Matrix::new(train_size, 13, boston_x_train); let boston_y_train = Vector::new(boston_y_train); let boston_x_test = Matrix::new(test_size, 13, boston_x_test); let boston_y_test = Matrix::new(test_size, 1, boston_y_test); }
// GTP (Go Text Protocol) engine entry point plus rollout benchmarking helpers.
// NOTE(review): this targets an old nightly toolchain — `associated_consts`
// feature gate, the pre-0.4 `log` crate API (LogRecord/LogLevel), and
// rand 0.3's `StdRng::from_seed(&[usize])` — it will not build on modern Rust.
#![feature(associated_consts)]
#[macro_use]
extern crate log;
extern crate rand;
use rand::SeedableRng;
extern crate time;

use std::io;
use std::io::prelude::*;

mod go;
mod mcts;
mod gtp;

use log::{LogRecord, LogLevel, LogLevelFilter, LogMetadata};

// Minimal logger that writes Info-and-above records to stderr
// (stdout is reserved for GTP protocol responses).
struct SimpleLogger;
impl log::Log for SimpleLogger {
    fn enabled(&self, metadata: &LogMetadata) -> bool {
        metadata.level() <= LogLevel::Info
    }

    fn log(&self, record: &LogRecord) {
        if self.enabled(record.metadata()) {
            writeln!(&mut std::io::stderr(), "{} {}:{} - {}", record.level(),
                record.location().file(), record.location().line(),
                record.args()).unwrap();
        }
    }
}

// Reads GTP commands line-by-line from stdin and prints the engine's responses
// until the engine reports it is no longer running (e.g. after `quit`).
#[allow(dead_code)]
fn main() {
    log::set_logger(|max_log_level| {
        max_log_level.set(LogLevelFilter::Info);
        Box::new(SimpleLogger)
    }).unwrap();

    // Fixed seed for reproducible play; the commented line is the
    // time-seeded alternative.
    // let mut rng = rand::StdRng::from_seed(&[time::precise_time_ns() as usize]);
    let rng = rand::StdRng::from_seed(&[42]);
    // benchmark(run_rollouts, 10000, 11);
    let mut engine = gtp::Engine::new(rng);
    let stdin = io::stdin();
    for line in stdin.lock().lines() {
        println!("{}", engine.execute(line.unwrap()));
        // GTP responses are terminated by a blank line.
        println!("");
        if !engine.running {
            return;
        }
    }
}

// Runs `f(n)` `repetitions` times, printing per-run throughput and a
// min/median/max/mean summary.
// NOTE(review): `stddev` is the square root of the SUM of squared deviations —
// it is not divided by the repetition count, so it is not a standard deviation
// in the usual sense; confirm intent before relying on the printed value.
fn benchmark(f: fn(u64), n: u64, repetitions: u64) {
    let mut durations = (0..repetitions).map(|_| {
        let start = time::PreciseTime::now();
        f(n);
        let total = start.to(time::PreciseTime::now());
        println!("{} playouts in {}, {:.2} kpps", n, total,
            n as f64 / total.num_milliseconds() as f64);
        total
    }).collect::<Vec<_>>();
    // Sort so median/min/max are simple index lookups.
    durations.sort();
    let mean = durations.iter().fold(time::Duration::zero(), |acc, &d| acc + d) /
        repetitions as i32;
    let median = durations[(repetitions / 2) as usize];
    let min = durations[0];
    let max = durations[(repetitions - 1) as usize];
    let mut stddev = 0.0;
    for d in durations {
        let diff = (d.num_nanoseconds().unwrap() - mean.num_nanoseconds().unwrap()) as f64;
        stddev += diff * diff;
    }
    let stddev_dur = time::Duration::nanoseconds(stddev.sqrt() as i64);
    println!("|{}---{}---{}|, mean {} +- {}", min, median, max, mean, stddev_dur);
}

// Plays `num_rollouts` random 19x19 games with a fixed seed and prints average
// game length, mean score, and Black's win rate.
fn run_rollouts(num_rollouts: u64) {
    let mut rng = rand::StdRng::from_seed(&[42]);
    let mut num_moves = 0u64;
    // Scores are kept doubled (see `play`), hence "double_".
    let mut double_total_score = 0i64;
    let mut game = go::GoGame::new(19);
    let mut num_black_wins = 0u64;
    for _ in 0 .. num_rollouts {
        let (n, s) = play(&mut game, &mut rng);
        num_moves += n as u64;
        double_total_score += s as i64;
        if s > 0 {
            num_black_wins += 1;
        }
    }
    println!("{} moves per playout, mean score {:.2}, winrate {:.2} %",
        num_moves as f64 / num_rollouts as f64,
        double_total_score as f64 / num_rollouts as f64 / 2f64,
        100f64 * num_black_wins as f64 / num_rollouts as f64);
}

// Plays one random game to completion (two consecutive passes) and returns
// (number of moves, doubled Black score minus doubled komi).
fn play(game: &mut go::GoGame, rng: &mut rand::StdRng) -> (u32, i16) {
    // Use doubled score so we can score 0.5 komi in integer.
    let double_komi = 15;
    // Starts as WHITE so the first `opponent()` flip makes Black move first.
    let mut color_to_play = go::stone::WHITE;
    let mut num_consecutive_passes = 0;
    let mut num_moves = 0;
    game.reset();
    while num_consecutive_passes < 2 {
        color_to_play = color_to_play.opponent();
        num_moves += 1;
        let v = game.random_move(color_to_play, rng);
        if v == go::PASS {
            num_consecutive_passes += 1;
        } else {
            game.play(color_to_play, v);
            num_consecutive_passes = 0;
        }
        // Safety valve: random games should end well before 600 moves; dump the
        // board and bail out if one doesn't (likely a rules/legality bug).
        if num_moves > 600 {
            println!("{}", game);
        }
        if num_moves > 610 {
            println!("suspicious game with > 600 moves");
            break;
        }
    }
    return (num_moves, game.chinese_score() * 2 - double_komi);
}
// Machine-generated windows-sys style FFI bindings for the Windows XmlLite API
// (xmllite.h): factory functions, enums, error codes, and interface IIDs.
// Do not hand-edit values; regenerate from the Windows metadata instead.
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {
    // COM interfaces (IUnknown, IMalloc) are passed as raw pointer-sized values
    // in the windows-sys binding style; lifetime/refcounting is the caller's job.
    #[cfg(feature = "Win32_System_Com")]
    pub fn CreateXmlReader(riid: *const ::windows_sys::core::GUID, ppvobject: *mut *mut ::core::ffi::c_void, pmalloc: super::super::super::System::Com::IMalloc) -> ::windows_sys::core::HRESULT;
    #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_Com"))]
    pub fn CreateXmlReaderInputWithEncodingCodePage(pinputstream: ::windows_sys::core::IUnknown, pmalloc: super::super::super::System::Com::IMalloc, nencodingcodepage: u32, fencodinghint: super::super::super::Foundation::BOOL, pwszbaseuri: super::super::super::Foundation::PWSTR, ppinput: *mut ::windows_sys::core::IUnknown) -> ::windows_sys::core::HRESULT;
    #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_Com"))]
    pub fn CreateXmlReaderInputWithEncodingName(pinputstream: ::windows_sys::core::IUnknown, pmalloc: super::super::super::System::Com::IMalloc, pwszencodingname: super::super::super::Foundation::PWSTR, fencodinghint: super::super::super::Foundation::BOOL, pwszbaseuri: super::super::super::Foundation::PWSTR, ppinput: *mut ::windows_sys::core::IUnknown) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_System_Com")]
    pub fn CreateXmlWriter(riid: *const ::windows_sys::core::GUID, ppvobject: *mut *mut ::core::ffi::c_void, pmalloc: super::super::super::System::Com::IMalloc) -> ::windows_sys::core::HRESULT;
    #[cfg(feature = "Win32_System_Com")]
    pub fn CreateXmlWriterOutputWithEncodingCodePage(poutputstream: ::windows_sys::core::IUnknown, pmalloc: super::super::super::System::Com::IMalloc, nencodingcodepage: u32, ppoutput: *mut ::windows_sys::core::IUnknown) -> ::windows_sys::core::HRESULT;
    #[cfg(all(feature = "Win32_Foundation", feature = "Win32_System_Com"))]
    pub fn CreateXmlWriterOutputWithEncodingName(poutputstream: ::windows_sys::core::IUnknown, pmalloc: super::super::super::System::Com::IMalloc, pwszencodingname: super::super::super::Foundation::PWSTR, ppoutput: *mut ::windows_sys::core::IUnknown) -> ::windows_sys::core::HRESULT;
}
// C enums are bound as plain i32 aliases plus one const per enumerator;
// the `_..._Last` constants mirror the header's sentinel values.
pub type DtdProcessing = i32;
pub const DtdProcessing_Prohibit: DtdProcessing = 0i32;
pub const DtdProcessing_Parse: DtdProcessing = 1i32;
pub const _DtdProcessing_Last: DtdProcessing = 1i32;
// COM interfaces are opaque pointers in the windows-sys binding style.
pub type IXmlReader = *mut ::core::ffi::c_void;
pub type IXmlResolver = *mut ::core::ffi::c_void;
pub type IXmlWriter = *mut ::core::ffi::c_void;
pub type IXmlWriterLite = *mut ::core::ffi::c_void;
pub type XmlConformanceLevel = i32;
pub const XmlConformanceLevel_Auto: XmlConformanceLevel = 0i32;
pub const XmlConformanceLevel_Fragment: XmlConformanceLevel = 1i32;
pub const XmlConformanceLevel_Document: XmlConformanceLevel = 2i32;
pub const _XmlConformanceLevel_Last: XmlConformanceLevel = 2i32;
// XmlLite HRESULT error codes, grouped by prefix:
// MX_E_* stream/encoding, WC_E_* well-formedness, NC_E_* namespaces,
// SC_E_* scaling limits, WR_E_* writer, XML_E_* legacy parser codes.
pub type XmlError = i32;
pub const MX_E_MX: XmlError = -1072894464i32;
pub const MX_E_INPUTEND: XmlError = -1072894463i32;
pub const MX_E_ENCODING: XmlError = -1072894462i32;
pub const MX_E_ENCODINGSWITCH: XmlError = -1072894461i32;
pub const MX_E_ENCODINGSIGNATURE: XmlError = -1072894460i32;
pub const WC_E_WC: XmlError = -1072894432i32;
pub const WC_E_WHITESPACE: XmlError = -1072894431i32;
pub const WC_E_SEMICOLON: XmlError = -1072894430i32;
pub const WC_E_GREATERTHAN: XmlError = -1072894429i32;
pub const WC_E_QUOTE: XmlError = -1072894428i32;
pub const WC_E_EQUAL: XmlError = -1072894427i32;
pub const WC_E_LESSTHAN: XmlError = -1072894426i32;
pub const WC_E_HEXDIGIT: XmlError = -1072894425i32;
pub const WC_E_DIGIT: XmlError = -1072894424i32;
pub const WC_E_LEFTBRACKET: XmlError = -1072894423i32;
pub const WC_E_LEFTPAREN: XmlError = -1072894422i32;
pub const WC_E_XMLCHARACTER: XmlError = -1072894421i32;
pub const WC_E_NAMECHARACTER: XmlError = -1072894420i32;
pub const WC_E_SYNTAX: XmlError = -1072894419i32;
pub const WC_E_CDSECT: XmlError = -1072894418i32;
pub const WC_E_COMMENT: XmlError = -1072894417i32;
pub const WC_E_CONDSECT: XmlError = -1072894416i32;
pub const WC_E_DECLATTLIST: XmlError = -1072894415i32;
pub const WC_E_DECLDOCTYPE: XmlError = -1072894414i32;
pub const WC_E_DECLELEMENT: XmlError = -1072894413i32;
pub const WC_E_DECLENTITY: XmlError = -1072894412i32;
pub const WC_E_DECLNOTATION: XmlError = -1072894411i32;
pub const WC_E_NDATA: XmlError = -1072894410i32;
pub const WC_E_PUBLIC: XmlError = -1072894409i32;
pub const WC_E_SYSTEM: XmlError = -1072894408i32;
pub const WC_E_NAME: XmlError = -1072894407i32;
pub const WC_E_ROOTELEMENT: XmlError = -1072894406i32;
pub const WC_E_ELEMENTMATCH: XmlError = -1072894405i32;
pub const WC_E_UNIQUEATTRIBUTE: XmlError = -1072894404i32;
pub const WC_E_TEXTXMLDECL: XmlError = -1072894403i32;
pub const WC_E_LEADINGXML: XmlError = -1072894402i32;
pub const WC_E_TEXTDECL: XmlError = -1072894401i32;
pub const WC_E_XMLDECL: XmlError = -1072894400i32;
pub const WC_E_ENCNAME: XmlError = -1072894399i32;
pub const WC_E_PUBLICID: XmlError = -1072894398i32;
pub const WC_E_PESINTERNALSUBSET: XmlError = -1072894397i32;
pub const WC_E_PESBETWEENDECLS: XmlError = -1072894396i32;
pub const WC_E_NORECURSION: XmlError = -1072894395i32;
pub const WC_E_ENTITYCONTENT: XmlError = -1072894394i32;
pub const WC_E_UNDECLAREDENTITY: XmlError = -1072894393i32;
pub const WC_E_PARSEDENTITY: XmlError = -1072894392i32;
pub const WC_E_NOEXTERNALENTITYREF: XmlError = -1072894391i32;
pub const WC_E_PI: XmlError = -1072894390i32;
pub const WC_E_SYSTEMID: XmlError = -1072894389i32;
pub const WC_E_QUESTIONMARK: XmlError = -1072894388i32;
pub const WC_E_CDSECTEND: XmlError = -1072894387i32;
pub const WC_E_MOREDATA: XmlError = -1072894386i32;
pub const WC_E_DTDPROHIBITED: XmlError = -1072894385i32;
pub const WC_E_INVALIDXMLSPACE: XmlError = -1072894384i32;
pub const NC_E_NC: XmlError = -1072894368i32;
pub const NC_E_QNAMECHARACTER: XmlError = -1072894367i32;
pub const NC_E_QNAMECOLON: XmlError = -1072894366i32;
pub const NC_E_NAMECOLON: XmlError = -1072894365i32;
pub const NC_E_DECLAREDPREFIX: XmlError = -1072894364i32;
pub const NC_E_UNDECLAREDPREFIX: XmlError = -1072894363i32;
pub const NC_E_EMPTYURI: XmlError = -1072894362i32;
pub const NC_E_XMLPREFIXRESERVED: XmlError = -1072894361i32;
pub const NC_E_XMLNSPREFIXRESERVED: XmlError = -1072894360i32;
pub const NC_E_XMLURIRESERVED: XmlError = -1072894359i32;
pub const NC_E_XMLNSURIRESERVED: XmlError = -1072894358i32;
pub const SC_E_SC: XmlError = -1072894336i32;
pub const SC_E_MAXELEMENTDEPTH: XmlError = -1072894335i32;
pub const SC_E_MAXENTITYEXPANSION: XmlError = -1072894334i32;
pub const WR_E_WR: XmlError = -1072894208i32;
pub const WR_E_NONWHITESPACE: XmlError = -1072894207i32;
pub const WR_E_NSPREFIXDECLARED: XmlError = -1072894206i32;
pub const WR_E_NSPREFIXWITHEMPTYNSURI: XmlError = -1072894205i32;
pub const WR_E_DUPLICATEATTRIBUTE: XmlError = -1072894204i32;
pub const WR_E_XMLNSPREFIXDECLARATION: XmlError = -1072894203i32;
pub const WR_E_XMLPREFIXDECLARATION: XmlError = -1072894202i32;
pub const WR_E_XMLURIDECLARATION: XmlError = -1072894201i32;
pub const WR_E_XMLNSURIDECLARATION: XmlError = -1072894200i32;
pub const WR_E_NAMESPACEUNDECLARED: XmlError = -1072894199i32;
pub const WR_E_INVALIDXMLSPACE: XmlError = -1072894198i32;
pub const WR_E_INVALIDACTION: XmlError = -1072894197i32;
pub const WR_E_INVALIDSURROGATEPAIR: XmlError = -1072894196i32;
pub const XML_E_INVALID_DECIMAL: XmlError = -1072898019i32;
pub const XML_E_INVALID_HEXIDECIMAL: XmlError = -1072898018i32;
pub const XML_E_INVALID_UNICODE: XmlError = -1072898017i32;
pub const XML_E_INVALIDENCODING: XmlError = -1072897938i32;
// Node types reported by IXmlReader; numbering intentionally has gaps to match
// the Windows SDK header values.
pub type XmlNodeType = i32;
pub const XmlNodeType_None: XmlNodeType = 0i32;
pub const XmlNodeType_Element: XmlNodeType = 1i32;
pub const XmlNodeType_Attribute: XmlNodeType = 2i32;
pub const XmlNodeType_Text: XmlNodeType = 3i32;
pub const XmlNodeType_CDATA: XmlNodeType = 4i32;
pub const XmlNodeType_ProcessingInstruction: XmlNodeType = 7i32;
pub const XmlNodeType_Comment: XmlNodeType = 8i32;
pub const XmlNodeType_DocumentType: XmlNodeType = 10i32;
pub const XmlNodeType_Whitespace: XmlNodeType = 13i32;
pub const XmlNodeType_EndElement: XmlNodeType = 15i32;
pub const XmlNodeType_XmlDeclaration: XmlNodeType = 17i32;
pub const _XmlNodeType_Last: XmlNodeType = 17i32;
pub type XmlReadState = i32;
pub const XmlReadState_Initial: XmlReadState = 0i32;
pub const XmlReadState_Interactive: XmlReadState = 1i32;
pub const XmlReadState_Error: XmlReadState = 2i32;
pub const XmlReadState_EndOfFile: XmlReadState = 3i32;
pub const XmlReadState_Closed: XmlReadState = 4i32;
pub type XmlReaderProperty = i32;
pub const XmlReaderProperty_MultiLanguage: XmlReaderProperty = 0i32;
pub const XmlReaderProperty_ConformanceLevel: XmlReaderProperty = 1i32;
pub const XmlReaderProperty_RandomAccess: XmlReaderProperty = 2i32;
pub const XmlReaderProperty_XmlResolver: XmlReaderProperty = 3i32;
pub const XmlReaderProperty_DtdProcessing: XmlReaderProperty = 4i32;
pub const XmlReaderProperty_ReadState: XmlReaderProperty = 5i32;
pub const XmlReaderProperty_MaxElementDepth: XmlReaderProperty = 6i32;
pub const XmlReaderProperty_MaxEntityExpansion: XmlReaderProperty = 7i32;
pub const _XmlReaderProperty_Last: XmlReaderProperty = 7i32;
pub type XmlStandalone = i32;
pub const XmlStandalone_Omit: XmlStandalone = 0i32;
pub const XmlStandalone_Yes: XmlStandalone = 1i32;
pub const XmlStandalone_No: XmlStandalone = 2i32;
pub const _XmlStandalone_Last: XmlStandalone = 2i32;
pub type XmlWriterProperty = i32;
pub const XmlWriterProperty_MultiLanguage: XmlWriterProperty = 0i32;
pub const XmlWriterProperty_Indent: XmlWriterProperty = 1i32;
pub const XmlWriterProperty_ByteOrderMark: XmlWriterProperty = 2i32;
pub const XmlWriterProperty_OmitXmlDeclaration: XmlWriterProperty = 3i32;
pub const XmlWriterProperty_ConformanceLevel: XmlWriterProperty = 4i32;
pub const XmlWriterProperty_CompactEmptyElement: XmlWriterProperty = 5i32;
pub const _XmlWriterProperty_Last: XmlWriterProperty = 5i32;
// Interface IIDs for QueryInterface / CreateXmlReader-style factory calls.
pub const _IID_IXmlReader: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 1920597121, data2: 28829, data3: 16533, data4: [182, 61, 105, 254, 75, 13, 144, 48] };
pub const _IID_IXmlResolver: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 1920597122, data2: 28829, data3: 16533, data4: [182, 61, 105, 254, 75, 13, 144, 48] };
pub const _IID_IXmlWriter: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 1920597128, data2: 28829, data3: 16533, data4: [182, 61, 105, 254, 75, 13, 144, 48] };
use std::fmt;

/// Application-level error: a category label plus a human-readable message.
#[derive(Debug)]
pub struct AppError {
    // Category label for the error (free-form string).
    pub kind: String,
    // Human-readable description of what went wrong.
    pub message: String,
}

impl fmt::Display for AppError {
    /// Renders the error in an `AppError { kind: .., message: .. }` shape.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        let AppError { kind, message } = self;
        write!(
            formatter,
            "AppError {{ kind: {}, message: {} }}",
            kind, message
        )
    }
}

// A `From<io::Error>` conversion (kind "io", message taken from the source
// error) could be added here to enable `?` on I/O results:
// impl From<io::Error> for AppError {
//     fn from(error: io::Error) -> Self {
//         AppError { kind: String::from("io"), message: error.to_string() }
//     }
// }
mod my {
    /// Public struct with a public field of generic type `T`.
    pub struct WhiteBox<T> {
        pub contents: T,
    }

    /// Public struct with a private field of generic type `T`.
    #[allow(dead_code)]
    pub struct BlackBox<T> {
        contents: T,
    }

    impl<T> BlackBox<T> {
        /// Public constructor — the only way to create a `BlackBox`
        /// from outside this module.
        pub fn new(contents: T) -> BlackBox<T> {
            BlackBox { contents }
        }
    }
}

fn main() {
    // Public structs with public fields can be constructed with field syntax,
    // and their fields are accessible from anywhere.
    let white_box = my::WhiteBox { contents: "публичную информацию" };
    println!("Белая упаковка хранит: {}", white_box.contents);

    // A public struct with private fields cannot be built with field syntax
    // from outside its module — `my::BlackBox { contents: ... }` would be a
    // compile error here. It can, however, be created through its public
    // constructor.
    let _black_box = my::BlackBox::new("классифицированную информацию");

    // Nor can the private field be read from outside the module:
    // `_black_box.contents` would also fail to compile.
}
use prelude::*;
//use rand::prelude::{Rng, ThreadRng};
//use std::collections::HashMap;
//use std::rc::Rc;
//use std::cell::RefCell;
use super::click_ana::ClickAnaState;
use super::keyed_state::*;
use super::single_state::{Leaf};
//use super::mk_key::{ MakeKey, key_type_from_row };

/// Storage slot holding at most one `Row`, optionally paired with an
/// in-flight computation state of type `T`.
/// NOTE(review): `Row` and the `T` "computer" come from the surrounding
/// dataflow crate; per the variant comment below, at most a single row per
/// slot is assumed.
#[derive(Debug,Clone)]
enum Memoization<T> {
    /// Neither a row nor a computation is present.
    Empty,
    /// A plain materialized row with no attached computation.
    Store(Row),
    /// A computation in flight plus, optionally, the row it memoizes.
    Computing {
        computer: T,
        // Because, for the time being, I only expect to use this for grouping UDF's
        // this should only ever contain a single row.
        memoization: Option<Row>,
    }
}

impl<T> Memoization<T> {
    /// The stored row; panics if none is present (`Empty`, or `Computing`
    /// without a memoized row).
    fn value<'a>(&'a self) -> &'a Row {
        self.value_may().unwrap()
    }

    /// The stored row, if any.
    fn value_may<'a>(&'a self) -> Option<&'a Row> {
        match self {
            Memoization::Empty => Option::None,
            Memoization::Store(r) => Option::Some( &r ),
            Memoization::Computing { memoization, .. } => memoization.as_ref(),
        }
    }

    /// Mutable access to the computation state, if one is in flight.
    fn computer_may_mut<'a>(&'a mut self) -> Option<&'a mut T> {
        match self {
            Memoization::Computing { computer , .. } => Option::Some(computer),
            _ => Option::None,
        }
    }

    /// Iterator over the zero or one stored rows.
    fn iter<'a>(&'a self) -> impl Iterator<Item=&'a Row> {
        match self {
            Memoization::Empty => Option::None.into_iter(),
            Memoization::Store(r) => Option::Some(r).into_iter(),
            Memoization::Computing { memoization, .. } => memoization.as_ref().into_iter(),
        }
    }

    /// Store `item`, returning the row it displaced (if any). An `Empty`
    /// slot becomes `Store`; a `Computing` slot keeps its computer and only
    /// swaps the memoized row.
    fn replace_row(&mut self, item: Row) -> Option<Row> {
        match self {
            Memoization::Empty => {
                *self = Memoization::Store(item);
                Option::None
            }
            Memoization::Store(ref mut e) => {
                // Swap the new row into place and hand back the old one.
                let mut o = Option::Some(item);
                std::mem::swap(e, o.as_mut().unwrap());
                o
            }
            Memoization::Computing {ref mut memoization, .. } => {
                let mut o = Option::Some(item);
                std::mem::swap(&mut o, memoization);
                o
            }
        }
    }

    /// Consume the slot, yielding the stored row if any.
    fn into_row(self) -> Option<Row> {
        match self {
            Memoization::Empty => Option::None,
            Memoization::Store(e) => Option::Some(e),
            Memoization::Computing { memoization, .. } => memoization,
        }
    }

    /// Remove and return the stored row. A `Computing` slot keeps its
    /// computer; any other slot is reset to `Empty`.
    fn drop_row(&mut self) -> Option<Row> {
        match self {
            Memoization::Computing { ref mut memoization, .. } => {
                let mut o = Option::None;
                std::mem::swap(&mut o, memoization);
                o
            }
            _ => {
                let mut new = Memoization::Empty;
                std::mem::swap(self, &mut new);
                new.into_row()
            }
        }
    }
}

impl<T> Default for Memoization<T> {
    fn default() -> Self {
        Memoization::Empty
    }
}

use std::ops::Index;

/// Column access on the stored row. Panics (via `value`) when no row is
/// present.
impl<T> Index<usize> for Memoization<T> {
    type Output = DataType;
    fn index(&self, index: usize) -> &DataType {
        self.value().index(index)
    }
}

/// Crate-visible wrapper around `Memoization<T>`; this is the leaf value
/// stored per key in the keyed state.
#[derive(Debug)]
pub(crate) struct MemoElem<T>(Memoization<T>);

impl<T> MemoElem<T> {
    /// The stored row; panics when absent.
    pub fn value<'a>(&'a self) -> &'a Row {
        self.0.value()
    }

    /// Wrap a single already-materialized row.
    pub fn singleton_row(r: Row) -> MemoElem<T> {
        MemoElem(Memoization::Store(r))
    }

    /// The stored row, if any.
    pub fn value_may<'a>(&'a self) -> Option<&'a Row> {
        self.0.value_may()
    }

    /// Return the computation state, first transitioning `Empty`/`Store`
    /// into `Computing` (with a default `T`); a previously stored row is
    /// carried over as the memoized row.
    pub fn get_or_init_compute_mut<'a>(&'a mut self) -> &'a mut T
    where
        T: Default,
    {
        match self.0 {
            Memoization::Computing { ref mut computer, .. } => computer,
            _ => {
                let new = Memoization::Computing {
                    computer: Default::default(),
                    memoization: Option::None,
                };
                let old = std::mem::replace(&mut self.0, new);
                if let Memoization::Computing { ref mut computer , ref mut memoization } = self.0 {
                    // Carry the previously stored row (if any) into the
                    // fresh `Computing` slot; the replaced `None` result is
                    // intentionally discarded.
                    std::mem::replace(memoization, old.into_row());
                    computer
                } else {
                    // We just stored a `Computing` variant above.
                    unreachable!()
                }
            }
        }
    }
}

impl<T> Default for MemoElem<T> {
    fn default() -> Self {
        MemoElem(Memoization::default())
    }
}

/// Size accounting delegates to the inner `Memoization` (which is currently
/// `unimplemented!` — see below).
impl <T:SizeOf> SizeOf for MemoElem<T> {
    fn size_of(&self) -> u64 {
        self.0.size_of()
    }
    fn deep_size_of(&self) -> u64 {
        self.0.deep_size_of()
    }
}

/// Single-row leaf behaviour for the keyed state.
impl<T> Leaf for MemoElem<T> {
    fn push(&mut self, item: Row) {
        // A leaf is expected to hold at most one row, so pushing must not
        // displace an existing one.
        let i = self.0.replace_row(item);
        debug_assert!(i.is_none());
    }

    /// Drop the stored row, but only if it matches `row` exactly.
    fn remove(&mut self, row: &[DataType]) -> Option<Row> {
        if self.0.value_may().map_or(false, |i| &i[..] == row) {
            self.0.drop_row()
        } else {
            Option::None
        }
    }

    /// View the stored row (if any) as a 0- or 1-element slice.
    fn row_slice(&self) -> &[Row] {
        match self.0 {
            Memoization::Store(ref e)
            | Memoization::Computing { memoization: Option::Some( ref e) , .. } => std::slice::from_ref(e),
            _ => &[]
        }
    }
}

// Same assumptions as with `Row` apply here
unsafe impl<T> Send for MemoElem<T> {}
unsafe impl<T> Sync for MemoElem<T> {}

// NOTE(review): size accounting for `Memoization` is a stub; callers of
// `MemoElem::size_of`/`deep_size_of` will panic until this is implemented.
impl<T: SizeOf> SizeOf for Memoization<T> {
    fn size_of(&self) -> u64 {
        unimplemented!()
    }
    fn deep_size_of(&self) -> u64 {
        unimplemented!()
    }
}

/// Bytes freed when this element is evicted: the row's size plus the deep
/// size of any in-flight computer.
impl<T: SizeOf> DeallocSize for MemoElem<T> {
    fn dealloc_size(&self) -> u64 {
        match self.0 {
            Memoization::Empty => 0,
            Memoization::Store(ref r) => r.dealloc_size(),
            Memoization::Computing { ref computer, ref memoization } =>
                computer.deep_size_of() + memoization.dealloc_size()
        }
    }
}

/// Thin newtype over `MemoryState<T>` used as the backing store for
/// click-analysis state.
pub(crate) struct SpecialStateWrapper<T>(pub MemoryState<T>);

impl<T> SpecialStateWrapper<T> {
    pub fn new() -> Self {
        SpecialStateWrapper(MemoryState::default())
    }
}

impl <T:SizeOf> SizeOf for SpecialStateWrapper<T> {
    fn size_of(&self) -> u64 {
        self.0.size_of()
    }
    fn deep_size_of(&self) -> u64 {
        self.0.deep_size_of()
    }
}

/// `State` for `ClickAnaState` is pure delegation to the wrapped
/// `MemoryState`; only `as_click_ana_state` differs by exposing `self`.
impl State for super::click_ana::ClickAnaState {
    fn add_key(&mut self, columns: &[usize], partial: Option<Vec<Tag>>) {
        self.0.add_key(columns, partial)
    }
    fn is_useful(&self) -> bool {
        self.0.is_useful()
    }
    fn is_partial(&self) -> bool {
        self.0.is_partial()
    }
    fn process_records(&mut self, records: &mut Records, partial_tag: Option<Tag>) {
        self.0.process_records(records, partial_tag)
    }
    fn mark_hole(&mut self, key: &[DataType], tag: Tag) {
        self.0.mark_hole(key, tag)
    }
    fn mark_filled(&mut self, key: Vec<DataType>, tag: Tag) {
        self.0.mark_filled(key, tag)
    }
    fn lookup<'a>(&'a self, columns: &[usize], key: &KeyType) -> LookupResult<'a> {
        self.0.lookup(columns, key)
    }
    fn rows(&self) -> usize {
        self.0.rows()
    }
    fn keys(&self) -> Vec<Vec<usize>> {
        self.0.keys()
    }
    fn cloned_records(&self) -> Vec<Vec<DataType>> {
        self.0.cloned_records()
    }
    fn evict_random_keys(&mut self, count: usize) -> (&[usize], Vec<Vec<DataType>>, u64) {
        self.0.evict_random_keys(count)
    }
    fn evict_keys(&mut self, tag: Tag, keys: &[Vec<DataType>]) -> Option<(&[usize], u64)> {
        self.0.evict_keys(tag, keys)
    }
    fn clear(&mut self) {
        self.0.clear()
    }
    fn as_click_ana_state<'a>(&'a mut self) -> Option<&'a mut ClickAnaState> {
        Option::Some(self)
    }
}

// NOTE(review): a large block of commented-out draft code used to follow
// here: a `StateElement<T>` keyed-state wrapper, an `insert_row_match_impl!`
// macro, and a `MemoizedComputableState<T>` implementation of `State`. It
// duplicated what `SpecialStateWrapper`/`MemoryState` now provide and was
// dead text; it has been removed — recover it from version control if it is
// ever needed again.
// Copyright (c) 2016, <daggerbot@gmail.com> // This software is available under the terms of the zlib license. // See COPYING.md for more information. use std::cmp; use std::convert::{TryFrom, TryInto}; use std::ops::{Add, Sub}; use vec::Vec2; /// Rectangle structure. #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub struct Rect<T>(pub Vec2<T>, pub Vec2<T>); impl<T> Rect<T> { pub fn height (&self) -> T where for<'l, 'r> &'l T: Sub<&'r T, Output = T> { &self.1.y - &self.0.y } pub fn intersect (self, other: Rect<T>) -> Rect<T> where T: Ord { Rect(Vec2::new(cmp::max(self.0.x, other.0.x), cmp::max(self.0.y, other.0.y)), Vec2::new(cmp::min(self.1.x, other.1.x), cmp::min(self.1.y, other.1.y))) } pub fn is_good (&self) -> bool where T: Ord { self.1.x > self.0.x && self.1.y > self.0.y } pub fn size (&self) -> Vec2<T> where for<'l, 'r> &'l T: Sub<&'r T, Output = T> { Vec2 { x: self.width(), y: self.height() } } pub fn width (&self) -> T where for<'l, 'r> &'l T: Sub<&'r T, Output = T> { &self.1.x - &self.0.x } pub fn xywh (x: T, y: T, w: T, h: T) -> Rect<T> where for<'a> &'a T: Add<T, Output = T> { let x1 = &x + w; let y1 = &y + h; Rect(Vec2::new(x, y), Vec2::new(x1, y1)) } } // Conversion operators. impl<T, F: TryInto<T>> TryFrom<Rect<F>> for Rect<T> { type Err = F::Err; fn try_from (other: Rect<F>) -> Result<Rect<T>, F::Err> { Ok(Rect(try!(other.0.try_into()), try!(other.1.try_into()))) } } // Translation operators. 
/// Translate a rectangle by a vector: both corners move by `offset`.
impl<T: Copy + Add<Output = T>> Add<Vec2<T>> for Rect<T> {
    type Output = Rect<T>;

    fn add (self, offset: Vec2<T>) -> Rect<T> {
        let lo = self.0 + offset;
        let hi = self.1 + offset;
        Rect(lo, hi)
    }
}

/// Translate an owned rectangle by a borrowed vector.
impl<'a, T: Add<&'a T, Output = T>> Add<&'a Vec2<T>> for Rect<T> {
    type Output = Rect<T>;

    fn add (self, offset: &'a Vec2<T>) -> Rect<T> {
        let lo = self.0 + offset;
        let hi = self.1 + offset;
        Rect(lo, hi)
    }
}

/// Translate a borrowed rectangle by a borrowed vector, producing a new one.
impl<'l, 'r, T> Add<&'r Vec2<T>> for &'l Rect<T> where &'l T: Add<&'r T, Output = T> {
    type Output = Rect<T>;

    fn add (self, offset: &'r Vec2<T>) -> Rect<T> {
        let lo = &self.0 + offset;
        let hi = &self.1 + offset;
        Rect(lo, hi)
    }
}

/// Translate a rectangle by the negation of a vector.
impl<T: Copy + Sub<Output = T>> Sub<Vec2<T>> for Rect<T> {
    type Output = Rect<T>;

    fn sub (self, offset: Vec2<T>) -> Rect<T> {
        let lo = self.0 - offset;
        let hi = self.1 - offset;
        Rect(lo, hi)
    }
}

/// Borrowed-vector variant of the subtraction translation.
impl<'a, T: Sub<&'a T, Output = T>> Sub<&'a Vec2<T>> for Rect<T> {
    type Output = Rect<T>;

    fn sub (self, offset: &'a Vec2<T>) -> Rect<T> {
        let lo = self.0 - offset;
        let hi = self.1 - offset;
        Rect(lo, hi)
    }
}

/// Fully-borrowed variant of the subtraction translation.
impl<'l, 'r, T> Sub<&'r Vec2<T>> for &'l Rect<T> where &'l T: Sub<&'r T, Output = T> {
    type Output = Rect<T>;

    fn sub (self, offset: &'r Vec2<T>) -> Rect<T> {
        let lo = &self.0 - offset;
        let hi = &self.1 - offset;
        Rect(lo, hi)
    }
}
extern crate futures;

use std::sync::mpsc::channel;

use futures::future::ok;
use futures::prelude::*;

/// Regression test: a chain of 1_000 `and_then` stages must resolve without
/// blowing the stack, and deliver completion across a thread boundary.
#[test]
fn lots() {
    // Recursively build a chain of `n` `and_then` stages. Boxing is required
    // because every recursion depth yields a differently-typed future.
    // FIX: spell the trait object as `dyn Future` — the bare-trait form
    // (`Box<Future<...>>`) is deprecated in the 2018 edition and a hard
    // error in later editions.
    fn doit(n: usize) -> Box<dyn Future<Item=(), Error=()> + Send> {
        if n == 0 {
            Box::new(ok(()))
        } else {
            Box::new(ok(n - 1).and_then(doit))
        }
    }

    let (tx, rx) = channel();
    // Drive the future to completion on a separate thread; success is
    // signalled back over the channel.
    ::std::thread::spawn(|| {
        doit(1_000).map(move |_| tx.send(()).unwrap()).wait()
    });
    rx.recv().unwrap();
}
fn main() {
    // Build a vector by pushing elements one at a time.
    let mut pushed: Vec<i32> = Vec::new();
    for value in 1..3 + 1 {
        pushed.push(value);
    }

    // Checked indexing: `get` returns an Option, so a missing index can be
    // replaced by a default instead of panicking.
    let literal = vec![1, 2, 3];
    let third = literal.get(2).map(|&value| value).unwrap_or(0);
    println!("{}", third);
    println!("{}", pushed[1]);

    let mut numbers = vec![1, 2, 3];
    // Direct indexing past the end (e.g. `&numbers[100]`) would panic
    // immediately; `get` yields `None` for an out-of-range index instead.
    let _out_of_range = numbers.get(100);

    // Iterate by shared reference (read-only).
    for n in &numbers {
        println!("======>{}", n);
    }
    // Iterate by mutable reference, updating each element in place.
    for n in &mut numbers {
        *n += 60;
    }
    for n in &numbers {
        println!("======>{}", n);
    }
}
#[cfg(test)] extern crate quickcheck; #[cfg(test)] #[macro_use(quickcheck)] extern crate quickcheck_macros; mod error; mod ket; pub mod gate; pub mod quantum_computer; pub mod registers;
// Use external crate for WASM integration extern crate wasm_bindgen; // Import from the prelude directory use wasm_bindgen::prelude::*; // Use the WASM package to be able to call the JavaScript alert method from rust #[wasm_bindgen] extern { fn alert(s: &str); } // Setup the button click method to use the JavaScript alert to display a message to the user #[wasm_bindgen] pub fn rust_alert() { alert("Hello World from Rust!"); }
//! Implements the model for headers, as specified in the
//! [STOMP Protocol Specification,Version 1.2](https://stomp.github.io/stomp-specification-1.2.html).

#[macro_use]
mod macros;

use crate::common::functions::decode_str;
use crate::error::StompParseError;
use either::Either;
use paste::paste;
use std::convert::TryFrom;
use std::str::FromStr;

/// A Header that reveals its type and its value, and can be displayed
pub trait HeaderValue: std::fmt::Display {
    type OwnedValue;
    type Value;
    const OWNED: bool;
    fn header_name(&self) -> &str;
}

/// A header value whose raw text may contain STOMP escape sequences and can
/// be decoded (borrowed when unescaped, owned when decoding changed it).
pub trait DecodableValue {
    fn decoded_value(&self) -> Result<Either<&str, String>, StompParseError>;
}

/// A raw `name:value` custom header.
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct NameValue {
    pub name: String,
    pub value: String,
}

impl std::fmt::Display for NameValue {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
        write!(f, "{}:{}", &self.name, &self.value)
    }
}

// Split `input` at the first occurrence of `delim`, excluding the delimiter
// itself; `None` when the delimiter is absent.
// NOTE(review): duplicates `str::split_once` (stable since Rust 1.52) —
// presumably kept for older-toolchain support; confirm before replacing.
fn split_once(input: &str, delim: char) -> Option<(&str, &str)> {
    input
        .find(delim)
        .map(|idx| (&input[0..idx], &input[(idx + 1)..input.len()]))
}

/// Parses a `name:value` header line; everything after the first `:` is the
/// value.
impl FromStr for NameValue {
    type Err = StompParseError;

    fn from_str(input: &str) -> Result<NameValue, StompParseError> {
        split_once(input, ':')
            .map(|(name, value)| NameValue {
                name: name.to_owned(),
                value: value.to_owned(),
            })
            .ok_or_else(|| StompParseError::new(format!("Poorly formatted header: {}", input)))
    }
}

/// A pair of numbers which specify at what intervall the originator of
/// the containing message will supply a heartbeat and expect a heartbeat.
#[derive(Eq, PartialEq, Debug, Clone, Default)]
pub struct HeartBeatIntervalls {
    pub supplied: u32,
    pub expected: u32,
}

impl HeartBeatIntervalls {
    pub fn new(supplied: u32, expected: u32) -> HeartBeatIntervalls {
        HeartBeatIntervalls { expected, supplied }
    }
}

/// Serializes as `supplied,expected` — the wire order defined by the spec.
impl std::fmt::Display for HeartBeatIntervalls {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
        write!(f, "{},{}", &self.supplied, &self.expected)
    }
}

impl FromStr for HeartBeatIntervalls {
    type Err = StompParseError;

    /// Parses the string message as two ints representing "supplied, expected" heartbeat intervalls
    fn from_str(input: &str) -> Result<HeartBeatIntervalls, StompParseError> {
        split_once(input, ',')
            .ok_or_else(|| StompParseError::new(format!("Poorly formatted heartbeats: {}", input)))
            .and_then(|(supplied, expected)| {
                u32::from_str(expected)
                    .and_then(|expected| {
                        u32::from_str(supplied)
                            .map(|supplied| HeartBeatIntervalls { expected, supplied })
                    })
                    .map_err(|_| {
                        StompParseError::new(format!("Poorly formatted heartbeats: {}", input))
                    })
            })
    }
}

/// Ordered list of STOMP versions, e.g. from an `accept-version` header.
#[derive(Eq, PartialEq, Debug, Clone)]
pub struct StompVersions(pub Vec<StompVersion>);

/// Serializes as a comma-separated list of version strings.
impl std::fmt::Display for StompVersions {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
        write!(
            f,
            "{}",
            self.0
                .iter()
                .map(|version| version.to_string())
                .collect::<Vec<String>>()
                .join(",")
        )
    }
}

/// Parses a comma-separated version list; any unparseable section fails the
/// whole header.
impl FromStr for StompVersions {
    type Err = StompParseError;

    fn from_str(input: &str) -> Result<StompVersions, StompParseError> {
        input
            .split(',')
            .map(|section| StompVersion::from_str(section))
            .try_fold(Vec::new(), |mut vec, result| {
                result
                    .map(|version| {
                        vec.push(version);
                        vec
                    })
                    .map_err(|_| {
                        StompParseError::new(format!("Poorly formatted accept-versions: {}", input))
                    })
            })
            .map(StompVersions)
    }
}

impl std::ops::Deref for StompVersions {
    type Target = Vec<StompVersion>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[derive(Eq, PartialEq, Debug, Clone)]
/// The Ack approach to be used for the subscription
pub enum AckType {
    /// The client need not send Acks. Messages are assumed received as soon as sent.
    Auto,
    /// Client must send Ack frames. Ack frames are cummulative, acknowledging also all previous messages.
    Client,
    /// Client must send Ack frames. Ack frames are individual, acknowledging only the specified message.
    ClientIndividual,
}

impl Default for AckType {
    fn default() -> Self {
        AckType::Auto
    }
}

/// Serializes to the spec's wire tokens: `auto`, `client`, `client-individual`.
impl std::fmt::Display for AckType {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.write_str(match self {
            AckType::Auto => "auto",
            AckType::Client => "client",
            AckType::ClientIndividual => "client-individual",
        })
    }
}

impl FromStr for AckType {
    type Err = StompParseError;

    fn from_str(input: &str) -> Result<AckType, StompParseError> {
        match input {
            "auto" => Ok(AckType::Auto),
            "client" => Ok(AckType::Client),
            "client-individual" => Ok(AckType::ClientIndividual),
            _ => Err(StompParseError::new(format!("Unknown ack-type: {}", input))),
        }
    }
}

#[allow(non_camel_case_types)]
#[derive(Debug, Eq, PartialEq, Clone)]
/// Stomp Versions that client and server can negotiate to use
pub enum StompVersion {
    V1_0,
    V1_1,
    V1_2,
    /// Any version string not recognised above; retained verbatim.
    /// Note: `Display` refuses to render this variant (returns `fmt::Error`).
    Unknown(String),
}

impl std::fmt::Display for StompVersion {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
        let text = match self {
            StompVersion::V1_0 => "1.0",
            StompVersion::V1_1 => "1.1",
            StompVersion::V1_2 => "1.2",
            _ => return Err(std::fmt::Error {}),
        };
        f.write_str(text)
    }
}

/// Parsing never fails: unrecognised versions become `Unknown`.
impl FromStr for StompVersion {
    type Err = StompParseError;

    fn from_str(input: &str) -> Result<StompVersion, StompParseError> {
        match input {
            "1.0" => Ok(StompVersion::V1_0),
            "1.1" => Ok(StompVersion::V1_1),
            "1.2" => Ok(StompVersion::V1_2),
            _ => Ok(StompVersion::Unknown(input.to_owned())),
        }
    }
}

const EMPTY: &str = "";

// Generates the typed `*Value` header structs (e.g. `DestinationValue`,
// `ContentLengthValue`); see the `macros` module for the expansion. Each
// tuple is (TypeName, wire-name[, value-type, default]).
headers!(
    (Ack, "ack", AckType, (AckType::Auto)),
    (
        AcceptVersion,
        "accept-version",
        StompVersions,
        (StompVersions(Vec::new()))
    ),
    (ContentLength, "content-length", u32,
0),
    (ContentType, "content-type"),
    (Destination, "destination"),
    (
        HeartBeat,
        "heart-beat",
        HeartBeatIntervalls,
        (HeartBeatIntervalls::new(0, 0))
    ),
    (Host, "host"),
    (Id, "id"),
    (Login, "login"),
    (Message, "message"),
    (MessageId, "message-id"),
    (Passcode, "passcode"),
    (Receipt, "receipt"),
    (ReceiptId, "receipt-id"),
    (Server, "server"),
    (Session, "session"),
    (Subscription, "subscription"),
    (Transaction, "transaction"),
    (Version, "version", StompVersion, (StompVersion::V1_2))
);

#[cfg(test)]
mod test {
    use crate::common::functions::decode_str;
    use crate::error::StompParseError;
    use crate::headers::{HeartBeatIntervalls, HeartBeatValue};
    use either::Either;
    use std::{fmt::Display, str::FromStr};

    use super::{ContentLengthValue, DecodableValue, DestinationValue, HeaderValue};

    fn do_something(value: &str) {
        println!("Value: {}", value);
    }

    // Borrowed `value()` must not outlive the header it borrows from.
    #[test]
    fn header_value() {
        let d = DestinationValue::new("Foo");
        let value: &str = d.value();
        do_something(value);
        drop(d);
        // println!("Value: {}", value);
    }

    #[test]
    fn header_value_display() {
        let x = ContentLengthValue::new(10);
        assert_eq!("content-length:10", x.to_string())
    }

    // Wire order is "supplied,expected" in both directions.
    #[test]
    fn heartbeat_reads_supplied_then_expected() {
        let hb = HeartBeatIntervalls::from_str("100,200").expect("Heartbeat parse failed");
        assert_eq!(100, hb.supplied);
        assert_eq!(200, hb.expected);
    }

    #[test]
    fn heartbeat_writes_supplied_then_expected() {
        let hb = HeartBeatIntervalls::new(500, 300);
        assert_eq!("500,300", hb.to_string());
    }

    #[test]
    fn heartbeat_into_intervalls() {
        let hb = HeartBeatValue::new(HeartBeatIntervalls::new(123, 987));
        let intervalls: HeartBeatIntervalls = hb.into();
        assert_eq!(123, intervalls.supplied);
        assert_eq!(987, intervalls.expected);
    }

    // Minimal hand-rolled header used to exercise `DecodableValue` below.
    struct TestValue {
        value: &'static str,
    }

    impl TestValue {
        fn value(&self) -> &str {
            self.value
        }
    }

    impl DecodableValue for TestValue {
        fn decoded_value(&self) -> Result<Either<&str, String>, StompParseError> {
            decode_str(self.value())
        }
    }

    impl Display for TestValue {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            f.write_fmt(format_args!("test:{}", self.value))
        }
    }

    impl HeaderValue for TestValue {
        type OwnedValue = String;
        type Value = &'static str;
        const OWNED: bool = false;

        fn header_name(&self) -> &str {
            todo!()
        }
    }

    // Unescaped input must be returned borrowed (same pointer, no copy).
    #[test]
    fn returns_value_if_no_escape() {
        let value = "Hello";
        let instance = TestValue { value };
        let result = instance.decoded_value();
        if let Ok(Either::Left(result)) = result {
            assert_eq!(value.as_ptr(), result.as_ptr());
        } else {
            panic!("Unexpected return");
        }
    }

    // `\\` decodes to a single backslash.
    #[test]
    fn transforms_escaped_slash() {
        let value = "Hel\\\\lo";
        let instance = TestValue { value };
        let result = instance.decoded_value();
        if let Ok(Either::Right(result)) = result {
            assert_eq!("Hel\\lo", &result);
        } else {
            panic!("Unexpected return");
        }
    }

    // `\n` decodes to a newline.
    #[test]
    fn transforms_escaped_n() {
        let value = "Hell\\nno";
        let instance = TestValue { value };
        let result = instance.decoded_value();
        if let Ok(Either::Right(result)) = result {
            assert_eq!("Hell\nno", &result);
        } else {
            panic!("Unexpected return");
        }
    }

    // `\r` decodes to a carriage return.
    #[test]
    fn transforms_escaped_r() {
        let value = "Hell\\rno";
        let instance = TestValue { value };
        let result = instance.decoded_value();
        if let Ok(Either::Right(result)) = result {
            assert_eq!("Hell\rno", &result);
        } else {
            panic!("Unexpected return");
        }
    }

    // `\c` decodes to a colon (STOMP-specific escape).
    #[test]
    fn transforms_escaped_c() {
        let value = "Hell\\cno";
        let instance = TestValue { value };
        let result = instance.decoded_value();
        if let Ok(Either::Right(result)) = result {
            assert_eq!("Hell:no", &result);
        } else {
            panic!("Unexpected return");
        }
    }

    // Any other escape sequence is an error per the spec.
    #[test]
    fn rejects_escaped_t() {
        let value = "Hell\\tno";
        let instance = TestValue { value };
        let result = instance.decoded_value();
        if let Ok(_) = result {
            panic!("Unexpected return");
        }
    }

    // A trailing lone backslash is an error.
    #[test]
    fn rejects_slash_at_end() {
        let value = "Hell\\";
        let instance = TestValue { value };
        let result = instance.decoded_value();
        if let Ok(_) = result {
            panic!("Unexpected return");
        }
    }
}
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::any::Any; use std::collections::hash_map::DefaultHasher; use std::hash::Hash; use std::hash::Hasher; use std::sync::Arc; use common_catalog::plan::PartInfo; use common_catalog::plan::PartInfoPtr; use storages_common_table_meta::meta::Location; #[derive(serde::Serialize, serde::Deserialize, PartialEq, Eq)] pub struct FuseLazyPartInfo { pub segment_location: Location, } #[typetag::serde(name = "fuse_lazy")] impl PartInfo for FuseLazyPartInfo { fn as_any(&self) -> &dyn Any { self } fn equals(&self, info: &Box<dyn PartInfo>) -> bool { match info.as_any().downcast_ref::<FuseLazyPartInfo>() { None => false, Some(other) => self == other, } } fn hash(&self) -> u64 { let mut s = DefaultHasher::new(); self.segment_location.0.hash(&mut s); s.finish() } } impl FuseLazyPartInfo { pub fn create(segment_location: Location) -> PartInfoPtr { Arc::new(Box::new(FuseLazyPartInfo { segment_location })) } }
use core::cmp;
use core::ops;
use core::slice;

use anyhow::anyhow;

use super::{BigInteger, OpaqueTerm, Term, Tuple};

/// A marker trait for index types
pub trait TupleIndex: Into<usize> {}
/// A marker trait for internal index types to help in specialization
pub trait NonPrimitiveIndex: Sized {}

// Single error constructor shared by all fallible conversions below.
macro_rules! bad_index {
    () => {
        anyhow!("invalid index: bad argument")
    };
}

/// Represents indices which start at 1 and progress upwards
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
pub struct OneBasedIndex(usize);
impl OneBasedIndex {
    /// Fails when `i` is zero: a one-based index must be >= 1.
    #[inline]
    pub fn new(i: usize) -> anyhow::Result<Self> {
        if i > 0 {
            Ok(Self(i))
        } else {
            Err(bad_index!())
        }
    }
}
impl TupleIndex for OneBasedIndex {}
impl NonPrimitiveIndex for OneBasedIndex {}
impl Default for OneBasedIndex {
    // Default is the first valid one-based index.
    fn default() -> Self {
        Self(1)
    }
}
impl TryFrom<&BigInteger> for OneBasedIndex {
    type Error = anyhow::Error;

    fn try_from(n: &BigInteger) -> Result<Self, Self::Error> {
        // Fails both when the big integer doesn't fit in usize and when it is 0.
        Self::new(n.try_into().map_err(|_| bad_index!())?)
    }
}
impl TryFrom<i64> for OneBasedIndex {
    type Error = anyhow::Error;

    fn try_from(n: i64) -> Result<Self, Self::Error> {
        Self::new(n.try_into()?)
    }
}
impl TryFrom<OpaqueTerm> for OneBasedIndex {
    type Error = anyhow::Error;

    fn try_from(term: OpaqueTerm) -> Result<Self, Self::Error> {
        // Decode the opaque term, then defer to the `Term` conversion below.
        let term: Term = term.into();
        term.try_into()
    }
}
impl TryFrom<Term> for OneBasedIndex {
    type Error = anyhow::Error;

    fn try_from(term: Term) -> Result<Self, Self::Error> {
        match term {
            Term::Int(i) => i.try_into(),
            Term::BigInt(i) => i.as_ref().try_into(),
            // Non-integer terms are never valid indices.
            _ => Err(bad_index!()),
        }
    }
}
// NOTE: this conversion yields the equivalent *zero-based* offset
// (self.0 - 1), not the raw stored value. All the usize comparisons
// below follow the same convention.
impl Into<usize> for OneBasedIndex {
    #[inline]
    fn into(self) -> usize {
        self.0 - 1
    }
}
impl PartialEq<usize> for OneBasedIndex {
    // The usize is treated as a zero-based offset.
    #[inline]
    fn eq(&self, other: &usize) -> bool {
        (self.0 - 1) == *other
    }
}
impl PartialEq<ZeroBasedIndex> for OneBasedIndex {
    #[inline]
    fn eq(&self, other: &ZeroBasedIndex) -> bool {
        // Delegate to ZeroBasedIndex's cross-type eq (symmetric).
        other.eq(self)
    }
}
impl PartialOrd<usize> for OneBasedIndex {
    #[inline]
    fn partial_cmp(&self, other: &usize) -> Option<cmp::Ordering> {
        (self.0 - 1).partial_cmp(other)
    }
}
impl PartialOrd<ZeroBasedIndex> for OneBasedIndex {
    #[inline]
    fn partial_cmp(&self, other: &ZeroBasedIndex) -> Option<cmp::Ordering> {
        // Delegate and flip, since the delegated comparison is other-vs-self.
        other.partial_cmp(self).map(|o| o.reverse())
    }
}
impl ops::Add for OneBasedIndex {
    type Output = Self;

    #[inline]
    fn add(self, rhs: Self) -> Self::Output {
        Self(self.0 + rhs.0)
    }
}
impl ops::Add<usize> for OneBasedIndex {
    type Output = Self;

    #[inline]
    fn add(self, rhs: usize) -> Self::Output {
        Self(self.0 + rhs)
    }
}

/// Represents indices which start at 0 and progress upwards
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
#[repr(transparent)]
pub struct ZeroBasedIndex(usize);
impl ZeroBasedIndex {
    // Infallible: every usize is a valid zero-based index.
    #[inline(always)]
    pub fn new(i: usize) -> Self {
        Self(i)
    }
}
impl TupleIndex for ZeroBasedIndex {}
impl NonPrimitiveIndex for ZeroBasedIndex {}
impl Default for ZeroBasedIndex {
    #[inline(always)]
    fn default() -> Self {
        Self(0)
    }
}
impl TryFrom<i64> for ZeroBasedIndex {
    type Error = anyhow::Error;

    fn try_from(n: i64) -> Result<Self, Self::Error> {
        // Only fails when n is negative (doesn't fit in usize).
        Ok(Self::new(n.try_into()?))
    }
}
impl TryFrom<OpaqueTerm> for ZeroBasedIndex {
    type Error = anyhow::Error;

    fn try_from(term: OpaqueTerm) -> Result<Self, Self::Error> {
        let term: Term = term.into();
        term.try_into()
    }
}
impl TryFrom<Term> for ZeroBasedIndex {
    type Error = anyhow::Error;

    fn try_from(term: Term) -> Result<Self, Self::Error> {
        match term {
            Term::Int(i) => i.try_into().map_err(|_| bad_index!()),
            // Big integers outside i64 range are rejected before conversion.
            Term::BigInt(i) => i.as_i64().ok_or_else(|| bad_index!())?.try_into(),
            _ => Err(bad_index!()),
        }
    }
}
impl From<OneBasedIndex> for ZeroBasedIndex {
    #[inline]
    fn from(i: OneBasedIndex) -> ZeroBasedIndex {
        // Safe: OneBasedIndex::new guarantees i.0 >= 1.
        Self(i.0 - 1)
    }
}
impl From<usize> for ZeroBasedIndex {
    #[inline]
    fn from(n: usize) -> Self {
        Self::new(n)
    }
}
impl Into<usize> for ZeroBasedIndex {
    #[inline]
    fn into(self) -> usize {
        self.0
    }
}
impl PartialEq<usize> for ZeroBasedIndex {
    #[inline]
    fn eq(&self, other: &usize) -> bool {
        self.0 == *other
    }
}
impl PartialEq<OneBasedIndex> for ZeroBasedIndex {
    fn eq(&self, other: &OneBasedIndex) -> bool {
        // Normalize the one-based index to zero-based before comparing.
        let index: ZeroBasedIndex = (*other).into();
        self.0 == index.0
    }
}
impl PartialOrd<usize> for ZeroBasedIndex {
    #[inline]
    fn partial_cmp(&self, other: &usize) -> Option<core::cmp::Ordering> {
        self.0.partial_cmp(other)
    }
}
impl PartialOrd<OneBasedIndex> for ZeroBasedIndex {
    fn partial_cmp(&self, other: &OneBasedIndex) -> Option<core::cmp::Ordering> {
        let index: ZeroBasedIndex = (*other).into();
        self.partial_cmp(&index)
    }
}
impl ops::Add for ZeroBasedIndex {
    type Output = Self;

    #[inline]
    fn add(self, rhs: Self) -> Self::Output {
        Self(self.0 + rhs.0)
    }
}
impl ops::Add<usize> for ZeroBasedIndex {
    type Output = Self;

    #[inline]
    fn add(self, rhs: usize) -> Self::Output {
        Self(self.0 + rhs)
    }
}

// Tuple indexing
impl ops::Index<ops::RangeFull> for Tuple {
    type Output = [OpaqueTerm];

    #[inline]
    fn index(&self, index: ops::RangeFull) -> &Self::Output {
        ops::Index::index(self.as_slice(), index)
    }
}
// Specialization for indexing with usize values (assumed to be zero-based indices)
impl TupleIndex for usize {}
impl ops::Index<usize> for Tuple {
    type Output = OpaqueTerm;

    #[inline]
    fn index(&self, index: usize) -> &Self::Output {
        // Call SliceIndex::index directly: the index is the receiver,
        // the slice the argument.
        <usize as slice::SliceIndex<[OpaqueTerm]>>::index(index, self.as_slice())
    }
}
impl ops::IndexMut<usize> for Tuple {
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        <usize as slice::SliceIndex<[OpaqueTerm]>>::index_mut(index, self.as_mut_slice())
    }
}
impl ops::Index<ops::RangeTo<usize>> for Tuple {
    type Output = [OpaqueTerm];

    #[inline]
    fn index(&self, index: ops::RangeTo<usize>) -> &Self::Output {
        <ops::RangeTo<usize> as slice::SliceIndex<[OpaqueTerm]>>::index(index, self.as_slice())
    }
}
impl ops::Index<ops::RangeFrom<usize>> for Tuple {
    type Output = [OpaqueTerm];

    #[inline]
    fn index(&self, index: ops::RangeFrom<usize>) -> &Self::Output {
        <ops::RangeFrom<usize> as slice::SliceIndex<[OpaqueTerm]>>::index(index, self.as_slice())
    }
}
// Generic tuple indexing for any type that implements TupleIndex + NonPrimitiveIndex
impl<I> ops::Index<I> for Tuple
where
    I: TupleIndex + NonPrimitiveIndex,
{
    type Output = OpaqueTerm;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        // `into()` yields a zero-based offset for both index types (see
        // the Into<usize> impls above).
        let uindex: usize = index.into();
        ops::Index::index(self.as_slice(), uindex)
    }
}
impl<I> ops::IndexMut<I> for Tuple
where
    I: TupleIndex + NonPrimitiveIndex,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        let uindex: usize = index.into();
        ops::IndexMut::index_mut(self.as_mut_slice(), uindex)
    }
}
impl<I> ops::Index<ops::RangeTo<I>> for Tuple
where
    I: TupleIndex + NonPrimitiveIndex,
{
    type Output = [OpaqueTerm];

    #[inline]
    fn index(&self, index: ops::RangeTo<I>) -> &Self::Output {
        let uindex: usize = index.end.into();
        ops::Index::index(self.as_slice(), ops::RangeTo { end: uindex })
    }
}
impl<I> ops::Index<ops::RangeFrom<I>> for Tuple
where
    I: TupleIndex + NonPrimitiveIndex,
{
    type Output = [OpaqueTerm];

    #[inline]
    fn index(&self, index: ops::RangeFrom<I>) -> &Self::Output {
        let uindex: usize = index.start.into();
        ops::Index::index(self.as_slice(), ops::RangeFrom { start: uindex })
    }
}
extern crate disjoint_sets;
#[macro_use]
extern crate scan_rules;

use std::io;

/// One input line: a program id and the ids it is directly connected to.
struct Program {
    id: usize,
    connections: Vec<usize>,
}

/// Reads and parses one `id <-> a, b, c` line from stdin.
/// Returns `Ok(None)` at end of input; I/O and parse failures surface
/// as `ScanError`.
fn read_program() -> Result<Option<Program>, scan_rules::ScanError> {
    let mut line = String::new();
    match io::stdin().read_line(&mut line) {
        Err(e) => Err(scan_rules::ScanError::io(e)),
        // read_line returns 0 bytes only at EOF.
        Ok(0) => Ok(None),
        Ok(_) => Ok(Some(scan!(&line;
            (let id, "<->", [ let connections: usize ],+) => Program { id, connections },
        )?)),
    }
}

/// Collects all programs from stdin; panics on malformed input.
fn get_input() -> Vec<Program> {
    let mut result = Vec::new();
    loop {
        match read_program().expect("failed to parse program") {
            None => break,
            Some(p) => result.push(p),
        }
    }
    result
}

/// Builds a union-find over program ids, unioning each program with
/// every id in its connection list. The structure is sized by the
/// largest id seen plus one; panics if the input is empty.
fn make_unions(data: &[Program]) -> disjoint_sets::UnionFind {
    let set_size = data.iter().map(|p| p.id).max().expect("no programs") + 1;
    let mut union_find = disjoint_sets::UnionFind::new(set_size);
    for p in data {
        for c in &p.connections {
            union_find.union(p.id, *c);
        }
    }
    union_find
}

/// Counts how many programs are in the same connected group as program 0.
fn solve(data: &[Program]) -> usize {
    let union_find = make_unions(data);
    data.iter().filter(|p| union_find.equiv(0, p.id)).count()
}

fn main() {
    let data = get_input();
    let result = solve(&data);
    println!("Solution: {}", result);
}
//! Minecraft World support for the Coruscant engine. //! //! Documentation content is available under CC BY-NC-SA 3.0 unless otherwise noted. pub mod level_dat; pub mod entity;
use clippy_utils::diagnostics::span_lint_and_then;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::{
    def::Res, def_id::DefId, Item, ItemKind, PolyTraitRef, PrimTy, TraitBoundModifier, Ty, TyKind,
    UseKind,
};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::Span;

use crate::utils::conf;

declare_clippy_lint! {
    /// ### What it does
    /// Denies the configured types in clippy.toml.
    ///
    /// ### Why is this bad?
    /// Some types are undesirable in certain contexts.
    ///
    /// ### Example:
    /// An example clippy.toml configuration:
    /// ```toml
    /// # clippy.toml
    /// disallowed-types = [
    ///     # Can use a string as the path of the disallowed type.
    ///     "std::collections::BTreeMap",
    ///     # Can also use an inline table with a `path` key.
    ///     { path = "std::net::TcpListener" },
    ///     # When using an inline table, can add a `reason` for why the type
    ///     # is disallowed.
    ///     { path = "std::net::Ipv4Addr", reason = "no IPv4 allowed" },
    /// ]
    /// ```
    ///
    /// ```rust,ignore
    /// use std::collections::BTreeMap;
    /// // or its use
    /// let x = std::collections::BTreeMap::new();
    /// ```
    /// Use instead:
    /// ```rust,ignore
    /// // A similar type that is allowed by the config
    /// use std::collections::HashMap;
    /// ```
    #[clippy::version = "1.55.0"]
    pub DISALLOWED_TYPE,
    nursery,
    "use of a disallowed type"
}

/// Lint pass state: the raw clippy.toml entries plus two lookup maps
/// (def-id -> optional reason, primitive type -> optional reason) that
/// are populated once per crate in `check_crate`.
#[derive(Clone, Debug)]
pub struct DisallowedType {
    conf_disallowed: Vec<conf::DisallowedType>,
    def_ids: FxHashMap<DefId, Option<String>>,
    prim_tys: FxHashMap<PrimTy, Option<String>>,
}

impl DisallowedType {
    pub fn new(conf_disallowed: Vec<conf::DisallowedType>) -> Self {
        Self {
            conf_disallowed,
            def_ids: FxHashMap::default(),
            prim_tys: FxHashMap::default(),
        }
    }

    /// Emits the lint at `span` when `res` resolves to one of the
    /// configured disallowed types (either a definition or a primitive).
    fn check_res_emit(&self, cx: &LateContext<'_>, res: &Res, span: Span) {
        match res {
            Res::Def(_, did) => {
                if let Some(reason) = self.def_ids.get(did) {
                    emit(cx, &cx.tcx.def_path_str(*did), span, reason.as_deref());
                }
            },
            Res::PrimTy(prim) => {
                if let Some(reason) = self.prim_tys.get(prim) {
                    emit(cx, prim.name_str(), span, reason.as_deref());
                }
            },
            _ => {},
        }
    }
}

impl_lint_pass!(DisallowedType => [DISALLOWED_TYPE]);

impl<'tcx> LateLintPass<'tcx> for DisallowedType {
    // Resolve the configured path strings into def-ids / primitive types
    // once, so the per-node checks below are a map lookup.
    fn check_crate(&mut self, cx: &LateContext<'_>) {
        for conf in &self.conf_disallowed {
            let (path, reason) = match conf {
                conf::DisallowedType::Simple(path) => (path, None),
                conf::DisallowedType::WithReason { path, reason } => (
                    path,
                    reason.as_ref().map(|reason| format!("{} (from clippy.toml)", reason)),
                ),
            };
            let segs: Vec<_> = path.split("::").collect();
            match clippy_utils::path_to_res(cx, &segs) {
                Res::Def(_, id) => {
                    self.def_ids.insert(id, reason);
                },
                Res::PrimTy(ty) => {
                    self.prim_tys.insert(ty, reason);
                },
                // Paths that don't resolve are silently ignored.
                _ => {},
            }
        }
    }

    // Catch `use disallowed::Type;` items.
    fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) {
        if let ItemKind::Use(path, UseKind::Single) = &item.kind {
            self.check_res_emit(cx, &path.res, item.span);
        }
    }

    // Catch the type appearing in any type position.
    fn check_ty(&mut self, cx: &LateContext<'tcx>, ty: &'tcx Ty<'tcx>) {
        if let TyKind::Path(path) = &ty.kind {
            self.check_res_emit(cx, &cx.qpath_res(path, ty.hir_id), ty.span);
        }
    }

    // Catch the type used as a trait bound.
    fn check_poly_trait_ref(&mut self, cx: &LateContext<'tcx>, poly: &'tcx PolyTraitRef<'tcx>, _: TraitBoundModifier) {
        self.check_res_emit(cx, &poly.trait_ref.path.res, poly.trait_ref.path.span);
    }
}

/// Emits the diagnostic, attaching the configured reason as a note when
/// one was given.
fn emit(cx: &LateContext<'_>, name: &str, span: Span, reason: Option<&str>) {
    span_lint_and_then(
        cx,
        DISALLOWED_TYPE,
        span,
        &format!("`{}` is not allowed according to config", name),
        |diag| {
            if let Some(reason) = reason {
                diag.note(reason);
            }
        },
    );
}
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::convert::TryFrom;
use std::sync::Arc;

use async_trait::async_trait;
use common_arrow::arrow::datatypes::Schema;
use common_arrow::arrow::io::ipc::write::common::IpcWriteOptions;
use common_arrow::arrow_flight::utils::flight_data_from_arrow_schema;
use common_arrow::arrow_flight::FlightData;
use common_exception::ErrorCode;
use common_exception::Result;
use common_meta_api::MetaApi;
use common_meta_raft_store::state_machine::AppliedState;
use common_meta_types::Cmd;
use common_meta_types::CreateDatabaseReply;
use common_meta_types::CreateTableReply;
use common_meta_types::DatabaseInfo;
use common_meta_types::MetaId;
use common_meta_types::MetaVersion;
use common_meta_types::Table;
use common_meta_types::TableInfo;
use common_planners::CreateDatabasePlan;
use common_planners::CreateTablePlan;
use common_planners::DropDatabasePlan;
use common_planners::DropTablePlan;
use common_tracing::tracing;

use crate::MetaEmbedded;

/// `MetaApi` implemented directly against the embedded state machine:
/// each write builds a raft `Cmd`, locks `self.inner`, and applies it;
/// reads lock and query the state machine directly.
#[async_trait]
impl MetaApi for MetaEmbedded {
    async fn create_database(&self, plan: CreateDatabasePlan) -> Result<CreateDatabaseReply> {
        let cmd = Cmd::CreateDatabase {
            name: plan.db.clone(),
            db: DatabaseInfo {
                // id 0 is a placeholder; the state machine assigns the real id.
                database_id: 0,
                db: plan.db.clone(),
            },
        };

        let mut sm = self.inner.lock().await;
        let res = sm.apply_cmd(&cmd).await?;

        let (prev, result) = match res {
            AppliedState::DataBase { prev, result } => (prev, result),
            _ => return Err(ErrorCode::MetaNodeInternalError("not a Database result")),
        };

        assert!(result.is_some());

        // `prev` is Some when the database already existed before this cmd.
        if prev.is_some() && !plan.if_not_exists {
            return Err(ErrorCode::DatabaseAlreadyExists(format!(
                "{} database exists",
                plan.db
            )));
        }

        Ok(CreateDatabaseReply {
            database_id: result.unwrap().1.value.database_id,
        })
    }

    async fn drop_database(&self, plan: DropDatabasePlan) -> Result<()> {
        let cmd = Cmd::DropDatabase {
            name: plan.db.clone(),
        };

        let mut sm = self.inner.lock().await;
        let res = sm.apply_cmd(&cmd).await?;

        assert!(res.result().is_none());

        // `prev` is None when the database did not exist.
        if res.prev().is_none() && !plan.if_exists {
            return Err(ErrorCode::UnknownDatabase(format!(
                "database not found: {:}",
                plan.db
            )));
        }

        Ok(())
    }

    async fn get_database(&self, db: &str) -> Result<Arc<DatabaseInfo>> {
        let sm = self.inner.lock().await;
        let res = sm
            .get_database(db)?
            .ok_or_else(|| ErrorCode::UnknownDatabase(db.to_string()))?;
        Ok(Arc::new(res.1.value))
    }

    async fn get_databases(&self) -> Result<Vec<Arc<DatabaseInfo>>> {
        let sm = self.inner.lock().await;
        let res = sm.get_databases()?;
        Ok(res
            .iter()
            .map(|(_name, db)| Arc::new(db.clone()))
            .collect::<Vec<_>>())
    }

    async fn create_table(&self, plan: CreateTablePlan) -> Result<CreateTableReply> {
        let db_name = &plan.db;
        let table_name = &plan.table;
        let if_not_exists = plan.if_not_exists;

        tracing::info!("create table: {:}: {:?}", &db_name, &table_name);

        // The table schema is stored as Arrow Flight IPC header bytes.
        let options = IpcWriteOptions::default();
        let flight_data = flight_data_from_arrow_schema(&plan.schema.to_arrow(), &options);

        let table = Table {
            // placeholder; the state machine assigns the real table id.
            table_id: 0,
            table_name: table_name.to_string(),
            database_id: 0, // this field is unused during the creation of table
            db_name: db_name.to_string(),
            schema: flight_data.data_header,
            table_engine: plan.engine.clone(),
            table_options: plan.options.clone(),
            parts: Default::default(),
        };

        let cr = Cmd::CreateTable {
            db_name: db_name.clone(),
            table_name: table_name.clone(),
            if_not_exists,
            table,
        };

        let mut sm = self.inner.lock().await;
        let res = sm.apply_cmd(&cr).await?;

        let (prev, result) = match res {
            AppliedState::Table { prev, result } => (prev, result),
            _ => {
                panic!("not Table result");
            }
        };

        assert!(result.is_some());

        if prev.is_some() && !if_not_exists {
            Err(ErrorCode::TableAlreadyExists(format!(
                "table exists: {}",
                table_name
            )))
        } else {
            Ok(CreateTableReply {
                table_id: result.unwrap().table_id,
            })
        }
    }

    async fn drop_table(&self, plan: DropTablePlan) -> Result<()> {
        let db_name = &plan.db;
        let table_name = &plan.table;
        let if_exists = plan.if_exists;

        let cr = Cmd::DropTable {
            db_name: db_name.clone(),
            table_name: table_name.clone(),
            if_exists,
        };

        let mut sm = self.inner.lock().await;
        let res = sm.apply_cmd(&cr).await?;

        assert!(res.result().is_none());

        if res.prev().is_none() && !if_exists {
            return Err(ErrorCode::UnknownTable(format!(
                "table not found: {:}",
                table_name
            )));
        }

        Ok(())
    }

    async fn get_table(&self, db: &str, table: &str) -> Result<Arc<TableInfo>> {
        let sm = self.inner.lock().await;

        // Resolve the database first: table names are only unique per database.
        let seq_db = sm.get_database(db)?.ok_or_else(|| {
            ErrorCode::UnknownDatabase(format!("get table: database not found {:}", db))
        })?;

        let dbi = seq_db.1.value;
        let db_id = dbi.database_id;

        let table_id = sm
            .table_lookup
            .get(&(db_id, table.to_string()))
            .ok_or_else(|| ErrorCode::UnknownTable(format!("table not found: {:}", table)))?;
        let table_id = *table_id;

        let res = sm.get_table(&table_id);
        let res = res.ok_or_else(|| ErrorCode::UnknownTable(table.to_string()))?;

        // Decode the stored Arrow Flight IPC header back into a schema.
        let arrow_schema = Schema::try_from(&FlightData {
            data_header: res.schema,
            ..Default::default()
        })
        .map_err(|e| ErrorCode::IllegalSchema(format!("invalid schema: {:}", e.to_string())))?;

        let rst = TableInfo {
            database_id: db_id,
            table_id: res.table_id,
            version: 0, // placeholder, not yet implemented in meta service
            db: db.to_string(),
            name: table.to_string(),
            schema: Arc::new(arrow_schema.into()),
            engine: res.table_engine.clone(),
            options: res.table_options,
        };
        Ok(Arc::new(rst))
    }

    async fn get_tables(&self, db: &str) -> Result<Vec<Arc<TableInfo>>> {
        let sm = self.inner.lock().await;
        sm.get_tables(db)
    }

    async fn get_table_by_id(
        &self,
        table_id: MetaId,
        _table_version: Option<MetaVersion>,
    ) -> Result<Arc<TableInfo>> {
        let sm = self.inner.lock().await;
        let table = sm.get_table(&table_id).ok_or_else(|| {
            ErrorCode::UnknownTable(format!("table of id {} not found", table_id))
        })?;

        let arrow_schema = Schema::try_from(&FlightData {
            data_header: table.schema,
            ..Default::default()
        })
        .map_err(|e| ErrorCode::IllegalSchema(format!("invalid schema: {:}", e.to_string())))?;

        // NOTE: lookup by id alone cannot recover the db/table names, so
        // database_id, db and name are left empty here.
        let rst = TableInfo {
            database_id: 0,
            table_id: table.table_id,
            version: 0, // placeholder, not yet implemented in meta service
            db: "".to_string(),
            name: "".to_string(),
            schema: Arc::new(arrow_schema.into()),
            engine: table.table_engine.clone(),
            options: table.table_options,
        };
        Ok(Arc::new(rst))
    }

    fn name(&self) -> String {
        "meta-embedded".to_string()
    }
}
// This code is editable and runnable!
fn main() {
    // Print one greeting per line, in declaration order.
    let greetings = ["Hello", "Hola", "こんにちは", "您好"];
    for greeting in &greetings {
        println!("{}", greeting);
    }
}
use core::cmp::Ordering;

use gc::{Gc, GcCell, Trace, Finalize, custom_trace};
use gc_derive::{Trace, Finalize};
use guvm_rs::{Value, BuiltInAsyncFunction, BuiltInSynchronousFunction, Closure};
use gc_immutable_collections::{Array, Map};

mod float;
use float::PavoFloat;
mod fun;
use fun::{Fun, SynchronousFun, AsynchronousFun, CoreFailure, CoreFuture};

/// The core value type of the VM: nil, booleans, floats, ints, arrays,
/// maps, and functions. Generic over the state/argument types of
/// synchronous (SS/DS) and asynchronous (SA/DA) built-in functions, plus
/// failure (F) and future (Fut) payload types.
#[derive(Finalize)]
pub enum V<SS, SA, DS, DA, F, Fut>
where
    SS: ValueBaseOrdered,
    DS: ValueBase,
    SA: ValueBaseOrdered,
    DA: ValueBase,
    F: 'static,
    Fut: 'static,
{
    Nil,
    Bool(bool),
    Float(PavoFloat),
    Int(i64),
    Array(Array<Self>),
    Map(Map<Self, Self>),
    Fun(Fun<SS, SA, DS, DA, F, Fut>),
}

// Manual Clone because derive would require F/Fut: Clone, which is not needed.
// NOTE(review): unlike the other impls in this file, this where-clause omits
// `F: 'static, Fut: 'static` — confirm this compiles as intended.
impl<SS, SA, DS, DA, F, Fut> Clone for V<SS, SA, DS, DA, F, Fut>
where
    SS: ValueBaseOrdered,
    DS: ValueBase,
    SA: ValueBaseOrdered,
    DA: ValueBase,
{
    fn clone(&self) -> Self {
        match self {
            V::Nil => V::Nil,
            V::Bool(b) => V::Bool(b.clone()),
            V::Float(f) => V::Float(f.clone()),
            V::Int(i) => V::Int(i.clone()),
            V::Array(a) => V::Array(a.clone()),
            V::Map(m) => V::Map(m.clone()),
            V::Fun(f) => V::Fun(f.clone()),
        }
    }
}

// Manual Trace: only the heap-holding variants (Array, Map, Fun) are marked;
// the scalar variants own no gc pointers.
unsafe impl<SS, SA, DS, DA, F, Fut> Trace for V<SS, SA, DS, DA, F, Fut>
where
    SS: ValueBaseOrdered,
    DS: ValueBase,
    SA: ValueBaseOrdered,
    DA: ValueBase,
    F: 'static,
    Fut: 'static,
{
    custom_trace!(this, {
        match this {
            V::Nil | V::Bool(_) | V::Float(_) | V::Int(_) => {}
            V::Array(a) => mark(a),
            V::Map(m) => mark(m),
            V::Fun(f) => mark(f),
        }
    });
}

impl<SS, SA, DS, DA, F, Fut> Default for V<SS, SA, DS, DA, F, Fut>
where
    SS: ValueBaseOrdered,
    DS: ValueBase,
    SA: ValueBaseOrdered,
    DA: ValueBase,
    F: 'static,
    Fut: 'static,
{
    fn default() -> Self {
        V::Nil
    }
}

// Equality/ordering are declared (required by the Value machinery) but not
// yet implemented.
impl<SS, SA, DS, DA, F, Fut> PartialEq for V<SS, SA, DS, DA, F, Fut>
where
    SS: ValueBaseOrdered,
    DS: ValueBase,
    SA: ValueBaseOrdered,
    DA: ValueBase,
    F: 'static,
    Fut: 'static,
{
    fn eq(&self, other: &Self) -> bool {
        unimplemented!()
    }
}

impl<SS, SA, DS, DA, F, Fut> Eq for V<SS, SA, DS, DA, F, Fut>
where
    SS: ValueBaseOrdered,
    DS: ValueBase,
    SA: ValueBaseOrdered,
    DA: ValueBase,
    F: 'static,
    Fut: 'static,
{}

impl<SS, SA, DS, DA, F, Fut> PartialOrd for V<SS, SA, DS, DA, F, Fut>
where
    SS: ValueBaseOrdered,
    DS: ValueBase,
    SA: ValueBaseOrdered,
    DA: ValueBase,
    F: 'static,
    Fut: 'static,
{
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        unimplemented!()
    }
}

impl<SS, SA, DS, DA, F, Fut> Ord for V<SS, SA, DS, DA, F, Fut>
where
    SS: ValueBaseOrdered,
    DS: ValueBase,
    SA: ValueBaseOrdered,
    DA: ValueBase,
    F: 'static,
    Fut: 'static,
{
    fn cmp(&self, other: &Self) -> Ordering {
        unimplemented!()
    }
}

impl<SS, SA, DS, DA, F, Fut> Value for V<SS, SA, DS, DA, F, Fut>
where
    SS: ValueBaseOrdered,
    DS: ValueBase,
    SA: ValueBaseOrdered,
    DA: ValueBase,
    F: 'static,
    Fut: 'static,
{
    type Failure = VvvmFailure<Self, F>;
    type Fut = VvvmFuture<Self, Fut>;
    type BuiltInFunction = SynchronousFun<SS, DS>;
    type BuiltInAsync = AsynchronousFun<SA, DA>;

    /// Only `nil` and `false` are falsy; every other value is truthy.
    fn truthy(&self) -> bool {
        match self {
            V::Nil | V::Bool(false) => false,
            _ => true,
        }
    }

    // The as_* accessors below all follow the same pattern: return the
    // inner function payload when the value is the matching Fun variant,
    // None otherwise.

    fn as_built_in_function(self) -> Option<Self::BuiltInFunction> {
        match self {
            V::Fun(Fun::SynchronousFunction(f)) => Some(f),
            _ => None,
        }
    }

    fn as_built_in_function_ref(&self) -> Option<&Self::BuiltInFunction> {
        match self {
            V::Fun(Fun::SynchronousFunction(f)) => Some(f),
            _ => None,
        }
    }

    fn as_built_in_function_mut(&mut self) -> Option<&mut Self::BuiltInFunction> {
        match self {
            V::Fun(Fun::SynchronousFunction(f)) => Some(f),
            _ => None,
        }
    }

    fn as_built_in_async(self) -> Option<Self::BuiltInAsync> {
        match self {
            V::Fun(Fun::AsynchronousFunction(f)) => Some(f),
            _ => None,
        }
    }

    fn as_built_in_async_ref(&self) -> Option<&Self::BuiltInAsync> {
        match self {
            V::Fun(Fun::AsynchronousFunction(f)) => Some(f),
            _ => None,
        }
    }

    fn as_built_in_async_mut(&mut self) -> Option<&mut Self::BuiltInAsync> {
        match self {
            V::Fun(Fun::AsynchronousFunction(f)) => Some(f),
            _ => None,
        }
    }

    fn new_closure(f: Closure<Self>) -> Self {
        V::Fun(Fun::Closure(f))
    }

    fn as_closure(self) -> Option<Closure<Self>> {
        match self {
            V::Fun(Fun::Closure(f)) => Some(f),
            _ => None,
        }
    }

    fn as_closure_ref(&self) -> Option<&Closure<Self>> {
        match self {
            V::Fun(Fun::Closure(f)) => Some(f),
            _ => None,
        }
    }

    fn as_closure_mut(&mut self) -> Option<&mut Closure<Self>> {
        match self {
            V::Fun(Fun::Closure(f)) => Some(f),
            _ => None,
        }
    }
}

// Convenience constructors and the partial-order helper surface; several
// are still unimplemented stubs.
impl<SS, SA, DS, DA, F, Fut> V<SS, SA, DS, DA, F, Fut>
where
    SS: ValueBaseOrdered,
    DS: ValueBase,
    SA: ValueBaseOrdered,
    DA: ValueBase,
    F: 'static,
    Fut: 'static,
{
    pub fn nil() -> Self {
        V::Nil
    }

    pub fn boo(b: bool) -> Self {
        V::Bool(b)
    }

    pub fn float(x: f64) -> Self {
        V::Float(PavoFloat(x))
    }

    pub fn int(n: i64) -> Self {
        V::Int(n)
    }

    pub fn string(s: &str) -> Self {
        unimplemented!()
    }

    pub fn ok(v: Self) -> Self {
        unimplemented!()
    }

    pub fn err(v: Self) -> Self {
        unimplemented!()
    }

    pub fn err_nil() -> Self {
        Self::err(Self::nil())
    }

    pub fn partial_compare(&self, other: &Self) -> Option<Ordering> {
        unimplemented!()
    }

    pub fn partial_lt(&self, other: &Self) -> Option<bool> {
        unimplemented!()
    }

    pub fn partial_leq(&self, other: &Self) -> Option<bool> {
        unimplemented!()
    }

    pub fn partial_eq(&self, other: &Self) -> Option<bool> {
        unimplemented!()
    }

    pub fn partial_geq(&self, other: &Self) -> Option<bool> {
        unimplemented!()
    }

    pub fn partial_gt(&self, other: &Self) -> Option<bool> {
        unimplemented!()
    }

    pub fn partial_neq(&self, other: &Self) -> Option<bool> {
        unimplemented!()
    }

    pub fn partial_greatest_lower_bound(&self, other: &Self) -> Option<Self> {
        unimplemented!()
    }

    pub fn partial_least_upper_bound(&self, other: &Self) -> Option<Self> {
        unimplemented!()
    }
}

/// Failure type: either a failure produced by the core runtime or a
/// user-supplied one.
pub enum VvvmFailure<Val, F> {
    Core(CoreFailure<Val>),
    Other(F),
}

impl<Val, F> From<CoreFailure<Val>> for VvvmFailure<Val, F> {
    fn from(f: CoreFailure<Val>) -> Self {
        VvvmFailure::Core(f)
    }
}

/// Future type: either a core runtime future or a user-supplied one.
pub enum VvvmFuture<Val, Fut> {
    Core(CoreFuture<Val>),
    Other(Fut),
}

/// Minimum capabilities required of auxiliary value state types.
pub trait ValueBase: Sized + Trace + Finalize + Clone + Default + 'static {}
/// A `ValueBase` that is additionally totally ordered (required for map keys
/// and comparisons).
pub trait ValueBaseOrdered: ValueBase + PartialEq + Eq + PartialOrd + Ord {}

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}
#[macro_use]
extern crate clap;

use std::io::{BufReader, BufWriter};
use std::io::prelude::*;
use std::fs::File;
use std::string::String;

/// Streams a FASTQ file one 4-line record at a time, trimming each read
/// at the first run of `con_a` consecutive 'A' characters. Records whose
/// remaining length is at least `read_len` are written to `outfile`, and
/// their (1-based) record number to `sfile`.
///
/// NOTE(review): `last_end` only advances on non-'A' characters, so a
/// trailing run of 'A's shorter than `con_a` is also trimmed — confirm
/// this is the intended behavior.
#[allow(non_snake_case)]
fn process_fastq(infile: &str, outfile: &str, sfile: &str, con_a: usize, read_len: usize) -> () {
    let f = File::open(infile).expect("Unable to open input file");
    let mut reader = BufReader::new(f);
    let f_out = File::create(outfile).expect("Unable to create output file");
    let s_out = File::create(sfile).expect("Unable to create survival file");
    let mut writer = BufWriter::new(f_out);
    let mut s_writer = BufWriter::new(s_out);
    // Number of complete 4-line records processed so far.
    let mut lcount = 0;

    loop {
        let mut line1 = String::new();
        let mut line2 = String::new();
        let mut line3 = String::new();
        let mut line4 = String::new();

        // Line 1: header. A zero-byte read here is normal EOF.
        let num_bytes1 = reader.read_line(&mut line1)
            .expect("Failed to read line1");
        if num_bytes1 == 0 {
            println!("End of file. Breaking the loop.");
            break;
        }
        // Lines 2-4 (sequence, separator, quality): EOF here means the
        // record is truncated, which is treated as a fatal error.
        let num_bytes2 = reader.read_line(&mut line2)
            .expect("Failed to read line2");
        if num_bytes2 == 0 {
            panic!("Incomplete file while reading line 2!");
        }
        let num_bytes3 = reader.read_line(&mut line3)
            .expect("Failed to read line3");
        if num_bytes3 == 0 {
            panic!("Incomplete file while reading line 3!");
        }
        let num_bytes4 = reader.read_line(&mut line4)
            .expect("Failed to read line4");
        if num_bytes4 == 0 {
            panic!("Incomplete file while reading line 4!");
        }
        lcount += 1;

        // Remove the rightmost newline character
        // NOTE(review): pop() removes the last char unconditionally, so a
        // final line without a trailing newline loses its last character.
        line1.pop();
        line2.pop();
        line3.pop();
        line4.pop();

        let parts_vec = line1.split_whitespace().collect::<Vec<&str>>();
        let len = line2.chars().count();

        // Scan the sequence for `con_a` consecutive 'A's.
        let mut A_count = 0;
        // We adjust last_end as one based position.
        let mut last_end = 0;
        for (i, lchar) in line2.chars().enumerate() {
            if lchar == 'A' {
                A_count += 1;
                if A_count == con_a {
                    // Time to trim
                    break;
                }
            } else {
                // Reset
                A_count = 0;
                last_end = i + 1;
            }
        }

        // Keep the record only if the trimmed read is long enough.
        if last_end >= read_len {
            let line1_fmt = get_fmt_line1(parts_vec, len, last_end);
            let line2_fmt = line2.chars().take(last_end).collect::<String>();
            let line3_fmt = line3;
            // Quality string is trimmed to the same length as the sequence.
            let line4_fmt = line4.chars().take(last_end).collect::<String>();
            writeln!(&mut writer, "{}", line1_fmt).unwrap();
            writeln!(&mut writer, "{}", line2_fmt).unwrap();
            writeln!(&mut writer, "{}", line3_fmt).unwrap();
            writeln!(&mut writer, "{}", line4_fmt).unwrap();
            // Record which input record survived (1-based record number).
            let sline: String = lcount.to_string();
            writeln!(&mut s_writer, "{}", sline).unwrap();
        }
        // Process the third
        // Progress report every million records.
        if lcount % 1000000 == 0 {
            println!("lcount: {}", lcount);
        }
    }
    println!("Total count of lines in infile: {}", lcount * 4)
}

/// Rebuilds the FASTQ header line, appending a `:trimmed_<start>_<removed>`
/// or `:untrimmed_0_0` tag to the first whitespace-separated field and
/// re-attaching the second field (if any).
fn get_fmt_line1(parts_vec: Vec<&str>, len: usize, last_end: usize) -> String {
    let out_str0;
    if len != last_end {
        out_str0 = format!("{}:trimmed_{}_{}", parts_vec[0], (last_end + 1), (len - last_end));
    } else {
        out_str0 = format!("{}:untrimmed_0_0", parts_vec[0]);
    }
    let out_str;
    if parts_vec.len() == 1 {
        out_str = out_str0;
    } else {
        out_str = format!("{} {}", out_str0, parts_vec[1]);
    }
    out_str
}

/// CLI entry point: parses flags with clap's `clap_app!` macro, derives a
/// default survival-file name from the output name, and runs the trimmer.
fn main() {
    let con_a_def = "7";
    let read_len_def = "20";
    let matches = clap_app!(myapp =>
        (about: "Does awesome things")
        (@arg Infile: -i --infile +takes_value +required "Input file")
        (@arg Outfile: -o --outfile +takes_value +required "Output file")
        (@arg Sfile: -s --Sfile +takes_value "Survival file")
        (@arg Con_A: -c --con_A +takes_value "Minimum num of consecutive A to start trimming")
        (@arg Read_len: -r --read_len +takes_value "Min read len required to keep")
    ).get_matches();

    let infile;
    if matches.is_present("Infile") {
        infile = matches.value_of("Infile").unwrap();
        println!("value of infile: {}", infile);
    } else {
        panic!("value of infile not present");
    }

    let outfile;
    if matches.is_present("Outfile") {
        outfile = matches.value_of("Outfile").unwrap();
        println!("value of outfile: {}", outfile);
    } else {
        panic!("value of outfile not present");
    }

    // Default survival file: the output name with .fastq replaced by .sfile.
    let sfile_def = str::replace(outfile, ".fastq", ".sfile");
    let sfile = matches.value_of("Sfile").unwrap_or(&sfile_def);
    println!("value of Sfile: {}", sfile);

    let con_a: usize;
    let con_a_obj = matches.value_of("Con_A").unwrap_or(con_a_def);
    con_a = con_a_obj.parse::<usize>().unwrap();
    println!("value of Con_a : {}", con_a);

    let read_len: usize;
    let read_len_obj = matches.value_of("Read_len").unwrap_or(read_len_def);
    read_len = read_len_obj.parse::<usize>().unwrap();
    println!("value of Read_len : {}", read_len);

    process_fastq(infile, outfile, &sfile, con_a, read_len)
}
// Crate/module root: re-exports the `translator` submodule (contents not
// visible from this file).
pub mod translator;
/// A 2-D point on the grid.
pub struct Point {
    x: i32,
    y: i32,
}

impl Point {
    /// Creates a point at `(x, y)`.
    pub fn new(x: i32, y: i32) -> Point {
        Point { x, y }
    }

    /// Travel time to `point` under the movement rules (one step per time
    /// unit, diagonals allowed). A diagonal step advances both axes at
    /// once, so the time is the Chebyshev distance: max(|dx|, |dy|).
    pub fn find_distance_to(&self, point: &Point) -> i32 {
        use std::cmp;

        cmp::max(i32::abs(self.x - point.x), i32::abs(self.y - point.y))
    }
}

/// Returns the minimum total time to visit all `points` in the given order.
///
/// Fix: the original iterated `0..points.len() - 1`, which underflows
/// `usize` (and panics) when `points` is empty. `windows(2)` naturally
/// yields nothing for 0 or 1 points, so those inputs now return 0.
pub fn min_time_to_visit_all_points(points: Vec<Vec<i32>>) -> i32 {
    let coordinates: Vec<Point> = points
        .iter()
        .map(|point| Point::new(point[0], point[1]))
        .collect();

    // Sum the Chebyshev distances between consecutive points.
    coordinates
        .windows(2)
        .map(|pair| pair[0].find_distance_to(&pair[1]))
        .sum()
}

#[cfg(test)]
mod min_time_to_visit_all_points_tests {
    use super::*;

    #[test]
    fn min_time_to_visit_all_points_test_one() {
        // arrange
        let test = vec![vec![1, 1], vec![3, 4], vec![-1, 0]];

        // act
        let result = min_time_to_visit_all_points(test);

        // assert
        assert_eq!(result, 7);
    }

    #[test]
    fn min_time_to_visit_all_points_test_two() {
        // arrange
        let test = vec![vec![3, 2], vec![-2, 2]];

        // act
        let result = min_time_to_visit_all_points(test);

        // assert
        assert_eq!(result, 5);
    }

    #[test]
    fn min_time_to_visit_all_points_handles_degenerate_inputs() {
        // Empty and single-point inputs require no travel.
        assert_eq!(min_time_to_visit_all_points(vec![]), 0);
        assert_eq!(min_time_to_visit_all_points(vec![vec![5, -5]]), 0);
    }
}
extern crate libc;

pub mod anoncreds;
pub mod signus;
pub mod ledger;
pub mod pool;
pub mod wallet;

// NOTE(review): c_char appears unused in this file — possibly kept for the
// FFI surface; confirm before removing.
use self::libc::{c_char};

/// Error codes returned across the FFI boundary.
///
/// Discriminants are grouped by subsystem (common = 1xx, wallet = 2xx,
/// ledger = 3xx, crypto = 4xx); variants without an explicit value
/// continue sequentially from the previous one.
#[repr(i32)]
pub enum ErrorCode {
    Success = 0,

    // Common errors

    // Caller passed invalid pool ledger handle
    CommonInvalidPoolLedgerHandle = 100,

    // Caller passed invalid wallet handle
    CommonInvalidWalletHandle,

    // Caller passed invalid value as param 1 (null, invalid json and etc..)
    CommonInvalidParam1,

    // Caller passed invalid value as param 2 (null, invalid json and etc..)
    CommonInvalidParam2,

    // Caller passed invalid value as param 3 (null, invalid json and etc..)
    CommonInvalidParam3,

    // Caller passed invalid value as param 4 (null, invalid json and etc..)
    CommonInvalidParam4,

    // Caller passed invalid value as param 5 (null, invalid json and etc..)
    CommonInvalidParam5,

    // Invalid library state was detected in runtime. It signals library bug
    CommonInvalidState,

    // Wallet errors

    // Unknown type of wallet was passed on create_wallet
    WalletUnknownTypeError = 200,

    // Attempt to register already existing wallet type
    WalletTypeAlreadyRegisteredError,

    // Requested entity id isn't present in wallet
    WalletNotFoundError,

    // Wallet files referenced in open_wallet have invalid data format
    WalletInvalidDataFormat,

    // IO error during access wallet backend
    WalletIOError,

    // Trying to use wallet with pool that has different name
    WalletIncompatiblePoolError,

    // Ledger errors

    // Pool ledger files referenced in open_pool_ledger have invalid data format
    PoolLedgerInvalidDataFormat = 300,

    // IO error during access pool ledger files
    // NOTE(review): variant name looks transposed — likely intended
    // `PoolLedgerIOError`. Renaming would break FFI consumers and shift
    // nothing else (discriminant is positional), so it is left as-is here.
    PoolILedgerOError,

    // No consensus during ledger operation
    LedgerNoConsensusError,

    // Attempt to send unknown or incomplete transaction message
    LedgerInvalidTransaction,

    // Attempt to send transaction without the necessary privileges
    LedgerSecurityError,

    // IO error during sending of ledger transactions or catchup process
    LedgerIOError,

    // Crypto errors

    // Invalid structure of any crypto primitives (keys, signatures, seeds and etc...)
    CryptoInvalidStructure = 400,

    // Unknown crypto type was requested for signing/verifying or encoding/decoding
    CryptoUnknownTypeError,

    // Revocation registry is full and creation of new registry is necessary
    CryptoRevocationRegistryFullError,

    CryptoInvalidUserRevocIndex,

    CryptoBackendError,

    AnoncredsNotIssuedError,

    // Attempt to generate master secret with duplicated name
    AnoncredsMasterSecretDuplicateNameError
}
use super::facts::Mapping;
use super::{Atom, Error, Message, PendingRequests};
use std::any::Any;
use std::collections::HashMap;
use std::io::{self, Read, Write};
use std::marker::PhantomData;
use std::net::{Shutdown, SocketAddr, TcpListener, TcpStream, ToSocketAddrs};
use std::sync::{mpsc, Arc, Mutex};
use std::thread::JoinHandle;
use std::time::Duration;

use log::*;

mod codec;
use codec::{Decoder, Encoder};

/// A bidirectional transport usable by the RPC runtime: readable, writable,
/// cloneable (for handing independent read/write/shutdown handles to separate
/// threads) and explicitly shut-downable.
pub trait Conn: Read + Write + Send + Sync + 'static {
    /// Returns an independent handle to the same underlying connection.
    fn try_clone(&self) -> io::Result<Box<Conn>>;
    /// Shuts down the read and/or write half of the connection.
    fn shutdown(&self, how: Shutdown) -> io::Result<()>;
}

// TcpStream already provides both operations; this just boxes the clone.
impl Conn for std::net::TcpStream {
    fn try_clone(&self) -> io::Result<Box<Conn>> {
        let other = self.try_clone()?;
        Ok(Box::new(other))
    }

    fn shutdown(&self, how: Shutdown) -> io::Result<()> {
        self.shutdown(how)
    }
}

/// Application-side handler for incoming requests.
///
/// Type parameters: `M` = mapping/protocol, `P` = request params atom,
/// `NP` = notification params atom, `R` = results atom, `CL` = the
/// user-facing client type built from a [`Caller`].
pub trait Handler<CL, M, P, NP, R>: Sync + Send
where
    CL: Clone,
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    /// Processes one request and produces results (or an error).
    fn handle(&self, client: Caller<M, P, NP, R>, params: P) -> Result<R, Error>;
    /// Wraps a low-level `Caller` into the user-facing client type.
    fn make_client(client: Caller<M, P, NP, R>) -> CL;
}

/// Items sent to the encoder thread: either an outgoing message, or a
/// sentinel telling the encoder loop to stop.
#[derive(Debug)]
pub enum SinkValue<M, P, NP, R>
where
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    Shutdown,
    Message(Message<M, P, NP, R>),
}

/// Handle used to issue outgoing calls over a connection.
///
/// Holds (optionally) a strong ref to the connection (so the connection is
/// shut down when the last such caller drops), a shutdown handle, the shared
/// request queue, and the sender side of the encoder-thread channel.
pub struct Caller<M, P, NP, R>
where
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    conn_ref: Option<Arc<ConnRef>>,
    shutdown_handle: Box<Conn>,
    queue: Arc<Mutex<Queue<M, P, NP, R>>>,
    sink: mpsc::Sender<SinkValue<M, P, NP, R>>,
}

impl<M, P, NP, R> Drop for Caller<M, P, NP, R>
where
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    fn drop(&mut self) {
        // Trace-level visibility only; the real cleanup happens when the
        // contained `conn_ref` Arc drops its last reference.
        debug!("Caller: dropping!");
    }
}

impl<M, P, NP, R> Clone for Caller<M, P, NP, R>
where
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    fn clone(&self) -> Self {
        Caller {
            conn_ref: self.conn_ref.clone(),
            queue: self.queue.clone(),
            sink: self.sink.clone(),
            // NOTE: try_clone can fail on OS handle duplication; this
            // panics in that case because `Clone::clone` cannot return Err.
            shutdown_handle: self.shutdown_handle.try_clone().unwrap(),
        }
    }
}

impl<M, P, NP, R> Caller<M, P, NP, R>
where
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    /// Sends a request and blocks until the matching response message arrives.
    ///
    /// Sequence matters here: (1) allocate an id under the queue lock,
    /// (2) register the in-flight request (again under the lock) BEFORE
    /// (3) handing the message to the encoder thread — otherwise a fast
    /// response could arrive with no registered receiver.
    pub fn call_raw(&self, params: P) -> Result<Message<M, P, NP, R>, Error> {
        let id = {
            let mut queue = self.queue.lock()?;
            queue.next_id()
        };
        let method = params.method();
        let m = Message::Request {
            id,
            params,
            phantom: PhantomData,
        };

        // Per-call response channel; the decode side finds it via the queue.
        let (tx, rx) = mpsc::channel::<Message<M, P, NP, R>>();
        let in_flight = InFlightRequest { method, tx };
        {
            let mut queue = self.queue.lock()?;
            queue.in_flight_requests.insert(id, in_flight);
        }

        {
            let sink = self.sink.clone();
            sink.send(SinkValue::Message(m))?;
        }

        // Blocks until the decoder (or the encoder's cancel path) replies.
        Ok(rx.recv()?)
    }

    /// Typed wrapper over [`call_raw`]: issues the request, then uses
    /// `downgrade` to extract the expected result variant.
    #[allow(clippy::needless_lifetimes)]
    pub fn call<D, RR>(&self, params: P, downgrade: D) -> Result<RR, Error>
    where
        D: Fn(R) -> Option<RR>,
    {
        match self.call_raw(params) {
            Ok(m) => match m {
                Message::Response { results, error, .. } => {
                    if let Some(error) = error {
                        Err(Error::RemoteError(error))
                    } else if let Some(results) = results {
                        // `downgrade` returning None means the peer answered
                        // with a result variant we didn't expect.
                        downgrade(results).ok_or_else(|| Error::WrongResults)
                    } else {
                        Err(Error::MissingResults)
                    }
                }
                _ => Err(Error::WrongMessageType),
            },
            Err(msg) => Err(Error::TransportError(format!("{:#?}", msg))),
        }
    }

    /// Tears down the underlying connection; errors are deliberately ignored
    /// (best-effort shutdown).
    pub fn shutdown_runtime(&self) {
        self.shutdown_handle.shutdown(Shutdown::Both).ok();
    }
}

/// Default connect timeout used by [`connect`].
pub fn default_timeout() -> Duration {
    Duration::from_secs(2)
}

// Connect to an address over TCP, then spawn a new RPC
// system with the given handler.
/// Connects over TCP with the [`default_timeout`] and spawns an RPC runtime.
pub fn connect<M, AH, A, H, CL, P, NP, R>(
    handler: AH,
    addr: A,
) -> Result<Runtime<CL, H, M, P, NP, R>, Error>
where
    AH: Into<Arc<H>>,
    A: ToSocketAddrs,
    H: Handler<CL, M, P, NP, R> + 'static,
    CL: Clone,
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    connect_timeout(handler, addr, Some(default_timeout()))
}

/// Connects over TCP (optionally with a timeout) and spawns an RPC runtime.
///
/// With a timeout, only the FIRST resolved address is tried (and resolution
/// yielding no addresses panics on `.unwrap()`); without one, the plain
/// `TcpStream::connect` multi-address behavior applies.
pub fn connect_timeout<M, AH, A, H, CL, P, NP, R>(
    handler: AH,
    addr: A,
    timeout: Option<Duration>,
) -> Result<Runtime<CL, H, M, P, NP, R>, Error>
where
    AH: Into<Arc<H>>,
    A: ToSocketAddrs,
    H: Handler<CL, M, P, NP, R> + 'static,
    CL: Clone,
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    let conn = match timeout {
        Some(timeout) => {
            let addr = addr.to_socket_addrs()?.next().unwrap();
            TcpStream::connect_timeout(&addr, timeout)
        }
        None => TcpStream::connect(addr),
    }?;
    // Low-latency small messages: disable Nagle's algorithm.
    conn.set_nodelay(true)?;
    spawn(handler, Box::new(conn))
}

/// Handle to a listening server: its accept-loop thread and bound address.
pub struct Server {
    join_handle: JoinHandle<()>,
    local_addr: SocketAddr,
}

impl Server {
    /// Blocks until the accept loop exits.
    pub fn join(self) -> Result<(), Box<dyn Any + Send + 'static>> {
        self.join_handle.join()
    }

    /// The address the listener actually bound to (useful with port 0).
    pub fn local_addr(&self) -> SocketAddr {
        self.local_addr
    }
}

/// Serves forever, accepting an unbounded number of connections.
pub fn serve<M, AH, A, H, CL, P, NP, R>(handler: AH, addr: A) -> Result<Server, Error>
where
    AH: Into<Arc<H>>,
    A: ToSocketAddrs,
    H: Handler<CL, M, P, NP, R> + 'static,
    CL: Clone,
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    serve_max_conns(handler, addr, None)
}

/// Serves exactly one connection, then stops accepting.
pub fn serve_once<M, AH, A, H, CL, P, NP, R>(handler: AH, addr: A) -> Result<Server, Error>
where
    AH: Into<Arc<H>>,
    A: ToSocketAddrs,
    H: Handler<CL, M, P, NP, R> + 'static,
    CL: Clone,
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    serve_max_conns(handler, addr, Some(1))
}

/// Binds `addr` and accepts connections on a background thread, spawning one
/// RPC runtime per connection. With `max_conns = Some(n)`, runtimes are
/// collected, accepting stops after `n`, and all runtimes are joined before
/// the thread exits; with `None` it accepts forever and tracks nothing.
pub fn serve_max_conns<M, AH, A, H, CL, P, NP, R>(
    handler: AH,
    addr: A,
    max_conns: Option<usize>,
) -> Result<Server, Error>
where
    AH: Into<Arc<H>>,
    A: ToSocketAddrs,
    H: Handler<CL, M, P, NP, R> + 'static,
    CL: Clone,
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    let handler = handler.into();
    let listener = TcpListener::bind(addr)?;
    let local_addr = listener.local_addr()?;

    let join_handle = std::thread::spawn(move || {
        let mut conn_number = 0;
        let mut incoming = listener.incoming();
        let mut runtimes = Vec::<Runtime<CL, H, M, P, NP, R>>::new();

        // NOTE(review): accept/setup failures `.unwrap()` here and kill the
        // accept thread — presumably acceptable for this crate; confirm.
        while let Some(conn) = incoming.next() {
            let conn = conn.unwrap();
            conn.set_nodelay(true).unwrap();

            let handler = handler.clone();
            // oh poor rustc you needed a little push there
            let runtime = spawn::<CL, M, Arc<H>, H, P, NP, R>(handler, Box::new(conn)).unwrap();
            conn_number += 1;

            if let Some(max_conns) = max_conns {
                // only collect runtimes if we have a maximum number of
                // connections - otherwise, it'd just grow unbounded.
                runtimes.push(runtime);
                if conn_number >= max_conns {
                    break;
                }
            }
        }

        for mut r in runtimes.drain(..) {
            r.join().unwrap();
        }
    });

    Ok(Server {
        join_handle,
        local_addr,
    })
}

/// A running RPC endpoint over one connection: the caller handle, the two
/// worker threads (encode + decode), and a shutdown handle for the socket.
pub struct Runtime<CL, H, M, P, NP, R>
where
    CL: Clone,
    M: Mapping,
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    H: Handler<CL, M, P, NP, R> + 'static,
{
    caller: Caller<M, P, NP, R>,
    encode_handle: Option<JoinHandle<()>>,
    decode_handle: Option<JoinHandle<()>>,
    shutdown_handle: Box<Conn>,
    phantom: PhantomData<(CL, H)>,
}

impl<CL, H, M, P, NP, R> Runtime<CL, H, M, P, NP, R>
where
    CL: Clone,
    M: Mapping,
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    H: Handler<CL, M, P, NP, R> + 'static,
{
    /// Waits for both worker threads to finish. `take()` makes join
    /// idempotent: a second call finds `None` and does nothing.
    pub fn join(&mut self) -> Result<(), Box<dyn Any + Send>> {
        if let Some(h) = self.encode_handle.take() {
            h.join()?;
        }
        if let Some(h) = self.decode_handle.take() {
            h.join()?;
        }
        Ok(())
    }

    /// Builds a fresh user-facing client bound to this runtime's caller.
    pub fn client(&self) -> CL {
        H::make_client(self.caller.clone())
    }

    /// Best-effort teardown of the underlying connection.
    pub fn shutdown(&self) {
        debug!("Runtime: explicit shutdown requested");
        self.shutdown_handle.shutdown(Shutdown::Both).ok();
    }
}

/// RAII-ish owner of the connection: shuts the socket down when the last
/// `Arc<ConnRef>` (held by callers) is dropped.
struct ConnRef {
    conn: Box<Conn>,
}

impl ConnRef {
    fn shutdown(&self) {
        debug!("ConnRef: dropping, shutting down connection");
        self.conn.shutdown(Shutdown::Both).ok();
    }
}

impl Drop for ConnRef {
    fn drop(&mut self) {
        self.shutdown();
    }
}

/// Wires up a full RPC runtime on an already-established connection:
/// clones the connection into read/write/shutdown handles, then starts an
/// encoder thread (drains the mpsc sink onto the wire; on exit it shuts the
/// socket and cancels all in-flight requests with a "Connection closed"
/// response) and a decoder thread (reads messages off the wire, dispatching
/// each to `handle_message` on its own thread; on EOF or decode error it
/// tells the encoder to stop and shuts the socket).
pub fn spawn<CL, M, AH, H, P, NP, R>(
    handler: AH,
    conn: Box<Conn>,
) -> Result<Runtime<CL, H, M, P, NP, R>, Error>
where
    CL: Clone,
    AH: Into<Arc<H>>,
    H: Handler<CL, M, P, NP, R> + 'static,
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    let handler = handler.into();
    let queue = Arc::new(Mutex::new(Queue::new()));

    // Separate OS-level handles so each thread/struct can shut down or use
    // the socket independently.
    let conn_ref = Arc::new(ConnRef {
        conn: conn.try_clone()?,
    });
    let shutdown_handle = conn.try_clone()?;
    let encode_shutdown_handle = conn.try_clone()?;
    let decode_shutdown_handle = conn.try_clone()?;
    let write = conn.try_clone()?;
    let read = conn;

    let mut decoder = Decoder::new(read);
    // FIXME: default is wrong here, obviously.
    let mut encoder = Encoder::new(write, M::default());

    let (tx, rx) = mpsc::channel();
    let runtime_tx = tx.clone();

    // The runtime's caller keeps the ConnRef alive; the internal caller
    // (handed to handlers) deliberately does not, so handler clones can't
    // keep a dead connection open.
    let runtime_caller = Caller::<M, P, NP, R> {
        conn_ref: Some(conn_ref.clone()),
        shutdown_handle: decode_shutdown_handle.try_clone()?,
        queue: queue.clone(),
        sink: tx.clone(),
    };
    let internal_caller = Caller::<M, P, NP, R> {
        conn_ref: None,
        shutdown_handle: decode_shutdown_handle.try_clone()?,
        queue: queue.clone(),
        sink: tx.clone(),
    };

    let encode_queue = queue.clone();
    let encode_handle = std::thread::spawn(move || {
        'relay: loop {
            match rx.recv() {
                Ok(val) => match val {
                    SinkValue::Message(m) => {
                        if let Err(e) = encoder.encode(m) {
                            debug!("Encoder thread: could not encode: {:#?}", e);
                            debug!("Encoder thread: breaking");
                            break 'relay;
                        }
                    }
                    SinkValue::Shutdown => {
                        debug!("Encoder thread: received shutdown");
                        break 'relay;
                    }
                },
                Err(e) => {
                    debug!("Encoder thread: could not receive");
                    debug!("Encoder thread: error was: {:#?}", e);
                    break 'relay;
                }
            }
        }
        debug!("Encoder thread watcher: shutting down conn");
        encode_shutdown_handle.shutdown(Shutdown::Both).ok();

        // Unblock every caller stuck in call_raw's rx.recv() with a
        // synthetic error response.
        debug!("Encoder thread watcher: cancelling pending requests");
        let queue = encode_queue.lock().unwrap();
        for (k, v) in &queue.in_flight_requests {
            v.tx.send(Message::Response {
                id: *k,
                error: Some("Connection closed".into()),
                results: None,
                phantom: PhantomData,
            })
            .ok();
        }
        debug!("Encoder thread watcher: done");
    });

    let decode_handle = std::thread::spawn(move || {
        'relay: loop {
            match decoder.decode() {
                Err(e) => {
                    debug!("Decode thread: could not decode: {:#?}", e);
                    debug!("Decode thread: breaking");
                    break 'relay;
                }
                Ok(res) => match res {
                    Some(message) => {
                        // Each inbound message is handled on its own thread
                        // so a slow handler can't stall the decode loop.
                        let handler = handler.clone();
                        let caller = internal_caller.clone();
                        std::thread::spawn(move || {
                            let res = handle_message(message, handler, caller);
                            if let Err(e) = res {
                                eprintln!("message stream error: {:#?}", e);
                            }
                        });
                    }
                    None => {
                        // Signals EOF, we're done here
                        debug!("Decode thread: received None value, end of stream");
                        debug!("Decode thread: breaking");
                        break 'relay;
                    }
                },
            }
        }
        debug!("Decoder thread watcher: sending shutdown to encode thread");
        runtime_tx.send(SinkValue::Shutdown).ok();
        debug!("Decoder thread watcher: shutting down conn");
        decode_shutdown_handle.shutdown(Shutdown::Both).ok();
        debug!("Decoder thread watcher: done");
    });

    debug!("Spawned runtime!");
    Ok(Runtime {
        caller: runtime_caller,
        shutdown_handle,
        encode_handle: Some(encode_handle),
        decode_handle: Some(decode_handle),
        phantom: PhantomData,
    })
}

/// Dispatches one inbound message: requests go to the handler (whose result
/// or error is sent back as a response); responses are routed to the matching
/// in-flight request's channel; notifications are not implemented yet.
fn handle_message<M, P, NP, R, H, CL>(
    inbound: Message<M, P, NP, R>,
    handler: Arc<H>,
    caller: Caller<M, P, NP, R>,
) -> Result<(), Error>
where
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    CL: Clone,
    H: Handler<CL, M, P, NP, R>,
    M: Mapping,
{
    match inbound {
        Message::Request { id, params, .. } => {
            let m = match handler.handle(caller.clone(), params) {
                Ok(results) => Message::Response::<M, P, NP, R> {
                    id,
                    results: Some(results),
                    error: None,
                    phantom: PhantomData,
                },
                Err(error) => Message::Response::<M, P, NP, R> {
                    id,
                    results: None,
                    error: Some(format!("internal error: {:#?}", error)),
                    phantom: PhantomData,
                },
            };
            caller.sink.send(SinkValue::Message(m))?;
        }
        Message::Response {
            id,
            error,
            results,
            ..
        } => {
            // Remove-then-send: the queue lock is released before sending,
            // keeping the critical section minimal.
            if let Some(in_flight) = {
                let mut queue = caller.queue.lock()?;
                queue.in_flight_requests.remove(&id)
            } {
                in_flight.tx.send(Message::Response {
                    id,
                    error,
                    results,
                    phantom: PhantomData,
                })?;
            }
        }
        Message::Notification { .. } => unimplemented!(),
    };
    Ok(())
}

/// A request awaiting its response: its method name (for diagnostics via
/// `PendingRequests`) and the per-call response channel.
struct InFlightRequest<M, P, NP, R>
where
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    method: &'static str,
    tx: mpsc::Sender<Message<M, P, NP, R>>,
}

/// Shared per-connection request bookkeeping: a monotonically increasing
/// request id and the map of requests still awaiting responses.
pub struct Queue<M, P, NP, R>
where
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    id: u32,
    in_flight_requests: HashMap<u32, InFlightRequest<M, P, NP, R>>,
}

impl<M, P, NP, R> Queue<M, P, NP, R>
where
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    fn new() -> Self {
        Queue {
            id: 0,
            in_flight_requests: HashMap::new(),
        }
    }

    /// Returns the current id and bumps the counter (wraps on u32 overflow
    /// only via panic in debug builds — ids are not expected to exhaust).
    fn next_id(&mut self) -> u32 {
        let res = self.id;
        self.id += 1;
        res
    }
}

impl<M, P, NP, R> PendingRequests for Queue<M, P, NP, R>
where
    P: Atom<M>,
    NP: Atom<M>,
    R: Atom<M>,
    M: Mapping,
{
    /// Looks up the method name of a still-pending request, if any.
    fn get_pending(&self, id: u32) -> Option<&'static str> {
        self.in_flight_requests.get(&id).map(|req| req.method)
    }
}
//! `Timespec` and related types, which are used by multiple public API
//! modules.

use crate::backend::c;

/// `struct timespec`
#[cfg(not(fix_y2038))]
pub type Timespec = c::timespec;

/// `struct timespec`
#[cfg(fix_y2038)]
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct Timespec {
    /// Seconds.
    pub tv_sec: Secs,

    /// Nanoseconds. Must be less than 1_000_000_000.
    pub tv_nsec: Nsecs,
}

/// A type for the `tv_sec` field of [`Timespec`].
#[cfg(not(fix_y2038))]
#[allow(deprecated)]
pub type Secs = c::time_t;

/// A type for the `tv_sec` field of [`Timespec`].
// When fixing y2038, seconds are always 64-bit regardless of platform.
#[cfg(fix_y2038)]
pub type Secs = i64;

/// A type for the `tv_nsec` field of [`Timespec`].
// x32 (x86_64 ISA with 32-bit pointers) uses a 64-bit `tv_nsec` even though
// `c_long` is 32-bit there.
#[cfg(all(libc, target_arch = "x86_64", target_pointer_width = "32"))]
pub type Nsecs = i64;

/// A type for the `tv_nsec` field of [`Timespec`].
#[cfg(all(libc, not(all(target_arch = "x86_64", target_pointer_width = "32"))))]
pub type Nsecs = c::c_long;

/// A type for the `tv_nsec` field of [`Timespec`].
#[cfg(linux_raw)]
pub type Nsecs = i64;

/// On 32-bit glibc platforms, `timespec` has anonymous padding fields, which
/// Rust doesn't support yet (see `unnamed_fields`), so we define our own
/// struct with explicit padding, with bidirectional `From` impls.
// Field ORDER is ABI here (`repr(C)`): the 32-bit `tv_nsec` sits next to
// explicit padding placed on the correct side for the target's endianness.
#[cfg(fix_y2038)]
#[repr(C)]
#[derive(Debug, Clone)]
pub(crate) struct LibcTimespec {
    pub(crate) tv_sec: Secs,

    #[cfg(target_endian = "big")]
    padding: core::mem::MaybeUninit<u32>,

    pub(crate) tv_nsec: Nsecs,

    #[cfg(target_endian = "little")]
    padding: core::mem::MaybeUninit<u32>,
}

#[cfg(fix_y2038)]
impl From<LibcTimespec> for Timespec {
    #[inline]
    fn from(t: LibcTimespec) -> Self {
        // Padding is dropped; only the meaningful fields carry over.
        Self {
            tv_sec: t.tv_sec,
            tv_nsec: t.tv_nsec,
        }
    }
}

#[cfg(fix_y2038)]
impl From<Timespec> for LibcTimespec {
    #[inline]
    fn from(t: Timespec) -> Self {
        // Padding bytes are intentionally left uninitialized; the C side
        // never reads them.
        Self {
            tv_sec: t.tv_sec,
            tv_nsec: t.tv_nsec,
            padding: core::mem::MaybeUninit::uninit(),
        }
    }
}

// Sanity-checks the platform type choices: 64-bit seconds, a nanoseconds
// field wide enough for 0..1e9, and a Timespec at least as big as (u64, u32).
#[test]
fn test_sizes() {
    assert_eq_size!(Secs, u64);
    const_assert!(core::mem::size_of::<Timespec>() >= core::mem::size_of::<(u64, u32)>());
    const_assert!(core::mem::size_of::<Nsecs>() >= 4);

    let mut t = Timespec {
        tv_sec: 0,
        tv_nsec: 0,
    };

    // `tv_nsec` needs to be able to hold nanoseconds up to a second.
    t.tv_nsec = 999_999_999_u32 as _;
    assert_eq!(t.tv_nsec as u64, 999_999_999_u64);

    // `tv_sec` needs to be able to hold more than 32-bits of seconds.
    t.tv_sec = 0x1_0000_0000_u64 as _;
    assert_eq!(t.tv_sec as u64, 0x1_0000_0000_u64);
}

// Test that our workarounds are needed.
// (If libc's time_t were already 64-bit, fix_y2038 shouldn't be set.)
#[cfg(fix_y2038)]
#[test]
#[allow(deprecated)]
fn test_fix_y2038() {
    assert_eq_size!(libc::time_t, u32);
}
pub mod tensor; pub mod tensor_ops; pub mod traits; pub mod structs;
use std; use std::cmp::max; use combat; use game; use game_item; use hero; use map; use monster; use texts; pub use cursive::{Cursive, CursiveExt}; //use cursive::theme; use cursive::event::Key; //use cursive::menu::MenuTree; use cursive::traits::*; use cursive::views::{DummyView, Dialog, LinearLayout, SelectView, ScrollView, TextView, Checkbox}; //use decorators::decorators; use loggers::{log, logger}; const CHARACTERS: [char; 48] = [ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'W', 'A', 'S', 'D', ' ', 'й', 'ф', 'ы', 'в', 'а', 'у', 'ш', ]; #[derive(Debug)] pub enum Color { Green, Black, Brown, LightGray, LightGreen, LightRed, Yellow, LightMagenta, LightCyan, LightBlue, } pub struct TTileRecord { pub C: char, pub Clr: Color, } pub const TileRecords: [TTileRecord; (map::tileLast + 1) as usize] = [ TTileRecord { C: '.', Clr: Color::Green, }, // Grass TTileRecord { C: ':', Clr: Color::Black, }, // Ground TTileRecord { C: '+', Clr: Color::Brown, }, // StairsUp TTileRecord { C: '-', Clr: Color::Brown, }, // StairsDown TTileRecord { C: ':', Clr: Color::Brown, }, // Trap TTileRecord { C: '*', Clr: Color::Brown, }, // Live TTileRecord { C: '^', Clr: Color::LightGray, }, // Tree TTileRecord { C: 'X', Clr: Color::LightGreen, }, // Stone ]; pub const MonsterRecords: [TTileRecord; monster::MaxMonsterTypes as usize] = [ TTileRecord { C: 'p', Clr: Color::LightRed, }, TTileRecord { C: '%', Clr: Color::Yellow, }, TTileRecord { C: '!', Clr: Color::LightGreen, }, TTileRecord { C: '#', Clr: Color::LightMagenta, }, TTileRecord { C: '&', Clr: Color::LightCyan, }, TTileRecord { C: 'j', Clr: Color::LightGray, }, TTileRecord { C: 'A', Clr: Color::LightBlue, }, ]; pub const ItemRecords: [TTileRecord; 5] = [ TTileRecord { C: '>', Clr: Color::LightCyan, }, TTileRecord { C: '[', Clr: Color::LightGreen, }, TTileRecord { C: '|', Clr: Color::LightCyan, }, 
TTileRecord { C: '{', Clr: Color::LightGreen, }, TTileRecord { C: 'e', Clr: Color::Black, }, ]; pub fn InitApp(app: &mut Cursive) { create_init_screen(app); } fn disable_current_shortcuts(app: &mut Cursive) { for character in CHARACTERS.iter() { app.clear_global_callbacks(*character); } use map::Direction::*; app.clear_global_callbacks(Key::Backspace); app.clear_global_callbacks(Key::Ins); app.clear_global_callbacks(Key::Esc); app.clear_global_callbacks(Key::Up); app.clear_global_callbacks(Key::Down); app.clear_global_callbacks(Key::Left); app.clear_global_callbacks(Key::Right); } fn enable_main_shortcuts(app: &mut Cursive) { disable_current_shortcuts(app); use map::Direction::*; app.add_global_callback(Key::Backspace, |a| { ClearInfo(a); }); app.add_global_callback(Key::Esc, |_| {}); app.add_global_callback(Key::Ins, take_item); app.add_global_callback('W', |a| combat::HeroShot(a, Up)); app.add_global_callback('S', |a| combat::HeroShot(a, Down)); app.add_global_callback('A', |a| combat::HeroShot(a, Left)); app.add_global_callback('D', |a| combat::HeroShot(a, Right)); app.add_global_callback('e', |a| ShowHeroSlots(a)); app.add_global_callback('i', |a| ShowHeroItems(a)); app.add_global_callback('w', |a| move_cursor(a, Up)); app.add_global_callback('s', |a| move_cursor(a, Down)); app.add_global_callback('a', |a| move_cursor(a, Left)); app.add_global_callback('d', |a| move_cursor(a, Right)); // Special for Russian keyboard layout. 
app.add_global_callback('у', |a| ShowHeroSlots(a)); app.add_global_callback('ш', |a| ShowHeroItems(a)); app.add_global_callback('ц', |a| move_cursor(a, Up)); app.add_global_callback('ы', |a| move_cursor(a, Down)); app.add_global_callback('ф', |a| move_cursor(a, Left)); app.add_global_callback('в', |a| move_cursor(a, Right)); } fn enable_init_shortcuts(app: &mut Cursive) { disable_current_shortcuts(app); app.add_global_callback(' ', |mut a| { game::GenerateAll(); game::StartGame(&mut a); }); app.add_global_callback(Key::Esc, |a| { a.quit(); }); } pub fn create_main_screen(app: &mut Cursive) { let mut text: String = "".to_owned(); for _ in 0..map::LOCAL_MAP_HEIGHT { for _ in 0..map::LOCAL_MAP_WIDTH { text.push_str(" "); } text.push_str("\n"); } let sep = TextView::empty() .with_name("sep") .fixed_size((1, map::LOCAL_MAP_HEIGHT)); app.add_layer( LinearLayout::horizontal() .child( TextView::new(text) .with_name("area") .fixed_size((map::LOCAL_MAP_WIDTH, map::LOCAL_MAP_HEIGHT)), ).child( LinearLayout::horizontal().child(sep).child( LinearLayout::vertical() .child( LinearLayout::horizontal() .child( TextView::empty() .center() .with_name("minimap") .fixed_size((12, 5)), ).child( TextView::empty() .center() .with_name("compass") .fixed_size((9, 5)), ), ).child(TextView::empty().with_name("sep1").fixed_size((9, 1))) .child(ScrollView::new(LinearLayout::vertical() .child(TextView::empty().with_name("info") )).with_name("history").fixed_size((24, 20))).child(TextView::empty().with_name("sep2").fixed_size((9, 1))) .child( TextView::empty() .with_name("hero_info") .fixed_size((24, map::LOCAL_MAP_HEIGHT - 5 - 1 - 20 - 1 - 9)), ).child( Dialog::around(TextView::new(texts::HELP_EXIT_DIALOG)) .button("Help", |a| a.add_layer(Dialog::info(texts::help()))) .button("Quit", |mut a| { a.pop_layer(); create_init_screen(&mut a); }).with_name("exit") .fixed_size((24, 9)), ), ), ), ); ShowMinimap(app); app.find_name::<TextView>("compass") .unwrap() .set_content(" N \n \n W @ O \n \n S "); 
app.find_name::<TextView>("sep1") .unwrap() .set_content("________________________"); app.find_name::<TextView>("sep2") .unwrap() .set_content("________________________"); for _ in 0..map::LOCAL_MAP_HEIGHT { app.find_name::<TextView>("sep").unwrap().append("|\n"); } disable_current_shortcuts(app); enable_main_shortcuts(app); } fn create_init_screen(app: &mut Cursive) { let (width, height) = (70, 18); app.add_layer( Dialog::around( LinearLayout::vertical() .child(TextView::empty().with_name("top").fixed_size((width, height))) .child( TextView::new(texts::INIT_DIALOG) .center() .fixed_size((width, 4)), ).child( TextView::empty() .with_name("bottom") .fixed_size((width, height)), ).fixed_size((width, height * 2 + 4)), ).title("THE GAME") .button("Start", |mut a| { game::GenerateAll(); game::StartGame(&mut a); }).button("Quit", |a| a.quit()) .with_name("init"), ); let top = &mut *app.find_name::<TextView>("top").unwrap(); let bottom = &mut *app.find_name::<TextView>("bottom").unwrap(); for _ in 0..width * height { top.append(["^", ":", "."][map::random(0, 3) as usize]); } for _ in 0..width * height { bottom.append(["^", ":", "."][map::random(0, 3) as usize]); } disable_current_shortcuts(app); enable_init_shortcuts(app); } fn create_slots_screen(app: &mut Cursive) { disable_current_shortcuts(app); let hero = get_ref_curhero!(); let mut list: SelectView<usize> = SelectView::new(); for i in 0..hero::MaxSlots { let mut character: char; if i < 10 { character = i.to_string().chars().next().unwrap(); } else if i > 9 && i < 38 { character = CHARACTERS[i]; } else { panic!("Too many slots: {:?}!", i); } list.add_item(format!("[{}] {}", character, match hero.Slots[i] { None => texts::STR_EMPTY_ITEM.to_string(), Some(item) => game_item::GetItemName(item), }), i); app.add_global_callback(character, move |a| { move_slot_to_items(a, i); }); } list.set_on_submit(|a, i| { move_slot_to_items(a, *i); }); app.add_layer(Dialog::new().button("Back", |a| { a.pop_layer(); 
disable_current_shortcuts(a); enable_main_shortcuts(a); }) .title(texts::STR_HERO_SLOTITEMS) .content(LinearLayout::vertical() .child(TextView::new("\n")) .child(list.with_name("slots_list")) .child(TextView::new(format!("\n{}", texts::STR_HERO_SLOTINFO)))) ); app.add_global_callback('q', |a| { a.pop_layer(); disable_current_shortcuts(a); enable_main_shortcuts(a); }) } fn create_items_screen(app: &mut Cursive) { disable_current_shortcuts(app); let hero = get_ref_curhero!(); let mut list: SelectView<usize> = SelectView::new(); for i in 0..hero::MaxHeroItems { let mut character: char; if i < 10 { character = i.to_string().chars().next().unwrap(); } else if i > 9 && i < 38 { character = CHARACTERS[i]; } else { panic!("Too many items: {:?}!", i); } list.add_item(format!("[{}] {}", character, match hero.Items[i] { None => texts::STR_EMPTY_ITEM.to_string(), Some(item) => game_item::GetItemName(item), }), i); app.add_global_callback(character, move |a| { move_item_to_slots(a, i); }); } list.set_on_submit(|a, i| { move_item_to_slots(a, *i); }); app.add_layer(Dialog::new().button("Back", |a| { a.pop_layer(); disable_current_shortcuts(a); enable_main_shortcuts(a); }) .title(texts::STR_HERO_ITEMS) .content(LinearLayout::vertical() .child(TextView::new("\n")) .child(list.with_name("items_list")) .child(TextView::new(format!("\n{}", texts::STR_HERO_ITEMINFO)))) //.with_name("d") ); app.add_global_callback('q', |a| { a.pop_layer(); disable_current_shortcuts(a); enable_main_shortcuts(a); }); app.add_global_callback(Key::Backspace, throw_item); } fn move_slot_to_items(app: &mut Cursive, index: usize) { let hero = get_mut_ref_curhero!(); let free_bag_index: Option<usize> = hero::GetFreeBag(hero); let slot: Option<game_item::TGameItem> = hero.Slots[index as usize]; if slot.is_some() && free_bag_index.is_some() { hero.Items[free_bag_index.unwrap()] = slot; hero.Slots[index as usize] = None; ShowInfo(app, texts::STR_MOVE_SLOT_TO_ITEMS.to_owned() + slot.unwrap().Name); } 
app.pop_layer(); create_slots_screen(app); } fn move_item_to_slots(app: &mut Cursive, index: usize) { let hero = get_mut_ref_curhero!(); let item: Option<game_item::TGameItem> = hero.Items[index as usize]; if item.is_some() { let free_slot_index: Option<usize> = hero::GetFreeSlot(hero, item.unwrap()); if free_slot_index.is_some() { hero.Slots[free_slot_index.unwrap()] = item; hero.Items[index as usize] = None; ShowInfo(app, texts::STR_MOVE_ITEM_TO_SLOTS.to_owned() + item.unwrap().Name); } } app.pop_layer(); create_items_screen(app); } fn throw_item(app: &mut Cursive) { use game_item::ITEMS; let selected_id = app.find_name::<SelectView<usize>>("items_list").unwrap().selected_id(); let i = game_item::GetFreeItemNum(); if let Some(i) = i { let mut curhero = get_mut_ref_curhero!(); unsafe { let item = curhero.Items[selected_id.unwrap()]; if item.is_none() { return; } let item = item.unwrap(); let (x, y) = (curhero.x, curhero.y); for i in ITEMS.iter() { if let Some(itm) = i { if itm.x == x && itm.y == y { ShowInfo(app, texts::STR_CANNOT_THROW_ITEM.to_owned()); return; } } } ITEMS[i] = Some(game_item::TGameItem { ID: item.ID, x: curhero.x, y: curhero.y, IType: item.IType, Name: item.Name, Ints: item.Ints, Reals: item.Reals, IsVisible: item.IsVisible, }); ShowInfo(app, format!("{}: {}", texts::STR_YOU_THROW_ITEM, item.Name)); } curhero.Items[selected_id.unwrap()] = None; app.pop_layer(); create_items_screen(app); } } fn take_item(app: &mut Cursive) { use game_item::ITEMS; let curhero = get_mut_ref_curhero!(); let index = hero::GetFreeItem(curhero); if index.is_none() { return; } unsafe { for (n, i) in ITEMS.iter().enumerate() { if let Some(mut itm) = i { if itm.x == curhero.x && itm.y == curhero.y { match itm.IType { // if the item is bow game_item::TGameItemType::ItemRangedWeapon => { let mut items = Vec::new(); for i in 0..hero::MaxHeroItems { if curhero.Items[i].is_some() && curhero.Items[i].unwrap().IType == game_item::TGameItemType::ItemAmmo { 
items.push(curhero.Items[i].unwrap()); curhero.Items[i] = None; } } let n = itm.Ints[game_item::intAmmo].unwrap() + items.iter().map(|ref i| i.Ints[game_item::intAmmo].unwrap()).sum::<usize>(); let index = hero::GetFreeItem(curhero).unwrap(); curhero.Items[index] = Some(game_item::TGameItem { ID: 5, x: 0, y: 0, IType: game_item::TGameItemType::ItemRangedWeapon, Name: texts::STR_CROSS, Ints: [ Some(n), Some(5), Some(1), Some(4), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], Reals: [None; game_item::MaxRealInt], IsVisible: false, }); ShowInfo(app, texts::STR_TAKED_BOW.to_string()); } // if the item is ammo game_item::TGameItemType::ItemAmmo => { // if the hero has a bow in his hands if let Some(mut hero_item) = curhero.Slots[hero::slotHands] { // if this weapon is a bow if hero_item.IType == game_item::TGameItemType::ItemRangedWeapon { if let Some(n) = hero_item.Ints[game_item::intRangedAmmo] { // if the bow has an arrows curhero.Slots[hero::slotHands] = Some(game_item::TGameItem { ID: 5, x: 0, y: 0, IType: game_item::TGameItemType::ItemRangedWeapon, Name: texts::STR_CROSS, Ints: [ Some(n + itm.Ints[game_item::intAmmo].unwrap()), Some(5), Some(1), Some(4), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], Reals: [None; game_item::MaxRealInt], IsVisible: false, }); } else { // if the bow hasn't any arrows curhero.Slots[hero::slotHands] = Some(game_item::TGameItem { ID: 5, x: 0, y: 0, IType: game_item::TGameItemType::ItemRangedWeapon, Name: texts::STR_CROSS, Ints: itm.Ints, Reals: [None; game_item::MaxRealInt], IsVisible: false, }); } ShowInfo(app, itm.Ints[game_item::intAmmo].unwrap().to_string() + texts::STR_N_TAKED_ARROWS); } } else if let Some(idx) = { // else if the hero hasn't a bow in his hands, but he has a bow in his `Items` let mut idx = None; for i in 0..hero::MaxHeroItems { if curhero.Items[i].is_some() && curhero.Items[i].unwrap().IType == 
game_item::TGameItemType::ItemRangedWeapon { idx = Some(i); break; } } idx } { // if the bow has an arrows if let Some(n) = curhero.Items[idx].unwrap().Ints[game_item::intRangedAmmo] { curhero.Items[idx] = Some(game_item::TGameItem { ID: 5, x: 0, y: 0, IType: game_item::TGameItemType::ItemRangedWeapon, Name: texts::STR_CROSS, Ints: [ Some(n + itm.Ints[game_item::intAmmo].unwrap()), Some(5), Some(1), Some(4), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], Reals: [None; game_item::MaxRealInt], IsVisible: false, }); } else { // else if the bow hasn't any arrows curhero.Items[idx] = Some(game_item::TGameItem { ID: 5, x: 0, y: 0, IType: game_item::TGameItemType::ItemRangedWeapon, Name: texts::STR_CROSS, Ints: itm.Ints, Reals: [None; game_item::MaxRealInt], IsVisible: false, }); } ShowInfo(app, itm.Ints[game_item::intAmmo].unwrap().to_string() + texts::STR_N_TAKED_ARROWS); } else { // else if the hero hasn't any bow in `Items` let mut flag = false; for it in 0..hero::MaxHeroItems { if curhero.Items[it].is_some() && curhero.Items[it].unwrap().IType == game_item::TGameItemType::ItemAmmo { curhero.Items[it] = Some(game_item::TGameItem { ID: 4, x: 0, y: 0, IType: game_item::TGameItemType::ItemAmmo, Name: texts::STR_AMMO, Ints: [ Some(curhero.Items[it].unwrap().Ints[game_item::intAmmo].unwrap() + i.unwrap().Ints[game_item::intAmmo].unwrap()), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ], Reals: [None; game_item::MaxRealInt], IsVisible: false, }); flag = true; break; } } if !flag { curhero.Items[index.unwrap()] = *i; } ShowInfo(app, itm.Ints[game_item::intAmmo].unwrap().to_string() + texts::STR_N_TAKED_ARROWS); } } _ => { // if the item isn't ammo curhero.Items[index.unwrap()] = *i; ShowInfo(app, format!("{}: {}", texts::STR_TAKED_ITEM, itm.Name)); } } ITEMS[n] = None; break; } } } } } pub fn VideoInitialize() {} pub fn PrepareMap() {} pub fn ShowCell(app: 
&mut Cursive, t: &map::TMapCell, x: usize, y: usize) {
    // Tail of a cell-drawing function whose signature starts above this chunk.
    // Looks up the tile's display char and patches it into the "area" TextView
    // at the position derived from local-map coordinates.
    let c = TileRecords[t.Tile as usize].C;
    let mut text: String = app
        .find_name::<TextView>("area")
        .unwrap()
        .get_content()
        .source()
        .to_owned();
    let cur_map = get_ref_curmap!();
    // One row of the view is LOCAL_MAP_WIDTH chars plus the trailing '\n',
    // hence the (width + 1) stride.
    let index = (map::LOCAL_MAP_WIDTH + 1) * (y - cur_map.LocalMapTop) + (x - cur_map.LocalMapLeft);
    text.remove(index);
    text.insert(index, if t.IsVisible { c } else { ' ' });
    app.find_name::<TextView>("area").unwrap().set_content(text);
}

/// Draws a game item's display character into the "area" view at the item's
/// on-screen position. Items scrolled off the top/left edge are skipped.
/// NOTE(review): no check against the right/bottom edges — presumably callers
/// guarantee the item is inside the visible window; confirm.
pub fn ShowItem(app: &mut Cursive, itm: &game_item::TGameItem) {
    use game_item::TGameItemType::*;
    let mut text: String = app
        .find_name::<TextView>("area")
        .unwrap()
        .get_content()
        .source()
        .to_owned();
    let cur_map = get_ref_curmap!();
    if itm.y < cur_map.LocalMapTop || itm.x < cur_map.LocalMapLeft {
        return;
    }
    let index = (map::LOCAL_MAP_WIDTH + 1) * (itm.y - cur_map.LocalMapTop)
        + (itm.x - cur_map.LocalMapLeft);
    text.remove(index);
    text.insert(
        index,
        // Map the item kind to its row in ItemRecords and take its glyph.
        ItemRecords[match itm.IType {
            ItemHandWeapon => 0,
            ItemArmor => 1,
            ItemAmmo => 2,
            ItemRangedWeapon => 3,
        } as usize]
            .C,
    );
    app.find_name::<TextView>("area").unwrap().set_content(text);
}

/// Draws the hero glyph '@' into the "area" view at the hero's on-screen
/// position. Reads the global HEROES table (unsafe static).
pub fn ShowHero(app: &mut Cursive, HeroNum: usize) {
    let mut text: String = app
        .find_name::<TextView>("area")
        .unwrap()
        .get_content()
        .source()
        .to_owned();
    let cur_map = get_ref_curmap!();
    let h = unsafe { &hero::HEROES[HeroNum] };
    if h.y < cur_map.LocalMapTop || h.x < cur_map.LocalMapLeft {
        return;
    }
    //let index = unsafe { ((map::LOCAL_MAP_WIDTH + 1) * CURSOR.y + CURSOR.x) as usize };
    let index = (map::LOCAL_MAP_WIDTH + 1) * (h.y - cur_map.LocalMapTop)
        + (h.x - cur_map.LocalMapLeft);
    text.remove(index);
    text.insert(index, '@');
    app.find_name::<TextView>("area").unwrap().set_content(text);
}

/// Rebuilds the "hero_info" panel text (race, class, level, exp, HP, position)
/// for the given hero. Panics on an unknown race/class value.
pub fn ShowHeroInfo(app: &mut Cursive, HeroNum: usize) {
    let hero: &hero::THero = get_ref_curhero!(HeroNum);
    let info = texts::STR_HERO_RACE.to_owned()
        + match hero.Race {
            hero::raceHuman => "Human",
            hero::raceElf => "Elf",
            hero::raceDwarf => "Dwarf",
            hero::raceHobbit => "Hobbit",
            _ => panic!("Error in `ShowHeroInfo`"),
        }
        + "\n"
        + &texts::STR_HERO_CLASS.to_owned()
        + match hero.Class {
            hero::classWarrior => "Warrior",
            hero::classArcher => "Archer",
            hero::classWizard => "Wizard",
            _ => panic!("Error in `ShowHeroInfo`"),
        }
        + "\n"
        + &texts::STR_HERO_LEVEL.to_owned()
        + &hero.Level.to_string()
        + "\n"
        + &texts::STR_HERO_EXP.to_owned()
        + &hero.Exp.to_string()
        + "\n"
        + &texts::STR_HERO_HP.to_owned()
        + &hero.HP.to_string()
        + "/"
        + &hero.MaxHP.to_string()
        + "\n"
        + &texts::STR_HERO_XY.to_owned()
        + &hero.x.to_string()
        + ", "
        + &hero.y.to_string();
    app.find_name::<TextView>("hero_info").unwrap().set_content(
        info
    );
}

/// Opens the inventory screen, then redraws the game.
fn ShowHeroItems(app: &mut Cursive) {
    create_items_screen(app);
    game::ShowGame(app);
}

/// Opens the equipment-slots screen, then redraws the game.
fn ShowHeroSlots(app: &mut Cursive) {
    create_slots_screen(app);
    game::ShowGame(app);
}

/// Draws a monster's glyph into the "area" view at its on-screen position.
/// NOTE(review): unlike ShowItem/ShowHero there is no off-screen guard here —
/// confirm callers only pass visible monsters.
pub fn ShowMonster(app: &mut Cursive, m: &monster::TMonster) {
    let mut text: String = app
        .find_name::<TextView>("area")
        .unwrap()
        .get_content()
        .source()
        .to_owned();
    let cur_map = get_ref_curmap!();
    let index = ((map::LOCAL_MAP_WIDTH + 1) * (m.y - cur_map.LocalMapTop)
        + (m.x - cur_map.LocalMapLeft)) as usize;
    text.remove(index);
    text.insert(index, MonsterRecords[m.ID as usize].C);
    app.find_name::<TextView>("area").unwrap().set_content(text);
}

/// Appends a "- message" line to the "info" log view; once the log exceeds
/// 1024 bytes the oldest line is dropped. Also scrolls history to the bottom.
pub fn ShowInfo(app: &mut Cursive, text: String) {
    let mut old_text = app
        .find_name::<TextView>("info")
        .unwrap()
        .get_content()
        .source()
        .to_string();
    if old_text.len() > 1024 {
        // Drop everything up to and including the first newline.
        old_text = old_text.splitn(2, "\n").collect::<Vec<_>>()[1].into()
    };
    app.find_name::<TextView>("info").unwrap().set_content(
        old_text + "\n- " + &text
    );
    app.find_name::<ScrollView<LinearLayout>>("history").unwrap().scroll_to_bottom();
}

/// Clears the "info" log view.
pub fn ClearInfo(app: &mut Cursive) {
    app.find_name::<TextView>("info").unwrap().set_content("");
}

/// Renders a 3x3 minimap and marks with '@' the third of the world map the
/// current hero is in. Each minimap row is 5 chars + '\n' (stride 6).
pub fn ShowMinimap(app: &mut Cursive) {
    let mut text: String = "_|_|_\n_|_|_\n | | ".to_owned();
    let hero: &hero::THero = get_ref_curhero!();
    // Column offset within a row: 0, 2 or 4 (cells are separated by '|').
    let x = if hero.x < map::MAP_WIDTH / 3 {
        0
    } else if hero.x < 2 * map::MAP_WIDTH / 3 {
        2
    } else {
        4
    };
    let y = if hero.y < map::MAP_HEIGHT / 3 {
        0
    } else if hero.y < 2 * map::MAP_HEIGHT / 3 {
        1
    } else {
        2
    };
    text.remove(6 * y + x);
    text.insert(6 * y + x, '@');
    app.find_name::<TextView>("minimap")
        .unwrap()
        .set_content(text);
}

/// Updates the "compass" widget: blanks all four direction markers (at fixed
/// byte offsets 14/22/26/34), then draws the marker for `direction`.
fn ShowCompassInfo(app: &mut Cursive, direction: map::Direction) {
    use map::Direction::*;
    let mut text = app
        .find_name::<TextView>("compass")
        .unwrap()
        .get_content()
        .source()
        .to_owned();
    text.remove(22);
    text.insert(22, ' ');
    text.remove(26);
    text.insert(26, ' ');
    text.remove(14);
    text.insert(14, ' ');
    text.remove(34);
    text.insert(34, ' ');
    match direction {
        Left => {
            text.remove(22);
            text.insert(22, '<');
        }
        Right => {
            text.remove(26);
            text.insert(26, '>');
        }
        Up => {
            text.remove(14);
            text.insert(14, '^');
        }
        Down => {
            text.remove(34);
            text.insert(34, 'v');
        }
    }
    app.find_name::<TextView>("compass")
        .unwrap()
        .set_content(text);
}

//------------------------------------------------------------------------------

/// On-screen cursor position in local-map coordinates.
pub struct Cursor {
    pub x: usize,
    pub y: usize,
}

// Global cursor; accessed only through `unsafe` blocks below.
pub static mut CURSOR: Cursor = Cursor { x: 0, y: 0 };

/// Moves the hero (and cursor) one step in `direction`, handling blocked
/// tiles, monster combat, traps, healing tiles, compass updates, map
/// scrolling and the monsters' turn. The statement order inside is
/// load-bearing (see the in-line notes).
fn move_cursor(mut app: &mut Cursive, direction: map::Direction) {
    use map::Direction::*;
    unsafe {
        // Step delta, clamped so the cursor stays inside the local map.
        let (mut dx, mut dy) = (0i32, 0i32);
        match direction {
            Up => {
                dy = if CURSOR.y > 0 { -1 } else { 0 };
            }
            Down => {
                dy = if CURSOR.y < map::LOCAL_MAP_HEIGHT - 1 { 1 } else { 0 };
            }
            Left => {
                dx = if CURSOR.x > 0 { -1 } else { 0 };
            }
            Right => {
                dx = if CURSOR.x < map::LOCAL_MAP_WIDTH - 1 { 1 } else { 0 };
            }
        }
        //ShowInfo(app, format!("{:?}, {:?}", dx, dy));
        let cur_map = get_ref_curmap_wo_unsafe!();
        let hero: &mut hero::THero = get_mut_ref_curhero_wo_unsafe!(hero::CUR_HERO);
        // If hero died to stop his moving at all.
        if hero.HP <= 0 {
            PostMortem(app);
            return;
        }
        // Blocked tile: no movement at all.
        if !map::FreeTile(
            &cur_map.Cells[(hero.x as i32 + dx) as usize][(hero.y as i32 + dy) as usize].Tile,
        ) {
            return;
        }
        // battle with monster
        let mnstr = monster::IsMonsterOnTile((hero.x as i32 + dx) as usize, (hero.y as i32 + dy) as usize);
        if mnstr.is_some() {
            combat::HeroAttack(app, hero, mnstr.unwrap());
            monster::MonstersStep(app);
            combat::MonstersAttack(app);
            // Don't change an order of operations!
            //hero::SetHeroVisible(unsafe { hero::CUR_HERO });
            //game::ShowGame(app);
            return;
        }
        //
        let (prev_x, prev_y) = (CURSOR.x, CURSOR.y);
        // Apply the delta; negative deltas go through an i32 round-trip
        // because CURSOR/hero coordinates are usize.
        if dx >= 0 {
            CURSOR.x += dx as usize;
            hero.x += dx as usize;
        } else {
            CURSOR.x = (CURSOR.x as i32 + dx) as usize;
            hero.x = (hero.x as i32 + dx) as usize;
        }
        if dy >= 0 {
            CURSOR.y += dy as usize;
            hero.y += dy as usize;
        } else {
            CURSOR.y = (CURSOR.y as i32 + dy) as usize;
            hero.y = (hero.y as i32 + dy) as usize;
        }
        //ShowInfo(&mut app, CURSOR.x.to_string() + "-" + &CURSOR.y.to_string());
        // Only run tile effects / world turn if we actually moved.
        if prev_x != CURSOR.x || prev_y != CURSOR.y {
            let cur_cell = get_mut_ref_cell_wo_unsafe!(hero.x, hero.y);
            // Trap tiles: disarmed on a successful skill test, otherwise deal
            // random damage and the trap is consumed (tile becomes grass).
            for trap in map::TrapTileSet.iter() {
                if &cur_cell.Tile == trap {
                    cur_cell.Tile = map::tileGrass;
                    if !hero::SkillTest(app, hero, hero::skillTrapSearch) {
                        let dam = map::random(1, hero.MaxHP as usize); //f32::round(hero.MaxHP * 1.1)) + 1;
                        ShowInfo(
                            app,
                            String::from(texts::STR_TRAP) + "(-" + &dam.to_string() + " points)",
                        );
                        hero::DecHP(app, hero, dam);
                    }
                }
            }
            // Healing ("live") tiles: restore HP up to MaxHP.
            for live in map::LiveTileSet.iter() {
                if &cur_cell.Tile == live {
                    ShowInfo(app, String::from(texts::STR_LIVE));
                    let inc = hero.MaxHP;
                    hero::IncHP(hero, inc, false);
                }
            }
            match dx {
                1 => ShowCompassInfo(app, Right),
                -1 => ShowCompassInfo(app, Left),
                0 | _ => (),
            }
            match dy {
                1 => ShowCompassInfo(app, Down),
                -1 => ShowCompassInfo(app, Up),
                0 | _ => (),
            }
            // Scroll the visible window when the hero nears an edge.
            if hero.x - cur_map.LocalMapLeft < map::SCROLL_DELTA {
                map::ScrollMap(Left);
            } else if hero.x - cur_map.LocalMapLeft + map::SCROLL_DELTA >= map::LOCAL_MAP_WIDTH {
                map::ScrollMap(Right);
            }
            if hero.y - cur_map.LocalMapTop < map::SCROLL_DELTA {
                map::ScrollMap(Up);
            } else if hero.y - cur_map.LocalMapTop + map::SCROLL_DELTA >= map::LOCAL_MAP_HEIGHT {
                map::ScrollMap(Down);
            }
            monster::MonstersStep(app);
            // Don't change an order of operations!
            hero::SetHeroVisible(hero::CUR_HERO);
            game::ShowGame(&mut app);
        };
    }
}

/// Shows the hero-creation dialog: two checkbox groups (race, class) that
/// behave like radio buttons via mutual on_change unchecking. The "Create"
/// button commits the selection to the current hero (archers also get a
/// starting crossbow), then closes the dialog and resumes the game.
pub fn GenerateHero(app: &mut Cursive) {
    app.add_layer(
        Dialog::new()
            .title("Select a hero")
            .button("Create", |a| {
                let curhero = get_mut_ref_curhero!();
                curhero.Race = if a.find_name::<Checkbox>("race0")
                    .unwrap()
                    .is_checked()
                {
                    hero::raceHuman
                } else if a.find_name::<Checkbox>("race1")
                    .unwrap()
                    .is_checked()
                {
                    hero::raceElf
                } else if a.find_name::<Checkbox>("race2")
                    .unwrap()
                    .is_checked()
                {
                    hero::raceDwarf
                } else if a.find_name::<Checkbox>("race3")
                    .unwrap()
                    .is_checked()
                {
                    hero::raceHobbit
                } else {
                    panic!("Error in hero creation dialog");
                };
                curhero.Class = if a.find_name::<Checkbox>("class0")
                    .unwrap()
                    .is_checked()
                {
                    hero::classWarrior
                } else if a.find_name::<Checkbox>("class1")
                    .unwrap()
                    .is_checked()
                {
                    hero::classArcher
                } else if a.find_name::<Checkbox>("class2")
                    .unwrap()
                    .is_checked()
                {
                    hero::classWizard
                } else {
                    panic!("Error in hero creation dialog");
                };
                // Archers start with a crossbow in the first item slot.
                if curhero.Class == hero::classArcher {
                    curhero.Items[0] = Some(game_item::TGameItem {
                        ID: 5,
                        x: 0,
                        y: 0,
                        IType: game_item::TGameItemType::ItemRangedWeapon,
                        Name: texts::STR_CROSS,
                        Ints: [
                            Some(100usize),
                            Some(5),
                            Some(1),
                            Some(4),
                            None, None, None, None,
                            None, None, None, None,
                            None, None, None, None,
                        ],
                        Reals: [None; game_item::MaxRealInt],
                        IsVisible: false,
                    });
                }
                ShowHeroInfo(a, unsafe { hero::CUR_HERO });
                a.pop_layer();
                game::ShowGame(a);
                enable_main_shortcuts(a);
            })
            .content(LinearLayout::vertical()
                .child(DummyView.fixed_size((45, 1)))
                .child(LinearLayout::horizontal()
                    .child(Dialog::new()
                        .title("Select a race")
                        .content(LinearLayout::vertical()
                            .child(DummyView.fixed_size((10, 1)))
                            .child(LinearLayout::horizontal()
                                .child(TextView::new(texts::RaceName[0]))
                                .child(Checkbox::new()
                                    .on_change(|a, flag| {
                                        // Checking this box unchecks the others;
                                        // unchecking it re-checks itself if no
                                        // other race is checked.
                                        if flag {
                                            a.find_name::<Checkbox>("race1")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("race2")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("race3")
                                                .unwrap()
                                                .uncheck();
                                        } else if !a.find_name::<Checkbox>("race1")
                                            .unwrap()
                                            .is_checked()
                                            && !a.find_name::<Checkbox>("race2")
                                                .unwrap()
                                                .is_checked()
                                            && !a.find_name::<Checkbox>("race3")
                                                .unwrap()
                                                .is_checked()
                                        {
                                            a.find_name::<Checkbox>("race0")
                                                .unwrap()
                                                .check();
                                        }
                                    })
                                    .with_name("race0")
                                    .fixed_size((10, 1))))
                            .child(LinearLayout::horizontal()
                                .child(TextView::new(texts::RaceName[1]))
                                .child(Checkbox::new()
                                    .on_change(|a, flag| {
                                        if flag {
                                            a.find_name::<Checkbox>("race0")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("race2")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("race3")
                                                .unwrap()
                                                .uncheck();
                                        } else if !a.find_name::<Checkbox>("race0")
                                            .unwrap()
                                            .is_checked()
                                            && !a.find_name::<Checkbox>("race2")
                                                .unwrap()
                                                .is_checked()
                                            && !a.find_name::<Checkbox>("race3")
                                                .unwrap()
                                                .is_checked()
                                        {
                                            a.find_name::<Checkbox>("race1")
                                                .unwrap()
                                                .check();
                                        }
                                    })
                                    .with_name("race1")
                                    .fixed_size((10, 1))))
                            .child(LinearLayout::horizontal()
                                .child(TextView::new(texts::RaceName[2]))
                                .child(Checkbox::new()
                                    .on_change(|a, flag| {
                                        if flag {
                                            a.find_name::<Checkbox>("race0")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("race1")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("race3")
                                                .unwrap()
                                                .uncheck();
                                        } else if !a.find_name::<Checkbox>("race0")
                                            .unwrap()
                                            .is_checked()
                                            && !a.find_name::<Checkbox>("race1")
                                                .unwrap()
                                                .is_checked()
                                            && !a.find_name::<Checkbox>("race3")
                                                .unwrap()
                                                .is_checked()
                                        {
                                            a.find_name::<Checkbox>("race2")
                                                .unwrap()
                                                .check();
                                        }
                                    })
                                    .with_name("race2")
                                    .fixed_size((10, 1))))
                            .child(LinearLayout::horizontal()
                                .child(TextView::new(texts::RaceName[3]))
                                .child(Checkbox::new()
                                    .on_change(|a, flag| {
                                        if flag {
                                            a.find_name::<Checkbox>("race0")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("race1")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("race2")
                                                .unwrap()
                                                .uncheck();
                                        } else if !a.find_name::<Checkbox>("race0")
                                            .unwrap()
                                            .is_checked()
                                            && !a.find_name::<Checkbox>("race1")
                                                .unwrap()
                                                .is_checked()
                                            && !a.find_name::<Checkbox>("race2")
                                                .unwrap()
                                                .is_checked()
                                        {
                                            a.find_name::<Checkbox>("race3")
                                                .unwrap()
                                                .check();
                                        }
                                    })
                                    .with_name("race3")
                                    .fixed_size((10, 1))))))
                    .child(DummyView.fixed_size((1, 10)))
                    .child(Dialog::new()
                        .title("Select a class")
                        .content(LinearLayout::vertical()
                            .child(DummyView.fixed_size((10, 1)))
                            .child(LinearLayout::horizontal()
                                .child(TextView::new(texts::ClassName[0]))
                                .child(Checkbox::new()
                                    .on_change(|a, flag| {
                                        if flag {
                                            a.find_name::<Checkbox>("class1")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("class2")
                                                .unwrap()
                                                .uncheck();
                                        } else if !a.find_name::<Checkbox>("class1")
                                            .unwrap()
                                            .is_checked()
                                            && !a.find_name::<Checkbox>("class2")
                                                .unwrap()
                                                .is_checked()
                                        {
                                            a.find_name::<Checkbox>("class0")
                                                .unwrap()
                                                .check();
                                        }
                                    })
                                    .with_name("class0")
                                    .fixed_size((12, 1))))
                            .child(LinearLayout::horizontal()
                                .child(TextView::new(texts::ClassName[1]))
                                .child(Checkbox::new()
                                    .on_change(|a, flag| {
                                        if flag {
                                            a.find_name::<Checkbox>("class0")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("class2")
                                                .unwrap()
                                                .uncheck();
                                        } else if !a.find_name::<Checkbox>("class0")
                                            .unwrap()
                                            .is_checked()
                                            && !a.find_name::<Checkbox>("class2")
                                                .unwrap()
                                                .is_checked()
                                        {
                                            a.find_name::<Checkbox>("class1")
                                                .unwrap()
                                                .check();
                                        }
                                    })
                                    .with_name("class1")
                                    .fixed_size((12, 1))))
                            .child(LinearLayout::horizontal()
                                .child(TextView::new(texts::ClassName[2]))
                                .child(Checkbox::new()
                                    .on_change(|a, flag| {
                                        if flag {
                                            a.find_name::<Checkbox>("class0")
                                                .unwrap()
                                                .uncheck();
                                            a.find_name::<Checkbox>("class1")
                                                .unwrap()
                                                .uncheck();
                                        } else if !a.find_name::<Checkbox>("class0")
                                            .unwrap()
                                            .is_checked()
                                            && !a.find_name::<Checkbox>("class1")
                                                .unwrap()
                                                .is_checked()
                                        {
                                            a.find_name::<Checkbox>("class2")
                                                .unwrap()
                                                .check();
                                        }
                                    })
                                    .with_name("class2")
                                    .fixed_size((12, 1)))))))
                .fixed_size((46, 10))));
    // Default selection: human warrior.
    app.find_name::<Checkbox>("race0").unwrap().check();
    app.find_name::<Checkbox>("class0").unwrap().check();
    disable_current_shortcuts(app);
}

/// Logs the hero-death message to the info view.
pub fn HeroDied(app: &mut Cursive) {
    ShowInfo(app, String::from(texts::STR_HERO_DIED));
}

/// Game-over state: disables input shortcuts and replaces the play area with
/// the game-over text.
pub fn PostMortem(app: &mut Cursive) {
    disable_current_shortcuts(app);
    app.find_name::<TextView>("area")
        .unwrap()
        .set_content(texts::STR_GAME_OVER);
    //game::ShowGame(app);
}
use std::sync::mpsc::{channel, sync_channel};
use std::sync::Mutex;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use std::error::Error;
use std::string::FromUtf8Error;

/// An owned, immutable in-memory byte payload, suitable for sending across
/// threads (it contains only owned data, so it is `Send`).
pub struct InMemoryData {
    payload: Vec<u8>,
}

impl InMemoryData {
    /// Creates an `InMemoryData` by copying the given bytes.
    pub fn from(payload: &[u8]) -> InMemoryData {
        InMemoryData {
            payload: Vec::from(payload),
        }
    }

    /// Number of bytes in the payload.
    pub fn len(&self) -> usize {
        self.payload.len()
    }

    /// Whether the payload is empty.
    ///
    /// Added alongside `len` (clippy: `len_without_is_empty`); backward
    /// compatible — purely additive.
    pub fn is_empty(&self) -> bool {
        self.payload.is_empty()
    }

    /// Consumes `self` and interprets the payload as UTF-8.
    ///
    /// # Errors
    /// Returns [`FromUtf8Error`] if the bytes are not valid UTF-8.
    pub fn payload_string(self) -> Result<String, FromUtf8Error> {
        String::from_utf8(self.payload)
    }
}

#[test]
fn channel_test() {
    let (sender, receiver) = channel();
    let message = Arc::new(Mutex::new("This is a secret message".to_string()));
    let msg_ref = message.clone();
    thread::spawn(move || {
        thread::sleep(Duration::from_millis(100));
        // Mutate the shared message *before* sending, so the receiver's
        // comparison against `msg_ref` sees the altered text.
        let mut message = message.lock().unwrap();
        *message = String::from("Altered secret message");
        let data = InMemoryData::from(message.as_bytes());
        sender.send(data).unwrap();
    });
    // recv blocks until the spawned thread has sent, so the ordering of the
    // mutation and this read is deterministic.
    let memory_message = receiver.recv().unwrap();
    assert_eq!(memory_message.payload_string().unwrap(), *msg_ref.lock().unwrap());
}
use std::collections::HashMap; use std::fmt; use std::fmt::Debug; use std::fmt::Error; use std::fmt::Formatter; pub trait Fn { fn eval(&self, Vec<&Value>) -> Value; } #[derive(Debug)] struct AddFn {} impl Fn for AddFn { fn eval(&self, values: Vec<&Value>) -> Value { let a = values[0].get_float(); let b = values[1].get_float(); println!("Add {} + {}", a, b); Value::Float(a + b) } } #[derive(Debug)] struct MakeNumbersFn {} impl Fn for MakeNumbersFn { fn eval(&self, values: Vec<&Value>) -> Value { let mut results = Vec::new(); let s = values[0].get_str(); let sep = values[1].get_str(); for part in s.split(sep) { results.push(Value::String(String::from(part))); } Value::List(results) } } #[derive(Debug)] pub enum ParameterType { Int, Float, String, Color, } #[derive(Debug)] pub enum Value { Int(i32), Float(f32), String(String), Color(f32, f32, f32, f32), List(Vec<Value>), } impl Clone for Value { fn clone(&self) -> Value { match self { &Value::Int(v) => Value::Int(v), &Value::Float(v) => Value::Float(v), &Value::String(ref v) => Value::String(v.to_string()), &Value::Color(r, g, b, a) => Value::Color(r, g, b, a), &Value::List(ref v) => Value::List(v.clone()), } } } impl Value { pub fn get_int(&self) -> i32 { match self { &Value::Int(v) => v, &Value::Float(v) => v as i32, &Value::String(_) => 0, &Value::Color(_, _, _, _) => 0, &Value::List(_) => 0, } } pub fn get_float(&self) -> f32 { match self { &Value::Int(v) => v as f32, &Value::Float(v) => v, &Value::String(_) => 0., &Value::Color(_, _, _, _) => 0., &Value::List(_) => 0., } } pub fn get_str(&self) -> &str { // We can't convert the values for strings because we're returning string references, // and there is no sensible way to set the lifetimes. 
// If you want to see the value as a string, use the display trait match self { &Value::Int(_) => "", &Value::Float(_) => "", &Value::String(ref v) => v.as_str(), &Value::Color(_, _, _, _) => "", &Value::List(_) => "[]", } } } impl fmt::Display for Value { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { &Value::Int(v) => write!(f, "{}", v), &Value::Float(v) => write!(f, "{}", v), &Value::String(ref v) => write!(f, "{}", v), &Value::Color(r, g, b, a) => write!(f, "({}, {}, {}, {})", r, g, b, a), &Value::List(_) => write!(f, "[]"), } } } #[derive(Debug)] pub struct Parameter { pub name: String, pub kind: ParameterType, pub value: Value, } pub struct Function { pub name: String, pub op: Box<Fn>, pub parameters: Vec<Parameter>, } impl Debug for Function { fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { write!(f, "Function {:?}", self.name) } } #[derive(Debug)] pub struct FunctionRegistry { pub functions: HashMap<String, Function>, } #[derive(Debug)] pub struct Node { pub name: String, pub function: String, pub values: HashMap<String, Value>, } impl PartialEq for Node { fn eq(&self, other: &Node) -> bool { self.name == other.name } } #[derive(Debug)] pub struct Connection { pub output: String, pub input: String, pub port: String, } #[derive(Debug)] pub struct Network { pub name: String, pub nodes: Vec<Node>, pub connections: Vec<Connection>, } #[derive(Debug)] pub struct Context {} impl Context { pub fn new() -> Context { Context {} } pub fn find_node<'a>(&self, network: &'a Network, name: &str) -> &'a Node { for node in &network.nodes { if node.name == name { return &node; } } panic!("No node found"); } pub fn find_connections_by_output<'a>( &self, network: &'a Network, node: &'a Node, ) -> Vec<&'a Connection> { let mut results = Vec::new(); for conn in &network.connections { if conn.output.as_str() == node.name { results.push(conn); } } results } pub fn find_connections_by_input<'a>( &self, network: &'a Network, node: &'a Node, ) -> Vec<&'a 
Connection> { let mut results = Vec::new(); for conn in &network.connections { if conn.input.as_str() == node.name { results.push(conn); } } results } pub fn find_connection_by_input<'a>( &self, network: &'a Network, node: &'a Node, param: &str, ) -> Option<&'a Connection> { for conn in &network.connections { if conn.input.as_str() == node.name && conn.port.as_str() == param { return Some(conn); } } None } pub fn network_evaluation_order<'a>( &self, network: &'a Network, node: &'a Node, order: &mut Vec<&'a Node>, ) { let conns = self.find_connections_by_input(network, node); for conn in conns { let output_node = self.find_node(network, conn.output.as_str()); println!("on {:?}", output_node); self.network_evaluation_order(network, output_node, order); } if !order.contains(&node) { order.push(node); } } pub fn render( &self, function_registry: &FunctionRegistry, network: &Network, node: &Node, ) -> Value { let mut result_map: HashMap<&str, Value> = HashMap::new(); let mut order: Vec<&Node> = Vec::new(); self.network_evaluation_order(network, node, &mut order); println!("ORDER: {:?}", order); for node in order { let result = { let function = &function_registry.functions[node.function.as_str()]; let mut arg_lists: Vec<&Value> = Vec::new(); arg_lists.reserve(function.parameters.len()); for param in &function.parameters { // Check if it's connected let conn = self.find_connection_by_input(network, node, param.name.as_str()); if conn.is_some() { let value = &result_map[conn.unwrap().output.as_str()]; arg_lists.push(value); } else { let value = match node.values.get(param.name.as_str()) { Some(v) => &v, None => &param.value, }; arg_lists.push(value); } } println!("fn {:?} args {:?}", function, arg_lists); function.op.eval(arg_lists) }; println!("Node {:?} Result {:?}", node.name, result); result_map.insert(node.name.as_str(), result); } result_map.remove(node.name.as_str()).unwrap() } } #[cfg(test)] mod tests { #[test] fn make_network() { let a_param = Parameter { name: 
String::from("a"), kind: ParameterType::Float, value: Value::Float(0.), }; let b_param = Parameter { name: String::from("b"), kind: ParameterType::Float, value: Value::Float(0.), }; let add_parameters = vec![a_param, b_param]; let add_fn = Function { name: String::from("add"), op: Box::new(AddFn {}), parameters: add_parameters, }; let s_param = Parameter { name: String::from("s"), kind: ParameterType::String, value: Value::String(String::from("1;2;3")), }; let sep_param = Parameter { name: String::from("sep"), kind: ParameterType::String, value: Value::String(String::from(";")), }; let make_numbers_parameters = vec![s_param, sep_param]; let make_numbers_fn = Function { name: String::from("make_numbers"), op: Box::new(AddFn {}), parameters: make_numbers_parameters, }; let mut function_map = HashMap::new(); function_map.insert(add_fn.name.clone(), add_fn); function_map.insert(make_numbers_fn.name.clone(), make_numbers_fn); let function_registry = FunctionRegistry { functions: function_map, }; let mut add1_vals = HashMap::new(); add1_vals.insert(String::from("a"), Value::Float(3.)); add1_vals.insert(String::from("b"), Value::Float(5.)); let add1 = Node { name: String::from("add1"), function: String::from("add"), values: add1_vals, }; println!("add1 node: {:?}", add1); let mut make_numbers1_vals = HashMap::new(); make_numbers1_vals.insert(String::from("s"), Value::String(String::from("11;22;33"))); let make_numbers1 = Node { name: String::from("make_numbers1"), function: String::from("make_numbers"), values: make_numbers1_vals, }; let c1 = Connection { output: String::from("make_numbers1"), input: String::from("add1"), port: String::from("a"), }; let net = Network { name: String::from("main"), nodes: vec![make_numbers1, add1], connections: vec![c1], }; let ctx = Context {}; let result = ctx.render(&function_registry, &net, &net.nodes[1]); let result2 = result.clone(); println!("Result: {:?}", result); println!("Result.to_string: {}", result.to_string()); } }
//! Code generation for [GraphQL scalar][1].
//!
//! [1]: https://spec.graphql.org/October2021#sec-Scalars

use proc_macro2::{Literal, TokenStream};
use quote::{format_ident, quote, ToTokens, TokenStreamExt};
use syn::{
    ext::IdentExt as _,
    parse::{Parse, ParseStream},
    parse_quote,
    spanned::Spanned as _,
    token,
    visit_mut::VisitMut,
};
use url::Url;

use crate::common::{
    filter_attrs,
    parse::{
        attr::{err, OptionExt as _},
        ParseBufferExt as _,
    },
    scalar, Description, SpanContainer,
};

pub mod attr;
pub mod derive;

/// Available arguments behind `#[graphql]`/`#[graphql_scalar]` attributes when
/// generating code for [GraphQL scalar][1].
///
/// [1]: https://spec.graphql.org/October2021#sec-Scalars
// Every argument is wrapped in a `SpanContainer` so duplicate-argument errors
// can point at the exact offending token (see `err::dup_arg` uses below).
#[derive(Debug, Default)]
struct Attr {
    /// Name of this [GraphQL scalar][1] in GraphQL schema.
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    name: Option<SpanContainer<String>>,

    /// Description of this [GraphQL scalar][1] to put into GraphQL schema.
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    description: Option<SpanContainer<Description>>,

    /// Spec [`Url`] of this [GraphQL scalar][1] to put into GraphQL schema.
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    specified_by_url: Option<SpanContainer<Url>>,

    /// Explicitly specified type (or type parameter with its bounds) of
    /// [`ScalarValue`] to use for resolving this [GraphQL scalar][1] type with.
    ///
    /// If [`None`], then generated code will be generic over any
    /// [`ScalarValue`] type, which, in turn, requires all [scalar][1] fields to
    /// be generic over any [`ScalarValue`] type too. That's why this type
    /// should be specified only if one of the variants implements
    /// [`GraphQLType`] in a non-generic way over [`ScalarValue`] type.
    ///
    /// [`GraphQLType`]: juniper::GraphQLType
    /// [`ScalarValue`]: juniper::ScalarValue
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    scalar: Option<SpanContainer<scalar::AttrValue>>,

    /// Explicitly specified function to be used as
    /// [`ToInputValue::to_input_value`] implementation.
    ///
    /// [`ToInputValue::to_input_value`]: juniper::ToInputValue::to_input_value
    to_output: Option<SpanContainer<syn::ExprPath>>,

    /// Explicitly specified function to be used as
    /// [`FromInputValue::from_input_value`] implementation.
    ///
    /// [`FromInputValue::from_input_value`]: juniper::FromInputValue::from_input_value
    from_input: Option<SpanContainer<syn::ExprPath>>,

    /// Explicitly specified resolver to be used as
    /// [`ParseScalarValue::from_str`] implementation.
    ///
    /// [`ParseScalarValue::from_str`]: juniper::ParseScalarValue::from_str
    parse_token: Option<SpanContainer<ParseToken>>,

    /// Explicitly specified module with all custom resolvers for
    /// [`Self::to_output`], [`Self::from_input`] and [`Self::parse_token`].
    with: Option<SpanContainer<syn::ExprPath>>,

    /// Explicit where clause added to [`syn::WhereClause`].
    where_clause: Option<SpanContainer<Vec<syn::WherePredicate>>>,

    /// Indicator for single-field structs allowing to delegate implmemntations
    /// of non-provided resolvers to that field.
    transparent: bool,
}

impl Parse for Attr {
    // Parses a comma-separated list of `key = value` / `key(...)` / bare-flag
    // arguments until the input buffer is exhausted. Each recognized argument
    // may appear at most once; repeats are rejected via `err::dup_arg`.
    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
        let mut out = Self::default();
        while !input.is_empty() {
            let ident = input.parse_any_ident()?;
            match ident.to_string().as_str() {
                "name" => {
                    input.parse::<token::Eq>()?;
                    let name = input.parse::<syn::LitStr>()?;
                    out.name
                        .replace(SpanContainer::new(
                            ident.span(),
                            Some(name.span()),
                            name.value(),
                        ))
                        .none_or_else(|_| err::dup_arg(&ident))?
                }
                "desc" | "description" => {
                    input.parse::<token::Eq>()?;
                    let desc = input.parse::<Description>()?;
                    out.description
                        .replace(SpanContainer::new(ident.span(), Some(desc.span()), desc))
                        .none_or_else(|_| err::dup_arg(&ident))?
                }
                "specified_by_url" => {
                    input.parse::<token::Eq>()?;
                    let lit = input.parse::<syn::LitStr>()?;
                    // The URL is validated eagerly so a bad literal is reported
                    // at its own span rather than at expansion time.
                    let url = lit.value().parse::<Url>().map_err(|err| {
                        syn::Error::new(lit.span(), format!("Invalid URL: {err}"))
                    })?;
                    out.specified_by_url
                        .replace(SpanContainer::new(ident.span(), Some(lit.span()), url))
                        .none_or_else(|_| err::dup_arg(&ident))?
                }
                "scalar" | "Scalar" | "ScalarValue" => {
                    input.parse::<token::Eq>()?;
                    let scl = input.parse::<scalar::AttrValue>()?;
                    out.scalar
                        .replace(SpanContainer::new(ident.span(), Some(scl.span()), scl))
                        .none_or_else(|_| err::dup_arg(&ident))?
                }
                "to_output_with" => {
                    input.parse::<token::Eq>()?;
                    let scl = input.parse::<syn::ExprPath>()?;
                    out.to_output
                        .replace(SpanContainer::new(ident.span(), Some(scl.span()), scl))
                        .none_or_else(|_| err::dup_arg(&ident))?
                }
                "from_input_with" => {
                    input.parse::<token::Eq>()?;
                    let scl = input.parse::<syn::ExprPath>()?;
                    out.from_input
                        .replace(SpanContainer::new(ident.span(), Some(scl.span()), scl))
                        .none_or_else(|_| err::dup_arg(&ident))?
                }
                "parse_token_with" => {
                    input.parse::<token::Eq>()?;
                    let scl = input.parse::<syn::ExprPath>()?;
                    out.parse_token
                        .replace(SpanContainer::new(
                            ident.span(),
                            Some(scl.span()),
                            ParseToken::Custom(scl),
                        ))
                        .none_or_else(|_| err::dup_arg(&ident))?
                }
                "parse_token" => {
                    // `parse_token(Type1, Type2, ...)` — delegate token parsing
                    // to the listed types; at least one type is required.
                    let types;
                    let _ = syn::parenthesized!(types in input);
                    let parsed_types =
                        types.parse_terminated::<_, token::Comma>(syn::Type::parse)?;
                    if parsed_types.is_empty() {
                        return Err(syn::Error::new(ident.span(), "expected at least 1 type."));
                    }
                    out.parse_token
                        .replace(SpanContainer::new(
                            ident.span(),
                            Some(parsed_types.span()),
                            ParseToken::Delegated(parsed_types.into_iter().collect()),
                        ))
                        .none_or_else(|_| err::dup_arg(&ident))?
                }
                "with" => {
                    input.parse::<token::Eq>()?;
                    let scl = input.parse::<syn::ExprPath>()?;
                    out.with
                        .replace(SpanContainer::new(ident.span(), Some(scl.span()), scl))
                        .none_or_else(|_| err::dup_arg(&ident))?
                }
                "where" => {
                    // `where(Pred1, Pred2, ...)` — extra where-clause
                    // predicates; at least one is required.
                    let (span, parsed_predicates) = {
                        let predicates;
                        let _ = syn::parenthesized!(predicates in input);
                        let parsed_predicates = predicates
                            .parse_terminated::<_, token::Comma>(syn::WherePredicate::parse)?;
                        if parsed_predicates.is_empty() {
                            return Err(syn::Error::new(
                                ident.span(),
                                "expected at least 1 where predicate",
                            ));
                        }
                        (
                            parsed_predicates.span(),
                            parsed_predicates.into_iter().collect(),
                        )
                    };
                    out.where_clause
                        .replace(SpanContainer::new(
                            ident.span(),
                            Some(span),
                            parsed_predicates,
                        ))
                        .none_or_else(|_| err::dup_arg(&ident))?
                }
                "transparent" => {
                    out.transparent = true;
                }
                name => {
                    return Err(err::unknown_arg(&ident, name));
                }
            }
            // Arguments are comma-separated; a trailing comma is tolerated.
            input.try_parse::<token::Comma>()?;
        }
        Ok(out)
    }
}

impl Attr {
    /// Tries to merge two [`Attr`]s into a single one, reporting about
    /// duplicates, if any.
    fn try_merge(self, mut another: Self) -> syn::Result<Self> {
        Ok(Self {
            name: try_merge_opt!(name: self, another),
            description: try_merge_opt!(description: self, another),
            specified_by_url: try_merge_opt!(specified_by_url: self, another),
            scalar: try_merge_opt!(scalar: self, another),
            to_output: try_merge_opt!(to_output: self, another),
            from_input: try_merge_opt!(from_input: self, another),
            parse_token: try_merge_opt!(parse_token: self, another),
            with: try_merge_opt!(with: self, another),
            where_clause: try_merge_opt!(where_clause: self, another),
            transparent: self.transparent || another.transparent,
        })
    }

    /// Parses [`Attr`] from the given multiple `name`d [`syn::Attribute`]s
    /// placed on a trait definition.
    // Falls back to doc-comment text for the description when no explicit
    // `description` argument was given.
    fn from_attrs(name: &str, attrs: &[syn::Attribute]) -> syn::Result<Self> {
        let mut attr = filter_attrs(name, attrs)
            .map(|attr| attr.parse_args())
            .try_fold(Self::default(), |prev, curr| prev.try_merge(curr?))?;
        if attr.description.is_none() {
            attr.description = Description::parse_from_doc_attrs(attrs)?;
        }
        Ok(attr)
    }
}

/// [`syn::Type`] in case of `#[graphql_scalar]` or [`syn::Ident`] in case of
/// `#[derive(GraphQLScalar)]`.
#[derive(Clone)]
enum TypeOrIdent {
    /// [`syn::Type`].
    Type(Box<syn::Type>),

    /// [`syn::Ident`].
    Ident(syn::Ident),
}

/// Definition of [GraphQL scalar][1] for code generation.
///
/// [1]: https://spec.graphql.org/October2021#sec-Scalars
struct Definition {
    /// Name of this [GraphQL scalar][1] in GraphQL schema.
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    name: String,

    /// [`TypeOrIdent`] of this [GraphQL scalar][1] in GraphQL schema.
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    ty: TypeOrIdent,

    /// Additional [`Self::generics`] [`syn::WhereClause`] predicates.
    where_clause: Vec<syn::WherePredicate>,

    /// Generics of the Rust type that this [GraphQL scalar][1] is implemented
    /// for.
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    generics: syn::Generics,

    /// [`Methods`] representing [GraphQL scalar][1].
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    methods: Methods,

    /// Description of this [GraphQL scalar][1] to put into GraphQL schema.
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    description: Option<Description>,

    /// Spec [`Url`] of this [GraphQL scalar][1] to put into GraphQL schema.
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    specified_by_url: Option<Url>,

    /// [`ScalarValue`] parametrization to generate [`GraphQLType`]
    /// implementation with for this [GraphQL scalar][1].
    ///
    /// [`GraphQLType`]: juniper::GraphQLType
    /// [`ScalarValue`]: juniper::ScalarValue
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    scalar: scalar::Type,
}

impl ToTokens for Definition {
    // Emits every generated trait impl for the scalar, in order, into the
    // output token stream.
    fn to_tokens(&self, into: &mut TokenStream) {
        self.impl_output_and_input_type_tokens().to_tokens(into);
        self.impl_type_tokens().to_tokens(into);
        self.impl_value_tokens().to_tokens(into);
        self.impl_value_async_tokens().to_tokens(into);
        self.impl_to_input_value_tokens().to_tokens(into);
        self.impl_from_input_value_tokens().to_tokens(into);
        self.impl_parse_scalar_value_tokens().to_tokens(into);
        self.impl_reflection_traits_tokens().to_tokens(into);
    }
}

impl Definition {
    /// Returns generated code implementing [`marker::IsInputType`] and
    /// [`marker::IsOutputType`] trait for this [GraphQL scalar][1].
    ///
    /// [`marker::IsInputType`]: juniper::marker::IsInputType
    /// [`marker::IsOutputType`]: juniper::marker::IsOutputType
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    #[must_use]
    fn impl_output_and_input_type_tokens(&self) -> TokenStream {
        let scalar = &self.scalar;

        let (ty, generics) = self.impl_self_and_generics(false);
        let (impl_gens, _, where_clause) = generics.split_for_impl();

        quote! {
            #[automatically_derived]
            impl #impl_gens ::juniper::marker::IsInputType<#scalar> for #ty
                #where_clause { }

            #[automatically_derived]
            impl #impl_gens ::juniper::marker::IsOutputType<#scalar> for #ty
                #where_clause { }
        }
    }

    /// Returns generated code implementing [`GraphQLType`] trait for this
    /// [GraphQL scalar][1].
    ///
    /// [`GraphQLType`]: juniper::GraphQLType
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    fn impl_type_tokens(&self) -> TokenStream {
        let scalar = &self.scalar;
        let name = &self.name;
        let description = &self.description;

        // `specified_by_url` is optional: only emit the builder call when an
        // URL was provided in the attribute.
        let specified_by_url = self.specified_by_url.as_ref().map(|url| {
            let url_lit = url.as_str();
            quote! { .specified_by_url(#url_lit) }
        });

        let (ty, generics) = self.impl_self_and_generics(false);
        let (impl_gens, _, where_clause) = generics.split_for_impl();

        quote! {
            #[automatically_derived]
            impl #impl_gens ::juniper::GraphQLType<#scalar> for #ty
                #where_clause
            {
                fn name(_: &Self::TypeInfo) -> Option<&'static str> {
                    Some(#name)
                }

                fn meta<'r>(
                    info: &Self::TypeInfo,
                    registry: &mut ::juniper::Registry<'r, #scalar>,
                ) -> ::juniper::meta::MetaType<'r, #scalar>
                where
                    #scalar: 'r,
                {
                    registry.build_scalar_type::<Self>(info)
                        #description
                        #specified_by_url
                        .into_meta()
                }
            }
        }
    }

    /// Returns generated code implementing [`GraphQLValue`] trait for this
    /// [GraphQL scalar][1].
    ///
    /// [`GraphQLValue`]: juniper::GraphQLValue
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    fn impl_value_tokens(&self) -> TokenStream {
        let scalar = &self.scalar;
        // The resolving body delegates to a custom function or to the wrapped
        // field, depending on how the scalar was declared.
        let resolve = self.methods.expand_resolve(scalar);

        let (ty, generics) = self.impl_self_and_generics(false);
        let (impl_gens, _, where_clause) = generics.split_for_impl();

        quote! {
            #[automatically_derived]
            impl #impl_gens ::juniper::GraphQLValue<#scalar> for #ty
                #where_clause
            {
                type Context = ();
                type TypeInfo = ();

                fn type_name<'i>(&self, info: &'i Self::TypeInfo) -> Option<&'i str> {
                    <Self as ::juniper::GraphQLType<#scalar>>::name(info)
                }

                fn resolve(
                    &self,
                    info: &(),
                    selection: Option<&[::juniper::Selection<'_, #scalar>]>,
                    executor: &::juniper::Executor<'_, '_, Self::Context, #scalar>,
                ) -> ::juniper::ExecutionResult<#scalar> {
                    #resolve
                }
            }
        }
    }

    /// Returns generated code implementing [`GraphQLValueAsync`] trait for this
    /// [GraphQL scalar][1].
    ///
    /// [`GraphQLValueAsync`]: juniper::GraphQLValueAsync
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    fn impl_value_async_tokens(&self) -> TokenStream {
        let scalar = &self.scalar;

        let (ty, generics) = self.impl_self_and_generics(true);
        let (impl_gens, _, where_clause) = generics.split_for_impl();

        quote! {
            #[automatically_derived]
            impl #impl_gens ::juniper::GraphQLValueAsync<#scalar> for #ty
                #where_clause
            {
                fn resolve_async<'b>(
                    &'b self,
                    info: &'b Self::TypeInfo,
                    selection_set: Option<&'b [::juniper::Selection<'_, #scalar>]>,
                    executor: &'b ::juniper::Executor<'_, '_, Self::Context, #scalar>,
                ) -> ::juniper::BoxFuture<'b, ::juniper::ExecutionResult<#scalar>> {
                    use ::juniper::futures::future;
                    // Scalars resolve synchronously; wrap the sync result in a
                    // ready future.
                    let v = ::juniper::GraphQLValue::resolve(self, info, selection_set, executor);
                    Box::pin(future::ready(v))
                }
            }
        }
    }

    /// Returns generated code implementing [`InputValue`] trait for this
    /// [GraphQL scalar][1].
    ///
    /// [`InputValue`]: juniper::InputValue
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    fn impl_to_input_value_tokens(&self) -> TokenStream {
        let scalar = &self.scalar;
        let to_input_value = self.methods.expand_to_input_value(scalar);

        let (ty, generics) = self.impl_self_and_generics(false);
        let (impl_gens, _, where_clause) = generics.split_for_impl();

        quote! {
            #[automatically_derived]
            impl #impl_gens ::juniper::ToInputValue<#scalar> for #ty
                #where_clause
            {
                fn to_input_value(&self) -> ::juniper::InputValue<#scalar> {
                    #to_input_value
                }
            }
        }
    }

    /// Returns generated code implementing [`FromInputValue`] trait for this
    /// [GraphQL scalar][1].
    ///
    /// [`FromInputValue`]: juniper::FromInputValue
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    fn impl_from_input_value_tokens(&self) -> TokenStream {
        let scalar = &self.scalar;
        let from_input_value = self.methods.expand_from_input_value(scalar);

        let (ty, generics) = self.impl_self_and_generics(false);
        let (impl_gens, _, where_clause) = generics.split_for_impl();

        quote! {
            #[automatically_derived]
            impl #impl_gens ::juniper::FromInputValue<#scalar> for #ty
                #where_clause
            {
                type Error = ::juniper::executor::FieldError<#scalar>;

                fn from_input_value(input: &::juniper::InputValue<#scalar>) -> Result<Self, Self::Error> {
                    #from_input_value
                        .map_err(::juniper::executor::IntoFieldError::<#scalar>::into_field_error)
                }
            }
        }
    }

    /// Returns generated code implementing [`ParseScalarValue`] trait for this
    /// [GraphQL scalar][1].
    ///
    /// [`ParseScalarValue`]: juniper::ParseScalarValue
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    fn impl_parse_scalar_value_tokens(&self) -> TokenStream {
        let scalar = &self.scalar;
        let from_str = self.methods.expand_parse_scalar_value(scalar);

        let (ty, generics) = self.impl_self_and_generics(false);
        let (impl_gens, _, where_clause) = generics.split_for_impl();

        quote! {
            #[automatically_derived]
            impl #impl_gens ::juniper::ParseScalarValue<#scalar> for #ty
                #where_clause
            {
                fn from_str(
                    token: ::juniper::parser::ScalarToken<'_>,
                ) -> ::juniper::ParseScalarResult<#scalar> {
                    #from_str
                }
            }
        }
    }

    /// Returns generated code implementing [`BaseType`], [`BaseSubTypes`] and
    /// [`WrappedType`] traits for this [GraphQL scalar][1].
    ///
    /// [`BaseSubTypes`]: juniper::macros::reflection::BaseSubTypes
    /// [`BaseType`]: juniper::macros::reflection::BaseType
    /// [`WrappedType`]: juniper::macros::reflection::WrappedType
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    fn impl_reflection_traits_tokens(&self) -> TokenStream {
        let scalar = &self.scalar;
        let name = &self.name;

        let (ty, generics) = self.impl_self_and_generics(false);
        let (impl_gens, _, where_clause) = generics.split_for_impl();

        quote! {
            #[automatically_derived]
            impl #impl_gens ::juniper::macros::reflect::BaseType<#scalar> for #ty
                #where_clause
            {
                const NAME: ::juniper::macros::reflect::Type = #name;
            }

            #[automatically_derived]
            impl #impl_gens ::juniper::macros::reflect::BaseSubTypes<#scalar> for #ty
                #where_clause
            {
                const NAMES: ::juniper::macros::reflect::Types =
                    &[<Self as ::juniper::macros::reflect::BaseType<#scalar>>::NAME];
            }

            #[automatically_derived]
            impl #impl_gens ::juniper::macros::reflect::WrappedType<#scalar> for #ty
                #where_clause
            {
                const VALUE: ::juniper::macros::reflect::WrappedValue = 1;
            }
        }
    }

    /// Returns prepared self type and [`syn::Generics`] for [`GraphQLType`]
    /// trait (and similar) implementation.
    ///
    /// If `for_async` is `true`, then additional predicates are added to suit
    /// the [`GraphQLAsyncValue`] trait (and similar) requirements.
    ///
    /// [`GraphQLAsyncValue`]: juniper::GraphQLAsyncValue
    /// [`GraphQLType`]: juniper::GraphQLType
    #[must_use]
    fn impl_self_and_generics(&self, for_async: bool) -> (TokenStream, syn::Generics) {
        let mut generics = self.generics.clone();

        // For a derive on an ident, re-apply the type's own generics; an
        // explicit type from the attribute is used verbatim.
        let ty = match &self.ty {
            TypeOrIdent::Type(ty) => ty.into_token_stream(),
            TypeOrIdent::Ident(ident) => {
                let (_, ty_gen, _) = self.generics.split_for_impl();
                quote! { #ident #ty_gen }
            }
        };

        // Merge user-provided `where` predicates into the generated clause.
        if !self.where_clause.is_empty() {
            generics
                .make_where_clause()
                .predicates
                .extend(self.where_clause.clone())
        }

        let scalar = &self.scalar;
        if scalar.is_implicit_generic() {
            generics.params.push(parse_quote! { #scalar });
        }
        if scalar.is_generic() {
            generics
                .make_where_clause()
                .predicates
                .push(parse_quote! { #scalar: ::juniper::ScalarValue });
        }
        if let Some(bound) = scalar.bounds() {
            generics.make_where_clause().predicates.push(bound);
        }

        if for_async {
            let self_ty = if self.generics.lifetimes().next().is_some() {
                // Modify lifetime names to omit "lifetime name `'a` shadows a
                // lifetime name that is already in scope" error.
                let mut generics = self.generics.clone();
                ModifyLifetimes.visit_generics_mut(&mut generics);

                let lifetimes = generics.lifetimes().map(|lt| &lt.lifetime);
                let ty = match self.ty.clone() {
                    TypeOrIdent::Type(mut ty) => {
                        ModifyLifetimes.visit_type_mut(&mut ty);
                        ty.into_token_stream()
                    }
                    TypeOrIdent::Ident(ident) => {
                        let (_, ty_gens, _) = generics.split_for_impl();
                        quote! { #ident #ty_gens }
                    }
                };

                // `Sync` must hold for every lifetime instantiation.
                quote! { for<#( #lifetimes ),*> #ty }
            } else {
                quote! { Self }
            };
            generics
                .make_where_clause()
                .predicates
                .push(parse_quote! { #self_ty: Sync });

            if scalar.is_generic() {
                generics
                    .make_where_clause()
                    .predicates
                    .push(parse_quote! { #scalar: Send + Sync });
            }
        }

        (ty, generics)
    }
}

/// Adds `__fa__` prefix to all lifetimes to avoid "lifetime name `'a` shadows a
/// lifetime name that is already in scope" error.
struct ModifyLifetimes;

impl VisitMut for ModifyLifetimes {
    fn visit_lifetime_mut(&mut self, lf: &mut syn::Lifetime) {
        lf.ident = format_ident!("__fa__{}", lf.ident.unraw());
    }
}

/// Methods representing [GraphQL scalar][1].
///
/// [1]: https://spec.graphql.org/October2021#sec-Scalars
enum Methods {
    /// [GraphQL scalar][1] represented with only custom resolvers.
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    Custom {
        /// Function provided with `#[graphql(to_output_with = ...)]`.
        to_output: syn::ExprPath,

        /// Function provided with `#[graphql(from_input_with = ...)]`.
        from_input: syn::ExprPath,

        /// [`ParseToken`] provided with `#[graphql(parse_token_with = ...)]`
        /// or `#[graphql(parse_token(...))]`.
        parse_token: ParseToken,
    },

    /// [GraphQL scalar][1] maybe partially represented with custom resolver.
    /// Other methods are used from [`Field`].
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    Delegated {
        /// Function provided with `#[graphql(to_output_with = ...)]`.
        to_output: Option<syn::ExprPath>,

        /// Function provided with `#[graphql(from_input_with = ...)]`.
        from_input: Option<syn::ExprPath>,

        /// [`ParseToken`] provided with `#[graphql(parse_token_with = ...)]`
        /// or `#[graphql(parse_token(...))]`.
        parse_token: Option<ParseToken>,

        /// [`Field`] to resolve not provided methods.
        field: Box<Field>,
    },
}

impl Methods {
    /// Expands [`GraphQLValue::resolve`] method.
    ///
    /// [`GraphQLValue::resolve`]: juniper::GraphQLValue::resolve
    fn expand_resolve(&self, scalar: &scalar::Type) -> TokenStream {
        match self {
            // A custom `to_output` function takes precedence over delegation.
            Self::Custom { to_output, .. }
            | Self::Delegated {
                to_output: Some(to_output),
                ..
            } => {
                quote! { Ok(#to_output(self)) }
            }
            Self::Delegated { field, .. } => {
                quote! {
                    ::juniper::GraphQLValue::<#scalar>::resolve(
                        &self.#field,
                        info,
                        selection,
                        executor,
                    )
                }
            }
        }
    }

    /// Expands [`ToInputValue::to_input_value`] method.
    ///
    /// [`ToInputValue::to_input_value`]: juniper::ToInputValue::to_input_value
    fn expand_to_input_value(&self, scalar: &scalar::Type) -> TokenStream {
        match self {
            Self::Custom { to_output, .. }
            | Self::Delegated {
                to_output: Some(to_output),
                ..
            } => {
                quote! {
                    let v = #to_output(self);
                    ::juniper::ToInputValue::to_input_value(&v)
                }
            }
            Self::Delegated { field, .. } => {
                quote! {
                    ::juniper::ToInputValue::<#scalar>::to_input_value(&self.#field)
                }
            }
        }
    }

    /// Expands [`FromInputValue::from_input_value`][1] method.
    ///
    /// [1]: juniper::FromInputValue::from_input_value
    fn expand_from_input_value(&self, scalar: &scalar::Type) -> TokenStream {
        match self {
            Self::Custom { from_input, .. }
            | Self::Delegated {
                from_input: Some(from_input),
                ..
            } => {
                quote! { #from_input(input) }
            }
            Self::Delegated { field, .. } => {
                let field_ty = field.ty();
                let self_constructor = field.closure_constructor();
                quote! {
                    <#field_ty as ::juniper::FromInputValue<#scalar>>::from_input_value(input)
                        .map(#self_constructor)
                }
            }
        }
    }

    /// Expands [`ParseScalarValue::from_str`] method.
    ///
    /// [`ParseScalarValue::from_str`]: juniper::ParseScalarValue::from_str
    fn expand_parse_scalar_value(&self, scalar: &scalar::Type) -> TokenStream {
        match self {
            Self::Custom { parse_token, .. }
            | Self::Delegated {
                parse_token: Some(parse_token),
                ..
            } => {
                let parse_token = parse_token.expand_from_str(scalar);
                quote! { #parse_token }
            }
            Self::Delegated { field, .. } => {
                let field_ty = field.ty();
                quote! {
                    <#field_ty as ::juniper::ParseScalarValue<#scalar>>::from_str(token)
                }
            }
        }
    }
}

/// Representation of [`ParseScalarValue::from_str`] method.
///
/// [`ParseScalarValue::from_str`]: juniper::ParseScalarValue::from_str
#[derive(Clone, Debug)]
enum ParseToken {
    /// Custom method.
    Custom(syn::ExprPath),

    /// Tries to parse using [`syn::Type`]s [`ParseScalarValue`] impls until
    /// first success.
    ///
    /// [`ParseScalarValue`]: juniper::ParseScalarValue
    Delegated(Vec<syn::Type>),
}

impl ParseToken {
    /// Expands [`ParseScalarValue::from_str`] method.
    ///
    /// [`ParseScalarValue::from_str`]: juniper::ParseScalarValue::from_str
    fn expand_from_str(&self, scalar: &scalar::Type) -> TokenStream {
        match self {
            Self::Custom(parse_token) => {
                quote! { #parse_token(token) }
            }
            // Folds the delegated types into a chain of
            // `from_str(...).or_else(|_| ...)` attempts, first success wins.
            Self::Delegated(delegated) => delegated
                .iter()
                .fold(None, |acc, ty| {
                    acc.map_or_else(
                        || Some(quote! { <#ty as ::juniper::ParseScalarValue<#scalar>>::from_str(token) }),
                        |prev| {
                            Some(quote! {
                                #prev.or_else(|_| {
                                    <#ty as ::juniper::ParseScalarValue<#scalar>>::from_str(token)
                                })
                            })
                        }
                    )
                })
                .unwrap_or_default(),
        }
    }
}

/// Struct field to resolve not provided methods.
enum Field {
    /// Named [`Field`].
    Named(syn::Field),

    /// Unnamed [`Field`].
    Unnamed(syn::Field),
}

impl ToTokens for Field {
    // Emits the field accessor: the identifier for a named field, or the `0`
    // tuple index for an unnamed one.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        match self {
            Self::Named(f) => f.ident.to_tokens(tokens),
            Self::Unnamed(_) => tokens.append(Literal::u8_unsuffixed(0)),
        }
    }
}

impl Field {
    /// [`syn::Type`] of this [`Field`].
    fn ty(&self) -> &syn::Type {
        match self {
            Self::Named(f) | Self::Unnamed(f) => &f.ty,
        }
    }

    /// Closure to construct [GraphQL scalar][1] struct from [`Field`].
    ///
    /// [1]: https://spec.graphql.org/October2021#sec-Scalars
    fn closure_constructor(&self) -> TokenStream {
        match self {
            Field::Named(syn::Field { ident, .. }) => {
                quote! { |v| Self { #ident: v } }
            }
            Field::Unnamed(_) => quote! { Self },
        }
    }
}
use cvar::{INode, IVisit};
pub use soldank_shared::cvars::*;

/// Root of the server's console-variable (cvar) tree.
#[derive(Default)]
pub struct Config {
    pub server: ServerInfo,
    pub net: NetConfig,
    pub phys: Physics,
}

impl IVisit for Config {
    /// Walks every sub-config as a named cvar list node.
    fn visit(&mut self, visitor: &mut dyn FnMut(&mut dyn INode)) {
        visitor(&mut cvar::List("server", &mut self.server));
        visitor(&mut cvar::List("net", &mut self.net));
        visitor(&mut cvar::List("phys", &mut self.phys));
    }
}

/// Server metadata exposed through the cvar tree.
pub struct ServerInfo {
    pub motd: String,
}

/// Builds the default message-of-the-day from this crate's Cargo metadata.
fn default_motd() -> String {
    let name = clap::crate_name!();
    let version = clap::crate_version!();
    let description = clap::crate_description!();
    format!("{} {} - {}", name, version, description)
}

impl Default for ServerInfo {
    fn default() -> Self {
        ServerInfo {
            motd: default_motd(),
        }
    }
}

impl IVisit for ServerInfo {
    /// Exposes `motd` as a single cvar property with its default value.
    fn visit(&mut self, visitor: &mut dyn FnMut(&mut dyn INode)) {
        visitor(&mut cvar::Property("motd", &mut self.motd, default_motd()));
    }
}
use input_i_scanner::InputIScanner;

fn main() {
    let stdin = std::io::stdin();
    let mut scanner = InputIScanner::from(stdin.lock());

    // Reads whitespace-separated tokens; supports single values, tuples, and
    // `n` repetitions of either collected into a `Vec`.
    macro_rules! scan {
        (($($t: ty),+)) => {
            ($(scan!($t)),+)
        };
        ($t: ty) => {
            scanner.scan::<$t>() as $t
        };
        (($($t: ty),+); $n: expr) => {
            std::iter::repeat_with(|| scan!(($($t),+))).take($n).collect::<Vec<_>>()
        };
        ($t: ty; $n: expr) => {
            std::iter::repeat_with(|| scan!($t)).take($n).collect::<Vec<_>>()
        };
    }

    let t = scan!(u64);

    // Compute f(f(f(t) + t) + f(f(t))) step by step.
    let left = f(f(t) + t);
    let right = f(f(t));
    let ans = f(left + right);

    println!("{}", ans);
}

/// Quadratic helper: `f(x) = x² + 2x + 3`.
fn f(x: u64) -> u64 {
    x * x + 2 * x + 3
}
use crate::{FileTypeIdentifier, SegmentEntry, SegmentIdBytes, SequencedWalOp};
use byteorder::{BigEndian, ReadBytesExt};
use crc32fast::Hasher;
use generated_types::influxdata::iox::wal::v1::WalOpBatch as ProtoWalOpBatch;
use prost::Message;
use snafu::prelude::*;
use snap::read::FrameDecoder;
use std::{
    fs::File,
    io::{self, BufReader, Read},
    path::{Path, PathBuf},
};

/// Reads a closed (no longer written-to) WAL segment file sequentially:
/// a header, then length/checksum-framed, snappy-compressed entries.
#[derive(Debug)]
pub struct ClosedSegmentFileReader<R>(R);

impl ClosedSegmentFileReader<BufReader<File>> {
    /// Opens the segment file at `path` behind a buffered reader.
    pub fn from_path(path: impl AsRef<Path>) -> Result<Self> {
        let path = path.as_ref();
        let f = File::open(path).context(UnableToOpenFileSnafu { path })?;
        let f = BufReader::new(f);
        Ok(Self::new(f))
    }
}

impl<R> ClosedSegmentFileReader<R>
where
    R: Read,
{
    /// Wraps any `Read` source (tests use in-memory slices).
    pub fn new(f: R) -> Self {
        Self(f)
    }

    /// Reads exactly `N` bytes into a fixed-size array.
    fn read_array<const N: usize>(&mut self) -> Result<[u8; N]> {
        let mut data = [0u8; N];
        self.0
            .read_exact(&mut data)
            .context(UnableToReadArraySnafu { length: N })?;
        Ok(data)
    }

    /// Reads the file header: the file-type identifier followed by the
    /// segment id bytes.
    pub fn read_header(&mut self) -> Result<(FileTypeIdentifier, SegmentIdBytes)> {
        Ok((self.read_array()?, self.read_array()?))
    }

    /// Reads the next entry, returning `Ok(None)` at a clean end-of-file.
    ///
    /// Entry framing: big-endian u32 CRC32 checksum, big-endian u32 compressed
    /// length, then that many snappy-compressed bytes. Both the length and
    /// checksum are verified against what was actually consumed.
    fn one_entry(&mut self) -> Result<Option<SegmentEntry>> {
        // EOF exactly at a frame boundary means "no more entries"; any other
        // read error is propagated.
        let expected_checksum = match self.0.read_u32::<BigEndian>() {
            Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => return Ok(None),
            other => other.context(UnableToReadChecksumSnafu)?,
        };

        // Widened to u64 to match `Take`'s limit and `bytes_seen` below.
        let expected_len = self
            .0
            .read_u32::<BigEndian>()
            .context(UnableToReadLengthSnafu)?
            .into();

        // Layered readers: limit to the declared length, checksum the raw
        // compressed bytes as they stream by, then decompress.
        let compressed_read = self.0.by_ref().take(expected_len);
        let hashing_read = CrcReader::new(compressed_read);
        let mut decompressing_read = FrameDecoder::new(hashing_read);

        let mut data = Vec::with_capacity(100);
        decompressing_read
            .read_to_end(&mut data)
            .context(UnableToReadDataSnafu)?;

        let (actual_compressed_len, actual_checksum) = decompressing_read.get_mut().checksum();

        ensure!(
            expected_len == actual_compressed_len,
            LengthMismatchSnafu {
                expected: expected_len,
                actual: actual_compressed_len
            }
        );

        ensure!(
            expected_checksum == actual_checksum,
            ChecksumMismatchSnafu {
                expected: expected_checksum,
                actual: actual_checksum
            }
        );

        Ok(Some(SegmentEntry { data }))
    }

    /// Decodes the next entry as a protobuf `WalOpBatch` and converts each op;
    /// returns `Ok(None)` when the file is exhausted.
    pub fn next_batch(&mut self) -> Result<Option<Vec<SequencedWalOp>>> {
        if let Some(entry) = self.one_entry()? {
            let decoded =
                ProtoWalOpBatch::decode(&*entry.data).context(UnableToDeserializeDataSnafu)?;

            let mut ops = Vec::with_capacity(decoded.ops.len());
            for op in decoded.ops {
                ops.push(op.try_into().context(InvalidMessageSnafu)?);
            }

            return Ok(Some(ops));
        }

        Ok(None)
    }
}

/// `Read` adapter that transparently accumulates a CRC32 and a byte count of
/// everything read through it.
struct CrcReader<R> {
    inner: R,
    hasher: Hasher,
    bytes_seen: u64,
}

impl<R> CrcReader<R> {
    fn new(inner: R) -> Self {
        let hasher = Hasher::default();
        Self {
            inner,
            hasher,
            bytes_seen: 0,
        }
    }

    /// Returns `(bytes_seen, crc32)` and resets both accumulators.
    fn checksum(&mut self) -> (u64, u32) {
        // FIXME: If rust-snappy added an `into_inner`, we should
        // take `self` by value
        (
            std::mem::take(&mut self.bytes_seen),
            std::mem::take(&mut self.hasher).finalize(),
        )
    }
}

impl<R> Read for CrcReader<R>
where
    R: Read,
{
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let len = self.inner.read(buf)?;
        let len_u64 = u64::try_from(len).expect("Only designed to run on 32-bit systems or higher");

        self.bytes_seen += len_u64;
        self.hasher.update(&buf[..len]);
        Ok(len)
    }
}

/// Errors produced while reading a closed segment file.
#[derive(Debug, Snafu)]
pub enum Error {
    UnableToOpenFile {
        source: io::Error,
        path: PathBuf,
    },

    UnableToReadArray {
        source: io::Error,
        length: usize,
    },

    UnableToReadChecksum {
        source: io::Error,
    },

    UnableToReadLength {
        source: io::Error,
    },

    UnableToReadData {
        source: io::Error,
    },

    LengthMismatch {
        expected: u64,
        actual: u64,
    },

    ChecksumMismatch {
        expected: u32,
        actual: u32,
    },

    UnableToDecompressData {
        source: snap::Error,
    },

    UnableToDeserializeData {
        source: prost::DecodeError,
    },

    InvalidMessage {
        source: generated_types::google::FieldViolation,
    },
}

pub type Result<T, E = Error> = std::result::Result<T, E>;

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{SegmentId, FILE_TYPE_IDENTIFIER};
    use byteorder::WriteBytesExt;
    use std::io::Write;
    use test_helpers::assert_error;

    #[test]
    fn successful_read_no_entries() {
        let segment_file = FakeSegmentFile::new();
        let data = segment_file.data();

        let mut reader = ClosedSegmentFileReader::new(data.as_slice());
        let (file_type_id, uuid) = reader.read_header().unwrap();
        assert_eq!(&file_type_id, FILE_TYPE_IDENTIFIER);
        assert_eq!(uuid, segment_file.id.as_bytes());

        let entry = reader.one_entry().unwrap();
        assert!(entry.is_none());
    }

    #[test]
    fn successful_read_with_entries() {
        let mut segment_file = FakeSegmentFile::new();
        let entry_input_1 = FakeSegmentEntry::new(b"hello");
        segment_file.add_entry(entry_input_1.clone());

        let entry_input_2 = FakeSegmentEntry::new(b"goodbye");
        segment_file.add_entry(entry_input_2.clone());

        let data = segment_file.data();
        let mut reader = ClosedSegmentFileReader::new(data.as_slice());
        let (file_type_id, uuid) = reader.read_header().unwrap();
        assert_eq!(&file_type_id, FILE_TYPE_IDENTIFIER);
        assert_eq!(uuid, segment_file.id.as_bytes());

        let entry_output_1 = reader.one_entry().unwrap().unwrap();
        let expected_1 = SegmentEntry::from(&entry_input_1);
        assert_eq!(entry_output_1.data, expected_1.data);

        let entry_output_2 = reader.one_entry().unwrap().unwrap();
        let expected_2 = SegmentEntry::from(&entry_input_2);
        assert_eq!(entry_output_2.data, expected_2.data);

        let entry = reader.one_entry().unwrap();
        assert!(entry.is_none());
    }

    #[test]
    fn unsuccessful_read_too_short_len() {
        let mut segment_file = FakeSegmentFile::new();

        // Declare one byte fewer than the real compressed payload.
        let bad_entry_input = FakeSegmentEntry::new(b"hello");
        let good_length = bad_entry_input.compressed_len();
        let bad_entry_input = bad_entry_input.with_compressed_len(good_length - 1);
        segment_file.add_entry(bad_entry_input);

        let good_entry_input = FakeSegmentEntry::new(b"goodbye");
        segment_file.add_entry(good_entry_input);

        let data = segment_file.data();
        let mut reader = ClosedSegmentFileReader::new(data.as_slice());
        let (file_type_id, uuid) = reader.read_header().unwrap();
        assert_eq!(&file_type_id, FILE_TYPE_IDENTIFIER);
        assert_eq!(uuid, segment_file.id.as_bytes());

        let read_fail = reader.one_entry();
        assert_error!(read_fail, Error::UnableToReadData { .. });

        // Trying to continue reading will fail as well, see:
        // <https://github.com/influxdata/influxdb_iox/issues/6222>
        assert_error!(reader.one_entry(), Error::UnableToReadData { .. });
    }

    #[test]
    fn unsuccessful_read_too_long_len() {
        let mut segment_file = FakeSegmentFile::new();

        // Declare one byte more than the real compressed payload.
        let bad_entry_input = FakeSegmentEntry::new(b"hello");
        let good_length = bad_entry_input.compressed_len();
        let bad_entry_input = bad_entry_input.with_compressed_len(good_length + 1);
        segment_file.add_entry(bad_entry_input);

        let good_entry_input = FakeSegmentEntry::new(b"goodbye");
        segment_file.add_entry(good_entry_input);

        let data = segment_file.data();
        let mut reader = ClosedSegmentFileReader::new(data.as_slice());
        let (file_type_id, uuid) = reader.read_header().unwrap();
        assert_eq!(&file_type_id, FILE_TYPE_IDENTIFIER);
        assert_eq!(uuid, segment_file.id.as_bytes());

        let read_fail = reader.one_entry();
        assert_error!(read_fail, Error::UnableToReadData { .. });

        // Trying to continue reading will fail as well, see:
        // <https://github.com/influxdata/influxdb_iox/issues/6222>
        assert_error!(reader.one_entry(), Error::UnableToReadData { .. });
    }

    #[test]
    fn unsuccessful_read_checksum_mismatch() {
        let mut segment_file = FakeSegmentFile::new();

        // Corrupt only the checksum; length stays correct.
        let bad_entry_input = FakeSegmentEntry::new(b"hello");
        let good_checksum = bad_entry_input.checksum();
        let bad_entry_input = bad_entry_input.with_checksum(good_checksum + 1);
        segment_file.add_entry(bad_entry_input);

        let good_entry_input = FakeSegmentEntry::new(b"goodbye");
        segment_file.add_entry(good_entry_input.clone());

        let data = segment_file.data();
        let mut reader = ClosedSegmentFileReader::new(data.as_slice());
        let (file_type_id, uuid) = reader.read_header().unwrap();
        assert_eq!(&file_type_id, FILE_TYPE_IDENTIFIER);
        assert_eq!(uuid, segment_file.id.as_bytes());

        let read_fail = reader.one_entry();
        assert_error!(read_fail, Error::ChecksumMismatch { .. });

        // A bad checksum won't corrupt further entries
        let entry_output_2 = reader.one_entry().unwrap().unwrap();
        let expected_2 = SegmentEntry::from(&good_entry_input);
        assert_eq!(entry_output_2.data, expected_2.data);

        let entry = reader.one_entry().unwrap();
        assert!(entry.is_none());
    }

    /// In-memory stand-in for a segment file, serialized via `data()`.
    #[derive(Debug)]
    struct FakeSegmentFile {
        id: SegmentId,
        entries: Vec<FakeSegmentEntry>,
    }

    impl FakeSegmentFile {
        fn new() -> Self {
            Self {
                id: SegmentId::new(0),
                entries: Default::default(),
            }
        }

        fn add_entry(&mut self, entry: FakeSegmentEntry) {
            self.entries.push(entry);
        }

        /// Serializes header + all entries in the on-disk framing format.
        fn data(&self) -> Vec<u8> {
            let mut f = Vec::new();
            f.write_all(FILE_TYPE_IDENTIFIER).unwrap();

            let id_bytes = self.id.as_bytes();
            f.write_all(&id_bytes).unwrap();

            for entry in &self.entries {
                f.write_u32::<BigEndian>(entry.checksum()).unwrap();
                f.write_u32::<BigEndian>(entry.compressed_len()).unwrap();
                f.write_all(&entry.compressed_data()).unwrap();
            }

            f
        }
    }

    /// One fake entry; checksum/length can be overridden to simulate
    /// corruption, otherwise they are computed from the payload.
    #[derive(Debug, Clone, PartialEq)]
    struct FakeSegmentEntry {
        checksum: Option<u32>,
        compressed_len: Option<u32>,
        uncompressed_data: Vec<u8>,
    }

    impl FakeSegmentEntry {
        fn new(data: &[u8]) -> Self {
            Self {
                checksum: None,
                compressed_len: None,
                uncompressed_data: data.to_vec(),
            }
        }

        fn with_compressed_len(self, compressed_len: u32) -> Self {
            Self {
                compressed_len: Some(compressed_len),
                ..self
            }
        }

        fn with_checksum(self, checksum: u32) -> Self {
            Self {
                checksum: Some(checksum),
                ..self
            }
        }

        fn checksum(&self) -> u32 {
            self.checksum.unwrap_or_else(|| {
                let mut hasher = Hasher::new();
                hasher.update(&self.compressed_data());
                hasher.finalize()
            })
        }

        fn compressed_data(&self) -> Vec<u8> {
            let mut encoder = snap::write::FrameEncoder::new(Vec::new());
            encoder.write_all(&self.uncompressed_data).unwrap();
            encoder.into_inner().expect("cannot fail to flush to a Vec")
        }

        fn compressed_len(&self) -> u32 {
            self.compressed_len
                .unwrap_or_else(|| self.compressed_data().len() as u32)
        }
    }

    impl From<&FakeSegmentEntry> for SegmentEntry {
        fn from(fake: &FakeSegmentEntry) -> Self {
            Self {
                data: fake.uncompressed_data.clone(),
            }
        }
    }
}
//! Bare-metal entry point for the ferr_os kernel binary.
#![no_std]
#![no_main]
#![feature(format_args_nl)]

use core::{fmt::Write, panic::PanicInfo};
use ferr_os::{
    drivers::io::{
        self,
        vgat_out::{VgatChar, VgatOut, DEFAULT_VGA_TEXT_BUFF_HEIGHT, DEFAULT_VGA_TEXT_BUFF_WIDTH},
    },
    osattrs,
    runtime::{Core},
    println,
};

/// Kernel entry point, called directly by the bootloader; never returns.
#[no_mangle]
pub extern "C" fn _start() -> ! {
    // NOTE(review): `vgatout` is never used after construction; presumably
    // `VgatOut::default()` initializes the VGA text output as a side effect —
    // confirm before removing this binding.
    let mut vgatout = VgatOut::default();
    // Bring up the core runtime with the OS banner and default options.
    let mut rt: Core<'static> = Core::new(Some(osattrs::FERROS_BANNER), None);
    let greeter = rt.greeter().unwrap();
    println!(rt, "{}", greeter);
    // Nothing further to do: idle forever.
    loop {}
}

/// Required `no_std` panic handler: halt in place.
#[panic_handler]
fn panic(_info: &PanicInfo) -> ! {
    loop {}
}
//! GStreamer "dynamic pipelines" example: plays the audio stream of a remote
//! WebM file, linking the decoder to the audio chain only once the
//! `pad-added` signal reveals the pad's media type.
mod error;

use gstreamer::prelude::*;
use gstreamer::*;

fn main() -> Result<(), error::Error> {
    /* Initialize GStreamer */
    init()?;

    /* Create the elements */
    let source = ElementFactory::make("uridecodebin")
        .name("source")
        .build()?;
    let convert = ElementFactory::make("audioconvert")
        .name("convert")
        .build()?;
    let resample = ElementFactory::make("audioresample")
        .name("resample")
        .build()?;
    let sink = ElementFactory::make("autoaudiosink").name("sink").build()?;

    /* Create the empty pipeline */
    let pipeline = Pipeline::new(Some("test-pipeline"));

    /* Build the pipeline. Note that we are NOT linking the source at this
     * point. We will do it later. */
    pipeline.add_many(&[&source, &convert, &resample, &sink])?;
    Element::link_many(&[&convert, &resample, &sink])
        .map_err(|_| "Elements could not be linked.")?;

    /* Set the URI to play */
    source.set_property(
        "uri",
        "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm",
    );

    /* Connect to the pad-added signal */
    source.connect_pad_added(pad_added_handler(convert));

    /* Start playing */
    pipeline
        .set_state(State::Playing)
        .map_err(|_| "Unable to set the pipeline to the playing state.")?;

    /* Listen to the bus */
    let bus = pipeline.bus().unwrap();
    loop {
        // `None` timeout blocks until one of the filtered message types
        // arrives, so a `None` result is not expected here.
        match bus
            .timed_pop_filtered(
                None,
                &[
                    MessageType::StateChanged,
                    MessageType::Error,
                    MessageType::Eos,
                ],
            )
            .as_ref()
            .map(|msg| msg.view())
        {
            /* Parse message */
            Some(MessageView::Error(err)) => {
                println!(
                    "Error received from element {}: {:?}",
                    err.src().unwrap().name(),
                    err.error()
                );
                println!(
                    "Debugging information: {}",
                    err.debug().unwrap_or_else(|| "none".to_string())
                );
                break;
            }
            Some(MessageView::Eos(_)) => {
                println!("End-Of-Stream reached.");
                break;
            }
            Some(MessageView::StateChanged(message)) => {
                /* We are only interested in state-changed messages from the pipeline */
                if message
                    .src()
                    .map(|object| object.as_object_ref() == pipeline.as_object_ref())
                    .unwrap_or(false)
                {
                    let old_state = message.old();
                    let new_state = message.current();
                    let _pending_state = message.pending();
                    println!(
                        "Pipeline state changed from {:?} to {:?}:",
                        old_state, new_state
                    );
                }
            }
            _ => {
                /* We should not reach here */
                panic!("Unexpected message received.");
            }
        }
    }

    /* Free resources */
    pipeline.set_state(State::Null)?;

    Ok(())
}

/* This function will be called by the pad-added signal */
// Returns the closure installed on `uridecodebin`; it captures the converter
// element and tries to link each newly exposed source pad to it.
fn pad_added_handler(convert: Element) -> impl Fn(&Element, &Pad) + Send + Sync + 'static {
    move |src: &Element, new_pad: &Pad| {
        if let Some(sink_pad) = convert.static_pad("sink") {
            println!(
                "Received new pad '{}' from '{}':\n",
                new_pad.name(),
                src.name()
            );

            /* If our converter is already linked, we have nothing to do here */
            if sink_pad.is_linked() {
                println!("We are already linked. Ignoring.");
                return;
            }

            /* Check the new pad's type */
            // NOTE(review): `unwrap` assumes the pad already carries caps when
            // `pad-added` fires — confirm this holds for uridecodebin.
            let new_pad_caps = new_pad.current_caps().unwrap();
            let new_pad_struct = new_pad_caps.structure(0).unwrap();
            let new_pad_type = new_pad_struct.name();
            if !new_pad_type.starts_with("audio/x-raw") {
                println!(
                    "It has type '{}' which is not raw audio. Ignoring.",
                    new_pad_type
                );
                return;
            }

            /* Attempt the link */
            if new_pad.link(&sink_pad).is_err() {
                println!("Type is '{}' but link failed.", new_pad_type);
            } else {
                println!("Link succeeded (type '{}').", new_pad_type);
            }
        }
    }
}
pub use super::raytracing::*;
//! Implement virtual machine to run instructions. //! //! See also: //! <https://github.com/ProgVal/pythonvm-rust/blob/master/src/processor/mod.rs> #[cfg(feature = "rustpython-compiler")] mod compile; mod context; mod interpreter; mod method; mod setting; pub mod thread; mod vm_new; mod vm_object; mod vm_ops; use crate::{ builtins::{ code::PyCode, pystr::AsPyStr, tuple::{PyTuple, PyTupleTyped}, PyBaseExceptionRef, PyDictRef, PyInt, PyList, PyModule, PyStr, PyStrInterned, PyStrRef, PyTypeRef, }, codecs::CodecsRegistry, common::{hash::HashSecret, lock::PyMutex, rc::PyRc}, convert::ToPyObject, frame::{ExecutionResult, Frame, FrameRef}, frozen::FrozenModule, function::{ArgMapping, FuncArgs, PySetterValue}, import, protocol::PyIterIter, scope::Scope, signal, stdlib, warn::WarningsState, AsObject, Py, PyObject, PyObjectRef, PyPayload, PyRef, PyResult, }; use crossbeam_utils::atomic::AtomicCell; #[cfg(unix)] use nix::{ sys::signal::{kill, sigaction, SaFlags, SigAction, SigSet, Signal::SIGINT}, unistd::getpid, }; use std::sync::atomic::AtomicBool; use std::{ borrow::Cow, cell::{Cell, Ref, RefCell}, collections::{HashMap, HashSet}, }; pub use context::Context; pub use interpreter::Interpreter; pub(crate) use method::PyMethod; pub use setting::Settings; // Objects are live when they are on stack, or referenced by a name (for now) /// Top level container of a python virtual machine. In theory you could /// create more instances of this struct and have them operate fully isolated. 
/// /// To construct this, please refer to the [`Interpreter`](Interpreter) pub struct VirtualMachine { pub builtins: PyRef<PyModule>, pub sys_module: PyRef<PyModule>, pub ctx: PyRc<Context>, pub frames: RefCell<Vec<FrameRef>>, pub wasm_id: Option<String>, exceptions: RefCell<ExceptionStack>, pub import_func: PyObjectRef, pub profile_func: RefCell<PyObjectRef>, pub trace_func: RefCell<PyObjectRef>, pub use_tracing: Cell<bool>, pub recursion_limit: Cell<usize>, pub(crate) signal_handlers: Option<Box<RefCell<[Option<PyObjectRef>; signal::NSIG]>>>, pub(crate) signal_rx: Option<signal::UserSignalReceiver>, pub repr_guards: RefCell<HashSet<usize>>, pub state: PyRc<PyGlobalState>, pub initialized: bool, recursion_depth: Cell<usize>, } #[derive(Debug, Default)] struct ExceptionStack { exc: Option<PyBaseExceptionRef>, prev: Option<Box<ExceptionStack>>, } pub struct PyGlobalState { pub settings: Settings, pub module_inits: stdlib::StdlibMap, pub frozen: HashMap<&'static str, FrozenModule, ahash::RandomState>, pub stacksize: AtomicCell<usize>, pub thread_count: AtomicCell<usize>, pub hash_secret: HashSecret, pub atexit_funcs: PyMutex<Vec<(PyObjectRef, FuncArgs)>>, pub codec_registry: CodecsRegistry, pub finalizing: AtomicBool, pub warnings: WarningsState, pub override_frozen_modules: AtomicCell<isize>, pub before_forkers: PyMutex<Vec<PyObjectRef>>, pub after_forkers_child: PyMutex<Vec<PyObjectRef>>, pub after_forkers_parent: PyMutex<Vec<PyObjectRef>>, pub int_max_str_digits: AtomicCell<usize>, } pub fn process_hash_secret_seed() -> u32 { use once_cell::sync::OnceCell; static SEED: OnceCell<u32> = OnceCell::new(); *SEED.get_or_init(rand::random) } impl VirtualMachine { /// Create a new `VirtualMachine` structure. fn new(settings: Settings, ctx: PyRc<Context>) -> VirtualMachine { flame_guard!("new VirtualMachine"); // make a new module without access to the vm; doesn't // set __spec__, __loader__, etc. 
attributes let new_module = |def| { PyRef::new_ref( PyModule::from_def(def), ctx.types.module_type.to_owned(), Some(ctx.new_dict()), ) }; // Hard-core modules: let builtins = new_module(stdlib::builtins::__module_def(&ctx)); let sys_module = new_module(stdlib::sys::__module_def(&ctx)); let import_func = ctx.none(); let profile_func = RefCell::new(ctx.none()); let trace_func = RefCell::new(ctx.none()); // hack to get around const array repeat expressions, rust issue #79270 const NONE: Option<PyObjectRef> = None; // putting it in a const optimizes better, prevents linear initialization of the array #[allow(clippy::declare_interior_mutable_const)] const SIGNAL_HANDLERS: RefCell<[Option<PyObjectRef>; signal::NSIG]> = RefCell::new([NONE; signal::NSIG]); let signal_handlers = Some(Box::new(SIGNAL_HANDLERS)); let module_inits = stdlib::get_module_inits(); let seed = match settings.hash_seed { Some(seed) => seed, None => process_hash_secret_seed(), }; let hash_secret = HashSecret::new(seed); let codec_registry = CodecsRegistry::new(&ctx); let warnings = WarningsState::init_state(&ctx); let int_max_str_digits = AtomicCell::new(match settings.int_max_str_digits { -1 => 4300, other => other, } as usize); let mut vm = VirtualMachine { builtins, sys_module, ctx, frames: RefCell::new(vec![]), wasm_id: None, exceptions: RefCell::default(), import_func, profile_func, trace_func, use_tracing: Cell::new(false), recursion_limit: Cell::new(if cfg!(debug_assertions) { 256 } else { 1000 }), signal_handlers, signal_rx: None, repr_guards: RefCell::default(), state: PyRc::new(PyGlobalState { settings, module_inits, frozen: HashMap::default(), stacksize: AtomicCell::new(0), thread_count: AtomicCell::new(0), hash_secret, atexit_funcs: PyMutex::default(), codec_registry, finalizing: AtomicBool::new(false), warnings, override_frozen_modules: AtomicCell::new(0), before_forkers: PyMutex::default(), after_forkers_child: PyMutex::default(), after_forkers_parent: PyMutex::default(), 
int_max_str_digits, }), initialized: false, recursion_depth: Cell::new(0), }; if vm.state.hash_secret.hash_str("") != vm .ctx .interned_str("") .expect("empty str must be interned") .hash(&vm) { panic!("Interpreters in same process must share the hash seed"); } let frozen = core_frozen_inits().collect(); PyRc::get_mut(&mut vm.state).unwrap().frozen = frozen; vm.builtins.init_dict( vm.ctx.intern_str("builtins"), Some(vm.ctx.intern_str(stdlib::builtins::DOC.unwrap()).to_owned()), &vm, ); vm.sys_module.init_dict( vm.ctx.intern_str("sys"), Some(vm.ctx.intern_str(stdlib::sys::DOC.unwrap()).to_owned()), &vm, ); // let name = vm.sys_module.get_attr("__name__", &vm).unwrap(); vm } /// set up the encodings search function /// init_importlib must be called before this call #[cfg(feature = "encodings")] fn import_encodings(&mut self) -> PyResult<()> { self.import("encodings", None, 0).map_err(|import_err| { let rustpythonpath_env = std::env::var("RUSTPYTHONPATH").ok(); let pythonpath_env = std::env::var("PYTHONPATH").ok(); let env_set = rustpythonpath_env.as_ref().is_some() || pythonpath_env.as_ref().is_some(); let path_contains_env = self.state.settings.path_list.iter().any(|s| { Some(s.as_str()) == rustpythonpath_env.as_deref() || Some(s.as_str()) == pythonpath_env.as_deref() }); let guide_message = if !env_set { "Neither RUSTPYTHONPATH nor PYTHONPATH is set. Try setting one of them to the stdlib directory." } else if path_contains_env { "RUSTPYTHONPATH or PYTHONPATH is set, but it doesn't contain the encodings library. If you are customizing the RustPython vm/interpreter, try adding the stdlib directory to the path. If you are developing the RustPython interpreter, it might be a bug during development." } else { "RUSTPYTHONPATH or PYTHONPATH is set, but it wasn't loaded to `Settings::path_list`. If you are going to customize the RustPython vm/interpreter, those environment variables are not loaded in the Settings struct by default. 
Please try creating a customized instance of the Settings struct. If you are developing the RustPython interpreter, it might be a bug during development." }; let msg = format!( "RustPython could not import the encodings module. It usually means something went wrong. Please carefully read the following messages and follow the steps.\n\ \n\ {guide_message}\n\ If you don't have access to a consistent external environment (e.g. targeting wasm, embedding \ rustpython in another application), try enabling the `freeze-stdlib` feature.\n\ If this is intended and you want to exclude the encodings module from your interpreter, please remove the `encodings` feature from `rustpython-vm` crate." ); let err = self.new_runtime_error(msg); err.set_cause(Some(import_err)); err })?; Ok(()) } fn import_utf8_encodings(&mut self) -> PyResult<()> { import::import_frozen(self, "codecs")?; // FIXME: See corresponding part of `core_frozen_inits` // let encoding_module_name = if cfg!(feature = "freeze-stdlib") { // "encodings.utf_8" // } else { // "encodings_utf_8" // }; let encoding_module_name = "encodings_utf_8"; let encoding_module = import::import_frozen(self, encoding_module_name)?; let getregentry = encoding_module.get_attr("getregentry", self)?; let codec_info = getregentry.call((), self)?; self.state .codec_registry .register_manual("utf-8", codec_info.try_into_value(self)?)?; Ok(()) } fn initialize(&mut self) { flame_guard!("init VirtualMachine"); if self.initialized { panic!("Double Initialize Error"); } stdlib::builtins::init_module(self, &self.builtins); stdlib::sys::init_module(self, &self.sys_module, &self.builtins); let mut essential_init = || -> PyResult { #[cfg(not(target_arch = "wasm32"))] import::import_builtin(self, "_signal")?; #[cfg(any(feature = "parser", feature = "compiler"))] import::import_builtin(self, "_ast")?; #[cfg(not(feature = "threading"))] import::import_frozen(self, "_thread")?; let importlib = import::init_importlib_base(self)?; 
self.import_utf8_encodings()?; #[cfg(any(not(target_arch = "wasm32"), target_os = "wasi"))] { // this isn't fully compatible with CPython; it imports "io" and sets // builtins.open to io.OpenWrapper, but this is easier, since it doesn't // require the Python stdlib to be present let io = import::import_builtin(self, "_io")?; let set_stdio = |name, fd, mode: &str| { let stdio = crate::stdlib::io::open( self.ctx.new_int(fd).into(), Some(mode), Default::default(), self, )?; let dunder_name = self.ctx.intern_str(format!("__{name}__")); self.sys_module.set_attr( dunder_name, // e.g. __stdin__ stdio.clone(), self, )?; self.sys_module.set_attr(name, stdio, self)?; Ok(()) }; set_stdio("stdin", 0, "r")?; set_stdio("stdout", 1, "w")?; set_stdio("stderr", 2, "w")?; let io_open = io.get_attr("open", self)?; self.builtins.set_attr("open", io_open, self)?; } Ok(importlib) }; let res = essential_init(); let importlib = self.expect_pyresult(res, "essential initialization failed"); if self.state.settings.allow_external_library && cfg!(feature = "rustpython-compiler") { if let Err(e) = import::init_importlib_package(self, importlib) { eprintln!("importlib initialization failed. This is critical for many complicated packages."); self.print_exception(e); } } #[cfg(feature = "encodings")] if cfg!(feature = "freeze-stdlib") || !self.state.settings.path_list.is_empty() { if let Err(e) = self.import_encodings() { eprintln!( "encodings initialization failed. Only utf-8 encoding will be supported." ); self.print_exception(e); } } else { // Here may not be the best place to give general `path_list` advice, // but bare rustpython_vm::VirtualMachine users skipped proper settings must hit here while properly setup vm never enters here. eprintln!( "feature `encodings` is enabled but `settings.path_list` is empty. \ Please add the library path to `settings.path_list`. 
If you intended to disable the entire standard library (including the `encodings` feature), please also make sure to disable the `encodings` feature.\n\ Tip: You may also want to add `\"\"` to `settings.path_list` in order to enable importing from the current working directory." ); } self.initialized = true; } fn state_mut(&mut self) -> &mut PyGlobalState { PyRc::get_mut(&mut self.state) .expect("there should not be multiple threads while a user has a mut ref to a vm") } /// Can only be used in the initialization closure passed to [`Interpreter::with_init`] pub fn add_native_module<S>(&mut self, name: S, module: stdlib::StdlibInitFunc) where S: Into<Cow<'static, str>>, { self.state_mut().module_inits.insert(name.into(), module); } pub fn add_native_modules<I>(&mut self, iter: I) where I: IntoIterator<Item = (Cow<'static, str>, stdlib::StdlibInitFunc)>, { self.state_mut().module_inits.extend(iter); } /// Can only be used in the initialization closure passed to [`Interpreter::with_init`] pub fn add_frozen<I>(&mut self, frozen: I) where I: IntoIterator<Item = (&'static str, FrozenModule)>, { self.state_mut().frozen.extend(frozen); } /// Set the custom signal channel for the interpreter pub fn set_user_signal_channel(&mut self, signal_rx: signal::UserSignalReceiver) { self.signal_rx = Some(signal_rx); } pub fn run_code_obj(&self, code: PyRef<PyCode>, scope: Scope) -> PyResult { let frame = Frame::new(code, scope, self.builtins.dict(), &[], self).into_ref(&self.ctx); self.run_frame(frame) } #[cold] pub fn run_unraisable(&self, e: PyBaseExceptionRef, msg: Option<String>, object: PyObjectRef) { let sys_module = self.import("sys", None, 0).unwrap(); let unraisablehook = sys_module.get_attr("unraisablehook", self).unwrap(); let exc_type = e.class().to_owned(); let exc_traceback = e.traceback().to_pyobject(self); // TODO: actual traceback let exc_value = e.into(); let args = stdlib::sys::UnraisableHookArgs { exc_type, exc_value, exc_traceback, err_msg: self.new_pyobj(msg), 
object, }; if let Err(e) = unraisablehook.call((args,), self) { println!("{}", e.as_object().repr(self).unwrap().as_str()); } } #[inline(always)] pub fn run_frame(&self, frame: FrameRef) -> PyResult { match self.with_frame(frame, |f| f.run(self))? { ExecutionResult::Return(value) => Ok(value), _ => panic!("Got unexpected result from function"), } } pub fn current_recursion_depth(&self) -> usize { self.recursion_depth.get() } /// Used to run the body of a (possibly) recursive function. It will raise a /// RecursionError if recursive functions are nested far too many times, /// preventing a stack overflow. pub fn with_recursion<R, F: FnOnce() -> PyResult<R>>(&self, _where: &str, f: F) -> PyResult<R> { self.check_recursive_call(_where)?; self.recursion_depth.set(self.recursion_depth.get() + 1); let result = f(); self.recursion_depth.set(self.recursion_depth.get() - 1); result } pub fn with_frame<R, F: FnOnce(FrameRef) -> PyResult<R>>( &self, frame: FrameRef, f: F, ) -> PyResult<R> { self.with_recursion("", || { self.frames.borrow_mut().push(frame.clone()); let result = f(frame); // defer dec frame let _popped = self.frames.borrow_mut().pop(); result }) } /// Returns a basic CompileOpts instance with options accurate to the vm. Used /// as the CompileOpts for `vm.compile()`. #[cfg(feature = "rustpython-codegen")] pub fn compile_opts(&self) -> crate::compiler::CompileOpts { crate::compiler::CompileOpts { optimize: self.state.settings.optimize, } } // To be called right before raising the recursion depth. 
fn check_recursive_call(&self, _where: &str) -> PyResult<()> { if self.recursion_depth.get() >= self.recursion_limit.get() { Err(self.new_recursion_error(format!("maximum recursion depth exceeded {_where}"))) } else { Ok(()) } } pub fn current_frame(&self) -> Option<Ref<FrameRef>> { let frames = self.frames.borrow(); if frames.is_empty() { None } else { Some(Ref::map(self.frames.borrow(), |frames| { frames.last().unwrap() })) } } pub fn current_locals(&self) -> PyResult<ArgMapping> { self.current_frame() .expect("called current_locals but no frames on the stack") .locals(self) } pub fn current_globals(&self) -> Ref<PyDictRef> { let frame = self .current_frame() .expect("called current_globals but no frames on the stack"); Ref::map(frame, |f| &f.globals) } pub fn try_class(&self, module: &'static str, class: &'static str) -> PyResult<PyTypeRef> { let class = self .import(module, None, 0)? .get_attr(class, self)? .downcast() .expect("not a class"); Ok(class) } pub fn class(&self, module: &'static str, class: &'static str) -> PyTypeRef { let module = self .import(module, None, 0) .unwrap_or_else(|_| panic!("unable to import {module}")); let class = module .get_attr(class, self) .unwrap_or_else(|_| panic!("module {module:?} has no class {class}")); class.downcast().expect("not a class") } #[inline] pub fn import<'a>( &self, module_name: impl AsPyStr<'a>, from_list: Option<PyTupleTyped<PyStrRef>>, level: usize, ) -> PyResult { let module_name = module_name.as_pystr(&self.ctx); self.import_inner(module_name, from_list, level) } fn import_inner( &self, module: &Py<PyStr>, from_list: Option<PyTupleTyped<PyStrRef>>, level: usize, ) -> PyResult { // if the import inputs seem weird, e.g a package import or something, rather than just // a straight `import ident` let weird = module.as_str().contains('.') || level != 0 || from_list.as_ref().map_or(false, |x| !x.is_empty()); let cached_module = if weird { None } else { let sys_modules = self.sys_module.get_attr("modules", 
self)?; sys_modules.get_item(module, self).ok() }; match cached_module { Some(cached_module) => { if self.is_none(&cached_module) { Err(self.new_import_error( format!("import of {module} halted; None in sys.modules"), module.to_owned(), )) } else { Ok(cached_module) } } None => { let import_func = self .builtins .get_attr(identifier!(self, __import__), self) .map_err(|_| { self.new_import_error("__import__ not found".to_owned(), module.to_owned()) })?; let (locals, globals) = if let Some(frame) = self.current_frame() { (Some(frame.locals.clone()), Some(frame.globals.clone())) } else { (None, None) }; let from_list = match from_list { Some(tup) => tup.to_pyobject(self), None => self.new_tuple(()).into(), }; import_func .call((module.to_owned(), globals, locals, from_list, level), self) .map_err(|exc| import::remove_importlib_frames(self, &exc)) } } } pub fn extract_elements_with<T, F>(&self, value: &PyObject, func: F) -> PyResult<Vec<T>> where F: Fn(PyObjectRef) -> PyResult<T>, { // Extract elements from item, if possible: let cls = value.class(); let list_borrow; let slice = if cls.is(self.ctx.types.tuple_type) { value.payload::<PyTuple>().unwrap().as_slice() } else if cls.is(self.ctx.types.list_type) { list_borrow = value.payload::<PyList>().unwrap().borrow_vec(); &list_borrow } else { return self.map_pyiter(value, func); }; slice.iter().map(|obj| func(obj.clone())).collect() } pub fn map_iterable_object<F, R>(&self, obj: &PyObject, mut f: F) -> PyResult<PyResult<Vec<R>>> where F: FnMut(PyObjectRef) -> PyResult<R>, { match_class!(match obj { ref l @ PyList => { let mut i: usize = 0; let mut results = Vec::with_capacity(l.borrow_vec().len()); loop { let elem = { let elements = &*l.borrow_vec(); if i >= elements.len() { results.shrink_to_fit(); return Ok(Ok(results)); } else { elements[i].clone() } // free the lock }; match f(elem) { Ok(result) => results.push(result), Err(err) => return Ok(Err(err)), } i += 1; } } ref t @ PyTuple => 
Ok(t.iter().cloned().map(f).collect()), // TODO: put internal iterable type obj => { Ok(self.map_pyiter(obj, f)) } }) } fn map_pyiter<F, R>(&self, value: &PyObject, mut f: F) -> PyResult<Vec<R>> where F: FnMut(PyObjectRef) -> PyResult<R>, { let iter = value.to_owned().get_iter(self)?; let cap = match self.length_hint_opt(value.to_owned()) { Err(e) if e.class().is(self.ctx.exceptions.runtime_error) => return Err(e), Ok(Some(value)) => Some(value), // Use a power of 2 as a default capacity. _ => None, }; // TODO: fix extend to do this check (?), see test_extend in Lib/test/list_tests.py, // https://github.com/python/cpython/blob/v3.9.0/Objects/listobject.c#L922-L928 if let Some(cap) = cap { if cap >= isize::max_value() as usize { return Ok(Vec::new()); } } let mut results = PyIterIter::new(self, iter.as_ref(), cap) .map(|element| f(element?)) .collect::<PyResult<Vec<_>>>()?; results.shrink_to_fit(); Ok(results) } pub fn get_attribute_opt<'a>( &self, obj: PyObjectRef, attr_name: impl AsPyStr<'a>, ) -> PyResult<Option<PyObjectRef>> { let attr_name = attr_name.as_pystr(&self.ctx); match obj.get_attr_inner(attr_name, self) { Ok(attr) => Ok(Some(attr)), Err(e) if e.fast_isinstance(self.ctx.exceptions.attribute_error) => Ok(None), Err(e) => Err(e), } } pub fn set_attribute_error_context( &self, exc: &PyBaseExceptionRef, obj: PyObjectRef, name: PyStrRef, ) { if exc.class().is(self.ctx.exceptions.attribute_error) { let exc = exc.as_object(); exc.set_attr("name", name, self).unwrap(); exc.set_attr("obj", obj, self).unwrap(); } } // get_method should be used for internal access to magic methods (by-passing // the full getattribute look-up. 
pub fn get_method_or_type_error<F>( &self, obj: PyObjectRef, method_name: &'static PyStrInterned, err_msg: F, ) -> PyResult where F: FnOnce() -> String, { let method = obj .class() .get_attr(method_name) .ok_or_else(|| self.new_type_error(err_msg()))?; self.call_if_get_descriptor(&method, obj) } // TODO: remove + transfer over to get_special_method pub(crate) fn get_method( &self, obj: PyObjectRef, method_name: &'static PyStrInterned, ) -> Option<PyResult> { let method = obj.get_class_attr(method_name)?; Some(self.call_if_get_descriptor(&method, obj)) } pub(crate) fn get_str_method(&self, obj: PyObjectRef, method_name: &str) -> Option<PyResult> { let method_name = self.ctx.interned_str(method_name)?; self.get_method(obj, method_name) } #[inline] /// Checks for triggered signals and calls the appropriate handlers. A no-op on /// platforms where signals are not supported. pub fn check_signals(&self) -> PyResult<()> { #[cfg(not(target_arch = "wasm32"))] { crate::signal::check_signals(self) } #[cfg(target_arch = "wasm32")] { Ok(()) } } pub(crate) fn push_exception(&self, exc: Option<PyBaseExceptionRef>) { let mut excs = self.exceptions.borrow_mut(); let prev = std::mem::take(&mut *excs); excs.prev = Some(Box::new(prev)); excs.exc = exc } pub(crate) fn pop_exception(&self) -> Option<PyBaseExceptionRef> { let mut excs = self.exceptions.borrow_mut(); let cur = std::mem::take(&mut *excs); *excs = *cur.prev.expect("pop_exception() without nested exc stack"); cur.exc } pub(crate) fn take_exception(&self) -> Option<PyBaseExceptionRef> { self.exceptions.borrow_mut().exc.take() } pub(crate) fn current_exception(&self) -> Option<PyBaseExceptionRef> { self.exceptions.borrow().exc.clone() } pub(crate) fn set_exception(&self, exc: Option<PyBaseExceptionRef>) { // don't be holding the RefCell guard while __del__ is called let prev = std::mem::replace(&mut self.exceptions.borrow_mut().exc, exc); drop(prev); } pub(crate) fn contextualize_exception(&self, exception: 
&PyBaseExceptionRef) { if let Some(context_exc) = self.topmost_exception() { if !context_exc.is(exception) { let mut o = context_exc.clone(); while let Some(context) = o.context() { if context.is(exception) { o.set_context(None); break; } o = context; } exception.set_context(Some(context_exc)) } } } pub(crate) fn topmost_exception(&self) -> Option<PyBaseExceptionRef> { let excs = self.exceptions.borrow(); let mut cur = &*excs; loop { if let Some(exc) = &cur.exc { return Some(exc.clone()); } cur = cur.prev.as_deref()?; } } pub fn handle_exit_exception(&self, exc: PyBaseExceptionRef) -> u8 { if exc.fast_isinstance(self.ctx.exceptions.system_exit) { let args = exc.args(); let msg = match args.as_slice() { [] => return 0, [arg] => match_class!(match arg { ref i @ PyInt => { use num_traits::cast::ToPrimitive; return i.as_bigint().to_u8().unwrap_or(0); } arg => { if self.is_none(arg) { return 0; } else { arg.str(self).ok() } } }), _ => args.as_object().repr(self).ok(), }; if let Some(msg) = msg { let stderr = stdlib::sys::PyStderr(self); writeln!(stderr, "{msg}"); } 1 } else if exc.fast_isinstance(self.ctx.exceptions.keyboard_interrupt) { #[allow(clippy::if_same_then_else)] { self.print_exception(exc); #[cfg(unix)] { let action = SigAction::new( nix::sys::signal::SigHandler::SigDfl, SaFlags::SA_ONSTACK, SigSet::empty(), ); let result = unsafe { sigaction(SIGINT, &action) }; if result.is_ok() { interpreter::flush_std(self); kill(getpid(), SIGINT).expect("Expect to be killed."); } (libc::SIGINT as u8) + 128u8 } #[cfg(not(unix))] { 1 } } } else { self.print_exception(exc); 1 } } #[doc(hidden)] pub fn __module_set_attr( &self, module: &Py<PyModule>, attr_name: &'static PyStrInterned, attr_value: impl Into<PyObjectRef>, ) -> PyResult<()> { let val = attr_value.into(); module .as_object() .generic_setattr(attr_name, PySetterValue::Assign(val), self) } pub fn insert_sys_path(&self, obj: PyObjectRef) -> PyResult<()> { let sys_path = self.sys_module.get_attr("path", 
self).unwrap(); self.call_method(&sys_path, "insert", (0, obj))?; Ok(()) } pub fn run_module(&self, module: &str) -> PyResult<()> { let runpy = self.import("runpy", None, 0)?; let run_module_as_main = runpy.get_attr("_run_module_as_main", self)?; run_module_as_main.call((module,), self)?; Ok(()) } } impl AsRef<Context> for VirtualMachine { fn as_ref(&self) -> &Context { &self.ctx } } fn core_frozen_inits() -> impl Iterator<Item = (&'static str, FrozenModule)> { let iter = std::iter::empty(); macro_rules! ext_modules { ($iter:ident, $($t:tt)*) => { let $iter = $iter.chain(py_freeze!($($t)*)); }; } // keep as example but use file one now // ext_modules!( // iter, // source = "initialized = True; print(\"Hello world!\")\n", // module_name = "__hello__", // ); // Python modules that the vm calls into, but are not actually part of the stdlib. They could // in theory be implemented in Rust, but are easiest to do in Python for one reason or another. // Includes _importlib_bootstrap and _importlib_bootstrap_external ext_modules!( iter, dir = "./Lib/python_builtins", crate_name = "rustpython_compiler_core" ); // core stdlib Python modules that the vm calls into, but are still used in Python // application code, e.g. copyreg // FIXME: Initializing core_modules here results duplicated frozen module generation for core_modules. 
// We need a way to initialize this modules for both `Interpreter::without_stdlib()` and `InterpreterConfig::new().init_stdlib().interpreter()` // #[cfg(not(feature = "freeze-stdlib"))] ext_modules!( iter, dir = "./Lib/core_modules", crate_name = "rustpython_compiler_core" ); iter } #[test] fn test_nested_frozen() { use rustpython_vm as vm; vm::Interpreter::with_init(Default::default(), |vm| { // vm.add_native_modules(rustpython_stdlib::get_module_inits()); vm.add_frozen(rustpython_vm::py_freeze!(dir = "../extra_tests/snippets")); }) .enter(|vm| { let scope = vm.new_scope_with_builtins(); let source = "from dir_module.dir_module_inner import value2"; let code_obj = vm .compile(source, vm::compiler::Mode::Exec, "<embedded>".to_owned()) .map_err(|err| vm.new_syntax_error(&err, Some(source))) .unwrap(); if let Err(e) = vm.run_code_obj(code_obj, scope) { vm.print_exception(e); panic!(); } }) }
// // Sysinfo // // Copyright (c) 2017 Guillaume Gomez // use ::DiskExt; use ::utils; use libc::statfs; use std::mem; use std::fmt::{Debug, Error, Formatter}; use std::path::{Path, PathBuf}; use std::ffi::{OsStr, OsString}; /// Enum containing the different handled disks types. #[derive(Debug, PartialEq, Clone, Copy)] pub enum DiskType { /// HDD type. HDD, /// SSD type. SSD, /// Unknown type. Unknown(isize), } impl From<isize> for DiskType { fn from(t: isize) -> DiskType { match t { 0 => DiskType::HDD, 1 => DiskType::SSD, id => DiskType::Unknown(id), } } } pub fn new(name: OsString, mount_point: &Path, type_: DiskType) -> Disk { let mount_point_cpath = utils::to_cpath(mount_point); let mut total_space = 0; let mut available_space = 0; let mut file_system = None; unsafe { let mut stat: statfs = mem::zeroed(); if statfs(mount_point_cpath.as_ptr() as *const i8, &mut stat) == 0 { total_space = u64::from(stat.f_bsize) * stat.f_blocks; available_space = stat.f_bfree * stat.f_blocks; let mut vec = Vec::with_capacity(stat.f_fstypename.len()); for x in &stat.f_fstypename { if *x == 0 { break } vec.push(*x as u8); } file_system = Some(vec); } } Disk { type_, name, file_system: file_system.unwrap_or_else(|| b"<Unknown>".to_vec()), mount_point: mount_point.to_owned(), total_space, available_space, } } /// Struct containing a disk information. 
pub struct Disk {
    // Kind of disk (HDD/SSD/unknown id) as reported at construction time.
    type_: DiskType,
    // Device name (e.g. volume name), as passed to `new`.
    name: OsString,
    // Raw bytes of the file-system type name copied out of statfs's
    // `f_fstypename`; `b"<Unknown>"` when statfs failed.
    file_system: Vec<u8>,
    mount_point: PathBuf,
    // Sizes in bytes, computed from statfs block counts in `new()`.
    total_space: u64,
    available_space: u64,
}

impl Debug for Disk {
    // Renders all fields on one line via the DiskExt getters, e.g.
    // `Disk("name")[FS: ...][Type: HDD] mounted on "/": 123/456 B`.
    fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
        write!(fmt,
               "Disk({:?})[FS: {:?}][Type: {:?}] mounted on {:?}: {}/{} B",
               self.get_name(), self.get_file_system(), self.get_type(),
               self.get_mount_point(), self.get_available_space(), self.get_total_space())
    }
}

impl DiskExt for Disk {
    fn get_type(&self) -> DiskType {
        self.type_
    }

    fn get_name(&self) -> &OsStr {
        &self.name
    }

    // Returned as raw bytes, not a str: statfs gives a C byte string with no
    // guaranteed encoding.
    fn get_file_system(&self) -> &[u8] {
        &self.file_system
    }

    fn get_mount_point(&self) -> &Path {
        &self.mount_point
    }

    fn get_total_space(&self) -> u64 {
        self.total_space
    }

    fn get_available_space(&self) -> u64 {
        self.available_space
    }

    // Re-queries statfs for the mount point and refreshes the available
    // space. Returns true on success, false if the statfs call failed.
    // NOTE(review): only `available_space` is refreshed here — total space is
    // assumed constant after construction; confirm that is intended.
    fn update(&mut self) -> bool {
        unsafe {
            // SAFETY: statfs is zeroable plain-old-data; the kernel fills it
            // in when the call returns 0.
            let mut stat: statfs = mem::zeroed();
            let mount_point_cpath = utils::to_cpath(&self.mount_point);
            if statfs(mount_point_cpath.as_ptr() as *const i8, &mut stat) == 0 {
                // Bytes available = block size * blocks available to
                // non-superuser (f_bavail).
                self.available_space = u64::from(stat.f_bsize) * stat.f_bavail;
                true
            } else {
                false
            }
        }
    }
}
use crate::attribute::Attribute;

/// Enum returned from Handler implementations to instruct the parser to
/// continue parsing or cancel parsing.
#[derive(PartialEq)]
pub enum HandlerResult {
    Continue, // continue (decode the element's data)
    Cancel,   // stop parsing
}

/// The Handler trait defines a callback interface that is called when
/// parsing a DICOM DataSet allowing the parsed data to be processed.
/// Note that DICOM DataSets are tree like due to sequences, implementations
/// of this trait must be aware and keep track of this.
///
/// All methods have no-op default implementations, so implementors only need
/// to override the callbacks they care about.
pub trait Handler {
    /// Invoked every time an Attribute is parsed. Note that the data for the
    /// attribute is provided via the data function below and may have not been
    /// provided to the parser yet (due to streaming).
    ///
    /// # Arguments
    ///
    /// * `_attribute` - The attribute parsed (Tag, VR, Length)
    /// * `_position` - The offset from the beginning of the stream of the
    /// first byte of the Attribute
    /// * `_data_offset` - The offset from _position of the attribute's value
    /// field
    fn attribute(
        &mut self,
        _attribute: &Attribute,
        _position: usize,
        _data_offset: usize,
    ) -> HandlerResult {
        HandlerResult::Continue
    }

    /// Invoked after attribute() with the value field/data for the attribute.
    /// This function may be invoked multiple times for the same attribute
    /// due to streaming. Handler implementations are responsible for
    /// concatenating the received data in this case
    ///
    /// # Arguments
    /// * `_attribute` - the Attribute corresponding to this data
    /// * `_data` - the raw bytes for the value field
    /// * `_complete` - true if the data is complete, false if not
    fn data(&mut self, _attribute: &Attribute, _data: &[u8], _complete: bool) {}

    /// Invoked after attribute() for Sequence Attributes instead of data().
    /// A corresponding call to end_sequence() will be made once the value
    /// field for the sequence is fully parsed.
    fn start_sequence(&mut self, _attribute: &Attribute) {}

    /// Invoked for each sequence item parsed in a sequence attribute. A
    /// corresponding call to end_sequence_item() will be made once the
    /// sequence item is fully parsed. Parsing a sequence item includes
    /// zero or more calls to attribute() for each attribute in the sequence
    /// item
    fn start_sequence_item(&mut self, _attribute: &Attribute) {}

    /// Invoked after all attributes in a sequence item are parsed.
    /// Corresponds to exactly one prior call to start_sequence_item()
    fn end_sequence_item(&mut self, _attribute: &Attribute) {}

    /// Invoked once the value field for a sequence is fully parsed.
    /// Corresponds to exactly one prior call to start_sequence
    fn end_sequence(&mut self, _attribute: &Attribute) {}

    /// Invoked when the basic offset table is parsed in an encapsulated pixel
    /// data attribute. Note that basic offset table is not required so may be
    /// empty (or zero length).
    /// This function may be invoked multiple times for the same attribute
    /// due to streaming. Handler implementations are responsible for
    /// concatenating the received data in this case
    fn basic_offset_table(
        &mut self,
        _attribute: &Attribute,
        _data: &[u8],
        _complete: bool,
    ) -> HandlerResult {
        HandlerResult::Continue
    }

    /// Invoked for each pixel data fragment parsed in an encapsulated pixel
    /// data attribute. Note that a given image frame may consist of multiple
    /// fragments (although this may only occur in single frame - need to
    /// confirm this).
    /// This function may be invoked multiple times for the same attribute
    /// due to streaming. Handler implementations are responsible for
    /// concatenating the received data in this case
    fn pixel_data_fragment(
        &mut self,
        _attribute: &Attribute,
        _fragment_number: usize,
        _data: &[u8],
        _complete: bool,
    ) -> HandlerResult {
        HandlerResult::Continue
    }
}

pub mod cancel;
pub mod tee;
use crate::prelude::*;
use std::sync::{Arc, Mutex};
use std::time::Duration;

/// Records a one-shot command buffer via a closure, submits it to a queue
/// and blocks until execution has finished, returning the closure's result.
pub struct SingleSubmit<'a, F, T>
where
    F: FnOnce(&Arc<CommandBuffer>) -> VerboseResult<T>,
{
    command_buffer: &'a Arc<CommandBuffer>,
    queue: &'a Arc<Mutex<Queue>>,
    f: F,
    timeout: Option<Duration>,
}

impl<'a, F, T> SingleSubmit<'a, F, T>
where
    F: FnOnce(&Arc<CommandBuffer>) -> VerboseResult<T>,
{
    /// Creates a submission with no timeout configured; completion is then
    /// awaited via `Queue::wait_idle` instead of a fence.
    pub fn builder(
        command_buffer: &'a Arc<CommandBuffer>,
        queue: &'a Arc<Mutex<Queue>>,
        f: F,
    ) -> Self {
        SingleSubmit {
            command_buffer,
            queue,
            f,
            timeout: None,
        }
    }

    /// Switches completion handling to a fence that is waited on for at
    /// most `timeout`.
    pub fn wait_for_timeout(mut self, timeout: Duration) -> Self {
        self.timeout = Some(timeout);
        self
    }

    /// Begins the command buffer (one-time-submit), runs the recording
    /// closure, ends the buffer, submits it, and waits for completion.
    pub fn submit(self) -> VerboseResult<T> {
        let begin_info =
            VkCommandBufferBeginInfo::new(VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT);
        self.command_buffer.begin(begin_info)?;

        let value = (self.f)(&self.command_buffer)?;

        self.command_buffer.end()?;

        let submit_info = SubmitInfo::default().add_command_buffer(self.command_buffer);
        let queue = self.queue.lock()?;

        if let Some(timeout) = self.timeout {
            // Fence-based completion: wait at most `timeout` for the GPU.
            let fence = Fence::builder().build(self.command_buffer.device().clone())?;
            queue.submit(Some(&fence), &[submit_info])?;
            self.command_buffer
                .device()
                .wait_for_fences(&[&fence], true, timeout)?;
        } else {
            // No timeout configured: drain the whole queue instead.
            queue.submit(None, &[submit_info])?;
            queue.wait_idle()?;
        }

        Ok(value)
    }
}
pub mod artists; pub mod events; pub mod organization_invites; pub mod organizations; pub mod regions; pub mod ticket_types; pub mod tickets; pub mod users; pub mod venues;
#![feature(plugin)]
#![feature(field_init_shorthand)]
#![plugin(rocket_codegen)]

extern crate rocket;
extern crate rocket_contrib;
extern crate mpd;
extern crate lazy_static;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;

// Small rocket web frontend for an MPD (Music Player Daemon) server.
//
// steps:
// 1. show currently playing on start page
// 2. show playlist
// 3. pause/play

use std::io;
use std::net::TcpStream;
use std::convert::From;
use std::collections::BTreeMap;

//use rocket::response::NamedFile;
use rocket::response::Redirect;
use rocket_contrib::Template;
use rocket_contrib::JSON;
use mpd::Client as MpdClient;
use mpd::song::Song as MpdSong;

/// Serializable wrappers around `mpd` crate types so they can be rendered
/// through serde (templates and JSON responses).
mod mpd_wrappers {
    use super::*;

    #[derive(Serialize)]
    pub struct Song {
        /// filename
        pub file: String,
        /// name (for streams)
        pub name: Option<String>,
        /// title
        pub title: Option<String>,
        pub tags: BTreeMap<String, String>,
        // true when this queue entry is the currently playing song;
        // filled in after conversion by PlayerStatus::current()
        pub running: bool
    }

    impl From<MpdSong> for Song {
        fn from(song: MpdSong) -> Song {
            Song {
                file: song.file,
                name: song.name,
                title: song.title,
                tags: song.tags,
                running: false,
            }
        }
    }
}

use mpd_wrappers::Song;

/// Connects (and optionally authenticates) to the MPD server.
///
/// NOTE(review): `MPD_HOST` / `MPD_PW` are read with `option_env!`, i.e. at
/// *compile* time, not at runtime — confirm this is intended.
/// Panics if the connection or login fails.
fn get_client() -> MpdClient {
    let stream = TcpStream::connect(option_env!("MPD_HOST").unwrap_or("localhost:6600")).unwrap();
    let mut client = MpdClient::new(stream).unwrap();
    if let Some(password) = option_env!("MPD_PW") {
        client.login(password).unwrap();
    }
    client
}

/// Snapshot of the player: the current song plus the play queue.
#[derive(Serialize)]
struct PlayerStatus {
    song: Option<Song>,
    queue: Vec<Song>
}

impl PlayerStatus {
    /// Queries MPD for the current song and the queue, marking queue entries
    /// whose title matches the current song as `running`.
    ///
    /// Panics on any MPD or serialization error.
    pub fn current() -> PlayerStatus {
        let mut client = get_client();
        let song = client.currentsong();
        //println!("{:?}", song);
        let song: Option<Song> = song
            .unwrap(/*Result*/)
            .map(|song| Song::from(song));
        // Debug output of the current song as JSON.
        println!("{}", serde_json::ser::to_string(&song).unwrap());
        let queue = client.queue()
            .unwrap()
            .into_iter()
            //.map(Into::into)
            .map(|s| Song::from(s))
            .map(|mut s| {
                // NOTE(review): matching by title marks *every* queue entry
                // with the same title as running, not just the playing one.
                if let Some(ref song) = song {
                    s.running = song.title == s.title
                }
                s
            })
            .collect::<Vec<_>>();
        PlayerStatus { song, queue }
    }
}

/// Renders the status page with the current player state.
#[get("/current")]
fn current() -> Template {
    Template::render("index", &PlayerStatus::current())
}

/// Same data as `/current`, served as JSON for client-side refresh.
#[get("/current.js")]
fn current_js() -> JSON<PlayerStatus> {
    JSON(PlayerStatus::current())
}

/// Skips back to the previous song, then redirects to the start page.
#[post("/prev")]
fn prev() -> Redirect {
    let mut client = get_client();
    client.prev().unwrap();
    Redirect::to("/")
}

/// Skips to the next song; maps MPD errors to an `io::Error`.
#[post("/next")]
fn next() -> io::Result<Redirect> {
    let mut client = get_client();
    client.next()
        .map_err(|_| io::Error::new(io::ErrorKind::Other, "cannot go further"))?;
    Ok(Redirect::to("/"))
}

/// Resumes playback.
#[post("/play")]
fn play() -> Redirect {
    let mut client = get_client();
    client.play().unwrap();
    Redirect::to("/")
}

/// Pauses playback.
#[post("/pause")]
fn pause() -> Redirect {
    let mut client = get_client();
    client.pause(true).unwrap();
    Redirect::to("/")
}

/// Root redirects to the status page.
#[get("/")]
fn home() -> Redirect {
    //fn home() -> io::Result<NamedFile> {
    //NamedFile::open("static/index.html")
    Redirect::to("/current")
}

/// Mounts all routes at "/" and launches the rocket server.
fn main() {
    rocket::ignite()
        .mount("/", routes![home])
        .mount("/", routes![play, pause, next, prev])
        .mount("/", routes![current, current_js])
        .launch();
}
/* * Copyright 2020 Fluence Labs Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use async_std::task; use faas_api::{service, Address, FunctionCall}; use fluence_libp2p::build_memory_transport; use fluence_server::ServerBehaviour; use futures::select; use libp2p::{ identity::{ ed25519::{Keypair, PublicKey}, PublicKey::Ed25519, }, PeerId, Swarm, }; use parity_multiaddr::Multiaddr; use serde_json::{json, Value}; use std::time::{Duration, Instant}; use trust_graph::{Certificate, TrustGraph}; use uuid::Uuid; /// Utility functions for tests. 
/// Boxed-error result alias for test helpers.
pub(crate) type Result<T> = core::result::Result<T, Box<dyn std::error::Error>>;

// Common timeouts used by the integration tests.
pub(crate) static TIMEOUT: Duration = Duration::from_secs(5);
pub(crate) static SHORT_TIMEOUT: Duration = Duration::from_millis(100);
pub(crate) static KAD_TIMEOUT: Duration = Duration::from_millis(500);

/// Builds a "certificates" service call asking for `peer_id`'s certificates.
pub(crate) fn certificates_call(peer_id: PeerId, reply_to: Address) -> FunctionCall {
    FunctionCall {
        uuid: uuid(),
        target: Some(service!("certificates")),
        reply_to: Some(reply_to),
        arguments: json!({ "peer_id": peer_id.to_string(), "msg_id": uuid() }),
        name: None,
    }
}

/// Builds an "add_certificates" call carrying the given certs (stringified).
pub(crate) fn add_certificates_call(
    peer_id: PeerId,
    reply_to: Address,
    certs: Vec<Certificate>,
) -> FunctionCall {
    let certs: Vec<_> = certs.into_iter().map(|c| c.to_string()).collect();
    FunctionCall {
        uuid: uuid(),
        target: Some(service!("add_certificates")),
        reply_to: Some(reply_to),
        arguments: json!({
            "peer_id": peer_id.to_string(),
            "msg_id": uuid(),
            "certificates": certs
        }),
        name: None,
    }
}

/// Builds a "provide" call registering `service_id`, with `reply_to` as the
/// provider address.
pub(crate) fn provide_call(service_id: &str, reply_to: Address) -> FunctionCall {
    FunctionCall {
        uuid: uuid(),
        target: Some(service!("provide")),
        reply_to: Some(reply_to),
        arguments: json!({ "service_id": service_id }),
        name: None,
    }
}

/// Builds a call targeting the service itself, replying to `consumer`.
pub(crate) fn service_call(service_id: &str, consumer: Address) -> FunctionCall {
    FunctionCall {
        uuid: uuid(),
        target: Some(service!(service_id)),
        reply_to: Some(consumer),
        arguments: Value::Null,
        name: None,
    }
}

/// Builds a bare "reply" call addressed to `reply_to`.
pub(crate) fn reply_call(reply_to: Address) -> FunctionCall {
    FunctionCall {
        uuid: uuid(),
        target: Some(reply_to),
        reply_to: None,
        arguments: Value::Null,
        name: Some("reply".into()),
    }
}

/// Fresh random UUID string, used as call/message ids.
pub(crate) fn uuid() -> String {
    Uuid::new_v4().to_string()
}

/// Deserializes a fixed test certificate (line-oriented trust-graph format).
pub(crate) fn get_cert() -> Certificate {
    use std::str::FromStr;

    Certificate::from_str(
        r#"11
1111
EqpwyPYjbRbGPcp7Q1UtSnkeCDG9x3JrY96strN4uaXv
4Td1uTWzqWp1PyUzoUZyvWNjgPWQKpMFDYeqzoAJSXHQtkVispifSrnnqBFM8yFPkgmSHwQ4kTuACBifjoRryvFK
18446744073709551615
1589892496362
DYVjCCtVPnJNEDfRDzYn6a2GKJ6Qn4FNVwDhEAQBvdQS
3Tt8UxBr2pixgMMbRM4gnJDkX3zH3NnS5q4A5fCj3taMLpS2QathgUqkW4KHysQLeRoGxy3JNVtYEWLsL6kySrqv
1621450096362
1589892496362
HFF3V9XXbhdTLWGVZkJYd9a7NyuD5BLWLdwc4EFBcCZa
38FUPbDMrrb1FaRoRTsupjqysaH3vvpJJgp9NxLFBjBYoU353bb6LkDZLDsNwvnpVysrs6TdHeZAAe3iXrJuGLkn
101589892496363
1589892496363
"#,
    )
    .expect("deserialize cert")
}

#[allow(dead_code)]
// Enables logging, filtering out unnecessary details
pub(crate) fn enable_logs() {
    use log::LevelFilter::{Debug, Info};

    env_logger::builder()
        .filter_level(Debug)
        .filter(Some("yamux::connection::stream"), Info)
        .filter(Some("tokio_threadpool"), Info)
        .filter(Some("tokio_reactor"), Info)
        .filter(Some("mio"), Info)
        .filter(Some("tokio_io"), Info)
        .filter(Some("soketto"), Info)
        .filter(Some("yamux"), Info)
        .filter(Some("multistream_select"), Info)
        .filter(Some("libp2p_secio"), Info)
        .filter(Some("libp2p_websocket::framed"), Info)
        .filter(Some("libp2p_ping"), Info)
        .filter(Some("libp2p_core::upgrade::apply"), Info)
        .filter(Some("libp2p_kad::kbucket"), Info)
        .filter(Some("libp2p_plaintext"), Info)
        .filter(Some("libp2p_identify::protocol"), Info)
        .try_init()
        .ok();
}

/// Peer id and listen address of a swarm created by `make_swarms*`.
pub(crate) struct CreatedSwarm(pub PeerId, pub Multiaddr);

/// Creates `n` interconnected swarms with no trust configuration.
pub(crate) fn make_swarms(n: usize) -> Vec<CreatedSwarm> {
    make_swarms_with(n, |bs, maddr| create_swarm(bs, maddr, None))
}

/// Creates `n` swarms via `create_swarm`, dials every swarm's bootstrap
/// nodes and blocks until `n * (n - 1)` dial-side connections are
/// established (a full mesh).
pub(crate) fn make_swarms_with<F>(n: usize, create_swarm: F) -> Vec<CreatedSwarm>
where
    F: Fn(Vec<Multiaddr>, Multiaddr) -> (PeerId, Swarm<ServerBehaviour>),
{
    use futures::stream::FuturesUnordered;
    use futures_util::StreamExt;
    use libp2p::core::ConnectedPoint::Dialer;
    use libp2p::swarm::SwarmEvent::ConnectionEstablished;
    use std::sync::atomic::{AtomicUsize, Ordering};
    use std::sync::Arc;

    // Give each swarm every *other* swarm's address as its bootstrap list.
    let addrs = (0..n).map(|_| create_maddr()).collect::<Vec<_>>();
    let mut swarms = addrs
        .iter()
        .map(|addr| {
            #[rustfmt::skip]
            let addrs = addrs.iter().filter(|&a| a != addr).cloned().collect::<Vec<_>>();
            let (id, swarm) = create_swarm(addrs, addr.clone());
            (CreatedSwarm(id, addr.clone()), swarm)
        })
        .collect::<Vec<_>>();

    #[rustfmt::skip]
    swarms.iter_mut().for_each(|(_, s)| s.dial_bootstrap_nodes());

    let (infos, mut swarms): (Vec<CreatedSwarm>, Vec<_>) = swarms.into_iter().unzip();

    // Drive all swarms on a background task, counting established
    // dial-side (outgoing) connections.
    let connected = Arc::new(AtomicUsize::new(0));
    let shared_connected = connected.clone();
    let _swarms_handle = task::spawn(async move {
        let connected = shared_connected;
        let start = Instant::now();
        let mut local_start = Instant::now();
        loop {
            let swarms = swarms
                .iter_mut()
                .map(|s| Swarm::next_event(s))
                .collect::<FuturesUnordered<_>>();
            let mut swarms = swarms.fuse();
            select!(
                event = swarms.next() => {
                    if let Some(ConnectionEstablished { endpoint: Dialer { .. }, .. }) = event {
                        connected.fetch_add(1, Ordering::SeqCst);
                        let total = connected.load(Ordering::Relaxed);
                        // Periodic progress trace every 10 connections.
                        if total % 10 == 0 {
                            log::trace!(
                                "established {: <10} +{: <10} (= {:<5})",
                                total,
                                format_args!("{:.3}s", start.elapsed().as_secs_f32()),
                                format_args!("{}ms", local_start.elapsed().as_millis())
                            );
                            local_start = Instant::now();
                        }
                    }
                },
            )
        }
    });

    let now = Instant::now();
    // NOTE(review): busy-wait spin until the mesh is complete; burns a CPU
    // core and never times out if a connection fails to establish.
    while connected.load(Ordering::SeqCst) < (n * (n - 1)) {}
    log::debug!("Connection took {}s", now.elapsed().as_secs_f32());

    infos
}

/// Trust configuration for `create_swarm`.
#[derive(Default, Clone)]
pub(crate) struct Trust {
    pub(crate) root_weights: Vec<(PublicKey, u32)>,
    pub(crate) certificates: Vec<Certificate>,
    pub(crate) cur_time: Duration,
}

/// Creates a single server swarm: fresh ed25519 identity, in-memory
/// transport, the given bootstrap list and optional trust setup; starts
/// listening on `listen_on`.
pub(crate) fn create_swarm(
    bootstraps: Vec<Multiaddr>,
    listen_on: Multiaddr,
    trust: Option<Trust>,
) -> (PeerId, Swarm<ServerBehaviour>) {
    use libp2p::identity;

    let kp = Keypair::generate();
    let public_key = Ed25519(kp.public());
    let peer_id = PeerId::from(public_key);

    let mut swarm: Swarm<ServerBehaviour> = {
        use identity::Keypair::Ed25519;

        // Seed the trust graph with the configured root weights and
        // preload certificates at the configured time.
        let root_weights: &[_] = trust.as_ref().map_or(&[], |t| &t.root_weights);
        let mut trust_graph = TrustGraph::new(root_weights.to_vec());
        if let Some(trust) = trust {
            for cert in trust.certificates.into_iter() {
                trust_graph.add(cert, trust.cur_time).expect("add cert");
            }
        }

        let server = ServerBehaviour::new(
            kp.clone(),
            peer_id.clone(),
            vec![listen_on.clone()],
            trust_graph,
            bootstraps,
        );
        let transport = build_memory_transport(Ed25519(kp));

        Swarm::new(transport, server, peer_id.clone())
    };

    Swarm::listen_on(&mut swarm, listen_on).unwrap();

    (peer_id, swarm)
}

/// Random in-memory multiaddr for tests.
/// NOTE(review): `1 + rand::random::<u64>()` overflows (panics in debug
/// builds) when the random value is u64::MAX — presumably the `+ 1` is
/// meant to avoid port 0.
fn create_maddr() -> Multiaddr {
    use libp2p::core::multiaddr::Protocol;

    let port = 1 + rand::random::<u64>();
    let addr: Multiaddr = Protocol::Memory(port).into();
    addr
}
use proconio::input;

use binary_search_range::BinarySearchRange;

/// For each a in A, finds the largest b with a - d <= b <= a + d and
/// maximizes a + b; prints -1 when no compatible pair exists.
fn main() {
    input! {
        n: usize,
        m: usize,
        d: u64,
        a: [u64; n],
        mut b: [u64; m],
    };

    // A - b <= d
    // b >= A - d
    // -d <= A - b
    // b <= A + d
    b.sort();

    let mut ans = 0;
    for x in a {
        // All candidates b within [x - d, x + d]; b is sorted, so the best
        // (largest) candidate sits at rng.end - 1.
        let rng = b.range(x.saturating_sub(d)..(x + d + 1));
        if rng.start < rng.end {
            let y = b[rng.end - 1];
            ans = ans.max(x + y);
        }
    }

    // NOTE(review): 0 doubles as the "no pair found" sentinel; if x + y == 0
    // were a valid answer this would wrongly print -1 — presumably excluded
    // by the problem constraints.
    if ans == 0 {
        println!("-1");
    } else {
        println!("{}", ans);
    }
}
use std::fmt;

use crate::cell::Cell;

/// A single table row: an ordered list of cells.
#[derive(Clone)]
pub struct Row {
    pub cells: Vec<Cell>,
}

impl fmt::Display for Row {
    /// A row renders as the concatenation of its cells, in order.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Stream each cell straight into the formatter; no intermediate
        // String is needed since the output is a plain concatenation.
        for cell in &self.cells {
            write!(f, "{}", cell)?;
        }
        Ok(())
    }
}

impl Row {
    /// Number of cells in this row.
    pub fn len(&self) -> usize {
        self.cells.len()
    }
}
use base64;
use rand::prelude::*;
use reqwest::Client;
use serde_json::{from_str, json, Map, Value};
use urlencoding;

// NOTE(review): a real Twitter access token and secret used to be pasted in
// a comment here — treat them as compromised: revoke them and purge them
// from version-control history.

/// Subset of a tweet's fields extracted from a search API response.
#[derive(Debug)]
pub struct TweetInfo {
    user_id: i64,
    user_name: String,
    user_screen_name: String,
    tweet_id: i64,
    tweet_text: String,
    truncated: bool,
}

impl TweetInfo {
    /// Numeric id of the tweet's author.
    pub fn user_id(&self) -> i64 {
        return self.user_id;
    }

    /// Display name of the tweet's author.
    pub fn user_name(&self) -> &String {
        return &self.user_name;
    }

    /// @handle of the tweet's author.
    pub fn user_screen_name(&self) -> &String {
        return &self.user_screen_name;
    }

    /// Numeric id of the tweet.
    pub fn tweet_id(&self) -> i64 {
        return self.tweet_id;
    }

    /// Text body of the tweet.
    pub fn tweet_text(&self) -> &String {
        return &self.tweet_text;
    }

    /// Whether Twitter reported the tweet text as truncated.
    pub fn truncated(&self) -> bool {
        return self.truncated;
    }
}

/// Pagination metadata from the response's "search_metadata" object.
#[derive(Debug)]
pub struct SearchMetadata {
    completed_in: f64,
    max_id: i64,
    // query string for the next page, when Twitter provided one
    next_results: Option<String>,
}

/// OAuth2 bearer token.
#[derive(Clone, Debug)]
pub struct TwitterToken {
    token: String,
}

/// Raw JSON of a search response.
#[derive(Debug)]
pub struct SearchJSON {
    val: Value,
}

/// Raw JSON of an auth response.
#[derive(Debug)]
pub struct AuthJSON {
    val: Value,
}

/// Any JSON payload this module produces.
#[derive(Debug)]
pub enum ApiJSON {
    Auth(AuthJSON),
    Search(SearchJSON),
}

/// Errors surfaced by this module.
#[derive(Debug)]
pub enum Error {
    Reqwest(reqwest::Error),
    Serde(serde_json::Error),
    String(String),
    // requesting the next page when the metadata has none
    NoNextResult(),
}

impl From<reqwest::Error> for Error {
    fn from(e: reqwest::Error) -> Self {
        return Error::Reqwest(e);
    }
}

impl From<serde_json::Error> for Error {
    fn from(e: serde_json::Error) -> Self {
        return Error::Serde(e);
    }
}

impl From<String> for Error {
    fn from(s: String) -> Self {
        return Error::String(s);
    }
}

/// Builds the base64("key:secret") credential string used by the OAuth2
/// client-credentials grant.
pub fn create_bearer_token_creds(key: &str, secret: &str) -> String {
    let combined = format!("{}:{}", key, secret);
    let b64 = base64::encode(&combined);
    return b64;
}

/// Extracts the "access_token" field from an auth response.
/// NOTE(review): unwraps if the field is missing or not a string, despite
/// the fallible return type.
fn get_access_token_from_json(auth: &AuthJSON) -> Result<TwitterToken, serde_json::Error> {
    let token = auth.val["access_token"].as_str().unwrap().to_string();
    return Ok(TwitterToken { token });
}

/// Exchanges the credential string for a bearer token via
/// POST oauth2/token with grant_type=client_credentials.
pub async fn request_bearer_token(creds: &str) -> Result<TwitterToken, Error> {
    let client = reqwest::Client::new();
    let params = [("grant_type", "client_credentials")];
    let content = client
        .post("https://api.twitter.com/oauth2/token")
        .header("Authorization", format!("Basic {}", creds))
        .header(
            "Content-Type",
            "application/x-www-form-urlencoded;charset=UTF-8",
        )
        .form(&params)
        .send()
        .await?
        .text()
        .await?;
    let auth_val: serde_json::Value = serde_json::from_str(content.as_str())?;
    let auth = AuthJSON { val: auth_val };
    let token_result = get_access_token_from_json(&auth);
    let token = match token_result {
        Ok(res) => res,
        _ => return Err(Error::String("Fail".to_string())),
    };
    return Ok(token);
}

/// Parses a raw search response body into SearchJSON.
fn get_tweet_json_from_response(response: &str) -> Result<SearchJSON, Error> {
    let resp_json: Value = match from_str(response) {
        Ok(val) => val,
        Err(e) => return Err(Error::from(e)),
    };
    return Ok(SearchJSON { val: resp_json });
}

/// Borrows the "statuses" array (the tweets) out of a search response.
pub fn get_tweets_from_json(search_json: &SearchJSON) -> Result<&Vec<Value>, Error> {
    let status_res = &search_json.val["statuses"];
    let tweets = match status_res {
        Value::Array(v) => Ok(v),
        _ => Err(Error::from("Expect Array for 'statuses'".to_string())),
    };
    return tweets;
}

/// Extracts pagination metadata from a search response.
/// NOTE(review): unwraps on a missing/mistyped "completed_in" or "max_id".
pub fn get_search_metadata_from_json(search_json: &SearchJSON) -> Result<SearchMetadata, Error> {
    let status_res = &search_json.val["search_metadata"];
    let metadata_raw = match status_res {
        Value::Object(v) => v,
        _ => {
            return Err(Error::from(
                "Expect Object for 'search_metadata'".to_string(),
            ))
        }
    };
    let has_next = metadata_raw.contains_key("next_results");
    let metadata = SearchMetadata {
        completed_in: metadata_raw["completed_in"].as_f64().unwrap(),
        max_id: metadata_raw["max_id"].as_i64().unwrap(),
        next_results: match has_next {
            true => Some(metadata_raw["next_results"].as_str().unwrap().to_string()),
            false => None,
        },
    };
    return Ok(metadata);
}

/// Pulls the interesting fields out of a single tweet object.
/// NOTE(review): unwraps on any missing or mistyped field.
pub fn get_tweet_info_from_tweet(tweet: &Value) -> Result<TweetInfo, Error> {
    let info = TweetInfo {
        user_id: tweet["user"]["id"].as_i64().unwrap(),
        user_name: tweet["user"]["name"].as_str().unwrap().to_string(),
        user_screen_name: tweet["user"]["screen_name"].as_str().unwrap().to_string(),
        tweet_id: tweet["id"].as_i64().unwrap(),
        tweet_text: tweet["text"].as_str().unwrap().to_string(),
        truncated: tweet["truncated"].as_bool().unwrap(),
    };
    return Ok(info);
}

/// Maps every tweet to a TweetInfo (panicking on malformed tweets, never
/// actually returning Err).
pub fn get_tweet_infos_from_tweets(tweets: &Vec<Value>) -> Result<Vec<TweetInfo>, Error> {
    let res: Vec<TweetInfo> = tweets
        .into_iter()
        .map(|t| get_tweet_info_from_tweet(t).unwrap())
        .collect();
    return Ok(res);
}

/// Percent-encodes key/value pairs into a "?k=v&k2=v2" query string.
fn build_query_string(params: &[(&str, &str)]) -> String {
    let mut sb = String::with_capacity(32);
    sb.push('?');
    for (i, param) in params.iter().enumerate() {
        if i != 0 {
            sb.push('&');
        }
        sb.push_str(urlencoding::encode(param.0).as_str());
        sb.push('=');
        sb.push_str(urlencoding::encode(param.1).as_str());
    }
    return sb;
}

/// Appends our fixed option to the "next_results" query Twitter returned.
fn build_query_from_next_results(query_in: &str) -> String {
    return format!("{}{}", query_in, "&include_entities=0");
}

/// GETs search/tweets.json with the given pre-built query string and
/// returns the raw response body.
async fn search_request_helper(token: &TwitterToken, url_query: &str) -> Result<String, Error> {
    let endpoint = "https://api.twitter.com/1.1/search/tweets.json";
    let req_url = format!("{}{}", endpoint, url_query);
    let client = reqwest::Client::new();
    let content = client
        .get(&req_url)
        .bearer_auth(&token.token)
        .send()
        .await?
        .text()
        .await?;
    return Ok(content);
}

/// Fetches the next result page described by `meta`, failing with
/// NoNextResult when the metadata carries no "next_results" query.
pub async fn search_request_next(
    token: &TwitterToken,
    meta: &SearchMetadata,
) -> Result<SearchJSON, Error> {
    if meta.next_results.is_none() {
        return Err(Error::NoNextResult());
    }
    let q = build_query_from_next_results(meta.next_results.as_ref().unwrap());
    let content = search_request_helper(token, &q).await?;
    return get_tweet_json_from_response(content.as_str());
}

/// Runs a fresh search for `query` (up to 100 results, entities excluded).
pub async fn search_request(token: &TwitterToken, query: &str) -> Result<SearchJSON, Error> {
    let params = [
        ("q", query),
        ("count", "100".as_ref()),
        ("include_entities", "0".as_ref()),
    ];
    let q = build_query_string(&params);
    let content = search_request_helper(token, &q).await?;
    return get_tweet_json_from_response(content.as_str());
}

// TODO: track oldest key
// Copyright (C) 2019 Sebastian Dröge <sebastian@centricular.com> // // Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT> use super::*; /// Parsing errors that can be returned from [`Session::parse`]. /// /// [`Session::parse`]: struct.Session.html#method.parse #[derive(Debug, PartialEq, Eq)] pub enum ParserError { /// The given line started with an unexpected character. #[deprecated(note = "This is no longer considered an error.")] UnexpectedLine(usize, u8), /// The given line was not formatted correctly. InvalidLineFormat(usize, &'static str), /// The given line contained an invalid encoding at the specified field. InvalidFieldEncoding(usize, &'static str), /// The given line contained an invalid format at the specified field. InvalidFieldFormat(usize, &'static str), /// The given line was missing field data at the specified field. MissingField(usize, &'static str), /// The given line had some trailing data at the specified field. FieldTrailingData(usize, &'static str), /// The given version line did not contain a valid version number. InvalidVersion(usize, Vec<u8>), /// A second version line was found at the given line. MultipleVersions(usize), /// The SDP did not contain a version line. NoVersion, /// A second origin line was found at the given line. MultipleOrigins(usize), /// The SDP did not contain an origin line. #[deprecated(note = "This is no longer considered an error.")] NoOrigin, /// A second session name line was found at the given line. MultipleSessionNames(usize), /// The SDP did not contain a session name line. #[deprecated(note = "This is no longer considered an error.")] NoSessionName, /// A second session description line was found at the given line. MultipleSessionDescription(usize), /// A second URI line was found at the given line. MultipleUris(usize), /// A second connection line was found at the given line. MultipleConnections(usize), /// A second time zone line was found at the given line. 
MultipleTimeZones(usize), /// A second media title line was found at the given line. MultipleMediaTitles(usize), /// A second key line was found at the given line. MultipleKeys(usize), } impl std::error::Error for ParserError {} impl std::fmt::Display for ParserError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { use std::convert::TryFrom; match *self { #[allow(deprecated)] ParserError::UnexpectedLine(line, c) => { if let Ok(c) = char::try_from(c as u32) { write!(f, "Unexpected line {} starting with '{}'", line, c) } else { write!(f, "Unexpected line {} starting with U+{:04x}", line, c) } } ParserError::InvalidLineFormat(line, ref s) => { write!(f, "Invalid formatted line {}: \"{}\"", line, s) } ParserError::InvalidFieldEncoding(line, s) => { write!(f, "Invalid field encoding in line {} at {}", line, s) } ParserError::InvalidFieldFormat(line, s) => { write!(f, "Invalid field formatting in line {} at {}", line, s) } ParserError::MissingField(line, s) => write!(f, "Missing field {} in line {}", s, line), ParserError::FieldTrailingData(line, s) => { write!(f, "Field {} in line {} has trailing data", s, line) } ParserError::InvalidVersion(line, ref s) => write!( f, "Invalid version at line {}: {}", line, String::from_utf8_lossy(s) ), ParserError::MultipleVersions(line) => write!(f, "Multiple versions in line {}", line), ParserError::NoVersion => write!(f, "No version line"), ParserError::MultipleOrigins(line) => write!(f, "Multiple origins in line {}", line), #[allow(deprecated)] ParserError::NoOrigin => write!(f, "No origin line"), ParserError::MultipleSessionNames(line) => { write!(f, "Multiple session-names in line {}", line) } #[allow(deprecated)] ParserError::NoSessionName => write!(f, "No session-name line"), ParserError::MultipleSessionDescription(line) => { write!(f, "Multiple session-information in line {}", line) } ParserError::MultipleUris(line) => write!(f, "Multiple URIs in line {}", line), 
ParserError::MultipleConnections(line) => { write!(f, "Multiple connections in line {}", line) } ParserError::MultipleTimeZones(line) => write!(f, "Multiple zones in line {}", line), ParserError::MultipleMediaTitles(line) => { write!(f, "Multiple media titles in line {}", line) } ParserError::MultipleKeys(line) => write!(f, "Multiple keys in line {}", line), } } } impl Origin { fn parse(line: &Line) -> Result<Origin, ParserError> { // <username> <sess-id> <sess-version> <nettype> <addrtype> <unicast-address> let mut origin = line.value.splitn_str(6, b" "); let username = parse_str(&mut origin, line.n, "Origin username")?; let sess_id = parse_str(&mut origin, line.n, "Origin sess-id")?; let sess_version = parse_str_u64(&mut origin, line.n, "Origin sess-version")?; let nettype = parse_str(&mut origin, line.n, "Origin nettype")?; let addrtype = parse_str(&mut origin, line.n, "Origin addrtype")?; let unicast_address = parse_str(&mut origin, line.n, "Origin unicast-address")?; Ok(Origin { username: if username == "-" { None } else { Some(username) }, sess_id, sess_version, nettype, addrtype, unicast_address, }) } } impl Connection { fn parse(line: &Line) -> Result<Connection, ParserError> { // <nettype> <addrtype> <connection-address> let mut connection = line.value.splitn_str(3, b" "); let nettype = parse_str(&mut connection, line.n, "Connection nettype")?; let addrtype = parse_str(&mut connection, line.n, "Connection addrtype")?; let connection_address = parse_str(&mut connection, line.n, "Connection connection-address")?; Ok(Connection { nettype, addrtype, connection_address, }) } } impl Bandwidth { fn parse(line: &Line) -> Result<Bandwidth, ParserError> { // <bwtype>:<bandwidth> let mut bandwidth = line.value.split_str(b":"); let bwtype = parse_str(&mut bandwidth, line.n, "Bandwidth bwtype")?; let bw = parse_str_u64(&mut bandwidth, line.n, "Bandwidth bandwidth")?; if bandwidth.next().is_some() { return Err(ParserError::FieldTrailingData(line.n, "Bandwidth")); } 
Ok(Bandwidth { bwtype, bandwidth: bw, }) } } impl Time { fn parse(line: &Line) -> Result<Time, ParserError> { // <start-time> <stop-time> let mut time = line.value.split_str(b" "); let start_time = parse_str_u64(&mut time, line.n, "Time start-time")?; let stop_time = parse_str_u64(&mut time, line.n, "Time stop-time")?; if time.next().is_some() { return Err(ParserError::FieldTrailingData(line.n, "Time")); } Ok(Time { start_time, stop_time, repeats: Vec::new(), }) } } fn parse_typed_time(s: &[u8], line: usize, field: &'static str) -> Result<u64, ParserError> { let (num, factor) = match s .split_last() .ok_or(ParserError::InvalidFieldFormat(line, field))? { (b'd', prefix) => (prefix, 86_400), (b'h', prefix) => (prefix, 3_600), (b'm', prefix) => (prefix, 60), (b's', prefix) => (prefix, 1), (_, _) => (s, 1), }; let num = std::str::from_utf8(num).map_err(|_| ParserError::InvalidFieldEncoding(line, field))?; let num = num .parse::<u64>() .map_err(|_| ParserError::InvalidFieldFormat(line, field))?; num.checked_mul(factor) .ok_or(ParserError::InvalidFieldFormat(line, field)) } impl Repeat { fn parse(line: &Line) -> Result<Repeat, ParserError> { // <repeat interval> <active duration> <offsets from start-time> let mut repeat = line.value.split_str(b" "); let repeat_interval = repeat .next() .ok_or(ParserError::MissingField(line.n, "Repeat repeat-interval")) .and_then(|s| parse_typed_time(s, line.n, "Repeat repeat-interval"))?; let active_duration = repeat .next() .ok_or(ParserError::MissingField(line.n, "Repeat active-duration")) .and_then(|s| parse_typed_time(s, line.n, "Repeat active-duration"))?; let offsets = repeat .map(|s| parse_typed_time(s, line.n, "Repeat active-duration")) .collect::<Result<Vec<_>, _>>()?; Ok(Repeat { repeat_interval, active_duration, offsets, }) } } impl TimeZone { fn parse(line: &Line) -> Result<Vec<TimeZone>, ParserError> { // <adjustment time> <offset> <adjustment time> <offset> .... 
let mut zones = line.value.split_str(b" "); let mut ret = Vec::new(); loop { let adjustment_time = parse_str_u64(&mut zones, line.n, "TimeZone adjustment-time"); let adjustment_time = match adjustment_time { Ok(adjustment_time) => adjustment_time, Err(ParserError::MissingField(..)) => break, Err(err) => return Err(err), }; let offset = zones .next() .ok_or(ParserError::MissingField(line.n, "TimeZone offset")) .and_then(|s| { use std::convert::TryInto; let (sign, s) = if s.first() == Some(&b'-') { (true, &s[1..]) } else { (false, s) }; parse_typed_time(s, line.n, "TimeZone offset") .and_then(|t| { t.try_into().map_err(|_| { ParserError::InvalidFieldFormat(line.n, "TimeZone offset") }) }) .map(|t: i64| if sign { -t } else { t }) })?; ret.push(TimeZone { adjustment_time, offset, }); } Ok(ret) } } impl Attribute { fn parse(line: &Line) -> Result<Attribute, ParserError> { // <attribute>:<value> // <attribute> let mut attribute = line.value.splitn_str(2, b":"); let name = parse_str(&mut attribute, line.n, "Attribute name")?; let value = parse_str_opt(&mut attribute, line.n, "Attribute value")?; Ok(Attribute { attribute: name, value, }) } } impl Key { fn parse(line: &Line) -> Result<Key, ParserError> { // <method>:<encryption key> // <method> let mut key = line.value.splitn_str(2, b":"); let method = parse_str(&mut key, line.n, "Key method")?; let encryption_key = parse_str_opt(&mut key, line.n, "Key encryption-key")?; Ok(Key { method, encryption_key, }) } } impl Media { fn parse_m_line(line: &Line) -> Result<Media, ParserError> { // <media> <port> <proto> <fmt> ... 
let mut media = line.value.splitn_str(4, b" "); let name = parse_str(&mut media, line.n, "Media name")?; let (port, num_ports) = media .next() .ok_or(ParserError::MissingField(line.n, "Media port")) .and_then(|s| str_from_utf8(line.n, s, "Media Port")) .and_then(|port| { let mut split = port.splitn(2, '/'); let port = split .next() .ok_or(ParserError::MissingField(line.n, "Media port")) .and_then(|port| { port.parse() .map_err(|_| ParserError::InvalidFieldFormat(line.n, "Media port")) })?; let num_ports = split .next() .ok_or(ParserError::MissingField(line.n, "Media num-ports")) .and_then(|num_ports| { num_ports .parse() .map_err(|_| ParserError::InvalidFieldFormat(line.n, "Media num-ports")) }); match num_ports { Ok(num_ports) => Ok((port, Some(num_ports))), Err(ParserError::MissingField(..)) => Ok((port, None)), Err(err) => Err(err), } })?; let proto = parse_str(&mut media, line.n, "Media proto")?; let fmt = parse_str(&mut media, line.n, "Media fmt")?; Ok(Media { media: name, port, num_ports, proto, fmt, media_title: None, connections: Vec::new(), bandwidths: Vec::new(), key: None, attributes: Vec::new(), }) } fn parse<'a, I: FallibleIterator<Item = Line<'a>, Error = ParserError>>( lines: &mut fallible_iterator::Peekable<I>, ) -> Result<Option<Media>, ParserError> { let media = match lines.next()? { None => return Ok(None), Some(line) => { assert_eq!(line.key, b'm'); Media::parse_m_line(&line)? } }; // As with Session::parse, be more permissive about order than RFC 8866. let mut media_title = None; let mut connections = vec![]; let mut bandwidths = vec![]; let mut key = None; let mut attributes = vec![]; while matches!(lines.peek(), Ok(Some(Line { key, .. 
})) if *key != b'm') { let line = lines.next().unwrap().unwrap(); match line.key { // Parse media information line // - Can exist not at all or exactly once b'i' => parse_rejecting_duplicates( &mut media_title, &line, ParserError::MultipleMediaTitles, |l| str_from_utf8(l.n, l.value, "Media Title"), )?, // Parse connection lines // - Can exist not at all, once or multiple times b'c' => connections.push(Connection::parse(&line)?), // Parse bandwidth lines: // - Can exist not at all, once or multiple times b'b' => bandwidths.push(Bandwidth::parse(&line)?), // Parse key line // - Can exist not at all or exactly once b'k' => parse_rejecting_duplicates( &mut key, &line, ParserError::MultipleKeys, Key::parse, )?, // Parse attribute lines: // - Can exist not at all, once or multiple times b'a' => attributes.push(Attribute::parse(&line)?), _ => (), } } Ok(Some(Media { media_title, connections, bandwidths, key, attributes, ..media })) } } impl Session { /// Parse an SDP session description from a byte slice. pub fn parse(data: &[u8]) -> Result<Session, ParserError> { // Create an iterator which returns for each line its human-readable // (1-based) line number and contents. let mut lines = LineParser(data.lines().enumerate().map(|(i, bytes)| (i + 1, bytes))).peekable(); // Parses anything allowed by RFC 8866 Section 9 and more: // - be more lax about order. As in the RFC, "v=" must come first and // "m=" starts the media descriptions. Other fields can come in // almost any order. "r=" refers to the most recent "t=", even if it's // not the most recent line. // - allow "o=", "t=" and "s=" lines to be missing. // Check version line, which we expect to come first. match lines.next()? 
{ Some(Line { n, key: b'v', value, }) => { if value != b"0" { return Err(ParserError::InvalidVersion(n, value.into())); } } _ => return Err(ParserError::NoVersion), } let mut origin = None; let mut session_name = None; let mut session_description = None; let mut uri = None; let mut emails = vec![]; let mut phones = vec![]; let mut connection = None; let mut bandwidths = vec![]; let mut times = vec![]; let mut time_zones = None; let mut attributes = vec![]; let mut key = None; while matches!(lines.peek(), Ok(Some(Line { key, .. })) if *key != b'm') { let line = lines.next().unwrap().unwrap(); match line.key { // Parse origin line: // - Must only exist once or not at all b'o' => parse_rejecting_duplicates( &mut origin, &line, ParserError::MultipleOrigins, Origin::parse, )?, // Parse session name line: // - Must only exist once or not at all b's' => parse_rejecting_duplicates( &mut session_name, &line, ParserError::MultipleSessionNames, |l| str_from_utf8(l.n, l.value, "Session Name"), )?, // Parse session information line: // - Must only exist once or not at all b'i' => parse_rejecting_duplicates( &mut session_description, &line, ParserError::MultipleSessionDescription, |l| str_from_utf8(l.n, l.value, "Session Description"), )?, // Parse URI line: // - Must only exist once or not at all b'u' => { parse_rejecting_duplicates(&mut uri, &line, ParserError::MultipleUris, |l| { str_from_utf8(l.n, l.value, "Uri") })? 
} // Parse E-Mail lines: // - Can exist not at all, once or multiple times b'e' => emails.push(str_from_utf8(line.n, line.value, "E-Mail")?), // Parse phone number lines: // - Can exist not at all, once or multiple times b'p' => phones.push(str_from_utf8(line.n, line.value, "Phone")?), // Parse connection line: // - Can exist not at all or exactly once per session b'c' => parse_rejecting_duplicates( &mut connection, &line, ParserError::MultipleConnections, Connection::parse, )?, // Parse bandwidth lines: // - Can exist not at all, once or multiple times b'b' => bandwidths.push(Bandwidth::parse(&line)?), // Parse time lines // - If followed by "r" lines then these are part of the same time field b't' => times.push(Time::parse(&line)?), // Parse repeat lines // - Can exist not at all, once or multiple times b'r' => { if let Some(t) = times.last_mut() { t.repeats.push(Repeat::parse(&line)?); } } // Parse zones line: // - Can exist not at all or exactly once per session b'z' => parse_rejecting_duplicates( &mut time_zones, &line, ParserError::MultipleTimeZones, TimeZone::parse, )?, // Parse key line // - Can exist not at all or exactly once b'k' => parse_rejecting_duplicates( &mut key, &line, ParserError::MultipleKeys, Key::parse, )?, // Parse attribute lines: // - Can exist not at all, once or multiple times b'a' => attributes.push(Attribute::parse(&line)?), _ => (), } } let origin = origin.unwrap_or_else(|| Origin { username: None, sess_id: String::new(), sess_version: 0, nettype: String::new(), addrtype: String::new(), unicast_address: String::new(), }); let session_name = session_name.unwrap_or_default(); let time_zones = time_zones.unwrap_or_default(); // Parse media lines: // - Can exist not at all, once or multiple times let mut medias = vec![]; while let Some(media) = Media::parse(&mut lines)? 
{ medias.push(media); } Ok(Session { origin, session_name, session_description, uri, emails, phones, connection, bandwidths, times, time_zones, key, attributes, medias, }) } } fn parse_rejecting_duplicates< T, E: Fn(usize) -> ParserError, P: Fn(&Line) -> Result<T, ParserError>, >( value: &mut Option<T>, line: &Line<'_>, duplicate_error_fn: E, parser: P, ) -> Result<(), ParserError> { if value.is_some() { return Err(duplicate_error_fn(line.n)); } *value = Some(parser(line)?); Ok(()) } // Field parser helpers on byte slice iterators fn parse_str<'a>( it: &mut impl Iterator<Item = &'a [u8]>, line: usize, field: &'static str, ) -> Result<String, ParserError> { it.next() .ok_or(ParserError::MissingField(line, field)) .and_then(|b| { std::str::from_utf8(b) .map(String::from) .map_err(|_| ParserError::InvalidFieldEncoding(line, field)) }) } fn parse_str_u64<'a>( it: &mut impl Iterator<Item = &'a [u8]>, line: usize, field: &'static str, ) -> Result<u64, ParserError> { it.next() .ok_or(ParserError::MissingField(line, field)) .and_then(|b| { std::str::from_utf8(b).map_err(|_| ParserError::InvalidFieldEncoding(line, field)) }) .and_then(|s| { s.parse() .map_err(|_| ParserError::InvalidFieldFormat(line, field)) }) } fn parse_str_opt<'a>( it: &mut impl Iterator<Item = &'a [u8]>, line: usize, field: &'static str, ) -> Result<Option<String>, ParserError> { it.next() .map(|b| { std::str::from_utf8(b) .map(String::from) .map_err(|_| ParserError::InvalidFieldEncoding(line, field)) }) .transpose() } // Line parser helper for converting a byte slice to a string fn str_from_utf8(line: usize, s: &[u8], field: &'static str) -> Result<String, ParserError> { std::str::from_utf8(s) .map(String::from) .map_err(|_| ParserError::InvalidFieldEncoding(line, field)) } struct Line<'item> { /// The 1-based line number. 
n: usize, key: u8, value: &'item [u8], } // Parsing helper iterators below struct LineParser<'item, I: Iterator<Item = (usize, &'item [u8])>>(I); impl<'item, I: Iterator<Item = (usize, &'item [u8])>> FallibleIterator for LineParser<'item, I> { type Item = Line<'item>; type Error = ParserError; fn next(&mut self) -> Result<Option<Self::Item>, Self::Error> { for (n, line) in &mut self.0 { if line.is_empty() { continue; } let equals = line.iter().position(|b| *b == b'='); let key = match equals { None => { return Err(ParserError::InvalidLineFormat( n, "Line not in key=value format", )) } Some(i) if i == 1 => line[0], _ => { return Err(ParserError::InvalidLineFormat( n, "Line key not 1 character", )) } }; return Ok(Some(Line { n, key, value: &line[2..], })); } Ok(None) } } #[cfg(test)] mod tests { use super::*; #[test] fn parse_sdp() { let sdp = b"v=0\r o=jdoe 2890844526 2890842807 IN IP4 10.47.16.5\r s=SDP Seminar\r i=A Seminar on the session description protocol\r u=http://www.example.com/seminars/sdp.pdf\r e=j.doe@example.com (Jane Doe)\r p=+1 617 555-6011\r c=IN IP4 224.2.17.12/127\r b=AS:128\r t=2873397496 2873404696\r r=7d 1h 0 25h\r z=2882844526 -1h 2898848070 0\r k=clear:1234\r a=recvonly\r m=audio 49170 RTP/AVP 0\r m=video 51372/2 RTP/AVP 99\r a=rtpmap:99 h263-1998/90000\r a=fingerprint:sha-256 3A:96:6D:57:B2:C2:C7:61:A0:46:3E:1C:97:39:D3:F7:0A:88:A0:B1:EC:03:FB:10:A5:5D:3A:37:AB:DD:02:AA\r "; let parsed = Session::parse(&sdp[..]).unwrap(); let expected = Session { origin: Origin { username: Some("jdoe".into()), sess_id: "2890844526".into(), sess_version: 2890842807, nettype: "IN".into(), addrtype: "IP4".into(), unicast_address: "10.47.16.5".into(), }, session_name: "SDP Seminar".into(), session_description: Some("A Seminar on the session description protocol".into()), uri: Some("http://www.example.com/seminars/sdp.pdf".into()), emails: vec!["j.doe@example.com (Jane Doe)".into()], phones: vec!["+1 617 555-6011".into()], connection: Some(Connection { nettype: 
"IN".into(), addrtype: "IP4".into(), connection_address: "224.2.17.12/127".into(), }), bandwidths: vec![Bandwidth { bwtype: "AS".into(), bandwidth: 128, }], times: vec![Time { start_time: 2873397496, stop_time: 2873404696, repeats: vec![Repeat { repeat_interval: 604800, active_duration: 3600, offsets: vec![0, 90000], }], }], time_zones: vec![ TimeZone { adjustment_time: 2882844526, offset: -3600, }, TimeZone { adjustment_time: 2898848070, offset: 0, }, ], key: Some(Key { method: "clear".into(), encryption_key: Some("1234".into()), }), attributes: vec![Attribute { attribute: "recvonly".into(), value: None, }], medias: vec![ Media { media: "audio".into(), port: 49170, num_ports: None, proto: "RTP/AVP".into(), fmt: "0".into(), media_title: None, connections: vec![], bandwidths: vec![], key: None, attributes: vec![], }, Media { media: "video".into(), port: 51372, num_ports: Some(2), proto: "RTP/AVP".into(), fmt: "99".into(), media_title: None, connections: vec![], bandwidths: vec![], key: None, attributes: vec![ Attribute { attribute: "rtpmap".into(), value: Some("99 h263-1998/90000".into()), }, Attribute { attribute: "fingerprint".into(), value: Some("sha-256 3A:96:6D:57:B2:C2:C7:61:A0:46:3E:1C:97:39:D3:F7:0A:88:A0:B1:EC:03:FB:10:A5:5D:3A:37:AB:DD:02:AA".into()), } ], }, ], }; assert_eq!(parsed, expected); } #[test] fn parse_only_key() { Session::parse(b"v\n").unwrap_err(); } #[test] fn parse_sdp_real_camera() { let sdp = b"v=0\r o=VSTC 3828747520 3828747520 IN IP4 192.168.1.165\r s=streamed by the VSTARCAM RTSP server\r e=NONE\r c=IN IP4 0.0.0.0\r t=0 0\r m=video 0 RTP/AVP 96\r b=AS:1024\r a=control:track0\r a=rtpmap:96 H264/90000\r a=fmtp:96 packetization-mode=1;profile-level-id=4d001f;sprop-parameter-sets=Z00AH52oFAFum4CAgKAAAAMAIAAAAwHwgA==,aO48gA==\r m=audio 0 RTP/AVP 8 \r b=AS:64\r a=control:track1\r a=rtpmap:8 PCMA/8000/1\r "; let _parsed = Session::parse(&sdp[..]).unwrap(); } /// Parses SDP from a Geovision camera which (incorrectly) omits the "t=" /// line. 
#[test] fn parse_sdp_geovision() { let sdp = b"v=0\r o=- 1001 1 IN IP4 192.168.5.237\r s=VCP IPC Realtime stream\r m=video 0 RTP/AVP 105\r c=IN IP4 192.168.5.237\r a=control:rtsp://192.168.5.237/media/video1/video\r a=rtpmap:105 H264/90000\r a=fmtp:105 profile-level-id=4d4032; packetization-mode=1; sprop-parameter-sets=Z01AMpWgCoAwfiZuAgICgAAB9AAAdTBC,aO48gA==\r a=recvonly\r m=application 0 RTP/AVP 107\r c=IN IP4 192.168.5.237\r a=control:rtsp://192.168.5.237/media/video1/metadata\r a=rtpmap:107 vnd.onvif.metadata/90000\r a=fmtp:107 DecoderTag=h3c-v3 RTCP=0\r a=recvonly\r "; let _parsed = Session::parse(&sdp[..]).unwrap(); } /// Parses SDP from an Anpviz camera which (incorrectly) places an `a=` /// between the `c=` and `t=` lines of a session. #[test] fn parse_sdp_anpviz() { let sdp = b"v=0\r o=- 1109162014219182 1109162014219192 IN IP4 x.y.z.w\r s=RTSP/RTP stream from anjvision ipcamera\r e=NONE\r c=IN IP4 0.0.0.0\r a=tool:LIVE555 Streaming Media v2011.05.25 CHAM.LI@ANJVISION.COM\r t=0 0\r a=range:npt=0-\r a=control:*\r m=video 0 RTP/AVP 96\r a=rtpmap:96 H264/90000\r a=control:trackID=1\r a=fmtp:96 profile-level-id=4D401F;packetization-mode=0;sprop-parameter-sets=Z01AH5WgLASabAQ=,aO48gA==;config=00000001674d401f95a02c049a6c040000000168ee3c800000000106f02c0445c6f5000620ebc2f3f7639e48250bfcb561bb2b85dda6fe5f06cc8b887b6a915f5aa3bebfffffffffff7380\r a=x-dimensions: 704, 576\r a=x-framerate: 12\r m=audio 0 RTP/AVP 0\r a=rtpmap:0 MPEG4-GENERIC/16000/2\r a=fmtp:0 config=1408\r a=control:trackID=2\r a=Media_header:MEDIAINFO=494D4B48010100000400010010710110401F000000FA000000000000000000000000000000000000;\r a=appversion:1.0\r "; let _parsed = Session::parse(&sdp[..]).unwrap(); } #[test] fn parse_overflowing_time() { assert_eq!( Session::parse(b"v=0\no= 0 =\x00 \ns=q\nt=0 5\nz=00 666666000079866660m "), Err(ParserError::InvalidFieldFormat(5, "TimeZone offset")) ); } #[test] fn parse_sdp_without_origin() { let sdp = b"v=0\r s=streamed by the macro-video rtsp server\r t=0 
0\r a=control:*\r a=range:npt=0-\r a=x-qt-text-nam:streamed by the macro-video rtsp server\r c=IN IP4 0.0.0.0\r m=video 0 RTP/AVP 96\r b=AS:500\r a=rtpmap:96 H264/90000\r a=fmtp:96 profile-level-id=TeAo;packetization-mode=1;sprop-parameter-sets=J03gKI1oBQBboQAAAwABAAADACgPFCKg,KO4BNJI=\r a=control:track1\r "; let _parsed = Session::parse(&sdp[..]).unwrap(); } #[test] fn parse_sdp_data_after_media() { let sdp = b"v=0\r o=- 1691154453 1 IN IP4 192.168.1.100\r i=Pagos\r a=type:broadcast\r s=RandD2\r m=video 15002 RTP/AVP 97\r a=range:npt=0-\r a=rtpmap:97 H264/90000\r a=fmtp:97 profile-level-id=4D4029; packetization-mode=1; sprop-parameter-sets=Z01AKZZUBQHsgA==,aO44gA==\r a=framerate:15.000\r a=control:rtsp://192.168.1.20/camera1.sdp\r c=IN IP4 0.0.0.0\r t=0 0\r "; let _parsed = Session::parse(&sdp[..]).unwrap(); } #[test] fn parse_sdp_without_session_name() { let sdp = b"v=0\r o=- 1109162014219182 1109162014219192 IN IP4 x.y.z.w\r t=0 0\r a=control:*\r a=range:npt=0-\r a=x-qt-text-nam:streamed by the macro-video rtsp server\r c=IN IP4 0.0.0.0\r m=video 0 RTP/AVP 96\r b=AS:500\r a=rtpmap:96 H264/90000\r a=fmtp:96 profile-level-id=TeAo;packetization-mode=1;sprop-parameter-sets=J03gKI1oBQBboQAAAwABAAADACgPFCKg,KO4BNJI=\r a=control:track1\r "; let _parsed = Session::parse(&sdp[..]).unwrap(); } }
extern crate olin; use log::{error, info}; use olin::Resource; use std::io::Write; pub extern "C" fn test() -> Result<(), i32> { info!("testing for issue 39: https://github.com/Xe/olin/issues/39"); const ZERO_LEN: usize = 16; let zeroes = [0u8; ZERO_LEN]; let mut fout: Resource = Resource::open("null://").map_err(|e| { error!("can't open file: {:?}", e); 1 })?; let res = fout.write(&zeroes).map_err(|e| { error!("can't write: {:?}", e); 1 })?; if res != ZERO_LEN { error!("wanted res to be {} but got: {}", ZERO_LEN, res); return Err(1); } info!("issue 39 test passed"); Ok(()) }
use crate::slice_index::SliceIndex;
use crate::FlannError;
use crate::Indexable;
use crate::Parameters;

/// An index that owns its point data.
///
/// `storage` holds the actual coordinates (one flat `Vec<T>` per batch of
/// added points) while `slice_index` borrows them. The `'static` lifetime on
/// `slice_index` is a deliberate lie (see `new` and `Drop`): the borrows
/// really point into `storage`, which lives exactly as long as `self`.
pub struct VecIndex<T: Indexable + 'static> {
    // Backing storage for every point ever added. Entries are only ever
    // pushed, never removed or reallocated-in-place, because `slice_index`
    // holds references into these buffers.
    storage: Vec<Vec<T>>,
    // `Option` only so that `Drop` can destroy the index before the storage.
    slice_index: Option<SliceIndex<'static, T>>,
}

impl<T: Indexable> Drop for VecIndex<T> {
    fn drop(&mut self) {
        // We absolutely must destroy the index before our storage because
        // we are basically lying about the lifetime of the index using unsafe.
        // Be careful when changing this!
        self.slice_index.take();
    }
}

impl<T: Indexable> std::ops::Deref for VecIndex<T> {
    type Target = SliceIndex<'static, T>;

    fn deref(&self) -> &Self::Target {
        // Invariant: `slice_index` is only `None` during `drop`, so this
        // unwrap cannot fire in normal use.
        self.slice_index.as_ref().unwrap()
    }
}

impl<T: Indexable> std::ops::DerefMut for VecIndex<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.slice_index.as_mut().unwrap()
    }
}

impl<T: Indexable> VecIndex<T> {
    /// Builds an index over `points`, validating that every point contributes
    /// exactly `point_len` coordinates.
    ///
    /// Returns `InvalidPointDimensionality` for the first point of the wrong
    /// length and `ZeroInputPoints` if no coordinates were provided at all.
    pub fn new<I, P>(
        point_len: usize,
        points: I,
        parameters: Parameters,
    ) -> Result<Self, FlannError>
    where
        I: IntoIterator<Item = P>,
        P: IntoIterator<Item = T>,
    {
        // Flatten all points into one contiguous buffer; `count` tallies how
        // many coordinates each point contributed (pushing happens as a side
        // effect of the `map`).
        let mut points_vec = Vec::new();
        for point in points {
            let count = point.into_iter().map(|d| points_vec.push(d)).count();
            if count != point_len {
                return Err(FlannError::InvalidPointDimensionality {
                    expected: point_len,
                    got: count,
                });
            }
        }
        if points_vec.is_empty() {
            return Err(FlannError::ZeroInputPoints);
        }
        // SAFETY(review): the transmute extends the borrow of `points_vec` to
        // `'static`. This appears sound only because `points_vec` is moved
        // into `self.storage` below (moving a `Vec` does not move its heap
        // buffer), the buffer is never reallocated afterwards, and `Drop`
        // destroys `slice_index` before `storage`. Confirm before changing.
        let index = SliceIndex::new(
            point_len,
            unsafe { std::mem::transmute(&points_vec[..]) },
            parameters,
        )?;
        Ok(Self {
            storage: vec![points_vec],
            slice_index: Some(index),
        })
    }

    /// Adds a point to the index.
    ///
    /// NOTE(review): `point.len()` is not validated here — presumably it must
    /// equal the index's point length; confirm against `SliceIndex::add_slice`.
    pub fn add(&mut self, point: Vec<T>) -> Result<(), FlannError> {
        // SAFETY(review): same `'static` lie as in `new` — `point` is moved
        // into `storage` immediately after, keeping its heap buffer alive.
        self.slice_index
            .as_mut()
            .unwrap()
            .add_slice(unsafe { std::mem::transmute(&point[..]) })?;
        self.storage.push(point);
        Ok(())
    }

    /// Adds multiple points to the index.
    ///
    /// `self.point_len` resolves through the `Deref` impl to `SliceIndex`,
    /// since `VecIndex` itself has no such field.
    pub fn add_many<I, P>(&mut self, points: I) -> Result<(), FlannError>
    where
        I: IntoIterator<Item = P>,
        P: IntoIterator<Item = T>,
    {
        // Flatten and validate dimensionality exactly as in `new`.
        let mut points_vec = Vec::new();
        for point in points {
            let count = point.into_iter().map(|d| points_vec.push(d)).count();
            if count != self.point_len {
                return Err(FlannError::InvalidPointDimensionality {
                    expected: self.point_len,
                    got: count,
                });
            }
        }
        // SAFETY(review): `points_vec` is moved into `storage` right after the
        // index takes its `'static`-transmuted slice, as in `new`/`add`.
        self.add_many_slices(unsafe { std::mem::transmute(&points_vec[..]) })?;
        self.storage.push(points_vec);
        Ok(())
    }
}
use bytes::IntoBuf;
use futures::{Async, Future, Poll, Stream};
use futures::future::{self, Either};
use futures::sync::mpsc;
use h2::client::{Builder, Handshake, SendRequest};
use tokio_io::{AsyncRead, AsyncWrite};

use headers::content_length_parse_all;
use body::Payload;
use ::common::{Exec, Never};
use headers;
use ::proto::Dispatched;
use super::{PipeToSendStream, SendBuf};
use ::{Body, Request, Response};

/// Receiver half of the dispatch channel: yields `(Request, callback)` pairs
/// coming from user-facing client handles.
type ClientRx<B> = ::client::dispatch::Receiver<Request<B>, Response<Body>>;

/// An mpsc channel is used to help notify the `Connection` task when *all*
/// other handles to it have been dropped, so that it can shutdown.
type ConnDropRef = mpsc::Sender<Never>;

/// Future that drives an HTTP/2 client: performs the h2 handshake, then pulls
/// requests off `rx` and submits them on the negotiated connection.
pub(crate) struct Client<T, B>
where
    B: Payload,
{
    /// Executor used to spawn the connection task and per-request body pipes.
    executor: Exec,
    /// Stream of user requests awaiting dispatch.
    rx: ClientRx<B>,
    state: State<T, SendBuf<B::Data>>,
}

/// Dispatch state: `Handshaking` first, then `Ready` once h2 setup completes.
enum State<T, B>
where
    B: IntoBuf,
{
    Handshaking(Handshake<T, B>),
    // Holds the request sender plus the drop-notification handle that is
    // cloned into spawned body tasks (see `ConnDropRef`).
    Ready(SendRequest<B>, ConnDropRef),
}

impl<T, B> Client<T, B>
where
    T: AsyncRead + AsyncWrite + Send + 'static,
    B: Payload,
{
    /// Starts the h2 handshake over `io`; the returned `Client` must be
    /// polled (as a `Future`) to make progress.
    pub(crate) fn new(io: T, rx: ClientRx<B>, exec: Exec) -> Client<T, B> {
        let handshake = Builder::new()
            // we don't expose PUSH promises yet
            .enable_push(false)
            .handshake(io);

        Client {
            executor: exec,
            rx: rx,
            state: State::Handshaking(handshake),
        }
    }
}

impl<T, B> Future for Client<T, B>
where
    T: AsyncRead + AsyncWrite + Send + 'static,
    B: Payload + 'static,
{
    type Item = Dispatched;
    type Error = ::Error;

    /// State-machine driver: completes the handshake, then loops dispatching
    /// queued requests until all user handles are dropped.
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        loop {
            let next = match self.state {
                State::Handshaking(ref mut h) => {
                    let (request_tx, conn) = try_ready!(h.poll().map_err(::Error::new_h2));
                    // An mpsc channel is used entirely to detect when the
                    // 'Client' has been dropped. This is to get around a bug
                    // in h2 where dropping all SendRequests won't notify a
                    // parked Connection.
                    let (tx, rx) = mpsc::channel(0);
                    // `rx` resolves only once every `tx` clone is dropped;
                    // `Never` guarantees no message is ever actually sent.
                    let rx = rx.into_future()
                        .map(|(msg, _)| match msg {
                            Some(never) => match never {},
                            None => (),
                        })
                        .map_err(|_| -> Never { unreachable!("mpsc cannot error") });
                    // Race the connection against the drop signal: if every
                    // sender goes away first, keep polling the conn so it can
                    // shut down cleanly.
                    let fut = conn
                        .inspect(|_| trace!("connection complete"))
                        .map_err(|e| debug!("connection error: {}", e))
                        .select2(rx)
                        .then(|res| match res {
                            Ok(Either::A(((), _))) |
                            Err(Either::A(((), _))) => {
                                // conn has finished either way
                                Either::A(future::ok(()))
                            },
                            Ok(Either::B(((), conn))) => {
                                // mpsc has been dropped, hopefully polling
                                // the connection some more should start shutdown
                                // and then close
                                trace!("send_request dropped, starting conn shutdown");
                                Either::B(conn)
                            }
                            Err(Either::B((never, _))) => match never {},
                        });
                    self.executor.execute(fut)?;
                    State::Ready(request_tx, tx)
                },
                State::Ready(ref mut tx, ref conn_dropper) => {
                    try_ready!(tx.poll_ready().map_err(::Error::new_h2));
                    match self.rx.poll() {
                        Ok(Async::Ready(Some((req, mut cb)))) => {
                            // check that future hasn't been canceled already
                            if let Async::Ready(()) = cb.poll_cancel().expect("poll_cancel cannot error") {
                                trace!("request canceled");
                                continue;
                            }
                            let (head, body) = req.into_parts();
                            // h2 takes a bodyless `http::Request`; the body is
                            // streamed separately below.
                            let mut req = ::http::Request::from_parts(head, ());
                            super::strip_connection_headers(req.headers_mut(), true);
                            if let Some(len) = body.content_length() {
                                headers::set_content_length_if_missing(req.headers_mut(), len);
                            }
                            let eos = body.is_end_stream();
                            let (fut, body_tx) = match tx.send_request(req, eos) {
                                Ok(ok) => ok,
                                Err(err) => {
                                    debug!("client send request error: {}", err);
                                    let _ = cb.send(Err((::Error::new_h2(err), None)));
                                    continue;
                                }
                            };
                            if !eos {
                                let mut pipe = PipeToSendStream::new(body, body_tx)
                                    .map_err(|e| debug!("client request body error: {}", e));
                                // eagerly see if the body pipe is ready and
                                // can thus skip allocating in the executor
                                match pipe.poll() {
                                    Ok(Async::Ready(())) | Err(()) => (),
                                    Ok(Async::NotReady) => {
                                        // Keep a drop-channel clone alive for
                                        // the lifetime of the body stream so
                                        // the conn task isn't shut down early.
                                        let conn_drop_ref = conn_dropper.clone();
                                        let pipe = pipe.then(move |x| {
                                            drop(conn_drop_ref);
                                            x
                                        });
                                        self.executor.execute(pipe)?;
                                    }
                                }
                            }
                            // Deliver the eventual response (or error) back to
                            // the waiting caller through its callback.
                            let fut = fut
                                .then(move |result| {
                                    match result {
                                        Ok(res) => {
                                            let content_length = content_length_parse_all(res.headers());
                                            let res = res.map(|stream| {
                                                ::Body::h2(stream, content_length)
                                            });
                                            let _ = cb.send(Ok(res));
                                        },
                                        Err(err) => {
                                            debug!("client response error: {}", err);
                                            let _ = cb.send(Err((::Error::new_h2(err), None)));
                                        }
                                    }
                                    Ok(())
                                });
                            self.executor.execute(fut)?;
                            continue;
                        },
                        Ok(Async::NotReady) => return Ok(Async::NotReady),
                        Ok(Async::Ready(None)) | Err(_) => {
                            // All user handles are gone; nothing more to send.
                            trace!("client::dispatch::Sender dropped");
                            return Ok(Async::Ready(Dispatched::Shutdown));
                        }
                    }
                },
            };
            self.state = next;
        }
    }
}
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use {
    crate::{
        constants::PKG_URL_PREFIX,
        font_catalog as fi,
        font_catalog::{AssetInFamilyIndex, FamilyIndex, TypefaceInAssetIndex},
        FontCatalog, FontPackageListing, FontSets,
    },
    char_set::CharSet,
    failure::{format_err, Error, Fail},
    font_info::{FontAssetSource, FontInfo, FontInfoLoader},
    fuchsia_url::pkg_url::PkgUrl,
    itertools::{Either, Itertools},
    manifest::v2,
    std::{
        collections::{BTreeMap, BTreeSet},
        path::{Path, PathBuf},
    },
};

/// Identifies one asset by its family index and its position within that family.
type AssetKey = (FamilyIndex, AssetInFamilyIndex);

/// Collection of font metadata used for generating a font manifest for a particular target.
///
/// Contains indices by family and asset names, which all provide access only to those fonts that
/// are included in `font_sets`. (`font_catalog` may contain other fonts that are not included in
/// the target product.)
///
/// For test coverage, please see the integration tests.
pub(crate) struct FontDb {
    font_catalog: FontCatalog,
    font_sets: FontSets,
    // Only families with at least one asset in the target product appear here.
    family_name_to_family: BTreeMap<String, FamilyIndex>,
    // An asset file name may appear in several families, hence the set.
    asset_name_to_assets: BTreeMap<String, BTreeSet<AssetKey>>,
    asset_name_to_pkg_url: BTreeMap<String, PkgUrl>,
    // Code points supported by each (asset file name, typeface index) pair.
    typeface_to_char_set: BTreeMap<(String, TypefaceInAssetIndex), CharSet>,
}

impl FontDb {
    /// Tries to create a new instance of `FontDb`.
    ///
    /// Walks every family/asset in `font_catalog`, indexes only the assets
    /// present in `font_sets`, then loads character-set data for them from
    /// files under `font_dir`. Failures are accumulated and reported together
    /// as a single `FontDbErrors`.
    pub fn new<P: AsRef<Path>>(
        font_catalog: FontCatalog,
        font_pkgs: FontPackageListing,
        font_sets: FontSets,
        font_info_loader: impl FontInfoLoader,
        font_dir: P,
    ) -> Result<FontDb, FontDbErrors> {
        let mut family_name_to_family = BTreeMap::new();
        let mut asset_name_to_assets = BTreeMap::new();
        let mut asset_name_to_pkg_url = BTreeMap::new();
        let typeface_to_char_set = BTreeMap::new();
        let mut errors: Vec<FontDbError> = vec![];

        for (family_idx, family) in (&font_catalog.families).iter().enumerate() {
            let family_idx = FamilyIndex(family_idx);
            let mut asset_count = 0;
            for (asset_idx, asset) in (&family.assets).iter().enumerate() {
                let asset_idx = AssetInFamilyIndex(asset_idx);
                let asset_name = asset.file_name.clone();
                if asset.typefaces.is_empty() {
                    errors.push(FontDbError::FontCatalogNoTypeFaces { asset_name });
                    continue;
                }
                // Only index assets that are part of the target product.
                if font_sets.get_package_set(&asset.file_name).is_some() {
                    let safe_name = font_pkgs.get_safe_name(&asset_name);
                    if safe_name.is_none() {
                        errors.push(FontDbError::FontPkgsMissingEntry { asset_name });
                        continue;
                    }
                    let pkg_url = Self::make_pkg_url(safe_name.unwrap());
                    if let Err(error) = pkg_url {
                        errors.push(error);
                        continue;
                    }
                    let pkg_url = pkg_url.unwrap();
                    asset_name_to_assets
                        .entry(asset_name.clone())
                        .or_insert_with(|| BTreeSet::new())
                        .insert((family_idx, asset_idx));
                    asset_name_to_pkg_url.insert(asset_name.clone(), pkg_url);
                    asset_count += 1;
                }
            }
            // Skip families where no assets are included in the target product
            if asset_count > 0 {
                family_name_to_family.insert(family.name.clone(), family_idx);
            }
        }

        // typeface_to_char_set is empty at this point.
        let mut db = FontDb {
            font_catalog,
            font_sets,
            family_name_to_family,
            asset_name_to_assets,
            asset_name_to_pkg_url,
            typeface_to_char_set,
        };

        // Second pass: now that the name indices exist, load per-typeface
        // character sets from the actual font files.
        let font_infos = Self::load_font_infos(&db, &font_pkgs, font_info_loader, font_dir);
        match font_infos {
            Ok(font_infos) => {
                for (request, font_info) in font_infos {
                    db.typeface_to_char_set.insert(
                        (request.asset_name(), TypefaceInAssetIndex(request.index)),
                        font_info.char_set,
                    );
                }
            }
            Err(mut font_info_errors) => {
                errors.append(&mut font_info_errors);
            }
        }

        if errors.is_empty() {
            Ok(db)
        } else {
            Err(FontDbErrors(errors))
        }
    }

    /// Looks up an _included_ font family by name.
    pub fn get_family_by_name(&self, family_name: impl AsRef<str>) -> Option<&fi::Family> {
        let family_idx = self.family_name_to_family.get(family_name.as_ref())?;
        self.font_catalog.families.get(family_idx.0)
    }

    /// Get all [`Asset`]s with the given file name. There may be more than one instance if the
    /// asset appears in multiple font families.
    pub fn get_assets_by_name(&self, asset_name: impl AsRef<str>) -> Vec<&fi::Asset> {
        self.asset_name_to_assets
            .get(asset_name.as_ref())
            // Iterate over the 0 or 1 values inside Option
            .iter()
            .flat_map(|asset_keys| asset_keys.iter())
            .flat_map(move |(family_idx, asset_idx)| {
                self.font_catalog
                    .families
                    .get(family_idx.0)
                    .and_then(|family| family.get_asset(*asset_idx))
            })
            .collect_vec()
    }

    /// The asset must be in the `FontDb` or this method will panic.
    pub fn get_code_points(&self, asset: &fi::Asset, index: TypefaceInAssetIndex) -> &CharSet {
        // Alas, no sane way to transpose between `(&str, &x)` and `&(String, x)`.
        let key = (asset.file_name.to_owned(), index);
        self.typeface_to_char_set
            .get(&key)
            .ok_or_else(|| format_err!("No code points for {:?}", &key))
            .unwrap()
    }

    /// The asset must be in the `FontDb` or this method will panic.
    pub fn get_asset_location(&self, asset: &fi::Asset) -> v2::AssetLocation {
        v2::AssetLocation::Package(v2::PackageLocator {
            url: self.asset_name_to_pkg_url.get(&*asset.file_name).unwrap().clone(),
            set: self.font_sets.get_package_set(&*asset.file_name).unwrap().clone(),
        })
    }

    /// Iterates over all the _included_ font families in the `FontDb`.
    pub fn iter_families(&self) -> impl Iterator<Item = &'_ fi::Family> + '_ {
        self.font_catalog
            .families
            .iter()
            .filter(move |family| self.get_family_by_name(&*family.name).is_some())
    }

    /// Iterates over all the _included_ assets in the given font family. Note this is _not_ the
    /// same as iterating over `Family::assets`.
    pub fn iter_assets<'a>(
        &'a self,
        family: &'a fi::Family,
    ) -> impl Iterator<Item = &'a fi::Asset> + 'a {
        family
            .assets
            .iter()
            .filter(move |asset| !self.get_assets_by_name(&*asset.file_name).is_empty())
    }

    /// Builds the package URL for an asset's sanitized package name.
    fn make_pkg_url(safe_name: impl AsRef<str>) -> Result<PkgUrl, FontDbError> {
        let pkg_url = format!("{}{}", PKG_URL_PREFIX, safe_name.as_ref());
        Ok(PkgUrl::parse(&pkg_url).map_err(|error| FontDbError::PkgUrl { error: error.into() })?)
    }

    /// Loads `FontInfo` for every typeface of every asset in `db.font_sets`,
    /// reading font files from under `font_dir`. Returns either all results
    /// or all accumulated errors, never a mix.
    fn load_font_infos(
        db: &FontDb,
        font_pkgs: &FontPackageListing,
        font_info_loader: impl FontInfoLoader,
        font_dir: impl AsRef<Path>,
    ) -> Result<Vec<(FontInfoRequest, FontInfo)>, Vec<FontDbError>> {
        // Phase 1: resolve each asset into per-typeface load requests,
        // splitting successes from failures.
        let (requests, errors): (Vec<_>, Vec<_>) = db
            .font_sets
            .iter()
            .map(|(asset_name, _)| {
                Self::asset_to_font_info_requests(db, font_pkgs, font_dir.as_ref(), asset_name)
            })
            .flatten()
            .partition_map(|r| match r {
                Ok(data) => Either::Left(data),
                Err(err) => Either::Right(err),
            });

        if !errors.is_empty() {
            return Err(errors);
        }

        // Phase 2: actually load the font info for each request.
        let (font_infos, errors): (Vec<_>, Vec<_>) = requests
            .into_iter()
            .map(|request| {
                let source = FontAssetSource::FilePath(request.path.to_str().unwrap().to_owned());
                let font_info = font_info_loader.load_font_info(source, request.index);
                match font_info {
                    Ok(font_info) => Ok((request, font_info)),
                    Err(error) => Err(FontDbError::FontInfo { request, error }),
                }
            })
            .partition_map(|r| match r {
                Ok(data) => Either::Left(data),
                Err(err) => Either::Right(err),
            });

        if !errors.is_empty() {
            Err(errors)
        } else {
            Ok(font_infos)
        }
    }

    /// Expands one asset into a `FontInfoRequest` per typeface it contains,
    /// resolving the asset's on-disk path from `font_pkgs` and `font_dir`.
    fn asset_to_font_info_requests(
        db: &FontDb,
        font_pkgs: &FontPackageListing,
        font_dir: impl AsRef<Path>,
        asset_name: &str,
    ) -> Vec<Result<FontInfoRequest, FontDbError>> {
        let mut path = font_dir.as_ref().to_path_buf();

        let path_prefix = font_pkgs.get_path_prefix(asset_name);
        if path_prefix.is_none() {
            return vec![Err(FontDbError::FontPkgsMissingEntry {
                asset_name: asset_name.to_owned(),
            })];
        }

        let path_prefix = path_prefix.unwrap();
        path.push(path_prefix);
        path.push(asset_name);

        // We have to collect into a vector here because otherwise there's no way to return a
        // consistent `Iterator` type.
        let requests = db
            .get_assets_by_name(asset_name)
            .iter()
            .flat_map(|asset| asset.typefaces.keys())
            .map(move |index| Ok(FontInfoRequest { path: path.clone(), index: index.0 }))
            .collect_vec();

        if requests.is_empty() {
            vec![Err(FontDbError::FontCatalogMissingEntry { asset_name: asset_name.to_owned() })]
        } else {
            requests
        }
    }
}

/// Collection of errors from loading / building `FontDb`.
#[derive(Debug, Fail)]
#[fail(display = "Errors occurred while building FontDb: {:#?}", _0)]
pub(crate) struct FontDbErrors(Vec<FontDbError>);

/// An error in a single `FontDb` operation.
#[derive(Debug, Fail)]
pub(crate) enum FontDbError {
    #[fail(display = "Asset {} has no typefaces", asset_name)]
    FontCatalogNoTypeFaces { asset_name: String },

    #[fail(display = "Asset {} is not listed in *.font_pkgs.json", asset_name)]
    FontPkgsMissingEntry { asset_name: String },

    #[fail(display = "Asset {} is not listed in *.font_catalog.json", asset_name)]
    FontCatalogMissingEntry { asset_name: String },

    #[fail(display = "PkgUrl error: {:?}", error)]
    PkgUrl {
        #[cause]
        error: Error,
    },

    #[fail(display = "Failed to load font info for {:?}: {:?}", request, error)]
    FontInfo {
        request: FontInfoRequest,
        #[cause]
        error: Error,
    },
}

/// Metadata needed for [`FontInfoLoader::load_font_info`].
#[derive(Debug, Clone)]
pub(crate) struct FontInfoRequest {
    /// Path to the file
    path: PathBuf,
    /// Index of the font in the file
    index: u32,
}

impl FontInfoRequest {
    /// File name component of `path` (assumed valid UTF-8; panics otherwise).
    fn asset_name(&self) -> String {
        self.path.file_name().and_then(|os_str| os_str.to_str()).unwrap().to_owned()
    }
}
use core::time::Duration;

use ggez::{GameResult, Context};
use ggez::graphics::{Point2,Vector2};

use car::{self,Car};
use globals::*;

/// The kinds of game object a [`TranslationAnimation`] can move.
#[derive(Clone, Copy, Debug)]
pub enum DrawableObject {
    DrawableCar(Car),
}

/// Linear interpolation of an object from a start position to an end
/// position over a fixed duration (seconds), beginning at `start_time`.
#[derive(Clone, Copy, Debug)]
pub struct TranslationAnimation {
    start_time: Duration,
    duration: f32,
    start_position: Point2,
    delta: Vector2,
    drawable_object: DrawableObject,
}

impl TranslationAnimation {
    /// Builds an animation moving `drawable_object` from `start_position`
    /// to `end_position` over `duration` seconds.
    pub fn new(
        start_time: Duration,
        duration: f32,
        start_position: Point2,
        end_position: Point2,
        drawable_object: DrawableObject,
    ) -> TranslationAnimation {
        let delta = end_position - start_position;
        TranslationAnimation {
            start_time,
            duration,
            start_position,
            delta,
            drawable_object,
        }
    }

    /// Elapsed share of the animation at `now`: 0.0 at the start, 1.0 once
    /// the full duration has passed. Not clamped — may exceed 1.0.
    fn progress(self, now: Duration) -> f32 {
        duration_to_f32(now - self.start_time) / self.duration
    }

    /// True once the full duration has elapsed.
    pub fn is_finished(self, current_time: Duration) -> bool {
        self.progress(current_time) >= 1.0
    }

    /// Draws the animated object at its interpolated position for
    /// `current_time`.
    pub fn draw(self, ctx: &mut Context, car_assets: &car::Assets, current_time: Duration) -> GameResult<()> {
        let position = self.start_position + self.delta * self.progress(current_time);
        match self.drawable_object {
            DrawableObject::DrawableCar(car) => car.draw(ctx, car_assets, position),
        }
    }
}
// svd2rust-style generated accessor for the PWM `_0_RIS` (raw interrupt
// status) register: a read proxy `R`, a write proxy `W`, plus one reader
// struct and one writer proxy per status bit (bits 0-5). Edits to generated
// code are normally lost on regeneration; comments below only describe the
// generated pattern.

#[doc = r"Value read from the register"]
pub struct R {
    bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
    bits: u32,
}
impl super::_0_RIS {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: the current bits seed both the read snapshot
        // and the starting write value handed to the closure.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the closure starts from the reset value rather
        // than the register's current contents.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}

// ---- Bit 0: INTCNTZERO ----
#[doc = r"Value of the field"]
pub struct PWM_0_RIS_INTCNTZEROR {
    bits: bool,
}
impl PWM_0_RIS_INTCNTZEROR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _PWM_0_RIS_INTCNTZEROW<'a> {
    w: &'a mut W,
}
impl<'a> _PWM_0_RIS_INTCNTZEROW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 0);
        self.w.bits |= ((value as u32) & 1) << 0;
        self.w
    }
}

// ---- Bit 1: INTCNTLOAD ----
#[doc = r"Value of the field"]
pub struct PWM_0_RIS_INTCNTLOADR {
    bits: bool,
}
impl PWM_0_RIS_INTCNTLOADR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _PWM_0_RIS_INTCNTLOADW<'a> {
    w: &'a mut W,
}
impl<'a> _PWM_0_RIS_INTCNTLOADW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 1);
        self.w.bits |= ((value as u32) & 1) << 1;
        self.w
    }
}

// ---- Bit 2: INTCMPAU ----
#[doc = r"Value of the field"]
pub struct PWM_0_RIS_INTCMPAUR {
    bits: bool,
}
impl PWM_0_RIS_INTCMPAUR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _PWM_0_RIS_INTCMPAUW<'a> {
    w: &'a mut W,
}
impl<'a> _PWM_0_RIS_INTCMPAUW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 2);
        self.w.bits |= ((value as u32) & 1) << 2;
        self.w
    }
}

// ---- Bit 3: INTCMPAD ----
#[doc = r"Value of the field"]
pub struct PWM_0_RIS_INTCMPADR {
    bits: bool,
}
impl PWM_0_RIS_INTCMPADR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _PWM_0_RIS_INTCMPADW<'a> {
    w: &'a mut W,
}
impl<'a> _PWM_0_RIS_INTCMPADW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 3);
        self.w.bits |= ((value as u32) & 1) << 3;
        self.w
    }
}

// ---- Bit 4: INTCMPBU ----
#[doc = r"Value of the field"]
pub struct PWM_0_RIS_INTCMPBUR {
    bits: bool,
}
impl PWM_0_RIS_INTCMPBUR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _PWM_0_RIS_INTCMPBUW<'a> {
    w: &'a mut W,
}
impl<'a> _PWM_0_RIS_INTCMPBUW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 4);
        self.w.bits |= ((value as u32) & 1) << 4;
        self.w
    }
}

// ---- Bit 5: INTCMPBD ----
#[doc = r"Value of the field"]
pub struct PWM_0_RIS_INTCMPBDR {
    bits: bool,
}
impl PWM_0_RIS_INTCMPBDR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r"Proxy"]
pub struct _PWM_0_RIS_INTCMPBDW<'a> {
    w: &'a mut W,
}
impl<'a> _PWM_0_RIS_INTCMPBDW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits &= !(1 << 5);
        self.w.bits |= ((value as u32) & 1) << 5;
        self.w
    }
}

// Read accessors: one getter per status bit.
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bit 0 - Counter=0 Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcntzero(&self) -> PWM_0_RIS_INTCNTZEROR {
        let bits = ((self.bits >> 0) & 1) != 0;
        PWM_0_RIS_INTCNTZEROR { bits }
    }
    #[doc = "Bit 1 - Counter=Load Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcntload(&self) -> PWM_0_RIS_INTCNTLOADR {
        let bits = ((self.bits >> 1) & 1) != 0;
        PWM_0_RIS_INTCNTLOADR { bits }
    }
    #[doc = "Bit 2 - Comparator A Up Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcmpau(&self) -> PWM_0_RIS_INTCMPAUR {
        let bits = ((self.bits >> 2) & 1) != 0;
        PWM_0_RIS_INTCMPAUR { bits }
    }
    #[doc = "Bit 3 - Comparator A Down Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcmpad(&self) -> PWM_0_RIS_INTCMPADR {
        let bits = ((self.bits >> 3) & 1) != 0;
        PWM_0_RIS_INTCMPADR { bits }
    }
    #[doc = "Bit 4 - Comparator B Up Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcmpbu(&self) -> PWM_0_RIS_INTCMPBUR {
        let bits = ((self.bits >> 4) & 1) != 0;
        PWM_0_RIS_INTCMPBUR { bits }
    }
    #[doc = "Bit 5 - Comparator B Down Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcmpbd(&self) -> PWM_0_RIS_INTCMPBDR {
        let bits = ((self.bits >> 5) & 1) != 0;
        PWM_0_RIS_INTCMPBDR { bits }
    }
}

// Write accessors: one proxy per status bit.
impl W {
    #[doc = r"Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 0 - Counter=0 Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcntzero(&mut self) -> _PWM_0_RIS_INTCNTZEROW {
        _PWM_0_RIS_INTCNTZEROW { w: self }
    }
    #[doc = "Bit 1 - Counter=Load Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcntload(&mut self) -> _PWM_0_RIS_INTCNTLOADW {
        _PWM_0_RIS_INTCNTLOADW { w: self }
    }
    #[doc = "Bit 2 - Comparator A Up Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcmpau(&mut self) -> _PWM_0_RIS_INTCMPAUW {
        _PWM_0_RIS_INTCMPAUW { w: self }
    }
    #[doc = "Bit 3 - Comparator A Down Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcmpad(&mut self) -> _PWM_0_RIS_INTCMPADW {
        _PWM_0_RIS_INTCMPADW { w: self }
    }
    #[doc = "Bit 4 - Comparator B Up Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcmpbu(&mut self) -> _PWM_0_RIS_INTCMPBUW {
        _PWM_0_RIS_INTCMPBUW { w: self }
    }
    #[doc = "Bit 5 - Comparator B Down Interrupt Status"]
    #[inline(always)]
    pub fn pwm_0_ris_intcmpbd(&mut self) -> _PWM_0_RIS_INTCMPBDW {
        _PWM_0_RIS_INTCMPBDW { w: self }
    }
}
use std::io::{Read, Bytes};

/// Bit-level reader over any `Read` source.
///
/// Bits are consumed MSB-first within each byte. `index` is the bit position
/// of the next unread bit in `buffer` (`-1` means the buffer is exhausted).
pub struct BinaryReader<T: Read> {
    buffer: u8,
    index: i8,
    reader: Bytes<T>,
}

impl<T: Read> BinaryReader<T> {
    /// Wraps `r`; no bytes are consumed until the first read call.
    pub fn new(r: T) -> Self {
        BinaryReader {
            buffer: 0,
            index: -1,
            reader: r.bytes(),
        }
    }

    /// Reads a single bit (0 or 1), refilling the buffer byte when empty.
    ///
    /// # Panics
    /// Panics when the underlying reader is exhausted or returns an error.
    pub fn read_bit(&mut self) -> u8 {
        if self.index < 0 {
            self.buffer = self.next_byte();
            self.index = 7;
        }
        let bit = (self.buffer >> self.index) & 1;
        self.index -= 1;
        bit
    }

    /// Reads 8 bits, which need not be byte-aligned.
    ///
    /// # Panics
    /// Panics when fewer than 8 bits remain in the input. (Fixed: the
    /// previous version silently returned stale buffer contents at EOF,
    /// inconsistent with `read_bit`'s "run out of input" panic.)
    pub fn read_u8(&mut self) -> u8 {
        if self.index == -1 {
            self.buffer = 0;
        }
        // High part: the `index + 1` bits still pending in the buffer, moved
        // to the top of the result. When `index == -1` the wrapping shift
        // amount is 8 ≡ 0 (mod 8), but the buffer was zeroed above, so `x`
        // is 0 either way.
        let x = self.buffer.wrapping_shl((7 - self.index) as u32);
        // Low part: at most 7 bits can be pending, so the next input byte is
        // always required. `index` is left unchanged — the same number of
        // bits remain unread, now in the new buffer byte.
        self.buffer = self.next_byte();
        x | (self.buffer >> (self.index + 1))
    }

    /// Reads a big-endian u64 (8 consecutive `read_u8` calls).
    pub fn read_u64(&mut self) -> u64 {
        let mut x = 0;
        for _ in 0..8 {
            x <<= 8;
            x |= u64::from(self.read_u8());
        }
        x
    }

    /// Pulls the next byte from the underlying reader.
    ///
    /// # Panics
    /// Panics on end of input or on an I/O error.
    fn next_byte(&mut self) -> u8 {
        match self.reader.next() {
            Some(byte) => byte.unwrap(),
            None => panic!("run out of input"),
        }
    }
}

#[test]
fn test_binary_read() {
    let data = [0, 0, 0, 0, 0, 0, 0, 0xb, 0x23, 0xFA, 0xCE];
    let mut r = BinaryReader::new(&data[..]);
    assert_eq!(r.read_u64(), 0xb);
    assert_eq!(r.read_u8(), 0x23);
    assert_eq!(r.read_bit(), 1);
    assert_eq!(r.read_bit(), 1);
    assert_eq!(r.read_bit(), 1);
    assert_eq!(r.read_bit(), 1);
    assert_eq!(r.read_bit(), 1);
    assert_eq!(r.read_bit(), 0);
    assert_eq!(r.read_bit(), 1);
    assert_eq!(r.read_u8(), 0b01100111);
    assert_eq!(r.read_bit(), 0);
}
use std::convert::TryFrom;

/// GNSS constellation identifier; discriminants match the raw wire values.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum GnssId {
    Gps = 0,
    Sbas = 1,
    Galileo = 2,
    BeiDou = 3,
    Imes = 4,
    Qzss = 5,
    Glonass = 6,
}

impl TryFrom<u8> for GnssId {
    type Error = String;

    /// Maps a raw id byte to its `GnssId`; any value above 6 is rejected
    /// with a descriptive error message.
    fn try_from(val: u8) -> Result<GnssId, String> {
        const TABLE: [GnssId; 7] = [
            GnssId::Gps,
            GnssId::Sbas,
            GnssId::Galileo,
            GnssId::BeiDou,
            GnssId::Imes,
            GnssId::Qzss,
            GnssId::Glonass,
        ];
        TABLE
            .get(usize::from(val))
            .copied()
            .ok_or_else(|| format!("invalid GNSS ID: {}", val))
    }
}
extern crate termion;

use termion::raw::IntoRawMode;
use termion::async_stdin;
use std::io::{Read, Write, stdout};
use std::thread;
use std::time::Duration;

/// Demo of termion's non-blocking (`async_stdin`) input: echoes everything
/// typed so far once per 100 ms tick; pressing 'q' exits.
fn main() {
    let stdout = stdout();
    // Raw mode: no line buffering or echo, so keypresses arrive immediately.
    let mut stdout = stdout.lock().into_raw_mode().unwrap();
    let mut stdin = async_stdin().bytes();

    write!(stdout,
           "{}{}",
           termion::clear::All,
           termion::cursor::Goto(1, 1))
        .unwrap();

    let mut buf = String::new();

    'a: loop {
        write!(stdout, "{}", termion::clear::CurrentLine).unwrap();

        write!(stdout, "\r{} <- This demonstrates the async read input char. \
                        Between each update a 100 ms. is waited, simply to demonstrate the async fashion. \n\r",
               buf)
            .unwrap();

        // Drain everything typed since the previous tick: the async_stdin
        // iterator yields None instead of blocking when no byte is pending.
        while let Some(next) = stdin.next() {
            match next {
                Ok(b'q') => break 'a,
                Ok(c) => buf.push(c as char),
                Err(e) => panic!("error: {:?}", e),
            }
        }

        stdout.flush().unwrap();

        //thread::sleep(Duration::from_millis(200));
        //stdout.write_all(b"# ").unwrap();
        //stdout.flush().unwrap();
        thread::sleep(Duration::from_millis(100));
        //stdout.write_all(b"\r #").unwrap();

        write!(stdout, "{}", termion::cursor::Goto(1, 1)).unwrap();
        stdout.flush().unwrap();
    }
}
use itertools::Itertools as _;
use std::io;

// One crate is identified by a single character, e.g. the 'N' in "[N]".
type Cargo = char;
// stacks[i] is drawing column i; the last element of a stack is its top crate.
type CargoStacks = Vec<Vec<Cargo>>;

/// A parsed "move <count> from <from> to <to>" line (1-based stack numbers).
#[derive(Debug)]
struct MoveCmd {
    count: usize,
    from: usize,
    to: usize,
}

/// Parses one row of the crate drawing into `stacks`.
///
/// Each column occupies a 4-character cell ("[X] "); a cell starting with
/// '[' carries a crate for the stack at that column. Stacks are grown on
/// demand, since the row width determines the stack count.
fn parse_crate_line(s: &str, stacks: &mut CargoStacks) {
    for (pos, mut chunk) in s.chars().chunks(4).into_iter().enumerate() {
        if let Some('[') = chunk.next() {
            let item = chunk.next().expect("item identifier does not exist");
            if let Some(stack) = stacks.get_mut(pos) {
                stack.push(item);
            } else {
                stacks.resize(pos + 1, Vec::default());
                stacks[pos].push(item);
            }
            assert_eq!(chunk.next(), Some(']'));
        }
    }
}

/// Parses a "move N from A to B" command line.
///
/// Any deviation from that exact six-word shape, or a non-numeric field,
/// yields an `InvalidInput` error.
fn parse_cmd(s: &str) -> io::Result<MoveCmd> {
    let mut words = s.split_whitespace();
    match (
        words.next(),
        words.next(),
        words.next(),
        words.next(),
        words.next(),
        words.next(),
    ) {
        (Some("move"), Some(count), Some("from"), Some(from), Some("to"), Some(to)) => {
            let count = count
                .parse()
                .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;
            let from = from
                .parse()
                .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;
            let to = to
                .parse()
                .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;
            Ok(MoveCmd { count, from, to })
        }
        _ => Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            "invalid command",
        )),
    }
}

/// Reads the crate drawing and the command list from stdin, applies every
/// move, then prints the top crate of each stack.
fn main() -> io::Result<()> {
    let mut lines = io::stdin().lines();

    // The drawing is read top-down, so each stack is built upside-down here
    // and reversed below.
    let mut stacks = itertools::process_results(&mut lines, |it| {
        let mut stacks = CargoStacks::default();
        for line in it.take_while(|l| !l.is_empty()) {
            parse_crate_line(&line, &mut stacks);
        }
        stacks
    })?;

    for stack in &mut stacks {
        stack.reverse();
    }

    itertools::process_results(&mut lines, |it| {
        for line in it {
            let cmd = parse_cmd(&line)?;
            let from = &mut stacks[cmd.from - 1];
            // Top `count` crates, kept in their original order: the
            // rev/take/rev chain moves them as one unit rather than one at
            // a time.
            let to_copy = from
                .iter()
                .rev()
                .take(cmd.count)
                .copied()
                .rev()
                .collect::<Vec<_>>();
            from.truncate(from.len() - cmd.count);
            let to = &mut stacks[cmd.to - 1];
            to.extend(to_copy);
        }
        Ok::<_, io::Error>(())
    })??; // outer `?`: line-read I/O errors; inner `?`: command parse errors

    let tops = stacks
        .iter()
        .map(|stack| stack.last().unwrap_or(&' '))
        .join("");
    println!("{tops}");

    Ok(())
}
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use common_ast::ast::Expr; use common_ast::ast::Literal; use common_exception::ErrorCode; use common_exception::Result; use crate::binder::Binder; use crate::optimizer::SExpr; use crate::plans::Limit; use crate::BindContext; impl Binder { pub(super) async fn bind_limit( &mut self, _bind_context: &BindContext, child: SExpr, limit: Option<&Expr>, offset: &Option<Expr>, ) -> Result<SExpr> { let limit_cnt = match limit { Some(limit) => Some( Self::bind_limit_argument(limit) .ok_or_else(|| ErrorCode::SemanticError("Invalid LIMIT expression"))? as usize, ), None => None, }; let offset_cnt = if let Some(offset) = offset { Self::bind_limit_argument(offset) .ok_or_else(|| ErrorCode::SemanticError("Invalid OFFSET expression"))? as usize } else { 0 }; let limit_plan = Limit { limit: limit_cnt, offset: offset_cnt, }; let new_expr = SExpr::create_unary(limit_plan.into(), child); Ok(new_expr) } /// So far, we only support integer literal as limit argument. /// So we will try to extract the integer value from the AST directly. /// In the future it's possible to treat the argument as an expression. fn bind_limit_argument(expr: &Expr) -> Option<u64> { match expr { Expr::Literal { lit: Literal::UInt64(value), .. } => Some(*value), _ => None, } } }
use std::collections::HashSet;

use instant::Duration;
use legion::systems::Builder;
use legion::world::SubWorld;
use legion::{
    component,
    maybe_changed,
    Entity,
    EntityStore,
    IntoQuery,
};
use sourcerenderer_core::{
    Matrix4,
    Platform,
};

use crate::transform::interpolation::InterpolatedTransform;
use crate::transform::GlobalTransform;
use crate::ui::UIDrawData;
use crate::{
    ActiveCamera,
    Camera,
};

/// Interface the ECS side uses to push scene changes (renderables, lights,
/// camera, UI) to the renderer each frame.
pub trait RendererInterface<P: Platform> {
    // Static renderables.
    fn register_static_renderable(
        &self,
        entity: Entity,
        transform: &InterpolatedTransform,
        renderable: &StaticRenderableComponent,
    );
    fn unregister_static_renderable(&self, entity: Entity);
    // Point lights.
    fn register_point_light(
        &self,
        entity: Entity,
        transform: &InterpolatedTransform,
        point_light: &PointLightComponent,
    );
    fn unregister_point_light(&self, entity: Entity);
    // Directional lights.
    fn register_directional_light(
        &self,
        entity: Entity,
        transform: &InterpolatedTransform,
        directional_light: &DirectionalLightComponent,
    );
    fn unregister_directional_light(&self, entity: Entity);
    fn update_camera_transform(&self, camera_transform_mat: Matrix4, fov: f32);
    fn update_transform(&self, entity: Entity, transform: Matrix4);
    fn update_lightmap(&self, path: &str);
    // Frame pacing: `end_frame` marks the end of a pushed frame;
    // `is_saturated` reports whether the renderer can accept another one.
    fn end_frame(&self);
    fn is_saturated(&self) -> bool;
    fn wait_until_available(&self, timeout: Duration);
    fn is_running(&self) -> bool;
    fn update_ui(&self, ui_data: UIDrawData<P::GraphicsBackend>);
}

/// A mesh-like entity rendered from a model asset.
#[derive(Clone, Debug, PartialEq)]
pub struct StaticRenderableComponent {
    pub model_path: String,
    pub receive_shadows: bool,
    pub cast_shadows: bool,
    pub can_move: bool,
}

/// A point light attached to an entity's transform.
#[derive(Clone, Debug, PartialEq)]
pub struct PointLightComponent {
    pub intensity: f32,
}

/// A directional light attached to an entity's transform.
#[derive(Clone, Debug, PartialEq)]
pub struct DirectionalLightComponent {
    pub intensity: f32,
}

/// Path of the lightmap asset to use for the scene.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Lightmap {
    pub path: String,
}

// Per-system state: "active" sets hold entities seen this frame, while
// "registered" sets hold entities currently known to the renderer.
#[derive(Clone, Default, Debug)]
pub struct ActiveStaticRenderables(HashSet<Entity>);
#[derive(Clone, Default, Debug)]
pub struct RegisteredStaticRenderables(HashSet<Entity>);
#[derive(Clone, Default, Debug)] pub struct ActivePointLights(HashSet<Entity>); #[derive(Clone, Default, Debug)] pub struct RegisteredPointLights(HashSet<Entity>); #[derive(Clone, Default, Debug)] pub struct ActiveDirectionalLights(HashSet<Entity>); #[derive(Clone, Default, Debug)] pub struct RegisteredDirectionalLights(HashSet<Entity>); pub fn install<P: Platform, R: RendererInterface<P> + Send + Sync + 'static>( systems: &mut Builder, renderer: R, ) { systems.add_system(renderer_system::<P, R>( renderer, ActiveStaticRenderables(HashSet::new()), RegisteredStaticRenderables(HashSet::new()), ActivePointLights(HashSet::new()), RegisteredPointLights(HashSet::new()), ActiveDirectionalLights(HashSet::new()), RegisteredDirectionalLights(HashSet::new()), )); } #[system] #[read_component(StaticRenderableComponent)] #[read_component(InterpolatedTransform)] #[read_component(PointLightComponent)] #[read_component(DirectionalLightComponent)] #[read_component(GlobalTransform)] #[read_component(Camera)] #[read_component(Lightmap)] fn renderer<P: Platform, R: RendererInterface<P> + 'static>( world: &mut SubWorld, #[state] renderer: &R, #[state] active_static_renderables: &mut ActiveStaticRenderables, #[state] registered_static_renderables: &mut RegisteredStaticRenderables, #[state] active_point_lights: &mut ActivePointLights, #[state] registered_point_lights: &mut RegisteredPointLights, #[state] active_directional_lights: &mut ActiveDirectionalLights, #[state] registered_directional_lights: &mut RegisteredDirectionalLights, #[resource] active_camera: &ActiveCamera, ) { if renderer.is_saturated() { return; } let camera_entry = world.entry_ref(active_camera.0).ok(); let interpolated_transform_component = camera_entry .as_ref() .and_then(|entry| entry.get_component::<InterpolatedTransform>().ok()); let camera_component = camera_entry .as_ref() .and_then(|entry| entry.get_component::<Camera>().ok()); let transform_component = camera_entry .as_ref() .and_then(|entry| 
entry.get_component::<GlobalTransform>().ok()); if let (Some(camera), Some(interpolated), Some(transform)) = ( camera_component, interpolated_transform_component, transform_component, ) { if camera.interpolate_rotation { renderer.update_camera_transform(interpolated.0, camera.fov); } else { let mut combined_transform = transform.0; *combined_transform.column_mut(3) = *interpolated.0.column(3); renderer.update_camera_transform(combined_transform, camera.fov); } } active_static_renderables.0.clear(); let mut static_components_query = <(Entity, &StaticRenderableComponent, &InterpolatedTransform)>::query(); for (entity, component, transform) in static_components_query.iter(world) { if active_static_renderables.0.contains(entity) { continue; } if !registered_static_renderables.0.contains(entity) { renderer.register_static_renderable(*entity, transform, component); registered_static_renderables.0.insert(*entity); } active_static_renderables.0.insert(*entity); } let mut static_components_update_transforms_query = <(Entity, &InterpolatedTransform)>::query() .filter( component::<StaticRenderableComponent>() & maybe_changed::<InterpolatedTransform>(), ); for (entity, transform) in static_components_update_transforms_query.iter(world) { renderer.update_transform(*entity, transform.0); } registered_static_renderables.0.retain(|entity| { if !active_static_renderables.0.contains(entity) { renderer.unregister_static_renderable(*entity); false } else { true } }); let mut point_lights_query = <(Entity, &PointLightComponent, &InterpolatedTransform)>::query(); for (entity, component, transform) in point_lights_query.iter(world) { if active_point_lights.0.contains(entity) { continue; } if !registered_point_lights.0.contains(entity) { renderer.register_point_light(*entity, transform, component); registered_point_lights.0.insert(*entity); } active_point_lights.0.insert(*entity); } let mut point_lights_update_transforms_query = <(Entity, &InterpolatedTransform)>::query() 
.filter(component::<PointLightComponent>() & maybe_changed::<InterpolatedTransform>()); for (entity, transform) in point_lights_update_transforms_query.iter(world) { renderer.update_transform(*entity, transform.0); } registered_point_lights.0.retain(|entity| { if !active_point_lights.0.contains(entity) { renderer.unregister_point_light(*entity); false } else { true } }); let mut directional_lights_query = <(Entity, &DirectionalLightComponent, &InterpolatedTransform)>::query(); for (entity, component, transform) in directional_lights_query.iter(world) { if active_directional_lights.0.contains(entity) { continue; } if !active_directional_lights.0.contains(entity) { renderer.register_directional_light(*entity, transform, component); active_directional_lights.0.insert(*entity); } active_directional_lights.0.insert(*entity); } let mut directional_lights_update_transforms_query = <(Entity, &InterpolatedTransform)>::query() .filter(component::<PointLightComponent>() & maybe_changed::<InterpolatedTransform>()); for (entity, transform) in directional_lights_update_transforms_query.iter(world) { renderer.update_transform(*entity, transform.0); } registered_directional_lights.0.retain(|entity| { if !active_directional_lights.0.contains(entity) { renderer.unregister_directional_light(*entity); false } else { true } }); let mut lightmap_query = <(&Lightmap,)>::query().filter(maybe_changed::<Lightmap>()); for (lightmap,) in lightmap_query.iter(world) { renderer.update_lightmap(&lightmap.path); break; } renderer.end_frame(); }
//! Blinks an LED // #![deny(warnings)] #![no_std] #![no_main] #![feature(maybe_uninit)] #[macro_use] extern crate cortex_m; #[macro_use] extern crate cortex_m_rt as rt; extern crate stm32l4xx_hal as hal; #[macro_use(block)] extern crate nb; use crate::hal::prelude::*; use crate::hal::delay::Delay; use crate::hal::timer::Timer; use crate::hal::serial::Serial; use crate::hal::i2c::I2c; use crate::rt::ExceptionFrame; use crate::rt::entry; use embedded_hal::{ digital::OutputPin, serial::Write }; use core::panic::PanicInfo; use core::sync::atomic::{self, Ordering}; use micromath::F32Ext; mod gyro; mod nunchuck; mod accel; use crate::nunchuck::Nunchuck; use crate::gyro::Gyro; use crate::accel::Accel; static mut ESTOP_PIN: Option<&mut dyn OutputPin> = None; const TICKRATE: u32 = 50u32; //Hz const TICK_INTERVAL: f32 = 1f32 / (TICKRATE as f32); const BETA: f32 = 1.0f32; const KP: f32 = 5f32; const KD: f32 = 2.0f32; const MAX_DIFFERENTIAL: f32 = 50f32; const MIN_DIFFERENTIAL: f32 = 10f32; const DISABLE_TIMEOUT: f32 = 0.5f32; const TILT_ADJUST_RATE: f32 = 2.0; //Degrees per second #[derive(Debug, PartialEq)] enum DriveState { Disabled, WaitingNegStart, WaitingPosStart, Driving } #[entry] fn main() -> ! { let cp = cortex_m::Peripherals::take().unwrap(); let dp = hal::stm32::Peripherals::take().unwrap(); let mut flash = dp.FLASH.constrain(); // .constrain(); let mut rcc = dp.RCC.constrain(); let clocks = rcc.cfgr.sysclk(32.mhz()).pclk1(32.mhz()).freeze(&mut flash.acr); // Wait for things to stabilize let mut delay = Delay::new(cp.SYST, clocks); delay.delay_ms(200u32); let mut gpiob = dp.GPIOB.split(&mut rcc.ahb2); let mut gpioa = dp.GPIOA.split(&mut rcc.ahb2); let mut estop_pin = gpiob.pb0.into_push_pull_output_with_state(&mut gpiob.moder, &mut gpiob.otyper, hal::gpio::State::High); //TODO: Kinda urgent, write a safe wrapper around interrupt/panic handlers. 
We're trusting that estop_pin remains // in scope for the rest of eternety unsafe { ESTOP_PIN = Some( core::mem::transmute::<&'_ mut dyn OutputPin, &'static mut dyn OutputPin>(&mut estop_pin) ); } let mut itm = cp.ITM; let stim = &mut itm.stim[0]; //I2C Setup let mut scl = gpioa.pa9.into_open_drain_output(&mut gpioa.moder, &mut gpioa.otyper); scl.internal_pull_up(&mut gpioa.pupdr, true); let scl = scl.into_af4(&mut gpioa.moder, &mut gpioa.afrh); let mut sda = gpioa.pa10.into_open_drain_output(&mut gpioa.moder, &mut gpioa.otyper); sda.internal_pull_up(&mut gpioa.pupdr, true); let sda = sda.into_af4(&mut gpioa.moder, &mut gpioa.afrh); let mut i2c = I2c::i2c1(dp.I2C1, (scl, sda), 400.khz(), clocks, &mut rcc.apb1r1); //Uart Setup let tx = gpiob.pb6.into_af7(&mut gpiob.moder, &mut gpiob.afrl); let rx = gpiob.pb7.into_af7(&mut gpiob.moder, &mut gpiob.afrl); let serial = Serial::usart1(dp.USART1, (tx, rx), 9_600.bps(), clocks, &mut rcc.apb2); let (mut tx, mut _rx) = serial.split(); let mut gyro = Gyro::new(&mut i2c, gyro::DataRate::DR_200, gyro::Bandwidth::BW_3, true, true, true).unwrap(); gyro.set_hpf_cutoff_and_mode(&mut i2c, 6u8, gyro::HighPassMode::NormReset).unwrap(); gyro.enable_high_pass_filter(&mut i2c, true).unwrap(); let mut accel = Accel::new(&mut i2c, 0u8, 7u8, true, true, true).unwrap(); let mut nck = Nunchuck::init(&mut i2c, &mut delay).unwrap(); // Allow time for gyro HPF to stabilize delay.delay_ms(100u32); let accel_data = accel.read(&mut i2c).unwrap(); let mut angle = (accel_data.accel[1] as f32).atan2(accel_data.accel[2] as f32); let mut drive_state = DriveState::Disabled; let mut button_release_time = DISABLE_TIMEOUT; let mut target_angle = 0f32; let mut timer = Timer::tim7(dp.TIM7, TICKRATE.hz(), clocks, &mut rcc.apb1r1); loop { block!(timer.wait()).unwrap(); let nck_data = nck.read(&mut i2c, &mut delay).unwrap(); let gyro_data = gyro.read(&mut i2c).unwrap(); let accel_data = accel.read(&mut i2c).unwrap(); let angular_rate = ((gyro_data.gyro[0] as 
f32) * 245f32) / 32768f32; let instantaneous_angle = (-(accel_data.accel[1] as f32)).atan2(-(accel_data.accel[2] as f32)) * (180f32 / core::f32::consts::PI); angle += angular_rate * TICK_INTERVAL; angle = angle * (1.0f32 - BETA*TICK_INTERVAL) + (instantaneous_angle * BETA*TICK_INTERVAL); let thrust = -(KP*(angle-target_angle) + KD*angular_rate); let joy_x = -((nck_data.joy[0] as f32) - 128f32) / 96f32; //-1 to +1 ish let joy_y = ((nck_data.joy[1] as f32) - 128f32) / 96f32; //-1 to +1 ish let drive_enable_btn = nck_data.buttons & 0x01 == 0x01; let tilt_adjust_btn = nck_data.buttons & 0x02 == 0x02; let abs_thrust_percent = if thrust > 0f32 { if thrust > 127f32 { 1.0f32 } else { thrust / 127f32 } } else { if thrust < -127f32 { 1.0f32 } else { thrust / -127f32 } }; let differential = joy_x * (MAX_DIFFERENTIAL * (1.0 - abs_thrust_percent) + MIN_DIFFERENTIAL * (abs_thrust_percent)); let mut left_drive = thrust + differential; let mut right_drive = thrust - differential; if left_drive < -127f32 { left_drive = -127f32; } if left_drive > 127f32 { left_drive = 127f32; } if right_drive < -127f32 { right_drive = -127f32; } if right_drive > 127f32 { right_drive = 127f32; } if !drive_enable_btn { if button_release_time >= DISABLE_TIMEOUT { if drive_state != DriveState::Disabled { target_angle = 0f32; motor_drive(&mut tx, Motor::Left, 0i8); motor_drive(&mut tx, Motor::Right, 0i8); drive_state = DriveState::Disabled; } } else { button_release_time += TICK_INTERVAL; } } else { button_release_time = 0f32; } match drive_state { DriveState::Disabled => { if drive_enable_btn { if angle < 0.0f32 { drive_state = DriveState::WaitingNegStart; } else { drive_state = DriveState::WaitingPosStart; } } }, DriveState::WaitingNegStart => { if angle >= 0.0 { drive_state = DriveState::Driving; } }, DriveState::WaitingPosStart => { if angle <= 0.0 { drive_state = DriveState::Driving; } }, DriveState::Driving => { motor_drive(&mut tx, Motor::Left, left_drive as i8); motor_drive(&mut tx, Motor::Right, 
right_drive as i8); if tilt_adjust_btn { target_angle += joy_y * TILT_ADJUST_RATE * TICK_INTERVAL; } } } //iprintln!(stim, "ds={:?}, brs={}", drive_state, button_release_time); } } const MOTOR_ADDRESS: u8 = 128; enum Motor { Left, Right } fn motor_drive<W: Write<u8>>(writer: &mut W, motor: Motor, speed: i8) { let mut command = match motor { Motor::Left => 0x00, Motor::Right => 0x04 }; let speed: u8 = if speed < 0 { command += 1; if speed == -128 { 127 }else{ (-speed) as u8 } } else { speed as u8 }; let checksum: u8 = (MOTOR_ADDRESS + command + speed) & 0x7f; block!(writer.write(MOTOR_ADDRESS)).map_err(|_| ()).expect("Motor write failure"); block!(writer.write(command)).map_err(|_| ()).expect("Motor write failure"); block!(writer.write(speed)).map_err(|_| ()).expect("Motor write failure"); block!(writer.write(checksum)).map_err(|_| ()).expect("Motor write failure"); } #[exception] fn HardFault(ef: &ExceptionFrame) -> ! { panic!("{:#?}", ef); } #[panic_handler] fn panic(info: &PanicInfo) -> ! { cortex_m::interrupt::disable(); unsafe { if let Some(estop) = &mut ESTOP_PIN { estop.set_low(); } }; let itm = unsafe { &mut *cortex_m::peripheral::ITM::ptr() }; let stim = &mut itm.stim[0]; iprintln!(stim, "{}", info); loop { // add some side effect to prevent this from turning into a UDF instruction // see rust-lang/rust#28728 for details atomic::compiler_fence(Ordering::SeqCst) } }
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
use std::sync::{Arc, Mutex};
use std::time::Duration;

use crate::actions::{ActionContext, ActionSet, ContextData};
use crate::config::Config;
use crate::queries::{Condition, Query};
use crate::signals::{Signal, SignalEventShared};

use anyhow::Result;
use async_trait::async_trait;
use tokio::time::sleep;
use unic_langid::LanguageIdentifier;

impl std::fmt::Debug for UserTask {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Fixed: this previously printed "SnipsNlu" — a name copy-pasted
        // from an unrelated Debug impl — which made Debug output for
        // `PollQuery` misleading.
        fmt.debug_struct("UserTask").finish()
    }
}

/// A user-registered query plus the actions to run when its result changes.
struct UserTask {
    query: Arc<Mutex<dyn Query + Send>>,
    condition: Condition,
    act_set: ActionSet,
}

impl UserTask {
    /// Pairs `query` with `act_set`, firing whenever the query result changes.
    fn new(query: Arc<Mutex<dyn Query + Send>>, act_set: ActionSet) -> Self {
        Self {
            query,
            act_set,
            condition: Condition::Changed(RefCell::new(Vec::new())),
        }
    }
}

/// Signal that polls registered queries on a fixed interval and triggers
/// their action sets when a query's condition is met.
#[derive(Debug)]
pub struct PollQuery {
    tasks: Vec<UserTask>,
}

impl PollQuery {
    pub fn new() -> Self {
        Self { tasks: Vec::new() }
    }
}

#[async_trait(?Send)]
impl Signal for PollQuery {
    fn end_load(&mut self, _curr_lang: &[LanguageIdentifier]) -> Result<()> {
        Ok(())
    }

    /// Polls every task each 30 s, forever; fires a task's actions whenever
    /// its condition reports a change.
    async fn event_loop(
        &mut self,
        _signal_event: SignalEventShared,
        _config: &Config,
        curr_lang: &[LanguageIdentifier],
    ) -> Result<()> {
        loop {
            sleep(Duration::from_secs(30)).await;
            for task in &mut self.tasks {
                if task.condition.check(&task.query, HashMap::new()) {
                    let context = ActionContext {
                        locale: curr_lang[0].to_string(),
                        satellite: None,
                        data: ContextData::Event {
                            event: "called by user signal".into(),
                        },
                    };
                    task.act_set.call_all(&context).await;
                }
            }
        }
    }
}

impl PollQuery {
    /// Registers a new query/action pair to be polled.
    pub fn add(&mut self, query: Arc<Mutex<dyn Query + Send>>, act_set: ActionSet) -> Result<()> {
        let task = UserTask::new(query, act_set);
        self.tasks.push(task);
        Ok(())
    }
}
use std::ops::Deref;

use crate::{InputType, InputValueError};

/// Validates that the slice-like `value` contains at most `len` items.
///
/// Returns `Ok(())` when the length is within the limit, otherwise an
/// `InputValueError` describing the actual and maximum lengths.
pub fn max_items<T: Deref<Target = [E]> + InputType, E>(
    value: &T,
    len: usize,
) -> Result<(), InputValueError<T>> {
    // Cache the length so it is computed exactly once.
    let actual = value.deref().len();
    if actual > len {
        return Err(format!(
            "the value length is {}, must be less than or equal to {}",
            actual, len
        )
        .into());
    }
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_max_items() {
        // At, below, and above the limit.
        assert!(max_items(&vec![1, 2], 3).is_ok());
        assert!(max_items(&vec![1, 2, 3], 3).is_ok());
        assert!(max_items(&vec![1, 2, 3, 4], 3).is_err());
    }
}
//! ECS component modules, plus a helper that registers every component type
//! with a `specs` world.

pub mod animation;
pub mod character;
pub mod common;
pub mod render;

use specs::World;

pub use self::animation::*;
pub use self::character::{Character, CharacterController};
pub use self::common::*;
pub use self::render::*;

/// Registers all component types used by the game with `world`.
/// In `specs`, every component type must be registered before any entity
/// carrying it is created.
pub fn register_components(world: &mut World) {
    world.register::<EntityID>();
    world.register::<Animation>();
    world.register::<Position>();
    world.register::<TilePosition>();
    world.register::<EntityRender>();
    world.register::<CharacterController>();
}
use crypto_bench; use openssl::{rand, symm}; use test; fn generate_sealing_key(algorithm: symm::Cipher) -> Result<Vec<u8>, ()> { let mut key_bytes = vec![0u8; algorithm.key_len()]; try!(rand::rand_bytes(&mut key_bytes).map_err(|_| ())); Ok(key_bytes) } fn seal_bench(algorithm: symm::Cipher, chunk_len: usize, ad: &[u8], b: &mut test::Bencher) { let mut tag = vec![0u8; 16]; // 128-bit authentication tags for all AEAD ciphers let data = vec![0u8; chunk_len]; // XXX: This is a little misleading when `ad` isn't empty. b.bytes = chunk_len as u64; let key = generate_sealing_key(algorithm).unwrap(); b.iter(|| { symm::encrypt_aead( algorithm, &key, Some(&crypto_bench::aead::NONCE), ad, &data, &mut tag, ).unwrap(); }); } macro_rules! openssl_seal_bench { ( $benchmark_name:ident, $algorithm:expr, $chunk_len:expr, $ad:expr ) => { #[bench] fn $benchmark_name(b: &mut test::Bencher) { use openssl::symm; use super::super::seal_bench; seal_bench($algorithm, $chunk_len, $ad, b); } } } macro_rules! openssl_seal_benches { ( $name:ident, $algorithm:expr ) => { mod $name { use crypto_bench; use test; // A TLS 1.2 finished message. openssl_seal_bench!(tls12_finished, $algorithm, crypto_bench::aead::TLS12_FINISHED_LEN, &crypto_bench::aead::TLS12_AD); openssl_seal_bench!(tls13_finished, $algorithm, crypto_bench::aead::TLS13_FINISHED_LEN, &crypto_bench::aead::TLS13_AD); // ~1 packet of data in TLS. 
openssl_seal_bench!(tls12_1350, $algorithm, 1350, &crypto_bench::aead::TLS12_AD); openssl_seal_bench!(tls13_1350, $algorithm, 1350, &crypto_bench::aead::TLS13_AD); openssl_seal_bench!(tls12_4k, $algorithm, 4*1024, &crypto_bench::aead::TLS12_AD); openssl_seal_bench!(tls13_4k, $algorithm, 4*1024, &crypto_bench::aead::TLS13_AD); openssl_seal_bench!(tls12_8k, $algorithm, 8*1024, &crypto_bench::aead::TLS12_AD); openssl_seal_bench!(tls13_8k, $algorithm, 8*1024, &crypto_bench::aead::TLS13_AD); openssl_seal_bench!(tls12_1m, $algorithm, 1024*1024, &crypto_bench::aead::TLS12_AD); openssl_seal_bench!(tls13_1m, $algorithm, 1024*1024, &crypto_bench::aead::TLS13_AD); } } } mod openssl_aead { openssl_seal_benches!(aes_128_gcm, symm::Cipher::aes_128_gcm()); openssl_seal_benches!(aes_256_gcm, symm::Cipher::aes_256_gcm()); #[cfg(feature = "openssl_110")] openssl_seal_benches!(chacha20_poly1305, symm::Cipher::chacha20_poly1305()); }
//! Top-level module declarations for this crate/section.
pub mod cso;
pub mod point;
pub mod random;
pub mod level;
//! Minimal go-ipfs-compatible CLI shim (init + daemon) used so that
//! `js-ipfsd-ctl`-style tooling can drive the Rust IPFS node.

use std::num::NonZeroU16;
use std::path::PathBuf;

use structopt::StructOpt;

use ipfs::{Ipfs, IpfsOptions, IpfsTypes, UninitializedIpfs};
use ipfs::{Multiaddr, Protocol};
use ipfs_http::{config, v0};

#[macro_use]
extern crate tracing;

#[derive(Debug, StructOpt)]
enum Options {
    /// Should initialize the repository (create directories and such). `js-ipfsd-ctl` calls this
    /// with two arguments by default, `--bits 1024` and `--profile test`.
    Init {
        /// Generated key length
        #[structopt(long, default_value = "2048")]
        bits: NonZeroU16,
        /// List of configuration profiles to apply. Currently only the `Test` and `Default`
        /// profiles are supported.
        ///
        /// `Test` uses ephemeral ports (necessary for conformance tests), `Default` uses `4004`.
        #[structopt(long, use_delimiter = true, default_value = "default")]
        profile: Vec<config::Profile>,
    },
    /// Start the IPFS node in the foreground (not detaching from parent process).
    Daemon,
}

fn main() {
    // Default to verbose tracing unless the caller configured RUST_LOG.
    if std::env::var_os("RUST_LOG").is_none() {
        std::env::set_var(
            "RUST_LOG",
            "ipfs_http=trace,ipfs=trace,bitswap=trace,ipfs_unixfs=trace",
        );
    }

    tracing_subscriber::fmt::init();

    let opts = Options::from_args();

    println!("Invoked with args: {:?}", opts);

    // Repository location: $IPFS_PATH if set, else $HOME/.rust-ipfs.
    // go-ipfs seems to deduce like this
    let home = std::env::var_os("IPFS_PATH")
        .map(PathBuf::from)
        .or_else(|| {
            std::env::var_os("HOME").map(|tilde| {
                let mut path = PathBuf::from(tilde);
                path.push(".rust-ipfs");
                path
            })
        });

    // FIXME: need to process cmdline args here, but trying to understand js-ipfsd-ctl right now a
    // bit more.
    let home = home.unwrap_or_else(|| {
        eprintln!("IPFS_PATH and HOME unset");
        std::process::exit(1);
    });

    let config_path = home.join("config");

    // `Init` exits the process in every branch; only `Daemon` produces a config.
    let config = match opts {
        Options::Init { bits, profile } => {
            println!("initializing IPFS node at {:?}", home);

            if config_path.is_file() {
                eprintln!("Error: ipfs configuration file already exists!");
                eprintln!("Reinitializing would override your keys.");
                std::process::exit(1);
            }

            let result = config::init(&home, bits, profile);

            match result {
                Ok(peer_id) => {
                    // go-ipfs prints here (in addition to earlier "initializing ..."):
                    //
                    // generating {}-bit RSA keypair...done
                    println!("peer identity: {}", peer_id);
                    std::process::exit(0);
                }
                Err(config::InitializationError::DirectoryCreationFailed(e)) => {
                    eprintln!("Error: failed to create repository path {:?}: {}", home, e);
                    std::process::exit(1);
                }
                Err(config::InitializationError::ConfigCreationFailed(_)) => {
                    // this can be any number of errors like permission denied but these are the
                    // strings from go-ipfs
                    eprintln!("Error: ipfs configuration file already exists!");
                    eprintln!("Reinitializing would override your keys.");
                    std::process::exit(1);
                }
                Err(config::InitializationError::InvalidRsaKeyLength(bits)) => {
                    eprintln!("Error: --bits out of range [2048, 16384]: {}", bits);
                    eprintln!("This is a fake version of ipfs cli which does not support much");
                    std::process::exit(1);
                }
                Err(config::InitializationError::InvalidProfile(profile)) => {
                    eprintln!("Error: unsupported profile selection: {:?}", profile);
                    eprintln!("This is a fake version of ipfs cli which does not support much");
                    std::process::exit(1);
                }
                Err(e) => {
                    eprintln!("Error: {}", e);
                    std::process::exit(1);
                }
            }
        }
        Options::Daemon => {
            // FIXME: toctou, should just match for this err?
            if !config_path.is_file() {
                eprintln!("Error: no IPFS repo found in {:?}", home);
                eprintln!("please run: 'ipfs init'");
                std::process::exit(1);
            }

            std::fs::File::open(config_path)
                .map_err(config::LoadingError::ConfigurationFileOpening)
                .and_then(config::load)
                .unwrap()
        }
    };

    println!("IPFS_PATH: {:?}", home);
    println!("Process id: {}", std::process::id());

    // TODO: sigterm should initiate graceful shutdown, second time should shutdown right now
    // NOTE: sigkill ... well surely it will stop the process right away

    let rt = tokio::runtime::Runtime::new().expect("Failed to create event loop");

    rt.block_on(async move {
        let opts = IpfsOptions {
            ipfs_path: home.clone(),
            keypair: config.keypair,
            bootstrap: Vec::new(),
            mdns: false,
            kad_protocol: None,
            listening_addrs: config.swarm,
            span: None,
        };

        // TODO: handle errors more gracefully.
        let (ipfs, task): (Ipfs<ipfs::Types>, _) = UninitializedIpfs::new(opts)
            .start()
            .await
            .expect("Initialization failed");

        tokio::spawn(task);

        let api_link_file = home.join("api");

        // `serve` clones `ipfs`, so the local handle can be dropped right away.
        let (addr, server) = serve(&ipfs, config.api_addr);

        // shutdown future will handle signalling the exit
        drop(ipfs);

        // We can't simply reuse the address from the config as the test profile uses ephemeral
        // ports.
        let api_multiaddr = format!("/ip4/{}/tcp/{}", addr.ip(), addr.port());

        // this file is looked for when js-ipfsd-ctl checks optimistically if the IPFS_PATH has a
        // daemon running already. go-ipfs file does not contain newline at the end.
        let wrote = tokio::fs::write(&api_link_file, &api_multiaddr)
            .await
            .is_ok();

        println!("API listening on {}", api_multiaddr);
        println!("daemon is running");

        // Runs until the graceful-shutdown trigger inside `serve` fires.
        server.await;

        if wrote {
            // FIXME: this should probably make sure the contents match what we wrote or do some
            // locking on the repo, unsure how go-ipfs locks the fsstore
            let _ = tokio::fs::File::create(&api_link_file)
                .await
                .map_err(|e| info!("Failed to truncate {:?}: {}", api_link_file, e));
        }
    });

    info!("Shutdown complete");
}

/// Binds the HTTP API on `listening_addr` (must be `/ip4/../tcp/..`) and
/// returns the bound socket address plus the server future. The server shuts
/// down gracefully once any route sends on the internal shutdown channel.
///
/// Panics if the multiaddr is not an IPv4 TCP address.
fn serve<Types: IpfsTypes>(
    ipfs: &Ipfs<Types>,
    listening_addr: Multiaddr,
) -> (std::net::SocketAddr, impl std::future::Future<Output = ()>) {
    use std::net::SocketAddr;
    use warp::Filter;
    let (shutdown_tx, mut shutdown_rx) = tokio::sync::mpsc::channel::<()>(1);
    let routes = v0::routes(ipfs, shutdown_tx);
    let routes = routes.with(warp::log(env!("CARGO_PKG_NAME")));

    let ipfs = ipfs.clone();

    let components = listening_addr.iter().collect::<Vec<_>>();

    let socket_addr = match components.as_slice() {
        [Protocol::Ip4(ip), Protocol::Tcp(port)] => SocketAddr::new((*ip).into(), *port),
        _ => panic!(
            "Couldn't convert MultiAddr into SocketAddr: {}",
            listening_addr
        ),
    };

    warp::serve(routes).bind_with_graceful_shutdown(socket_addr, async move {
        let _ = shutdown_rx.recv().await;

        info!("Shutdown trigger received; starting shutdown");

        ipfs.exit_daemon().await;
    })
}
use std::convert::TryInto; use std::path::Path; use std::fs::OpenOptions; use std::os::raw::c_int; use std::os::unix::io::AsRawFd; use std::io::{Read,Write}; pub fn append_to_file_at_path(path: &Path, buf: &[u8]) -> Result<(),String> { let mut file = match OpenOptions::new().write(true).append(true).open(path) { // let mut file = match OpenOptions::new().write(true).append(true).open(path) { Ok(x) => x, Err(_) => { return Err(format!("Could not open '{}' for appending", path.display())); }, }; if let Err(_) = file.write_all(buf) { return Err(format!("Could not append data to '{}'", path.display())); } if let Err(_) = file.flush() { return Err(format!("Could not append data to '{}' (flush failed)", path.display())); } eprintln!("Appending to file '{}':\n{}", path.display(), std::str::from_utf8(buf).unwrap()); Ok(()) } pub fn slurp_file_at_path(path: &Path) -> Result<Vec<u8>, String> { let mut file = match OpenOptions::new().read(true).open(path) { Ok(x) => x, Err(_) => { return Err(format!("Could not open '{}' for reading", path.display())); }, }; let mut buf = Vec::new(); if let Err(_) = file.read_to_end(&mut buf) { return Err(format!("Could not read data from '{}'", path.display())); } eprintln!("Slurped file '{}':\n{}", path.display(), std::str::from_utf8(&buf).unwrap()); Ok(buf) } pub fn slurp_and_parse_file_at_path<T: std::str::FromStr>(path: &Path) -> Result<T, String> { let buf = slurp_file_at_path(path)?; match std::str::from_utf8(&buf[0..buf.len()-1]) { Ok(s) => { match s.parse::<T>() { Ok(p) => { Ok(p) }, Err(_) => { Err(format!("Data '{}' from file '{}' could not be parsed", s, path.display())) } } }, Err(_) => { Err(format!("Slurped file '{}' is not valid utf8", path.display())) } } } pub fn fd_poll_read(fd: c_int, timeout_ms: c_int) -> bool { let mut pollfd = libc::pollfd{ fd: fd, events: libc::POLLIN, revents: 0, }; let poll_ret = unsafe { libc::poll( &mut pollfd as *mut libc::pollfd, 1, timeout_ms, ) }; match poll_ret { 0 => { false }, 1 => { 
pollfd.revents & libc::POLLIN != 0 }, _ => { panic!("Unexpected poll return status"); } } } pub fn poll_read(file: &dyn AsRawFd, timeout: std::time::Duration) -> bool { fd_poll_read(file.as_raw_fd(), timeout.as_millis().try_into().unwrap()) } pub fn assert_read(read: &mut dyn Read, expected: &[u8]) -> Result<(),()> { let mut actual: Vec<u8> = vec![0; expected.len()]; if let Err(e) = read.read_exact(&mut actual) { eprintln!("Error trying to read an expected value: {:?}", e); return Err(()); } if actual == expected { Ok(()) } else { Err(()) } }
/// Demonstrates that `Option` acts as an iterator source: `Some` yields a
/// single element, `None` yields nothing, so it collects into a Vec directly.
fn main() {
    let x = Some("string");
    let v = x.into_iter().collect::<Vec<&str>>();
    assert_eq!(v, ["string"]);
    println!("{:?}", v);
}
use crate::traits::UIElement;
use crate::traits::UIEvent;
use super::util::cursor_in_rect;
use piston::input::{ UpdateArgs, RenderArgs, Key };
// Import drawing helper functions
use graphics::{Context, rectangle, text, Transformed};
use graphics::character::CharacterCache;
use opengl_graphics::GlGraphics;
use opengl_graphics::GlyphCache;
use opengl_graphics::TextureSettings;

/// A dropdown list widget: a stack of selectable text rows rendered with a
/// private glyph cache.
pub struct UIDropdown {
    id: usize,                      // widget id reported in emitted events
    items: Vec<(usize, String)>,    // (item id, label) pairs, top to bottom
    base_font_size: f64,
    // NOTE(review): initialised to the dropdown's anchor position but then
    // overwritten with the cursor position by `on_cursor_movement`; render/
    // on_click treat it as the cursor. Before the first mouse move, hover
    // testing uses the anchor — confirm that is intended.
    position: [f64; 2],
    rect: [f64; 4],                 // [x, y, width, height] of the whole list
    item_height: f64,               // row height = font size + 2 * padding
    padding: f64,
    draw_from_bottom: bool,         // true: list extends upwards from anchor
    font: graphics::glyph_cache::rusttype::GlyphCache<'static, (), opengl_graphics::Texture>
}

impl UIDropdown {
    /// Builds a dropdown anchored at `position`. When `draw_up` is set the
    /// list grows upwards from the anchor. Width is the widest label (plus
    /// padding), clamped up to `min_width`. Panics if the font at `font_path`
    /// cannot be loaded or a glyph width cannot be measured.
    pub fn create (id: usize, items: Vec<(usize, String)>, draw_up: bool, position: [f64; 2], min_width: f64, font_size: f64, font_path: String) -> Self {
        // Copy over the elements
        let mut i = Vec::new();
        for (idx, elem) in items.iter() {
            i.push((*idx, elem.clone()));
        }

        let padding = 5.0;
        let mut font = GlyphCache::new(font_path.as_str(), (), TextureSettings::new()).unwrap();
        let item_height = font_size + 2.0 * padding;
        let item_count = items.len() as f64;

        let top_x = position[0];
        let mut top_y = position[1];
        if draw_up {
            // Calculate top_y depending on the size
            top_y -= item_height * item_count;
        }

        // Measure the widest label to size the dropdown.
        let mut item_width = 0.0;
        for (_idx, item) in items.iter() {
            let width = font.width(font_size as u32, item.as_str()).unwrap() + 2.0 * padding;
            if width > item_width {
                item_width = width;
            }
        }
        if item_width < min_width {
            item_width = min_width;
        }

        Self {
            id,
            items: i,
            base_font_size: font_size,
            rect: [top_x, top_y, item_width, item_height * item_count],
            position,
            padding,
            item_height,
            draw_from_bottom: draw_up,
            font
        }
    }
}

impl UIElement for UIDropdown {
    /// Draws the dropdown: background, a hover highlight for the row under the
    /// cursor, then each label.
    fn render (&mut self, gl: &mut GlGraphics, context: Context, _args: &RenderArgs) {
        // Draws a dropdown
        let bg_color = [0.1, 0.2, 0.4, 1.0];
        let hover_color = [0.2, 0.6, 0.8, 1.0];
        let fg_color = [1.0, 1.0, 1.0, 1.0];
        // Now we have the correct x/y coords, the width and the height. Now: DRAW!
        rectangle(bg_color, self.rect, context.transform, gl);
        let mut i = -1.0;
        for (_idx, item) in self.items.iter() {
            // Row index as f64 so it can scale item_height directly.
            i += 1.0;
            let item_y = self.rect[1] + i * self.item_height;
            if cursor_in_rect(self.position, [self.rect[0], item_y, self.rect[2], self.item_height]) {
                // Hover effect for the item
                rectangle(hover_color, [self.rect[0], item_y, self.rect[2], self.item_height], context.transform, gl);
            }
            text::Text::new_color(fg_color, self.base_font_size as u32).draw(
                item.as_str(),
                &mut self.font,
                &context.draw_state,
                context.transform.trans(self.rect[0] + self.padding, item_y + self.padding + self.base_font_size),
                gl
            ).unwrap();
        }
    }

    fn update (&mut self, _args: &UpdateArgs) {}
    fn on_cursor_state (&mut self, _is_over_window: bool) {}

    // /// Called whenever the cursor position changed
    fn on_cursor_movement (&mut self, x: f64, y: f64) {
        self.position = [x, y];
    }

    // /// Called when the left button has been pressed
    fn on_click (&mut self) -> Option<UIEvent> {
        // If the cursor is not in the rect there's nothing to do anyway
        if !cursor_in_rect(self.position, self.rect) {
            return None;
        }
        let mut i = -1.0;
        for (_idx, _item) in self.items.iter() {
            // Same row-walk as in `render`.
            i += 1.0;
            let item_y = self.rect[1] + i * self.item_height;
            if cursor_in_rect(self.position, [self.rect[0], item_y, self.rect[2], self.item_height]) {
                // Cursor is within this element
                // Following event needs to be emitted:
                // NOTE(review): this reports the row index `i`, not the item's
                // own id (`_idx`) — confirm which one consumers expect.
                return Some(UIEvent::Selection(i as usize, self.id));
            }
        }
        None // Fallback
    }

    // /// Called when a key on the keyboard has been pressed
    fn on_keypress (&mut self, _key: Key) {
        // TODO: Handle up/down arrows when this thing has focus
    }
}
fn main() {
    let string = String::from("carson is my name");
    let first_word = find_nth_word(&string, 1);
    let second_word = find_nth_word(&string, 2);
    println!("{}", first_word);
    println!("{}", second_word);
}

/// Returns the nth word (1-based, single-space separated) of `input_string`.
///
/// Bug fix: the last word is now returned correctly. The previous
/// index-counting version only terminated a word at a space, so with no
/// trailing space the final word fell through to the "return everything"
/// fallback.
///
/// As before, consecutive spaces delimit empty "words", and an out-of-range
/// `n` (<= 0 or greater than the word count) falls back to the whole string.
fn find_nth_word(input_string: &str, n: i32) -> &str {
    if n <= 0 {
        // Matches the old behaviour for non-positive n: whole string.
        return input_string;
    }
    // split(' ') mirrors the original byte-wise space counting exactly
    // (it does not merge runs of spaces, unlike split_whitespace).
    input_string
        .split(' ')
        .nth((n - 1) as usize)
        .unwrap_or(input_string)
}
//! Game entity modules.
pub mod bullets;
pub mod tank;
use actix_web::{web, HttpResponse, Result}; use actix_web::dev::{ServiceResponse, Body, ResponseBody}; use actix_web::http::{StatusCode, header}; use actix_web::middleware::errhandlers::{ErrorHandlerResponse, ErrorHandlers}; use tera::{Context, Tera}; pub fn init_error_handlers() -> ErrorHandlers<Body> { ErrorHandlers::new() .handler(StatusCode::NOT_FOUND, not_found) .handler(StatusCode::BAD_REQUEST, bad_request) .handler(StatusCode::INTERNAL_SERVER_ERROR, internal_server_error) .handler(StatusCode::FORBIDDEN, forbidden) } fn not_found<B>(res: ServiceResponse<B>) -> Result<ErrorHandlerResponse<B>> { let error_res = get_error_response(&res, "The resource could not be found."); Ok(ErrorHandlerResponse::Response(error_res.unwrap_or(res))) } fn bad_request<B>(res: ServiceResponse<B>) -> Result<ErrorHandlerResponse<B>> { let error_res = get_error_response(&res, "The request could not be processed."); Ok(ErrorHandlerResponse::Response(error_res.unwrap_or(res))) } fn internal_server_error<B>(res: ServiceResponse<B>) -> Result<ErrorHandlerResponse<B>> { let error_res = get_error_response(&res, "An unexpected error has ocurred. 
Try gain later."); Ok(ErrorHandlerResponse::Response(error_res.unwrap_or(res))) } fn forbidden<B>(res: ServiceResponse<B>) -> Result<ErrorHandlerResponse<B>> { let error_res = get_error_response(&res, "You are not authorized to perform this request."); Ok(ErrorHandlerResponse::Response(error_res.unwrap_or(res))) } fn get_error_response<B>(res: &ServiceResponse<B>, message: &str) -> Option<ServiceResponse<B>> { let req = res.request(); let tera = req.app_data::<web::Data<Tera>>().map(|t| t.get_ref()); // Attempt to replace response body with template match tera { Some(tera) => { let mut context = Context::new(); // Override generic message with error message, if available let new_message = match res.response().error() { Some(e) => e.to_string(), None => message.to_string(), }; context.insert("message", &new_message); context.insert("reason", res.status().canonical_reason().unwrap_or("Error")); context.insert("status_code", res.status().as_str()); let body = tera.render("error.html.tera", &context); match body { Ok(body) => { let new_res = HttpResponse::build(res.status()) .set_header(header::CONTENT_TYPE, "text/html") .finish(); let new_service_res = ServiceResponse::new(req.clone(), new_res) .map_body(|_, _| ResponseBody::Body(Body::from(body)).into_body()); Some(new_service_res) }, Err(_) => None, } }, None => None, } }
use game::*;

/// Per-player, per-step probability of a zombie spawn.
const ZOMBIE_SPAWN_CHANCE: f64 = 0.01;
/// Hard cap on simultaneously alive zombies.
const MAX_ZOMBIES: usize = 30;
/// Per-player, per-step probability of a wolf-pack spawn.
const WOLF_PACK_SPAWN_CHANCE: f64 = 0.001;
/// Hard cap on simultaneously alive wolf packs.
const MAX_WOLF_PACKS: usize = 4;

impl MonsterSpawner {
    /// Creates a spawner using the default spawn chances.
    pub fn default() -> Self {
        MonsterSpawner {
            zombie_spawn_chance: ZOMBIE_SPAWN_CHANCE,
            wolf_pack_spawn_chance: WOLF_PACK_SPAWN_CHANCE,
        }
    }

    /// Rolls spawn chances at each player's position, then performs all
    /// spawns afterwards.
    pub fn step(game: &mut Game1) {
        // Spawn positions are collected first because spawning mutates `game`,
        // which cannot happen while `game.players` is being iterated.
        let mut zombie_spawns = Vec::new();
        let mut wolf_pack_spawns = Vec::new();
        for player in game.players.iter() {
            let spawner = &mut game.monster_spawner;
            let mover = game.movers.at(player.mover_id.id);
            let coords = (mover.x, mover.y);
            if game.rand_gen.gen_range(0.0, 1.0) < spawner.zombie_spawn_chance
                && game.zombies.count() < MAX_ZOMBIES
            {
                zombie_spawns.push(coords.clone());
            }
            // Fix: the redundant `.clone()` on the final use of `coords` is
            // gone — the tuple can simply be moved here.
            if game.rand_gen.gen_range(0.0, 1.0) < spawner.wolf_pack_spawn_chance
                && game.packs.count() < MAX_WOLF_PACKS
            {
                wolf_pack_spawns.push(coords);
            }
        }
        for (x, y) in zombie_spawns {
            Zombie::spawn(game, x, y);
        }
        for (x, y) in wolf_pack_spawns {
            Pack::spawn(game, x, y);
        }
    }
}
//! Procedural wallpaper generator: fills an image with a background colour and
//! overlays circles, rectangles, bars, or an escape-time fractal, then saves
//! it as PNG.

extern crate num;
extern crate argparse;
extern crate image;
extern crate rand;

use std::fs::File;
use std::path::Path;
use rand::Rng;
use argparse::{ArgumentParser, Store, StoreTrue};
use num::complex::Complex;

// Shapes to draw
/// Anything that can colour the pixel at (x, y); returns whether it drew it.
trait Drawable {
    fn draw(&self, px: &mut image::Rgb<u8>, x: u32, y: u32) -> bool;
}

/// Lightens `px` towards white, with `factor` interpreted as a percentage.
fn brighten(px: &image::Rgb<u8>, factor: u8) -> image::Rgb<u8> {
    let white = image::Rgb::<u8> { data: [255, 255, 255] };
    let factor = factor as f32 / 100.0f32;
    // Linear interpolation between the colour and full white.
    // This brighten function is naive, but fast.
    // NOTE(review): the extra `/ 15.0f32` damps the interpolation strongly —
    // presumably tuned by eye for the fractal; confirm before reuse.
    let brightened: Vec<u16> = px.data.iter().zip(white.data.iter()).map(|(&fv, &tv)| (fv as f32 + ((factor * ((tv as u16 - fv as u16) as f32)) / 15.0f32)) as u16).collect();
    image::Rgb::<u8> { data: [brightened[0] as u8, brightened[1] as u8, brightened[2] as u8] }
}

/// Escape-time fractal parameters.
/// NOTE(review): with the fixed constant c = -0.4 + 0.6i and z seeded from the
/// pixel coordinate, this renders a Julia set rather than the Mandelbrot set,
/// despite the name.
struct Mandlebrot {
    max_iterations: u16,
    scalex: f32,
    scaley: f32
}

impl Drawable for Mandlebrot {
    fn draw(&self, px: &mut image::Rgb<u8>, x: u32, y: u32) -> bool {
        // Map pixel coordinates into the complex plane, offset by -2.
        let cy = y as f32 * self.scaley - 2.0;
        let cx = x as f32 * self.scalex - 2.0;

        let mut z = Complex::new(cx, cy);
        let c = Complex::new(-0.4, 0.6);

        // `i` ends up as the last iteration before escape (or max reached).
        let mut i = 0;
        for t in 0..self.max_iterations {
            if z.norm() > 2.0 {
                break
            }
            z = z * z + c;
            i = t;
        }

        // Brighten the pixel at (x, y) by a factor of i
        // and assign in to the pixel at position (x, y)
        // NOTE(review): `i as u8` truncates iteration counts above 255.
        *px = brighten(px, i as u8);
        true
    }
}

/// A pixel coordinate (x, y).
struct Point(u32, u32);

/// Axis-aligned filled rectangle.
struct Rect {
    origin: Point,
    length: u32,
    height: u32,
    colour: image::Rgb<u8>
}

impl Drawable for Rect {
    fn draw(&self, px: &mut image::Rgb<u8>, x: u32, y: u32) -> bool {
        // Inclusive bounds on both edges.
        if x >= self.origin.0 && x <= self.origin.0 + self.length &&
           y >= self.origin.1 && y <= self.origin.1 + self.height {
            *px = self.colour;
            true
        } else {
            false
        }
    }
}

/// Filled circle, tested via the squared-distance inequality.
struct Circle {
    origin: Point,
    radius: u32,
    colour: image::Rgb<u8>
}

impl Drawable for Circle {
    fn draw(&self, px: &mut image::Rgb<u8>, x: u32, y: u32) -> bool {
        if (x as i32 - self.origin.0 as i32).pow(2) + (y as i32 - self.origin.1 as i32).pow(2) < self.radius.pow(2) as i32 {
            *px = self.colour;
            true
        } else {
            false
        }
    }
}

/// Maps a single hex digit to its value.
/// NOTE(review): only uppercase 'A'-'F' are accepted; lowercase hex codes make
/// `parse_hex` panic — confirm whether lowercase input should be supported.
fn hex_char_to_n(c: char) -> Option<u8> {
    ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'].iter().position(|&hc| hc == c).map(|i| i as u8)
}

/// Parses a hex string into a byte. Panics on invalid characters.
/// Only safe for strings of at most two digits: `16u8.pow(p)` overflows for
/// digit positions >= 2 (callers pass 2-char chunks, so p is 0 or 1).
fn parse_hex(string: &str) -> u8 {
    let string = String::from(string);
    string.chars()
        .rev()
        .zip((0..string.len()))
        .fold(0, |acc, (c, p)| if let Some(n) = hex_char_to_n(c) {
            acc + n * (16 as u8).pow(p as u32)
        } else {
            panic!("Invalid hex character: {}", c);
        })
}

/// Parses a "#RRGGBB" colour code into an RGB pixel. Panics on malformed input.
fn colour_parse(string: &str) -> image::Rgb<u8> {
    // Parse hex colour codes in the "#RRGGBB"
    let data = String::from(string).chars()
        .skip(1) // Drop the first '#' character
        .collect::<Vec<char>>()
        .chunks(2)
        .map(|chars| if chars.len() == 2 {
            let mut s = String::new();
            for c in chars {
                s.push(*c);
            }
            parse_hex(s.as_str())
        } else {
            panic!("Invalid hex code: {}", string);
        })
        .collect::<Vec<u8>>();
    if data.len() == 3 {
        image::Rgb::<u8> { data: [data[0], data[1], data[2]] }
    } else {
        panic!("Invalid hex code: {}", string);
    }
}

/// Parses command-line options, builds the requested shape list, rasterises
/// every shape over every pixel, and optionally saves the result as PNG.
fn main() {
    // Defaults; all overridable from the command line below.
    let mut height = 768;
    let mut width = 1366;
    let mut background = String::from("#000000");
    let mut colours = String::from("#FFFFFF,#FF0000,#00FF00,#0000FF");
    let mut shape_type = String::from("Circle");
    let mut shape_count = 10;
    let mut bars = 5;
    let mut vertical_bars = false;
    let mut max_radius = 250;
    let mut max_length = 250;
    let mut max_height = 250;
    let mut max_iterations = 256u16;
    let mut scale = 4.0f32;
    let mut out = String::new();
    {
        // Scope so `ap`'s borrows of the option variables end before use.
        let mut ap = ArgumentParser::new();
        ap.set_description("Generate a wallpaper with some colours and shapes.");
        ap.refer(&mut height)
            .add_option(&["-h", "--height"], Store, "Set the image height.");
        ap.refer(&mut width)
            .add_option(&["-w", "--width"], Store, "Set the image width.");
        ap.refer(&mut background)
            .add_option(&["-b", "--background"], Store, "Set the image background colour.");
        ap.refer(&mut colours)
            .add_option(&["-c", "--colours"], Store, "Set the image colours (comma separated #RRGGBB values).");
        ap.refer(&mut shape_count)
            .add_option(&["-n", "--num-shapes"], Store, "Set the number of shapes generated (if Circles or Rectangles is selected).");
        ap.refer(&mut bars)
            .add_option(&["--bars"], Store, "Set the number of bars (if Bars style is selected)");
        ap.refer(&mut vertical_bars)
            .add_option(&["--vertical-bars"], StoreTrue, "Set the number of bars (if Bars style is selected)");
        ap.refer(&mut shape_type)
            .add_option(&["-s", "--style"], Store, "Set the style of wallpaper (Circles, Rectangles, Bars, Mandlebrot). Default is Circles.");
        ap.refer(&mut max_radius)
            .add_option(&["-r", "--radius"], Store, "Set the maximum radius of the circles (if Circles style is selected)");
        ap.refer(&mut max_length)
            .add_option(&["--rl", "--rect-length"], Store, "Set the maximum length of the rectangles (if Rectangles style is selected)");
        ap.refer(&mut max_height)
            .add_option(&["--rh", "--rect-height"], Store, "Set the maximum height of the rectangles (if Rectangles style is selected)");
        ap.refer(&mut out)
            .add_option(&["-o", "--out"], Store, "Set the output file for the wallpaper");
        ap.refer(&mut max_iterations)
            .add_option(&["--max-iterations"], Store, "Set the maximum iterations for the Mandlebrot generator");
        ap.refer(&mut scale)
            .add_option(&["--fractal-scale"], Store, "Set the scale for the Mandlebrot generator");
        ap.parse_args_or_exit();
    }

    let background_colour = colour_parse(background.as_str());
    let shape_colours = colours.split(',')
        .map(|colour| colour_parse(colour)).collect::<Vec<image::Rgb<u8>>>();
    let mut imgbuf = image::ImageBuffer::from_pixel(width, height, background_colour);
    let mut rng = rand::thread_rng();

    // Shapes are drawn in insertion order; later shapes overdraw earlier ones.
    let mut shapes: Vec<Box<Drawable>> = vec![];
    match shape_type.as_str() {
        "Circle" | "Rectangle" => {
            for _ in 0..shape_count {
                match shape_type.as_str() {
                    "Circle" => {
                        shapes.push(Box::new(Circle {
                            origin: Point(rng.gen::<u32>() % width, rng.gen::<u32>() % height),
                            radius: rng.gen::<u32>() % max_radius,
                            colour: *rand::thread_rng().choose(&shape_colours).unwrap()
                        }));
                    }
                    "Rectangle" => {
                        shapes.push(Box::new(Rect {
                            origin: Point(rng.gen::<u32>() % width, rng.gen::<u32>() % height),
                            length: rng.gen::<u32>() % max_length,
                            height: rng.gen::<u32>() % max_height,
                            colour: *rand::thread_rng().choose(&shape_colours).unwrap()
                        }));
                    }
                    _ => panic!("Unsupported shape type: {}", shape_type)
                }
            }
        }
        "Mandlebrot" => {
            shapes.push(Box::new(Mandlebrot {
                max_iterations: max_iterations,
                scalex: scale / width as f32,
                scaley: scale / height as f32
            }));
        }
        "Bars" => {
            let bar_height: u32 = ((height as f64) / (bars as f64)).ceil() as u32; // Used if --vertical-bars is not set
            let bar_length: u32 = ((width as f64) / (bars as f64)).ceil() as u32; // Used if --vertical-bars is set
            // Largest bar first; each subsequent (smaller) bar overdraws it.
            for i in (1..bars+1).rev() {
                shapes.push(Box::new(Rect {
                    origin: Point(0, 0),
                    length: if vertical_bars { bar_length * i } else { width },
                    height: if !vertical_bars { bar_height * i } else { height },
                    colour: *rand::thread_rng().choose(&shape_colours).unwrap()
                }));
            }
        }
        _ => {
            panic!("Unsupported wallpaper type: {}", shape_type);
        }
    }

    // Rasterise: every shape gets a chance at every pixel.
    for (x, y, pixel) in imgbuf.enumerate_pixels_mut() {
        for shape in &shapes {
            shape.draw(pixel, x, y);
        }
    }

    if out != String::new() {
        let ref mut fout = File::create(&Path::new(&out)).unwrap();
        let _ = image::ImageRgb8(imgbuf).save(fout, image::PNG);
    }
}
use super::{types, ecc};
use std::io::Cursor;
use std::error::Error;
use rocksdb::DB;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};

/// Data-access layer over RocksDB holding a FIFO of signed tokens plus a
/// spent-token set. Record layout under `token_<n>`:
///   [u32 LE token length][token bytes][compressed ECP point bytes].
/// Two counters track the queue: CURRENT_TOKEN_KEY (next to pop) and
/// FREE_TOKEN_KEY (next free slot).
pub struct DAL {
    pub db: DB,
}

const CURRENT_TOKEN_KEY: &str = "current_token";
const FREE_TOKEN_KEY: &str = "free_token";
const TOKEN_KEY_PREFIX: &str = "token_";

impl DAL {
    /// Opens (or creates) the RocksDB database at `db_path`.
    pub fn new(db_path: &str) -> Result<DAL, Box<Error>> {
        let dal = DAL {
            db: DB::open_default(db_path)?,
        };
        Ok(dal)
    }

    /// Serialises `token` + `signed_token` and appends the record to the queue.
    ///
    /// NOTE(review): the record is written at key `free + 1` while
    /// `inc_next_free_token` advances the counter only to `free + 1` as well,
    /// so `get_tokens`/`pop_next_token` (which iterate `current..free`,
    /// exclusive) never see the most recently added token — looks like an
    /// off-by-one; confirm intended.
    pub fn add_token(&mut self, token: &[u8], signed_token: &types::curve::ecp::ECP) -> Result<(), Box<Error>> {
        let next_token_num = self.get_next_free_token()?;
        let next_token_num = next_token_num + 1;

        // Compressed ECP encoding: MODBYTES plus a one-byte prefix.
        let point_bytes_len = types::curve::big::MODBYTES + 1;
        let mut point_bytes = vec![0; point_bytes_len];
        signed_token.tobytes(&mut point_bytes, true);

        // Record = [len:u32 LE][token][point].
        let mut val = vec![];
        val.write_u32::<LittleEndian>(token.len() as u32)?;
        val.extend_from_slice(token);
        val.extend_from_slice(&point_bytes);

        let next_token_key = format!("{}{}", TOKEN_KEY_PREFIX, next_token_num);
        self.db.put(next_token_key.as_bytes(), &val)?;
        self.inc_next_free_token()?;
        Ok(())
    }

    /// Decodes and returns every queued (token, point) pair without consuming
    /// them. Errs when the queue is empty.
    ///
    /// NOTE(review): record decoding here duplicates `pop_next_token`'s logic;
    /// a shared private parse helper would keep the two in sync.
    pub fn get_tokens(&self) -> Result<Vec<(Vec<u8>,types::curve::ecp::ECP)>, Box<Error>> {
        let current_token_num = self.get_current_token()?;
        let next_token_num = self.get_next_free_token()?;
        if current_token_num == next_token_num {
            return Err("not enough tokens.".into());
        }
        let mut tokens = vec![];
        debug!("current_token_num: {}, next_token_num: {}", current_token_num, next_token_num);
        for i in current_token_num..next_token_num {
            let current_token_key = format!("{}{}", TOKEN_KEY_PREFIX, i);
            // `unwrap()` assumes the counters and stored keys never diverge.
            let stored_token_bytes : &[u8] = &*self.db.get(current_token_key.as_bytes())?.unwrap();
            let mut pos : usize = 0;
            // First 4 bytes: token length (u32 LE).
            let token_length_bytes = &stored_token_bytes[..4];
            let mut rdr = Cursor::new(token_length_bytes);
            let token_length = rdr.read_u32::<LittleEndian>()?;
            pos += 4;
            let token = &stored_token_bytes[pos..pos+(token_length as usize)];
            pos += token_length as usize;
            let ecp_length = types::curve::big::MODBYTES + 1;
            let p = ecc::ecp_from_bytes(&stored_token_bytes[pos..pos+(ecp_length as usize)])?;
            tokens.push((token.to_vec(), p));
        }
        Ok(tokens)
    }

    /// Decodes the token at the head of the queue, advances the head pointer,
    /// and returns the (token, point) pair. Errs when the queue is empty.
    pub fn pop_next_token(&mut self) -> Result<(Vec<u8>,types::curve::ecp::ECP), Box<Error>> {
        let current_token_num = self.get_current_token()?;
        let next_token_num = self.get_next_free_token()?;
        if current_token_num == next_token_num {
            return Err("not enough tokens.".into());
        }
        let current_token_key = format!("{}{}", TOKEN_KEY_PREFIX, current_token_num);
        let stored_token_bytes : &[u8] = &*self.db.get(current_token_key.as_bytes())?.unwrap();
        let mut pos : usize = 0;
        let token_length_bytes = &stored_token_bytes[..4];
        let mut rdr = Cursor::new(token_length_bytes);
        let token_length = rdr.read_u32::<LittleEndian>()?;
        pos += 4;
        let token = &stored_token_bytes[pos..pos+(token_length as usize)];
        pos += token_length as usize;
        let ecp_length = types::curve::big::MODBYTES + 1;
        let p = ecc::ecp_from_bytes(&stored_token_bytes[pos..pos+(ecp_length as usize)])?;
        self.inc_current_token()?;
        Ok((token.to_vec(), p))
    }

    /// Reads the head-of-queue counter; defaults to 0 when never written.
    fn get_current_token(&self) -> Result<i64, Box<Error>> {
        let current_token_num_db = self.db.get(CURRENT_TOKEN_KEY.as_bytes())?;
        let current_token_num_db : Result<_, Box<Error>> = match current_token_num_db {
            Some(s) => Ok(s),
            None => Err("current token num is undefined.".into()),
        };
        // Missing key means the queue was never popped: start at 0.
        if current_token_num_db.is_err() {
            return Ok(0);
        };
        let current_token_num_db = current_token_num_db.unwrap();
        let mut rdr = Cursor::new(&*current_token_num_db);
        let current_token = rdr.read_u32::<LittleEndian>()?;
        Ok(current_token as i64)
    }

    /// Reads the next-free-slot counter; defaults to -1 when never written
    /// (so the first `add_token` computes slot 0).
    fn get_next_free_token(&self) -> Result<i64, Box<Error>> {
        let next_token_num_db = self.db.get(FREE_TOKEN_KEY.as_bytes())?;
        let next_token_num_db : Result<_, Box<Error>> = match next_token_num_db {
            Some(s) => Ok(s),
            None => Err("next token num is undefined.".into()),
        };
        if next_token_num_db.is_err() {
            return Ok(-1);
        };
        let next_token_num_db = next_token_num_db.unwrap();
        let mut rdr = Cursor::new(&*next_token_num_db);
        let next_token = rdr.read_u32::<LittleEndian>()?;
        Ok(next_token as i64)
    }

    /// Advances the head-of-queue counter by one and persists it.
    fn inc_current_token(&mut self) -> Result<i64, Box<Error>> {
        let current_token = self.get_current_token()?;
        let current_token_num = current_token + 1;
        let mut current_token_inc = vec![];
        current_token_inc.write_u32::<LittleEndian>(current_token_num as u32)?;
        self.db.put(CURRENT_TOKEN_KEY.as_bytes(), &current_token_inc)?;
        Ok(current_token_num as i64)
    }

    /// Advances the next-free-slot counter by one and persists it.
    fn inc_next_free_token(&mut self) -> Result<u32, Box<Error>> {
        let next_token = self.get_next_free_token()?;
        let next_token_num = next_token + 1;
        let mut next_token_inc = vec![];
        next_token_inc.write_u32::<LittleEndian>(next_token_num as u32)?;
        self.db.put(FREE_TOKEN_KEY.as_bytes(), &next_token_inc)?;
        Ok(next_token_num as u32)
    }

    /// Marks `token` as spent, erroring if it was already marked.
    /// NOTE(review): get-then-put is not atomic; concurrent callers could both
    /// pass the check — confirm single-writer usage.
    pub fn store_spent(&mut self, token: &[u8]) -> Result<(), Box<Error>> {
        let stored_token_bytes_db = self.db.get(token)?;
        if !stored_token_bytes_db.is_none() {
            return Err("token already spent.".into());
        }
        self.db.put(token, &[1])?;
        Ok(())
    }
}
#![no_std]
#![no_main]

mod cmd;
mod uart_server;

#[macro_use]
extern crate lazy_static;
extern crate alloc;

use alloc::boxed::Box;
use hal::prelude::*;
use tm4c129x_hal as hal;

use cortex_m::peripheral::scb::Exception;
use fe_osi;
use fe_rtos;

// Firmware entry point: bring up the TM4C129x clocks and UART0, install the
// FeRTOS exception handlers, spawn the UART server and command tasks, then
// hand control to the scheduler (this function never returns).
#[no_mangle]
fn main() -> ! {
    // Take ownership of the device and core peripherals (panics if already taken).
    let p = hal::Peripherals::take().unwrap();
    let cp = hal::CorePeripherals::take().unwrap();

    // Clock the system from the 25 MHz main oscillator through the PLL at 120 MHz.
    let mut sc = p.SYSCTL.constrain();
    sc.clock_setup.oscillator = hal::sysctl::Oscillator::Main(
        hal::sysctl::CrystalFrequency::_25mhz,
        hal::sysctl::SystemClock::UsePll(hal::sysctl::PllOutputFrequency::_120mhz),
    );
    let clocks = sc.clock_setup.freeze();

    let mut porta = p.GPIO_PORTA_AHB.split(&sc.power_control);

    // Activate UART
    // (UART0 on pins PA0/PA1, 115200 baud, LF expanded to CRLF on output.)
    let uart0 = hal::serial::Serial::uart0(
        p.UART0,
        porta
            .pa1
            .into_af_push_pull::<hal::gpio::AF1>(&mut porta.control),
        porta
            .pa0
            .into_af_push_pull::<hal::gpio::AF1>(&mut porta.control),
        (),
        (),
        115200_u32.bps(),
        hal::serial::NewlineMode::SwapLFtoCRLF,
        &clocks,
        &sc.power_control,
    );

    // Route the core exceptions to the FeRTOS handlers: SysTick drives the
    // tick, PendSV performs the context switch, SVCall dispatches syscalls.
    fe_rtos::interrupt::int_register(Exception::SysTick.irqn(), fe_rtos::task::sys_tick);
    fe_rtos::interrupt::int_register(Exception::PendSV.irqn(), fe_rtos::task::context_switch);
    fe_rtos::interrupt::int_register(Exception::SVCall.irqn(), fe_rtos::syscall::svc_handler);

    // Give each UART direction to its own server task; the halves are boxed
    // and handed over as the task argument.
    let (uart0_tx, uart0_rx) = uart0.split();
    fe_osi::task::task_spawn(
        fe_rtos::task::DEFAULT_STACK_SIZE,
        uart_server::uart_transmit_server,
        Some(Box::new(uart0_tx)),
    );
    fe_osi::task::task_spawn(
        fe_rtos::task::DEFAULT_STACK_SIZE,
        uart_server::uart_receive_server,
        Some(Box::new(uart0_rx)),
    );
    fe_osi::task::task_spawn(fe_rtos::task::DEFAULT_STACK_SIZE, cmd::cmd, None);

    // Start the FeRTOS scheduler
    // SysTick reload derived from the 10 ms calibration value divided by 10.
    let reload_val: u32 = cortex_m::peripheral::SYST::get_ticks_per_10ms() / 10;
    fe_rtos::task::start_scheduler(cortex_m::peripheral::SCB::set_pendsv, cp.SYST, reload_val);

    // The scheduler takes over; this point is never reached in practice.
    loop {}
}
#![allow(clippy::all)]
#![allow(dead_code)]

//! This files represent the API endpoints for the IC System API.
//! It is meant to be a copy-paste of the System API from the spec,
//! and also not exported outside this crate.
//!
//! Each of these functions are in a private module accessible only
//! in this crate. Each function should have a rust-typed version here
//! as an export point, and have a fully counterpart that is public
//! and declared in [api.rs].
//!
//! An example is arg data; the msg_arg_data_copy() takes a pointer
//! and a length, there should be two versions of this API endpoint:
//!
//! 1. [ic0::private::msg_arg_data_copy(i32, i32) -> ()] that is the
//!    actual export of the system api.
//! 2. [api::msg_arg_data() -> Vec<u8>] which calls the size, allocate
//!    a buffer, and fills it with the data itself.

// These two macros are used to being able to copy-paste the system API imports from the
// spec without actually changing anything. This makes it possible to generate at build
// time the list of imports from the spec. We don't do that (yet) as the spec isn't
// open sourced.
// The exported methods are in an `internal` module.

// Maps a spec return annotation onto a Rust return type: a named single
// value `(name : T)` loses its name, a named pair becomes a tuple, and bare
// types pass through unchanged.
macro_rules! _ic0_module_ret {
    ( ( $_: ident : $t: ty ) ) => {
        $t
    };
    ( ( $_i1: ident : $t1: ty , $_i2: ident : $t2: ty) ) => {
        ($t1, $t2)
    };
    ( ( $t: ty ) ) => {
        $t
    };
    ( $t: ty ) => {
        $t
    };
}

// Declare the module itself as a list of API endpoints.
// Expands the spec list into (a) real imports from wasm module "ic0" when
// targeting wasm32, and (b) panicking native stubs otherwise, so that
// non-canister builds still link and tests fail loudly if they call one.
macro_rules! ic0_module {
    ( $( ic0. $name: ident : ( $( $argname: ident : $argtype: ty ),* ) -> $rettype: tt ; )+ ) => {
        #[allow(improper_ctypes)]
        #[cfg(target_arch = "wasm32")]
        #[link(wasm_import_module = "ic0")]
        extern "C" {
            $(pub(super) fn $name($( $argname: $argtype, )*) -> _ic0_module_ret!($rettype) ;)*
        }

        $(
            #[cfg(not(target_arch = "wasm32"))]
            pub(super) unsafe fn $name($( $argname: $argtype, )*) -> _ic0_module_ret!($rettype) {
                let _ = ( $( $argname, )* ); // make sure the arguments are used.
                panic!("{} should only be called inside canisters.", stringify!( $name ));
            }
        )*
    };
}

// This is a private module that can only be used internally in this file.
// Copy-paste the spec section of the API here.
//
// Current spec version: 0.18.2

/* The comment after each function lists from where these functions may be invoked:
   I: from canister_init or canister_post_upgrade
   G: from canister_pre_upgrade
   U: from canister_update
   Q: from canister_query
   …
   Ry: from a reply callback
   Rt: from a reject callback
   C: from a cleanup callback
   s: the (start) module initialization function
   F: from canister_inspect_message
   * = I G U Q Ry Rt C F (NB: Not (start))
*/
ic0_module! {
    ic0.msg_arg_data_size : () -> i32;                                          // I U Q Ry F
    ic0.msg_arg_data_copy : (dst : i32, offset : i32, size : i32) -> ();        // I U Q Ry F
    ic0.msg_caller_size : () -> i32;                                            // I G U Q F
    ic0.msg_caller_copy : (dst : i32, offset: i32, size : i32) -> ();           // I G U Q F
    ic0.msg_reject_code : () -> i32;                                            // Ry Rt
    ic0.msg_reject_msg_size : () -> i32;                                        // Rt
    ic0.msg_reject_msg_copy : (dst : i32, offset : i32, size : i32) -> ();      // Rt
    ic0.msg_reply_data_append : (src : i32, size : i32) -> ();                  // U Q Ry Rt
    ic0.msg_reply : () -> ();                                                   // U Q Ry Rt
    ic0.msg_reject : (src : i32, size : i32) -> ();                             // U Q Ry Rt
    ic0.msg_cycles_available : () -> i64;                                       // U Rt Ry
    ic0.msg_cycles_available128 : () -> (high : i64, low : i64);                // U Rt Ry
    ic0.msg_cycles_refunded : () -> i64;                                        // Rt Ry
    ic0.msg_cycles_refunded128 : () -> (high : i64, low: i64);                  // Rt Ry
    ic0.msg_cycles_accept : ( max_amount : i64) -> ( amount : i64 );            // U Rt Ry
    ic0.msg_cycles_accept128 : ( max_amount_high : i64, max_amount_low: i64) -> ( amount_high : i64, amount_low: i64 ); // U Rt Ry
    ic0.canister_self_size : () -> i32;                                         // *
    ic0.canister_self_copy : (dst : i32, offset : i32, size : i32) -> ();       // *
    ic0.canister_cycle_balance : () -> i64;                                     // *
    ic0.canister_cycle_balance128 : () -> (high : i64, low : i64);              // *
    ic0.canister_status : () -> i32;                                            // *
    ic0.msg_method_name_size : () -> i32;                                       // F
    ic0.msg_method_name_copy : (dst : i32, offset : i32, size : i32) -> ();     // F
    ic0.accept_message : () -> ();                                              // F
    ic0.call_new :                                                              // U Ry Rt H
        ( callee_src : i32, callee_size : i32, name_src : i32, name_size : i32, reply_fun : i32, reply_env : i32, reject_fun : i32, reject_env : i32 ) -> ();
    ic0.call_on_cleanup : (fun : i32, env : i32) -> ();                         // U Ry Rt H
    ic0.call_data_append : (src : i32, size : i32) -> ();                       // U Ry Rt H
    ic0.call_cycles_add : ( amount : i64 ) -> ();                               // U Ry Rt H
    ic0.call_cycles_add128 : ( amount_high : i64, amount_low: i64 ) -> ();      // U Ry Rt H
    ic0.call_perform : () -> ( err_code : i32 );                                // U Ry Rt H
    ic0.stable_size : () -> (page_count : i32);                                 // *
    ic0.stable_grow : (new_pages : i32) -> (old_page_count : i32);              // *
    ic0.stable_write : (offset : i32, src : i32, size : i32) -> ();             // *
    ic0.stable_read : (dst : i32, offset : i32, size : i32) -> ();              // *
    ic0.stable64_size : () -> (page_count : i64);                               // *
    ic0.stable64_grow : (new_pages : i64) -> (old_page_count : i64);            // *
    ic0.stable64_write : (offset : i64, src : i64, size : i64) -> ();           // *
    ic0.stable64_read : (dst : i64, offset : i64, size : i64) -> ();            // *
    ic0.certified_data_set : (src: i32, size: i32) -> ();                       // I G U Ry Rt H
    ic0.data_certificate_present : () -> i32;                                   // *
    ic0.data_certificate_size : () -> i32;                                      // *
    ic0.data_certificate_copy : (dst: i32, offset: i32, size: i32) -> ();       // *
    ic0.time : () -> (timestamp : i64);                                         // *
    ic0.performance_counter : () -> (counter : i64);                            // * s
    ic0.debug_print : (src : i32, size : i32) -> ();                            // * s
    ic0.trap : (src : i32, size : i32) -> ();                                   // * s
}
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::collections::BTreeMap;
use std::collections::HashMap;
use std::sync::Arc;

use common_base::base::tokio;
use common_catalog::table_mutator::TableMutator;
use common_exception::ErrorCode;
use common_exception::Result;
use common_expression::BlockThresholds;
use common_expression::DataBlock;
use common_expression::Scalar;
use common_expression::TableSchema;
use common_expression::TableSchemaRef;
use common_storages_fuse::pruning::FusePruner;
use databend_query::sessions::TableContext;
use databend_query::storages::fuse::io::SegmentWriter;
use databend_query::storages::fuse::io::TableMetaLocationGenerator;
use databend_query::storages::fuse::operations::ReclusterMutator;
use storages_common_table_meta::meta;
use storages_common_table_meta::meta::BlockMeta;
use storages_common_table_meta::meta::ClusterStatistics;
use storages_common_table_meta::meta::SegmentInfo;
use storages_common_table_meta::meta::Statistics;
use storages_common_table_meta::meta::TableSnapshot;
use storages_common_table_meta::meta::Versioned;
use uuid::Uuid;

use crate::storages::fuse::table_test_fixture::TestFixture;

/// Builds three one-block segments whose cluster-key ranges overlap
/// ([1,3], [2,4], [4,5]) and checks that `ReclusterMutator` selects all
/// three blocks for reclustering.
#[tokio::test(flavor = "multi_thread")]
async fn test_recluster_mutator_block_select() -> Result<()> {
    let fixture = TestFixture::new().await;
    let ctx = fixture.ctx();
    let location_generator = TableMetaLocationGenerator::with_prefix("_prefix".to_owned());

    let data_accessor = ctx.get_data_operator()?.operator();
    let seg_writer = SegmentWriter::new(&data_accessor, &location_generator);

    // Writes a single-block segment carrying the given cluster statistics and
    // yields (segment location, block location).
    let gen_test_seg = |cluster_stats: Option<ClusterStatistics>| async {
        let block_id = Uuid::new_v4().simple().to_string();
        let location = (block_id, DataBlock::VERSION);
        let test_block_meta = Arc::new(BlockMeta::new(
            1,
            1,
            1,
            HashMap::default(),
            HashMap::default(),
            cluster_stats,
            location.clone(),
            None,
            0,
            meta::Compression::Lz4Raw,
        ));
        let segment = SegmentInfo::new(vec![test_block_meta], Statistics::default());
        Ok::<_, ErrorCode>((seg_writer.write_segment(segment).await?, location))
    };

    let mut test_segment_locations = vec![];
    let mut test_block_locations = vec![];

    // Segment 1: cluster-key range [1, 3].
    let (segment_location, block_location) = gen_test_seg(Some(ClusterStatistics {
        cluster_key_id: 0,
        min: vec![Scalar::from(1i64)],
        max: vec![Scalar::from(3i64)],
        level: 0,
        pages: None,
    }))
    .await?;
    test_segment_locations.push(segment_location);
    test_block_locations.push(block_location);

    // Segment 2: range [2, 4] overlaps segment 1.
    let (segment_location, block_location) = gen_test_seg(Some(ClusterStatistics {
        cluster_key_id: 0,
        min: vec![Scalar::from(2i64)],
        max: vec![Scalar::from(4i64)],
        level: 0,
        pages: None,
    }))
    .await?;
    test_segment_locations.push(segment_location);
    test_block_locations.push(block_location);

    // Segment 3: range [4, 5] touches segment 2's max.
    let (segment_location, block_location) = gen_test_seg(Some(ClusterStatistics {
        cluster_key_id: 0,
        min: vec![Scalar::from(4i64)],
        max: vec![Scalar::from(5i64)],
        level: 0,
        pages: None,
    }))
    .await?;
    test_segment_locations.push(segment_location);
    test_block_locations.push(block_location);

    // Snapshot over the three segments, clustered by `(id)`.
    let base_snapshot = TableSnapshot::new(
        Uuid::new_v4(),
        &None,
        None,
        TableSchema::empty(),
        Statistics::default(),
        test_segment_locations.clone(),
        Some((0, "(id)".to_string())),
        None,
    );
    let base_snapshot = Arc::new(base_snapshot);

    let schema = TableSchemaRef::new(TableSchema::empty());
    let ctx: Arc<dyn TableContext> = ctx.clone();
    let segment_locations = base_snapshot.segments.clone();
    // Prune with no filters so every block's metadata comes back.
    let block_metas = FusePruner::create(&ctx, data_accessor.clone(), schema, &None)?
        .pruning(segment_locations, None, None)
        .await?;

    // Group the block metas by cluster level, as the mutator expects.
    let mut blocks_map: BTreeMap<i32, Vec<(usize, Arc<BlockMeta>)>> = BTreeMap::new();
    block_metas.iter().for_each(|(idx, b)| {
        if let Some(stats) = &b.cluster_stats {
            blocks_map
                .entry(stats.level)
                .or_default()
                .push((idx.segment_idx, b.clone()));
        }
    });

    let mut mutator = ReclusterMutator::try_create(
        ctx,
        location_generator,
        base_snapshot,
        1.0,
        BlockThresholds::default(),
        blocks_map,
        data_accessor,
    )?;

    // All three overlapping blocks should be picked up for reclustering.
    let need_recluster = mutator.target_select().await?;
    assert!(need_recluster);
    assert_eq!(mutator.selected_blocks().len(), 3);

    Ok(())
}
use std::iter::FromIterator;
use std::mem;

use crate::repr::Decor;
use crate::value::{DEFAULT_LEADING_VALUE_DECOR, DEFAULT_VALUE_DECOR};
use crate::{Item, RawString, Value};

/// Type representing a TOML array,
/// payload of the `Value::Array` variant's value
#[derive(Debug, Default, Clone)]
pub struct Array {
    // `trailing` represents whitespaces, newlines
    // and comments in an empty array or after the trailing comma
    trailing: RawString,
    trailing_comma: bool,
    // prefix before `[` and suffix after `]`
    decor: Decor,
    pub(crate) span: Option<std::ops::Range<usize>>,
    // always Vec<Item::Value>
    pub(crate) values: Vec<Item>,
}

/// An owned iterator type over `Table`'s key/value pairs.
pub type ArrayIntoIter = Box<dyn Iterator<Item = Value>>;
/// An iterator type over `Array`'s values.
pub type ArrayIter<'a> = Box<dyn Iterator<Item = &'a Value> + 'a>;
/// An iterator type over `Array`'s values.
pub type ArrayIterMut<'a> = Box<dyn Iterator<Item = &'a mut Value> + 'a>;

/// Constructors
///
/// See also `FromIterator`
impl Array {
    /// Create an empty `Array`
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut arr = toml_edit::Array::new();
    /// ```
    pub fn new() -> Self {
        Default::default()
    }

    // Build an array directly from pre-made items. Internal only: callers
    // must uphold the `Vec<Item::Value>` invariant documented on `values`.
    pub(crate) fn with_vec(values: Vec<Item>) -> Self {
        Self {
            values,
            ..Default::default()
        }
    }
}

/// Formatting
impl Array {
    /// Auto formats the array.
    pub fn fmt(&mut self) {
        decorate_array(self);
    }

    /// Set whether the array will use a trailing comma
    pub fn set_trailing_comma(&mut self, yes: bool) {
        self.trailing_comma = yes;
    }

    /// Whether the array will use a trailing comma
    pub fn trailing_comma(&self) -> bool {
        self.trailing_comma
    }

    /// Set whitespace after last element
    pub fn set_trailing(&mut self, trailing: impl Into<RawString>) {
        self.trailing = trailing.into();
    }

    /// Whitespace after last element
    pub fn trailing(&self) -> &RawString {
        &self.trailing
    }

    /// Returns the surrounding whitespace
    pub fn decor_mut(&mut self) -> &mut Decor {
        &mut self.decor
    }

    /// Returns the surrounding whitespace
    pub fn decor(&self) -> &Decor {
        &self.decor
    }

    /// Returns the location within the original document
    pub(crate) fn span(&self) -> Option<std::ops::Range<usize>> {
        self.span.clone()
    }

    // Drop the recorded source span and despan decor, trailing whitespace,
    // and every contained value against `input`.
    pub(crate) fn despan(&mut self, input: &str) {
        self.span = None;
        self.decor.despan(input);
        self.trailing.despan(input);
        for value in &mut self.values {
            value.despan(input);
        }
    }
}

impl Array {
    /// Returns an iterator over all values.
    pub fn iter(&self) -> ArrayIter<'_> {
        Box::new(self.values.iter().filter_map(Item::as_value))
    }

    /// Returns an iterator over all values.
    pub fn iter_mut(&mut self) -> ArrayIterMut<'_> {
        Box::new(self.values.iter_mut().filter_map(Item::as_value_mut))
    }

    /// Returns the length of the underlying Vec.
    ///
    /// In some rare cases, placeholder elements will exist. For a more accurate count, call
    /// `a.iter().count()`
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut arr = toml_edit::Array::new();
    /// arr.push(1);
    /// arr.push("foo");
    /// assert_eq!(arr.len(), 2);
    /// ```
    pub fn len(&self) -> usize {
        self.values.len()
    }

    /// Return true iff `self.len() == 0`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut arr = toml_edit::Array::new();
    /// assert!(arr.is_empty());
    ///
    /// arr.push(1);
    /// arr.push("foo");
    /// assert!(!arr.is_empty());
    /// ```
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Clears the array, removing all values. Keeps the allocated memory for reuse.
    pub fn clear(&mut self) {
        self.values.clear()
    }

    /// Returns a reference to the value at the given index, or `None` if the index is out of
    /// bounds.
    pub fn get(&self, index: usize) -> Option<&Value> {
        self.values.get(index).and_then(Item::as_value)
    }

    /// Returns a reference to the value at the given index, or `None` if the index is out of
    /// bounds.
    pub fn get_mut(&mut self, index: usize) -> Option<&mut Value> {
        self.values.get_mut(index).and_then(Item::as_value_mut)
    }

    /// Appends a new value to the end of the array, applying default formatting to it.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut arr = toml_edit::Array::new();
    /// arr.push(1);
    /// arr.push("foo");
    /// ```
    pub fn push<V: Into<Value>>(&mut self, v: V) {
        self.value_op(v.into(), true, |items, value| {
            items.push(Item::Value(value))
        })
    }

    /// Appends a new, already formatted value to the end of the array.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let formatted_value = "'literal'".parse::<toml_edit::Value>().unwrap();
    /// let mut arr = toml_edit::Array::new();
    /// arr.push_formatted(formatted_value);
    /// ```
    pub fn push_formatted(&mut self, v: Value) {
        self.values.push(Item::Value(v));
    }

    /// Inserts an element at the given position within the array, applying default formatting to
    /// it and shifting all values after it to the right.
    ///
    /// # Panics
    ///
    /// Panics if `index > len`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut arr = toml_edit::Array::new();
    /// arr.push(1);
    /// arr.push("foo");
    ///
    /// arr.insert(0, "start");
    /// ```
    pub fn insert<V: Into<Value>>(&mut self, index: usize, v: V) {
        self.value_op(v.into(), true, |items, value| {
            items.insert(index, Item::Value(value))
        })
    }

    /// Inserts an already formatted value at the given position within the array, shifting all
    /// values after it to the right.
    ///
    /// # Panics
    ///
    /// Panics if `index > len`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut arr = toml_edit::Array::new();
    /// arr.push(1);
    /// arr.push("foo");
    ///
    /// let formatted_value = "'start'".parse::<toml_edit::Value>().unwrap();
    /// arr.insert_formatted(0, formatted_value);
    /// ```
    pub fn insert_formatted(&mut self, index: usize, v: Value) {
        self.values.insert(index, Item::Value(v))
    }

    /// Replaces the element at the given position within the array, preserving existing formatting.
    ///
    /// # Panics
    ///
    /// Panics if `index >= len`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut arr = toml_edit::Array::new();
    /// arr.push(1);
    /// arr.push("foo");
    ///
    /// arr.replace(0, "start");
    /// ```
    pub fn replace<V: Into<Value>>(&mut self, index: usize, v: V) -> Value {
        // Read the existing value's decor and preserve it.
        let existing_decor = self
            .get(index)
            .unwrap_or_else(|| panic!("index {} out of bounds (len = {})", index, self.len()))
            .decor();
        let mut value = v.into();
        *value.decor_mut() = existing_decor.clone();
        self.replace_formatted(index, value)
    }

    /// Replaces the element at the given position within the array with an already formatted value.
    ///
    /// # Panics
    ///
    /// Panics if `index >= len`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut arr = toml_edit::Array::new();
    /// arr.push(1);
    /// arr.push("foo");
    ///
    /// let formatted_value = "'start'".parse::<toml_edit::Value>().unwrap();
    /// arr.replace_formatted(0, formatted_value);
    /// ```
    pub fn replace_formatted(&mut self, index: usize, v: Value) -> Value {
        // Swap the new item in and hand the old value back; a non-value item
        // here would violate the `values` invariant, hence the panic.
        match mem::replace(&mut self.values[index], Item::Value(v)) {
            Item::Value(old_value) => old_value,
            x => panic!("non-value item {:?} in an array", x),
        }
    }

    /// Removes the value at the given index.
    ///
    /// # Examples
    ///
    /// ```rust
    /// let mut arr = toml_edit::Array::new();
    /// arr.push(1);
    /// arr.push("foo");
    ///
    /// arr.remove(0);
    /// assert_eq!(arr.len(), 1);
    /// ```
    pub fn remove(&mut self, index: usize) -> Value {
        let removed = self.values.remove(index);
        match removed {
            Item::Value(v) => v,
            x => panic!("non-value item {:?} in an array", x),
        }
    }

    /// Retains only the values specified by the `keep` predicate.
    ///
    /// In other words, remove all values for which `keep(&value)` returns `false`.
    ///
    /// This method operates in place, visiting each element exactly once in the
    /// original order, and preserves the order of the retained elements.
    pub fn retain<F>(&mut self, mut keep: F)
    where
        F: FnMut(&Value) -> bool,
    {
        // Non-value placeholder items are dropped as well (`unwrap_or(false)`).
        self.values
            .retain(|item| item.as_value().map(&mut keep).unwrap_or(false));
    }

    // Shared push/insert plumbing: when `decorate` is set, prefix the value
    // with a single space unless it will be the first element, then hand the
    // items vec and prepared value to `op`.
    fn value_op<T>(
        &mut self,
        v: Value,
        decorate: bool,
        op: impl FnOnce(&mut Vec<Item>, Value) -> T,
    ) -> T {
        let mut value = v;
        if !self.is_empty() && decorate {
            value.decorate(" ", "");
        } else if decorate {
            value.decorate("", "");
        }
        op(&mut self.values, value)
    }
}

impl std::fmt::Display for Array {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        crate::encode::Encode::encode(self, f, None, ("", ""))
    }
}

impl<V: Into<Value>> Extend<V> for Array {
    fn extend<T: IntoIterator<Item = V>>(&mut self, iter: T) {
        for value in iter {
            self.push_formatted(value.into());
        }
    }
}

impl<V: Into<Value>> FromIterator<V> for Array {
    fn from_iter<I>(iter: I) -> Self
    where
        I: IntoIterator<Item = V>,
    {
        let v = iter.into_iter().map(|a| Item::Value(a.into()));
        Array {
            values: v.collect(),
            ..Default::default()
        }
    }
}

impl IntoIterator for Array {
    type Item = Value;
    type IntoIter = ArrayIntoIter;

    fn into_iter(self) -> Self::IntoIter {
        Box::new(
            self.values
                .into_iter()
                .filter(|v| v.is_value())
                .map(|v| v.into_value().unwrap()),
        )
    }
}

impl<'s> IntoIterator for &'s Array {
    type Item = &'s Value;
    type IntoIter = ArrayIter<'s>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

// Applies the default single-line decoration to every value: no prefix on
// the leading element, the standard decor on the rest, and no trailing
// comma or whitespace.
fn decorate_array(array: &mut Array) {
    for (i, value) in array
        .values
        .iter_mut()
        .filter_map(Item::as_value_mut)
        .enumerate()
    {
        // [value1, value2, value3]
        if i == 0 {
            value.decorate(DEFAULT_LEADING_VALUE_DECOR.0, DEFAULT_LEADING_VALUE_DECOR.1);
        } else {
            value.decorate(DEFAULT_VALUE_DECOR.0, DEFAULT_VALUE_DECOR.1);
        }
    }
    // Since everything is now on the same line, remove trailing commas and whitespace.
    array.set_trailing_comma(false);
    array.set_trailing("");
}
use crate::prelude::*;
use std::collections::HashMap;

/// Registry of packed scenes plus their spawn templates.
///
/// Entries are addressable three ways: by insertion index, by a `'static`
/// string alias, and by an arbitrary hashable key type `T`.
pub struct Models<T> where T: Eq + std::hash::Hash {
    data: Vec<(PackedScene, Template, usize)>,
    name_data_lookup: HashMap<&'static str, usize>,
    t_data_lookup: HashMap<T, usize>,
}

// SAFETY: declared by the original author so the registry can be held in a
// global; soundness rests on `PackedScene` being safe to share between
// threads -- TODO(review) confirm against the engine bindings.
unsafe impl<T> Sync for Models<T> where T: Eq + std::hash::Hash {}
unsafe impl<T> Send for Models<T> where T: Eq + std::hash::Hash {}

impl<T> Default for Models<T> where T: Eq + std::hash::Hash {
    fn default() -> Self {
        Self {
            data: Vec::new(),
            name_data_lookup: HashMap::new(),
            t_data_lookup: HashMap::new(),
        }
    }
}

impl<T> Models<T> where T: Eq + std::hash::Hash {
    /// Slot index registered under the typed key, if any.
    pub fn index_from_t(&self, index: &T) -> Option<usize> {
        self.t_data_lookup.get(index).cloned()
    }

    /// Slot index registered under the string alias, if any.
    pub fn index_from_alias(&self, index: &str) -> Option<usize> {
        self.name_data_lookup.get(&index).cloned()
    }

    /// Template + slot index for an alias, resolved via `index_from_alias`.
    pub fn data_from_alias(&self, index: &str) -> Option<(Template, usize)> {
        self.index_from_alias(index)
            .and_then(|idx| self.data_from_index(idx))
    }

    /// Template + slot index for a typed key, resolved via `index_from_t`.
    pub fn data_from_t(&self, index: &T) -> Option<(Template, usize)> {
        self.index_from_t(index)
            .and_then(|idx| self.data_from_index(idx))
    }

    /// Template + slot index stored at `index`, if such a slot exists.
    pub fn data_from_index(&self, index: usize) -> Option<(Template, usize)> {
        self.data.get(index).map(|entry| (entry.1.clone(), index))
    }

    /// Borrow the packed scene stored at `index`, if such a slot exists.
    pub(crate) fn scene_from_index(&self, index: usize) -> Option<&PackedScene> {
        self.data.get(index).map(|entry| &entry.0)
    }

    /// Register a scene/template pair under an alias and/or a typed key.
    ///
    /// The entry is stored only when at least one key was supplied; in that
    /// case the new slot index is returned, otherwise `None`.
    pub fn insert(&mut self, alias: Option<&'static str>, t_key: Option<T>, scene: PackedScene, template: Template) -> Option<usize> {
        let index = self.data.len();
        let mut keyed = false;
        if let Some(name) = alias {
            self.name_data_lookup.insert(name, index);
            keyed = true;
        }
        if let Some(key) = t_key {
            self.t_data_lookup.insert(key, index);
            keyed = true;
        }
        if !keyed {
            // Without a key there would be no way to find the entry again.
            return None;
        }
        self.data.push((scene, template, index));
        Some(index)
    }
}

/// How a registered scene should be instantiated.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Template {
    None,
    Scene,
    ASprite(AnimSprite),
    APlayer(AnimPlayer),
    ATree(AnimTree),
}

impl Default for Template {
    fn default() -> Self {
        Self::None
    }
}

/// Initial state for an animated-sprite template.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct AnimSprite {
    pub flip_h: bool,
    pub flip_v: bool,
    pub playing: bool,
    pub animation: &'static str,
}

impl Default for AnimSprite {
    fn default() -> Self {
        Self {
            flip_h: false,
            flip_v: false,
            playing: true,
            animation: "",
        }
    }
}

impl AnimSprite {}

/// Placeholder state for an animation-player template.
#[derive(Clone, Copy, Debug, PartialEq, Default)]
pub struct AnimPlayer {}

impl AnimPlayer {}

/// Placeholder state for an animation-tree template.
#[derive(Clone, Copy, Debug, PartialEq, Default)]
pub struct AnimTree {}

impl AnimTree {}
use cgmath::{
    BaseFloat, Basis2, EuclideanSpace, Euler, Quaternion, Rad, Rotation, Rotation2, Vector3,
    VectorSpace, Zero,
};

use Pose;

/// Velocity
///
/// ### Type parameters:
///
/// - `L`: Linear velocity, usually `Vector2` or `Vector3`
/// - `A`: Angular velocity, usually `Scalar` or `Vector3`
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "serializable", derive(Serialize, Deserialize))]
pub struct Velocity<L, A> {
    linear: L,
    angular: A,
}

impl<L, A> Default for Velocity<L, A>
where
    L: Zero,
    A: Clone + Zero,
{
    fn default() -> Self {
        Self::new(L::zero(), A::zero())
    }
}

impl<L, A> Velocity<L, A>
where
    L: Zero,
    A: Clone + Zero,
{
    /// Create new velocity object, with both linear and angular velocity
    pub fn new(linear: L, angular: A) -> Self {
        Self { linear, angular }
    }

    /// Create new velocity object with only linear velocity (zero angular velocity)
    pub fn from_linear(linear: L) -> Self {
        Self::new(linear, A::zero())
    }

    /// Set linear velocity
    pub fn set_linear(&mut self, linear: L) {
        self.linear = linear;
    }

    /// Get linear velocity
    pub fn linear(&self) -> &L {
        &self.linear
    }

    /// Set angular velocity
    pub fn set_angular(&mut self, angular: A) {
        self.angular = angular;
    }

    /// Get angular velocity
    pub fn angular(&self) -> &A {
        &self.angular
    }

    /// Apply velocity to pose.
    ///
    /// Performs one explicit-Euler integration step: position advances by
    /// `linear * dt` and rotation is composed with a rotation of `angular * dt`.
    ///
    /// ### Parameters:
    ///
    /// - `pose`: Pose to apply the velocity to
    /// - `dt`: Time step
    ///
    /// ### Type parameters:
    ///
    /// - `B`: Transform type (`BodyPose3` or similar)
    /// - `P`: Positional quantity, usually `Point2` or `Point3`
    /// - `R`: Rotational quantity, usually `Basis2` or `Quaternion`
    pub fn apply<B, P, R>(&self, pose: &B, dt: L::Scalar) -> B
    where
        P: EuclideanSpace<Scalar = L::Scalar, Diff = L>,
        L: VectorSpace,
        L::Scalar: BaseFloat,
        R: ApplyAngular<L::Scalar, A> + Rotation<P>,
        B: Pose<P, R>,
    {
        B::new(
            self.apply_linear(pose.position(), dt),
            self.apply_angular(pose.rotation(), dt),
        )
    }

    /// Apply linear velocity to a positional quantity
    ///
    /// ### Parameters:
    ///
    /// - `linear`: Positional value
    /// - `dt`: Time step
    ///
    /// ### Type parameters:
    ///
    /// - `P`: Positional quantity, usually `Point2` or `Point3`
    pub fn apply_linear<P>(&self, linear: P, dt: L::Scalar) -> P
    where
        P: EuclideanSpace<Scalar = L::Scalar, Diff = L>,
        L::Scalar: BaseFloat,
        L: VectorSpace,
    {
        linear + self.linear * dt
    }

    /// Apply angular velocity to a rotational quantity
    ///
    /// ### Parameters:
    ///
    /// - `rotation`: Rotational value
    /// - `dt`: Time step
    ///
    /// ### Type parameters:
    ///
    /// - `R`: Rotational quantity, usually `Basis2` or `Quaternion`
    pub fn apply_angular<R>(&self, rotation: R, dt: L::Scalar) -> R
    where
        R: ApplyAngular<L::Scalar, A>,
        L: VectorSpace,
        L::Scalar: BaseFloat,
    {
        rotation.apply(&self.angular, dt)
    }
}

/// Apply an angular velocity to a rotational quantity
///
/// ### Type parameters:
///
/// - `A`: Angular velocity, usually `Scalar` or `Vector3`
pub trait ApplyAngular<S, A> {
    /// Apply given velocity
    fn apply(&self, velocity: &A, dt: S) -> Self;
}

// Scalar orientation (a plain angle): simple additive integration.
impl<S> ApplyAngular<S, S> for S
where
    S: BaseFloat,
{
    fn apply(&self, velocity: &S, dt: S) -> Self {
        *self + *velocity * dt
    }
}

// 2D rotation matrix: compose with a rotation of angle `velocity * dt`.
impl<S> ApplyAngular<S, S> for Basis2<S>
where
    S: BaseFloat,
{
    fn apply(&self, velocity: &S, dt: S) -> Self {
        *self * Basis2::from_angle(Rad(*velocity * dt))
    }
}

// 3D quaternion: compose with the quaternion built from per-axis Euler angles
// `velocity * dt` (valid as a first-order approximation for small dt).
impl<S> ApplyAngular<S, Vector3<S>> for Quaternion<S>
where
    S: BaseFloat,
{
    fn apply(&self, velocity: &Vector3<S>, dt: S) -> Self {
        self * Quaternion::from(Euler {
            x: Rad(velocity.x * dt),
            y: Rad(velocity.y * dt),
            z: Rad(velocity.z * dt),
        })
    }
}

#[cfg(test)]
mod tests_f32 {
    use cgmath::{Basis2, Point2, Point3, Rad, Rotation2, Rotation3, Transform, Vector2};

    use super::*;
    use physics2d::BodyPose2;
    use physics3d::BodyPose3;

    #[test]
    fn test_velocity_linear() {
        let velocity = Velocity::new(Vector2::new(1., 1.), 0.);
        let pose = Point2::<f32>::new(0., 0.);
        let pose = velocity.apply_linear(pose, 0.1);
        assert_eq!(Point2::new(0.1, 0.1), pose);
    }

    #[test]
    fn test_velocity_2d_angular() {
        let velocity = Velocity::new(Vector2::new(1., 1.), 1.);
        let orientation = Basis2::<f32>::from_angle(Rad(0.));
        let orientation = velocity.apply_angular(orientation, 0.1);
        assert_eq!(Basis2::from_angle(Rad(0.1)), orientation);
    }

    #[test]
    fn test_velocity_3d_angular() {
        let velocity = Velocity::new(Vector3::new(1., 1., 1.), Vector3::new(0., 1., 0.));
        let orientation = Quaternion::<f32>::from_angle_y(Rad(0.));
        let orientation = velocity.apply_angular(orientation, 0.1);
        assert_eq!(Quaternion::from_angle_y(Rad(0.1)), orientation);
    }

    #[test]
    fn test_velocity_full_2d() {
        let velocity = Velocity::new(Vector2::new(1., 1.), 1.);
        let pose = BodyPose2::<f32>::one();
        let pose = velocity.apply(&pose, 0.1);
        assert_eq!(Point2::new(0.1, 0.1), pose.position());
        assert_eq!(Basis2::from_angle(Rad(0.1)), pose.rotation());
    }

    #[test]
    fn test_velocity_full_3d() {
        let velocity = Velocity::new(Vector3::new(1., 1., 1.), Vector3::new(0., 1., 0.));
        let pose = BodyPose3::<f32>::one();
        let pose = velocity.apply(&pose, 0.1);
        assert_eq!(Point3::new(0.1, 0.1, 0.1), pose.position());
        assert_eq!(Quaternion::from_angle_y(Rad(0.1)), pose.rotation());
    }

    #[test]
    fn test_apply_angular_basis2() {
        let orientation = Basis2::<f32>::from_angle(Rad(0.));
        let velocity = 0.5;
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        assert_ulps_eq!(Basis2::from_angle(Rad(0.2)), orientation);
    }

    #[test]
    fn test_apply_angular_real() {
        let orientation: f32 = 0.;
        let velocity = 0.5;
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        assert_eq!(0.2, orientation);
    }

    #[test]
    fn test_apply_angular_quat() {
        let orientation = Quaternion::<f32>::from_angle_x(Rad(0.));
        let velocity = Vector3::new(0.5, 0., 0.);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        assert_ulps_eq!(Quaternion::from_angle_x(Rad(0.2)), orientation);
    }
}

#[cfg(test)]
mod tests_f64 {
    use cgmath::{Basis2, Point2, Point3, Rad, Rotation2, Rotation3, Transform, Vector2};

    use super::*;
    use physics2d::BodyPose2;
    use physics3d::BodyPose3;

    #[test]
    fn test_velocity_linear() {
        let velocity = Velocity::new(Vector2::new(1., 1.), 0.);
        let pose = Point2::<f64>::new(0., 0.);
        let pose = velocity.apply_linear(pose, 0.1);
        assert_eq!(Point2::new(0.1, 0.1), pose);
    }

    #[test]
    fn test_velocity_2d_angular() {
        let velocity = Velocity::new(Vector2::new(1., 1.), 1.);
        let orientation = Basis2::<f64>::from_angle(Rad(0.));
        let orientation = velocity.apply_angular(orientation, 0.1);
        assert_eq!(Basis2::from_angle(Rad(0.1)), orientation);
    }

    #[test]
    fn test_velocity_3d_angular() {
        let velocity = Velocity::new(Vector3::new(1., 1., 1.), Vector3::new(0., 1., 0.));
        let orientation = Quaternion::<f64>::from_angle_y(Rad(0.));
        let orientation = velocity.apply_angular(orientation, 0.1);
        assert_eq!(Quaternion::from_angle_y(Rad(0.1)), orientation);
    }

    #[test]
    fn test_velocity_full_2d() {
        let velocity = Velocity::new(Vector2::new(1., 1.), 1.);
        let pose = BodyPose2::<f64>::one();
        let pose = velocity.apply(&pose, 0.1);
        assert_eq!(Point2::new(0.1, 0.1), pose.position());
        assert_eq!(Basis2::from_angle(Rad(0.1)), pose.rotation());
    }

    #[test]
    fn test_velocity_full_3d() {
        let velocity = Velocity::new(Vector3::new(1., 1., 1.), Vector3::new(0., 1., 0.));
        let pose = BodyPose3::<f64>::one();
        let pose = velocity.apply(&pose, 0.1);
        assert_eq!(Point3::new(0.1, 0.1, 0.1), pose.position());
        assert_eq!(Quaternion::from_angle_y(Rad(0.1)), pose.rotation());
    }

    #[test]
    fn test_apply_angular_basis2() {
        let orientation = Basis2::<f64>::from_angle(Rad(0.));
        let velocity = 0.5;
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        assert_ulps_eq!(Basis2::from_angle(Rad(0.2)), orientation);
    }

    #[test]
    fn test_apply_angular_real() {
        let orientation: f64 = 0.;
        let velocity = 0.5;
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        assert_eq!(0.2, orientation);
    }

    #[test]
    fn test_apply_angular_quat() {
        let orientation = Quaternion::<f64>::from_angle_x(Rad(0.));
        let velocity = Vector3::new(0.5, 0., 0.);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        let orientation = orientation.apply(&velocity, 0.1);
        assert_ulps_eq!(Quaternion::from_angle_x(Rad(0.2)), orientation);
    }
}
use crate::arithmetic_command::ArithmeticCommand;
use crate::segment::Segment;

/// One parsed VM instruction.
///
/// NOTE(review): the names suggest a Hack-VM-style translator (arithmetic
/// commands plus segment push/pop) — confirm against the parser that
/// produces these values.
#[derive(Debug)]
pub enum Command {
    /// An arithmetic/logical operation.
    Arithmetic(ArithmeticCommand),
    /// `Push(segment, index)`.
    Push(Segment, i32),
    /// `Pop(segment, index)`.
    Pop(Segment, i32),
}
use core::fmt; pub struct Error { repr: Repr, } pub type Result<T> = core::result::Result<T, Error>; #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] pub enum ErrorKind { } impl From<ErrorKind> for Error { #[inline] fn from(kind: ErrorKind) -> Error { Error { repr: Repr::Simple(kind) } } } enum Repr { Message(Box<str>), Simple(ErrorKind), } impl fmt::Debug for Repr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Repr::Message(msg) => f.write_str(msg), Repr::Simple(kind) => f.debug_tuple("kind").field(kind).finish(), } } } impl fmt::Debug for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&self.repr, f) } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match &self.repr { Repr::Message(msg) => write!(f, "{}", msg), Repr::Simple(kind) => write!(f, "{:?}", kind), } } } impl std::error::Error for Error {} impl serde::ser::Error for Error { fn custom<T: fmt::Display>(msg: T) -> Error { Error { repr: Repr::Message(msg.to_string().into_boxed_str()) } } } impl serde::de::Error for Error { fn custom<T: fmt::Display>(msg: T) -> Error { Error { repr: Repr::Message(msg.to_string().into_boxed_str()) } } }
extern crate oxygengine_core as core;

pub mod audio_asset_protocol;
pub mod component;
pub mod resource;
pub mod system;

/// Convenience re-exports for downstream crates.
pub mod prelude {
    pub use crate::{audio_asset_protocol::*, component::*, resource::*, system::*};
}

use crate::{
    component::{AudioSource, AudioSourcePrefabProxy},
    resource::Audio,
    system::AudioSystem,
};
use core::{app::AppBuilder, assets::database::AssetsDatabase, prefab::PrefabManager};

/// Install the audio bundle: registers `data` as the audio backend resource
/// and adds an `AudioSystem` named "audio" with no system dependencies.
pub fn bundle_installer<A>(builder: &mut AppBuilder, data: A)
where
    A: Audio + 'static,
{
    builder.install_resource(data);
    builder.install_system(AudioSystem::<A>::default(), "audio", &[]);
}

/// Register the audio asset protocol with the assets database so audio
/// assets can be loaded.
pub fn protocols_installer(database: &mut AssetsDatabase) {
    database.register(audio_asset_protocol::AudioAssetProtocol);
}

/// Register the `AudioSource` component factory (via its prefab proxy) under
/// the prefab name "AudioSource".
pub fn prefabs_installer(prefabs: &mut PrefabManager) {
    prefabs.register_component_factory_proxy::<AudioSource, AudioSourcePrefabProxy>("AudioSource");
}
use chrono::prelude::*;
use log::{debug, info};
use yew::{html, Component, ComponentLink, Html, ShouldRender};

/// Demo counter component: renders increment/decrement/bulk buttons, the
/// current count, and the local time.
pub struct Test {
    // Current counter value shown in the view.
    counter: i32,
    // Link used to build button callbacks back into `update`.
    link: ComponentLink<Self>,
}

#[derive(Clone)]
pub enum Msg {
    Increment,
    Decrement,
    /// Apply a batch of messages in order from a single trigger.
    Bulk(Vec<Msg>),
}

impl Component for Test {
    type Message = Msg;
    type Properties = ();

    fn create(_: Self::Properties, link: ComponentLink<Self>) -> Self {
        Test { counter: 0, link }
    }

    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        match msg {
            Msg::Increment => {
                self.counter += 1;
                info!("incrementing counter to {}", self.counter);
                true
            }
            Msg::Decrement => {
                self.counter -= 1;
                debug!("decrementing counter to {}", self.counter);
                true
            }
            Msg::Bulk(msgs) => {
                // Recursively apply each nested message; always re-render.
                for msg in msgs {
                    self.update(msg);
                }
                true
            }
        }
    }

    fn view(&self) -> Html {
        html! {
            <>
                { self.action_button(Msg::Increment) }
                { self.action_button(Msg::Decrement) }
                { self.action_button(Msg::Bulk(vec![Msg::Increment, Msg::Increment])) }
                <p>{ self.counter }</p>
                <p>{ Local::now() }</p>
            </>
        }
    }
}

impl Test {
    /// Render one action button; the label and CSS color class are chosen
    /// from the message the button will emit.
    fn action_button(&self, msg: Msg) -> Html {
        let (text, color) = match msg {
            Msg::Increment => ("Increment", "is-info"),
            Msg::Decrement => ("Decrement", "is-danger"),
            Msg::Bulk(_) => ("Bulk", "is-success"),
        };
        html! {
            <button class=("button", color) onclick=self.link.callback(move |_| msg.clone())>
                {text}
            </button>
        }
    }
}
/// AArch64-specific implementation; contents live in the `aarch64` submodule.
pub mod aarch64;
mod utils; use std::{fs::File, io::{BufReader, Read}, os::unix::prelude::FileExt}; use utils:: *; pub fn main() { let mut f = File::open("./demo.mp4").unwrap(); let mut buf = vec![]; f.read_to_end(&mut buf).unwrap(); // println!("{:?}", buf.len()); read_info(buf); // let mut new_f = File::create("./new.mp4").unwrap(); // new_f.write_all_at(&mut buf, 0).unwrap(); }
// Copyright 2019. The Tari Project // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that the // following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following // disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the // following disclaimer in the documentation and/or other materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote // products derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
use crate::{
    base_node::comms_interface::{error::CommsInterfaceError, BlockEvent, NodeCommsRequest, NodeCommsResponse},
    blocks::{Block, BlockHeader, NewBlockTemplate},
    chain_storage::{ChainMetadata, HistoricalBlock},
    proof_of_work::{Difficulty, PowAlgorithm},
};
use futures::{stream::Fuse, StreamExt};
use tari_broadcast_channel::Subscriber;
use tari_service_framework::reply_channel::SenderService;
use tower_service::Service;

/// The LocalNodeCommsInterface provides an interface to request information from the current local node by other
/// internal services.
#[derive(Clone)]
pub struct LocalNodeCommsInterface {
    // Request/response channel into the local base node service. The service's reply is itself a
    // `Result`, which is why the methods below use the `.await??` double-unwrap pattern.
    request_sender: SenderService<NodeCommsRequest, Result<NodeCommsResponse, CommsInterfaceError>>,
    // Channel used to hand completed blocks to the base node service.
    block_sender: SenderService<Block, Result<(), CommsInterfaceError>>,
    // Broadcast subscription for block events published by the base node.
    block_event_stream: Subscriber<BlockEvent>,
}

impl LocalNodeCommsInterface {
    /// Construct a new LocalNodeCommsInterface with the specified SenderService.
    pub fn new(
        request_sender: SenderService<NodeCommsRequest, Result<NodeCommsResponse, CommsInterfaceError>>,
        block_sender: SenderService<Block, Result<(), CommsInterfaceError>>,
        block_event_stream: Subscriber<BlockEvent>,
    ) -> Self {
        Self {
            request_sender,
            block_sender,
            block_event_stream,
        }
    }

    /// Return a clone of the block-event broadcast subscription.
    pub fn get_block_event_stream(&self) -> Subscriber<BlockEvent> {
        self.block_event_stream.clone()
    }

    /// Same as `get_block_event_stream`, pre-fused for use in select loops.
    pub fn get_block_event_stream_fused(&self) -> Fuse<Subscriber<BlockEvent>> {
        self.get_block_event_stream().fuse()
    }

    /// Request metadata from the current local node.
    pub async fn get_metadata(&mut self) -> Result<ChainMetadata, CommsInterfaceError> {
        // First `?` covers the request channel, second `?` the service-level result.
        match self.request_sender.call(NodeCommsRequest::GetChainMetadata).await?? {
            NodeCommsResponse::ChainMetadata(metadata) => Ok(metadata),
            _ => Err(CommsInterfaceError::UnexpectedApiResponse),
        }
    }

    /// Request the historical blocks stored at the given block heights.
    pub async fn get_blocks(&mut self, block_heights: Vec<u64>) -> Result<Vec<HistoricalBlock>, CommsInterfaceError> {
        match self
            .request_sender
            .call(NodeCommsRequest::FetchBlocks(block_heights))
            .await??
        {
            NodeCommsResponse::HistoricalBlocks(blocks) => Ok(blocks),
            _ => Err(CommsInterfaceError::UnexpectedApiResponse),
        }
    }

    /// Request the block headers at the given block heights.
    pub async fn get_headers(&mut self, block_heights: Vec<u64>) -> Result<Vec<BlockHeader>, CommsInterfaceError> {
        match self
            .request_sender
            .call(NodeCommsRequest::FetchHeaders(block_heights))
            .await??
        {
            NodeCommsResponse::BlockHeaders(headers) => Ok(headers),
            _ => Err(CommsInterfaceError::UnexpectedApiResponse),
        }
    }

    /// Request the construction of a new mineable block template from the base node service.
    pub async fn get_new_block_template(
        &mut self,
        pow_algorithm: PowAlgorithm,
    ) -> Result<NewBlockTemplate, CommsInterfaceError> {
        match self
            .request_sender
            .call(NodeCommsRequest::GetNewBlockTemplate(pow_algorithm))
            .await??
        {
            NodeCommsResponse::NewBlockTemplate(new_block_template) => Ok(new_block_template),
            _ => Err(CommsInterfaceError::UnexpectedApiResponse),
        }
    }

    /// Request from base node service the construction of a block from a block template.
    pub async fn get_new_block(&mut self, block_template: NewBlockTemplate) -> Result<Block, CommsInterfaceError> {
        match self
            .request_sender
            .call(NodeCommsRequest::GetNewBlock(block_template))
            .await??
        {
            NodeCommsResponse::NewBlock(block) => Ok(block),
            _ => Err(CommsInterfaceError::UnexpectedApiResponse),
        }
    }

    /// Request the PoW target difficulty for mining on the main chain from the base node service.
    pub async fn get_target_difficulty(
        &mut self,
        pow_algorithm: PowAlgorithm,
    ) -> Result<Difficulty, CommsInterfaceError> {
        match self
            .request_sender
            .call(NodeCommsRequest::GetTargetDifficulty(pow_algorithm))
            .await??
        {
            NodeCommsResponse::TargetDifficulty(difficulty) => Ok(difficulty),
            _ => Err(CommsInterfaceError::UnexpectedApiResponse),
        }
    }

    /// Submit a block to the base node service.
    pub async fn submit_block(&mut self, block: Block) -> Result<(), CommsInterfaceError> {
        // Outer `?` covers the channel; the service's own `Result` is returned as-is.
        self.block_sender.call(block).await?
    }
}
//! Deep-data OpenEXR support (multiple samples per pixel): scan-line and
//! tiled readers/writers plus frame-buffer and image types, with the main
//! types re-exported at this level for convenience.

pub mod composite_deep_scan_line;
pub mod deep_frame_buffer;
pub mod deep_image;
pub mod deep_image_channel;
pub mod deep_image_io;
pub mod deep_image_level;
pub mod deep_scan_line_input_file;
pub mod deep_scan_line_input_part;
pub mod deep_scan_line_output_file;
pub mod deep_scan_line_output_part;
pub mod deep_tiled_input_file;
pub mod deep_tiled_input_part;
pub mod deep_tiled_output_file;
pub mod deep_tiled_output_part;
pub mod sample_count_channel;

pub use composite_deep_scan_line::CompositeDeepScanLine;
pub use deep_frame_buffer::{DeepFrameBuffer, DeepSlice};
pub use deep_image::DeepImage;
pub use deep_image_channel::{DeepChannelF16, DeepChannelF32, DeepChannelU32};
pub use deep_image_io::{
    load_deep_image, save_deep_image, save_deep_image_with_header,
};
// Re-exported from the sys crate rather than defined locally.
pub use openexr_sys::DeepImageState;
pub use deep_image_level::DeepImageLevel;
pub use deep_scan_line_input_file::DeepScanLineInputFile;
pub use deep_scan_line_input_part::DeepScanLineInputPart;
pub use deep_scan_line_output_file::DeepScanLineOutputFile;
pub use deep_scan_line_output_part::DeepScanLineOutputPart;
pub use deep_tiled_input_file::DeepTiledInputFile;
pub use deep_tiled_input_part::DeepTiledInputPart;
pub use deep_tiled_output_file::DeepTiledOutputFile;
pub use deep_tiled_output_part::DeepTiledOutputPart;
pub use sample_count_channel::SampleCountChannel;
// ignore-compare-mode-nll // revisions: base nll // [nll]compile-flags: -Zborrowck=mir // Test a "pass-through" object-lifetime-default that produces errors. #![allow(dead_code)] trait SomeTrait { fn dummy(&self) { } } struct MyBox<T:?Sized> { r: Box<T> } fn deref<T>(ss: &T) -> T { // produces the type of a deref without worrying about whether a // move out would actually be legal loop { } } fn load0(ss: &MyBox<dyn SomeTrait>) -> MyBox<dyn SomeTrait> { deref(ss) } fn load1<'a,'b>(a: &'a MyBox<dyn SomeTrait>, b: &'b MyBox<dyn SomeTrait>) -> &'b MyBox<dyn SomeTrait> { a //[base]~^ ERROR lifetime mismatch //[nll]~^^ ERROR lifetime may not live long enough } fn load2<'a>(ss: &MyBox<dyn SomeTrait + 'a>) -> MyBox<dyn SomeTrait + 'a> { load0(ss) //[base]~^ ERROR mismatched types //[nll]~^^ ERROR borrowed data escapes outside of function } fn main() { }
use std::env; use std::io; use std::io::prelude::*; use std::process; use std::str; const ALLTEXT_VERSION: &'static str = env!("CARGO_PKG_VERSION"); fn main() { let mut args = env::args(); args.next(); // skipping arg[0] let mut show_help = false; let mut null_delimiter = false; if let Some(arg) = args.next() { match arg.as_ref() { "--null" => { null_delimiter = true }, "--help" => { show_help = true }, "--version" => { println!("alltext {}", ALLTEXT_VERSION); process::exit(0); }, _ => { println!("unknown argument"); process::exit(1); } } } if show_help { println!("{}", ALLTEXT_HELP); process::exit(0); } let delimiter: u8; if null_delimiter { delimiter = b'\0'; } else { delimiter = b'\n'; }; let stdin = io::stdin(); let mut stdin = stdin.lock(); let mut input_raw: Vec<u8> = Vec::new(); while stdin.read_until(delimiter, &mut input_raw).unwrap() > 0 { let mut output = String::new(); if null_delimiter { if input_raw.last().is_some() { let needs_pop = { let last: &u8 = input_raw.last().unwrap(); last == &delimiter }; if needs_pop { input_raw.pop(); } } } { let buffer = String::from_utf8_lossy(&input_raw); for c in buffer.chars() { let pp: String = match c { '\x00' => String::from("NUL"), '\x0A' => String::from("LF"), '\x0D' => String::from("CR"), '\x1b' => String::from("ESC"), '\x20' => String::from("Space"), '\x00'...'\x20' => format!("{}", c as i32), _ => c.to_string() }; output.push_str(&pp); output.push(' '); } } &input_raw.clear(); output.pop(); println!("{}", output); } } static ALLTEXT_HELP: &'static str = "\ alltext - information about text input (including non-printable characters) options: --null use NUL (\\0) as line delimiter instead of default LF (\\n) --version print version example: printf \"Hello world.\\r\\n\" | alltext\ ";
//! UI channel use super::NUM_CHANNEL_CONFIGS; use static_assertions::const_assert_eq; // See https://github.com/torvalds/linux/blob/master/drivers/gpu/drm/sun4i/sun8i_mixer.h#L75 // for the format values register! { Attr, u32, RW, Fields [ Enable WIDTH(U1) OFFSET(U0), Format WIDTH(U4) OFFSET(U8) [ XRgb8888 = U4, Rgb565 = U10 ] ] } register! { Size, u32, RW, Fields [ SizeWidth WIDTH(U16) OFFSET(U0), SizeHeight WIDTH(U16) OFFSET(U16), ] } register! { Coord, u32, RW, Fields [ Bits WIDTH(U32) OFFSET(U0), ] } register! { Pitch, u32, RW, Fields [ Bits WIDTH(U32) OFFSET(U0), ] } register! { TopLAddr, u32, RW, Fields [ Bits WIDTH(U32) OFFSET(U0), ] } register! { BotLAddr, u32, RW, Fields [ Bits WIDTH(U32) OFFSET(U0), ] } register! { FColor, u32, RW, Fields [ Bits WIDTH(U32) OFFSET(U0), ] } register! { TopHAddr, u32, RW, Fields [ Bits WIDTH(U32) OFFSET(U0), ] } register! { BotHAddr, u32, RW, Fields [ Bits WIDTH(U32) OFFSET(U0), ] } register! { OvlSize, u32, RW, Fields [ SizeWidth WIDTH(U16) OFFSET(U0), SizeHeight WIDTH(U16) OFFSET(U16), ] } const_assert_eq!(core::mem::size_of::<ConfigRegisterSubBlock>(), 0x020); #[repr(C)] pub struct ConfigRegisterSubBlock { pub attr: Attr::Register, // 0x000 pub size: Size::Register, // 0x004 pub coord: Coord::Register, // 0x008 pub pitch: Pitch::Register, // 0x00C pub top_laddr: TopLAddr::Register, // 0x010 pub bot_laddr: BotLAddr::Register, // 0x014 pub fcolor: FColor::Register, // 0x018 __reserved_0: u32, // 0x01C } const_assert_eq!(core::mem::size_of::<RegisterBlock>(), 0x1000); #[repr(C)] pub struct RegisterBlock { pub cfg: [ConfigRegisterSubBlock; NUM_CHANNEL_CONFIGS], // 0x000 pub top_haddr: TopHAddr::Register, // 0x080 pub bot_haddr: BotHAddr::Register, // 0x084 pub ovl_size: OvlSize::Register, // 0x088 __reserved_0: [u32; 989], // 0x08C }
//! Implementation of a DataFusion `TableProvider` in terms of `QueryChunk`s

use async_trait::async_trait;
use std::{collections::HashSet, sync::Arc};

use arrow::{
    datatypes::{Fields, Schema as ArrowSchema, SchemaRef as ArrowSchemaRef},
    error::ArrowError,
};
use datafusion::{
    datasource::TableProvider,
    error::{DataFusionError, Result as DataFusionResult},
    execution::context::SessionState,
    logical_expr::{TableProviderFilterPushDown, TableType},
    optimizer::utils::{conjunction, split_conjunction},
    physical_plan::{
        expressions::col as physical_col, filter::FilterExec, projection::ProjectionExec,
        ExecutionPlan,
    },
    prelude::Expr,
};
use observability_deps::tracing::trace;
use schema::{sort::SortKey, Schema};

use crate::{
    chunk_order_field,
    util::{arrow_sort_key_exprs, df_physical_expr},
    QueryChunk, CHUNK_ORDER_COLUMN_NAME,
};
use snafu::{ResultExt, Snafu};

mod adapter;
mod deduplicate;
pub mod overlap;
mod physical;
mod record_batch_exec;

pub use self::overlap::group_potential_duplicates;
pub use deduplicate::{DeduplicateExec, RecordBatchDeduplicator};
pub(crate) use physical::{chunks_to_physical_nodes, PartitionedFileExt};
pub(crate) use record_batch_exec::RecordBatchesExec;

/// Errors raised while building provider plans.
#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display(
        "Internal error: no chunk pruner provided to builder for {}",
        table_name,
    ))]
    InternalNoChunkPruner { table_name: String },

    #[snafu(display("Internal error: Cannot create projection select expr '{}'", source,))]
    InternalSelectExpr {
        source: datafusion::error::DataFusionError,
    },

    #[snafu(display("Internal error adding sort operator '{}'", source,))]
    InternalSort {
        source: datafusion::error::DataFusionError,
    },

    #[snafu(display("Internal error adding filter operator '{}'", source,))]
    InternalFilter {
        source: datafusion::error::DataFusionError,
    },

    #[snafu(display("Internal error adding projection operator '{}'", source,))]
    InternalProjection {
        source: datafusion::error::DataFusionError,
    },
}

pub type Result<T, E = Error> = std::result::Result<T, E>;

impl From<Error> for ArrowError {
    // Wrap an error into an arrow error
    fn from(e: Error) -> Self {
        Self::ExternalError(Box::new(e))
    }
}

impl From<Error> for DataFusionError {
    // Wrap an error into a datafusion error
    fn from(e: Error) -> Self {
        Self::ArrowError(e.into())
    }
}

/// Something that can prune chunks based on their metadata
pub trait ChunkPruner: Sync + Send + std::fmt::Debug {
    /// prune `chunks`, if possible, based on predicate.
    fn prune_chunks(
        &self,
        table_name: &str,
        table_schema: &Schema,
        chunks: Vec<Arc<dyn QueryChunk>>,
        filters: &[Expr],
    ) -> Result<Vec<Arc<dyn QueryChunk>>>;
}

/// Builds a `ChunkTableProvider` from a series of `QueryChunk`s
/// and ensures the schema across the chunks is compatible and
/// consistent.
#[derive(Debug)]
pub struct ProviderBuilder {
    table_name: Arc<str>,
    schema: Schema,
    chunks: Vec<Arc<dyn QueryChunk>>,
    // Whether the built provider de-duplicates rows (defaults to true).
    deduplication: bool,
}

impl ProviderBuilder {
    pub fn new(table_name: Arc<str>, schema: Schema) -> Self {
        // The chunk-order column is synthetic and added during scan; the
        // incoming schema must not already contain it.
        assert_eq!(schema.find_index_of(CHUNK_ORDER_COLUMN_NAME), None);

        Self {
            table_name,
            schema,
            chunks: Vec::new(),
            deduplication: true,
        }
    }

    pub fn with_enable_deduplication(mut self, enable_deduplication: bool) -> Self {
        self.deduplication = enable_deduplication;
        self
    }

    /// Add a new chunk to this provider
    pub fn add_chunk(mut self, chunk: Arc<dyn QueryChunk>) -> Self {
        self.chunks.push(chunk);
        self
    }

    /// Create the Provider
    pub fn build(self) -> Result<ChunkTableProvider> {
        Ok(ChunkTableProvider {
            iox_schema: self.schema,
            table_name: self.table_name,
            chunks: self.chunks,
            deduplication: self.deduplication,
        })
    }
}

/// Implementation of a DataFusion TableProvider in terms of QueryChunks
///
/// This allows DataFusion to see data from Chunks as a single table, as well as
/// push predicates and selections down to chunks
#[derive(Debug)]
pub struct ChunkTableProvider {
    table_name: Arc<str>,
    /// The IOx schema (wrapper around Arrow Schemaref) for this table
    iox_schema: Schema,
    /// The chunks
    chunks: Vec<Arc<dyn QueryChunk>>,
    /// do deduplication
    deduplication: bool,
}

impl ChunkTableProvider {
    /// Return the IOx schema view for the data provided by this provider
    pub fn iox_schema(&self) -> &Schema {
        &self.iox_schema
    }

    /// Return the Arrow schema view for the data provided by this provider
    pub fn arrow_schema(&self) -> ArrowSchemaRef {
        self.iox_schema.as_arrow()
    }

    /// Return the table name
    pub fn table_name(&self) -> &str {
        self.table_name.as_ref()
    }

    /// Running deduplication or not
    pub fn deduplication(&self) -> bool {
        self.deduplication
    }
}

#[async_trait]
impl TableProvider for ChunkTableProvider {
    fn as_any(&self) -> &dyn std::any::Any {
        self
    }

    /// Schema with all available columns across all chunks
    fn schema(&self) -> ArrowSchemaRef {
        self.arrow_schema()
    }

    async fn scan(
        &self,
        ctx: &SessionState,
        projection: Option<&Vec<usize>>,
        filters: &[Expr],
        _limit: Option<usize>,
    ) -> std::result::Result<Arc<dyn ExecutionPlan>, DataFusionError> {
        trace!("Create a scan node for ChunkTableProvider");

        // All table columns plus the synthetic chunk-order column appended at
        // the end; the projection at the bottom of this method removes it again.
        let schema_with_chunk_order = Arc::new(ArrowSchema::new(
            self.iox_schema
                .as_arrow()
                .fields
                .iter()
                .cloned()
                .chain(std::iter::once(chunk_order_field()))
                .collect::<Fields>(),
        ));

        let pk = self.iox_schema().primary_key();
        let dedup_sort_key = SortKey::from_columns(pk.iter().copied());

        // Create data stream from chunk data. This is the most simple data stream possible and contains duplicates and
        // has no filters at all.
        let plan = chunks_to_physical_nodes(
            &schema_with_chunk_order,
            None,
            self.chunks.clone(),
            ctx.config().target_partitions(),
        );

        // De-dup before doing anything else, because all logical expressions act on de-duplicated data.
        let plan = if self.deduplication {
            let sort_exprs = arrow_sort_key_exprs(&dedup_sort_key, &plan.schema());
            Arc::new(DeduplicateExec::new(plan, sort_exprs, true))
        } else {
            plan
        };

        // Filter as early as possible (AFTER de-dup!). Predicate pushdown will eventually push down parts of this.
        let plan = if let Some(expr) = filters.iter().cloned().reduce(|a, b| a.and(b)) {
            let maybe_expr = if !self.deduplication {
                // Without dedup, only keep conjuncts that reference primary-key
                // columns exclusively; all other conjuncts are dropped here
                // (pushdown is reported as Inexact in that mode, see below).
                let dedup_cols = pk.into_iter().collect::<HashSet<_>>();
                conjunction(
                    split_conjunction(&expr)
                        .into_iter()
                        .filter(|expr| {
                            let Ok(expr_cols) = expr.to_columns() else {return false};
                            expr_cols
                                .into_iter()
                                .all(|c| dedup_cols.contains(c.name.as_str()))
                        })
                        .cloned(),
                )
            } else {
                Some(expr)
            };
            if let Some(expr) = maybe_expr {
                Arc::new(FilterExec::try_new(
                    df_physical_expr(plan.as_ref(), expr)?,
                    plan,
                )?)
            } else {
                plan
            }
        } else {
            plan
        };

        // Project at last because it removes columns and hence other operations may fail. Projection pushdown will
        // optimize that later.
        //
        // Always project because we MUST make sure that chunk order col doesn't leak to the user or to our parquet
        // files.
        let default_projection: Vec<_> = (0..self.iox_schema.len()).collect();
        let projection = projection.unwrap_or(&default_projection);
        let select_exprs = self
            .iox_schema()
            .select_by_indices(projection)
            .as_arrow()
            .fields()
            .iter()
            .map(|f| {
                let field_name = f.name();
                let physical_expr =
                    physical_col(field_name, &self.schema()).context(InternalSelectExprSnafu)?;
                Ok((physical_expr, field_name.to_string()))
            })
            .collect::<Result<Vec<_>>>()?;
        let plan = Arc::new(ProjectionExec::try_new(select_exprs, plan)?);

        Ok(plan)
    }

    /// Filter pushdown specification: exact when de-duplicating (the filter is
    /// fully applied inside `scan`), inexact otherwise (non-PK conjuncts are
    /// dropped in `scan`, so DataFusion must re-apply the filter).
    fn supports_filter_pushdown(
        &self,
        _filter: &Expr,
    ) -> DataFusionResult<TableProviderFilterPushDown> {
        if self.deduplication {
            Ok(TableProviderFilterPushDown::Exact)
        } else {
            Ok(TableProviderFilterPushDown::Inexact)
        }
    }

    fn table_type(&self) -> TableType {
        TableType::Base
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::{
        exec::IOxSessionContext,
        test::{format_execution_plan, TestChunk},
    };
    use datafusion::prelude::{col, lit};
    use predicate::Predicate;

    #[tokio::test]
    async fn provider_scan_default() {
        let table_name = "t";
        let chunk1 = Arc::new(
            TestChunk::new(table_name)
                .with_id(1)
                .with_tag_column("tag1")
.with_tag_column("tag2")
                .with_f64_field_column("field")
                .with_time_column(),
        ) as Arc<dyn QueryChunk>;
        // Second chunk is backed by a (dummy) parquet file, so plans show both
        // a RecordBatchesExec and a ParquetExec input under the UnionExec.
        let chunk2 = Arc::new(
            TestChunk::new(table_name)
                .with_id(2)
                .with_dummy_parquet_file()
                .with_tag_column("tag1")
                .with_tag_column("tag2")
                .with_f64_field_column("field")
                .with_time_column(),
        ) as Arc<dyn QueryChunk>;
        let schema = chunk1.schema().clone();

        let ctx = IOxSessionContext::with_testing();
        let state = ctx.inner().state();

        let provider = ProviderBuilder::new(Arc::from(table_name), schema)
            .add_chunk(Arc::clone(&chunk1))
            .add_chunk(Arc::clone(&chunk2))
            .build()
            .unwrap();

        // simple plan
        let plan = provider.scan(&state, None, &[], None).await.unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[field@0 as field, tag1@1 as tag1, tag2@2 as tag2, time@3 as time]"
        - "   DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
        - "     UnionExec"
        - "       RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "       ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );

        // projection
        let plan = provider
            .scan(&state, Some(&vec![1, 3]), &[], None)
            .await
            .unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[tag1@1 as tag1, time@3 as time]"
        - "   DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
        - "     UnionExec"
        - "       RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "       ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );

        // filters
        let expr = vec![lit(false)];
        let expr_ref = expr.iter().collect::<Vec<_>>();
        assert_eq!(
            provider.supports_filters_pushdown(&expr_ref).unwrap(),
            vec![TableProviderFilterPushDown::Exact]
        );
        let plan = provider.scan(&state, None, &expr, None).await.unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[field@0 as field, tag1@1 as tag1, tag2@2 as tag2, time@3 as time]"
        - "   FilterExec: false"
        - "     DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
        - "       UnionExec"
        - "         RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "         ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );

        // limit pushdown is unimplemented at the moment
        let plan = provider.scan(&state, None, &[], Some(1)).await.unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[field@0 as field, tag1@1 as tag1, tag2@2 as tag2, time@3 as time]"
        - "   DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
        - "     UnionExec"
        - "       RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "       ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );
    }

    /// Same scans with de-dup disabled: no DeduplicateExec in the plans, and
    /// filter pushdown is only Inexact.
    #[tokio::test]
    async fn provider_scan_no_dedup() {
        let table_name = "t";
        let chunk1 = Arc::new(
            TestChunk::new(table_name)
                .with_id(1)
                .with_tag_column("tag1")
                .with_tag_column("tag2")
                .with_f64_field_column("field")
                .with_time_column(),
        ) as Arc<dyn QueryChunk>;
        let chunk2 = Arc::new(
            TestChunk::new(table_name)
                .with_id(2)
                .with_dummy_parquet_file()
                .with_tag_column("tag1")
                .with_tag_column("tag2")
                .with_f64_field_column("field")
                .with_time_column(),
        ) as Arc<dyn QueryChunk>;
        let schema = chunk1.schema().clone();

        let ctx = IOxSessionContext::with_testing();
        let state = ctx.inner().state();

        let provider = ProviderBuilder::new(Arc::from(table_name), schema)
            .add_chunk(Arc::clone(&chunk1))
            .add_chunk(Arc::clone(&chunk2))
            .with_enable_deduplication(false)
            .build()
            .unwrap();

        // simple plan
        let plan = provider.scan(&state, None, &[], None).await.unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[field@0 as field, tag1@1 as tag1, tag2@2 as tag2, time@3 as time]"
        - "   UnionExec"
        - "     RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "     ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );

        // projection
        let plan = provider
            .scan(&state, Some(&vec![1, 3]), &[], None)
            .await
            .unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[tag1@1 as tag1, time@3 as time]"
        - "   UnionExec"
        - "     RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "     ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );

        // filters
        // Expressions on fields are NOT pushed down because they cannot be pushed through de-dup.
        let expr = vec![
            lit(false),
            col("tag1").eq(lit("foo")),
            col("field").eq(lit(1.0)),
        ];
        let expr_ref = expr.iter().collect::<Vec<_>>();
        assert_eq!(
            provider.supports_filters_pushdown(&expr_ref).unwrap(),
            vec![
                TableProviderFilterPushDown::Inexact,
                TableProviderFilterPushDown::Inexact,
                TableProviderFilterPushDown::Inexact
            ]
        );
        let plan = provider.scan(&state, None, &expr, None).await.unwrap();
        // Note: the `field` predicate is absent from the FilterExec below.
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[field@0 as field, tag1@1 as tag1, tag2@2 as tag2, time@3 as time]"
        - "   FilterExec: false AND tag1@1 = CAST(foo AS Dictionary(Int32, Utf8))"
        - "     UnionExec"
        - "       RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "       ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );

        // limit pushdown is unimplemented at the moment
        let plan = provider.scan(&state, None, &[], Some(1)).await.unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[field@0 as field, tag1@1 as tag1, tag2@2 as tag2, time@3 as time]"
        - "   UnionExec"
        - "     RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "     ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );
    }

    /// Retention predicate (`time > …`) flows through `scan` like any other
    /// filter and lands in a FilterExec above the de-dup.
    #[tokio::test]
    async fn provider_scan_retention() {
        let table_name = "t";
        let pred = Predicate::default()
            .with_retention(100)
            .filter_expr()
            .unwrap();
        let chunk1 = Arc::new(
            TestChunk::new(table_name)
                .with_id(1)
                .with_tag_column("tag1")
                .with_tag_column("tag2")
                .with_f64_field_column("field")
                .with_time_column(),
        ) as Arc<dyn QueryChunk>;
        let chunk2 = Arc::new(
            TestChunk::new(table_name)
                .with_id(2)
                .with_dummy_parquet_file()
                .with_tag_column("tag1")
                .with_tag_column("tag2")
                .with_f64_field_column("field")
                .with_time_column(),
        ) as Arc<dyn QueryChunk>;
        let schema = chunk1.schema().clone();

        let ctx = IOxSessionContext::with_testing();
        let state = ctx.inner().state();

        let provider = ProviderBuilder::new(Arc::from(table_name), schema)
            .add_chunk(Arc::clone(&chunk1))
            .add_chunk(Arc::clone(&chunk2))
            .build()
            .unwrap();

        // simple plan
        let plan = provider
            .scan(&state, None, &[pred.clone()], None)
            .await
            .unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[field@0 as field, tag1@1 as tag1, tag2@2 as tag2, time@3 as time]"
        - "   FilterExec: time@3 > 100"
        - "     DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
        - "       UnionExec"
        - "         RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "         ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );

        // projection
        let plan = provider
            .scan(&state, Some(&vec![1, 3]), &[pred.clone()], None)
            .await
            .unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[tag1@1 as tag1, time@3 as time]"
        - "   FilterExec: time@3 > 100"
        - "     DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
        - "       UnionExec"
        - "         RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "         ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );

        // filters
        let expr = vec![lit(false), pred.clone()];
        let expr_ref = expr.iter().collect::<Vec<_>>();
        assert_eq!(
            provider.supports_filters_pushdown(&expr_ref).unwrap(),
            vec![
                TableProviderFilterPushDown::Exact,
                TableProviderFilterPushDown::Exact
            ]
        );
        let plan = provider.scan(&state, None, &expr, None).await.unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[field@0 as field, tag1@1 as tag1, tag2@2 as tag2, time@3 as time]"
        - "   FilterExec: false AND time@3 > 100"
        - "     DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
        - "       UnionExec"
        - "         RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "         ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );

        // limit pushdown is unimplemented at the moment
        let plan = provider.scan(&state, None, &[pred], Some(1)).await.unwrap();
        insta::assert_yaml_snapshot!(
            format_execution_plan(&plan),
            @r###"
        ---
        - " ProjectionExec: expr=[field@0 as field, tag1@1 as tag1, tag2@2 as tag2, time@3 as time]"
        - "   FilterExec: time@3 > 100"
        - "     DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
        - "       UnionExec"
        - "         RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
        - "         ParquetExec: file_groups={1 group: [[2.parquet]]}, projection=[field, tag1, tag2, time, __chunk_order], output_ordering=[__chunk_order@4 ASC]"
        "###
        );
    }
}
// Auto-generated (svd2rust-style) accessors for the RTC calibration register
// (CALR). Fields: CALM (9-bit "calibration minus", bits 0:8), CALW16 /
// CALW8 (calibration window selection, bits 13/14) and CALP ("calibration
// plus", bit 15).
#[doc = "Reader of register CALR"]
pub type R = crate::R<u32, super::CALR>;
#[doc = "Writer for register CALR"]
pub type W = crate::W<u32, super::CALR>;
#[doc = "Register CALR `reset()`'s with value 0"]
impl crate::ResetValue for super::CALR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `CALM`"]
pub type CALM_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `CALM`"]
pub struct CALM_W<'a> {
    w: &'a mut W,
}
impl<'a> CALM_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // `unsafe`: the caller must guarantee `value` is a valid 9-bit field
    // value; bits above the 0x01ff mask are discarded.
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x01ff) | ((value as u32) & 0x01ff);
        self.w
    }
}
#[doc = "Reader of field `CALW16`"]
pub type CALW16_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CALW16`"]
pub struct CALW16_W<'a> {
    w: &'a mut W,
}
impl<'a> CALW16_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // Bit 13 of the register.
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
        self.w
    }
}
#[doc = "Reader of field `CALW8`"]
pub type CALW8_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CALW8`"]
pub struct CALW8_W<'a> {
    w: &'a mut W,
}
impl<'a> CALW8_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // Bit 14 of the register.
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
        self.w
    }
}
#[doc = "Reader of field `CALP`"]
pub type CALP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CALP`"]
pub struct CALP_W<'a> {
    w: &'a mut W,
}
impl<'a> CALP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    // Bit 15 of the register.
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
        self.w
    }
}
impl R {
    // NOTE: "2^20 RTCCLK pulses" below was garbled to "220" in the upstream
    // SVD description (lost superscript); 32 s * 32768 Hz = 2^20 pulses.
    #[doc = "Bits 0:8 - Calibration minus The frequency of the calendar is reduced by masking CALM out of 2^20 RTCCLK pulses (32 seconds if the input frequency is 32768 Hz). This decreases the frequency of the calendar with a resolution of 0.9537 ppm. To increase the frequency of the calendar, this feature should be used in conjunction with CALP. See Section 24.3.12: RTC smooth digital calibration on page 13."]
    #[inline(always)]
    pub fn calm(&self) -> CALM_R {
        CALM_R::new((self.bits & 0x01ff) as u16)
    }
    #[doc = "Bit 13 - Use a 16-second calibration cycle period When CALW16 is set to 1, the 16-second calibration cycle period is selected. This bit must not be set to 1 if CALW8=1. Note: CALM\\[0\\] is stuck at 0 when CALW16 = 1. Refer to Section 24.3.12: RTC smooth digital calibration."]
    #[inline(always)]
    pub fn calw16(&self) -> CALW16_R {
        CALW16_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bit 14 - Use an 8-second calibration cycle period When CALW8 is set to 1, the 8-second calibration cycle period is selected. Note: CALM\\[1:0\\] are stuck at 00 when CALW8 = 1. Refer to Section 24.3.12: RTC smooth digital calibration."]
    #[inline(always)]
    pub fn calw8(&self) -> CALW8_R {
        CALW8_R::new(((self.bits >> 14) & 0x01) != 0)
    }
    #[doc = "Bit 15 - Increase frequency of RTC by 488.5 ppm This feature is intended to be used in conjunction with CALM, which lowers the frequency of the calendar with a fine resolution. If the input frequency is 32768 Hz, the number of RTCCLK pulses added during a 32-second window is calculated as follows: (512 * CALP) - CALM. Refer to Section 24.3.12: RTC smooth digital calibration."]
    #[inline(always)]
    pub fn calp(&self) -> CALP_R {
        CALP_R::new(((self.bits >> 15) & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bits 0:8 - Calibration minus The frequency of the calendar is reduced by masking CALM out of 2^20 RTCCLK pulses (32 seconds if the input frequency is 32768 Hz). This decreases the frequency of the calendar with a resolution of 0.9537 ppm. To increase the frequency of the calendar, this feature should be used in conjunction with CALP. See Section 24.3.12: RTC smooth digital calibration on page 13."]
    #[inline(always)]
    pub fn calm(&mut self) -> CALM_W {
        CALM_W { w: self }
    }
    #[doc = "Bit 13 - Use a 16-second calibration cycle period When CALW16 is set to 1, the 16-second calibration cycle period is selected. This bit must not be set to 1 if CALW8=1. Note: CALM\\[0\\] is stuck at 0 when CALW16 = 1. Refer to Section 24.3.12: RTC smooth digital calibration."]
    #[inline(always)]
    pub fn calw16(&mut self) -> CALW16_W {
        CALW16_W { w: self }
    }
    #[doc = "Bit 14 - Use an 8-second calibration cycle period When CALW8 is set to 1, the 8-second calibration cycle period is selected. Note: CALM\\[1:0\\] are stuck at 00 when CALW8 = 1. Refer to Section 24.3.12: RTC smooth digital calibration."]
    #[inline(always)]
    pub fn calw8(&mut self) -> CALW8_W {
        CALW8_W { w: self }
    }
    #[doc = "Bit 15 - Increase frequency of RTC by 488.5 ppm This feature is intended to be used in conjunction with CALM, which lowers the frequency of the calendar with a fine resolution. If the input frequency is 32768 Hz, the number of RTCCLK pulses added during a 32-second window is calculated as follows: (512 * CALP) - CALM. Refer to Section 24.3.12: RTC smooth digital calibration."]
    #[inline(always)]
    pub fn calp(&mut self) -> CALP_W {
        CALP_W { w: self }
    }
}